Dataset columns:
commit: stringlengths (40 to 40)
subject: stringlengths (4 to 1.73k)
repos: stringlengths (5 to 127k)
old_file: stringlengths (2 to 751)
new_file: stringlengths (2 to 751)
new_contents: stringlengths (1 to 8.98k)
old_contents: stringlengths (0 to 6.59k)
license: stringclasses (13 values)
lang: stringclasses (23 values)
640c86e17b10d8f892a4036ade4ce7b8dca30347
Implement dragon-blood-clan module.
thsnr/gygax
gygax/modules/dbc.py
gygax/modules/dbc.py
# -*- coding: utf-8 -*-

"""
:mod:`gygax.modules.dbc` --- Module for playing Dragon-Blood-Clan.
==================================================================
"""

import gygax.modules.roll as roll

def dbc(bot, sender, text):
    need = 6        # The next die needed.
    dice_count = 5  # The number of dice to roll.
    rolls_left = 3  # The number of rolls left.
    reply = []      # The reply to the sender.

    # Roll until we have 6, 5 and 4 or we run out of rolls.
    while need > 3 and rolls_left > 0:
        results = roll.roll_dice(dice_count, 6)
        reply.append(" + ".join(map(str, results)))
        rolls_left -= 1

        # Check for needed dice
        while need > 3 and need in results:
            results.remove(need)
            need -= 1
            dice_count -= 1

    if need > 3:
        reply.append("no luck")
    else:
        reply.append("score: {}".format(sum(results)))
    reply.append("rolls left: {}".format(rolls_left))
    bot.reply(", ".join(reply))
dbc.command = ".dbc"
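A minimal way to exercise the handler outside IRC, assuming only the bot.reply() interface the function actually uses; the StubBot class is hypothetical:

class StubBot(object):
    def reply(self, text):
        print(text)  # the real gygax bot would send this back over IRC

dbc(StubBot(), "someone", "")  # sender and text are unused by dbc()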
mit
Python
2d81274953629e34cc4b0232782cb910d1d459c9
Add process_watcher mod (triggers Process.Creation event)
JokerQyou/Modder2
modder/mods/process_watcher.py
modder/mods/process_watcher.py
# coding: utf-8
import atexit
import platform

from modder import on, trigger

if platform.system() == 'Windows':
    import pythoncom
    import wmi

    @on('Modder.Started')
    def watch_process_creation(event):
        pythoncom.CoInitialize()
        atexit.register(pythoncom.CoUninitialize)

        wmi_root = wmi.WMI()
        process_watcher = wmi_root.Win32_Process.watch_for(
            notification_type='Creation',
            delay_secs=2
        )

        try:
            while 1:
                try:
                    new_process = process_watcher()
                    trigger(
                        'Process.Created',
                        data={
                            'caption': new_process.wmi_property('Caption').value,
                            'process_name': new_process.wmi_property('Name').value,
                            'executable_path': new_process.wmi_property('ExecutablePath').value,
                            'pid': new_process.wmi_property('ProcessId').value,
                        }
                    )
                except Exception as e:
                    print 'inner error:', e
                    pass
        except Exception as e:
            print 'outer error:', e
            pass
        finally:
            pythoncom.CoUninitialize()
else:
    pass
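For context, a hedged sketch of how another mod might consume the event triggered above, reusing the same @on decorator imported in this file; the assumption that the handler receives an event object carrying the trigger() payload as event.data is mine, not confirmed by this snippet:

@on('Process.Created')
def log_new_process(event):
    # Assumes the framework delivers the trigger() data dict as event.data.
    print event.data['process_name'], event.data['pid']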
mit
Python
03618b710146cdfacb7a8913a65809227e71546c
add test
chainer/chainercv,yuyu2172/chainercv,yuyu2172/chainercv,chainer/chainercv,pfnet/chainercv
tests/transforms_tests/image_tests/test_ten_crop.py
tests/transforms_tests/image_tests/test_ten_crop.py
import unittest

import numpy as np

from chainer import testing
from chainercv.transforms import ten_crop


class TestTenCrop(unittest.TestCase):

    def test_ten_crop(self):
        img = np.random.uniform(size=(3, 48, 32))
        out = ten_crop(img, (48, 32))
        self.assertEqual(out.shape, (10, 3, 48, 32))
        for crop in out[:5]:
            np.testing.assert_equal(crop, img)
        for crop in out[5:]:
            np.testing.assert_equal(crop[:, :, ::-1], img)

        out = ten_crop(img, (24, 12))
        self.assertEqual(out.shape, (10, 3, 24, 12))


testing.run_module(__name__, __file__)
mit
Python
182a4ceeeb8a9b9e3f5071427da1ca0ec847f368
Check in first cut at oban code. Taken directly from code used for class.
jrleeman/MetPy,Unidata/MetPy,ahaberlie/MetPy,deeplycloudy/MetPy,ahaberlie/MetPy,Unidata/MetPy,dopplershift/MetPy,ahill818/MetPy,ShawnMurd/MetPy,jrleeman/MetPy,dopplershift/MetPy
trunk/metpy/tools/oban.py
trunk/metpy/tools/oban.py
from itertools import izip

import numpy as N

from constants import *


def rms(diffs):
    return N.sqrt(N.average(diffs**2))


def grid_point_dists(grid_x, grid_y, ob_x, ob_y):
    "Calculates distances for each grid point to every ob point"
    return N.hypot(grid_x[..., N.newaxis] - ob_x[N.newaxis, N.newaxis, ...],
                   grid_y[..., N.newaxis] - ob_y[N.newaxis, N.newaxis, ...])


def adjust_field(field, xgrad, ygrad, grid_x, grid_y, ob_x, ob_y):
    '''Makes a 3D field with the data adjusted by the gradient to each grid
    point from every observation point'''
    return field + xgrad * (grid_x[..., N.newaxis] - ob_x[N.newaxis, ...])\
        + ygrad * (grid_y[..., N.newaxis] - ob_y[N.newaxis, ...])


def analyze_grid_multipass(ob_data, grid_x, grid_y, ob_x, ob_y, num_passes,
                           weight_func, params, background=None):
    '''Calculate a value at each grid point using multiple passes of an
    objective analysis technique'''
    if background is None:
        mod_param = (params[0], 1.0)
        background = analyze_grid(ob_data, grid_x, grid_y, ob_x, ob_y,
                                  weight_func, mod_param)
        num_passes -= 1
    for i in range(num_passes):
        ob_incs = get_ob_incs(ob_x, ob_y, ob_data, grid_x[0], grid_y[:, 0],
                              background)
        print 'pass: %d rms: %f' % (i, rms(ob_incs))
        background = analyze_grid(ob_incs, grid_x, grid_y, ob_x, ob_y,
                                  weight_func, params) + background
    return background


def analyze_grid(ob_data, grid_x, grid_y, ob_x, ob_y, weight_func, params):
    '''Calculates a value at each grid point based on the observed data in
    ob_data.  grid_point_dists is a 3D array containing the distance for
    each grid point to every observation'''
    try:
        params[0]
    except TypeError:
        params = (params,)
    weights = weight_func(grid_point_dists(grid_x, grid_y, ob_x, ob_y),
                          *params)
    final = (weights * ob_data).sum(axis=2) / weights.sum(axis=2)
    try:
        final[N.isnan(final)] = 0.0
    except:
        pass
    return final


def uniform_weights(dists, radius):
    weights = N.ones_like(dists)
    weights[dists > radius] = 0.0
    return weights


def cressman_weights(dists, radius):
    dist_sq = dists * dists
    rad_sq = radius * radius
    weights = (rad_sq - dist_sq) / (rad_sq + dist_sq)
    weights[dists > radius] = 0.0
    return weights


def barnes_weights(dists, kappa0, gamma):
    weights = N.exp(-dists**2 / (kappa0 * gamma))
    critical_radius = N.sqrt(15 * kappa0)
    weights[dists > critical_radius] = 0.0
    return weights


def bilinear(x, y, data, xloc, yloc):
    xind = find_axis_index(x, xloc)
    yind = find_axis_index(y, yloc)
    xw = (xloc - x[xind]) / (x[xind+1] - x[xind])
    x_weights = N.array([1-xw, xw])
    yw = (yloc - y[yind]) / (y[yind+1] - y[yind])
    y_weights = N.array([1-yw, yw])
    return N.dot(y_weights, N.dot(data[yind:yind+2, xind:xind+2], x_weights))


def find_axis_index(axis_vals, location):
    if location > axis_vals[-1] or location < axis_vals[0]:
        raise ValueError, "Location out of bounds"
    for ind, val in enumerate(axis_vals):
        if location < val:
            break
    return ind - 1


def get_wind_comps(spd, dir):
    u = -spd * N.sin(dir * rad_per_deg)
    v = -spd * N.cos(dir * rad_per_deg)
    return u, v


def get_ob_incs(obx, oby, ob, grid_x, grid_y, field, cressman_radius=None):
    ob_inc = list()
    mask = N.zeros(ob.size)
    for x, y, value in izip(obx, oby, ob):
        try:
            interp_val = bilinear(grid_x, grid_y, field, x, y)
            ob_inc.append(value - interp_val)
        except ValueError:
            if cressman_radius is None:
                mask[len(ob_inc)] = 1
                ob_inc.append(0.0)
            else:
                # Ugly hack here to allow the one station off the grid to be
                # interpolated
                xg, yg = N.meshgrid(grid_x, grid_y)
                interp_val = analyze_grid(field.flatten(),
                                          N.array(x, ndmin=2),
                                          N.array(y, ndmin=2),
                                          xg.flatten(), yg.flatten(),
                                          cressman_weights, cressman_radius)
                if N.isnan(interp_val):
                    interp_val = value
                    # mask[len(ob_inc) - 1] = 1
                ob_inc.append(value - interp_val.flatten()[0])
    return N.ma.array(ob_inc, mask=mask)


def calc_barnes_param(spacing):
    '''Calculate the Barnes analysis smoothing parameter, kappa0, from the
    average grid spacing'''
    return 5.052 * (2.0 * spacing / N.pi)**2
bsd-3-clause
Python
8a5f5fa11feefec2a81c3c1c2419b14e45a55bd0
Add dis01.py
devlights/try-python
trypython/stdlib/dis01.py
trypython/stdlib/dis01.py
# coding: utf-8

"""
Sample demonstrating the dis module.
"""
import dis

from trypython.common.commoncls import SampleBase
from trypython.common.commonfunc import hr


# noinspection SpellCheckingInspection
class Sample(SampleBase):
    def exec(self):
        ##############################################
        # The dis module helps with analysing Python
        # bytecode.
        #
        # Broadly, there are two ways to use it:
        #   1) dis.dis()
        #   2) dis.Bytecode()
        #
        # 1) disassembles the given object and prints the result.
        #    If nothing is passed for the file argument, it prints
        #    to standard output.
        #
        # 2) is an API added in Python 3.4.
        #    It is called in much the same way as 1), but instead of
        #    printing the result straight away, it wraps it in a
        #    Bytecode object and returns that.
        ##############################################
        listcomp_str = 'r = [x for x in range(1000000) if x % 2 == 0]'
        forloop_str = '''
r = []
for x in range(1000000):
    if x % 2 == 0:
        r.append(x)
'''

        ###############################################
        # dis.dis()
        ###############################################
        hr('dis.dis(listcomp_str)')
        dis.dis(listcomp_str)

        hr('dis.dis(forloop_str)')
        dis.dis(forloop_str)

        ###############################################
        # dis.Bytecode()
        #
        # API added to the dis module in Python 3.4.
        # It keeps the code object and the result of
        # dis.code_info() internally, which makes it
        # the more convenient of the two.
        ###############################################
        hr('dis.Bytecode(listcomp_str)')
        listcomp_bytecode = dis.Bytecode(listcomp_str)
        print(listcomp_bytecode.codeobj)
        print(listcomp_bytecode.dis())
        print(listcomp_bytecode.info())

        hr('dis.Bytecode(forloop_str)')
        forloop_bytecode = dis.Bytecode(forloop_str)
        print(forloop_bytecode.codeobj)
        print(forloop_bytecode.dis())
        print(forloop_bytecode.info())


def go():
    obj = Sample()
    obj.exec()


if __name__ == '__main__':
    go()
mit
Python
4835cac3b5ea15671f3da25cbc6e6db4bad725c9
Create crawl-twse.py
macchiang/practice-on-big-data
crawl/crawl-twse.py
crawl/crawl-twse.py
import requests
from bs4 import BeautifulSoup

req = requests.get("http://www.twse.com.tw/ch/trading/fund/BFI82U/BFI82U.php?report1=day&input_date=105%2F05%2F31&mSubmit=%ACd%B8%DF&yr=2016&w_date=20160530&m_date=20160501")
req.encoding = 'utf-8'
html = req.text.encode('utf-8')
soup = BeautifulSoup(html, "html.parser")
for td in soup.findAll("td", {"class": "basic2"}):
    print td.text
mit
Python
bd597a8f34d6f95bc445550bcc239ff67d0321f4
Add missing file.
schinckel/django-boardinghouse,schinckel/django-boardinghouse,schinckel/django-boardinghouse
tests/tests/utils.py
tests/tests/utils.py
from django.db import connection
from django.utils import six


def get_table_list():
    with connection.cursor() as cursor:
        table_list = connection.introspection.get_table_list(cursor)
    if table_list and not isinstance(table_list[0], six.string_types):
        table_list = [table.name for table in table_list]
    return table_list
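The helper smooths over the Django introspection change where get_table_list() started returning TableInfo objects instead of plain strings. A hedged usage sketch in a test (the table name is hypothetical):

def test_shared_table_exists():
    # 'auth_user' stands in for whatever table the schema under test creates.
    assert 'auth_user' in get_table_list()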
bsd-3-clause
Python
66a23782438d9c16111c25c56090f4c92f54dde1
Add integration test for the trivial cycle simulation.
mbmcgarry/cyclus,hodger/cyclus,hodger/cyclus,mbmcgarry/cyclus,Baaaaam/cyclus,rwcarlsen/cyclus,gidden/cyclus,mbmcgarry/cyclus,Baaaaam/cyclus,hodger/cyclus,gidden/cyclus,hodger/cyclus,Baaaaam/cyclus,rwcarlsen/cyclus,rwcarlsen/cyclus,mbmcgarry/cyclus,rwcarlsen/cyclus,gidden/cyclus,gidden/cyclus,hodger/cyclus
integration_tests/test_trivial_cycle.py
integration_tests/test_trivial_cycle.py
#! /usr/bin/python

from nose.tools import assert_equal, assert_true
from numpy.testing import assert_array_equal
import os
import tables
import numpy as np
from tools import check_cmd

""" Tests """

def test_source_to_sink():
    """Tests simulations with a facility that has a conversion factor.

    In the future, may eliminate checks that needed tables exist, and rely on
    errors. In the future, may eliminate tests for the existence and
    uniqueness of the facilities. In addition, may eliminate other code and
    tests not relevant to integration testing.
    """
    # Cyclus simulation input for source_to_sink
    sim_inputs = ["./Inputs/trivial_cycle_deplete.xml",
                  "./Inputs/trivial_cycle_steady.xml",
                  "./Inputs/trivial_cycle_growth.xml"]
    # Conversion factors for the three simulations
    k_factors = [0.5, 1, 2]

    sim_info = zip(sim_inputs, k_factors)

    for sim_input, k_factor in sim_info:
        holdsrtn = [1]  # needed because nose does not send() to test generator
        cmd = ["cyclus", "-o", "./output_temp.h5", "--input-file", sim_input]
        check_cmd(cmd, '.', holdsrtn)
        rtn = holdsrtn[0]
        if rtn != 0:
            return  # don't execute further commands

        output = tables.open_file("./output_temp.h5", mode="r")
        # tables of interest
        paths = ["/AgentEntry", "/AgentExit", "/Resources", "/Transactions",
                 "/Info"]
        # Check if these tables exist
        tables_there = True
        for path in paths:
            yield assert_true, output.__contains__(path)
            # Have to stop further operations after these tests
            if tables_there and not output.__contains__(path):
                tables_there = False

        if not tables_there:
            output.close()
            os.remove("./output_temp.h5")
            return

        # Get specific tables and columns
        agent_entry = output.get_node("/AgentEntry")[:]
        agent_exit = output.get_node("/AgentExit")[:]
        info = output.get_node("/Info")[:]
        resources = output.get_node("/Resources")[:]
        transactions = output.get_node("/Transactions")[:]

        # Find agent ids
        agent_ids = agent_entry["AgentId"]
        agent_impl = agent_entry["Implementation"]
        duration = info["Duration"][0]

        facility_id = []
        i = 0
        for impl in agent_impl:
            if impl == "KFacility":
                facility_id.append(i)
            i += 1

        yield assert_equal, len(facility_id), 1

        facility_id = agent_ids[facility_id[0]]

        sender_ids = transactions["SenderId"]
        receiver_ids = transactions["ReceiverId"]
        expected_sender_array = np.empty(sender_ids.size)
        expected_sender_array.fill(facility_id)
        expected_receiver_array = np.empty(receiver_ids.size)
        expected_receiver_array.fill(sink_id)
        yield assert_array_equal, sender_ids, expected_sender_array
        yield assert_array_equal, receiver_ids, expected_receiver_array

        # Transaction ids must be equal range from 1 to the number of rows
        expected_trans_ids = np.arange(0, sender_ids.size, 1)
        yield assert_array_equal, transactions["TransactionId"], expected_trans_ids

        # Track transacted resources
        resource_ids = resources["ResourceId"]
        quantities = resources["Quantity"]
        expected_quantities = np.empty(resource_ids.size)
        # Expect that every transaction quantity is the same amount
        initial_inv = quantities[0]
        for i in range(expected_quantities.size):
            expected_quantities[i] = initial_inv * k_factor ** i

        # Should find a logic for almost equal cases
        yield assert_array_equal, quantities, expected_quantities

        output.close()
        os.remove("./output_temp.h5")
bsd-3-clause
Python
1ba7850e57113e6b1ca1be5064cef5277a15598b
Add script: /Scripts/Others/test.py
Vladimir-Ivanov-Git/raw-packet,Vladimir-Ivanov-Git/raw-packet
Scripts/Others/test.py
Scripts/Others/test.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# region Description
"""
test.py: Test new technique
Author: Vladimir Ivanov
License: MIT
Copyright 2019, Raw-packet Project
"""
# endregion

# region Import

# region Add project root path
from sys import path
from os.path import dirname, abspath
path.append(dirname(dirname(dirname(abspath(__file__)))))
# endregion

# region Raw-packet modules
from raw_packet.Utils.base import Base
from raw_packet.Utils.network import MDNS_raw, ICMPv6_raw
from raw_packet.Utils.network import Sniff_raw
# endregion

# region Import libraries
from time import sleep
from json import dumps
from socket import socket, AF_PACKET, SOCK_RAW
from os.path import dirname, abspath
project_root_path = dirname(dirname(dirname(abspath(__file__))))
# endregion

# endregion

# region Authorship information
__author__ = 'Vladimir Ivanov'
__copyright__ = 'Copyright 2019, Raw-packet Project'
__credits__ = ['']
__license__ = 'MIT'
__version__ = '0.2.1'
__maintainer__ = 'Vladimir Ivanov'
__email__ = '[email protected]'
__status__ = 'Development'
# endregion

# region Check user, platform and print banner
Base = Base()
Base.check_user()
Base.check_platform()
Base.print_banner()
# endregion

# region Main function
if __name__ == "__main__":
    try:
        print('\n')
        Base.print_info('Network functions:')
        print('\nMac address:')
        print(Base.get_interface_mac_address('eth0', False))
        print('\nIPv4 address:')
        print(Base.get_interface_ip_address('eth0', False))
        print('\nIPv6 link local address:')
        print(Base.get_interface_ipv6_link_address('eth0', False))
        print('\nIPv6 link local address by mac address:')
        print(Base.make_ipv6_link_address(Base.get_interface_mac_address('eth0', False)))
        print('\nIPv6 link global address:')
        print(Base.get_interface_ipv6_glob_address('eth0', False))
        print('\nIPv6 global addresses:')
        print(Base.get_interface_ipv6_glob_addresses('eth0'))
        print('\nNetwork mask:')
        print(Base.get_interface_netmask('eth0', False))
        print('\nFirst IPv4:')
        print(Base.get_first_ip_on_interface('eth0', False))
        print('\nSecond IPv4:')
        print(Base.get_second_ip_on_interface('eth0', False))
        print('\nPenultimate IPv4:')
        print(Base.get_penultimate_ip_on_interface('eth0', False))
        print('\nLast IPv4:')
        print(Base.get_last_ip_on_interface('eth0', False))
        print('\nRandom IPv4:')
        print(Base.get_random_ip_on_interface('eth0', False))
        print('\nIPv4 network:')
        print(Base.get_interface_network('eth0', False))
        print('\nIPv4 broadcast:')
        print(Base.get_interface_broadcast('eth0', False))
        print('\nIPv4 gateway:')
        print(Base.get_interface_ipv4_gateway('eth0', False))
        print('\nIPv6 gateway:')
        print(Base.get_interface_ipv6_gateway('eth0', False))

        print('\n')
        Base.print_info('Software functions:')
        print('\nApt list installed software:')
        print(Base.apt_list_installed_packages())
        print('\nCheck installed software: apache2')
        print(Base.check_installed_software('apache2', False))

        print('\n')
        Base.print_info('Process functions:')
        print('\nProcess apache2 pid:')
        print(Base.get_process_pid('apache2'))
        print('\nProcess pid by listen port 80:')
        print(Base.get_process_pid_by_listen_port(80))

    except KeyboardInterrupt:
        Base.print_info("Exit")
        exit(0)
# endregion
mit
Python
be904e21db2012ac8f72a141afd9b93da2bfb262
Create http responses
bameda/monarch.old,bameda/monarch.old,bameda/monarch.old,bameda/monarch.old
monarch/base/http/responses.py
monarch/base/http/responses.py
# Copyright (C) 2015 David Barragán <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from django.http.response import HttpResponseBase
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.http import HttpResponsePermanentRedirect

from . import status


class HttpResponse(HttpResponse):
    def __init__(self, content="", *args, **kwarg):
        self.content_data = content
        super(HttpResponse, self).__init__(content, *args, **kwarg)

    @property
    def content_data(self):
        return self.__content_data

    @content_data.setter
    def content_data(self, value):
        self.__content_data = value


class Ok(HttpResponse):
    status_code = status.HTTP_200_OK

class Created(HttpResponse):
    status_code = status.HTTP_201_CREATED

class Accepted(HttpResponse):
    status_code = status.HTTP_202_ACCEPTED

class NoContent(HttpResponse):
    status_code = status.HTTP_204_NO_CONTENT

class MultipleChoices(HttpResponse):
    status_code = status.HTTP_300_MULTIPLE_CHOICES

class MovedPermanently(HttpResponsePermanentRedirect):
    status_code = status.HTTP_301_MOVED_PERMANENTLY

class Redirect(HttpResponseRedirect):
    status_code = status.HTTP_302_FOUND

class SeeOther(HttpResponse):
    status_code = status.HTTP_303_SEE_OTHER

class NotModified(HttpResponse):
    status_code = status.HTTP_304_NOT_MODIFIED

class TemporaryRedirect(HttpResponse):
    status_code = status.HTTP_307_TEMPORARY_REDIRECT

class BadRequest(HttpResponse):
    status_code = status.HTTP_400_BAD_REQUEST

class Unauthorized(HttpResponse):
    status_code = status.HTTP_401_UNAUTHORIZED

class Forbidden(HttpResponse):
    status_code = status.HTTP_403_FORBIDDEN

class NotFound(HttpResponse):
    status_code = status.HTTP_404_NOT_FOUND

class MethodNotAllowed(HttpResponse):
    status_code = status.HTTP_405_METHOD_NOT_ALLOWED

class NotAcceptable(HttpResponse):
    status_code = status.HTTP_406_NOT_ACCEPTABLE

class Conflict(HttpResponse):
    status_code = status.HTTP_409_CONFLICT

class Gone(HttpResponse):
    status_code = status.HTTP_410_GONE

class PreconditionFailed(HttpResponse):
    status_code = status.HTTP_412_PRECONDITION_FAILED

class UnsupportedMediaType(HttpResponse):
    status_code = status.HTTP_415_UNSUPPORTED_MEDIA_TYPE

class TooManyRequests(HttpResponse):
    status_code = status.HTTP_429_TOO_MANY_REQUESTS

class InternalServerError(HttpResponse):
    status_code = status.HTTP_500_INTERNAL_SERVER_ERROR

class NotImplemented(HttpResponse):
    status_code = status.HTTP_501_NOT_IMPLEMENTED
agpl-3.0
Python
72fcb82d33c4a4317630b6f2c7985e69ff9d3ce3
add some simple tests for lru_cache
arnehilmann/yum-repos,arnehilmann/yumrepos,arnehilmann/yum-repos,arnehilmann/yumrepos
src/unittest/python/backport_tests.py
src/unittest/python/backport_tests.py
#!/usr/bin/env python
from __future__ import print_function

import sys
import unittest

from backports import functools_lru_cache


class Test(unittest.TestCase):
    def test_with_bound_cache(self):
        @functools_lru_cache.lru_cache()
        def cachy(*args):
            return True

        self.assertTrue(cachy("foo"))
        self.assertTrue(cachy("bar"))
        self.assertTrue(cachy("foo"))
        print(cachy.cache_info())
        cachy.cache_clear()

    def test_with_boundless_cache(self):
        # maxsize=None means no eviction: an unbounded cache.
        @functools_lru_cache.lru_cache(maxsize=None)
        def cachy(*args):
            return True

        self.assertTrue(cachy("foo"))
        self.assertTrue(cachy("bar"))
        self.assertTrue(cachy("foo"))
        print(cachy.cache_info())
        cachy.cache_clear()

    def test_without_cache(self):
        # maxsize=0 disables caching entirely.
        @functools_lru_cache.lru_cache(maxsize=0)
        def cachy(*args):
            return True

        self.assertTrue(cachy("foo"))
        self.assertTrue(cachy("bar"))
        self.assertTrue(cachy("foo"))
        print(cachy.cache_info())
        cachy.cache_clear()


if __name__ == "__main__":
    sys.exit(unittest.main())
apache-2.0
Python
75a2c6fb7074e316908d12cfd6f1e03d9e0a1ba6
add new tool to generate new pipeline easily (outside of sequana repository)
sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana
sequana/scripts/start_pipeline.py
sequana/scripts/start_pipeline.py
# -*- coding: utf-8 -*-
#
#  This file is part of Sequana software
#
#  Copyright (c) 2016 - Sequana Development Team
#
#  File author(s):
#      Thomas Cokelaer <[email protected]>
#      Dimitri Desvillechabrol <[email protected]>,
#          <[email protected]>
#
#  Distributed under the terms of the 3-clause BSD license.
#  The full license is in the LICENSE file, distributed with this software.
#
#  website: https://github.com/sequana/sequana
#  documentation: http://sequana.readthedocs.io
#
##############################################################################
from snakemake import shell as shellcmd
import shutil
import glob
import sys
from optparse import OptionParser
import argparse


class Options(argparse.ArgumentParser):
    def __init__(self, prog="sequana_mapping"):
        usage = """Welcome to SEQUANA - create a new pipeline from scratch

            sequana_start_pipeline

        """
        description = """DESCRIPTION:

        """
        super(Options, self).__init__(usage=usage, prog=prog,
                                      description=description)
        #self.add_argument("--use-sambamba", dest="sambamba",
        #    action="store_true", default=False,
        #    help="""use sambamba instead of samtools for the sorting """)


def main(args=None):
    if args is None:
        args = sys.argv[:]

    user_options = Options(prog="sequana")

    # If --help or no options provided, show the help
    if "--help" in args:
        user_options.parse_args(["prog", "--help"])
    else:
        options = user_options.parse_args(args[1:])

    cmd = "cookiecutter https://github.com/sequana/sequana_pipeline_template"
    import subprocess
    subprocess.call(cmd.split())
bsd-3-clause
Python
48c139172e2eab43919ac9589ee58e3ff2009887
Work in progress
AnalogJ/lexicon,AnalogJ/lexicon
lexicon/providers/azure.py
lexicon/providers/azure.py
import json

import requests

from lexicon.providers.base import Provider as BaseProvider

MANAGEMENT_URL = 'https://management.azure.com'
API_VERSION = '2018-03-01-preview'

NAMESERVER_DOMAINS = ['azure.com']


def provider_parser(subparser):
    subparser.add_argument('--auth-credentials')


class Provider(BaseProvider):
    def __init__(self, config):
        super(Provider, self).__init__(config)
        self.domain_id = None
        self._access_token = None
        self._subscription_id = None

        if self._get_provider_option('auth_credentials').startswith('file::'):
            with open(self._get_provider_option('auth_credentials')
                      .replace('file::', '')) as file_h:
                data = file_h.read()
            self._credentials = json.loads(data)

    def _authenticate(self):
        ad_endpoint = self._credentials['activeDirectoryEndpointUrl']
        tenant_id = self._credentials['tenantId']
        client_id = self._credentials['clientId']
        client_secret = self._credentials['clientSecret']
        self._subscription_id = self._credentials['subscriptionId']

        assert ad_endpoint
        assert tenant_id
        assert client_id
        assert client_secret
        assert self._subscription_id

        url = '{0}/{1}/oauth2/token'.format(ad_endpoint, tenant_id)
        data = {
            'grant_type': 'client_credentials',
            'client_id': client_id,
            'client_secret': client_secret,
            'resource': MANAGEMENT_URL
        }

        result = requests.post(url, data=data)
        result.raise_for_status()

        self._access_token = result.json()['access_token']

        url = ('{0}/subscriptions/{1}/providers/Microsoft.Network/dnszones'
               .format(MANAGEMENT_URL, self._subscription_id))
        headers = {'Authorization': 'Bearer {0}'.format(self._access_token)}
        params = {'api-version': API_VERSION}

        result = requests.get(url, headers=headers, params=params)
        result.raise_for_status()

        print(result.json())

    def _request(self, action='GET', url='/', data=None, query_params=None):
        url = '{0}/subscriptions/{1}'
mit
Python
4485e7dd4b6d5a6199d99cdc9a852ff551fc384b
bump version number
neocogent/electrum,protonn/Electrum-Cash,kyuupichan/electrum,fyookball/electrum,neocogent/electrum,pooler/electrum-ltc,aasiutin/electrum,fireduck64/electrum,cryptapus/electrum,FairCoinTeam/electrum-fair,fujicoin/electrum-fjc,aasiutin/electrum,FairCoinTeam/electrum-fair,fujicoin/electrum-fjc,fyookball/electrum,imrehg/electrum,fireduck64/electrum,wakiyamap/electrum-mona,fireduck64/electrum,pknight007/electrum-vtc,wakiyamap/electrum-mona,vialectrum/vialectrum,spesmilo/electrum,lbryio/lbryum,argentumproject/electrum-arg,cryptapus/electrum-myr,romanz/electrum,cryptapus/electrum-myr,protonn/Electrum-Cash,cryptapus/electrum-uno,digitalbitbox/electrum,spesmilo/electrum,lbryio/lbryum,procrasti/electrum,vertcoin/electrum-vtc,dashpay/electrum-dash,cryptapus/electrum-myr,cryptapus/electrum-uno,vertcoin/electrum-vtc,pknight007/electrum-vtc,dabura667/electrum,procrasti/electrum,vertcoin/electrum-vtc,dashpay/electrum-dash,cryptapus/electrum-uno,dabura667/electrum,procrasti/electrum,dabura667/electrum,argentumproject/electrum-arg,fireduck64/electrum,imrehg/electrum,fyookball/electrum,cryptapus/electrum,kyuupichan/electrum,vertcoin/electrum-vtc,aasiutin/electrum,vialectrum/vialectrum,vialectrum/vialectrum,spesmilo/electrum,asfin/electrum,digitalbitbox/electrum,spesmilo/electrum,molecular/electrum,romanz/electrum,asfin/electrum,wakiyamap/electrum-mona,FairCoinTeam/electrum-fair,cryptapus/electrum-myr,dashpay/electrum-dash,cryptapus/electrum-uno,argentumproject/electrum-arg,digitalbitbox/electrum,dabura667/electrum,molecular/electrum,wakiyamap/electrum-mona,protonn/Electrum-Cash,imrehg/electrum,neocogent/electrum,cryptapus/electrum,asfin/electrum,pknight007/electrum-vtc,procrasti/electrum,romanz/electrum,pooler/electrum-ltc,digitalbitbox/electrum,imrehg/electrum,kyuupichan/electrum,dashpay/electrum-dash,molecular/electrum,fujicoin/electrum-fjc,pknight007/electrum-vtc,pooler/electrum-ltc,protonn/Electrum-Cash,FairCoinTeam/electrum-fair,pooler/electrum-ltc,argentumproject/electrum-arg,molecular/electrum,aasiutin/electrum
client/version.py
client/version.py
ELECTRUM_VERSION = "0.38" SEED_VERSION = 4 # bump this everytime the seed generation is modified
ELECTRUM_VERSION = "0.37" SEED_VERSION = 4 # bump this everytime the seed generation is modified
mit
Python
229d7e0385f3809267a2d930f93c7c8e17515a25
initialize final model - validation
jvpoulos/drnns-prediction
code/final_val.py
code/final_val.py
# # Try different hyperparameters and network structure on validation set

from sklearn.model_selection import TimeSeriesSplit, train_test_split
from keras.utils.visualize_util import plot
from keras.models import Sequential
from keras.layers import GRU, Dense, Masking, Dropout, Activation, advanced_activations
from keras.callbacks import EarlyStopping
import numpy as np
from itertools import product
import cPickle as pkl
from scipy.sparse import csr_matrix
from utils import set_trace, plot_ROC
from sklearn.metrics import roc_curve, auc, roc_auc_score

import tensorflow as tf
tf.python.control_flow_ops = tf

# Load saved data
print('Load saved data')

X_train = pkl.load(open('data/X_train.np', 'rb'))
y_train = pkl.load(open('data/y_train.np', 'rb'))

X_train = X_train[1:X_train.shape[0]]  # drop first sample so batch size is divisible
y_train = y_train[1:y_train.shape[0]]

# Label shift
lahead = 0  # number of days ahead that are used to make the prediction

if lahead != 0:
    y_train = np.roll(y_train, -lahead, axis=0)
else:
    pass

# Define network structure
epochs = 3
nb_timesteps = 14
nb_classes = 2
nb_features = X_train.shape[1]
output_dim = 1

# Define cross-validated model parameters
validation_split = 0.2
batch_size = 14
dropout = 0.25
activation = 'sigmoid'
nb_hidden = 128
initialization = 'glorot_normal'

# # Reshape X to three dimensions
# # Should have shape (batch_size, nb_timesteps, nb_features)

X_train = csr_matrix.toarray(X_train)  # convert from sparse matrix to N dimensional array
X_train = np.resize(X_train, (X_train.shape[0], nb_timesteps, X_train.shape[1]))
print('X_train shape:', X_train.shape)

# Reshape y to two dimensions
# Should have shape (batch_size, output_dim)

y_train = np.resize(y_train, (X_train.shape[0], output_dim))
print('y_train shape:', y_train.shape)

# Initiate sequential model
print('Initializing model')

model = Sequential()

# Stack layers
# expected input batch shape: (batch_size, nb_timesteps, nb_features)
# note that we have to provide the full batch_input_shape since the network is stateful.
# the sample of index i in batch k is the follow-up for the sample i in batch k-1.
model.add(Masking(mask_value=0., batch_input_shape=(batch_size, nb_timesteps, nb_features)))  # embedding for variable input lengths
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization,
              batch_input_shape=(batch_size, nb_timesteps, nb_features)))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, return_sequences=True, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(GRU(nb_hidden, stateful=True, init=initialization))
model.add(Dropout(dropout))
model.add(Dense(output_dim, activation=activation))

# Configure learning process
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['binary_accuracy'])

plot(model, to_file='results/baseline_model.png',  # Plot graph of model
     show_shapes=True,
     show_layer_names=False)

# Training / validation
early_stopping = EarlyStopping(monitor='loss', patience=1)

print('Training/validation')
for i in range(epochs):
    print('Epoch', i+1, '/', epochs)
    model.fit(X_train,
              y_train,
              batch_size=batch_size,
              verbose=1,
              nb_epoch=1,
              shuffle=False,  # turn off shuffle to ensure training data patterns remain sequential
              callbacks=[early_stopping],  # stop early if training loss not improving after 1 epoch
              validation_split=validation_split)  # use last 20% of data for validation set
    model.reset_states()
mit
Python
6486487dc1fc4972dcd18bc0e92bcae602f4d900
Create blacklist.py
Drinka/Aya
cogs/blacklist.py
cogs/blacklist.py
mit
Python
b02e308dfc2993123486a5660b6d14c98f19b389
Create Hamel_ZipCode_API.py
hamelsmu/ZipCodeWeb_Scraping
Hamel_ZipCode_API.py
Hamel_ZipCode_API.py
def back_out_unicode(stringval):
    return str(stringval.encode('utf-8').decode('ascii', 'ignore'))


def zip_info(zipcode):
    """
    Takes a zip code and goes to www.uszip.com/zip/*zipcode and screen
    scrapes relevant information down.

    *zipcode is the 5-digit zipcode parameter input value
    zipcode must be a string value

    returns a list of tuples, which are (key, value) pairs

    Written by Hamel Husain
    [email protected]
    """
    # Type Safety
    if type(zipcode) <> str or len(zipcode) > 5:
        raise Exception('zipcode passed to this function must be a 5-digit string')

    from bs4 import BeautifulSoup
    import urllib

    data = [('zipcode', str(zipcode))]  # Initializes zipcode list
    webaddress = 'http://www.uszip.com/zip/' + str(zipcode)  # build web address

    try:
        html_collector = urllib.urlopen(webaddress).read()  # read contents of HTML into variable
    except:
        print str(zipcode)  # + ' was an invalid zipcode, please try again - must be a 5 digit string value'
        raise

    soup = BeautifulSoup(html_collector)  # make a Beautiful Soup object from HTML string so that we can parse
    raw_html = soup.prettify()  # this is so you can inspect html, will dump this into a file called sample_html.txt

    with open('sample_html.txt', 'w') as html:  # so you can dump a copy of the HTML somewhere
        html.write(back_out_unicode(raw_html))

    ##############
    # Checks to see if zipcode returned by website is the one you input!
    ##############
    zipcode_returned = back_out_unicode(soup.find('strong').text.strip())

    if zipcode <> zipcode_returned:
        print '%s was not found as a zipcode! Will Skip This' % (zipcode)
        zip_valid = False
    else:
        zip_valid = True

    city = back_out_unicode(soup.find('title').text.strip().replace(' zip code', ''))

    ## Mark Zip Code as Retrieved Or Not ##
    data.append(('Zip Found', zip_valid))
    if zip_valid:
        data.append(('City', city))

    # return an iterable that has all of the results for 'dt', or the fieldnames
    search_results_titles = soup.findAll('dt')  # for this website, titles are tagged 'dt', numbers are tagged 'dd'

    for label in search_results_titles:
        current_name = label.name  # tag name
        current_string = back_out_unicode(label.text.strip())  # tag text
        next_name = label.find_next_sibling().name  # next tag's name
        next_string = back_out_unicode(label.find_next_sibling().text.strip())  # next tag's text

        # Want a 'dt' tag to be followed by a 'dd' tag, otherwise don't need
        # it to be part of the result
        if (current_name <> next_name) and current_name == 'dt' and next_name == 'dd' and zip_valid:
            data.append((current_string, next_string))

    return data


if __name__ == '__main__':
    print 'you have run the main file!'
    hamel = zip_info('75019')
unlicense
Python
9ec957af0c3d57dff4c05c1b7ed3e66e1c033f6b
Add nagios check for idot snowplow ingest
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
nagios/check_idot_snowplows.py
nagios/check_idot_snowplows.py
""" Nagios check to see how much snowplow data we are currently ingesting """ import sys import os import psycopg2 POSTGIS = psycopg2.connect(database='postgis', host='iemdb', user='nobody') pcursor = POSTGIS.cursor() pcursor.execute(""" select count(*) from idot_snowplow_current WHERE valid > now() - '30 minutes'::interval """) row = pcursor.fetchone() count = row[0] if count > 2: print 'OK - snowplows %s |count=%s;2;1;0' % (count, count) sys.exit(0) elif count > 1: print 'OK - snowplows %s |count=%s;2;1;0' % (count, count) sys.exit(1) else: print 'CRITICAL - snowplows %s |count=%s;2;1;0' % (count, count) sys.exit(2)
mit
Python
661e69ece73a609d230384874da9722de385d854
Change links to a dictionary, iterator instead of lambda
g3wanghc/uoft-scrapers,cobalt-uoft/uoft-scrapers,arkon/uoft-scrapers,kshvmdn/uoft-scrapers
uoftscrapers/scrapers/libraries/__init__.py
uoftscrapers/scrapers/libraries/__init__.py
from ..utils import Scraper
from bs4 import BeautifulSoup, NavigableString
from datetime import datetime, date
from collections import OrderedDict

import urllib.parse as urlparse
from urllib.parse import urlencode

import re


class Libraries:
    """A scraper for the Libraries at the University of Toronto."""

    host = 'https://onesearch.library.utoronto.ca/visit'
    campuses_tags = {
        'St. George': 'UTSG',
        'U of T Mississauga': 'UTM',
        'U of T Scarborough': 'UTSC'
    }

    @staticmethod
    def scrape(location='.'):
        Scraper.logger.info('Libraries initialized.')
        Scraper.ensure_location(location)

        return Libraries.get_library_link()

        # ['content_link'] -> 'Teaser Text'
        # ['info_link'] -> 'Everything Else'
        library_data_links = Libraries.get_library_link()

        raise NotImplementedError('This scraper has not been implemented yet.')

        Scraper.logger.info('Libraries completed.')

    @staticmethod
    def get_library_link():
        html = Scraper.get(Libraries.host)
        soup = BeautifulSoup(html, 'html.parser')

        list_obj_arr = soup.select('.view-list-of-libraries')[1].select(
            '.view-content')[0].select('.views-row')

        library_links = dict()
        for l in list_obj_arr:
            title = l.h2.text
            library_links[title] = {
                'content_link': l.select('a')[0]['href'],
                'info_link': l.select('a')[1]['href']
            }

        return library_links
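For illustration, a hedged sketch of the mapping the new get_library_link() returns; the library title and paths below are invented, only the key layout comes from the code above:

sample = {
    'Robarts Library': {
        'content_link': '/library/robarts',       # hypothetical path
        'info_link': '/library/robarts/info',     # hypothetical path
    },
}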
from ..utils import Scraper
from bs4 import BeautifulSoup, NavigableString
from datetime import datetime, date
from collections import OrderedDict

import urllib.parse as urlparse
from urllib.parse import urlencode

import re


class Libraries:
    """A scraper for the Libraries at the University of Toronto."""

    host = 'https://onesearch.library.utoronto.ca/visit'
    campuses_tags = {'St. George': 'UTSG',
                     'U of T Mississauga': 'UTM',
                     'U of T Scarborough': 'UTSC'}

    @staticmethod
    def scrape(location='.'):
        Scraper.logger.info('Libraries initialized.')
        Scraper.ensure_location(location)

        # ['content'] -> 'Teaser text', ['data]
        library_data_links = Libraries.get_library_link()

        raise NotImplementedError('This scraper has not been implemented yet.')

        Scraper.logger.info('Libraries completed.')

    @staticmethod
    def get_library_link():
        html = Scraper.get(Libraries.host)
        soup = BeautifulSoup(html, 'html.parser')

        content_links = []
        library_info_links = []

        list_obj_arr = soup.select('.view-list-of-libraries')[1].select(
            '.view-content')[0].select('.views-row')

        content_links[:] = [l.select('a')[0]['href'] for l in list_obj_arr]
        library_info_links = [l.select('a')[1]['href'] for l in list_obj_arr]

        return {'content': content_links, 'info': library_info_links}
mit
Python
717b20e298547685ed0685bd09a4fac541034910
Add an example map flow
ionrock/taskin
example/map_flows.py
example/map_flows.py
from taskin import task


def get_servers(data):
    return [
        'foo.example.com',
        'bar.example.com',
    ]


def create_something(data):
    servers, name = data
    for server in servers:
        print('Creating: https://%s/%s' % (server, name))


def main():
    flow = [
        get_servers,
        task.MapTask(create_something, args=xrange(10))
    ]
    task.do_flow(flow)


if __name__ == '__main__':
    main()
bsd-3-clause
Python
cd2df0032a3978444d6bd15e3b49a20bef495b75
add blastp
shl198/Pipeline,shl198/Projects,shl198/Projects,shl198/Pipeline,shl198/Pipeline,shl198/Projects,shl198/Pipeline,shl198/Projects
Modules/f10_blast.py
Modules/f10_blast.py
import subprocess, os


def makeblastdb(fastaFile, datatype, outputname):
    """
    This function builds a database given a fasta file.

    * fastaFile: can be gzipped or not
    """
    if fastaFile.endswith('.gz'):
        cmd = ('gunzip -c {input} | makeblastdb -in - -dbtype {type} -title {title} '
               '-out {outputname}').format(
                   input=fastaFile, type=datatype,
                   outputname=outputname, title=outputname)
    else:
        cmd = ('makeblastdb -in {input} -dbtype {type} -title {title} '
               '-out {outputname}').format(
                   input=fastaFile, type=datatype,
                   outputname=outputname, title=outputname)
    subprocess.call(cmd, shell=True)


def blastp(query, database, outputFile, threads, evalue, fmt, mapnum):
    """
    This function runs blastp.

    * query: fasta file which you want to map
    * database: database path/name
    * outputFile: tabular blast result
    """
    if query.endswith('.gz'):
        cmd = ('gunzip -c {input} | blastp -query - -db {database} '
               '-out {outputFile} -evalue {evalue} -outfmt {format} '
               '-seg yes -num_threads {thread} -num_alignments {mapnum}').format(
                   input=query, database=database, outputFile=outputFile,
                   evalue=evalue, format=str(fmt), thread=str(threads),
                   mapnum=mapnum)
    else:
        cmd = ('blastp -query {input} -db {database} -out {outputFile} '
               '-evalue {evalue} -outfmt {format} -seg yes '
               '-num_threads {thread} -num_alignments {mapnum}').format(
                   input=query, database=database, outputFile=outputFile,
                   evalue=evalue, format=str(fmt), thread=str(threads),
                   mapnum=mapnum)
    subprocess.call(cmd, shell=True)
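A hedged usage sketch of the two helpers above, chaining database creation and the search; the file names are hypothetical and the BLAST+ binaries must be on PATH:

makeblastdb('proteins.fa.gz', 'prot', 'protein_db')  # gzipped input is handled
blastp(query='query.fa', database='protein_db',
       outputFile='hits.tsv', threads=4, evalue=1e-5,
       fmt=6, mapnum=5)  # -outfmt 6 is BLAST's tabular output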
mit
Python
7dac3075874a79d51d1b9d0c1551eec9a988f526
Create Roman_to_Integer.py
UmassJin/Leetcode
Array/Roman_to_Integer.py
Array/Roman_to_Integer.py
# Given a roman numeral, convert it to an integer.
# Input is guaranteed to be within the range from 1 to 3999.

class Solution:
    # @return an integer
    def romanToInt(self, s):
        numerals = {"M": 1000, "D": 500, "C": 100, "L": 50,
                    "X": 10, "V": 5, "I": 1}
        result = 0
        pre = s[0]
        for char in s:
            if numerals[char] <= numerals[pre]:
                result += numerals[char]
            else:
                result += numerals[char] - 2 * numerals[pre]
            pre = char
        return result
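Example usage, with results checked by hand against the subtractive rule the loop implements:

print(Solution().romanToInt('IV'))       # 4  (I before V subtracts)
print(Solution().romanToInt('MCMXCIV'))  # 1994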
mit
Python
c810882385e034ca0e888ce093b227198dbb5f76
Create GPIOTutorialtempLogger.py
zivraf/ThermoMaster
GPIOTutorialtempLogger.py
GPIOTutorialtempLogger.py
import RPi.GPIO as GPIO
import time as time

GPIO.setmode(GPIO.BCM)
GPIO.setup(22, GPIO.IN)
GPIO.setup(17, GPIO.OUT)

while True:
    if GPIO.input(22):
        break

print "start"
datafile = open("tempreading.log", "w")

while True:
    GPIO.output(17, GPIO.HIGH)
    tfile = open("/sys/bus/w1/devices/28-000005658920/w1_slave")
    text = tfile.read()
    tfile.close()
    secondline = text.split("\n")[1]
    tempData = secondline.split(" ")[9]
    temprature = float(tempData[2:])
    temprature = temprature / 1000
    print temprature
    datafile.write(str(temprature) + "\n")
    time.sleep(1)
    GPIO.output(17, GPIO.LOW)
    time.sleep(1)
    if GPIO.input(22) == 1:
        break

datafile.close()
GPIO.output(17, GPIO.LOW)
apache-2.0
Python
7b54ac1d1bf8cf6e9869e716940814d2d56cb1de
Create Watchers.py
possoumous/Watchers,possoumous/Watchers,possoumous/Watchers,possoumous/Watchers
examples/Watchers.py
examples/Watchers.py
import contextlib

import openpyxl
from selenium import webdriver

# Path to the PhantomJS binary; placeholder value, adjust for your system.
Phantom_Path = '/usr/local/bin/phantomjs'

los = []
url = 'https://stocktwits.com/symbol/'
workbook = openpyxl.load_workbook('Spreadsheet.xlsx')
worksheet = workbook.get_sheet_by_name(name='Sheet1')

for col in worksheet['A']:
    los.append(col.value)

los2 = []
print(los)

for i in los:
    stocksite = url + i + '?q=' + i
    print(stocksite)
    with contextlib.closing(webdriver.PhantomJS(Phantom_Path)) as driver:
        # with contextlib.closing(webdriver.Phantom_Path)) as driver:
        driver.get(stocksite)
        driver.find_element_by_id('sentiment-tab').click()
        Bullish = driver.find_elements_by_css_selector('span.bullish:nth-child(1)')
        Sentiment = [x.text for x in Bullish]
        los2.append(Sentiment[0])
mit
Python
66fa9698b40fa8365d91aef1ed16b620494052f0
Add CUDA + MPI example
gdementen/numba,stonebig/numba,gdementen/numba,stonebig/numba,jriehl/numba,GaZ3ll3/numba,pitrou/numba,seibert/numba,stefanseefeld/numba,stefanseefeld/numba,numba/numba,ssarangi/numba,seibert/numba,cpcloud/numba,stuartarchibald/numba,stefanseefeld/numba,gmarkall/numba,jriehl/numba,gmarkall/numba,sklam/numba,stonebig/numba,seibert/numba,stuartarchibald/numba,gmarkall/numba,cpcloud/numba,gdementen/numba,numba/numba,sklam/numba,IntelLabs/numba,gdementen/numba,ssarangi/numba,cpcloud/numba,sklam/numba,stuartarchibald/numba,pitrou/numba,IntelLabs/numba,pombredanne/numba,pombredanne/numba,pitrou/numba,pitrou/numba,stonebig/numba,numba/numba,ssarangi/numba,pitrou/numba,sklam/numba,pombredanne/numba,numba/numba,IntelLabs/numba,pombredanne/numba,stuartarchibald/numba,stonebig/numba,seibert/numba,GaZ3ll3/numba,GaZ3ll3/numba,gmarkall/numba,ssarangi/numba,gmarkall/numba,jriehl/numba,stuartarchibald/numba,IntelLabs/numba,IntelLabs/numba,GaZ3ll3/numba,stefanseefeld/numba,jriehl/numba,numba/numba,sklam/numba,cpcloud/numba,cpcloud/numba,pombredanne/numba,GaZ3ll3/numba,seibert/numba,stefanseefeld/numba,ssarangi/numba,gdementen/numba,jriehl/numba
examples/cuda_mpi.py
examples/cuda_mpi.py
# Demonstration of using MPI and Numba CUDA to perform parallel computation
# using GPUs in multiple nodes. This example requires MPI4py to be installed.
#
# The root process creates an input data array that is scattered to all nodes.
# Each node calls a CUDA jitted function on its portion of the input data.
# Output data is then gathered back to the master node.
#
# Notes/limitations:
#
# 1. It is generally more efficient to avoid initialising all data on the root
#    node then scattering it out to all other nodes, and instead each node
#    should initialise its own data, but initialisation is done on the root
#    node here to keep the example simple.
# 2. If multiple GPUs are available to a single MPI process, additional code
#    may need adding to ensure the correct GPU is used by each process - this
#    will depend on the exact configuration of the MPI cluster.
#
# This example can be invoked with:
#
#     $ mpirun -np <np> python cuda_mpi.py
#
# where np is the number of processes (e.g. 4). For demonstrating the code,
# this does work with a single node and a single GPU, since multiple processes
# can share a single GPU. However, in a production setting, it may be more
# appropriate to provide one GPU per MPI process.

from __future__ import print_function

from mpi4py import MPI
from numba import cuda
import numpy as np

mpi_comm = MPI.COMM_WORLD

# Input data size
total_n = 10

# Process 0 creates input data
if mpi_comm.rank == 0:
    input_data = np.arange(total_n, dtype=np.int32)
    print("Input:", input_data)
else:
    input_data = None

# Compute partitioning of the input array
proc_n = [total_n // mpi_comm.size + (total_n % mpi_comm.size > n)
          for n in range(mpi_comm.size)]
pos = 0
pos_n = []
for n in range(mpi_comm.size):
    pos_n.append(pos)
    pos += proc_n[n]

my_n = proc_n[mpi_comm.rank]
my_offset = pos_n[mpi_comm.rank]
print('Process %d, my_n = %d' % (mpi_comm.rank, my_n))
print('Process %d, my_offset = %d' % (mpi_comm.rank, my_offset))

# Distribute input data across processes
my_input_data = np.zeros(my_n, dtype=np.int32)
mpi_comm.Scatterv([input_data, proc_n, pos_n, MPI.INT], my_input_data)
print('Process %d, my_input_data = %s' % (mpi_comm.rank, my_input_data))

# Perform computation on local data
@cuda.jit
def sqplus2(input_data, output_data):
    for i in range(len(input_data)):
        d = input_data[i]
        output_data[i] = d * d + 2

my_output_data = np.empty_like(my_input_data)
sqplus2(my_input_data, my_output_data)
print('Process %d, my_output_data = %s' % (mpi_comm.rank, my_output_data))

# Bring result back to root process
if mpi_comm.rank == 0:
    output_data = np.empty_like(input_data)
else:
    output_data = None

mpi_comm.Gatherv(my_output_data, [output_data, proc_n, pos_n, MPI.INT])

if mpi_comm.rank == 0:
    print("Output:", output_data)

MPI.Finalize()
bsd-2-clause
Python
b44977653e57077118cb0eb0d549758f52beed35
Add basic example
ProjectPyRhO/PyRhO,ProjectPyRhO/PyRhO
examples/examples.py
examples/examples.py
from pyrho import *

RhO = models['6']()
Prot = protocols['step']()
Prot.phis = [1e16, 1e15, 1e14]
Sim = simulators['Python'](Prot, RhO)
Sim.run()
Sim.plot()
bsd-3-clause
Python
2d5366f455612373ca87ef4d2c8f890b1e6b255f
Add a compliance tool to export a subset of messages.
zulip/zulip,zulip/zulip,zulip/zulip,zulip/zulip,zulip/zulip,zulip/zulip,zulip/zulip
zerver/management/commands/export_search.py
zerver/management/commands/export_search.py
import os
from argparse import ArgumentParser
from datetime import datetime
from email.headerregistry import Address
from functools import lru_cache, reduce
from operator import or_
from typing import Any

from django.core.management.base import CommandError
from django.db.models import Q
from django.forms.models import model_to_dict

from zerver.lib.export import floatify_datetime_fields, write_table_data
from zerver.lib.management import ZulipBaseCommand
from zerver.models import Message, Recipient, Stream, UserProfile

ignore_keys = [
    "realm",
    "rendered_content_version",
    "sending_client",
    "search_tsvector",
]


class Command(ZulipBaseCommand):
    help = """Exports the messages matching certain search terms.

This is most often used for legal compliance.
"""

    def add_arguments(self, parser: ArgumentParser) -> None:
        self.add_realm_args(parser, required=True)
        parser.add_argument(
            "--output",
            metavar="<path>",
            help="File to output JSON results to; it must not exist, unless --force is given",
            required=True,
        )
        parser.add_argument(
            "--force", action="store_true", help="Overwrite the output file if it exists already"
        )
        parser.add_argument(
            "--file",
            metavar="<path>",
            help="Read search terms from the named file, one per line",
        )
        parser.add_argument(
            "search_terms",
            nargs="*",
            metavar="<search term>",
            help="Terms to search for in message body or topic",
        )

    def handle(self, *args: Any, **options: Any) -> None:
        terms = set()
        if options["file"]:
            with open(options["file"], "r") as f:
                terms.update(f.read().splitlines())
        terms.update(options["search_terms"])
        if not terms:
            raise CommandError("One or more search terms are required!")
        if os.path.exists(options["output"]) and not options["force"]:
            raise CommandError(
                f"Output path '{options['output']}' already exists; use --force to overwrite"
            )

        realm = self.get_realm(options)
        limits = reduce(
            or_,
            [Q(content__icontains=term) | Q(subject__icontains=term) for term in terms],
            Q(),
        )
        messages_query = Message.objects.filter(limits, realm=realm).order_by("date_sent")

        def format_sender(full_name: str, delivery_email: str) -> str:
            return str(Address(display_name=full_name, addr_spec=delivery_email))

        @lru_cache(maxsize=None)
        def format_recipient(recipient_id: int) -> str:
            recipient = Recipient.objects.get(id=recipient_id)

            if recipient.type == Recipient.STREAM:
                stream = Stream.objects.values("name").get(id=recipient.type_id)
                return "#" + stream["name"]

            users = (
                UserProfile.objects.filter(
                    subscription__recipient_id=recipient.id,
                )
                .order_by("full_name")
                .values_list("full_name", "delivery_email")
            )

            return ", ".join([format_sender(e[0], e[1]) for e in users])

        message_dicts = []
        for message in messages_query:
            item = model_to_dict(message)
            item["recipient_name"] = format_recipient(message.recipient_id)
            item["sender_name"] = format_sender(
                message.sender.full_name, message.sender.delivery_email
            )
            for key in ignore_keys:
                del item[key]
            message_dicts.append(item)

        output = {"zerver_message": message_dicts}
        floatify_datetime_fields(output, "zerver_message")
        for item in output["zerver_message"]:
            item["date_sent_utc"] = datetime.utcfromtimestamp(int(item["date_sent"])).strftime(
                "%Y-%m-%d %H:%M:%S"
            )

        write_table_data(options["output"], output)
apache-2.0
Python
61139332ce1bcfd145f16b8f3c411e178db4054c
Add some unit tests for the hashing protocol of dtype (fail currently).
teoliphant/numpy-refactor,efiring/numpy-work,efiring/numpy-work,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,teoliphant/numpy-refactor,jasonmccampbell/numpy-refactor-sprint,Ademan/NumPy-GSoC,teoliphant/numpy-refactor,illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,chadnetzer/numpy-gaurdro,efiring/numpy-work,illume/numpy3k,Ademan/NumPy-GSoC,chadnetzer/numpy-gaurdro,teoliphant/numpy-refactor,illume/numpy3k,jasonmccampbell/numpy-refactor-sprint,efiring/numpy-work,chadnetzer/numpy-gaurdro,illume/numpy3k,chadnetzer/numpy-gaurdro,Ademan/NumPy-GSoC
numpy/core/tests/test_dtype.py
numpy/core/tests/test_dtype.py
import numpy as np
from numpy.testing import *

class TestBuiltin(TestCase):
    def test_run(self):
        """Only test hash runs at all."""
        for t in [np.int, np.float, np.complex, np.int32, np.str, np.object,
                  np.unicode]:
            dt = np.dtype(t)
            hash(dt)

class TestRecord(TestCase):
    def test_equivalent_record(self):
        """Test whether equivalent record dtypes hash the same."""
        a = np.dtype([('yo', np.int)])
        b = np.dtype([('yo', np.int)])
        self.failUnless(hash(a) == hash(b),
                        "two equivalent types do not hash to the same value !")

    def test_different_names(self):
        # In theory, they may hash the same (collision) ?
        a = np.dtype([('yo', np.int)])
        b = np.dtype([('ye', np.int)])
        self.failUnless(hash(a) != hash(b),
                        "%s and %s hash the same !" % (a, b))

    def test_different_titles(self):
        # In theory, they may hash the same (collision) ?
        a = np.dtype({'names': ['r', 'b'],
                      'formats': ['u1', 'u1'],
                      'titles': ['Red pixel', 'Blue pixel']})
        b = np.dtype({'names': ['r', 'b'],
                      'formats': ['u1', 'u1'],
                      'titles': ['RRed pixel', 'Blue pixel']})
        self.failUnless(hash(a) != hash(b),
                        "%s and %s hash the same !" % (a, b))

class TestSubarray(TestCase):
    def test_equivalent_record(self):
        """Test whether equivalent subarray dtypes hash the same."""
        a = np.dtype((np.int, (2, 3)))
        b = np.dtype((np.int, (2, 3)))
        self.failUnless(hash(a) == hash(b),
                        "two equivalent types do not hash to the same value !")

    def test_nonequivalent_record(self):
        """Test whether different subarray dtypes hash differently."""
        a = np.dtype((np.int, (2, 3)))
        b = np.dtype((np.int, (3, 2)))
        self.failUnless(hash(a) != hash(b),
                        "%s and %s hash the same !" % (a, b))
        a = np.dtype((np.int, (2, 3)))
        b = np.dtype((np.int, (2, 2)))
        self.failUnless(hash(a) != hash(b),
                        "%s and %s hash the same !" % (a, b))
        a = np.dtype((np.int, (1, 2, 3)))
        b = np.dtype((np.int, (1, 2)))
        self.failUnless(hash(a) != hash(b),
                        "%s and %s hash the same !" % (a, b))

class TestMonsterType(TestCase):
    """Test deeply nested subtypes."""
    pass

if __name__ == "__main__":
    run_module_suite()
bsd-3-clause
Python
3d11000488ca20e7e34a9f7030a16e69a6b4052f
add examples for training 3
yrunts/python-for-qa
3-python-intermediate/examples/list_comprehension.py
3-python-intermediate/examples/list_comprehension.py
odd = [i for i in range(10) if i % 2]
print(odd)  # [1, 3, 5, 7, 9]

odd_squares = [i ** 2 for i in odd]
print(odd_squares)  # [1, 9, 25, 49, 81]

first_names = ['Bruce', 'James', 'Alfred']
last_names = ['Wayne', 'Gordon', 'Pennyworth']
heroes = ['{} {}'.format(f, l) for f, l in zip(first_names, last_names)]
print(heroes)  # ['Bruce Wayne', 'James Gordon', 'Alfred Pennyworth']
cc0-1.0
Python
0886a4efd7b7703d72be4319d7b0295d3bc64151
Create Tensor_Case.py
saurabhrathor/Tensorflow_Practice
Tensor_Case.py
Tensor_Case.py
import tensorflow as tf

sess = tf.InteractiveSession()

x = tf.random_uniform([])
y = tf.random_uniform([])
out1 = tf.cond(tf.greater(x, y),
               lambda: tf.add(x, y),
               lambda: tf.subtract(x, y))
print(x.eval(), y.eval(), out1.eval())

x = tf.random_uniform([], -1, 1)
y = tf.random_uniform([], -1, 1)

def f1():
    return tf.cast(tf.add(x, y), tf.float32)

def f2():
    return tf.cast(tf.subtract(x, y), tf.float32)

def f3():
    return tf.cast(tf.constant(0), tf.float32)

out2 = tf.case({tf.less(x, y): f2, tf.greater(x, y): f1}, default=f3)
print(x.eval(), y.eval(), out2.eval())
bsd-2-clause
Python
dfa492ffc2148d8ffa5c14145e0092be60ef44eb
add an example for pipeline
thefab/tornadis,thefab/tornadis
examples/pipeline.py
examples/pipeline.py
import tornado
import tornadis


@tornado.gen.coroutine
def pipeline_coroutine():
    # Let's get a connected client
    client = tornadis.Client()
    yield client.connect()

    # Let's make a pipeline object to stack commands inside
    pipeline = tornadis.Pipeline()
    pipeline.stack_call("SET", "foo", "bar")
    pipeline.stack_call("GET", "foo")
    # At this point, nothing is sent to redis

    # Let's submit the pipeline to redis and wait for replies
    results = yield client.call(pipeline)

    # The two replies are in the results array
    print results
    # >>> ['OK', 'bar']

    # Let's disconnect
    client.disconnect()


def stop_loop(future):
    exception = future.exception()
    if exception is not None:
        raise(exception)
    loop.stop()


loop = tornado.ioloop.IOLoop.instance()
loop.add_future(pipeline_coroutine(), stop_loop)
loop.start()
mit
Python
b75601e0c6bbb83dba4544f9d80b6f71c75fcdec
add missing ordered field to startup program interest
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
web/impact/impact/migrations/0003_add_ordered_field_to_startup_program_interest.py
web/impact/impact/migrations/0003_add_ordered_field_to_startup_program_interest.py
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2018-01-30 10:37
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('impact', '0002_set_models_to_managed'),
    ]

    operations = [
        migrations.AlterField(
            model_name='startupprograminterest',
            name='order',
            field=models.PositiveIntegerField(db_index=True, editable=False),
        ),
    ]
mit
Python
c3e7b563c3eeb24aa269f23672b8f469470908b7
Add an option to redirect user to a page if the key is already expired.
ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,uploadcare/django-loginurl,ISIFoundation/influenzanet-website,vanschelven/cmsplugin-journal,ISIFoundation/influenzanet-website,ISIFoundation/influenzanet-website,fajran/django-loginurl,ISIFoundation/influenzanet-website
onetime/views.py
onetime/views.py
from datetime import datetime

from django.http import HttpResponseRedirect, HttpResponseGone
from django.shortcuts import get_object_or_404
from django.contrib.auth import login
from django.conf import settings

from onetime import utils
from onetime.models import Key

def cleanup(request):
    utils.cleanup()

def login(request, key, redirect_expired_to=None):
    data = get_object_or_404(Key, key=key)

    expired = False
    if data.usage_left is not None and data.usage_left == 0:
        expired = True
    if data.expires is not None and data.expires < datetime.now():
        expired = True

    if expired:
        if redirect_expired_to is not None:
            return HttpResponseRedirect(redirect_expired_to)
        else:
            return HttpResponseGone()

    if data.usage_left is not None:
        data.usage_left -= 1
        data.save()

    login(request, data.user)

    next = request.GET.get('next', None)
    if data.next is not None:
        next = data.next
    if next is None:
        next = settings.LOGIN_REDIRECT_URL

    return HttpResponseRedirect(next)
from datetime import datetime

from django.http import HttpResponseRedirect, HttpResponseGone
from django.shortcuts import get_object_or_404
from django.contrib.auth import login
from django.conf import settings

from onetime import utils
from onetime.models import Key

def cleanup(request):
    utils.cleanup()

def login(request, key):
    data = get_object_or_404(Key, key=key)

    if data.usage_left is not None and data.usage_left == 0:
        return HttpResponseGone()
    if data.expires is not None and data.expires < datetime.now():
        return HttpResponseGone()

    if data.usage_left is not None:
        data.usage_left -= 1
        data.save()

    login(request, data.user)

    next = request.GET.get('next', None)
    if data.next is not None:
        next = data.next
    if next is None:
        next = settings.LOGIN_REDIRECT_URL

    return HttpResponseRedirect(next)
agpl-3.0
Python
92077ecd268a6ca04f2b413fd3535d4cc358c97b
Create serializers.py
comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django,comic/comic-django
app/grandchallenge/algorithms/serializers.py
app/grandchallenge/algorithms/serializers.py
from rest_framework import serializers

from grandchallenge.algorithms.models import Algorithm, Job, Result


class AlgorithmSerializer(serializers.ModelSerializer):
    class Meta:
        model = Algorithm
        fields = ['pk']


class ResultSerializer(serializers.ModelSerializer):
    class Meta:
        model = Result
        fields = ['pk', 'job', 'images', 'output']


class JobSerializer(serializers.ModelSerializer):
    class Meta:
        model = Job
        fields = ['pk', 'algorithm', 'image']
apache-2.0
Python
b269ed70223591c81d13f97e48c74ced12cec661
Update 4-keys-keyboard.py
yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode
Python/4-keys-keyboard.py
Python/4-keys-keyboard.py
# Time:  O(n)
# Space: O(1)

class Solution(object):
    def maxA(self, N):
        """
        :type N: int
        :rtype: int
        """
        if N < 7:
            return N
        dp = [i for i in xrange(N+1)]
        for i in xrange(7, N+1):
            dp[i % 6] = max(dp[(i-4) % 6]*3, dp[(i-5) % 6]*4)
        return dp[N % 6]
# Time:  O(n)
# Space: O(1)

class Solution(object):
    def maxA(self, N):
        """
        :type N: int
        :rtype: int
        """
        if N < 7:
            return N
        dp = [i for i in xrange(N+1)]
        for i in xrange(7, N+1):
            dp[i % 6] = max(dp[(i-4) % 6]*3,dp[(i-5) % 6]*4)
        return dp[N % 6]
mit
Python
31f4479194239548bae6eff2650735ddf4279523
Add files via upload
goru47/INF1L-PRJ-2
DatabaseTest.py
DatabaseTest.py
import pygame
import time
import random
import BattlePortDatabase

pygame.init()

display_width = 800
display_height = 600

black = (0, 0, 0)
white = (255, 255, 255)
red = (255, 0, 0)

block_color = (53, 115, 255)

pid = 0

car_width = 73

gameDisplay = pygame.display.set_mode((display_width, display_height))
pygame.display.set_caption('A bit Racey')
clock = pygame.time.Clock()

carImg = pygame.image.load('image1.png')


def things_dodged(count):
    font = pygame.font.SysFont(None, 25)
    text = font.render("Dodged: " + str(count), True, black)
    gameDisplay.blit(text, (0, 0))


def things(thingx, thingy, thingw, thingh, color):
    pygame.draw.rect(gameDisplay, color, [thingx, thingy, thingw, thingh])


def car(x, y):
    gameDisplay.blit(carImg, (x, y))


def text_objects(text, font):
    textSurface = font.render(text, True, black)
    return textSurface, textSurface.get_rect()


def message_display(text):
    largeText = pygame.font.Font('freesansbold.ttf', 115)
    TextSurf, TextRect = text_objects(text, largeText)
    TextRect.center = ((display_width / 2), (display_height / 2))
    gameDisplay.blit(TextSurf, TextRect)

    pygame.display.update()

    time.sleep(2)

    game_loop()


def crash():
    message_display('You Crashed')


def game_loop():
    x = (display_width * 0.45)
    y = (display_height * 0.8)

    x_change = 0

    thing_startx = random.randrange(0, display_width)
    thing_starty = -600
    thing_speed = 4
    thing_width = 100
    thing_height = 100
    thingCount = 1

    dodged = 0

    gameExit = False

    while not gameExit:

        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                pygame.quit()
                quit()

            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_LEFT:
                    x_change = -5
                if event.key == pygame.K_RIGHT:
                    x_change = 5

            if event.type == pygame.KEYUP:
                if event.key == pygame.K_LEFT or event.key == pygame.K_RIGHT:
                    x_change = 0

        x += x_change

        gameDisplay.fill(white)

        # things(thingx, thingy, thingw, thingh, color)
        things(thing_startx, thing_starty, thing_width, thing_height, block_color)
        thing_starty += thing_speed
        car(x, y)
        things_dodged(dodged)

        if x > display_width - car_width or x < 0:
            crash()

        if thing_starty > display_height:
            thing_starty = 0 - thing_height
            thing_startx = random.randrange(0, display_width)
            # dodged += 1
            # thing_speed += 1
            thing_width += (dodged * 1.2)

        if y < thing_starty + thing_height:
            print('y crossover')

            if x > thing_startx and x < thing_startx + thing_width or x + car_width > thing_startx and x + car_width < thing_startx + thing_width:
                print('x crossover')
                ##
                score = dodged
                # after the game ends, the data below is written to the database
                BattlePortDatabase.data_entry(random.randint(10000, 100000), 'eenNaam', score)
                ##
                # [crashes the game] BattlePortDatabase.kweerie("SELECT * FROM scores where naam = 'Default'")
                crash()

        pygame.display.update()
        clock.tick(60)


game_loop()
pygame.quit()
quit()
mit
Python
1dd3e7436c19ba3146be6e34da39bd81dc1efd6e
Implement AES file encryption and decryption
lakewik/storj-gui-client
file_crypto_tools.py
file_crypto_tools.py
############ Module with cryptographics functions for Storj GUI Client ##########
## Based on: <http://stackoverflow.com/questions/16761458/how-to-aes-encrypt-decrypt-files-using-python-pycrypto-in-an-openssl-compatible> ##

from hashlib import md5
from Crypto.Cipher import AES
from Crypto import Random


class FileCrypto():

    def encrypt_file(self, algorithm, file_path, encrypted_file_save_path, password):
        if algorithm == "AES":
            with open(file_path, 'rb') as in_file, open(encrypted_file_save_path, 'wb') as out_file:
                self.encrypt_file_aes(in_file, out_file, password)

    def decrypt_file(self, algorithm, file_path, decrypted_file_save_path, password):
        if algorithm == "AES":
            with open(file_path, 'rb') as in_file, open(decrypted_file_save_path, 'wb') as out_file:
                self.decrypt_file_aes(in_file, out_file, password)

    def derive_key_and_iv(self, password, salt, key_length, iv_length):
        d = d_i = ''
        while len(d) < key_length + iv_length:
            d_i = md5(d_i + password + salt).digest()
            d += d_i
        return d[:key_length], d[key_length:key_length + iv_length]

    def encrypt_file_aes(self, in_file, out_file, password, key_length=32):
        bs = AES.block_size
        salt = Random.new().read(bs - len('Salted__'))
        key, iv = self.derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        out_file.write('Salted__' + salt)
        finished = False
        while not finished:
            chunk = in_file.read(1024 * bs)
            if len(chunk) == 0 or len(chunk) % bs != 0:
                padding_length = bs - (len(chunk) % bs)
                chunk += padding_length * chr(padding_length)
                finished = True
            out_file.write(cipher.encrypt(chunk))

    def decrypt_file_aes(self, in_file, out_file, password, key_length=32):
        bs = AES.block_size
        salt = in_file.read(bs)[len('Salted__'):]
        key, iv = self.derive_key_and_iv(password, salt, key_length, bs)
        cipher = AES.new(key, AES.MODE_CBC, iv)
        next_chunk = ''
        finished = False
        while not finished:
            chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
            if len(next_chunk) == 0:
                padding_length = ord(chunk[-1])
                if padding_length < 1 or padding_length > bs:
                    raise ValueError("bad decrypt pad (%d)" % padding_length)
                # all the pad-bytes must be the same
                if chunk[-padding_length:] != (padding_length * chr(padding_length)):
                    # this is similar to the bad decrypt:evp_enc.c from openssl program
                    raise ValueError("bad decrypt")
                chunk = chunk[:-padding_length]
                finished = True
            out_file.write(chunk)
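# A minimal round-trip sketch of the class above; the file names and the
# password are illustrative assumptions, not part of the Storj client itself:
#
#     fc = FileCrypto()
#     fc.encrypt_file("AES", "plain.txt", "plain.txt.enc", "s3cret")
#     fc.decrypt_file("AES", "plain.txt.enc", "roundtrip.txt", "s3cret")
#
# After the round trip, roundtrip.txt should be byte-identical to plain.txt.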
mit
Python
1633e8b286ddeec706d496931713e3ac7b93b780
Declare flaskext a namespace package
Kyah/flask-zodb,dag/flask-zodb,SpotlightKid/flask-zodb
flaskext/__init__.py
flaskext/__init__.py
import pkg_resources
pkg_resources.declare_namespace(__name__)
bsd-2-clause
Python
c69572c42da27357f8cb01299c309e47ff033e7f
Create docker-swarm-dns.py
akdv88/swarm-ddns,akdv88/swarm-ddns
docker-swarm-dns.py
docker-swarm-dns.py
#!/usr/bin/env python3.6

from time import sleep
import docker, \
       dns.resolver, \
       dns.query, \
       dns.tsigkeyring, \
       dns.update, \
       os, \
       sys

swnodes = ['192.168.15.201', '192.168.15.202', '192.168.15.203', '192.168.15.204', '192.168.15.205']
dnservers = {'master': {'ip': '192.168.2.6', 'key': 'EMtUbnXU3as1Eczq2bVZ8g=='},
             'slave': {'ip': '192.168.2.7', 'key': 'ctWc6TO3tD9YMV1QYgh9Jg=='}}
domain = 'subsident.docker.'
ttl = int(os.environ['UPDATE'])


def docker_query():
    conn = docker.from_env()
    serv_pre = set()
    while True:
        serv_cur = set()
        for service in conn.services.list():
            if 'add.dns' in service.attrs['Spec']['Labels']:
                if service.attrs['Spec']['Labels']['add.dns'] == 'true':
                    serv_cur.add(service.name)
        if serv_pre != serv_cur:
            add = serv_cur.difference(serv_pre)
            rm = serv_pre.difference(serv_cur)
            if add:
                print('ADD', add)
                for svc in add:
                    dns_add(svc)
            if rm:
                print('DEL', rm)
                for svc in rm:
                    dns_remove(svc)
        serv_pre = serv_cur
        sleep(ttl)


def dns_add(svc):
    for host, conf in dnservers.items():
        print('Add DNS Record \'' + svc + '\' sent to', host, 'dnserver (' + conf['ip'] + ')')
        keyring = dns.tsigkeyring.from_text({'rndc-key.': conf['key']})
        update = dns.update.Update(domain, keyring=keyring)
        for swip in swnodes:
            update.add(svc, 15, 'a', swip)
        resp = dns.query.tcp(update, conf['ip'])


def dns_remove(svc):
    for host, conf in dnservers.items():
        print('Remove DNS Record \'' + svc + '\' sent to', host, 'dnserver (' + conf['ip'] + ')')
        keyring = dns.tsigkeyring.from_text({'rndc-key.': conf['key']})
        update = dns.update.Update(domain, keyring=keyring)
        update.delete(svc, 'a')
        resp = dns.query.tcp(update, conf['ip'])


if __name__ == "__main__":
    docker_query()
mit
Python
52dbb4d1f34ef3d637e3d99813591bf12bfa4576
support for `python -m intelhex`. Let's provide some help on available "-m" executable points.
adfernandes/intelhex
intelhex/__main__.py
intelhex/__main__.py
# Copyright (c) 2016, Alexander Belchenko
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain
#   the above copyright notice, this list of conditions
#   and the following disclaimer.
# * Redistributions in binary form must reproduce
#   the above copyright notice, this list of conditions
#   and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the author nor the names
#   of its contributors may be used to endorse
#   or promote products derived from this software
#   without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY,
# OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
# AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

if __name__ == '__main__':
    print("Welcome to IntelHex Python library.")
    print()
    print("The intelhex package has some executable points:")
    print("  python -m intelhex.test -- easy way to run unit tests.")
bsd-3-clause
Python
e9e0a0eeaf985e5c8f74dc6cfb9110f7b3c152e4
test workers
uwescience/myria-python,uwescience/myria-python
myria/test/test_workers.py
myria/test/test_workers.py
from httmock import urlmatch, HTTMock
from json import dumps as jstr
import unittest

from myria import MyriaConnection


@urlmatch(netloc=r'localhost:8753')
def local_mock(url, request):
    global query_counter
    if url.path == '/workers':
        return jstr({'1': 'localhost:9001', '2': 'localhost:9002'})
    elif url.path == '/workers/alive':
        return jstr([1, 2])
    elif url.path == '/workers/worker-1':
        return jstr("localhost:9001")
    return None


class TestQuery(unittest.TestCase):
    def __init__(self, args):
        with HTTMock(local_mock):
            self.connection = MyriaConnection(hostname='localhost', port=8753)
        unittest.TestCase.__init__(self, args)

    def test_workers(self):
        with HTTMock(local_mock):
            workers = self.connection.workers()
        self.assertEquals(workers, {'1': 'localhost:9001', '2': 'localhost:9002'})

    def test_alive(self):
        with HTTMock(local_mock):
            workers = self.connection.workers_alive()
        self.assertEquals(set(workers), set([1, 2]))

    def test_worker_1(self):
        with HTTMock(local_mock):
            worker = self.connection.worker(1)
        self.assertEquals(worker, 'localhost:9001')
bsd-3-clause
Python
2349d603ca887961441b5b3f436d6cffaaecb291
Add pyMetascanAPI class
ikoniaris/pyMetascan
pyMetascanAPI.py
pyMetascanAPI.py
import requests
import os


class pyMetascanAPI:
    API_ENDPOINT = 'https://api.metascan-online.com/v1/'
    API_KEY = ''
    FILE_EXT = 'file'
    DATA_EXT = 'file/'
    HASH_EXT = 'hash/'

    def __init__(self, api_key):
        self.API_KEY = api_key

    def fileUpload(self, file):
        r = self.makeRequest(self.getFileEndpoint(), 'POST', file)
        return r.json()

    def retrieveReport(self, data_id):
        r = self.makeRequest(self.getDataEndpoint(data_id))
        return r.json()

    def hashLookup(self, hash):
        r = self.makeRequest(self.getHashEndpoint(hash))
        return r.json()

    def makeRequest(self, url, method='GET', file=None):
        headers = {'apikey': self.API_KEY}
        if method == 'POST':
            headers.update({'filename': os.path.basename(file)})
            return requests.post(url, file, headers=headers)
        else:
            return requests.get(url, headers=headers)

    def getFileEndpoint(self):
        return self.API_ENDPOINT + self.FILE_EXT

    def getDataEndpoint(self, data_id):
        return self.API_ENDPOINT + self.DATA_EXT + data_id

    def getHashEndpoint(self, hash):
        return self.API_ENDPOINT + self.HASH_EXT + hash
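# A hypothetical usage sketch: the API key, the file name, and the 'data_id'
# response field are assumptions for illustration and are not verified
# against the Metascan documentation:
#
#     api = pyMetascanAPI('my-api-key')
#     upload = api.fileUpload('suspect.exe')
#     report = api.retrieveReport(upload['data_id'])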
mit
Python
feefc96050d3906730fe6d366430d7478204d168
Add solution to 121.
bsamseth/project-euler,bsamseth/project-euler
121/121.py
121/121.py
""" A bag contains one red disc and one blue disc. In a game of chance a player takes a disc at random and its colour is noted. After each turn the disc is returned to the bag, an extra red disc is added, and another disc is taken at random. The player pays £1 to play and wins if they have taken more blue discs than red discs at the end of the game. If the game is played for four turns, the probability of a player winning is exactly 11/120, and so the maximum prize fund the banker should allocate for winning in this game would be £10 before they would expect to incur a loss. Note that any payout will be a whole number of pounds and also includes the original £1 paid to play the game, so in the example given the player actually wins £9. Find the maximum prize fund that should be allocated to a single game in which fifteen turns are played. Solution comment: Very quick, ~0.5 ms (would be much less for C++ impl.). Derived explicitly the N = 4 case above, from which the pattern emerges. The memoization helps a whole bunch. Would have been slightly better to use DP and build the table directly, but memoizing a recursive solution is so much nicer. """ import time import numpy as np from functools import lru_cache def P(n): """Probability of drawing a blue in the n-th round.""" return 1 / (n + 1) @lru_cache(maxsize=None) def Prob(blues, n, N): """ Return probability of getting some number of blues, starting on round n, with N rounds total. """ if blues <= 0: return 1 # Can always get 0 blues. elif blues > N - n + 1: return 0 # Can never get more blues than draws. elif blues == N - n + 1: return np.prod(P(np.arange(n, N+1))) # Prob of getting blue of every draw. else: # Prob is prob of getting a blue now and then blues-1 on the remaining throws, # or prob of getting red now and blues blues on the remaining throws. return P(n) * Prob(blues - 1, n + 1, N) + (1-P(n)) * Prob(blues, n + 1, N) if __name__ == "__main__": t0 = time.time() rounds = 15 bet = 1 winning_prob = Prob(rounds // 2 + 1, 1, rounds) print('Execution time: {:.3f} ms'.format((time.time() - t0) * 1e3)) print('Max payout with {} rounds: {}£'.format(rounds, int(bet / winning_prob)))
mit
Python
5e220c5529ca7279979939716c28997876145b7b
Create ac_cover_pic_down.py
zhihaofans/adao_cover_img_down
ac_cover_pic_down.py
ac_cover_pic_down.py
#coding=utf-8
import urllib
import urllib2
import os

cover='http://cover.acfunwiki.org/cover.php'
face='http://cover.acfunwiki.org/face.php'
now=1
local=os.getcwd()+'\\download\\'
url_1=face  # set the source
exist=0
success=0
fail=0
all=0

def download(num,yes):
    global now
    global exist
    global success
    global fail
    global all
    try:  # create the directory
        os.makedirs(local)
    except WindowsError:
        None
    if num >0:
        while now<=num:
            url= urllib2.urlopen(url_1).geturl()
            file= url[url.rfind('/')+1:]
            if os.path.exists(local+file):
                print now,'X',file,u'already exists'
                exist=exist+1
                if yes==0:
                    now=now+1
            else:
                try:  # download
                    urllib.urlretrieve(url,local+file)
                    print now,'√',file,u'download succeeded'
                    success=success+1
                    now=now+1
                except IOError:
                    print now,'X',file,u'download failed!!'
                    fail=fail+1
                    if yes==0:
                        now=now+1
            all=all+1
    print u'finished'
    print u'total downloaded',str(all),u'succeeded',str(success),u'already existed',str(exist),u'failed',str(fail)
    now=1
    num=0
    yes=0
    all=0
    main()

def main():
    input=raw_input(u'enter the number to download:')
    print u'current source:',url_1
    print u'download directory:',local
    download(int(input),1)  # second argument: whether failed or already-existing files are excluded from the downloaded count 'now' (yes=1/no=0)

if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        print u'######\n'
        print u'interrupted by user'
apache-2.0
Python
f6b720a2603cc597bdbe4124ad8e13b9a208274e
Create wordcloudtest.py
jacksu/machine-learning
src/ml/wordcloudtest.py
src/ml/wordcloudtest.py
#encoding=utf8
from pyecharts import WordCloud
from snownlp import SnowNLP
import jieba

## word cloud
filename = "wdqbs.txt"
with open(filename) as f:
    mytext = f.read()
#print mytext

s= SnowNLP(unicode(mytext,'utf8'))
for word in s.keywords(10):
    print word.encode('utf8')

seg_list = jieba.cut(mytext)
punct = set(u''':!),.:;?]}¢'"、。〉》」』】〕〗〞︰︱︳﹐、﹒
﹔﹕﹖﹗﹚﹜﹞!),.:;?|}︴︶︸︺︼︾﹀﹂﹄﹏、~¢
々‖•·ˇˉ―--′’”([{£¥'"‵〈《「『【〔〖([{£¥〝︵︷︹︻
︽︿﹁﹃﹙﹛﹝({“‘-—_…''')
# for str/unicode
filterpunt = lambda s: ''.join(filter(lambda x: x not in punct, s))
# for list
filterpuntl = lambda l: list(filter(lambda x: x not in punct, l))

dict={}
for word in filterpuntl(seg_list):
    if word in dict:
        dict[word]=int(dict[word])+1
    else:
        dict[word]=1

name=[]
for word in dict.keys():
    name.append(word.encode('utf8'))
print name
value = dict.values()
print value

wordcloud = WordCloud(width=1300, height=620)
wordcloud.add("", name, value, word_size_range=[20, 100])
wordcloud.show_config()
wordcloud.render()
mit
Python
d062a109da7ba5cb6147fac90bb4c6466083c755
Create __init__.py
psykzz/py-bot-framework
SlackBotFramework/utilities/__init__.py
SlackBotFramework/utilities/__init__.py
import json


def send_card(client, channel, title, title_url, text, fields=None,
              bot_name="Bot", color="#36a64f",
              fallback="There was an error please try again"):
    attr = [{
        "fallback": fallback,
        "color": color,
        "title": title,
        "title_link": title_url,
        "text": text
    }]
    if fields:
        if not isinstance(fields, list):
            fields = [fields]
        attr[0]['fields'] = fields

    return client.api_call(
        "chat.postMessage",
        as_user=True,
        username=bot_name,
        channel=channel,
        text="",
        attachments=json.dumps(attr))
unlicense
Python
e64dbcd16959078bc4df1b6a536ea3f36ae52411
add cli directory
sassoftware/amiconfig,sassoftware/amiconfig
ec2/cli/__init__.py
ec2/cli/__init__.py
#
# Copyright (c) 2007 rPath, Inc.
#
apache-2.0
Python
7bea9ba96c9d036692882fcbae5fcc1974567530
Add preprocessing.py
kenkov/nlp,kenkov/nlp,kenkov/nlp
preprocessing/preprocessing.py
preprocessing/preprocessing.py
#! /usr/bin/env python
# coding:utf-8

import re


class Preprocess:
    def __init__(self):
        self.html_regex = re.compile(
            r'(http|https)://[a-zA-Z0-9-./"#$%&\':?=_]+')
        self.newline_regex = re.compile(r'\n')
        self.cont_spaces_regex = re.compile(r'\s+')

    def _subs(self, regex: "re obj", repl: str, text: str):
        return regex.sub(repl, text)

    def remove_link(self, text: str) -> str:
        return self._subs(self.html_regex, "", text)

    def remove_newline(self, text: str) -> str:
        return self._subs(self.newline_regex, "", text)

    def convert_cont_spaces(self, text: str) -> str:
        return self._subs(self.cont_spaces_regex, " ", text)

    def strip(self, text: str) -> str:
        return text.strip()

    def execute(self, text: str) -> str:
        funcs = [
            self.remove_newline,
            self.remove_link,
            self.convert_cont_spaces,
            self.strip]
        _text = text
        for func in funcs:
            _text = func(_text)
        return _text


class Twitter(Preprocess):
    def __init__(self):
        Preprocess.__init__(self)
        username = r'@[a-zA-Z0-9_]+'
        self.mention_regex = re.compile(r'{}'.format(username))
        self.retweet_regex = re.compile(r'RT {}:'.format(username))

    def remove_mention(self, text: str) -> str:
        return self._subs(self.mention_regex, "", text)

    def remove_retweet(self, text: str) -> str:
        return self._subs(self.retweet_regex, "", text)

    def execute(self, text: str) -> str:
        funcs = [
            self.remove_newline,
            self.remove_link,
            self.remove_retweet,
            self.remove_mention,
            self.convert_cont_spaces,
            self.strip]
        _text = text
        for func in funcs:
            _text = func(_text)
        return _text


if __name__ == '__main__':
    import sys

    pre = Preprocess()
    for filename in sys.argv[1:]:
        print(filename)
        with open(filename, "r") as f:
            for line in f:
                _line = line.strip()
                print(pre.execute(_line))
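# A rough illustration of the Twitter pipeline; the sample tweet is invented
# for this sketch:
#
#     tw = Twitter()
#     tw.execute("RT @user: nice lib https://example.com !!")
#     # -> roughly "nice lib !!" (retweet marker, mention and link stripped)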
mit
Python
c6ff3e3e67194499d1653d530a29e3856191fd1e
Create Grau.py
AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb
backend/Models/Grau/Grau.py
backend/Models/Grau/Grau.py
class Departamento(object):

    def __init__(self, departamento):
        self.id = departamento.getId()
        self.nome = departamento.getNome()
mit
Python
8792e0f3f258c23713f1af7f4eab46eec796c9e3
Add primitive script for binary assets preparation
adam-dej/embegfx,adam-dej/embegfx,adam-dej/embegfx
utils/convert.py
utils/convert.py
#!/usr/bin/env python
import sys
from PIL import Image, ImageFont, ImageDraw


def write_in_c(data, name='data'):
    print('const uint8_t {0}[{1}] = {{\n\t'.format(name, len(data)), end="")
    for index, byte in enumerate(data):
        print('{0}'.format(byte), end="")
        if index != len(data)-1:
            print(', ', end="")
        if not index % 10 and index > 0:
            print('\n\t', end="")
    print('};')


def write_in_avr_c(data, name='data'):
    print('const uint8_t {0}[{1}] PROGMEM = {{\n\t'.format(name, len(data)), end="")
    for index, byte in enumerate(data):
        print('{0}'.format(byte), end="")
        if index != len(data)-1:
            print(', ', end="")
        if not index % 10 and index > 0:
            print('\n\t', end="")
    print('};')


def write_in_hex(data):
    print(''.join('0x%02x ' % i for i in data))


def write_in_binary(data):
    sys.stdout.buffer.write(bytes(data))


def img_format_0(filename, invert=False):
    image = Image.open(filename).convert('RGBA')
    image = Image.composite(image, Image.new('RGBA', image.size, (255, 255, 255)), image).convert(mode='1')
    pixels = image.load()

    bytes = [0, ]
    bytes.extend(image.size)

    byte = 0
    bit = 0
    for y in range(image.size[1]):
        for x in range(image.size[0]):
            byte |= (bool(pixels[x, y]) ^ (not invert)) << bit
            bit += 1
            if bit == 8:
                bytes.append(byte)
                byte = 0
                bit = 0
    return bytes


def font_format_1(filename, size, width=None):
    size += 1
    font = ImageFont.truetype(filename, size+1)
    char_size = [0, size-1]
    chars = list()

    if width is None:
        for code in range(ord('!'), ord('~')+1):
            if char_size[0] < (font.getsize(chr(code))[0]):
                char_size[0] = font.getsize(chr(code))[0]
    else:
        char_size[0] = width

    bytes = [1]
    bytes.extend(char_size)
    bytes.append(1)

    for code in range(ord('!'), ord('~')+1):
        im = Image.new('RGB', char_size, (255, 255, 255))
        draw = ImageDraw.Draw(im)
        draw.text((0, -1), chr(code), font=font, fill="#000000")
        chars.append(im.convert(mode='1'))

    byte = 0
    bit = 0
    for image in chars:
        pixels = image.load()
        for y in range(image.size[1]):
            for x in range(image.size[0]):
                byte |= (not bool(pixels[x, y])) << bit
                bit += 1
                if bit == 8:
                    bytes.append(byte)
                    byte = 0
                    bit = 0
    return bytes
mit
Python
a136eeefdd6cf276a0d4815fa39453737ed04727
Add py solution for 556. Next Greater Element III
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
py/next-greater-element-iii.py
py/next-greater-element-iii.py
class Solution(object):
    def nextGreaterElement(self, n):
        """
        :type n: int
        :rtype: int
        """
        s = str(n)
        for i, n in enumerate(reversed(s[:-1]), 1):
            if n < s[-i]:
                x, j = min((x, k) for k, x in enumerate(s[-i:]) if x > n)
                ans = s[:-i - 1]
                ans += x
                l = list(s[-i:])
                l[j] = n
                ans += ''.join(sorted(l))
                ans = int(ans)
                if ans >= 1 << 31:
                    return -1
                return ans
        else:
            return -1
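# Quick hand-checked examples of the method above (values chosen for
# illustration):
#
#     Solution().nextGreaterElement(12)  # -> 21
#     Solution().nextGreaterElement(21)  # -> -1 (already the largest permutation)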
apache-2.0
Python
1c2eebe236dcfcc607749ebcba7a769bb27b5176
test creation of blank CounterJournal item
pitthsls/pycounter,chill17/pycounter
pycounter/test/test_classes.py
pycounter/test/test_classes.py
import unittest

from pycounter import report


class TestJournalClass(unittest.TestCase):
    def test_counter_journal(self):
        journal = report.CounterJournal()
        self.assertEqual(journal.issn, "")
mit
Python
feb47562d45294cb4e9c3ae2d0bc80b7b766bcc8
Create pKaKs3.py
CharlesSanfiorenzo/Bioinformatics,CharlesSanfiorenzo/Bioinformatics,CharlesSanfiorenzo/Bioinformatics
Modules/pKaKs3.py
Modules/pKaKs3.py
# This short script uses the output values of KaKs.pl & SnpEff to calculate
# mutational load using Nei-Gojobori:
# pKa/Ks = [-3/4 ln(1 - 4pn/3)] / [-3/4 ln(1 - 4ps/3)],
# where ps = syn SNPs / syn sites and pn = nonsyn SNPs / nonsyn sites

from math import log  # If for some reason you need to calculate the logarithm of a negative number, import cmath instead.
import configparser

config = configparser.RawConfigParser()
config.read("config.ini")
nonSyn_site = float(config.get("myvars", "non-synonymous_number"))
Syn_site = float(config.get("myvars", "synonymous_number"))
nonSyn_SNP = float(config.get("myvars", "non-synonymous_snp"))
Syn_SNP = float(config.get("myvars", "synonymous_snp"))

pn = nonSyn_SNP/nonSyn_site
ps = Syn_SNP/Syn_site

print("The pKa/Ks ratio for this organism is:",
      (-3/4*log(1-(4*pn)/3))/(-3/4*log(1-(4*ps)/3)))
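# A worked example with invented counts (not from any real config.ini):
# nonSyn_site=1000, Syn_site=500, nonSyn_SNP=30, Syn_SNP=45 gives
# pn = 30/1000 = 0.03 and ps = 45/500 = 0.09, so
# pKa/Ks = ln(1 - 4*0.03/3) / ln(1 - 4*0.09/3)
#        = ln(0.96) / ln(0.88) ~= 0.0408 / 0.1278 ~= 0.32
# (the -3/4 factors cancel in the ratio).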
mit
Python
ed611e9f9c3470712b296188e5ee6e2432cb04b5
Add scanner
kishanreddykethu/PyARPScanner,kishanreddykethu/PyARPScanner
PyARPScanner.py
PyARPScanner.py
#!/usr/bin/env python
import netifaces
import commands
import sys
import time
from collections import defaultdict
from scapy.all import *


def scanner():
    # default = "route | grep 'default' | awk '{print $8}'"
    gws = netifaces.gateways()
    default = gws['default'][netifaces.AF_INET]
    print 'Default Interface -- '+default[1]+' Gateway -- '+default[0]
    # diface = commands.getoutput(default)
    diface = default[1]
    srcip = netifaces.ifaddresses(diface)[2][0]['addr']
    netmask = netifaces.ifaddresses(diface)[2][0]['netmask']
    octets = srcip.split('.')
    starttime = time.time()
    global gw
    gw = octets[0] + "." + octets[1] + "." + octets[2]
    dest = gw + ".0/24"
    # print dest
    answered, unanswered = srp(Ether(dst="ff:ff:ff:ff:ff:ff") / ARP(pdst=str(dest)), timeout=2, verbose=0)
    endtime = time.time()
    ifaces = "ifconfig | grep -o " + str(diface) + " | wc -l"
    num = int(commands.getoutput(ifaces))
    setips = defaultdict(list)
    setips[diface].append(str(srcip))
    existing = [srcip]
    freeips = []
    totaltime = endtime - starttime
    print "Sent ARP requests in %f seconds..." % (totaltime)
    for i in range(0, num - 1):
        iface = diface + ":" + str(i)
        ip = netifaces.ifaddresses(iface)[2][0]['addr']
        setips[iface].append(str(ip))
        existing.append(str(ip))
    # print setips
    for i in range(0, len(answered)):
        print "Response from ip -- " + answered[i][1].psrc + " using MAC -- " + answered[i][1].hwsrc
    print "Found %d ips that are already set to this computer." % (len(setips))
    for i in range(0, len(unanswered)):
        freeips.append(str(unanswered[i][1].pdst))
    freeips = set(freeips) - set(existing)
    freeips.remove(gw + '.0')
    freeips.remove(gw + '.255')
    # freeips.remove(gw+'.1')
    print "Found %d ips that are free." % (len(freeips))
    completedtime = time.time()
    totaltime = completedtime - starttime
    print "Completed scan in %f seconds..." % totaltime
    print 'The following ips are set to this computer', existing
    # unanswered = unanswered.remove(srcip)
    # return freeips
    # print setips


if __name__ == '__main__':
    scanner()
mit
Python
843083b9469362ee3ef1c2e2259f1ce3e1e966d0
Add ELF "loader"/parser in Python
Dentosal/rust_os,Dentosal/rust_os,Dentosal/rust_os
tools/pseudo_elf_loader.py
tools/pseudo_elf_loader.py
import sys
if sys.version_info[0] != 3:
    exit("Py3 required.")

import ast


class MockRam(dict):
    def __missing__(self, addr):
        return None


def b2i(l):
    return sum([a*0x100**i for i, a in enumerate(l)])


def i2b(i):
    b = []
    while i:
        b.append(i % 0x100)
        i //= 0x100
    return b


def main(fn, mreq):
    with open(fn, "rb") as f:
        img = f.read()

    # verify image
    print("Verifying...")
    assert img[0:4] == bytes([0x7f, 0x45, 0x4c, 0x46]), "magic"
    assert img[4] == 0x2, "bitness"
    assert img[18] == 0x3e, "instruction set"
    assert img[5] == 0x1, "endianess"
    assert img[6] == 0x1, "version"
    assert img[54] == 0x38, "program header size"
    print("Verification ok.\n")

    print("Load point {:#x}".format(b2i(img[24:24+8])))

    pht_pos = b2i(img[32:32+8])
    pht_len = b2i(img[56:56+2])
    print("Program header len={} pos={:#x}".format(pht_len, pht_pos))

    ptr = pht_pos
    ram = MockRam()
    for index in range(pht_len):
        print("Header #{}:".format(index+1))
        segement_type = img[ptr]
        if segement_type == 1:
            print(" This is a LOAD segment")
            flags = b2i(img[(ptr+4):(ptr+4)+4])
            p_offset = b2i(img[(ptr+8):(ptr+8)+8])
            p_vaddr = b2i(img[(ptr+16):(ptr+16)+8])
            p_filesz = b2i(img[(ptr+32):(ptr+32)+8])
            p_memsz = b2i(img[(ptr+40):(ptr+40)+8])

            # clear
            for i in range(p_memsz):
                ram[p_vaddr+i] = 0
            # copy
            for i in range(p_filesz):
                ram[p_vaddr+i] = img[p_offset+i]
                if p_vaddr+i in mreq:
                    print("{:#x}->{:#x}: {:#x}".format(p_offset+i, p_vaddr+i, ram[p_vaddr+i]))

            print(" Flags: {} ({:#b})".format("".join([(l*(flags & (1 << i) != 0)) for i, l in enumerate("XWR")]), flags))
            print(" Clear {:#x} bytes starting at {:#x}".format(p_memsz, p_vaddr))
            print(" Copy {:#x} bytes from {:#x} to {:#x}".format(p_filesz, p_offset, p_vaddr))
            print(" Initialized: {:#x} bytes, uninitialized: {:#x} bytes".format(p_filesz, p_memsz-p_filesz))
        else:
            print(" This isn't a LOAD segment")
        ptr += 0x38

    for r in mreq:
        print("{:#x}: {:#x}".format(r, ram[r]))


if __name__ == '__main__':
    main(sys.argv[1], [int(ast.literal_eval(r)) for r in sys.argv[2:]])
mit
Python
0d90e90b496c4ba69220c5ca225e99eec85cc18f
add ParticleFilter function
hsiaoching/streethunt-matcher,acsalu/streethunt-matcher
ParticleFilter.py
ParticleFilter.py
import numpy as np
import math
import cv2
import operator

'''
old_im = cv2.imread('old_image.jpg')
old_im_compensate = cv2.imread('affine_frame.jpg')
#old_gray = cv2.cvtColor(old_im, cv2.COLOR_BGR2GRAY)
new_im = cv2.imread('new_image.jpg')
#new_gray = cv2.cvtColor(new_im, cv2.COLOR_BGR2GRAY)

diff = np.absolute(new_im - old_im)
diff_compensate = np.absolute(new_im - old_im_compensate)
diff_gray = cv2.cvtColor(diff, cv2.COLOR_BGR2GRAY)
diff_gray_compensate = cv2.cvtColor(diff_compensate, cv2.COLOR_BGR2GRAY)
#diff_gray = old_gray - new_gray

#cv2.namedWindow('diff_image', 0)
#cv2.imshow('diff_image', diff_gray)
#cv2.namedWindow('diff_image_compensate', 0)
#cv2.imshow('diff_image_compensate', diff_gray_compensate)
#cv2.waitKey(0)
'''

#threshold = 250
#length, width = diff_gray.shape

'''
im_thres=np.zeros((length, width))
im_thres_compensate=np.zeros((length, width))
for i, j in zip(*np.where(diff_gray>threshold)):
    im_thres[i,j] = 255
for i, j in zip(*np.where(diff_gray_compensate>threshold)):
    im_thres_compensate[i,j] = 255

cv2.namedWindow('im_thres', 0)
cv2.imshow('im_thres', im_thres)
cv2.namedWindow('im_thres_compensate', 0)
cv2.imshow('im_thres_compensate', im_thres_compensate)
cv2.waitKey(0)
'''

##########Particle Filter#########################
def ParticleFilter(St_prev, diff_im):
    St = []
    n = 0
    k = 0
    m = 6
    alpha = 0
    length, width = diff_im.shape
    particle_filter = np.zeros((length, width))
    if not St_prev:
        print 'St_prev is empty.'
        threshold = 253
        for i, j in zip(*np.where(diff_im > threshold)):
            wt = 1/math.sqrt(m)*sum(sum(diff_im[max(i-m/2, 0):min(i+m/2, length), max(j-m/2, 0):min(j+m/2, width)]))
            alpha = alpha+wt
            St.append((i, j, wt))
            n = n+1
            #if wt>0:
                #particle_filter[i,j]=255
    else:
        print 'The size of St_prev is ', len(St_prev)
        St_prev_sorted = sorted(St_prev, key=operator.itemgetter(2))
        while n < 10000:
            i, j = (St_prev_sorted[n][0], St_prev_sorted[n][1])
            wt = 1/math.sqrt(m)*sum(sum(diff_im[max(i-m/2, 0):min(i+m/2, length), max(j-m/2, 0):min(j+m/2, width)]))
            alpha = alpha+wt
            St.append((i, j, wt))
            n = n+1
    for idx in range(n):
        St[idx] = (St[idx][0], St[idx][1], St[idx][2]/alpha)
        if St[idx][2] > 0:
            #print St[idx][2]
            particle_filter[St[idx][0], St[idx][1]] = 255
    print '(alpha, n) = ', alpha, n
    cv2.namedWindow('Particle_Filter', 0)
    cv2.imshow('Particle_Filter', particle_filter)
    cv2.namedWindow('diff_gray_compensate', 0)
    cv2.imshow('diff_gray_compensate', diff_im)
    cv2.waitKey(2000)
    return St

'''
St_prev = []
St = []
for i in range(4):
    diff_im = cv2.imread('diff%d.jpg' % i)
    if len(diff_im.shape)==3:
        diff_im = cv2.cvtColor(diff_im, cv2.COLOR_BGR2GRAY)
    St = ParticleFilter(St_prev, diff_im)
    St_prev = St
    St = []
'''
mit
Python
ba8eb16640a40f9c2f361251adecb8c91d1c9a07
create stream.py
PhloxAR/phloxar,PhloxAR/phloxar
PhloxAR/stream.py
PhloxAR/stream.py
# -*- coding: utf-8 -*-

from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals

# TODO: more detailed

from PhloxAR.base import *
apache-2.0
Python
d8dc3b1696ce9e8b64bb2eea55e718553789cfc1
Add Time.Timeout.TimeoutAbsMono class, which is like Timeout.TimeoutAbs but is taking MonoTime instead of realtime as an argument.
synety-jdebp/rtpproxy,sippy/rtpproxy,sippy/rtpproxy,dsanders11/rtpproxy,dsanders11/rtpproxy,jevonearth/rtpproxy,synety-jdebp/rtpproxy,dsanders11/rtpproxy,jevonearth/rtpproxy,sippy/rtp_cluster,jevonearth/rtpproxy,synety-jdebp/rtpproxy,jevonearth/rtpproxy,sippy/rtpproxy,sippy/rtp_cluster,synety-jdebp/rtpproxy
Time/Timeout.py
Time/Timeout.py
# Copyright (c) 2003-2005 Maxim Sobolev. All rights reserved.
# Copyright (c) 2006-2014 Sippy Software, Inc. All rights reserved.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

from datetime import datetime
from twisted.internet import task, reactor
from traceback import print_exc, format_list, extract_stack
from sys import stdout


class TimeoutAbsMono:
    _task = None
    _timeout_callback = None

    def __init__(self, timeout_callback, etime, *callback_arguments):
        etime = -etime.offsetFromNow()
        if etime < 0:
            etime = 0
        self._timeout_callback = timeout_callback
        self._task = reactor.callLater(etime, self._run_once, *callback_arguments)

    def _run_once(self, *callback_arguments):
        try:
            self._timeout_callback(*callback_arguments)
        except:
            print datetime.now(), 'TimeoutAbsMono: unhandled exception in timeout callback'
            print '-' * 70
            print_exc(file = stdout)
            print '-' * 70
            stdout.flush()
        self._task = None
        self._timeout_callback = None

    def cancel(self):
        self._task.cancel()
        self._task = None
        self._timeout_callback = None


if __name__ == '__main__':
    from twisted.internet import reactor
    from sippy.Time.MonoTime import MonoTime

    def test1(arguments, testnum, mtm):
        arguments['delay'] = mtm.offsetFromNow()
        print testnum, arguments['delay']
        arguments['test'] = True
        reactor.crash()

    def test2(arguments, testnum, mtm):
        arguments['delay'] = mtm.offsetFromNow()
        print testnum, arguments['delay']
        arguments['test'] = 'bar'
        reactor.crash()

    mt = MonoTime()
    arguments = {'test':False, 'delay':None}
    timeout_1 = TimeoutAbsMono(test1, mt, arguments, 'test1', mt)
    reactor.run()
    assert(arguments['test'])
    assert(arguments['delay'] < 0.1)

    mt1 = mt.getOffsetCopy(0.1)
    mt2 = mt.getOffsetCopy(0.2)
    arguments = {'test':False, 'delay':None}
    timeout_1 = TimeoutAbsMono(test1, mt1, arguments, 'test2', mt1)
    timeout_2 = TimeoutAbsMono(test2, mt2, arguments, 'test3', mt2)
    timeout_1.cancel()
    reactor.run()
    assert(arguments['test'] == 'bar')
    assert(arguments['delay'] < 0.1)
bsd-2-clause
Python
e4b108fa5c0221eb2b585550b04be14ff56d26e5
Add Toy playlist creation
SLongofono/448_Project4,SLongofono/448_Project4
Toy_Playlist.py
Toy_Playlist.py
'''
Written by Paul Lamere
06/05/2015
Accessed 10/23/2016
https://github.com/plamere/spotipy/blob/master/examples/create_playlist.py
Modified by Stephen Longofono 10/23/2016
'''

import sys
import os
import subprocess
import spotipy
import spotipy.util as util

if len(sys.argv) > 2:
    username = sys.argv[1]
    playlist_name = sys.argv[2]
else:
    print("Usage: %s username playlist-name" % (sys.argv[0],))
    sys.exit()

token = util.prompt_for_user_token(username)

if token:
    sp = spotipy.Spotify(auth=token)
    sp.trace = False
    playlists = sp.user_playlist_create(username, playlist_name)

    # Get new songs to add from file
    try:
        songIDs = []
        songList = open('recommended.txt', 'r')
        for song in songList:
            songIDs.append(song)
        songList.close()
    except:
        print "Error processing recommendations..."
        sys.exit()

    # Add songs
    try:
        for song in songIDs:
            sp.user_playlist_add_tracks(username, playlists['id'], [song])
    except:
        print "Error adding songs to playlist..."
        sys.exit()

    # Add to list of already suggested songs
    x = open('oldsongs', 'a+')
    for song in songIDs:
        x.write(str(song))
        x.write('\n')
    x.close()

    # Remove recommended songs
else:
    print("Can't get token for", username)
mit
Python
32d46fe3e080b13ab9ae9dc3d868e9a724cccda9
Add unit test for IosBrowserFinder.
krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,jaruba/chromium.src,ltilve/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,dushu1203/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,M4sse/chromium.src,Just-D/chromium-1,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,fujunwei/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,Jonekee/chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,M4sse/chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,Chilledheart/chromium,ondra-novak/chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,littlstar/chromium.src,jaruba/chromium.src,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,ondra-novak/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,Just-D/chromium-1,Chilledheart/chromium,M4sse/chromium.src,Fireblend/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,Jonekee/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,hgl888/chromium
-crosswalk,Jonekee/chromium.src,Jonekee/chromium.src,ltilve/chromium,jaruba/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,dushu1203/chromium.src,dushu1203/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,krieger-od/nwjs_chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,Jonekee/chromium.src,jaruba/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,fujunwei/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,littlstar/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,axinging/chromium-crosswalk,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,ltilve/chromium,ondra-novak/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,littlstar/chromium.src,dednal/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,ltilve/chromium,dushu1203/chromium.src,ltilve/chromium,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src
tools/telemetry/telemetry/core/backends/chrome/ios_browser_finder_unittest.py
tools/telemetry/telemetry/core/backends/chrome/ios_browser_finder_unittest.py
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import unittest

from telemetry.core import browser_options
from telemetry.core.backends.chrome import ios_browser_finder
from telemetry.unittest import test


class IosBrowserFinderUnitTest(unittest.TestCase):
  # TODO(baxley): Currently the tests require a device with Chrome running.
  # This should be stubbed out so it runs on any system, with no device
  # dependencies.
  @test.Enabled('ios')
  def testFindIosChrome(self):
    finder_options = browser_options.BrowserFinderOptions()
    browsers = ios_browser_finder.FindAllAvailableBrowsers(finder_options)
    self.assertTrue(browsers)
    for browser in browsers:
      self.assertEqual('ios-chrome', browser.browser_type)

if __name__ == '__main__':
  unittest.main()
bsd-3-clause
Python
f753711c502b54ad8bf2c992336a5ad002e069bb
Create bearing.py
shyampurk/m2m-traffic-corridor,shyampurk/m2m-traffic-corridor,shyampurk/m2m-traffic-corridor
server/traffic_calc/bearing.py
server/traffic_calc/bearing.py
#!/usr/bin/python

'''
/***************************************************************************************
Name        : bearng
Description : calculates the bearing (angle) between two given latitude and longitude points
Parameters  : l_lat1 and l_lng1 are point one latitude and longitude respectively
              l_lat2 and l_lng2 are point two latitude and longitude respectively
Return      : This function will return the bearing (angle) between two given latitude and longitude points
****************************************************************************************/
'''
import math


def bearng(l_lat1, l_lng1, l_lat2, l_lng2):
    l_lat1 = float(l_lat1)
    l_lng1 = float(l_lng1)
    l_lat2 = float(l_lat2)
    l_lng2 = float(l_lng2)

    lndif = (l_lng2 - l_lng1)
    y = math.sin(lndif) * math.cos(l_lat1)
    x = math.cos(l_lat2) * math.sin(l_lat1) - math.sin(l_lat2) * math.cos(l_lat1)*math.cos(lndif)
    l_brng = math.atan2(y, x)
    l_brng = math.degrees(l_brng)
    l_brng = (l_brng + 360) % 360
    l_brng = (360 - l_brng)
    return l_brng
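# An illustrative call (coordinates invented): note that the trigonometry
# above operates on the inputs as given, so they are assumed to be in
# radians; convert degrees first if needed:
#
#     bearng(math.radians(50.0), math.radians(5.0),
#            math.radians(58.0), math.radians(3.0))  # -> bearing in [0, 360)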
mit
Python
99fd5661e976dfc3bf8968f171b41af83ff5f034
add plot for percentage
efficient/HOPE,efficient/HOPE,efficient/HOPE
plot/microbench/percentage/cpr_email.py
plot/microbench/percentage/cpr_email.py
import sys
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plot
import matplotlib.ticker as ticker
import numpy as np
import csv

NUM_LINES = 4
LINE_NAMES = ["Single-Char", "Double-Char", "ALM", "3-Grams", "4-Grams", "ALM-Improved"]
NUM_3_POINTS = 7
NUM_4_EXTRA_POINTS = 2

#COLORS = ['#fef0d9', '#fdcc8a', '#fc8d59', '#d7301f', '#33b3cc', '#3366CC']
COLORS = ['#fff7ec', '#fee8c8', '#fdd49e', '#fc8d59', '#d7301f', '#7f0000']

SHAPE_BORDER = 0.5
EDGE_COLOR = 'black'

Y_LABEL = "Compression Rate"
Y_LABEL_FONT_SIZE = 20

X_LABEL = "Data Percentage"
X_LABEL_FONT_SIZE = 20

X_TICK_FONT_SIZE = 16
Y_TICK_FONT_SIZE = 16

X_START = 10
X_LIMIT = 100

LEGEND_FONT_SIZE = 14
LEGEND_POS = 'upper left'

GRAPH_HEIGHT = 4.5  # inches
GRAPH_WIDTH = 8  # inches

CSV_FILE_PATH = "results/microbench/percentage/per_cpr_lat.csv"
GRAPH_OUTPUT_PATH = "figures/microbench/percentage/cpr_email_dict_size.pdf"

data_x = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]

f_in_y = open(CSV_FILE_PATH)
reader = csv.reader(f_in_y)
csvrows = list(reader)
data_y = []
for row in csvrows:
    for item in row:
        data_y.append(float(item))

single_x = []
double_x = []
three_grams_x = []
three_grams_y = []
four_grams_x = []
four_grams_y = []
alm_x = []
alm_y = []
alm_improved_x = []
alm_improved_y = []

def get_data(start_idx, end_idx, step, data):
    part_data = []
    for i in range(start_idx, end_idx, step):
        part_data.append(data[i])
    return part_data

single_y = get_data(0, 20, 2, data_y)
double_y = get_data(20, 40, 2, data_y)
three_grams_y = get_data(40, 60, 2, data_y)
four_grams_y = get_data(60, 80, 2, data_y)
alm_y = get_data(80, 100, 2, data_y)
alm_improved_y = get_data(100, 120, 2, data_y)

#========================================================================================
mpl.rcParams['ps.useafm'] = True
mpl.rcParams['pdf.use14corefonts'] = True
mpl.rcParams['text.usetex'] = False
mpl.rcParams['text.latex.preamble'] = [
    r'\usepackage{siunitx}',   # i need upright \micro symbols, but you need...
    r'\sisetup{detect-all}',   # ...this to force siunitx to actually use your fonts
    r'\usepackage{helvet}',    # set the normal font here
    r'\usepackage{sansmath}',  # load up the sansmath so that math -> helvet
    r'\sansmath'               # <- tricky! -- gotta actually tell tex to use!
]
#========================================================================================

fig = plot.figure(figsize=(GRAPH_WIDTH, GRAPH_HEIGHT))
ax = fig.add_subplot(111)

ax.plot(data_x, single_y, 's-', ms=10, mew=SHAPE_BORDER, mec=EDGE_COLOR, lw=3, color=COLORS[0], label=LINE_NAMES[0])
ax.plot(data_x, double_y, 'o-', ms=10, mew=SHAPE_BORDER, mec=EDGE_COLOR, lw=3, color=COLORS[1], label=LINE_NAMES[1])
ax.plot(data_x, alm_y, 'd-', ms=10, lw=3, mew=SHAPE_BORDER, mec=EDGE_COLOR, color=COLORS[2], label=LINE_NAMES[2])
ax.plot(data_x, three_grams_y, 'p-', mew=SHAPE_BORDER, mec=EDGE_COLOR, ms=10, lw=3, color=COLORS[3], label=LINE_NAMES[3])
ax.plot(data_x, four_grams_y, '^-', mew=SHAPE_BORDER, mec=EDGE_COLOR, ms=10, lw=3, color=COLORS[4], label=LINE_NAMES[4])
ax.plot(data_x, alm_improved_y, 'v-', mew=SHAPE_BORDER, mec=EDGE_COLOR, ms=10, lw=3, color=COLORS[5], label=LINE_NAMES[5])

for label in ax.get_xticklabels():
    label.set_fontsize(X_TICK_FONT_SIZE)
for label in ax.get_yticklabels():
    label.set_fontsize(Y_TICK_FONT_SIZE)

ax.set_xlabel(X_LABEL, fontsize=X_LABEL_FONT_SIZE)
ax.set_xticks(data_x)
ax.set_xlim(X_START, X_LIMIT)

y_ticks = [0, 1.0, 2.0, 3.0, 4.0]
ax.set_yticks(y_ticks)
ax.set_ylim(0, 4.0)
ax.set_ylabel(Y_LABEL, fontsize=Y_LABEL_FONT_SIZE)

ax.grid()
ax.set_axisbelow(True)

ax.legend(loc=LEGEND_POS, prop={'size': LEGEND_FONT_SIZE}, ncol=2)

plot.savefig(GRAPH_OUTPUT_PATH, bbox_inches='tight')
apache-2.0
Python
0f00e710f3a2239024d6a2f0efd539d32b5c8aaf
Add taxonomy loader
designforsf/brigade-matchmaker,designforsf/brigade-matchmaker,designforsf/brigade-matchmaker,designforsf/brigade-matchmaker
components/taxonomy/scripts/load_taxonomy.py
components/taxonomy/scripts/load_taxonomy.py
""" Created on Wed Aug 22 19:55:11 PDT 2018 @author: rickpr Requirements: - toml, pymongo need to be installed - mongodb needs to be running Installation: pip3 install pymomgo pip3 install toml """ import sys from pymongo import MongoClient import toml #!/usr/bin/env python3 # -*- coding: utf-8 -*- class LoadTaxonomy: """ Creates JSON from TOML and loads it into MongoDB """ database_name = 'brigade-matchmaker' client = MongoClient('localhost', 27017) db = client[database_name] def __init__(self, toml_filename='taxonomy.toml'): # load the taxonomy data from the TOML file, and create JSON self.taxonomy_toml = toml.load(toml_filename) def taxonomy_json(self): """ Create the JSON to put into MongoDB """ fixed_dict = [ self.add_parent(key, value) for key, value in self.taxonomy_toml.items() ] return fixed_dict def load_taxonomy(self): """ Load the JSON into the database. Dangerous! """ self.db.projecttaxonomies.drop() for row in self.taxonomy_json(): self.db.projecttaxonomies.insert_one(row) return True def add_parent(self, key, value): """ Add the parent for the Mongo Entry """ split_key = key.split('/') value['name'] = split_key[-1] value['parent'] = split_key[-2] if len(split_key) > 1 else None return value # When calling from command line, you may specify input and output file TOML_FILE = sys.argv[1] if len(sys.argv) >= 2 else 'taxonomy.toml' LOADER = LoadTaxonomy(TOML_FILE) LOADER.load_taxonomy()
mit
Python
9ec883040abbdc91c1eef7884b514d45adbf809a
Add Slave file
rimpybharot/CMPE273
assignment2/slave.py
assignment2/slave.py
'''
################################## server.py #############################
# Lab1 gRPC RocksDB Server
################################## server.py #############################
'''
import time
import grpc
import replicator_pb2
import replicator_pb2_grpc
import uuid
import rocksdb
import encodings


class Slave:
    def __init__(self):
        self.slave_db = rocksdb.DB("slave.db", rocksdb.Options(create_if_missing=True))

    def put(self, key, value):
        print("put")
        self.slave_db.put(key.encode(), value.encode())

    def get(self, key):
        print("get")
        value = (self.slave_db.get(key.encode())).decode()
        return value
mit
Python
4eb8a1e2e3b9618806bf9a1108dbd2043fa88724
add twitter mod
KenN7/LifeTracker,KenN7/LifeTracker,KenN7/LifeTracker,KenN7/LifeTracker
appartbot/twitter.py
appartbot/twitter.py
import twython
import logging


class twytbot:
    def __init__(self, key, secret, acctok, sectok):
        self.KEY = key
        self.SECRET = secret
        self.ACCESS_TOKEN = acctok
        self.SECRET_TOKEN = sectok
        self.twitter = None

    def authentificate(self):
        self.twitter = twython.Twython(self.KEY, self.SECRET, self.ACCESS_TOKEN, self.SECRET_TOKEN)
        try:
            self.twitter.verify_credentials()
        except Exception as e:
            logging.warn("Twitter log failed %s" % e)
apache-2.0
Python
bc35e89d04e541f75fc12788893b21a3b876aaf9
Create test case for tail from file
shuttle1987/tail,shuttle1987/tail
tail/tests/test_tail.py
tail/tests/test_tail.py
""" Tests for the tail implementation """ from tail import FileTail def test_tail_from_file(): """Tests that tail works as advertised from a file""" from unittest.mock import mock_open, patch # The mock_data we are using for our test mock_data = """A B C D E F """ mocked_open = mock_open(read_data=mock_data) # mock_open does not support iteration by lines by default so # we must define the following: mocked_open.return_value.__iter__.return_value = mock_data.splitlines() # We need to patch the open found in the namespace of the module # where the function is defined with patch('mocking_file_opens.open', mocked_open, create=True) as mocked_file_open: res = FileTail('Test_filename.txt').tail(3) mocked_file_open.assert_called_once_with('Test_filename.txt', 'r') assert len(res) == 3 assert res == ["D", "E", "F"]
mit
Python
f956b2ce8e8e2ef87be0dc11aac48dce54e57088
Test Logger
theon/pelicangit
pelicangit/log.py
pelicangit/log.py
import logging
import os


def setup_logging():
    home_dir = os.path.expanduser("~")
    log_file = os.path.join(home_dir, 'pelicangit.log')

    logger = logging.getLogger('pelicangit')
    logger.setLevel(logging.DEBUG)

    formatter = logging.Formatter('%(levelname)s %(asctime)s :: %(message)s')

    file_handler = logging.FileHandler(filename=log_file)
    file_handler.setFormatter(formatter)
    logger.addHandler(file_handler)
agpl-3.0
Python
d08c619b8ea6063f8a414c69c8d38226719e292b
Correct super call in DatabaseIntrospection subclass
mozilla/addons-server,kumar303/olympia,psiinon/addons-server,psiinon/addons-server,wagnerand/olympia,diox/olympia,kumar303/addons-server,bqbn/addons-server,eviljeff/olympia,aviarypl/mozilla-l10n-addons-server,eviljeff/olympia,diox/olympia,kumar303/olympia,wagnerand/olympia,bqbn/addons-server,psiinon/addons-server,kumar303/addons-server,aviarypl/mozilla-l10n-addons-server,mozilla/olympia,bqbn/addons-server,bqbn/addons-server,wagnerand/addons-server,psiinon/addons-server,eviljeff/olympia,mozilla/addons-server,diox/olympia,mozilla/olympia,diox/olympia,wagnerand/olympia,wagnerand/addons-server,wagnerand/addons-server,mozilla/olympia,wagnerand/addons-server,mozilla/addons-server,eviljeff/olympia,mozilla/addons-server,wagnerand/olympia,kumar303/olympia,mozilla/olympia,kumar303/addons-server,aviarypl/mozilla-l10n-addons-server,aviarypl/mozilla-l10n-addons-server,kumar303/addons-server,kumar303/olympia
src/olympia/core/db/mysql/base.py
src/olympia/core/db/mysql/base.py
from django.db.backends.mysql.base import (
    DatabaseWrapper as MySQLDBWrapper,
    DatabaseIntrospection as MySQLDBIntrospection,
    DatabaseSchemaEditor as MySQLDBSchemeEditor)


class DatabaseIntrospection(MySQLDBIntrospection):
    def get_field_type(self, data_type, description):
        field_type = super(DatabaseIntrospection, self).get_field_type(
            data_type, description)
        if 'auto_increment' in description.extra:
            if field_type == 'IntegerField':
                if description.is_unsigned:
                    return 'PositiveAutoField'
        return field_type


class DatabaseSchemaEditor(MySQLDBSchemeEditor):
    def create_model(self, model):
        for field in model._meta.local_fields:
            # Autoincrement SQL for backends with post table definition variant
            if field.get_internal_type() == "PositiveAutoField":
                autoinc_sql = self.connection.ops.autoinc_sql(
                    model._meta.db_table, field.column)
                if autoinc_sql:
                    self.deferred_sql.extend(autoinc_sql)
        super(DatabaseSchemaEditor, self).create_model(model)


class DatabaseWrapper(MySQLDBWrapper):
    introspection_class = DatabaseIntrospection
    SchemaEditorClass = DatabaseSchemaEditor
    _data_types = dict(
        MySQLDBWrapper._data_types,
        PositiveAutoField='integer UNSIGNED AUTO_INCREMENT')
from django.db.backends.mysql.base import (
    DatabaseWrapper as MySQLDBWrapper,
    DatabaseIntrospection as MySQLDBIntrospection,
    DatabaseSchemaEditor as MySQLDBSchemeEditor)


class DatabaseIntrospection(MySQLDBIntrospection):
    def get_field_type(self, data_type, description):
        field_type = super().get_field_type(data_type, description)
        if 'auto_increment' in description.extra:
            if field_type == 'IntegerField':
                if description.is_unsigned:
                    return 'PositiveAutoField'
        return field_type


class DatabaseSchemaEditor(MySQLDBSchemeEditor):
    def create_model(self, model):
        for field in model._meta.local_fields:
            # Autoincrement SQL for backends with post table definition variant
            if field.get_internal_type() == "PositiveAutoField":
                autoinc_sql = self.connection.ops.autoinc_sql(
                    model._meta.db_table, field.column)
                if autoinc_sql:
                    self.deferred_sql.extend(autoinc_sql)
        super(DatabaseSchemaEditor, self).create_model(model)


class DatabaseWrapper(MySQLDBWrapper):
    introspection_class = DatabaseIntrospection
    SchemaEditorClass = DatabaseSchemaEditor
    _data_types = dict(
        MySQLDBWrapper._data_types,
        PositiveAutoField='integer UNSIGNED AUTO_INCREMENT')
bsd-3-clause
Python
87a79b2c3e43a5408aa89880f5b0f65dcfb810d9
solve 11909
arash16/prays,arash16/prays,arash16/prays,arash16/prays,arash16/prays,arash16/prays
UVA/vol-119/11909.py
UVA/vol-119/11909.py
from sys import stdin, stdout
from itertools import zip_longest
import math

for l, w, h, t in zip_longest(*[iter(map(int, stdin.read().split()))]*4):
    r = math.pi * t / 180
    o = l * math.tan(r)
    if o <= h:
        s = l*h - l*o/2
    else:
        r = math.pi/2 - r
        o = h * math.tan(r)
        s = h * o / 2
    stdout.write('{:.3f} mL\n'.format(w * s))
mit
Python
e6898282c82dfe890c02f702da6dd46c00adc0f3
Add tests for multishuffle
moble/scri
tests/test_utilities.py
tests/test_utilities.py
import math
import tempfile
import pathlib
import numpy as np
import h5py
import scri
import pytest


def generate_bit_widths(bit_width):
    possible_widths = 2 ** np.arange(0, int(np.log2(bit_width)))
    bit_widths = []
    while np.sum(bit_widths) < bit_width:
        next_width = np.random.choice(possible_widths)
        if np.sum(bit_widths) + next_width <= bit_width:
            bit_widths.append(next_width)
    return bit_widths


@pytest.mark.parametrize("bit_width", [8, 16, 32, 64])
def test_multishuffle_reversibility(bit_width):
    dt = np.dtype(f'u{bit_width//8}')
    np.random.seed(123)
    data = np.random.randint(0, high=2**bit_width, size=5_000, dtype=dt)
    for bit_widths in [[1]*bit_width, [8]*(bit_width//8)] + [generate_bit_widths(bit_width) for _ in range(10)]:
        shuffle = scri.utilities.multishuffle(bit_widths)
        unshuffle = scri.utilities.multishuffle(bit_widths, forward=False)
        assert np.array_equal(data, unshuffle(shuffle(data))), bit_widths


@pytest.mark.parametrize("bit_width", [8, 16, 32, 64])
def test_multishuffle_like_hdf5(bit_width):
    dt = np.dtype(f'u{bit_width//8}')
    np.random.seed(1234)
    data = np.random.randint(0, high=2**bit_width, size=5_000, dtype=dt)
    # Save the data to file via h5py, then extract the raw data to see what
    # HDF5's shuffle looks like
    with tempfile.TemporaryDirectory() as temp_dir:
        file_name = pathlib.Path(temp_dir) / 'test.h5'
        with h5py.File(file_name, 'w') as f:
            f.create_dataset('data', data=data, shuffle=True, chunks=(data.size,))
        with h5py.File(file_name, 'r') as f:
            ds = f['data']
            filter_mask, raw_data_bytes = ds.id.read_direct_chunk((0,))
            hdf5_raw_data = np.frombuffer(raw_data_bytes, dtype=dt)
    # Shuffle with our function
    shuffle = scri.utilities.multishuffle([8]*(bit_width//8))
    scri_shuffle_data = shuffle(data)
    # Check that they are equal
    assert np.array_equal(scri_shuffle_data, hdf5_raw_data)
mit
Python
9f031861b75d7b99b0ab94d5272d378a8c3fba2e
Convert stickBreakingDemo.m to python (#613)
probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml
scripts/stick_breaking_demo.py
scripts/stick_breaking_demo.py
# Generates from stick-breaking construction

import pyprobml_utils as pml
import numpy as np
import matplotlib.pyplot as plt

alphas = [2, 5]
nn = 20

# From MATLAB's random generator.
match_matlab = True  # Set True to match MATLAB's figure.
beta1 = [0.4428, 0.0078, 0.1398, 0.5018, 0.0320, 0.3614, 0.8655, 0.6066, 0.2783, 0.4055,
         0.1617, 0.3294, 0.0956, 0.1245, 0.2214, 0.3461, 0.5673, 0.2649, 0.1153, 0.7366]
beta2 = [0.2037, 0.3486, 0.5342, 0.0609, 0.2997, 0.2542, 0.0860, 0.1865, 0.0510, 0.4900,
         0.4891, 0.7105, 0.7633, 0.1619, 0.3604, 0.0604, 0.1312, 0.3338, 0.2036, 0.1306]
beta3 = [0.3273, 0.0253, 0.1415, 0.1574, 0.0460, 0.0721, 0.3386, 0.1817, 0.2750, 0.0791,
         0.0535, 0.1091, 0.1935, 0.0550, 0.3977, 0.2322, 0.0270, 0.0871, 0.0144, 0.4171]
beta4 = [0.0395, 0.1170, 0.0272, 0.0155, 0.2190, 0.1812, 0.0569, 0.2569, 0.1311, 0.0388,
         0.3619, 0.1974, 0.3794, 0.1917, 0.0670, 0.0294, 0.0957, 0.1267, 0.0381, 0.2525]
beta_all = [np.array(beta1), np.array(beta2), np.array(beta3), np.array(beta4)]

np.random.seed(0)
fig, axs = plt.subplots(2, 2)
fig.tight_layout()
for ii, alpha in enumerate(alphas):
    for trial in range(2):
        if match_matlab:
            beta = beta_all[ii*2+trial]
        else:
            beta = np.random.beta(1, alpha, [nn])
        neg = np.cumprod(1-beta)
        neg[1:] = neg[:-1]
        neg[0] = 1
        pi = beta*neg
        axs[ii, trial].bar(np.arange(nn), pi, edgecolor='k')
        axs[ii, trial].set_title(r'$\alpha = %s$' % alpha)
pml.savefig("stickBreakingDemo.pdf")
plt.show()
mit
Python
584e9597bf40a3c738071db1f2c7f1671bad1efa
Create 3sum_closet.py
UmassJin/Leetcode
Array/3sum_closet.py
Array/3sum_closet.py
#Given an array S of n integers, find three integers in S such that the sum is closest to a given number, target.
#Return the sum of the three integers. You may assume that each input would have exactly one solution.

class Solution:
    # @return an integer
    def threeSumClosest(self, num, target):
        num.sort()
        res = num[0]+num[1]+num[2]
        if res == target:
            return res
        for i in xrange(len(num)):
            j = i+1
            k = len(num)-1
            while j < k:
                tmp = num[i]+num[j]+num[k]
                if tmp == target:
                    return tmp
                tmpres = abs(target-tmp)
                if tmpres < abs(target-res):
                    res = tmp
                if tmp > target:
                    while j < k:
                        k -= 1
                        if num[k] != num[k+1]:
                            break
                if tmp < target:
                    while j < k:
                        j += 1
                        if num[j] != num[j-1]:
                            break
        return res
mit
Python
7399645c7fb3d704f3e44b3113cf38efc32c85e8
add archive tool
akamah/cadrail,akamah/cadrail,akamah/cadrail,akamah/cadrail,akamah/cadrail
tools/archive_models.py
tools/archive_models.py
import os
import sys
import json
import glob

paths = sys.argv[1:]
models = {}

for name in paths:
    with open(name, mode='r') as f:
        m = json.load(f)
        key, _ = os.path.splitext(os.path.basename(name))
        models[key] = m

print(json.dumps(models))
mit
Python
11fe39e743019ef7fdaadc0ae4f8782add0dc918
update aoj
knuu/competitive-programming,knuu/competitive-programming,knuu/competitive-programming,knuu/competitive-programming
aoj/11/aoj1142.py
aoj/11/aoj1142.py
m = int(input())
for i in range(m):
    d = input()
    trains = [d]
    for j in range(1, len(d)):
        f, b = d[:j], d[j:]
        rf, rb = f[::-1], b[::-1]
        trains.extend([rf+b, f+rb, rf+rb, b+f, rb+f, b+rf, rb+rf])
    print(len(set(trains)))
mit
Python
2c2694d4c9ef3fdd51039b45951223708cbef3b9
Add nbsp template tag
pkimber/base,pkimber/base,pkimber/base,pkimber/base
base/templatetags/nbsp.py
base/templatetags/nbsp.py
# templatetags/nbsp.py

from django import template
from django.utils.safestring import mark_safe

register = template.Library()


@register.filter()
def nbsp(value):
    return mark_safe("&nbsp;".join(str(value).split(' ')))
apache-2.0
Python
5b80553b05b2c9df3818b815a2b156ad2f9f6437
add SQS plugin to match diamond
dbirchak/graph-explorer,vimeo/graph-explorer,vimeo/graph-explorer,vimeo/graph-explorer,dbirchak/graph-explorer,dbirchak/graph-explorer,vimeo/graph-explorer,dbirchak/graph-explorer
structured_metrics/plugins/sqs.py
structured_metrics/plugins/sqs.py
from . import Plugin


class SqsPlugin(Plugin):
    targets = [
        {
            'match': '^servers\.(?P<server>[^\.]+)\.sqs\.(?P<region>[^\.]+)\.(?P<queue>[^\.]+)\.(?P<type>ApproximateNumberOfMessages.*)$',
            'target_type': 'gauge',
            'configure': [
                lambda self, target: self.add_tag(target, 'unit', 'Msg'),
            ]
        }
    ]

# vim: ts=4 et sw=4:
apache-2.0
Python
6619bbff82f9a74a1de6c8cb569ea5cc639557d0
Refresh access token after user signs in #44
jdanbrown/pydatalab,craigcitro/pydatalab,jdanbrown/pydatalab,yebrahim/pydatalab,yebrahim/pydatalab,parthea/pydatalab,yebrahim/pydatalab,parthea/pydatalab,supriyagarg/pydatalab,craigcitro/pydatalab,jdanbrown/pydatalab,craigcitro/pydatalab,supriyagarg/pydatalab,supriyagarg/pydatalab,googledatalab/pydatalab,parthea/pydatalab,googledatalab/pydatalab,googledatalab/pydatalab
datalab/context/_context.py
datalab/context/_context.py
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Implements Context functionality."""

from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import object

from . import _project
from . import _utils


class Context(object):
  """Maintains contextual state for connecting to Cloud APIs.
  """

  _global_context = None

  def __init__(self, project_id, credentials):
    """Initializes an instance of a Context object.

    Args:
      project_id: the current cloud project.
      credentials: the credentials to use to authorize requests.
    """
    self._project_id = project_id
    self._credentials = credentials

  @property
  def credentials(self):
    """Retrieves the value of the credentials property.

    Returns:
      The current credentials used in authorizing API requests.
    """
    return self._credentials

  def set_credentials(self, credentials):
    """ Set the credentials for the context. """
    self._credentials = credentials

  @property
  def project_id(self):
    """Retrieves the value of the project_id property.

    Returns:
      The current project id to associate with API requests.
    """
    if not self._project_id:
      raise Exception('No project ID found. Perhaps you should set one with the "%projects set ..." magic.')
    return self._project_id

  def set_project_id(self, project_id):
    """ Set the project_id for the context. """
    self._project_id = project_id

  @staticmethod
  def is_signed_in():
    """ If the user has signed in or it is on GCE VM with default credential."""
    try:
      _utils.get_credentials()
      return True
    except Exception:
      return False

  @staticmethod
  def default():
    """Retrieves a default Context object, creating it if necessary.

    The default Context is a global shared instance used every time the default context is
    retrieved.

    Attempting to use a Context with no project_id will raise an exception, so on first use
    set_project_id must be called.

    Returns:
      An initialized and shared instance of a Context object.
    """
    credentials = _utils.get_credentials()
    if Context._global_context is None:
      project = _project.Projects.get_default_id(credentials)
      Context._global_context = Context(project, credentials)
    else:
      # Always update the credentials in case the access token is revoked or expired
      Context._global_context.set_credentials(credentials)
    return Context._global_context
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Implements Context functionality."""

from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import object

from . import _project
from . import _utils


class Context(object):
  """Maintains contextual state for connecting to Cloud APIs.
  """

  _global_context = None

  def __init__(self, project_id, credentials):
    """Initializes an instance of a Context object.

    Args:
      project_id: the current cloud project.
      credentials: the credentials to use to authorize requests.
    """
    self._project_id = project_id
    self._credentials = credentials

  @property
  def credentials(self):
    """Retrieves the value of the credentials property.

    Returns:
      The current credentials used in authorizing API requests.
    """
    return self._credentials

  def set_credentials(self, credentials):
    """ Set the credentials for the context. """
    self._credentials = credentials

  @property
  def project_id(self):
    """Retrieves the value of the project_id property.

    Returns:
      The current project id to associate with API requests.
    """
    if not self._project_id:
      raise Exception('No project ID found. Perhaps you should set one with the "%projects set ..." magic.')
    return self._project_id

  def set_project_id(self, project_id):
    """ Set the project_id for the context. """
    self._project_id = project_id

  @staticmethod
  def is_signed_in():
    """ If the user has signed in or it is on GCE VM with default credential."""
    try:
      _utils.get_credentials()
      return True
    except Exception:
      return False

  @staticmethod
  def default():
    """Retrieves a default Context object, creating it if necessary.

    The default Context is a global shared instance used every time the default context is
    retrieved.

    Attempting to use a Context with no project_id will raise an exception, so on first use
    set_project_id must be called.

    Returns:
      An initialized and shared instance of a Context object.
    """
    if Context._global_context is None:
      credentials = _utils.get_credentials()
      project = _project.Projects.get_default_id(credentials)
      Context._global_context = Context(project, credentials)
    return Context._global_context
apache-2.0
Python
0f80b1d304eb0d4443498c94557b0ef96d098c15
Add version
willkg/ernest,willkg/ernest,willkg/ernest
ernest/version.py
ernest/version.py
import os

VERSION = '0.1a1'
VERSION_RAW = os.environ.get('ERNEST_VERSION', VERSION)
mpl-2.0
Python
32cf7ab02ecb8f1dbd02b8a78001f8c15a97f794
Create population.py
coursolve-northamptonshire/coursolve_need203,coursolve-northamptonshire/coursolve_need203,coursolve-northamptonshire/coursolve_need203
analysis/population.py
analysis/population.py
import pandas as pd
import matplotlib.pyplot as plt
import urllib2

"""
This program reads in a csv file containing census data on Northamptonshire county in the UK.
It then plots the population data according to gender, gender by age range, and total population by age range.
Two figure files will be output to the directory specified in the variable plot_path.
"""

# the original populaton data file can be found at http://www.northamptonshireanalysis.co.uk/dataviews/view?viewId=151
# at the above URL under 'Geo-Types' choose the csv for 'County'

# where to save the figure files
plot_path = "C:\\Users\mcassar\Desktop\Coursolve project\\"

url = 'http://www.northamptonshireanalysis.co.uk/data/csv?viewId=151&geoId=28&subsetId=&viewer=CSV'
response = urllib2.urlopen(url)

#pop_file = "C:\\Users\\mcassar\\Desktop\\coursolve project\\council_pop.csv"
#df_pop = pd.read_csv(pop_file)
df_pop = pd.read_csv(response)

# remove rows not corresponding to Northamptonshire population
df_pop = df_pop[0:1]

# these ranges match what is given in the data file
age_ranges = ['0-4', '5-9', '10-14', '15-19', '20-24', '25-29', '30-34', '35-39', '40-44',
              '45-49', '50-54', '55-59', '60-64', '65-69', '70-74', '75-79', '80-84', '85-89',
              '90']  # should be 90+ but that kept giving an error

# create total population by gender dataframe
df_pop_male_female = df_pop[['Total Resident Population Persons all ages Male(2011)',
                             'Total Resident Population Persons All Ages Female(2011)']]
df_pop_male_female.columns = ['Male', 'Female']
df_pop_male_female_percent = df_pop_male_female / df_pop.values[0,2]  # dividing by total population

# generate bar plots for total population by gender
fig, axes = plt.subplots(1,2)
# need the transpose because the bins are the age ranges and these need to be the rows not columns
df_pop_male_female.transpose().plot(kind='bar', ax=axes[0], color=['b', 'y'], legend=False, title='Actual Value')
df_pop_male_female_percent.transpose().plot(kind='bar', ax=axes[1], color=['b', 'y'], legend=False, title='Percent of Total')
plt.suptitle("Northamptonshire Population by Gender (2011)", size=16)
plt.subplots_adjust(top=0.85, bottom=0.15)  # adjust spacing so subplot titles are farther from main title
#plt.savefig(plot_path + 'County_population_gender.png')

# segment the data by total population by age range (need to use 'Total(2011)' to do this as every column uses just 'Total'
total_cols = [col for col in df_pop.columns if 'Total(2011)' in col]
df_pop_total = df_pop[total_cols]
total_pop = df_pop_total.values[0,0]
df_pop_total_by_age = df_pop_total.drop(df_pop_total.columns[[0]], axis=1)
df_pop_total_by_age.columns = age_ranges
df_pop_total_by_age_percent = df_pop_total_by_age / total_pop  # get % pop by age by dividing by total population

# segment the data by male and age range
male_cols = [col for col in df_pop.columns if 'Male' in col]
df_pop_male = df_pop[male_cols]  # select only the column names with 'Male' in the title
total_male_pop = df_pop_male.values[0,0]
df_pop_male_by_age = df_pop_male.drop(df_pop_male.columns[[0]], axis=1)  # get rid of total male population column
df_pop_male_by_age.columns = age_ranges
df_pop_male_by_age_percent = df_pop_male_by_age / total_male_pop  # get % pop by age by dividing by total male pop

# segment the data by female and age range
female_cols = [col for col in df_pop.columns if 'Female' in col]
df_pop_female = df_pop[female_cols]  # select only the column names with 'Female' in the title
total_female_pop = df_pop_female.values[0,0]
df_pop_female_by_age = df_pop_female.drop(df_pop_female.columns[[0]], axis=1)  # get rid of total female population column
df_pop_female_by_age.columns = age_ranges
df_pop_female_by_age_percent = df_pop_female_by_age / total_female_pop  # get % pop by age by dividing by total female pop

# generate bar plots for total population, total male population, and female population by age range
fig, axes = plt.subplots(3,2)
ax = df_pop_total_by_age.transpose().plot(kind='bar', ax = axes[0,0], legend=False, title='Count')
df_pop_total_by_age_percent.transpose().plot(kind='bar', ax=axes[0,1], legend=False, title='Percent')
ax1 = df_pop_male_by_age.transpose().plot(kind='bar', ax = axes[1,0], legend=False)
df_pop_male_by_age_percent.transpose().plot(kind='bar', ax=axes[1,1], legend=False)
ax2 = df_pop_female_by_age.transpose().plot(kind='bar', ax = axes[2,0], legend=False)
df_pop_female_by_age_percent.transpose().plot(kind='bar', ax=axes[2,1], legend=False)
plt.suptitle("Northamptonshire Population by Age Range (2011)", size=16)
plt.subplots_adjust(top=0.87, left=0.15, hspace=0.4)  # adjust spacing b/w subplots and so subplot titles are farther from main title
ax.set_ylabel('Total')
ax1.set_ylabel('Male')
ax2.set_ylabel('Female')
#plt.savefig(plot_path + 'County_population_gender_age.png')
plt.show()
mit
Python
139c7e2ac5b5c702cd32f4e014d8f3f654855c32
Add ping pong python script
mrquincle/nRF51-ble-bcast-mesh,mrquincle/nRF51-ble-bcast-mesh,mrquincle/nRF51-ble-bcast-mesh,mrquincle/nRF51-ble-bcast-mesh
nRF51/examples/ping_pong/scripts/ping_pong.py
nRF51/examples/ping_pong/scripts/ping_pong.py
from threading import Thread
import subprocess
import sys
import os
import time
import datetime
import serial
import serial.tools.list_ports

SEGGER_VID = 1366
BAUDRATE = 460800

verbose = False
flow_control = True
startTime = datetime.datetime.now()
central = None
snr_max = 0

def printUsage():
    print "Usage: ping_pong.py [<baudrate>] [-f] [-v] [-c <port>]"
    print "\t<baudrate>\tThe desired baudrate. If no argument is given, the baudrate defaults to " + str(BAUDRATE) + "."
    print "\t-f\t\tEnable RTS/CTS flow control"
    print "\t-v\t\tEnable all event logging from central device"
    print "\t-c <port>\t\tForce the designated com port to act as central"

if "-h" in sys.argv or "--help" in sys.argv:
    printUsage()
    exit(0)

if "-f" in sys.argv:
    flow_control = True

if "-v" in sys.argv:
    verbose = True

if "-c" in sys.argv:
    central_index = sys.argv.index("-c") + 1
    if len(sys.argv) <= central_index:
        printUsage()
        exit(160)  # bad arguments
    central = sys.argv[central_index]

for arg in sys.argv:
    try:
        BAUDRATE = int(sys.argv[1])
        if BAUDRATE is None:
            printUsage()
            exit(160)
    except:
        pass

def getPorts():
    if sys.platform is "Windows":
        ports = [(name, hwid[12:16]) for (name, desc, hwid) in serial.tools.list_ports.comports()]
        portnames = [name for (name, vid) in ports if vid == str(SEGGER_VID)]
    else:
        portnames = [port[0] for port in serial.tools.list_ports.comports()]
    return portnames

def portThread(port, snr):
    global startTime
    global snr_max
    s = None
    try:
        s = serial.Serial(port, BAUDRATE, rtscts = flow_control)
    except:
        if not s is None:
            s.close()
        sys.stdout.write("Failed to establish connection to " + port + " (handle " + str(snr) + ")\n")
        return
    msgnum = 1
    try:
        s.write(str(snr) + "\r\n")
        sys.stdout.write("Assigned handle " + str(snr) + " to " + port + "\r\n")
        prevtime = datetime.datetime.now()
        while True:
            data = s.read()
            if snr is 0:
                sys.stdout.write(data)
    except Exception, e:
        print e
        sys.stdout.write("Lost " + port + "\n")
        return

threads = []

def monitorThread():
    global central
    global snr_max
    ports = []
    snr = 1
    while True:
        current_ports = getPorts()
        for port in current_ports:
            if not port in ports:
                this_snr = snr
                snr_max = int(snr)
                if central == None or central == port:  # force central
                    this_snr = 0
                    central = port
                else:
                    snr += 1
                thread = Thread(target = portThread, args = (port, this_snr), name = port)
                thread.daemon = True
                thread.start()
                threads.append(thread)
        ports = current_ports
        time.sleep(1)

thread = Thread(target = monitorThread, name = "monitor")
thread.daemon = True
thread.start()
threads.append(thread)

try:
    while True:
        time.sleep(100)
except (KeyboardInterrupt, SystemExit):
    sys.exit(0)
bsd-3-clause
Python
97b9e370d31e2e7abb3d9d56c046f61e2723dc90
Create 1-helloworld.py
CamJam-EduKit/EduKit3
Code/1-helloworld.py
Code/1-helloworld.py
#Print Hello World!
print "Hello World!"
mit
Python
50f6792de9b8dce54492b897fcffae33d1cb75ba
create test url as an optional setting
davejmac/authorizesauce-wsdl
authorize/conf.py
authorize/conf.py
from django.conf import settings

from appconf import AppConf


class authorizeConf(AppConf):
    TEST_URL = False

    class Meta:
        prefix = 'authorize'
mit
Python
6edd782d39fd64fceca86c8edb224ae3f2378083
Create new package (#6477)
tmerrick1/spack,krafczyk/spack,krafczyk/spack,iulian787/spack,LLNL/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,LLNL/spack,iulian787/spack,krafczyk/spack,krafczyk/spack,matthiasdiener/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,mfherbst/spack,tmerrick1/spack,mfherbst/spack,EmreAtes/spack,EmreAtes/spack,LLNL/spack,krafczyk/spack
var/spack/repos/builtin/packages/r-seurat/package.py
var/spack/repos/builtin/packages/r-seurat/package.py
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *


class RSeurat(RPackage):
    """Seurat is an R package designed for QC, analysis, and exploration of
    single cell RNA-seq data."""

    homepage = "http://satijalab.org/seurat/"
    url = "https://cran.r-project.org/src/contrib/Seurat_2.1.0.tar.gz"
    list_url = "https://cran.r-project.org/src/contrib/Archive/Seurat"

    version('2.1.0', '46427837bb739883f9b7addd08fccee5')
    version('2.0.1', 'a77794891e93b9fa1ef41735fe8424ea')

    depends_on('[email protected]:3.4.9')
    depends_on('r-ggplot2', type=('build', 'run'))
    depends_on('r-gplots', type=('build', 'run'))
    depends_on('r-reshape2', type=('build', 'run'))
    depends_on('r-ape', type=('build', 'run'))
    depends_on('r-tidyr', type=('build', 'run'))
    depends_on('r-caret', type=('build', 'run'))
    depends_on('r-gdata', type=('build', 'run'))
    depends_on('r-glue', type=('build', 'run'))
    depends_on('r-pkgconfig', type=('build', 'run'))
    depends_on('r-plogr', type=('build', 'run'))
    depends_on('r-gridextra', type=('build', 'run'))
    depends_on('r-cowplot', type=('build', 'run'))
    depends_on('r-rocr', type=('build', 'run'))
    depends_on('r-hmisc', type=('build', 'run'))
    depends_on('r-nmf', type=('build', 'run'))
    depends_on('r-irlba', type=('build', 'run'))
    depends_on('r-igraph', type=('build', 'run'))
    depends_on('r-fpc', type=('build', 'run'))
    depends_on('r-rcppprogress', type=('build', 'run'))
    depends_on('r-lars', type=('build', 'run'))
    depends_on('r-dtw', type=('build', 'run'))
    depends_on('r-mixtools', type=('build', 'run'))
    depends_on('r-ica', type=('build', 'run'))
    depends_on('r-diffusionmap', type=('build', 'run'))
    depends_on('r-tsne', type=('build', 'run'))
    depends_on('r-rtsne', type=('build', 'run'))
    depends_on('r-ranger', type=('build', 'run'))
    depends_on('r-pbapply', type=('build', 'run'))
    depends_on('r-ggjoy', type=('build', 'run'))
    depends_on('r-plotly', type=('build', 'run'))
    depends_on('r-sdmtools', type=('build', 'run'))
    depends_on('r-tclust', type=('build', 'run'))
    depends_on('r-fnn', type=('build', 'run'))
    depends_on('r-vgam', type=('build', 'run'))
lgpl-2.1
Python
0a80cf698a26abdf17aeeb01e21cb9910e6463d0
add a test suite
thenoviceoof/booger,thenoviceoof/booger
booger_test.py
booger_test.py
#!/usr/bin/python
################################################################################
# "THE BEER-WARE LICENSE" (Revision 42):
# <thenoviceoof> wrote this file. As long as you retain this notice
# you can do whatever you want with this stuff. If we meet some day,
# and you think this stuff is worth it, you can buy me a beer in
# return
# Nathan Hwang <thenoviceoof>
# ----------------------------------------------------------------------------
################################################################################

from unittest import TestCase

################################################################################
# Nosetest parser

from booger import NOSE_DIV_WIDTH, NosetestsParser

class NosetestsParserTest(TestCase):
    def setUp(self):
        self.parser = NosetestsParser()

    def short_output_test(self):
        inp = '=' * 70
        out, end = self.parser.parse_short_output(inp)
        assert end == True
mit
Python
771773cd3451dc04340e4a4856f7346841349772
Add cross bicoherence
synergetics/spectrum
bicoherencex.py
bicoherencex.py
#!/usr/bin/env python

from __future__ import division
import numpy as np
from scipy.linalg import hankel
import scipy.io as sio
import matplotlib.pyplot as plt

from tools import *


def bicoherencex(w, x, y, nfft=None, wind=None, nsamp=None, overlap=None):
    """
    Direct (FD) method for estimating cross-bicoherence
    Parameters:
      w,x,y   - data vector or time-series
              - should have identical dimensions
      nfft    - fft length [default = power of two > nsamp]
                actual size used is power of two greater than 'nsamp'
      wind    - specifies the time-domain window to be applied to each
                data segment; should be of length 'segsamp' (see below);
                otherwise, the default Hanning window is used.
      segsamp - samples per segment [default: such that we have 8 segments]
              - if x is a matrix, segsamp is set to the number of rows
      overlap - percentage overlap, 0 to 99 [default = 50]
              - if y is a matrix, overlap is set to 0.

    Output:
      bic     - estimated cross-bicoherence: an nfft x nfft array, with
                origin at center, and axes pointing down and to the right.
      waxis   - vector of frequencies associated with the rows and columns
                of bic; sampling frequency is assumed to be 1.
    """

    if w.shape != x.shape or x.shape != y.shape:
        raise ValueError('w, x and y should have identical dimentions')

    (ly, nrecs) = y.shape
    if ly == 1:
        ly = nrecs
        nrecs = 1
        w = w.reshape(1,-1)
        x = x.reshape(1,-1)
        y = y.reshape(1,-1)

    if not nfft:
        nfft = 128
    if not overlap:
        overlap = 50
    overlap = max(0,min(overlap,99))
    if nrecs > 1:
        overlap = 0
    if not nsamp:
        nsamp = 0
    if nrecs > 1:
        nsamp = ly
    if nrecs == 1 and nsamp <= 0:
        nsamp = np.fix(ly/ (8 - 7 * overlap/100))
    if nfft < nsamp:
        nfft = 2**nextpow2(nsamp)

    overlap = np.fix(overlap/100 * nsamp)
    nadvance = nsamp - overlap
    nrecs = np.fix((ly*nrecs - overlap) / nadvance)

    if not wind:
        wind = np.hanning(nsamp)

    try:
        (rw, cw) = wind.shape
    except ValueError:
        (rw,) = wind.shape
        cw = 1

    if min(rw, cw) != 1 or max(rw, cw) != nsamp:
        print "Segment size is " + str(nsamp)
        print "Wind array is " + str(rw) + " by " + str(cw)
        print "Using default Hanning window"
        wind = np.hanning(nsamp)

    wind = wind.reshape(1,-1)

    # Accumulate triple products
    bic = np.zeros([nfft, nfft])
    Pyy = np.zeros([nfft,1])
    Pww = np.zeros([nfft,1])
    Pxx = np.zeros([nfft,1])

    mask = hankel(np.arange(nfft),np.array([nfft-1]+range(nfft-1)))
    Yf12 = np.zeros([nfft,nfft])
    ind = np.transpose(np.arange(nsamp))
    w = w.ravel(order='F')
    x = x.ravel(order='F')
    y = y.ravel(order='F')

    for k in xrange(nrecs):
        ws = w[ind]
        ws = (ws - np.mean(ws)) * wind
        Wf = np.fft.fft(ws, nfft) / nsamp
        CWf = np.conjugate(Wf)
        Pww = Pww + flat_eq(Pww, (Wf*CWf))

        xs = x[ind]
        xs = (xs - np.mean(xs)) * wind
        Xf = np.fft.fft(xs, nfft) / nsamp
        CXf = np.conjugate(Xf)
        Pxx = Pxx + flat_eq(Pxx, (Xf*CXf))

        ys = y[ind]
        ys = (ys - np.mean(ys)) * wind
        Yf = np.fft.fft(ys, nfft) / nsamp
        CYf = np.conjugate(Yf)
        Pyy = Pyy + flat_eq(Pyy, (Yf*CYf))

        Yf12 = flat_eq(Yf12, CYf.ravel(order='F')[mask])

        bic = bic + (Wf * np.transpose(Xf)) * Yf12
        ind = ind + int(nadvance)

    bic = bic / nrecs
    Pww = Pww / nrecs
    Pxx = Pxx / nrecs
    Pyy = Pyy / nrecs
    mask = flat_eq(mask, Pyy.ravel(order='F')[mask])

    bic = abs(bic)**2 / ((Pww * np.transpose(Pxx)) * mask)
    bic = np.fft.fftshift(bic)

    # Contour plot of magnitude bispectrum
    if nfft%2 == 0:
        waxis = np.transpose(np.arange(-1*nfft/2, nfft/2)) / nfft
    else:
        waxis = np.transpose(np.arange(-1*(nfft-1)/2, (nfft-1)/2+1)) / nfft

    cont = plt.contourf(waxis,waxis,bic,100, cmap=plt.cm.Spectral_r)
    plt.colorbar(cont)
    plt.title('Bicoherence estimated via the direct (FFT) method')
    plt.xlabel('f1')
    plt.ylabel('f2')

    colmax, row = bic.max(0), bic.argmax(0)
    maxval, col = colmax.max(0), colmax.argmax(0)
    print 'Max: bic('+str(waxis[col])+','+str(waxis[col])+') = '+str(maxval)
    plt.show()

    return (bic, waxis)


def test():
    nl1 = sio.loadmat('matfile/demo/nl1.mat')
    dbic = bicoherencex(nl1['x'], nl1['x'], nl1['y'])


if __name__ == '__main__':
    test()
mit
Python
edf7c8c1d3ea1f85c6c9888dd5ee759443f1db1c
add billing urls
ioO/billjobs
billing/urls.py
billing/urls.py
from django.conf.urls import url

from . import views

urlpatterns = [
    url(r'^generate_pdf/(?P<bill_id>\d+)$', views.generate_pdf, name='generate-pdf')
]
mit
Python
e3a750dcca3727d576833351bfc09bbd858871f6
Fix indent on test code for test/assembly broken in r1220 Review URL: https://chromiumcodereview.appspot.com/9429007
azunite/gyp,lianliuwei/gyp,cysp/gyp,svn2github/kgyp,mistydemeo/gyp,kevinchen3315/gyp-git,chromium/gyp,cysp/gyp,sdklite/gyp,bdarnell/gyp,erikge/watch_gyp,svn2github/kgyp,tarc/gyp,cchamberlain/gyp,AOSPU/external_chromium_org_tools_gyp,AOSPU/external_chromium_org_tools_gyp,yjhjstz/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,bulldy80/gyp_unofficial,Chilledheart/gyp,MIPS/external-chromium_org-tools-gyp,android-ia/platform_external_chromium_org_tools_gyp,okwasi/gyp,okwasi/gyp,kevinchen3315/gyp-git,xin3liang/platform_external_chromium_org_tools_gyp,clar/gyp,mapbox/gyp,bnq4ever/gypgoogle,brson/gyp,lianliuwei/gyp,chromium/gyp,brson/gyp,duanhjlt/gyp,enkripsi/gyp,saghul/gyn,LazyCodingCat/gyp,bnq4ever/gypgoogle,trafi/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,bulldy80/gyp_unofficial,sdklite/gyp,brson/gyp,pyokagan/gyp,Danath/gyp,omasanori/gyp,AWhetter/gyp,sport-monkey/GYP,ttyangf/gyp,azunite/gyp_20150930,trafi/gyp,clar/gyp,mkrautz/gyp-libmumble,channing/gyp,enkripsi/gyp,kevinchen3315/gyp-git,Chilledheart/gyp,sdklite/gyp,luvit/gyp,mgamer/gyp,luvit/gyp,svn2github/gyp,okumura/gyp,adblockplus/gyp,mgamer/gyp,bnoordhuis/gyp,Omegaphora/external_chromium_org_tools_gyp,svn2github/gyp,sdklite/gyp,pandaxcl/gyp,erikge/watch_gyp,mistydemeo/gyp,adblockplus/gyp,tarc/gyp,amoikevin/gyp,cysp/gyp,omasanori/gyp,saghul/gyn,channing/gyp,svn2github/kgyp,sport-monkey/GYP,bpsinc-native/src_tools_gyp,sloanyang/gyp,pyokagan/gyp,chromium/gyp,AOSPU/external_chromium_org_tools_gyp,ttyangf/gyp,okwasi/gyp,android-ia/platform_external_chromium_org_tools_gyp,alexcrichton/gyp,erikge/watch_gyp,yangrongwei/gyp,channing/gyp,svn2github/gyp,dougbeal/gyp,mistydemeo/gyp,enkripsi/gyp,bnq4ever/gypgoogle,pandaxcl/gyp,mgamer/gyp,LazyCodingCat/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,dougbeal/gyp,alexcrichton/gyp,AWhetter/gyp,okwasi/gyp,bdarnell/gyp,sanyaade-teachings/gyp,yinquan529/platform-external-chromium_org-tools-gyp,okumura/gyp,erikge/watch_gyp,sanyaade-teachings/gyp,xin3liang/platform_external_chromium_org_tools_gyp,azunite/gyp,omasanori/gyp,springmeyer/gyp,cchamberlain/gyp,sdklite/gyp,LazyCodingCat/gyp,msc-/gyp,amoikevin/gyp,channing/gyp,sanyaade-teachings/gyp,alexcrichton/gyp,Jack-Q/GYP-copy,bulldy80/gyp_unofficial,lianliuwei/gyp,lukeweber/gyp-override,pandaxcl/gyp,Jack-Q/GYP-copy,carlTLR/gyp,svn2github/gyp,saghul/gyn,yjhjstz/gyp,ryfx/gyp,bulldy80/gyp_unofficial,mumble-voip/libmumble-gyp,alexcrichton/gyp,bpsinc-native/src_tools_gyp,bpsinc-native/src_tools_gyp,omasanori/gyp,bdarnell/gyp,yjhjstz/gyp,azunite/gyp_20150930,MIPS/external-chromium_org-tools-gyp,sanyaade-teachings/gyp,yinquan529/platform-external-chromium_org-tools-gyp,AWhetter/gyp,svn2github/kgyp,Danath/gyp,mapbox/gyp,yjhjstz/gyp,luvit/gyp,mistydemeo/gyp,mkrautz/gyp-libmumble,springmeyer/gyp,amoikevin/gyp,azunite/gyp_20150930,bnoordhuis/gyp,azunite/gyp,ryfx/gyp,brson/gyp,channing/gyp,msc-/gyp,azunite/gyp,mapbox/gyp,pyokagan/gyp,chromium/gyp,Chilledheart/gyp,sport-monkey/GYP,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,bnq4ever/gypgoogle,dougbeal/gyp,turbulenz/gyp,turbulenz/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,azunite/gyp,cysp/gyp,Phuehvk/gyp,pandaxcl/gyp,cchamberlain/gyp,clar/gyp,openpeer/webrtc-gyp,msc-/gyp,clar/gyp,turbulenz/gyp,mkrautz/gyp-libmumble,ttyangf/pdfium_gyp,MIPS/external-chromium_org-tools-gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,Danath/gyp,openpeer/webrtc-gyp,Phuehvk/gyp,AWhetter/gyp,bnoordhuis/gyp,android-ia/platform_external_chromium_org_tools_gyp,xin3liang/platform_external_chromium_org_tools_gyp,saghul/gyn,AOSPU/external_chromium_org_tools_gyp,mapbox/gyp,bnoordhuis/gyp,Omegaphora/external_chromium_org_tools_gyp,springmeyer/gyp,Phuehvk/gyp,azunite/gyp_20150930,lukeweber/gyp-override,mumble-voip/libmumble-gyp,enkripsi/gyp,cchamberlain/gyp,xin3liang/platform_external_chromium_org_tools_gyp,bpsinc-native/src_tools_gyp,AWhetter/gyp,sanyaade-teachings/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,openpeer/webrtc-gyp,ryfx/gyp,chromium/gyp,lukeweber/gyp-override,ttyangf/pdfium_gyp,bnq4ever/gypgoogle,dougbeal/gyp,luvit/gyp,duanhjlt/gyp,openpeer/webrtc-gyp,Omegaphora/external_chromium_org_tools_gyp,pandaxcl/gyp,android-ia/platform_external_chromium_org_tools_gyp,bnoordhuis/gyp,carlTLR/gyp,kevinchen3315/gyp-git,Omegaphora/external_chromium_org_tools_gyp,duanhjlt/gyp,dougbeal/gyp,lianliuwei/gyp,pyokagan/gyp,cysp/gyp,sport-monkey/GYP,Danath/gyp,mumble-voip/libmumble-gyp,carlTLR/gyp,amoikevin/gyp,ttyangf/pdfium_gyp,tarc/gyp,springmeyer/gyp,mkrautz/gyp-libmumble,adblockplus/gyp,sloanyang/gyp,yjhjstz/gyp,msc-/gyp,mapbox/gyp,okumura/gyp,sloanyang/gyp,lukeweber/gyp-override,openpeer/webrtc-gyp,Omegaphora/external_chromium_org_tools_gyp,pandaxcl/gyp,trafi/gyp,mumble-voip/libmumble-gyp,trafi/gyp,okumura/gyp,springmeyer/gyp,Phuehvk/gyp,tarc/gyp,ttyangf/gyp,LazyCodingCat/gyp,clar/gyp,ttyangf/gyp,yinquan529/platform-external-chromium_org-tools-gyp,Jack-Q/GYP-copy,yinquan529/platform-external-chromium_org-tools-gyp,carlTLR/gyp,pyokagan/gyp,msc-/gyp,MIPS/external-chromium_org-tools-gyp,yangrongwei/gyp,mgamer/gyp,cchamberlain/gyp,yangrongwei/gyp,ttyangf/pdfium_gyp,saghul/gyn,adblockplus/gyp,Chilledheart/gyp,turbulenz/gyp,Jack-Q/GYP-copy,mgamer/gyp,tarc/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,Jack-Q/GYP-copy,enkripsi/gyp,turbulenz/gyp,bdarnell/gyp,duanhjlt/gyp
test/assembly/gyptest-assembly.py
test/assembly/gyptest-assembly.py
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
A basic test of compiling assembler files.
"""

import sys
import TestGyp

if sys.platform != 'win32':
  # TODO(bradnelson): get this working for windows.
  test = TestGyp.TestGyp(formats=['make', 'ninja', 'scons', 'xcode'])

  test.run_gyp('assembly.gyp', chdir='src')

  test.relocate('src', 'relocate/src')

  test.build('assembly.gyp', test.ALL, chdir='relocate/src')

  expect = """\
Hello from program.c
Got 42.
"""
  test.run_built_executable('program', chdir='relocate/src', stdout=expect)

  test.pass_test()
#!/usr/bin/env python

# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""
A basic test of compiling assembler files.
"""

import sys
import TestGyp

if sys.platform != 'win32':
  # TODO(bradnelson): get this working for windows.
  test = TestGyp.TestGyp(formats=['make', 'ninja', 'scons', 'xcode'])

test.run_gyp('assembly.gyp', chdir='src')

test.relocate('src', 'relocate/src')

test.build('assembly.gyp', test.ALL, chdir='relocate/src')

expect = """\
Hello from program.c
Got 42.
"""
test.run_built_executable('program', chdir='relocate/src', stdout=expect)

test.pass_test()
bsd-3-clause
Python
e4e52abb5654804f847fb3894293de58f97c7c91
Add new control test for Front-End
radarsat1/siconos,siconos/siconos-deb,siconos/siconos-deb,fperignon/siconos,siconos/siconos,siconos/siconos,bremond/siconos,siconos/siconos-deb,fperignon/siconos,radarsat1/siconos,siconos/siconos,bremond/siconos,radarsat1/siconos,fperignon/siconos,siconos/siconos-deb,radarsat1/siconos,fperignon/siconos,radarsat1/siconos,siconos/siconos-deb,bremond/siconos,fperignon/siconos,siconos/siconos,siconos/siconos-deb,bremond/siconos,bremond/siconos
Front-End/src/swig/Siconos/tests/test_smc.py
Front-End/src/swig/Siconos/tests/test_smc.py
#!/usr/bin/env python

# this test is taken almost verbatim from RelayBiSimulation_OT2_noCplugin.py

def test_smc_1():
    from Siconos.Kernel import FirstOrderLinearDS, Model, TimeDiscretisation,\
        TimeStepping, Moreau, ControlManager, linearSensor, linearSMC_OT2,\
        getMatrix, SimpleMatrix
    from matplotlib.pyplot import subplot, title, plot, grid, show
    from numpy import array, eye, empty, zeros, savetxt
    from math import ceil, sin
    from numpy.linalg import norm

    # Derive our own version of FirstOrderLinearDS
    class MyFOLDS(FirstOrderLinearDS):
        def computeb(self, time):
            t = sin(50*time)
            tmpz = self.z()
            # XXX fix this !
            if len(tmpz) != 2:
                print("DEBUG z has length ", len(tmpz))
                return
            # XXX we need to find a smarter way to do things here
            # we need to convert from vector (sage) to arrayish
            u = [t, -t] + tmpz
            self.setb(u)

    # variable declaration
    ndof = 2          # Number of degrees of freedom of your system
    t0 = 0.0          # start time
    T = 1             # end time
    h = 1.0e-4        # time step for simulation
    hControl = 1.0e-2 # time step for control
    Xinit = 1.0       # initial position
    theta = 0.5
    N = ceil((T-t0)/h + 10) # number of time steps
    outputSize = 5    # number of variable to store at each time step

    # Matrix declaration
    A = zeros((ndof,ndof))
    x0 = [Xinit, -Xinit]
    sensorC = eye(ndof)
    sensorD = zeros((ndof,ndof))
    Csurface = [0, 1.0]

    # Simple check
    if h > hControl:
        print "hControl must be bigger than h"
        exit(1)

    # Declaration of the Dynamical System
    processDS = MyFOLDS(x0, A)
    # XXX b is not automatically created ...
    processDS.setb([0, 0])
    # Model
    process = Model(t0, T)
    process.nonSmoothDynamicalSystem().insertDynamicalSystem(processDS)
    # time discretization
    processTD = TimeDiscretisation(t0, h)
    tSensor = TimeDiscretisation(t0, hControl)
    tActuator = TimeDiscretisation(t0, hControl)
    # Creation of the Simulation
    processSimulation = TimeStepping(processTD, 0)
    processSimulation.setName("plant simulation")
    # Declaration of the integrator
    processIntegrator = Moreau(processDS, theta)
    processSimulation.insertIntegrator(processIntegrator)
    # Actuator, Sensor & ControlManager
    control = ControlManager(process)
    sens = linearSensor(100, tSensor, process, sensorC, sensorD)
    control.addSensorPtr(sens)
    act = linearSMC_OT2(104, tActuator, process)
    act.addSensorPtr(sens)
    control.addActuatorPtr(act)

    # Initialization.
    process.initialize(processSimulation)
    control.initialize()
    act.setCsurfacePtr(Csurface)

    # This is not working right now
    #eventsManager = s.eventsManager()

    # Matrix for data storage
    dataPlot = empty((3*(N+1), outputSize))
    dataPlot[0, 0] = t0
    dataPlot[0, 1] = processDS.x()[0]
    dataPlot[0, 2] = processDS.x()[1]
    dataPlot[0, 3] = processDS.z()[0]
    dataPlot[0, 4] = processDS.z()[1]

    # Main loop
    k = 1
    while(processSimulation.nextTime() < T):
        processSimulation.computeOneStep()
        dataPlot[k, 0] = processSimulation.nextTime()
        dataPlot[k, 1] = processDS.x()[0]
        dataPlot[k, 2] = processDS.x()[1]
        dataPlot[k, 3] = processDS.z()[0]
        dataPlot[k, 4] = processDS.z()[1]
        k += 1
        processSimulation.nextStep()
#        print processSimulation.nextTime()

    # Resize matrix
    dataPlot.resize(k, outputSize)
apache-2.0
Python
1197f5885b2e7275d9a4f108c62bd2506816c8b1
Create test_madagascar.py
dr-prodigy/python-holidays
test/countries/test_madagascar.py
test/countries/test_madagascar.py
# -*- coding: utf-8 -*-

#  python-holidays
#  ---------------
#  A fast, efficient Python library for generating country, province and state
#  specific sets of holidays on the fly. It aims to make determining whether a
#  specific date is a holiday as fast and flexible as possible.
#
#  Authors: dr-prodigy <[email protected]> (c) 2017-2022
#           ryanss <[email protected]> (c) 2014-2017
#  Website: https://github.com/dr-prodigy/python-holidays
#  License: MIT (see LICENSE file)

import unittest
from datetime import date

import holidays


class TestMadagascar(unittest.TestCase):
    def setUp(self):
        self.holidays = holidays.MG()

    def test_new_years(self):
        self.assertIn(date(2010, 1, 1), self.holidays)
        self.assertIn(date(2020, 1, 1), self.holidays)

    def test_mahery_fo(self):
        self.assertIn(date(2010, 3, 29), self.holidays)
        self.assertIn(date(2015, 3, 29), self.holidays)
        self.assertIn(date(2022, 3, 29), self.holidays)

    def test_paska(self):
        self.assertIn(date(2022, 4, 17), self.holidays)  # Andron'ny paska
        self.assertIn(date(2022, 4, 18), self.holidays)  # Alatsinain'ny Paska

    def test_not_holiday(self):
        self.assertNotIn(date(2022, 4, 20), self.holidays)
mit
Python
d21743f2543f8d953a837d75bff0fcdb0105f4db
Add page extension for tracking page creation and modification dates.
feincms/feincms,hgrimelid/feincms,pjdelport/feincms,joshuajonah/feincms,nickburlett/feincms,matthiask/django-content-editor,joshuajonah/feincms,matthiask/django-content-editor,matthiask/django-content-editor,nickburlett/feincms,michaelkuty/feincms,joshuajonah/feincms,mjl/feincms,michaelkuty/feincms,hgrimelid/feincms,joshuajonah/feincms,mjl/feincms,mjl/feincms,feincms/feincms,matthiask/django-content-editor,nickburlett/feincms,pjdelport/feincms,nickburlett/feincms,matthiask/feincms2-content,hgrimelid/feincms,matthiask/feincms2-content,feincms/feincms,pjdelport/feincms,michaelkuty/feincms,michaelkuty/feincms,matthiask/feincms2-content
feincms/module/page/extensions/changedate.py
feincms/module/page/extensions/changedate.py
""" Track the modification date for pages. """ from datetime import datetime from django.db import models from django.db.models import Q from django.utils.translation import ugettext_lazy as _ from django.conf import settings def register(cls, admin_cls): cls.add_to_class('creation_date', models.DateTimeField(_('creation date'), editable=False)) cls.add_to_class('modification_date', models.DateTimeField(_('modification date'), editable=False)) orig_save = cls.save def save(page): now = datetime.now() if page.id is None: page.creation_date = now page.modification_date = now orig_save(page) cls.save = save
bsd-3-clause
Python
bcb9437fb99c2577c9ca9628c60b80becc2a24b3
Add media_tags and a new filter for photo alignment normalization
Ircam-Web/mezzanine-organization,Ircam-Web/mezzanine-organization
organization/media/templatetags/media_tags.py
organization/media/templatetags/media_tags.py
# -*- coding: utf-8 -*-

from mezzanine.template import Library

register = Library()


@register.filter
def get_photo_alignment(value):
    if value == 'left':
        return 0
    elif value == 'center':
        return 0.5
    return 1
agpl-3.0
Python
ed0d0f913b209bf6ea8ec32d0aa10c31bc97e2c9
create index on vote.mandate_id
mgax/mptracker,mgax/mptracker,mgax/mptracker,mgax/mptracker
alembic/versions/33f79ee8632_vote_mandate_id_inde.py
alembic/versions/33f79ee8632_vote_mandate_id_inde.py
revision = '33f79ee8632'
down_revision = '3abf407e34a'

from alembic import op


def upgrade():
    op.create_index('vote_mandate_id_index', 'vote', ['mandate_id'])


def downgrade():
    op.drop_index('vote_mandate_id_index')
mit
Python
ef06864a991572d7ae610f9a249b024f967b1eb9
Add test.util.mock_call_with_name
thelinuxkid/linkins
linkins/test/util.py
linkins/test/util.py
import mock


class mock_call_with_name(object):
    """Like mock.call but takes the name of the call as its first argument.

    mock.call requires chained methods to define its name. This can be a
    problem, for example, if you need create
    mock.call().__enter__().__iter__(). You can optionally use mock._Call
    but you might as well use a tuple since its constructor requires a
    tuple of the form (name, args, kwargs).
    """
    def __new__(self, name, *args, **kwargs):
        return mock._Call(
            (name, args, kwargs)
        )
mit
Python
dba311375a0f4cda1a3c522f5ac261dfb601b9c5
Create gee_init.py
jonas-eberle/pyEOM
pyEOM/gee_init.py
pyEOM/gee_init.py
MY_SERVICE_ACCOUNT = ''
MY_PRIVATE_KEY_FILE = ''
mit
Python
148991a27670d26a2eb29f0964078b4d656bbcec
Create __init__.py
rwl/PYPOWER-Dynamics
pydyn/__init__.py
pydyn/__init__.py
# Copyright (C) 2014-2015 Julius Susanto. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.

"""
PYPOWER-Dynamics
Time-domain simulation engine
"""
bsd-3-clause
Python
06e4fd4b7d4cc4c984a05887fce00f7c8bbdc174
Add missing tests for messaging notifer plugin
stackforge/osprofiler,openstack/osprofiler,stackforge/osprofiler,openstack/osprofiler,stackforge/osprofiler,openstack/osprofiler
tests/notifiers/test_messaging.py
tests/notifiers/test_messaging.py
# Copyright 2014 Mirantis Inc.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import mock

from osprofiler._notifiers import base

from tests import test


class MessagingTestCase(test.TestCase):

    def test_init_and_notify(self):
        messaging = mock.MagicMock()
        context = "context"
        transport = "transport"
        project = "project"
        service = "service"
        host = "host"

        notify_func = base.Notifier.factory("Messaging", messaging, context,
                                            transport, project, service, host)

        messaging.Notifier.assert_called_once_with(
            transport, publisher_id=host, driver="messaging",
            topic="profiler")

        info = {
            "a": 10
        }
        notify_func(info)

        expected_data = {"project": project, "service": service}
        expected_data.update(info)
        messaging.Notifier().info.assert_called_once_with(
            context, "profiler.%s" % service, expected_data)

        messaging.reset_mock()
        notify_func(info, context="my_context")
        messaging.Notifier().info.assert_called_once_with(
            "my_context", "profiler.%s" % service, expected_data)
apache-2.0
Python
fb2af0db2fc6d2d63bb377d7818ed1d03cb5cc9a
add nqueens.py
chenshuo/recipes,chenshuo/recipes,chenshuo/recipes,chenshuo/recipes,chenshuo/recipes,chenshuo/recipes
python/nqueens.py
python/nqueens.py
#!/usr/bin/python

# http://code.activestate.com/recipes/576647-eight-queens-six-lines/

from itertools import permutations

N = 8
cols = range(N)
for perm in permutations(cols):
    if (N == len(set(perm[i]-i for i in cols))
          == len(set(perm[i]+i for i in cols))):
        print perm
bsd-3-clause
Python
4bfe33373ebf095623173f945757693997a65ce3
Add a simple test for the new AWS::LanguageExtensions transform (#2074)
cloudtools/troposphere,cloudtools/troposphere
tests/test_language_extensions.py
tests/test_language_extensions.py
import unittest

from troposphere import AWSHelperFn, Parameter, Template
from troposphere.sqs import Queue


class TestServerless(unittest.TestCase):
    def test_transform(self):
        t = Template()
        t.set_version("2010-09-09")
        t.set_transform("AWS::LanguageExtensions")

        self.assertEqual(
            t.to_dict(),
            {
                "AWSTemplateFormatVersion": "2010-09-09",
                "Transform": "AWS::LanguageExtensions",
                "Resources": {},
            },
        )

    def test_length_function(self):
        class Length(AWSHelperFn):
            def __init__(self, data: object) -> None:
                self.data = {"Fn::Length": data}

        t = Template()
        t.set_version("2010-09-09")
        t.set_transform("AWS::LanguageExtensions")
        queue_list = t.add_parameter(Parameter("QueueList", Type="CommaDelimitedList"))
        queue_name = t.add_parameter(
            Parameter(
                "QueueNameParam", Description="Name for your SQS queue", Type="String"
            )
        )
        t.add_resource(
            Queue(
                "Queue",
                QueueName=queue_name.ref(),
                DelaySeconds=Length(queue_list.ref()),
            )
        )

        self.assertEqual(
            t.to_dict(),
            {
                "AWSTemplateFormatVersion": "2010-09-09",
                "Transform": "AWS::LanguageExtensions",
                "Parameters": {
                    "QueueList": {"Type": "CommaDelimitedList"},
                    "QueueNameParam": {
                        "Description": "Name for your SQS queue",
                        "Type": "String",
                    },
                },
                "Resources": {
                    "Queue": {
                        "Type": "AWS::SQS::Queue",
                        "Properties": {
                            "QueueName": {"Ref": "QueueNameParam"},
                            "DelaySeconds": {"Fn::Length": {"Ref": "QueueList"}},
                        },
                    }
                },
            },
        )
bsd-2-clause
Python
157a7d00a9d650728495726e9217591a678ec5a9
add docstrings for response
eugene-eeo/mailthon,ashgan-dev/mailthon,krysros/mailthon
mailthon/response.py
mailthon/response.py
""" mailthon.response ~~~~~~~~~~~~~~~~~ Implements the Response objects. """ class Response(object): """ Encapsulates a (status_code, message) tuple returned by a server when the ``NOOP`` command is called. :param pair: A (status_code, message) pair. """ def __init__(self, pair): status, message = pair self.status_code = status self.message = message @property def ok(self): """ Tells whether the Response object is ok- that everything went well. Returns true if the status code is 250, false otherwise. """ return self.status_code == 250 class SendmailResponse(Response): """ Encapsulates a (status_code, message) tuple as well as a mapping of email-address to (status_code, message) tuples that can be attained by the NOOP and the SENDMAIL command. :param pair: The response pair. :param rejected: Rejected receipients. """ def __init__(self, pair, rejected): Response.__init__(self, pair) self.rejected = { addr: Response(pair) for addr, pair in rejected.items() } @property def ok(self): """ Returns True only if no addresses were rejected and if the status code is 250. """ return (Response.ok.fget(self) and not self.rejected)
class Response(object):
    def __init__(self, pair):
        status, message = pair
        self.status_code = status
        self.message = message

    @property
    def ok(self):
        return self.status_code == 250


class SendmailResponse(Response):
    def __init__(self, pair, rejected):
        Response.__init__(self, pair)
        self.rejected = {
            addr: Response(pair)
            for addr, pair in rejected.items()
        }

    @property
    def ok(self):
        return (Response.ok.fget(self) and
                not self.rejected)
mit
Python
1ee32dab5a8c90c857a127ba831be250ad153198
Create rftest.py
tomwillis608/beestation,tomwillis608/beestation
mark_ii/pi/rftest.py
mark_ii/pi/rftest.py
#!/usr/bin/python

import piVirtualWire.piVirtualWire as piVirtualWire
import time
import pigpio
import struct
import requests
import logging
import logging.handlers

LOG_FILENAME = '/tmp/rftest.log'

def calcChecksum(packet):
    checkSum = sum([ int(i) for i in packet[:13]])
    return checkSum % 256

def sendToWeb(url):
    print ('URL ', url)
    r = requests.get(url)
    return r.status_code

if __name__ == "__main__":

    pi = pigpio.pi()
    rx = piVirtualWire.rx(pi, 4, 2000) # Set pigpio instance, TX module GPIO pin and baud rate

    # logging.basicConfig(filename=LOG_FILENAME, level=logging.DEBUG)
    logger = logging.getLogger('RFTLogger')
    logger.setLevel(logging.DEBUG)
    handler = logging.handlers.RotatingFileHandler(LOG_FILENAME, maxBytes=100000, backupCount=10)
    logger.addHandler(handler)

    # create console handler and set level to debug
    ch = logging.StreamHandler()
    ch.setLevel(logging.DEBUG)

    # create formatter
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # add formatter to ch
    ch.setFormatter(formatter)
    handler.setFormatter(formatter)

    # add ch to logger
    logger.addHandler(ch)

    fmt = "<LLLBB"

    print ("Top of RF test")
    logger.info('Top of RF test')

    while True:
        while rx.ready():
            packet = rx.get()
            if (14 == len(packet)):
                print(packet)
                id = packet[0] + packet[1]*256 + packet[2]*256*256 + packet[3]*256*256*256
                data = packet[4] + packet[5]*256 + packet[6]*256*256 + packet[7]*256*256*256
                packetCount = packet[8] + packet[9]*256 + packet[10]*256*256 + packet[11]*256*256*256
                dataType = packet[12]
                checksum = packet[13]
                print ('Id: ', format(id,'X'))
                print ('Data: ', format(data,'d'))
                print ('Packet count: ', format(packetCount,'d'))
                print ('Data type: ', format(dataType,'c'))
                print ('Checksum: ', format(checksum,'X'))
                calculatedChecksum = calcChecksum(packet)
                print ('Calc Checksum: ', format(calculatedChecksum,'X'))
                logger.debug('id=%X,data=%d,count=%d,type=%c,chk=%X,calc=%X' %
                             (id, data, packetCount, dataType, checksum, calculatedChecksum))
                if (calculatedChecksum == checksum):
                    print ('Upload to server')
                    idx = format(id, 'X')
                    if (ord('+') == dataType):
                        if (0 == data):
                            url = "http://192.168.0.163/add_status.php?station=%X&message=Starting" %(id)
                            sendToWeb(url)
                        else :
                            url = "http://192.168.0.163/add_status.php?station=%X&message=%d cycles" %(id, data)
                            sendToWeb(url)
                    else:
                        url = "http://192.168.0.163/add_record.php?s=%X&r=%d&t=%c&n=%d" % (id, data, dataType, packetCount)
                        sendToWeb(url)
                    logger.debug('url=%s' % url)
        time.sleep(0.01)

    rx.cancel()
    pi.stop()
mit
Python