commit: stringlengths (min 40, max 40)
old_file: stringlengths (min 4, max 264)
new_file: stringlengths (min 4, max 264)
old_contents: stringlengths (min 0, max 3.26k)
new_contents: stringlengths (min 1, max 4.43k)
subject: stringlengths (min 15, max 624)
message: stringlengths (min 15, max 4.7k)
lang: stringclasses (3 values)
license: stringclasses (13 values)
repos: stringlengths (min 5, max 91.5k)
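Each record below pairs a commit hash with the touched file's path, its old and new contents, the commit subject and message, the language, the license, and the repositories containing the file. A minimal sketch of loading and iterating a dataset with this schema via the Hugging Face datasets library; the dataset id "user/commit-dataset" is a placeholder assumption, not the real name:

    # Sketch only: the dataset id below is a made-up placeholder.
    from datasets import load_dataset

    ds = load_dataset("user/commit-dataset", split="train")
    for row in ds.select(range(3)):
        # Each row carries commit metadata plus old/new file contents.
        print(row["commit"][:8], row["new_file"], row["subject"])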
da80b3deb8d1b3a0172a11fb46af99c1af003c56
clinical_db/experiment_script.py
clinical_db/experiment_script.py
''' Experiments are recorded in this script '''

# Experiment Date:06/24/2015


def compare_lab_tests_and_vitals():
    ''' Compare the ability of prediction with lab tests and vital test
    for HF patients'''


if __name__ == '__main__':
    pass
Add script for experiment records
Add script for experiment records
Python
mit
belemizz/mimic2_tools,belemizz/mimic2_tools
28acda0265c8913e761f9a63315fe17c09a3e5fc
src/zeit/content/text/tests/test_jinja.py
src/zeit/content/text/tests/test_jinja.py
import zeit.cms.testing
import zeit.content.text.jinja
import zeit.content.text.testing


class PythonScriptTest(zeit.cms.testing.FunctionalTestCase):

    layer = zeit.content.text.testing.ZCML_LAYER

    def create(self, text):
        result = zeit.content.text.jinja.JinjaTemplate()
        result.uniqueId = 'http://xml.zeit.de/template'
        result.text = text
        return result

    def test_renders_template(self):
        tpl = self.create('{{foo}}')
        self.assertEqual('bar', tpl({'foo': 'bar'}))

    def test_mockdict_responds_to_any_variable(self):
        tpl = self.create('{{foo.bar.baz}}')
        self.assertIn('<MagicMock', tpl(zeit.content.text.jinja.MockDict()))
Add test for dummy rendering (belongs to commit:be85116)
ZON-4007: Add test for dummy rendering (belongs to commit:be85116)
Python
bsd-3-clause
ZeitOnline/zeit.content.text
de8d507e64894bdaaf036f99f179637c2660f0f1
tests/issue0078.py
tests/issue0078.py
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 21 22:09:10 2013

@author: Jeff
"""

import logging

try:
    print('Logger already instantiated, named: ', logger.name)
except:
    # create logger
    logger = logging.getLogger()
    logger.setLevel(logging.CRITICAL)
    # create console handler with a higher log level
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    # create formatter and add it to the handlers
    formatter = logging.Formatter('%(name)s: %(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    # add the handlers to the logger
    logger.addHandler(ch)


class foo:
    def __init__(self):
        self._logger = logging.getLogger(self.__class__.__name__)
        self.test()

    def test(self):
        self._logger.debug('test4_debug')
        self._logger.info('test4_info')
        self._logger.warning('test4_warning')
        self._logger.error('test4_error')
        self._logger.critical('test4_critical')


class spam:
    def __init__(self):
        self._logger = logging.getLogger(self.__class__.__name__)
        self.test()

    def test(self):
        self._logger.debug('test5_debug')
        self._logger.info('test5_info')
        self._logger.warning('test5_warning')
        self._logger.error('test5_error')
        self._logger.critical('test5_critical')


if __name__ == "__main__":
    y = foo()
    x = spam()
Test script showing how we might use the Python logger more effectively
Test script showing how we might use the Python logger more effectively

Former-commit-id: cc69a6ab3b6c61fd2f3e60bd16085b81cda84e42
Former-commit-id: 28ba0ba57de3379bd99b9f508972cd0520c04fcb
Python
mit
amdouglas/OpenPNM,amdouglas/OpenPNM,stadelmanma/OpenPNM,PMEAL/OpenPNM,TomTranter/OpenPNM
2b7a2e0ffde981afd96395e5422eb9574e4ff51f
svm_experiments.py
svm_experiments.py
from svm import SVMModel

if __name__ == '__main__':
    features = [
        # 'refuting',
        'ngrams',
        # 'polarity',
        'named',
        # 'jaccard'
    ]
    model = SVMModel()
    train_data = model.get_data('data/train_bodies.csv', 'data/train_stances.csv')
    test_data = model.get_data('data/competition_test_bodies.csv', 'data/competition_test_stances.csv')

    X_test = test_data['X']
    X_train = train_data['X']

    Only_R_UR = True
    if Only_R_UR is True:
        y_test = model.related_unrelated(test_data['y'])
        y_train = model.related_unrelated(train_data['y'])
    else:
        y_test = test_data['y']
        y_train = train_data['y']

    classifier = model.get_trained_classifier(X_train, y_train)
    predicted = model.test_classifier(classifier, X_test, y_test)

    print str(model._use_features)
    print "Precision %f" % model.precision(y_test, predicted)
    print "Recal %f" % model.recal(y_test, predicted)
    print "Accuracy %f" % model.accuracy(y_test, predicted)
Add svm experiment on test data
Add svm experiment on test data
Python
apache-2.0
sarahannnicholson/FNC
1c4e1dc0fbd4681fbb1bf5c63145a2fbed130890
xpcom-leak-analyzer.py
xpcom-leak-analyzer.py
#!/usr/bin/python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import sys
import os
import re

bloatStartPatt = re.compile('^..:..:.. INFO - \|<----------------Class--------------->\|<-----Bytes------>\|<----Objects---->\|$')
bloatMidPatt = re.compile('^..:..:..\s+INFO -[^|]+\|([^|]+)\|([^|]+)\|([^|]+)\|$')
bloatEndPatt = re.compile('^..:..:.. INFO - nsTraceRefcnt::DumpStatistics: \d+ entries$')

def checkNumLeaked(numLeaked):
    if not 'PImageBridgeChild' in numLeaked:
        assert not 'base::Thread' in numLeaked
        return
    assert numLeaked['PImageBridgeChild'] == 1
    assert numLeaked['base::Thread'] == 1

def checkBloatReports(f):
    inBloat = False
    numLeaked = None
    for l in f:
        bspm = bloatStartPatt.match(l)
        if bspm:
            assert not inBloat
            inBloat = True
            numLeaked = {}
            continue
        bepm = bloatEndPatt.match(l)
        if bepm:
            assert inBloat
            inBloat = False
            checkNumLeaked(numLeaked)
            numLeaked = None
            continue
        if not inBloat:
            continue
        bmpm = bloatMidPatt.match(l)
        assert bmpm
        leakedClass = bmpm.group(1).strip()
        if leakedClass == '' or leakedClass == 'TOTAL':
            continue
        nl = int(bmpm.group(3).split()[1])
        if leakedClass == 'base::Thread' or leakedClass == 'PImageBridgeChild':
            assert not leakedClass in numLeaked
            numLeaked[leakedClass] = nl

def extractTestName(fileName):
    startLen = len('try_ubuntu64_vm-debug_test-')
    return fileName[startLen:].split('-bm')[0]

def analyzeAllFiles():
    for (base, _, files) in os.walk('.'):
        for fileName in files:
            testName = extractTestName(fileName)
            if not testName.startswith('mochitest'):
                continue
            if not base.endswith("/"):
                base += "/"
            fullFileName = base + fileName
            f = open(fullFileName, 'r')
            print 'checking', testName
            checkBloatReports(f)
            f.close()

analyzeAllFiles()
Add script to analyze bloatview logs
Add script to analyze bloatview logs
Python
mpl-2.0
amccreight/mochitest-logs
149879c6029efac02bec30b67d13592287f2898c
tests/test_flatc.py
tests/test_flatc.py
import pytest

# TODO: Duplicate code
# Figure out how to import from flatc.py
def get_attrs_dict(attrs):
    attrs_dict = {x[0]: x[1] for x in attrs if len(x) == 2}
    ret = {x[0]: None for x in attrs}
    ret.update(attrs_dict)
    return ret

def test_attrs_dict():
    assert get_attrs_dict(['a', ['b', 10], 'c']) == {'a': None, 'b': 10, 'c': None}
Add a unit test for flatc
Add a unit test for flatc
Python
mit
adsharma/flattools,adsharma/flattools,adsharma/flattools
cd88e724f7f2f8293a509db3dc0558904beee6e4
tools/test/psa/psa_target_config_test.py
tools/test/psa/psa_target_config_test.py
#!/usr/bin/env python
"""
Copyright (c) 2019-2020 ARM Limited. All rights reserved.

SPDX-License-Identifier: Apache-2.0

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import pytest

from tools.targets import TARGETS

def test_psa_target_attributes():
    psa_targets = (tar for tar in TARGETS if tar.is_TFM_target)
    for tar in psa_targets:
        msg = "tfm_target_name attribute cannot be empty"
        assert(tar.tfm_target_name != ""), msg

        msg = "tfm_bootloader_supported attribute cannot be empty"
        assert(tar.tfm_bootloader_supported != ""), msg

        msg = "tfm_default_toolchain attribute cannot be empty"
        assert(tar.tfm_default_toolchain != ""), msg

        msg = "tfm_supported_toolchains attribute cannot be empty"
        assert(tar.tfm_supported_toolchains != ""), msg

        msg = "delivery_dir attribute cannot be empty"
        assert(tar.tfm_delivery_dir != ""), msg
Add a script to validate PSA targets
psa: Add a script to validate PSA targets

Add a script to parse `targets.json` to identify PSA targets and ensure
mandatory parameters are configured correctly for all PSA targets.

Signed-off-by: Devaraj Ranganna <[email protected]>
Python
apache-2.0
mbedmicro/mbed,mbedmicro/mbed,mbedmicro/mbed,mbedmicro/mbed,mbedmicro/mbed
85da0b9062d8266a3e3fe4cd29fbb5ea79a7cedf
tests/geneagrapher/local_data_grabber.py
tests/geneagrapher/local_data_grabber.py
import os
import sys
from BeautifulSoup import BeautifulSoup
from geneagrapher.grabber import get_record_from_tree

class LocalDataGrabber:
    """A class for grabbing locally-cached test data."""
    def __init__(self):
        pass

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        pass

    @classmethod
    def data_file(cls, filename):
        """Return the absolute path to the data file with given name."""
        return os.path.join(cls.data_path, filename)

    def get_record(self, id):
        """Load the local data for the given id and use Grabber's
        functions to extract the record data."""
        with open(self.data_file('{0}.html'.format(id)), 'r') as fin:
            soup = BeautifulSoup(fin, convertEntities='html')
        return get_record_from_tree(soup, id)

file_path = os.path.abspath(__file__)
LocalDataGrabber.data_path = os.path.join(os.path.dirname(file_path),
                                          'testdata')
Add LocalDataGrabber class so tests work with local data.
Add LocalDataGrabber class so tests work with local data.

This changeset adds the LocalDataGrabber class in the test source tree.
This class exposes the same interface as Grabber -- so that it can be
used in place of it -- but is only able to grab test data from the local
test data directory. This is useful for testing without making network
requests.
Python
mit
davidalber/Geneagrapher,davidalber/Geneagrapher
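Given the class just shown, a short hypothetical usage sketch; the record id 18231 is invented for illustration:

    # Hypothetical usage of LocalDataGrabber; the id is a made-up example.
    with LocalDataGrabber() as grabber:
        print(grabber.data_file('18231.html'))   # absolute path into testdata/
        record = grabber.get_record(18231)       # parsed via get_record_from_tree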
1f3625b98de4edd8a0d0bcce882c5f79aeaf23b2
tests/rules_tests/NoRuleSpecifiedTest.py
tests/rules_tests/NoRuleSpecifiedTest.py
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy
"""

from unittest import main, TestCase
from grammpy import Rule

class NoRuleSpecifiedTest(TestCase):
    pass

if __name__ == '__main__':
    main()
Add file for tests, when no rule is specified
Add file for tests, when no rule is specified
Python
mit
PatrikValkovic/grammpy
2cb905a4a0da9650135ab297d9e55c02bc7261c5
pytest_run.py
pytest_run.py
# coding=utf-8
"""This is a script for running pytest from the command line.

This script exists so that the project directory gets added to sys.path, which
prevents us from accidentally testing the globally installed willie version.

pytest_run.py

Copyright 2013, Ari Koivula, <[email protected]>
Licensed under the Eiffel Forum License 2.

http://willie.dfbta.net
"""
if __name__ == "__main__":
    import sys
    import pytest
    returncode = pytest.main()
    sys.exit(returncode)
Add a script for running pytest with the correct sys.path.
Add a script for running pytest with the correct sys.path.
Python
mit
Uname-a/knife_scraper,Uname-a/knife_scraper,Uname-a/knife_scraper
eb5e6bde589f3598d955c0d7389f7054a3a3c642
2017-code/test_scalability.py
2017-code/test_scalability.py
# test_scalability.py
# Ronald L. Rivest with Huasyn Karimi

import syn2

def tester(se):
    # copy code here from syn2.test; modify as needed
    pass

def test_scale(k):
    # start timer
    se = syn2.SynElection()
    # ... set parameters here based on k
    # run "test"
    # stop timer; print k and elapsed time

for k in range(3, 8):
    test_scale(k)
Add prototype code for testing scalability.
Add prototype code for testing scalability.
Python
mit
ron-rivest/2017-bayes-audit,ron-rivest/2017-bayes-audit
1fafccc7df5179ce89b100537e45307417658512
tests/test_validators.py
tests/test_validators.py
""" test_validators ~~~~~~~~~~~~~~ Unittests for bundled validators. :copyright: 2007-2008 by James Crasta, Thomas Johansson. :license: MIT, see LICENSE.txt for details. """ from py.test import raises from wtforms.validators import ValidationError, length, url, not_empty, email, ip_address class DummyForm(object): pass class DummyField(object): def __init__(self, data): self.data = data form = DummyForm() def test_email(): assert email(form, DummyField('[email protected]')) == None assert email(form, DummyField('[email protected]')) == None assert email(form, DummyField('[email protected]')) == None assert email(form, DummyField('[email protected]')) == None raises(ValidationError, email, form, DummyField('foo')) == None raises(ValidationError, email, form, DummyField('bar.dk')) == None raises(ValidationError, email, form, DummyField('foo@')) == None raises(ValidationError, email, form, DummyField('@bar.dk')) == None raises(ValidationError, email, form, DummyField('foo@bar')) == None raises(ValidationError, email, form, DummyField('[email protected]')) == None raises(ValidationError, email, form, DummyField('[email protected]')) == None def test_length(): field = DummyField('foobar') assert length(min=2, max=6)(form, field) == None raises(ValidationError, length(min=7), form, field) raises(ValidationError, length(max=5), form, field) def test_url(): assert url()(form, DummyField('http://foobar.dk')) == None assert url()(form, DummyField('http://foobar.dk/')) == None assert url()(form, DummyField('http://foobar.dk/foobar')) == None raises(ValidationError, url(), form, DummyField('http://foobar')) raises(ValidationError, url(), form, DummyField('foobar.dk')) raises(ValidationError, url(), form, DummyField('http://foobar.12')) def test_not_empty(): assert not_empty()(form, DummyField('foobar')) == None raises(ValidationError, not_empty(), form, DummyField('')) raises(ValidationError, not_empty(), form, DummyField(' ')) def test_ip_address(): assert ip_address(form, DummyField('127.0.0.1')) == None raises(ValidationError, ip_address, form, DummyField('abc.0.0.1')) raises(ValidationError, ip_address, form, DummyField('1278.0.0.1')) raises(ValidationError, ip_address, form, DummyField('127.0.0.abc'))
Add first basic unittests using py.test
Add first basic unittests using py.test
Python
bsd-3-clause
clones/wtforms
3644e3dc2d97f87b3810f1295a98b8977fa0c387
tests/test_template_filters.py
tests/test_template_filters.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

from flask import render_template_string

def test_nl2br_filter(app):
    s = '{{ "\n"|nl2br }}'
    rs = render_template_string(s)
    assert rs == '<p><br/>\n</p>'

def test_blankspace2nbsp_filter(app):
    s = '{{ " \t"|blankspace2nbsp }}'
    rs = render_template_string(s)
    assert rs == '&nbsp;' * 5
Add template filters test cases
Add template filters test cases
Python
mit
bosondata/badwolf,bosondata/badwolf,bosondata/badwolf
58bee0ac0df709f9a5cee83e7828dce148182d10
tests/v6/test_tee_generator.py
tests/v6/test_tee_generator.py
from .context import tohu
from tohu.v6.primitive_generators import Integer, TimestampPrimitive
from tohu.v6.derived_generators import IntegerDerived, Tee
from tohu.v6.derived_generators_v2 import TimestampDerived
from tohu.v6.custom_generator import CustomGenerator

def test_tee_generator():
    class QuuxGenerator(CustomGenerator):
        aa = Integer(100, 200)
        bb = Integer(300, 400)
        cc = IntegerDerived(low=aa, high=bb)
        dd = Tee(cc, num=Integer(1, 8))

    g = QuuxGenerator()
    items = g.generate(100, seed=12345)
    df = items.to_df()

    def check_dd_is_between_aa_and_bb(row):
        return all([row.aa <= x <= row.bb for x in row.dd])

    dd_is_always_between_aa_and_bb = all(df.apply(check_dd_is_between_aa_and_bb, axis=1))
    assert True == dd_is_always_between_aa_and_bb

def test_tee_generator_v2():
    class QuuxGenerator(CustomGenerator):
        aa = Integer(100, 200)
        bb = Integer(300, 400)
        cc = Tee(IntegerDerived(low=aa, high=bb), num=Integer(1, 8))

    g = QuuxGenerator()
    items = g.generate(100, seed=12345)
    df = items.to_df()

    def check_cc_is_between_aa_and_bb(row):
        return all([row.aa <= x <= row.bb for x in row.cc])

    cc_is_always_between_aa_and_bb = all(df.apply(check_cc_is_between_aa_and_bb, axis=1))
    assert True == cc_is_always_between_aa_and_bb

def test_tee_generator_with_timestamps():
    """
    Regression test to ensure that Tee properly deals with timestamp
    generators that output their timestamps as strings.
    """
    class QuuxGenerator(CustomGenerator):
        aa = TimestampPrimitive(date="2018-01-01").strftime("%Y-%m-%d %H:%M:%S")
        bb = TimestampPrimitive(date="2018-01-02").strftime("%Y-%m-%d %H:%M:%S")
        cc = Tee(TimestampDerived(start=aa, end=bb).strftime("%Y-%m-%d %H:%M:%S"), num=Integer(1, 8))

    g = QuuxGenerator()
    items = g.generate(100, seed=12345)
    df = items.to_df()

    def check_cc_is_between_aa_and_bb(row):
        return all([row.aa <= x <= row.bb for x in row.cc])

    cc_is_always_between_aa_and_bb = all(df.apply(check_cc_is_between_aa_and_bb, axis=1))
    assert True == cc_is_always_between_aa_and_bb
Add a few tests for Tee
Add a few tests for Tee
Python
mit
maxalbert/tohu
2d9cc52e6ce6a6d2584e1d73e3d84ca3538758d9
Sensors/recordCalibration.py
Sensors/recordCalibration.py
from pymavlink import mavutil

fileout = open('acc.txt', 'w')
filemag = open('mag.txt', 'w')
filegyro = open('gyro.txt', 'w')

# create a mavlink serial instance
master = mavutil.mavlink_connection('/dev/ttyUSB1', baud=57600, source_system=255)

try:
    while True:
        msg = master.recv_match(type='HEARTBEAT', blocking=True)
        msg = master.recv_match(type='HIGHRES_IMU', blocking=False)
        print msg
        if msg is not None:
            fileout.write('%f %f %f\n' % (msg.xacc, msg.yacc, msg.zacc))
            filemag.write('%f %f %f\n' % (msg.xmag, msg.ymag, msg.zmag))
            filegyro.write('%f %f %f\n' % (msg.xgyro, msg.ygyro, msg.zgyro))
except KeyboardInterrupt:
    fileout.close()
    filemag.close()
    filegyro.close()
Add a script to generate some files to be used for the sensor calibration
Add a script to generate some files to be used for the sensor calibration
Python
mit
baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite
6ca79d2ce16f8745c8d58c3dea20174931820ef3
api/models.py
api/models.py
from flask import current_app
from passlib.apps import custom_app_context as pwd_context
from itsdangerous import (TimedJSONWebSignatureSerializer as Serializer,
                          BadSignature, SignatureExpired)
from datetime import datetime

from api import db

class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    first_name = db.Column(db.String(30))
    last_name = db.Column(db.String(30))
    email = db.Column(db.String(64), unique=True)
    username = db.Column(db.String(80), unique=True)
    password_hash = db.Column(db.String(128))
    bucket_lists = db.relationship('BucketList', backref='user',
                                   lazy='dynamic')

    def __init__(self, first_name, last_name, email, username):
        self.first_name = first_name
        self.last_name = last_name
        self.email = email
        self.username = username

    def __repr__(self):
        return '<User %r>' % self.username

    def full_name(self):
        return '%s %s' % (self.first_name, self.last_name)

    def hash_password(self, password):
        self.password_hash = pwd_context.encrypt(password)

    def verify_password(self, password):
        return pwd_context.verify(password, self.password_hash)

    def generate_auth_token(self, expiration=7200):
        s = Serializer(current_app.config['SECRET_KEY'], expires_in=expiration)
        return s.dumps({'id': self.id})

    @staticmethod
    def verify_auth_token(token):
        s = Serializer(current_app.config['SECRET_KEY'])
        try:
            data = s.loads(token)
        except SignatureExpired:
            return None  # valid token, but expired
        except BadSignature:
            return None  # invalid token
        user = User.query.get(data['id'])
        return user
Add User object to model
Add User object to model
Python
mit
EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list
d57fab92485e403dd24321ded7090b9c46d61655
send_packet.py
send_packet.py
from socket import *

def send_packet(src, dst, eth_type, payload, interface = "eth0"):
    """Send raw Ethernet packet on interface."""
    assert(len(src) == len(dst) == 6)  # 48-bit ethernet addresses
    assert(len(eth_type) == 2)  # 16-bit ethernet type
    s = socket(AF_PACKET, SOCK_RAW)
    # From the docs: "For raw packet
    # sockets the address is a tuple (ifname, proto [,pkttype [,hatype]])"
    s.bind((interface, 0))
    return s.send(src + dst + eth_type + payload)

if __name__ == "__main__":
    print("Sent %d-byte Ethernet packet on eth0" %
          send_packet("\xFE\xED\xFA\xCE\xBE\xEF",
                      "\xFE\xED\xFA\xCE\xBE\xEF",
                      "\x7A\x05",
                      "hello"))
Add sending raw packet example
Add sending raw packet example
Python
mit
voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts
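The snippet above is Python 2, where byte strings are plain str. A minimal Python 3 sketch of the same raw send (an assumed variant, not from the repo); note that in an Ethernet II header the destination MAC conventionally comes first on the wire, and running this still needs Linux plus root privileges:

    # Python 3 sketch: bytes literals instead of str.
    from socket import socket, AF_PACKET, SOCK_RAW

    def send_packet3(src, dst, eth_type, payload, interface="eth0"):
        """Send a raw Ethernet frame; dst comes first in an Ethernet II header."""
        assert len(src) == len(dst) == 6   # 48-bit MAC addresses
        assert len(eth_type) == 2          # 16-bit EtherType
        s = socket(AF_PACKET, SOCK_RAW)
        s.bind((interface, 0))
        return s.send(dst + src + eth_type + payload)

    if __name__ == "__main__":
        mac = b"\xFE\xED\xFA\xCE\xBE\xEF"
        send_packet3(mac, mac, b"\x7A\x05", b"hello")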
5f411b0ce564eba7a4b1f6c07d0d3209d63eef8e
searchbypmid.py
searchbypmid.py
import requests, untangle

pmcid = 'PMC3834665'
epmc_basequeryurl = "http://www.ebi.ac.uk/europepmc/webservices/rest/search"

''' Make query to EuropePMC'''
query = {'query': pmcid, 'resulttype': 'core'}
r = requests.get(epmc_basequeryurl, params=query)

'''Use untangle to make python structure from xml'''
obj = untangle.parse(r.text)
print(obj.responseWrapper.hitCount.cdata)

'''test that it is an ok result, i.e. only one result and that is a journal article'''
if not obj.responseWrapper.hitCount.cdata == '1':
    raise ValueError('More (or less) than 1 result')

if 'Journal Article' not in ','.join([pubtype.cdata for pubtype in obj.responseWrapper.resultList.result.pubTypeList.pubType]):
    raise ValueError('Not Journal')

''' build metadata dictionary'''
metadata = {}
metadata['title'] = obj.responseWrapper.resultList.result.title.cdata
metadata['date'] = obj.responseWrapper.resultList.result.firstPublicationDate.cdata
metadata['volume'] = obj.responseWrapper.resultList.result.journalInfo.volume.cdata
metadata['issue'] = obj.responseWrapper.resultList.result.journalInfo.issue.cdata
metadata['pages'] = obj.responseWrapper.resultList.result.pageInfo.cdata
#metadata['bibcode'] = obj.responseWrapper.resultList.result.journalInfo.volume.cdata
metadata['doi'] = obj.responseWrapper.resultList.result.DOI.cdata
metadata['issn'] = obj.responseWrapper.resultList.result.journalInfo.journal.ISSN.cdata
metadata['pmid'] = obj.responseWrapper.resultList.result.pmid.cdata
metadata['pmcid'] = obj.responseWrapper.resultList.result.pmcid.cdata
print(metadata)
Add basic search by pmcid
Add basic search by pmcid
Python
mit
tarrow/epmclib
57c09aad6ad8de1fb5bb13d92b69db381f55cdac
lobster/filemanager.py
lobster/filemanager.py
import os
import time

TMP_DIR = "/tmp/lobster"

def get_tempdir():
    tmp_path = TMP_DIR
    if not os.path.isdir(tmp_path):
        os.mkdir(tmp_path)
    return tmp_path

def get_workingdir():
    tmp_dir = get_tempdir()
    dir_name = str(int(time.time()))
    path = "/".join([tmp_dir, dir_name])
    os.mkdir(path)
    return path
Add functions to handle temp directory for download
Add functions to handle temp directory for download
Python
mit
noahfx/lobster
3e52dcc7c4af0ab7208abebdfb59a079ee9b764b
tests/test_iati_wiki.py
tests/test_iati_wiki.py
import pytest
from web_test_base import *

class TestIATIWiki(WebTestBase):
    urls_to_get = [
        "http://wiki.archive.iatistandard.org/"
    ]

    def test_locate_links(self, loaded_request):
        """
        Tests that each page contains links to the defined URLs.
        """
        result = self._get_links_from_page(loaded_request)

        assert "http://www.iatistandard.org" in result
        assert "http://www.iatiregistry.org" in result
Add tests for the IATI wiki
Add tests for the IATI wiki

This adds tests to ensure that the IATI wiki is up and contains the
specified links.
Python
mit
IATI/IATI-Website-Tests
031016e21879ad7c0e3a2c1c888e973bfb23c529
abel/tests/test_hansenlaw.py
abel/tests/test_hansenlaw.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os.path

import numpy as np
from numpy.testing import assert_allclose

from abel.hansenlaw import iabel_hansenlaw
from abel.analytical import GaussianAnalytical
from abel.benchmark import absolute_ratio_benchmark

def test_hansenlaw_shape():
    n = 21
    x = np.ones((n, n), dtype='float32')
    recon = iabel_hansenlaw(x, calc_speeds=False, verbose=False)
    assert recon.shape == (n, n)

def test_hansenlaw_zeros():
    n = 21
    x = np.zeros((n, n), dtype='float32')
    recon = iabel_hansenlaw(x, calc_speeds=False, verbose=False)
    assert_allclose(recon, 0)

def test_hansenlaw_gaussian():
    """Check a gaussian solution for HansenLaw"""
    n = 51
    r_max = 25

    ref = GaussianAnalytical(n, r_max, symmetric=True, sigma=10)
    tr = np.tile(ref.abel[None, :], (n, 1))  # make a 2D array from 1D

    recon = iabel_hansenlaw(tr, calc_speeds=False, verbose=False)
    recon1d = recon[n//2 + n%2]

    ratio = absolute_ratio_benchmark(ref, recon1d)

    # this only passes with a relative tolerance of 0.35, someone would
    # need to look into it.
    assert_allclose(ratio, 1.0, rtol=0.35, atol=0)
Add unit tests for the HansenLaw implementation
Add unit tests for the HansenLaw implementation
Python
mit
PyAbel/PyAbel,rth/PyAbel,DhrubajyotiDas/PyAbel,stggh/PyAbel,huletlab/PyAbel
76c580f04edc1995e2dc9d107f84a714c088c0c2
nth-prime/nth_prime1.py
nth-prime/nth_prime1.py
def nth_prime(n):
    if n <= 0:
        raise ValueError
    for i, prime in enumerate(prime_gen()):
        if n == i + 1:
            return prime

def prime_gen():
    def n_gen():
        n = 2
        while True:
            yield n
            n += 1

    nonprimes = {}
    for n in n_gen():
        prime = nonprimes.pop(n, None)
        if prime is None:
            yield n
            nonprimes[n ** 2] = n
        else:
            x = prime + n
            while x in nonprimes:
                x += prime
            nonprimes[x] = prime
    print nonprimes
Add better solution for nth prime
Add better solution for nth prime
Python
mit
always-waiting/exercism-python
cd4380577061bd3e10c72926db80a830f3e90100
tests/unit/cloud/clouds/openstack_test.py
tests/unit/cloud/clouds/openstack_test.py
# -*- coding: utf-8 -*-
'''
    :codeauthor: :email:`Bo Maryniuk <[email protected]>`
'''

# Import Python libs
from __future__ import absolute_import

# Import Salt Testing Libs
from salttesting import TestCase
from salt.cloud.clouds import openstack
from salttesting.mock import MagicMock, patch
from tests.unit.cloud.clouds import _preferred_ip

class OpenstackTestCase(TestCase):
    '''
    Test case for openstack
    '''
    PRIVATE_IPS = ['0.0.0.0', '1.1.1.1', '2.2.2.2']

    @patch('salt.cloud.clouds.openstack.show_instance',
           MagicMock(return_value={'state': True,
                                   'public_ips': [],
                                   'private_ips': PRIVATE_IPS}))
    @patch('salt.cloud.clouds.openstack.rackconnect', MagicMock(return_value=False))
    @patch('salt.cloud.clouds.openstack.managedcloud', MagicMock(return_value=False))
    @patch('salt.cloud.clouds.openstack.preferred_ip', _preferred_ip(PRIVATE_IPS, ['0.0.0.0']))
    @patch('salt.cloud.clouds.openstack.ssh_interface', MagicMock(return_value=False))
    def test_query_node_data_filter_preferred_ip_addresses(self):
        '''
        Test if query node data is filtering out unpreferred IP addresses.
        '''
        openstack.NodeState = MagicMock()
        openstack.NodeState.RUNNING = True
        openstack.__opts__ = {}

        vm = {'name': None}
        data = MagicMock()
        data.public_ips = []

        with patch('salt.utils.cloud.is_public_ip', MagicMock(return_value=True)):
            assert openstack._query_node_data(vm, data, False).public_ips == ['0.0.0.0']
Add initial unit test for openstack cloud module
Add initial unit test for openstack cloud module
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
df5ee98d6a2e39318e76f67afb4b02cd8e48def1
py/http2/http2/utils.py
py/http2/http2/utils.py
__all__ = [
    'make_ssl_context',
]

import ssl

def make_ssl_context(crt, key):
    ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2)
    ssl_context.load_cert_chain(crt, key)
    if ssl.HAS_ALPN:
        ssl_context.set_alpn_protocols(['h2'])
    else:
        asserts.precond(ssl.HAS_NPN)
        ssl_context.set_npn_protocols(['h2'])
    return ssl_context
Add helper for HTTP/2 SSL context
Add helper for HTTP/2 SSL context
Python
mit
clchiou/garage,clchiou/garage,clchiou/garage,clchiou/garage
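A short usage sketch for the helper above; the certificate and key paths are placeholders, and note that the asserts.precond call in the fallback branch assumes an asserts helper imported elsewhere in the project:

    # Placeholder certificate/key paths; 'h2' is offered via ALPN when available.
    ctx = make_ssl_context('server.crt', 'server.key')
    # ctx is a regular ssl.SSLContext and can be passed wherever one is
    # expected, e.g. asyncio's loop.create_server(..., ssl=ctx).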
9244e9d17ed57e1848bf52566d401e19c2cde8b7
integration-test/491-feature-tests.py
integration-test/491-feature-tests.py
from . import FixtureTest

class FeaturesTest(FixtureTest):
    def test_shops(self):
        self._run_test(
            'http://www.openstreetmap.org/node/2893904480',
            '16/19299/24631', {'kind': 'bakery'})

        self._run_test(
            'http://www.openstreetmap.org/node/886395953',
            '16/19297/24636', {'kind': 'books'})

        self._run_test(
            'http://www.openstreetmap.org/node/2709493928',
            '16/19297/24629', {'kind': 'butcher'})

        self._run_test(
            'http://www.openstreetmap.org/node/2565702300',
            '16/19295/24638', {'kind': 'car'})

        self._run_test(
            'http://www.openstreetmap.org/node/2065155887',
            '16/19310/24632', {'kind': 'car_repair'})

    def _run_test(self, url, zxy, props):
        z, x, y = map(int, zxy.split('/'))
        self.load_fixtures([url])
        self.assert_has_feature(
            z, x, y, 'pois', props)
Add tests for some shop POIs
Add tests for some shop POIs
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
92946362496f950a28357f3dee44b936cc59909a
StandingsCheck.py
StandingsCheck.py
#!/usr/bin/python
from eveapi import eveapi

import ChatKosLookup
import sys

class StandingsChecker:
    def __init__(self, keyID, vCode):
        self.checker = ChatKosLookup.KosChecker()
        self.eveapi = self.checker.eveapi.auth(keyID=keyID, vCode=vCode)

    def check(self):
        contacts = self.eveapi.char.ContactList()
        print 'Personal'
        self.check_internal(contacts.contactList)
        print 'Corp'
        self.check_internal(contacts.corporateContactList)
        print 'Alliance'
        self.check_internal(contacts.allianceContactList)

    def check_internal(self, contacts):
        entities = [(row.contactID, row.contactName, row.standing)
                    for row in contacts if row.contactID > 3100000]
        alive_alliances = [row.allianceID for row in
                           self.checker.eveapi.eve.AllianceList(version=1).alliances]
        remove = {}
        demote = {}
        promote = {}
        for (eid, name, standing) in entities:
            kos = self.checker.koscheck_internal(name)
            if (not eid in alive_alliances and not self.valid_corp(eid)
                    and not self.valid_char(eid)):
                remove[name] = standing
            elif standing < 0 and (kos == False or kos == ChatKosLookup.NPC):
                promote[name] = standing
            elif (standing >= 0 and kos != None and kos != ChatKosLookup.NPC
                    and kos != False):
                demote[name] = standing

        if remove:
            print 'Defunct and can be removed:'
            for (name, standing) in sorted(remove.items()):
                print '%3d > [?]: %s' % (standing, name)
            print ''
        if demote:
            print 'KOS and should be < 0:'
            for (name, standing) in sorted(demote.items()):
                print '%3d > [-]: %s' % (standing, name)
            print ''
        if promote:
            print 'Not KOS and should be >=0 or removed:'
            for (name, standing) in sorted(promote.items()):
                print '%3d > [+]: %s' % (standing, name)
            print ''
        print '---'

    def valid_corp(self, eid):
        try:
            ret = self.checker.eveapi.corp.CorporationSheet(corporationID=eid)
            return (ret.ceoID != 0)
        except eveapi.Error:
            return False

    def valid_char(self, eid):
        try:
            self.checker.eveapi.eve.CharacterInfo(characterID=eid)
            return True
        except eveapi.Error:
            return False

if __name__ == '__main__':
    if len(sys.argv) > 2:
        StandingsChecker(sys.argv[1], sys.argv[2]).check()
    else:
        print ('Usage: %s keyID vCode' % sys.argv[0])
Add standings checker to prevent friendly fire
Add standings checker to prevent friendly fire
Python
mit
lizthegrey/nrds-tools
102855b8d3dc7258c68a1f2bac3bb4c8953732dc
backend/scripts/adminuser.py
backend/scripts/adminuser.py
#!/usr/bin/env python
import rethinkdb as r
from optparse import OptionParser
import sys

def create_group(conn):
    group = {}
    group['name'] = "Admin Group"
    group['description'] = "Administration Group for Materials Commons"
    group['id'] = 'admin'
    group['owner'] = '[email protected]'
    group['users'] = []
    group['birthtime'] = r.now()
    group['mtime'] = r.now()
    r.table('usergroups').insert(group).run(conn)
    admin_group = r.table('usergroups').get('admin')\
                   .run(conn, time_format='raw')
    return admin_group

def add_user(user, group, conn):
    for u in group['users']:
        if u == user:
            return
    group['users'].append(user)
    r.table('usergroups').get('admin').update(group).run(conn)

if __name__ == "__main__":
    parser = OptionParser()
    parser.add_option("-P", "--port", type="int", dest="port",
                      help="rethinkdb port")
    parser.add_option("-u", "--user", type="string", dest="user",
                      help="user to add to admin group")
    (options, args) = parser.parse_args()
    if options.port is None:
        print "You must specify the rethinkdb port"
        sys.exit(1)
    if options.user is None:
        print "You must specify a user to add"
        sys.exit(1)
    conn = r.connect('localhost', options.port, db='materialscommons')
    admin_group = r.table('usergroups').get('admin')\
                   .run(conn, time_format='raw')
    if admin_group is None:
        admin_group = create_group(conn)
    add_user(options.user, admin_group, conn)
Add utility to create administrative users.
Add utility to create administrative users.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
84acd57a114fb2a9c2e17e729586399e66bb0eb9
bin/scrape-bus-stops.py
bin/scrape-bus-stops.py
#!/usr/bin/python3

import urllib.request
import json
import os
import sys

endpoint = 'http://datamall2.mytransport.sg/ltaodataservice/BusStops?$skip='

def get_bus_stops(account_key: str, offset: int):
    url = '{}{}'.format(endpoint, offset)
    req = urllib.request.Request(url, headers={'AccountKey': account_key})
    with urllib.request.urlopen(req) as f:
        bus_stops = json.load(f)
    return bus_stops

def main():
    account_key = os.environ.get('DATAMALL_ACCOUNT_KEY')
    if account_key is None:
        print('Error: DATAMALL_ACCOUNT_KEY environment variable not set.')
        sys.exit(1)

    if len(sys.argv) > 1:
        out_file = sys.argv[1]
    else:
        out_file = './bus_stops.json'

    print('[START] Fetch bus stops')
    with open(out_file, 'w') as f:
        offset = 0
        count = 0
        while True:
            bus_stops = get_bus_stops(account_key, offset)
            if len(bus_stops['value']) == 0:
                break
            count += len(bus_stops['value'])
            f.write('{:04d}: {}\n'.format(offset, json.dumps(bus_stops['value'])))
            sys.stdout.write('\rOffset: {}'.format(offset))
            offset += 50
    print('\rFetched {} bus stops.'.format(count))
    print('[END] Fetch bus stops')

    print('[START] Collect bus stops')
    all_bus_stops = []
    with open(out_file) as f:
        for line in f:
            bus_stops = json.loads(line[6:])
            all_bus_stops.extend(bus_stops)
    print('[END] Collect bus stops')

    print('[START] Write bus stops')
    with open(out_file, 'w') as f:
        json.dump(all_bus_stops, f)
    print('[END] Write bus stops')

if __name__ == "__main__":
    main()
Add script to get bus stops from datamall
[skip ci] Add script to get bus stops from datamall
Python
mit
yi-jiayu/bus-eta-bot,yi-jiayu/bus-eta-bot,yi-jiayu/bus-eta-bot,yi-jiayu/bus-eta-bot
4ec87c35ed6603f1eafb540840a8f978ea87130c
argqueue/queue.py
argqueue/queue.py
import sqlite3 as sql

class Queue(object):
    def __init__(self, db_filename):
        self.con = sql.connect(db_filename)
        self._create_tables()

    def _create_tables(self):
        with self.con:
            cur = self.con.cursor()
            cur.execute("CREATE TABLE IF NOT EXISTS "
                        "Arguments(Id INTEGER PRIMARY KEY AUTOINCREMENT, "
                        "Args TEXT, Status TEXT, "
                        "Created INTEGER)")

    def put(self, arguments):
        with self.con:
            cur = self.con.cursor()
            cur.execute("INSERT INTO Arguments (Args, Status, Created) "
                        "VALUES (?,'PENDING', strftime('%s', 'now'))",
                        (arguments,))

    def pop(self):
        with self.con:
            cur = self.con.cursor()
            cur.execute("BEGIN EXCLUSIVE")
            cur.execute("SELECT Id,Args FROM Arguments WHERE Status='PENDING' "
                        "ORDER BY Id LIMIT 1")
            try:
                row_id, args = cur.fetchone()
            except TypeError:
                raise Exception("No more arguments to pop")
            cur.execute("UPDATE Arguments SET Status='UNKNOWN' WHERE Id=?",
                        (row_id,))
            return args
Create Queue with pop and put
Create Queue with pop and put
Python
mit
bewt85/jobqueue
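A hypothetical round trip with the class above; the database filename and argument string are invented for illustration:

    # Hypothetical usage; 'jobs.db' and the argument string are examples.
    q = Queue('jobs.db')
    q.put('--input data.csv --threads 4')
    args = q.pop()   # oldest PENDING row; its Status flips to 'UNKNOWN'
    print(args)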
a097aef42255b963b91c54494355d34cdc7c08f7
single-pop/singlepop2npy.py
single-pop/singlepop2npy.py
import sys

import numpy as np

def read_phi(flname, n_steps, n_loci):
    sampled_phis = np.zeros((n_steps, n_loci))
    fl = open(flname)
    current_iter_idx = 0  # index used for storage
    last_iter_idx = 0  # index used to identify when we finish a step
    for ln in fl:
        cols = ln.strip().split()
        iter_idx = int(cols[0])
        locus_idx = int(cols[1])
        phi = float(cols[2])
        if last_iter_idx != iter_idx:
            last_iter_idx = iter_idx
            current_iter_idx += 1
        sampled_phis[current_iter_idx, locus_idx] = phi
    fl.close()
    return sampled_phis

if __name__ == "__main__":
    bamova_phi_output_flname = sys.argv[1]
    n_steps = int(sys.argv[2])
    n_loci = int(sys.argv[3])
    npy_flname = sys.argv[4]

    matrix = read_phi(bamova_phi_output_flname, n_steps, n_loci)
    np.save(npy_flname, matrix)
Add conversion script for single pop to numpy
Add conversion script for single pop to numpy
Python
apache-2.0
rnowling/pop-gen-models
3236e1dc9624a4e4a2770cf463e8f366e4eb7cde
algorithms/a_star_tree_misplaced_tiles.py
algorithms/a_star_tree_misplaced_tiles.py
""" pynpuzzle - Solve n-puzzle with Python A* tree search algorithm using misplaced tiles heuristic Version : 1.0.0 Author : Hamidreza Mahdavipanah Repository: http://github.com/mahdavipanah/pynpuzzle License : MIT License """ import heapq from .util import best_first_seach as bfs def search(state, goal_state): """A* tree search using misplaced tiles heuristic""" def gn(node): return node.gn() tiles_places = [] for i in range(len(goal_state)): for j in range(len(goal_state)): heapq.heappush(tiles_places, (goal_state[i][j], (i, j))) def hn(node): misplace_count = 0 for i in range(len(node.state)): for j in range(len(node.state)): tile_i, tile_j = tiles_places[node.state[i][j]] if i != tile_i or j != tile_j: misplace_count += 1 return misplace_count def fn(node): return gn(node) + hn(node) return bfs.search(state, goal_state, fn)
Add a* tree search algorithm using misplaced tiles heuristic
Add a* tree search algorithm using misplaced tiles heuristic
Python
mit
mahdavipanah/pynpuzzle
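For concreteness, a standalone sketch of the misplaced-tiles count the hn closure computes (the blank tile is counted too, matching the code above); the 3x3 states here are invented examples:

    # Invented 3x3 example of the misplaced-tiles heuristic used above.
    goal = [[0, 1, 2], [3, 4, 5], [6, 7, 8]]
    state = [[1, 0, 2], [3, 4, 5], [6, 7, 8]]
    pos = {goal[i][j]: (i, j) for i in range(3) for j in range(3)}
    h = sum((i, j) != pos[state[i][j]] for i in range(3) for j in range(3))
    assert h == 2  # tiles 0 and 1 are swapped, so two tiles are out of place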
9f5e11f789c01e3a6da0ff2c7376c4ead2741a6a
pyramid_authsanity/tests/test_includeme.py
pyramid_authsanity/tests/test_includeme.py
import pytest

from pyramid.authorization import ACLAuthorizationPolicy
import pyramid.testing

from zope.interface import (
    Interface,
    implementedBy,
    providedBy,
)
from zope.interface.verify import (
    verifyClass,
    verifyObject
)

from pyramid_services import IServiceClassifier

from pyramid_authsanity.interfaces import (
    IAuthSourceService,
)

class TestAuthServicePolicyIntegration(object):
    @pytest.fixture(autouse=True)
    def pyramid_config(self, request):
        from pyramid.interfaces import IDebugLogger
        self.config = pyramid.testing.setUp()
        self.config.set_authorization_policy(ACLAuthorizationPolicy())

        def finish():
            del self.config
            pyramid.testing.tearDown()

        request.addfinalizer(finish)

    def _makeOne(self, settings):
        self.config.registry.settings.update(settings)
        self.config.include('pyramid_authsanity')

    def test_include_me(self):
        from pyramid_authsanity.policy import AuthServicePolicy
        self._makeOne({})
        self.config.commit()
        introspector = self.config.registry.introspector
        auth_policy = introspector.get('authentication policy', None)
        assert isinstance(auth_policy['policy'], AuthServicePolicy)
        with pytest.raises(ValueError):
            find_service_factory(self.config, IAuthSourceService)

    def test_include_me_cookie_no_secret(self):
        settings = {'authsanity.source': 'cookie'}
        with pytest.raises(RuntimeError):
            self._makeOne(settings)

    def test_include_me_cookie_with_secret(self):
        from pyramid_authsanity.policy import AuthServicePolicy
        settings = {'authsanity.source': 'cookie', 'authsanity.secret': 'sekrit'}
        self._makeOne(settings)
        self.config.commit()
        introspector = self.config.registry.introspector
        auth_policy = introspector.get('authentication policy', None)
        assert isinstance(auth_policy['policy'], AuthServicePolicy)
        assert verifyClass(IAuthSourceService,
                           find_service_factory(self.config, IAuthSourceService))

    def test_include_me_session(self):
        from pyramid_authsanity.policy import AuthServicePolicy
        settings = {'authsanity.source': 'session'}
        self._makeOne(settings)
        self.config.commit()
        introspector = self.config.registry.introspector
        auth_policy = introspector.get('authentication policy', None)
        assert isinstance(auth_policy['policy'], AuthServicePolicy)
        assert verifyClass(IAuthSourceService,
                           find_service_factory(self.config, IAuthSourceService))

def find_service_factory(
    config,
    iface=Interface,
):
    context_iface = providedBy(None)
    svc_types = (IServiceClassifier, context_iface)

    adapters = config.registry.adapters
    svc_factory = adapters.lookup(svc_types, iface, name='')
    if svc_factory is None:
        raise ValueError('could not find registered service')
    return svc_factory
Add tests for the includeme/settings
Add tests for the includeme/settings
Python
isc
usingnamespace/pyramid_authsanity
6aab268f697a2cbdc39aa6ccf59801bd04068626
examples/livestream_datalogger.py
examples/livestream_datalogger.py
from pymoku import Moku, MokuException
from pymoku.instruments import *
import time, logging, traceback

logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.DEBUG)

# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku('192.168.1.106')#.get_by_name('example')

i = m.discover_instrument()
if i is None or i.type != 'oscilloscope':
    print "No or wrong instrument deployed"
    i = Oscilloscope()
    m.attach_instrument(i)
else:
    print "Attached to existing Oscilloscope"

i.set_defaults()
i.set_samplerate(1000) #10ksps
i.set_xmode(OSC_ROLL)
i.commit()

# TODO: Symbolic constants, simplify this logic in the underlying driver.
if i.datalogger_status() in [1, 2, 6]:
    i.datalogger_stop()

i.datalogger_start(start=0, duration=10, filetype='net')

try:
    while True:
        ch, idx, d = i.datalogger_get_samples(timeout=5)
        print "Received samples %d to %d from channel %d" % (idx, idx + len(d), ch)
except MokuException as e:
    print e
except Exception as e:
    print traceback.format_exc()
finally:
    i.datalogger_stop()
    m.close()
Add example code to use stream from network
Datalogger: Add example code to use stream from network
Python
mit
benizl/pymoku,liquidinstruments/pymoku
cd13ddd24df33e3a34cd5fc71c3ad0b352952f8b
police_api/exceptions.py
police_api/exceptions.py
from requests.exceptions import HTTPError


class BaseException(Exception):
    pass


class APIError(BaseException, HTTPError):
    """
    The API responded with a non-200 status code.
    """
    def __init__(self, http_error):
        self.message = getattr(http_error, 'message', None)
        self.response = getattr(http_error, 'response', None)


class InvalidCategoryException(BaseException):
    """
    The requested category was not found, or is unavailable for the given
    date.
    """
    pass
from requests.exceptions import HTTPError


class BaseException(Exception):
    pass


class APIError(BaseException, HTTPError):
    """
    The API responded with a non-200 status code.
    """
    def __init__(self, http_error):
        self.message = getattr(http_error, 'message', None)
        self.response = getattr(http_error, 'response', None)

    def __str__(self):
        return self.message or '<unknown error code>'


class InvalidCategoryException(BaseException):
    """
    The requested category was not found, or is unavailable for the given
    date.
    """
    pass
Add __str__ to APIError exception
Add __str__ to APIError exception
Python
mit
rkhleics/police-api-client-python
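A small, hypothetical demonstration of what the added __str__ changes; constructing HTTPError directly like this is illustrative only:

    # Illustrative only: with no message attached, __str__ now has a fallback.
    from requests.exceptions import HTTPError

    err = APIError(HTTPError())
    print(str(err))   # prints '<unknown error code>' instead of an empty string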
018ecf79f5235882b47d37f363f746fce271a7cd
fabfile-development.py
fabfile-development.py
from fabric.api import *

# Fill out USER and HOSTS configuration before running
env.user = ''
env.hosts = ['']
env.code_dir = '/home/%s/rtd/checkouts/readthedocs.org' % (env.user)
env.virtualenv = '/home/%s/rtd' % (env.user)

def install_prerequisites():
    """Install prerequisites."""
    sudo("apt-get -y install python-dev python-pip git redis-server texlive texlive-latex-extra")
    sudo("pip install virtualenv")

def create_virtualenv():
    """Create virtualenv."""
    run("virtualenv --no-site-packages --distribute rtd")

def clone_repository():
    """Clone readthedocs repo"""
    run("mkdir %s/checkouts" % (env.virtualenv))
    with cd("%s/checkouts" % env.virtualenv):
        run("git clone http://github.com/rtfd/readthedocs.org.git")

def pip_requirements():
    """Install pip requirements"""
    with cd(env.code_dir):
        with prefix("source %s/bin/activate" % (env.virtualenv)):
            run("pip install -r pip_requirements.txt")

def build_db():
    """Build database"""
    with prefix("source %s/bin/activate" % (env.virtualenv)):
        run("%s/readthedocs/manage.py syncdb" % (env.code_dir))

def migrate_db():
    """Migrate database"""
    with prefix("source %s/bin/activate" % (env.virtualenv)):
        run("%s/readthedocs/manage.py migrate" % (env.code_dir))

def load_testprojects():
    """Load test data and update repos"""
    with prefix("source %s/bin/activate" % (env.virtualenv)):
        run("%s/readthedocs/manage.py loaddata test_data" % (env.code_dir))
        run("%s/readthedocs/manage.py update_repos" % (env.code_dir))

@task(default=True)
def install():
    """Install readthedocs"""
    install_prerequisites()
    create_virtualenv()
    clone_repository()
    pip_requirements()
    build_db()
    migrate_db()
    load_testprojects()

@task
def clean():
    """Clean up everything to start over"""
    sudo("rm -rf %s" % (env.virtualenv))
    sudo("pip uninstall virtualenv")
    sudo("apt-get -y purge python-dev python-pip git redis-server texlive texlive-latex-extra")
    sudo("apt-get -y autoremove --purge")
Add fabfile to install readthedocs for development.
Add fabfile to install readthedocs for development.

* Automates step-by-step instructions in Installation docs.
* Installing prerequisites is specific to Debian/Ubuntu. Edit if using
  a different distribution or different OS.
* Clean is also Debian/Ubuntu specific.
Python
mit
laplaceliu/readthedocs.org,clarkperkins/readthedocs.org,LukasBoersma/readthedocs.org,safwanrahman/readthedocs.org,agjohnson/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,dirn/readthedocs.org,stevepiercy/readthedocs.org,attakei/readthedocs-oauth,cgourlay/readthedocs.org,davidfischer/readthedocs.org,attakei/readthedocs-oauth,jerel/readthedocs.org,nyergler/pythonslides,GovReady/readthedocs.org,nikolas/readthedocs.org,Carreau/readthedocs.org,Tazer/readthedocs.org,takluyver/readthedocs.org,KamranMackey/readthedocs.org,laplaceliu/readthedocs.org,nyergler/pythonslides,cgourlay/readthedocs.org,VishvajitP/readthedocs.org,takluyver/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,jerel/readthedocs.org,singingwolfboy/readthedocs.org,kenshinthebattosai/readthedocs.org,techtonik/readthedocs.org,nyergler/pythonslides,takluyver/readthedocs.org,wijerasa/readthedocs.org,davidfischer/readthedocs.org,kenwang76/readthedocs.org,gjtorikian/readthedocs.org,nikolas/readthedocs.org,d0ugal/readthedocs.org,hach-que/readthedocs.org,davidfischer/readthedocs.org,pombredanne/readthedocs.org,mrshoki/readthedocs.org,davidfischer/readthedocs.org,kdkeyser/readthedocs.org,kdkeyser/readthedocs.org,sils1297/readthedocs.org,agjohnson/readthedocs.org,CedarLogic/readthedocs.org,espdev/readthedocs.org,SteveViss/readthedocs.org,d0ugal/readthedocs.org,ojii/readthedocs.org,tddv/readthedocs.org,mrshoki/readthedocs.org,kdkeyser/readthedocs.org,michaelmcandrew/readthedocs.org,cgourlay/readthedocs.org,asampat3090/readthedocs.org,michaelmcandrew/readthedocs.org,sid-kap/readthedocs.org,royalwang/readthedocs.org,mhils/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,raven47git/readthedocs.org,CedarLogic/readthedocs.org,sid-kap/readthedocs.org,istresearch/readthedocs.org,kenwang76/readthedocs.org,hach-que/readthedocs.org,Tazer/readthedocs.org,soulshake/readthedocs.org,raven47git/readthedocs.org,laplaceliu/readthedocs.org,LukasBoersma/readthedocs.org,mrshoki/readthedocs.org,tddv/readthedocs.org,fujita-shintaro/readthedocs.org,Carreau/readthedocs.org,singingwolfboy/readthedocs.org,asampat3090/readthedocs.org,stevepiercy/readthedocs.org,titiushko/readthedocs.org,wanghaven/readthedocs.org,kdkeyser/readthedocs.org,fujita-shintaro/readthedocs.org,sid-kap/readthedocs.org,stevepiercy/readthedocs.org,GovReady/readthedocs.org,Tazer/readthedocs.org,ojii/readthedocs.org,royalwang/readthedocs.org,KamranMackey/readthedocs.org,CedarLogic/readthedocs.org,pombredanne/readthedocs.org,wanghaven/readthedocs.org,johncosta/private-readthedocs.org,d0ugal/readthedocs.org,stevepiercy/readthedocs.org,mhils/readthedocs.org,techtonik/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,agjohnson/readthedocs.org,attakei/readthedocs-oauth,Tazer/readthedocs.org,SteveViss/readthedocs.org,jerel/readthedocs.org,fujita-shintaro/readthedocs.org,safwanrahman/readthedocs.org,rtfd/readthedocs.org,jerel/readthedocs.org,laplaceliu/readthedocs.org,GovReady/readthedocs.org,Carreau/readthedocs.org,raven47git/readthedocs.org,takluyver/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,fujita-shintaro/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,espdev/readthedocs.org,clarkperkins/readthedocs.org,sils1297/readthedocs.org,asampat3090/readthedocs.org,SteveViss/readthedocs.org,sunnyzwh/readthedocs.org,kenshinthebattosai/readthedocs.org,d0ugal/readthedocs.org,nikolas/readthedocs.org,istresearch/readthedocs.org,sunnyzwh/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,emawind84/readthedocs.org,nyergler/pythonslides,dirn/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,wijerasa/readthedocs.org,soulshake/readthedocs.org,atsuyim/readthedocs.org,gjtorikian/readthedocs.org,emawind84/readthedocs.org,safwanrahman/readthedocs.org,espdev/readthedocs.org,kenwang76/readthedocs.org,michaelmcandrew/readthedocs.org,johncosta/private-readthedocs.org,sid-kap/readthedocs.org,dirn/readthedocs.org,GovReady/readthedocs.org,atsuyim/readthedocs.org,atsuyim/readthedocs.org,clarkperkins/readthedocs.org,nikolas/readthedocs.org,LukasBoersma/readthedocs.org,mhils/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,espdev/readthedocs.org,istresearch/readthedocs.org
2fa79ad053f8af0acc121543a9ceb85cc07c2ac2
step_0/scripts/instructional_sampling.py
step_0/scripts/instructional_sampling.py
# coding=utf-8
import sys

reload(sys)
sys.setdefaultencoding('utf-8')

# sc is an existing SparkContext.
from pyspark.sql import SQLContext
sqlContext = SQLContext(sc)

directory = "/Volumes/JS'S FIT/json"
datasets = sqlContext.read.json(directory)

file_count = datasets.where(datasets['verb'].isNull()).count()
assert file_count == 21888  # expecting 21888

info_dataset = datasets.select('info')
info_dataset.registerTempTable('info')

all_tweets_count = info_dataset.select('info.activity_count').groupBy().sum('activity_count').collect()
# expecting 2682988

all_posts = datasets.where(datasets['verb'] == 'post')
all_posts_count = all_posts.count()
assert all_posts_count == 1570398
print '{} posts'.format(all_posts_count)  # expecting 1570398

all_shares = datasets.where(datasets['verb'] == 'share')
all_shares_count = all_shares.count()
assert all_shares_count == 1112590
print '{} shares'.format(all_shares_count)  # expecting 1112590

assert all_tweets_count[0][0] == all_posts_count + all_shares_count

sample_seed = 2016
fraction = 0.000022  # this give 30 samples
sample_posts = all_posts.sample(False, fraction, sample_seed)
sample_posts_count = sample_posts.count()
print '{} sample posts'.format(sample_posts_count)

sample_posts_file = "./output/sample_posts.json"
sample_posts_jsons = sample_posts.toJSON().collect()
with open(sample_posts_file, 'a') as f:
    for post in sample_posts_jsons:
        f.write(post)
        f.write('\n')
Add step 0, which creates a sample file for labelling instruction
Add step 0, which creates a sample file for labelling instruction
Python
apache-2.0
chuajiesheng/twitter-sentiment-analysis
d269568387b622861001fdc39eeeaff03ebd9a78
formulamanager.py
formulamanager.py
import os
import imp

_formula_cache = {}
_default_search_path = [os.path.join(os.path.dirname(__file__), "..", "Formula")]

class FormulaManager:
    @staticmethod
    def _find(name, search_path=[]):
        file_path = ""
        for path in search_path + _default_search_path:
            if os.path.exists(os.path.join(path, name + ".py")):
                file_path = os.path.join(path, name + ".py")
        return file_path

    @staticmethod
    def get(name, search_path=[]):
        file_path = ""
        # name can be a path
        if os.path.isfile(name):
            file_path = name
            name = os.path.splitext(os.path.basename(file_path))[0]
        if _formula_cache.has_key(name):
            return _formula_cache[name]
        else:
            if not file_path:
                file_path = FormulaManager._find(name, search_path)
            formula = imp.load_source(name, file_path)
            _formula_cache[name] = formula
            return formula
Implement class for managing formulas
Implement class for managing formulas
Python
mit
peterl94/CLbundler,peterl94/CLbundler
eff79d3dc25e2cd5a296f50e051bd950e73ebf47
generate_sound.py
generate_sound.py
from scipy.io import wavfile
import numpy as np
import subprocess
from scipy.signal import hilbert, chirp

from tuning import pitch_to_freq

def sample_time(since, until, fs=44100.):
    '''
    Generates time sample in given interval [since; until]
    with given sampling rate (fs).
    '''
    return np.arange(since, until, 1. / fs)

def sine(samples, freq=1., amplitude=1., phase=0.):
    '''
    Samples the sine function given the time samples,
    frequency (Hz), amplitude and phase [0; 2 * np.pi).
    '''
    print(freq)
    return amplitude * np.sin(2 * np.pi * freq * samples + phase)

def white_noise(samples, amplitude=1.):
    return amplitude * np.random.rand(*samples.shape)

def save_wav(samples, filename, fs=44100, normalize=False, factor=((2**15))-1):
    samples = samples / np.max(np.abs(samples)) if normalize else samples
    wavfile.write(filename, fs, np.int16(samples * factor))

def play(filename):
    subprocess.call(['afplay', filename])

def generate_and_play(func, duration=1.):
    filename = 'test.wav'
    t = sample_time(0, duration)
    samples = func(t)
    save_wav(samples, filename, normalize=True)
    play(filename)

if __name__ == '__main__':
    # plain 440 Hz A for 1 second
    generate_and_play(lambda t: sine(t, 440))

    # 1 Hz dissonance
    generate_and_play(lambda t: np.sum(sine(t, f) for f in (440, 441)), duration=3)

    # 10 Hz dissonance
    generate_and_play(lambda t: np.sum(sine(t, 440 + 10 * i) for i in range(0, 2)), duration=3)

    # 10 harmonics with same amplitude
    generate_and_play(lambda t: np.sum(sine(t, 440 * (i + 1)) for i in range(0, 10)))

    # C-G fifth
    generate_and_play(lambda t: np.sum(sine(t, pitch_to_freq(i)) for i in (0, 7)))

    # C major chord
    generate_and_play(lambda t: np.sum(sine(t, pitch_to_freq(i)) for i in (0, 4, 7)))
Add basic sound generation (time sampling, sine wave, white noise, save to WAV file, play via afplay).
Add basic sound generation (time sampling, sine wave, white noise, save to WAV file, play via afplay).
Python
mit
bzamecnik/tfr,bzamecnik/tfr
c12d70090b47765a658a98c29fd332ca6ec057d7
bin/migrate-tips.py
bin/migrate-tips.py
from gratipay.wireup import db, env
from gratipay.models.team import Team, AlreadyMigrated

db = db(env())

slugs = db.all("""
    SELECT slug
      FROM teams
     WHERE is_approved IS TRUE
""")

for slug in slugs:
    team = Team.from_slug(slug)
    try:
        team.migrate_tips()
        print("Migrated tips for '%s'" % slug)
    except AlreadyMigrated:
        print("'%s' already migrated." % slug)

print("Done.")
Add script for migrating tips to new teams
Add script for migrating tips to new teams
Python
mit
studio666/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,studio666/gratipay.com,mccolgst/www.gittip.com,eXcomm/gratipay.com,gratipay/gratipay.com,eXcomm/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,gratipay/gratipay.com,mccolgst/www.gittip.com,studio666/gratipay.com,eXcomm/gratipay.com
63b4b1dd0301686f9d14842d680a1f41eb7d0596
candidates/tests/test_person_view.py
candidates/tests/test_person_view.py
# Smoke tests for viewing a candidate's page

import re

from mock import patch
from django_webtest import WebTest

from .fake_popit import FakePersonCollection

@patch('candidates.popit.PopIt')
class TestPersonView(WebTest):

    def test_get_tessa_jowell(self, mock_popit):
        mock_popit.return_value.persons = FakePersonCollection
        response = self.app.get('/person/2009/tessa-jowell')
        self.assertTrue(
            re.search(
                r'''(?msx)
                <h1>Tessa\ Jowell</h1>\s*
                <p>Candidate\ for
                \ <a\ href="/constituency/65808/dulwich-and-west-norwood">Dulwich
                \ and\ West\ Norwood</a>\ in\ 2015</p>''',
                unicode(response)
            )
        )
Add a basic smoke test for a person view page
Add a basic smoke test for a person view page
Python
agpl-3.0
datamade/yournextmp-popit,openstate/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,openstate/yournextrepresentative,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextmp-popit,DemocracyClub/yournextrepresentative,mysociety/yournextrepresentative,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,neavouli/yournextrepresentative,datamade/yournextmp-popit
2a35dc8835aab9471198a2e1bd32de0ad31014eb
ci/send-test/run.py
ci/send-test/run.py
import bernhard
import socket
import struct
import time
import sys


def events(n):
    events = list()
    for i in range(0, n):
        events.append(bernhard.Event(params={'host': "host-%i" % i,
                                             'service': 'service-foo',
                                             'tags': ['foo', 'bar', 'baz']}))
    return events


def encoded_events(n):
    msg = bernhard.Message(events=events(n))
    return msg.raw


def send(n):
    raw = encoded_events(n)
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(('localhost', 5555))
    # length-prefixed frame: 4-byte big-endian size, then the payload
    buff = struct.pack('!I', len(raw)) + raw
    # deliberately dribble the frame out in chunks to exercise partial reads
    sock.sendall(buff[0:200])
    time.sleep(0.1)
    sock.sendall(buff[200:10000])
    time.sleep(0.3)
    sock.sendall(buff[10000:15000])
    time.sleep(0.5)
    sock.sendall(buff[15000:len(buff)])
    rxlen = struct.unpack('!I', sock.recv(4))[0]
    msg = bernhard.Message(raw=sock.recv(rxlen, socket.MSG_WAITALL))
    assert(msg.ok)


send(5000)
Add integration test for receiving chunks
Add integration test for receiving chunks
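The test writes a 4-byte big-endian length prefix followed by the payload (struct.pack('!I', len(raw)) + raw) and dribbles it out in small chunks. For reference, a minimal sketch of the receiving side of such a frame, standard library only (the socket handling below is illustrative, not taken from the repository):

import socket
import struct

def recv_exact(sock, n):
    # recv() may return fewer bytes than asked for, so loop until n arrive
    buf = b''
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise ConnectionError('peer closed mid-frame')
        buf += chunk
    return buf

def recv_frame(sock):
    (length,) = struct.unpack('!I', recv_exact(sock, 4))
    return recv_exact(sock, length)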
Python
mit
juruen/cavalieri,juruen/cavalieri,juruen/cavalieri,juruen/cavalieri
fdc1723e5d4769902f7896264e27a7d475f2ba1a
test/features/steps/access_db.py
test/features/steps/access_db.py
#!/usr/bin/env python2
from __future__ import print_function

import logging
import time
import sys

import psycopg2
from behave import *
from contextlib import contextmanager

from cluster_under_test import *
from db_retriable import *


@when('application inserts {number} batches of test data')
def step_insert_test_data(context, number):
    context.dbt.next_batch_number()
    context.dbt.get_record_number()
    context.dbt.insert_continuously(int(number))


@then(u'no new records found')
def step_no_new_records(context):
    t.assertEqual(0, context.dbt.get_record_number())


@then(u'reading from postgres service url should {expected_result}')
def count_records(context, expected_result):
    """@expected_result: fail or work"""
    try:
        con, cur = context.dbt.db.create_connection_with_cursor()
        cur.execute("SELECT count(*) from testdata;")
        context.dbt.found_records = cur.fetchall()[0][0]
        logging.info("Found {} records in the DB.".format(context.dbt.found_records))
        result = 'work'
    except psycopg2.OperationalError as e:
        logging.warning("Can not read from DB: " + str(e))
        result = 'fail'
    t.assertEqual(expected_result, result)


@then(u'last committed batch - {number} - should be visible')
def step_impl(context, number):
    count_records(context, 'work')
    t.assertEqual(context.dbt.records_in_batches(number), context.dbt.found_records)


@then('I run optional CHECKPOINT for faster replication')
def step_impl(context):
    """
    Do not do this in production.

    Spreading of checkpoint writes through the half of the checkpoint
    interval (default is 5 min.) is to reduce the IO load on the master.

    Read:
    https://www.postgresql.org/docs/9.6/static/app-pgbasebackup.html (`-c fast` switch)
    https://www.postgresql.org/docs/9.6/static/sql-checkpoint.html
    https://www.postgresql.org/docs/9.6/static/wal-configuration.html
    """
    context.dbt.db.execute('CHECKPOINT;')


@when(u'user changes admin password to {password}')
def step_impl(context, password):
    context.dbt.db.execute("ALTER USER admin WITH ENCRYPTED PASSWORD '{}';".format(password))


@then(u'user can access database with password {password}')
def step_impl(context, password):
    db = DbRetriable(host=ClusterUnderTest.service_url, dbname="postgres",
                     user="admin", password=password)
    db.execute("select now();", retry=False)
Add db related test steps
Add db related test steps
Python
mit
Galeria-Kaufhof/private-postgres-rds,Galeria-Kaufhof/private-postgres-rds,Galeria-Kaufhof/private-postgres-rds
1316ed65550b47d694b870093c38fef47e88a06c
tests/test_entities.py
tests/test_entities.py
from __future__ import unicode_literals
from ftfy import fix_text, fix_text_segment
from nose.tools import eq_


def test_entities():
    example = '&amp;\n<html>\n&amp;'
    eq_(fix_text(example), '&\n<html>\n&amp;')
    eq_(fix_text_segment(example), '&amp;\n<html>\n&amp;')

    eq_(fix_text(example, fix_entities=True), '&\n<html>\n&')
    eq_(fix_text_segment(example, fix_entities=True), '&\n<html>\n&')

    eq_(fix_text(example, fix_entities=False), '&amp;\n<html>\n&amp;')
    eq_(fix_text_segment(example, fix_entities=False), '&amp;\n<html>\n&amp;')

    eq_(fix_text_segment('&lt;&gt;', fix_entities=False), '&lt;&gt;')
    eq_(fix_text_segment('&lt;&gt;', fix_entities=True), '<>')
    eq_(fix_text_segment('&lt;&gt;'), '<>')
Add tests for the fix_entities parameter
Add tests for the fix_entities parameter
Python
mit
rspeer/python-ftfy
ef99851831472c75308220ca6a2ac6d14c17e150
comics/crawlers/spikedmath.py
comics/crawlers/spikedmath.py
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
from comics.crawler.utils.lxmlparser import LxmlParser


class ComicMeta(BaseComicMeta):
    name = 'Spiked Math'
    language = 'en'
    url = 'http://www.spikedmath.com/'
    start_date = '2009-08-24'
    history_capable_days = 20
    schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
    time_zone = -5
    rights = 'Mike, CC BY-NC-SA 2.5'


class ComicCrawler(BaseComicCrawler):
    def _get_url(self):
        self.parse_feed('http://feeds.feedburner.com/SpikedMath')

        for entry in self.feed.entries:
            if self.timestamp_to_date(entry.updated_parsed) == self.pub_date:
                self.title = entry.title
                self.web_url = entry.link

            if self.title and self.web_url:
                break

        if not self.web_url:
            return

        page = LxmlParser(self.web_url)
        self.url = page.src('div.asset-body img')
Add crawler for 'Spiked Math'
Add crawler for 'Spiked Math'
Python
agpl-3.0
klette/comics,jodal/comics,datagutten/comics,datagutten/comics,klette/comics,jodal/comics,jodal/comics,datagutten/comics,klette/comics,datagutten/comics,jodal/comics
ef6457f6cbe6c56ade9d84bce738f8ab58cd4e95
encryptit/tests/dump_json/test_encoder.py
encryptit/tests/dump_json/test_encoder.py
import json

from nose.tools import assert_equal

from encryptit.dump_json import OpenPGPJsonEncoder


def test_encode_bytes():
    result = json.dumps(bytes(bytearray([0x01, 0x08])), cls=OpenPGPJsonEncoder)
    assert_equal('{"octets": "01:08", "length": 2}', result)


def test_encode_bytearray():
    result = json.dumps(bytearray([0x01, 0x08]), cls=OpenPGPJsonEncoder)
    assert_equal('{"octets": "01:08", "length": 2}', result)
Add test for JSON encoding `bytes` and `bytearray`
Add test for JSON encoding `bytes` and `bytearray`
Python
agpl-3.0
paulfurley/encryptit,paulfurley/encryptit
e303425602e7535fb16d97a2f24a326791ef646d
tests/test_job.py
tests/test_job.py
import pytest
import multiprocessing
from copy import deepcopy
from pprint import pprint

import virtool.job


class TestProcessor:

    def test(self, test_job):
        """
        Test that the processor changes the ``_id`` field to ``job_id``.
        """
        processed = virtool.job.processor(deepcopy(test_job))
        test_job["job_id"] = test_job.pop("_id")
        assert processed == test_job


class TestDispatchProcessor:

    def test(self, test_job):
        """
        Test that the dispatch processor properly formats a raw job document
        into a dispatchable format.
        """
        assert virtool.job.dispatch_processor(test_job) == {
            "added": "2017-03-24T13:20:35.780926",
            "args": {
                "algorithm": "nuvs",
                "analysis_id": "e410429b",
                "index_id": "465428b0",
                "name": None,
                "sample_id": "1e01a382",
                "username": "igboyes"
            },
            "job_id": "4c530449",
            "mem": 16,
            "proc": 10,
            "progress": 1.0,
            "stage": "import_results",
            "state": "complete",
            "task": "nuvs",
            "user_id": "igboyes"
        }


class TestJob:

    def test(self, mocker):
        job_id = "foobar"

        settings = {
            "db_name": "test",
            "db_host": "localhost",
            "db_port": 27017
        }

        queue = multiprocessing.Queue()

        task = "foobar"
        task_args = dict()
        proc = 1
        mem = 1

        m = mocker.patch("setproctitle.setproctitle")

        job = virtool.job.Job(job_id, settings, queue, task, task_args, proc, mem)

        assert job._job_id == job_id

        job.start()

        print(queue.get())
        print(queue.get())

        assert 0


class TestTermination:

    def test(self):
        """
        Test that the :class:`virtool.job.Termination` exception works properly.
        """
        with pytest.raises(virtool.job.Termination) as err:
            raise virtool.job.Termination

        assert "Termination" in str(err)


class TestJobError:

    def test(self):
        """
        Test that the :class:`virtool.job.JobError` exception works properly.
        """
        with pytest.raises(virtool.job.JobError) as err:
            raise virtool.job.JobError

        assert "JobError" in str(err)


class TestStageMethod:

    def test(self):
        """
        Test that the stage_method decorator adds a ``is_stage_method``
        attribute to a function with a value of ``True``.
        """
        @virtool.job.stage_method
        def func():
            return "Hello world"

        assert func.is_stage_method is True
Add first job module tests
Add first job module tests
Python
mit
igboyes/virtool,igboyes/virtool,virtool/virtool,virtool/virtool
8860cb45cbc0be048a0f87335b7a150a4d8b0b7a
PRESUBMIT.py
PRESUBMIT.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

_EXCLUDED_PATHS = (
)


def _CommonChecks(input_api, output_api):
  results = []
  results.extend(input_api.canned_checks.PanProjectChecks(
      input_api, output_api, excluded_paths=_EXCLUDED_PATHS))
  return results


def CheckChangeOnUpload(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results


def CheckChangeOnCommit(input_api, output_api):
  results = []
  results.extend(_CommonChecks(input_api, output_api))
  return results
Add a basic presubmit script.
Add a basic presubmit script. git-svn-id: 3a56fcae908c7e16d23cb53443ea4795ac387cf2@70 0e6d7f2b-9903-5b78-7403-59d27f066143
Python
bsd-3-clause
bpsinc-native/src_third_party_trace-viewer,bpsinc-native/src_third_party_trace-viewer,bpsinc-native/src_third_party_trace-viewer,bpsinc-native/src_third_party_trace-viewer
b008bcab5078e7ac598e743bc8f739393c62334f
contrib/add-capsule-header.py
contrib/add-capsule-header.py
#!/usr/bin/python3
#
# Copyright (C) 2019 Richard Hughes <[email protected]>
#
# SPDX-License-Identifier: LGPL-2.1+

import sys
import uuid
import argparse
import struct

CAPSULE_FLAGS_PERSIST_ACROSS_RESET = 0x00010000
CAPSULE_FLAGS_POPULATE_SYSTEM_TABLE = 0x00020000
CAPSULE_FLAGS_INITIATE_RESET = 0x00040000


def main(args):

    # parse GUID from command line
    try:
        guid = uuid.UUID(args.guid)
    except ValueError as e:
        print(e)
        return 1
    try:
        with open(args.bin, 'rb') as f:
            bin_data = f.read()
    except FileNotFoundError as e:
        print(e)
        return 1

    # check if already has header
    hdrsz = struct.calcsize('<16sIII')
    if len(bin_data) >= hdrsz:
        hdr = struct.unpack('<16sIII', bin_data[:hdrsz])
        imgsz = hdr[3]
        if imgsz == len(bin_data):
            print('Replacing existing CAPSULE_HEADER of:')
            guid_mixed = uuid.UUID(bytes_le=hdr[0])
            hdrsz_old = hdr[1]
            flags = hdr[2]
            print('GUID: %s' % guid_mixed)
            print('HdrSz: 0x%04x' % hdrsz_old)
            print('Flags: 0x%04x' % flags)
            print('PayloadSz: 0x%04x' % imgsz)
            bin_data = bin_data[hdrsz_old:]

    # set header flags
    flags = CAPSULE_FLAGS_PERSIST_ACROSS_RESET | CAPSULE_FLAGS_INITIATE_RESET
    if args.flags is not None:
        # argparse always defines the attribute, so test its value here
        flags = int(args.flags, 16)

    # build update capsule header
    imgsz = hdrsz + len(bin_data)
    hdr = struct.pack('<16sIII', guid.bytes_le, hdrsz, flags, imgsz)
    with open(args.cap, 'wb') as f:
        f.write(hdr + bin_data)
    print('Wrote capsule %s' % args.cap)
    print('GUID: %s' % guid)
    print('HdrSz: 0x%04x' % hdrsz)
    print('Flags: 0x%04x' % flags)
    print('PayloadSz: 0x%04x' % imgsz)
    return 0


parser = argparse.ArgumentParser(description='Add capsule header on firmware')
parser.add_argument('--guid', help='GUID of the device', required=True)
parser.add_argument('--bin', help='Path to the .bin file', required=True)
parser.add_argument('--cap', help='Output capsule file path', required=True)
parser.add_argument('--flags', help='Flags, e.g. 0x40000')
args = parser.parse_args()
sys.exit(main(args))
Add a simple script to add a capsule header
Add a simple script to add a capsule header This may be helpful for OEMs and ODMs shipping 'bare' firmware.
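The header the script writes is nothing more than struct.pack('<16sIII', guid_le, hdrsz, flags, imgsz) prepended to the payload. A hedged sketch of reading one back to sanity-check the output (the file name is illustrative):

import struct
import uuid

with open('fw.cap', 'rb') as f:
    raw = f.read(struct.calcsize('<16sIII'))

guid_le, hdrsz, flags, imgsz = struct.unpack('<16sIII', raw)
print(uuid.UUID(bytes_le=guid_le))  # device GUID
print(hex(flags))                   # e.g. 0x50000 = persist-across-reset | initiate-reset
print(imgsz)                        # header size + payload size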
Python
lgpl-2.1
fwupd/fwupd,hughsie/fwupd,fwupd/fwupd,hughsie/fwupd,vathpela/fwupd,vathpela/fwupd,hughsie/fwupd,vathpela/fwupd,fwupd/fwupd,vathpela/fwupd,hughsie/fwupd,fwupd/fwupd
f525d04e978c35132db6ff77f455cf22b486482f
mod/httpserver.py
mod/httpserver.py
"""wrap SimpleHTTPServer and prevent Ctrl-C stack trace output""" import SimpleHTTPServer import SocketServer import log try : log.colored(log.GREEN, 'serving on http://localhost:8000 (Ctrl-C to quit)') httpd = SocketServer.TCPServer(('localhost', 8000), SimpleHTTPServer.SimpleHTTPRequestHandler) httpd.serve_forever() except KeyboardInterrupt: log.colored(log.GREEN, '\nhttp server stopped') exit(0)
"""wrap SimpleHTTPServer and prevent Ctrl-C stack trace output""" import SimpleHTTPServer import SocketServer import log try : log.colored(log.GREEN, 'serving on http://localhost:8000 (Ctrl-C to quit)') SocketServer.TCPServer.allow_reuse_address = True httpd = SocketServer.TCPServer(('localhost', 8000), SimpleHTTPServer.SimpleHTTPRequestHandler) httpd.serve_forever() except KeyboardInterrupt: httpd.shutdown() httpd.server_close() log.colored(log.GREEN, '\nhttp server stopped') exit(0)
Allow reuse-addr at http server start
Allow reuse-addr at http server start
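For context, a rough Python 3 equivalent of this pattern (http.server/socketserver replace SimpleHTTPServer/SocketServer; sketch only, needs Python 3.6+ for the context manager, and the colored logging is omitted):

import http.server
import socketserver

socketserver.TCPServer.allow_reuse_address = True  # avoid 'Address already in use' on quick restarts
with socketserver.TCPServer(('localhost', 8000), http.server.SimpleHTTPRequestHandler) as httpd:
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        pass  # the with-block closes the listening socket cleanly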
Python
mit
floooh/fips,floooh/fips,michaKFromParis/fips,floooh/fips,michaKFromParis/fips,anthraxx/fips,mgerhardy/fips,anthraxx/fips,mgerhardy/fips,code-disaster/fips,code-disaster/fips
2585d3fac2cedf17a5d851629f8e898d0ed6ec61
umibukela/migrations/0014_auto_20170110_1019.py
umibukela/migrations/0014_auto_20170110_1019.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('umibukela', '0013_auto_20161215_1252'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='surveysource',
            name='survey',
        ),
        migrations.DeleteModel(
            name='SurveySource',
        ),
    ]
Add forgotten table delete for table we don't need
Add forgotten table delete for table we don't need
Python
mit
Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela
eec00e07d8e50d260249eab6cbefc976cc184683
crypto/PRESUBMIT.py
crypto/PRESUBMIT.py
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Chromium presubmit script for src/net.

See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details on the presubmit API built into gcl.
"""


def GetPreferredTrySlaves(project, change):
  # Changes in crypto often need a corresponding OpenSSL edit.
  return ['linux_redux']
Add crypto pre-submit that will add the openssl builder to the default try-bot list.
Add crypto pre-submit that will add the openssl builder to the default try-bot list. BUG=None TEST=git try should run a linux_redux try job too. Review URL: http://codereview.chromium.org/9235031 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@119094 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
Fireblend/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,rogerwang/chromium,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,ChromiumWebApps/chromium,ltilve/chromium,hgl888/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,dednal/chromium.src,patrickm/chromium.src,littlstar/chromium.src,mohamed--abdel-maksoud/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,zcbenz/cefode-chromium,chuan9/chromium-crosswalk,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,dushu1203/chromium.src,rogerwang/chromium,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,keishi/chromium,markYoungH/chromium.src,Just-D/chromium-1,Jonekee/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Jonekee/chromium.src,timopulkkinen/BubbleFish,robclark/chromium,mogoweb/chromium-crosswalk,rogerwang/chromium,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,pozdnyakov/chromium-crosswalk,axinging/chromium-crosswalk,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,Chilledheart/chromium,hujiajie/pa-chromium,hgl888/chromium-crosswalk,zcbenz/cefode-chromium,Just-D/chromium-1,robclark/chromium,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,dushu1203/chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,jaruba/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,keishi/chromium,ChromiumWebApps/chromium,zcbenz/cefode-chromium,patrickm/chromium.src,keishi/chromium,M4sse/chromium.src,nacl-webkit/chrome_deps,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,rogerwang/chromium,dushu1203/chromium.src,ChromiumWebApps/chromium,ondra-novak/chromium.src,ltilve/chromium,keishi/chromium,jaruba/chromium.src,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,M4sse/chromium.src,patrickm/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,ChromiumWebApps/chromium,Just-D/chromium-1,jaruba/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,hgl888/chromium-crosswalk,nacl-webkit/chrome_deps,ltilve/chromium,dushu1203/chromium.src,anirudhSK/chromium,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,Chilledheart/chromium,hujiajie/pa-chromium,anirudhSK/chromium,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,timopulkkinen/BubbleFish,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,anirudhSK/chromium,mohamed--abdel-maksoud/chr
omium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,crosswalk-project/chromium-crosswalk-efl,rogerwang/chromium,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,ltilve/chromium,pozdnyakov/chromium-crosswalk,dednal/chromium.src,bright-sparks/chromium-spacewalk,markYoungH/chromium.src,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,dushu1203/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,bright-sparks/chromium-spacewalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,ChromiumWebApps/chromium,dushu1203/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,junmin-zhu/chromium-rivertrail,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,jaruba/chromium.src,ltilve/chromium,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,zcbenz/cefode-chromium,pozdnyakov/chromium-crosswalk,dednal/chromium.src,markYoungH/chromium.src,junmin-zhu/chromium-rivertrail,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,markYoungH/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Jonekee/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,zcbenz/cefode-chromium,anirudhSK/chromium,timopulkkinen/BubbleFish,keishi/chromium,dushu1203/chromium.src,keishi/chromium,TheTypoMaster/chromium-crosswalk,robclark/chromium,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,littlstar/chromium.src,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,anirudhSK/chromium,hujiajie/pa-chromium,Jonekee/chromium.src,Jonekee/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,Fireblend/chromium-crosswalk,jaruba/chromium.src,jaruba/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,keishi/chromium,robclark/chromium,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,robclark/chromium,robclark/chromium,keishi/chromium,hgl888/chromium-crosswalk-efl,ondra-novak/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,zcbenz/cefode-chromium,Just-D/chromium-1,nacl-webkit/chrome_deps,ChromiumWebApps/chromium,M4sse/chromium.src,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,robclark/chromium,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,nacl-webkit/chrome_deps,dednal/chromium.src,jaruba/chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,littlstar/chromium.src,hgl888/chromium-crosswalk-efl,ltilve/chromium,Chilledheart/chromium,hujiajie/pa-chromium,littlstar/chromium.src,hgl888/chromium
-crosswalk-efl,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,crosswalk-project/chromium-crosswalk-efl,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ondra-novak/chromium.src,dednal/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,littlstar/chromium.src,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,keishi/chromium,nacl-webkit/chrome_deps,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,junmin-zhu/chromium-rivertrail,chuan9/chromium-crosswalk,M4sse/chromium.src,rogerwang/chromium,Chilledheart/chromium,rogerwang/chromium,junmin-zhu/chromium-rivertrail,rogerwang/chromium,hujiajie/pa-chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,dednal/chromium.src,dushu1203/chromium.src,ltilve/chromium,littlstar/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk-efl,nacl-webkit/chrome_deps,anirudhSK/chromium,rogerwang/chromium,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,Jonekee/chromium.src,rogerwang/chromium,anirudhSK/chromium,dednal/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,robclark/chromium,junmin-zhu/chromium-rivertrail,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,keishi/chromium,robclark/chromium,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,bright-sparks/chromium-spacewalk,robclark/chromium,junmin-zhu/chromium-rivertrail,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,anirudhSK/chromium,keishi/chromium,krieger-od/nwjs_chromium.src,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,dushu1203/chromium.src,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium
ecd9e9a3f97bdf9489b6dc750d736855a2c109c2
tests/test_setup_py_hook.py
tests/test_setup_py_hook.py
from unittest import TestCase, mock

from semantic_release import setup_hook


class SetupPyHookTests(TestCase):
    @mock.patch('semantic_release.cli.main')
    def test_setup_hook_should_not_call_main_if_to_few_args(self, mock_main):
        setup_hook(['setup.py'])
        self.assertFalse(mock_main.called)

    @mock.patch('semantic_release.cli.main')
    def test_setup_hook_should_call_main(self, mock_main):
        setup_hook(['setup.py', 'publish'])
        self.assertTrue(mock_main.called)
Add tests for the setup.py hook
Add tests for the setup.py hook
Python
mit
wlonk/python-semantic-release,relekang/python-semantic-release,jvrsantacruz/python-semantic-release,relekang/python-semantic-release,riddlesio/python-semantic-release
5e48d933795cc367ba0c5c378994c1b6c5cb3fb2
osf/migrations/0023_merge_20170503_1947.py
osf/migrations/0023_merge_20170503_1947.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-05-04 00:47
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('osf', '0022_auto_20170503_1818'),
        ('osf', '0021_retraction_date_retracted'),
    ]

    operations = [
    ]
Add merge migration for hotfix 0.107.6
Add merge migration for hotfix 0.107.6
Python
apache-2.0
Nesiehr/osf.io,Johnetordoff/osf.io,icereval/osf.io,caseyrollins/osf.io,saradbowman/osf.io,baylee-d/osf.io,chrisseto/osf.io,cslzchen/osf.io,TomBaxter/osf.io,cslzchen/osf.io,adlius/osf.io,aaxelb/osf.io,mfraezz/osf.io,chennan47/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,TomBaxter/osf.io,aaxelb/osf.io,baylee-d/osf.io,chrisseto/osf.io,chennan47/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,felliott/osf.io,pattisdr/osf.io,binoculars/osf.io,felliott/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,Nesiehr/osf.io,Nesiehr/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,sloria/osf.io,mfraezz/osf.io,binoculars/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,crcresearch/osf.io,erinspace/osf.io,aaxelb/osf.io,erinspace/osf.io,leb2dg/osf.io,cslzchen/osf.io,felliott/osf.io,sloria/osf.io,chennan47/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,hmoco/osf.io,adlius/osf.io,brianjgeiger/osf.io,adlius/osf.io,cwisecarver/osf.io,mattclark/osf.io,hmoco/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,crcresearch/osf.io,felliott/osf.io,mattclark/osf.io,adlius/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,leb2dg/osf.io,Nesiehr/osf.io,baylee-d/osf.io,laurenrevere/osf.io,pattisdr/osf.io,laurenrevere/osf.io,icereval/osf.io,caneruguz/osf.io,caneruguz/osf.io,leb2dg/osf.io,hmoco/osf.io,HalcyonChimera/osf.io,laurenrevere/osf.io,chrisseto/osf.io,pattisdr/osf.io,cwisecarver/osf.io,binoculars/osf.io,chrisseto/osf.io,icereval/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,crcresearch/osf.io,hmoco/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io
7a15963a81da6120a79f282dcf43e5e508d13ff5
queries.py
queries.py
#!/usr/bin/env python
# Brandon Heller
#
# Run geo, time, and combined queries to get an early idea of how interactive
# this might be.

import time
from datetime import datetime

from pymongo import Connection

INPUT_DB = 'processed'

conn = Connection(slave_okay=True)
processed = conn[INPUT_DB]


def time_queries():
    time_ranges = [
        ['april_month', [datetime(2010, 4, 1), datetime(2010, 5, 1)]],
        ['october_month', [datetime(2010, 10, 1), datetime(2010, 11, 1)]],
        ['november_week', [datetime(2010, 11, 1), datetime(2010, 11, 8)]]
    ]

    for entry in time_ranges:
        desc, range = entry
        print "--------------------------------------------"
        print "desc: %s\nrange: %s" % (desc, range)

        match = {"committed_date_native": {"$gte": range[0], "$lt": range[1]}}
        start = time.time()
        matching = processed.commits.find(match)
        matched = matching.count()
        elapsed = float(time.time() - start)
        print "matched: %i" % matched
        print "time: %0.4f" % elapsed


def geo_queries():
    # http://www.mongodb.org/display/DOCS/Geospatial+Indexing
    # mongodb geo bounds queries are represented by lower-left and upper-right
    # corners.
    geo_ranges = [
        ['bayarea', [[37.2, -123.0], [38.0, -121.0]]],
        ['cali', [[32.81, -125.0], [42.0, -114.0]]],
        ['america', [[25.0, -125.0], [50.0, -65.0]]],
        ['europe', [[33.0, 40.0], [71.55, 71.55]]],
        ['australia', [[-48.0, 113.1], [-10.5, 179.0]]]
    ]

    for entry in geo_ranges:
        desc, box = entry
        print "--------------------------------------------"
        print "desc: %s\nrange: %s" % (desc, box)

        match = {'loc': {"$within": {"$box": box}}}
        start = time.time()
        matching = processed.commits.find(match)
        matched = matching.count()
        elapsed = float(time.time() - start)
        print "matched: %i" % matched
        print "time: %0.4f" % elapsed


time_queries()
#geo_queries()
Add time and geo query examples
Add time and geo query examples
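The header comment promises combined queries as well, which the script does not run yet; in MongoDB the time and geo filters simply merge into a single match document. A hedged sketch reusing the names defined above:

# combined time + geo query: both conditions in one match document
match = {
    "committed_date_native": {"$gte": datetime(2010, 4, 1), "$lt": datetime(2010, 5, 1)},
    "loc": {"$within": {"$box": [[37.2, -123.0], [38.0, -121.0]]}},
}
matched = processed.commits.find(match).count()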
Python
mit
emarschner/gothub,emarschner/gothub,emarschner/gothub,emarschner/gothub
1294679a7ea4ceacf610e4dc103677aeedc7b7ea
common/src/split.py
common/src/split.py
from pysamimport import pysam
import re, os, hashlib


class SplitBAM(object):
    def __init__(self, bamfile, readgroups, batchsize=10, directory='.', index=False):
        self.bamfile = bamfile
        self.bambase, self.bamextn = self.bamfile.rsplit('.', 1)
        self.readgroups = readgroups
        self.batchsize = batchsize
        self.directory = directory
        self.index = index

    def normalize_readgroup(self, rg):
        return re.sub(r'[^A-Z0-9.]', '_', rg)

    def readgroup_filename(self, rg):
        uniqstr = hashlib.md5(rg.encode('ascii')).hexdigest().lower()[:5]
        return os.path.join(self.directory,
                            self.bambase + '.' + self.normalize_readgroup(rg) +
                            "." + uniqstr + "." + self.bamextn)

    def iterator(self):
        seenrg = set()
        while True:
            outsam = dict()
            more = False
            samfile = pysam.AlignmentFile(self.bamfile, "rb", require_index=False)
            for al in samfile.fetch(until_eof=True):
                rg = self.readgroups.group(al)
                if rg and rg not in seenrg:
                    if rg not in outsam:
                        if len(outsam) >= self.batchsize:
                            more = True
                            continue
                        else:
                            rgfilename = self.readgroup_filename(rg)
                            outsam[rg] = (rgfilename,
                                          pysam.AlignmentFile(rgfilename, "wb", template=samfile))
                    outsam[rg][1].write(al)
            for rg in outsam:
                outsam[rg][1].close()
                if self.index:
                    pysam.index(outsam[rg][0])
                yield rg, outsam[rg][0]
            seenrg.update(outsam)
            outsam = dict()
            if not more:
                break
Split bam into files based on read grouping
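A hedged usage sketch: the only thing SplitBAM requires of readgroups is a .group(alignment) method returning a key (or a falsy value to skip), so the grouper below is illustrative, not part of the repository:

class TagReadGroups(object):
    """Illustrative grouper keyed on the alignment's RG tag."""
    def group(self, al):
        return al.get_tag('RG') if al.has_tag('RG') else None

splitter = SplitBAM('sample.bam', TagReadGroups(), batchsize=10, index=True)
for rg, path in splitter.iterator():
    print(rg, '->', path)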
Split bam into files based on read grouping
Python
mit
HorvathLab/NGS,HorvathLab/NGS,HorvathLab/NGS,HorvathLab/NGS,HorvathLab/NGS
2dec0f46c6b8ed61f6fd4723bb43711603ea754f
examples/github_test.py
examples/github_test.py
from seleniumbase import BaseCase


class GitHubTests(BaseCase):

    def test_github(self):
        self.open("https://github.com/")
        self.update_text("input.header-search-input", "SeleniumBase\n")
        self.click('a[href="/seleniumbase/SeleniumBase"]')
        self.assert_element("div.repository-content")
        self.assert_text("SeleniumBase", "h1")
        self.click('a[title="seleniumbase"]')
        self.click('a[title="fixtures"]')
        self.click('a[title="base_case.py"]')
        self.assert_text("Code", "nav a.selected")
Add a new example test (GitHub)
Add a new example test (GitHub)
Python
mit
mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/SeleniumBase
5d8f4c3dc7fbd8bc7380e272889640271b512582
tests/io/test_pickle.py
tests/io/test_pickle.py
import pickle

import pytest

from bonobo import Bag, PickleReader, PickleWriter, open_fs
from bonobo.constants import BEGIN, END
from bonobo.execution.node import NodeExecutionContext
from bonobo.util.testing import CapturingNodeExecutionContext


def test_write_pickled_dict_to_file(tmpdir):
    fs, filename = open_fs(tmpdir), 'output.pkl'

    writer = PickleWriter(path=filename)
    context = NodeExecutionContext(writer, services={'fs': fs})
    context.write(BEGIN, Bag({'foo': 'bar'}), Bag({'foo': 'baz', 'ignore': 'this'}), END)

    context.start()
    context.step()
    context.step()
    context.stop()

    assert pickle.loads(fs.open(filename, 'rb').read()) == {'foo': 'bar'}

    with pytest.raises(AttributeError):
        getattr(context, 'file')


def test_read_pickled_list_from_file(tmpdir):
    fs, filename = open_fs(tmpdir), 'input.pkl'
    fs.open(filename, 'wb').write(pickle.dumps([
        ['a', 'b', 'c'],
        ['a foo', 'b foo', 'c foo'],
        ['a bar', 'b bar', 'c bar']
    ]))

    reader = PickleReader(path=filename)
    context = CapturingNodeExecutionContext(reader, services={'fs': fs})

    context.start()
    context.write(BEGIN, Bag(), END)
    context.step()
    context.stop()

    assert len(context.send.mock_calls) == 2

    args0, kwargs0 = context.send.call_args_list[0]
    assert len(args0) == 1 and not len(kwargs0)

    args1, kwargs1 = context.send.call_args_list[1]
    assert len(args1) == 1 and not len(kwargs1)

    assert args0[0].args[0] == {
        'a': 'a foo',
        'b': 'b foo',
        'c': 'c foo',
    }

    assert args1[0].args[0] == {
        'a': 'a bar',
        'b': 'b bar',
        'c': 'c bar',
    }
Add tests for pickle functionality
Add tests for pickle functionality
Python
apache-2.0
python-bonobo/bonobo,hartym/bonobo,hartym/bonobo,python-bonobo/bonobo,hartym/bonobo,python-bonobo/bonobo
d184afa1cfcef372b50ab8b231fe6db01b497f65
projects/gsc/experiments/gsc_onecyclelr.py
projects/gsc/experiments/gsc_onecyclelr.py
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2020, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
#

"""
Run a simple GSC experiment using OneCycleLR. The parameters used here are
derived from earlier ImageNet experiments and are unlikely to be optimal.
"""

from copy import deepcopy

import torch

from .base import DEFAULT_SPARSE_CNN

SPARSE_CNN_ONECYCLELR = deepcopy(DEFAULT_SPARSE_CNN)
SPARSE_CNN_ONECYCLELR.update(
    lr_scheduler_class=torch.optim.lr_scheduler.OneCycleLR,
    lr_scheduler_args=dict(
        max_lr=6.0,
        div_factor=6,  # initial_lr = 1.0
        final_div_factor=4000,  # min_lr = 0.00025
        pct_start=0.1,
        epochs=30,
        anneal_strategy="linear",
        max_momentum=0.01,
        cycle_momentum=False,
    ),
    optimizer_args=dict(
        lr=0.1,
        weight_decay=0.0001,
        momentum=0.0,
        nesterov=False,
    ),
)

CONFIGS = dict(
    sparse_cnn_onecyclelr=SPARSE_CNN_ONECYCLELR,
)
Add a basic example GSC experiment with OneCycleLR
Add a basic example GSC experiment with OneCycleLR
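The two inline comments follow from OneCycleLR's definitions: initial_lr = max_lr / div_factor and min_lr = initial_lr / final_div_factor. A quick check of the arithmetic for the values above:

max_lr, div_factor, final_div_factor = 6.0, 6, 4000

initial_lr = max_lr / div_factor        # 6.0 / 6 = 1.0, matching the comment
min_lr = initial_lr / final_div_factor  # 1.0 / 4000 = 0.00025, matching the comment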
Python
agpl-3.0
numenta/nupic.research,numenta/nupic.research,subutai/nupic.research,mrcslws/nupic.research,subutai/nupic.research,mrcslws/nupic.research
39a15c3842faa8ffeae78ec31db54656888448ec
testcases/OpTestRebootTimeout.py
testcases/OpTestRebootTimeout.py
#!/usr/bin/env python2
# OpenPOWER Automated Test Project
#
# Contributors Listed Below - COPYRIGHT 2018
# [+] International Business Machines Corp.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
#

import unittest
import re
import time

import OpTestConfiguration
from common.OpTestSystem import OpSystemState
from common.OpTestConstants import OpTestConstants as BMC_CONST


class RebootTime():
    def setUp(self):
        conf = OpTestConfiguration.conf
        self.cv_HOST = conf.host()
        self.cv_IPMI = conf.ipmi()
        self.cv_SYSTEM = conf.system()

    def runTest(self):
        self.setup_test()
        # Don't use run_command() in case we actually reboot quickly
        self.c.sol.sendline("reboot")
        start = time.time()
        self.c.sol.expect("OPAL: Reboot request", timeout=120)
        print("Time to OPAL reboot handler: {} seconds".format(time.time() - start))


class Skiroot(RebootTime, unittest.TestCase):
    def setup_test(self):
        self.cv_SYSTEM.goto_state(OpSystemState.PETITBOOT_SHELL)
        self.c = self.cv_SYSTEM.sys_get_ipmi_console()
        self.cv_SYSTEM.host_console_unique_prompt()


class Host(RebootTime, unittest.TestCase):
    def setup_test(self):
        self.cv_SYSTEM.goto_state(OpSystemState.OS)
        self.c = self.cv_SYSTEM.sys_get_ipmi_console()
        self.cv_SYSTEM.host_console_login()
        self.cv_SYSTEM.host_console_unique_prompt()
Test time to complete reboot call
TestReboot: Test time to complete reboot call Useful for tracking the time we're losing between calling reboot and actually rebooting. Signed-off-by: Samuel Mendoza-Jonas <[email protected]>
Python
apache-2.0
open-power/op-test-framework,open-power/op-test-framework,open-power/op-test-framework
918cef54c3cd83f41d1322ffc509c78e28b341f5
tests/test_redis/test_lua_error_return.py
tests/test_redis/test_lua_error_return.py
#!/usr/bin/env python
#coding: utf-8

from unittest import TestCase

from redis import Redis, ResponseError

from .common import *


class LuaReturnErrorTestCase(TestCase):
    def test_lua_return_error(self):
        """Test the error described on issue 404 is fixed.

        https://github.com/twitter/twemproxy/issues/404
        """
        r = getconn()
        p = r.pipeline(transaction=False)
        p.set("test_key", "bananas!")
        p.eval('return {err="dummyerror"}', 1, "dummy_key")
        p.get("test_key")
        set_result, eval_result, get_result = p.execute(raise_on_error=False)
        assert_equal(True, set_result)
        assert_equal(True, isinstance(eval_result, ResponseError))
        assert_equal(str(eval_result), "dummyerror")
        assert_equal(get_result, b"bananas!")
Add testcase of error with no space
Add testcase of error with no space Cherry-picked from https://github.com/twitter/twemproxy/pull/406 The alternate fix 6c962267d7fab8595940367b0cc8fb6f75f66cea was used Amended to work with the updated test framework Related to https://github.com/twitter/twemproxy/issues/404 Co-Authored-By: Tyson Andre <[email protected]>
Python
apache-2.0
ifwe/twemproxy,ifwe/twemproxy,ifwe/twemproxy,ifwe/twemproxy
bead9f754bb8a3b6ffc55ae24c7436771c0f792e
CodeFights/fileNaming.py
CodeFights/fileNaming.py
#!/usr/local/bin/python
# Code Fights File Naming Problem


def fileNaming(names):
    valid = []
    tmp = dict()
    for name in names:
        if name not in tmp:
            valid.append(name)
            tmp[name] = True
        else:
            # That file name has been used
            k = 1
            new = name
            while new in tmp:
                new = name + '(' + str(k) + ')'
                k += 1
            valid.append(new)
            tmp[new] = True
    return valid


def main():
    tests = [
        [
            ["doc", "doc", "image", "doc(1)", "doc"],
            ["doc", "doc(1)", "image", "doc(1)(1)", "doc(2)"]
        ],
        [
            ["a(1)", "a(6)", "a", "a", "a", "a", "a", "a", "a", "a", "a", "a"],
            ["a(1)", "a(6)", "a", "a(2)", "a(3)", "a(4)", "a(5)", "a(7)",
             "a(8)", "a(9)", "a(10)", "a(11)"]
        ],
        [
            ["dd", "dd(1)", "dd(2)", "dd", "dd(1)", "dd(1)(2)", "dd(1)(1)",
             "dd", "dd(1)"],
            ["dd", "dd(1)", "dd(2)", "dd(3)", "dd(1)(1)", "dd(1)(2)",
             "dd(1)(1)(1)", "dd(4)", "dd(1)(3)"]
        ]
    ]

    for t in tests:
        res = fileNaming(t[0])
        ans = t[1]
        if ans == res:
            print("PASSED: fileNaming({}) returned {}"
                  .format(t[0], res))
        else:
            print("FAILED: fileNaming({}) returned {}, answer: {}"
                  .format(t[0], res, ans))


if __name__ == '__main__':
    main()
Solve Code Fights file naming problem
Solve Code Fights file naming problem
Python
mit
HKuz/Test_Code
57ff677027eec19c0d659e050675e7a320123637
tests/test_serialize_response.py
tests/test_serialize_response.py
from typing import List

import pytest
from fastapi import FastAPI
from pydantic import BaseModel, ValidationError
from starlette.testclient import TestClient

app = FastAPI()


class Item(BaseModel):
    name: str
    price: float = None
    owner_ids: List[int] = None


@app.get("/items/invalid", response_model=Item)
def get_invalid():
    return {"name": "invalid", "price": "foo"}


@app.get("/items/innerinvalid", response_model=Item)
def get_innerinvalid():
    return {"name": "double invalid", "price": "foo", "owner_ids": ["foo", "bar"]}


@app.get("/items/invalidlist", response_model=List[Item])
def get_invalidlist():
    return [
        {"name": "foo"},
        {"name": "bar", "price": "bar"},
        {"name": "baz", "price": "baz"},
    ]


client = TestClient(app)


def test_invalid():
    with pytest.raises(ValidationError):
        client.get("/items/invalid")


def test_double_invalid():
    with pytest.raises(ValidationError):
        client.get("/items/innerinvalid")


def test_invalid_list():
    with pytest.raises(ValidationError):
        client.get("/items/invalidlist")
Add tests for validation errors in response
:white_check_mark: Add tests for validation errors in response
Python
mit
tiangolo/fastapi,tiangolo/fastapi,tiangolo/fastapi
6d1ed4bf18ded809371e383e87679ed1b5714699
pyromancer/test/mock_objects.py
pyromancer/test/mock_objects.py
from pyromancer.objects import Connection


class MockConnection(Connection):

    def __init__(self, *args, **kwargs):
        self.outbox = []

    def write(self, data):
        self.outbox.append(data)
Add mock Connection object for testing purposes.
Add mock Connection object for testing purposes.
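A hedged sketch of how such a mock is typically exercised in a test (the IRC-style message text is made up, not from the repository):

conn = MockConnection()
conn.write('PRIVMSG #channel :hello')

# nothing touches a real socket; the written line is captured for assertions
assert conn.outbox == ['PRIVMSG #channel :hello']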
Python
mit
Gwildor/Pyromancer
dde87e6b0d2de331e536d335ead00db5d181ee96
spacy/tests/parser/test_add_label.py
spacy/tests/parser/test_add_label.py
'''Test the ability to add a label to a (potentially trained) parsing model.'''
from __future__ import unicode_literals
import pytest
import numpy.random
from thinc.neural.optimizers import Adam
from thinc.neural.ops import NumpyOps

from ...attrs import NORM
from ...gold import GoldParse
from ...vocab import Vocab
from ...tokens import Doc
from ...pipeline import NeuralDependencyParser

numpy.random.seed(0)


@pytest.fixture
def vocab():
    return Vocab(lex_attr_getters={NORM: lambda s: s})


@pytest.fixture
def parser(vocab):
    parser = NeuralDependencyParser(vocab)
    parser.cfg['token_vector_width'] = 4
    parser.cfg['hidden_width'] = 6
    parser.cfg['hist_size'] = 0
    parser.add_label('left')
    parser.begin_training([], **parser.cfg)
    sgd = Adam(NumpyOps(), 0.001)

    for i in range(30):
        losses = {}
        doc = Doc(vocab, words=['a', 'b', 'c', 'd'])
        gold = GoldParse(doc, heads=[1, 1, 3, 3],
                         deps=['left', 'ROOT', 'left', 'ROOT'])
        parser.update([doc], [gold], sgd=sgd, losses=losses)
    return parser


def test_add_label(parser):
    doc = Doc(parser.vocab, words=['a', 'b', 'c', 'd'])
    doc = parser(doc)
    assert doc[0].head.i == 1
    assert doc[0].dep_ == 'left'
    assert doc[1].head.i == 1
    assert doc[2].head.i == 3
    assert doc[2].head.i == 3
    parser.add_label('right')
    doc = Doc(parser.vocab, words=['a', 'b', 'c', 'd'])
    doc = parser(doc)
    assert doc[0].head.i == 1
    assert doc[0].dep_ == 'left'
    assert doc[1].head.i == 1
    assert doc[2].head.i == 3
    assert doc[2].head.i == 3
    sgd = Adam(NumpyOps(), 0.001)
    for i in range(10):
        losses = {}
        doc = Doc(parser.vocab, words=['a', 'b', 'c', 'd'])
        gold = GoldParse(doc, heads=[1, 1, 3, 3],
                         deps=['right', 'ROOT', 'left', 'ROOT'])
        parser.update([doc], [gold], sgd=sgd, losses=losses)
    doc = Doc(parser.vocab, words=['a', 'b', 'c', 'd'])
    doc = parser(doc)
    assert doc[0].dep_ == 'right'
    assert doc[2].dep_ == 'left'
Add tests for adding parser actions
Add tests for adding parser actions
Python
mit
recognai/spaCy,aikramer2/spaCy,recognai/spaCy,honnibal/spaCy,recognai/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,recognai/spaCy,aikramer2/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy
16a36338fecb21fb3e9e6a15a7af1a438da48c79
apps/jobs/migrations/0003_jobs_per_page.py
apps/jobs/migrations/0003_jobs_per_page.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('jobs', '0002_auto_20140925_1117'),
    ]

    operations = [
        migrations.AddField(
            model_name='jobs',
            name='per_page',
            field=models.IntegerField(default=5, null=True, verbose_name=b'jobs per page', blank=True),
            preserve_default=True,
        ),
    ]
Add missing `per_page` migration file.
Add missing `per_page` migration file.
Python
mit
onespacemedia/cms-jobs,onespacemedia/cms-jobs
6f024ec7fada73388e5a9c1df9446747c8e1e586
tests/utils/test_http.py
tests/utils/test_http.py
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from warehouse.utils.http import is_safe_url


class TestHttp:
    # FROM https://github.com/django/django/blob/
    # 011a54315e46acdf288003566b8570440f5ac985/tests/utils_tests/test_http.py
    def test_is_safe_url(self):
        for bad_url in ('http://example.com',
                        'http:///example.com',
                        'https://example.com',
                        'ftp://exampel.com',
                        r'\\example.com',
                        r'\\\example.com',
                        r'/\\/example.com',
                        r'\\\example.com',
                        r'\\example.com',
                        r'\\//example.com',
                        r'/\/example.com',
                        r'\/example.com',
                        r'/\example.com',
                        'http:///example.com',
                        'http:/\//example.com',
                        'http:\/example.com',
                        'http:/\example.com',
                        'javascript:alert("XSS")',
                        '\njavascript:alert(x)',
                        '\x08//example.com',
                        '\n'):
            assert not is_safe_url(bad_url, host='testserver'), \
                "{} should be blocked".format(bad_url)
        for good_url in ('/view/?param=http://example.com',
                         '/view/?param=https://example.com',
                         '/view?param=ftp://exampel.com',
                         'view/?param=//example.com',
                         'https://testserver/',
                         'HTTPS://testserver/',
                         '//testserver/',
                         '/url%20with%20spaces/'):
            assert is_safe_url(good_url, host='testserver'), \
                "{} should be allowed".format(good_url)
Copy is_safe_url unit test from Django.
Copy is_safe_url unit test from Django.
Python
apache-2.0
ismail-s/warehouse,HonzaKral/warehouse,dstufft/warehouse,HonzaKral/warehouse,pypa/warehouse,karan/warehouse,karan/warehouse,dstufft/warehouse,wlonk/warehouse,ismail-s/warehouse,wlonk/warehouse,pypa/warehouse,alex/warehouse,ismail-s/warehouse,chopmann/warehouse,alex/warehouse,karan/warehouse,alex/warehouse,ismail-s/warehouse,HonzaKral/warehouse,karan/warehouse,ismail-s/warehouse,pypa/warehouse,dstufft/warehouse,pypa/warehouse,chopmann/warehouse,dstufft/warehouse,alex/warehouse,karan/warehouse,wlonk/warehouse,alex/warehouse,chopmann/warehouse,HonzaKral/warehouse
ad6670874f37c52f4a15f30e1ab2682bd81f40f8
python/helpers/profiler/yappi_profiler.py
python/helpers/profiler/yappi_profiler.py
import yappi


class YappiProfile(object):
    """ Wrapper class that represents Yappi profiling backend with API matching
        the cProfile.
    """
    def __init__(self):
        self.stats = None

    def runcall(self, func, *args, **kw):
        self.enable()
        try:
            return func(*args, **kw)
        finally:
            self.disable()

    def enable(self):
        yappi.start()

    def disable(self):
        yappi.stop()

    def create_stats(self):
        self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats

    def getstats(self):
        if self.stats is None:
            self.create_stats()
        return self.stats

    def dump_stats(self, file):
        import marshal
        f = open(file, 'wb')
        marshal.dump(self.getstats(), f)
        f.close()
import yappi


class YappiProfile(object):
    """ Wrapper class that represents Yappi profiling backend with API matching
        the cProfile.
    """
    def __init__(self):
        self.stats = None

    def runcall(self, func, *args, **kw):
        self.enable()
        try:
            return func(*args, **kw)
        finally:
            self.disable()

    def enable(self):
        yappi.start()

    def disable(self):
        yappi.stop()

    def create_stats(self):
        self.stats = yappi.convert2pstats(yappi.get_func_stats()).stats

    def getstats(self):
        # always rebuild the snapshot so repeated captures see fresh data
        self.create_stats()
        return self.stats

    def dump_stats(self, file):
        import marshal
        f = open(file, 'wb')
        marshal.dump(self.getstats(), f)
        f.close()
Fix capturing of the 2nd and subsequent snapshots (PY-15823).
Fix capturing of the 2nd and subsequent snapshots (PY-15823).
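A hedged sketch of driving the wrapper's cProfile-style API (the workload and file name below are illustrative, not from the repository):

profiler = YappiProfile()

def work():
    return sum(i * i for i in range(10000))

profiler.runcall(work)                 # profile a single call
profiler.dump_stats('snapshot.prof')   # marshalled pstats data, loadable via pstats.Stats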
Python
apache-2.0
adedayo/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,diorcety/intellij-community,da1z/intellij-community,asedunov/intellij-community,akosyakov/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,clumsy/intellij-community,fitermay/intellij-community,MichaelNedzelsky/intellij-community,kdwink/intellij-community,fitermay/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,ryano144/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,amith01994/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,amith01994/intellij-community,diorcety/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,salguarnieri/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,supersven/intellij-community,supersven/intellij-community,ibinti/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,gnuhub/intellij-community,asedunov/intellij-community,salguarnieri/intellij-community,fnouama/intellij-community,supersven/intellij-community,asedunov/intellij-community,semonte/intellij-community,wreckJ/intellij-community,signed/intellij-community,adedayo/intellij-community,supersven/intellij-community,alphafoobar/intellij-community,holmes/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,samthor/intellij-community,jagguli/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,caot/intellij-community,hurricup/intellij-community,retomerz/intellij-community,apixandru/intellij-community,allotria/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,vvv1559/intellij-community,kdwink/intellij-community,kdwink/intellij-community,petteyg/intellij-community,suncycheng/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,orekyuu/intellij-community,signed/intellij-community,petteyg/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,da1z/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,slisson/intellij-community,ryano144/intellij-community,petteyg/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,slisson/intellij-community,gnuhub/intellij-community,MichaelN
edzelsky/intellij-community,retomerz/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,ryano144/intellij-community,wreckJ/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,pwoodworth/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,xfournet/intellij-community,holmes/intellij-community,fnouama/intellij-community,fnouama/intellij-community,signed/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,MichaelNedzelsky/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,signed/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,kool79/intellij-community,petteyg/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,ahb0327/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,diorcety/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,jagguli/intellij-community,retomerz/intellij-community,diorcety/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,diorcety/intellij-community,robovm/robovm-studio,da1z/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,apixandru/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,caot/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,asedunov/intellij-community,kool79/intellij-community,samthor/intellij-community,ryano144/intellij-community,petteyg/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,youdonghai/intellij-community,da1z/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,samthor/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,idea4bsd/idea4bsd,ivan-fedorov/intellij-community,petteyg/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,izonder/intellij-community,asedunov/intellij-community,nicolargo/intellij-community,izonder/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,robovm/robovm-studio,blademainer/intellij-community,semonte/intellij-community,Distrotech/intellij-community,signed/intellij-community,izonder/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,blademainer/intellij-community,holmes/intellij-community,fnouama/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,signed/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,gnuhub/intellij-community,izonder/intellij-community,clumsy/intellij-community,ivan-fedorov/intellij-community,kdwink/intellij-community,wreckJ/intellij-community,vvv1559/intellij-co
mmunity,ahb0327/intellij-community,robovm/robovm-studio,gnuhub/intellij-community,petteyg/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,samthor/intellij-community,supersven/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,apixandru/intellij-community,slisson/intellij-community,adedayo/intellij-community,da1z/intellij-community,allotria/intellij-community,adedayo/intellij-community,holmes/intellij-community,tmpgit/intellij-community,clumsy/intellij-community,adedayo/intellij-community,Distrotech/intellij-community,Lekanich/intellij-community,ibinti/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,hurricup/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,samthor/intellij-community,blademainer/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,supersven/intellij-community,gnuhub/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,ahb0327/intellij-community,ivan-fedorov/intellij-community,dslomov/intellij-community,pwoodworth/intellij-community,alphafoobar/intellij-community,Distrotech/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,tmpgit/intellij-community,lucafavatella/intellij-community,nicolargo/intellij-community,signed/intellij-community,Lekanich/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,retomerz/intellij-community,diorcety/intellij-community,xfournet/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,semonte/intellij-community,xfournet/intellij-community,robovm/robovm-studio,salguarnieri/intellij-community,ibinti/intellij-community,pwoodworth/intellij-community,fitermay/intellij-community,FHannes/intellij-community,blademainer/intellij-community,signed/intellij-community,caot/intellij-community,slisson/intellij-community,Lekanich/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,dslomov/intellij-community,Lekanich/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,da1z/intellij-community,FHannes/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,muntasirsyed/intellij-community,kool79/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,holmes/intellij-community,caot/intellij-community,allotria/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,samthor/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,slisson/intellij-community,ivan-fedorov/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,adedayo/intellij-community,Distrotech/intell
ij-community,blademainer/intellij-community,FHannes/intellij-community,amith01994/intellij-community,fnouama/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,izonder/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,dslomov/intellij-community,asedunov/intellij-community,fnouama/intellij-community,hurricup/intellij-community,fengbaicanhe/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,adedayo/intellij-community,holmes/intellij-community,youdonghai/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,caot/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,idea4bsd/idea4bsd,blademainer/intellij-community,nicolargo/intellij-community,caot/intellij-community,blademainer/intellij-community,FHannes/intellij-community,apixandru/intellij-community,petteyg/intellij-community,akosyakov/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,holmes/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,kdwink/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,Lekanich/intellij-community,kool79/intellij-community,clumsy/intellij-community,MER-GROUP/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,allotria/intellij-community,pwoodworth/intellij-community,ibinti/intellij-community,orekyuu/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,vladmm/intellij-community,petteyg/intellij-community,signed/intellij-community,semonte/intellij-community,xfournet/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,fnouama/intellij-community,supersven/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,lucafavatella/intellij-community,caot/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,allotria/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,Lekanich/intellij-community,apixandru/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,ibinti/intellij-community,amith01994/intellij-community,semonte/intellij-community,retomerz/intellij-community,holmes/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,vladmm/intellij-community,kdwink/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,vladmm/intellij-community,dslomov/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,semonte/intellij-community,xfournet/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,supersven/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,supersven/intellij-com
munity,pwoodworth/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,fitermay/intellij-community,ibinti/intellij-community,FHannes/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,retomerz/intellij-community,clumsy/intellij-community,vladmm/intellij-community,holmes/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,retomerz/intellij-community,FHannes/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,caot/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,Lekanich/intellij-community,kool79/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,kdwink/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,semonte/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,orekyuu/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,dslomov/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,supersven/intellij-community,apixandru/intellij-community,slisson/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,clumsy/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,retomerz/intellij-community,wreckJ/intellij-community,da1z/intellij-community,slisson/intellij-community,hurricup/intellij-community,robovm/robovm-studio,izonder/intellij-community,ahb0327/intellij-community,fitermay/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,robovm/robovm-studio,michaelgallacher/intellij-community,ahb0327/intellij-community,kool79/intellij-community,allotria/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,amith01994/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,akosyakov/intellij-community,alphafoobar/intellij-community,jagguli/intellij-community,nicolargo/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,clumsy/intellij-community,adedayo/intellij-community,hurricup/intellij-community,ahb0327/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,nicolargo/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,semonte/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,clumsy/intellij-community,FHannes/intellij-community,FHannes/intellij-community,vladmm/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,v
ladmm/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,kool79/intellij-community,hurricup/intellij-community,jagguli/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,salguarnieri/intellij-community,MER-GROUP/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,signed/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,MichaelNedzelsky/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,MichaelNedzelsky/intellij-community,xfournet/intellij-community,FHannes/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,jagguli/intellij-community,holmes/intellij-community,pwoodworth/intellij-community,diorcety/intellij-community,akosyakov/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,asedunov/intellij-community,caot/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,apixandru/intellij-community,robovm/robovm-studio,fengbaicanhe/intellij-community,wreckJ/intellij-community,ibinti/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,slisson/intellij-community,izonder/intellij-community,amith01994/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,caot/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,fnouama/intellij-community,TangHao1987/intellij-community,tmpgit/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,ol-loginov/intellij-community,samthor/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,slisson/intellij-community,samthor/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,blademainer/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,holmes/intellij-community,holmes/intellij-community,apixandru/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,semonte/intellij-community,vvv1559/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community
3f9eaa6ed1e69e6d40637ff9711dd72ad1113457
tmp/test_result_proxy.py
tmp/test_result_proxy.py
#!/usr/bin/env python

from circuits import Component, Event


class Test(Event):
    """Test Event"""


class Foo(Event):
    """Foo Event"""


class Bar(Event):
    """Bar Event"""


class App(Component):

    def foo(self):
        return 1

    def bar(self):
        return 2

    def test(self):
        a = self.fire(Foo())
        b = self.fire(Bar())
        return a.value + b.value


from circuits import Debugger

app = App() + Debugger()
app.start()
Test added for testing proof of concept
Test added for testing proof of concept
Python
mit
eriol/circuits,eriol/circuits,treemo/circuits,eriol/circuits,treemo/circuits,treemo/circuits,nizox/circuits
49ed704f02b686888aef81eafb1a3c57910f9d74
DataWrangling/CaseStudy/sample_file.py
DataWrangling/CaseStudy/sample_file.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import xml.etree.ElementTree as ET  # Use cElementTree or lxml if too slow
import os

OSM_FILE = "san-francisco-bay_california.osm"  # Replace this with your osm file
SAMPLE_FILE = "sample_sfb.osm"

k = 20  # Parameter: take every k-th top level element


def get_element(osm_file, tags=('node', 'way', 'relation')):
    """Yield element if it is the right type of tag

    Reference:
    http://stackoverflow.com/questions/3095434/inserting-newlines-in-xml-file-generated-via-xml-etree-elementtree-in-python
    """
    context = iter(ET.iterparse(osm_file, events=('start', 'end')))
    _, root = next(context)
    for event, elem in context:
        if event == 'end' and elem.tag in tags:
            yield elem
            root.clear()


def main():
    os.chdir('./data')
    with open(SAMPLE_FILE, 'wb') as output:
        output.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        output.write('<osm>\n ')

        # Write every kth top level element
        for i, element in enumerate(get_element(OSM_FILE)):
            if i % k == 0:
                output.write(ET.tostring(element, encoding='utf-8'))

        output.write('</osm>')
Add a script which splits your region into a smaller sample
feat: Add a script which splits your region into a smaller sample
Python
mit
aguijarro/DataSciencePython
38f43f5555a799afbec8adb2fc09e895a3c71637
strip-html.py
strip-html.py
#!/usr/bin/env python

import re

import srt
import utils


def strip_html_from_subs(subtitles):
    for subtitle in subtitles:
        subtitle_lines = subtitle.content.splitlines()
        stripped_subtitle_lines = (
            re.sub('<[^<]+?>', '', line) for line in subtitle_lines
        )
        subtitle.content = '\n'.join(stripped_subtitle_lines)
        yield subtitle


def main():
    args = utils.basic_parser().parse_args()
    subtitles_in = srt.parse_file(args.input)
    stripped_subs = strip_html_from_subs(subtitles_in)
    srt.compose_file(stripped_subs, args.output)


if __name__ == '__main__':
    main()
Add utility to strip HTML
Add utility to strip HTML
Python
mit
cdown/srt
6749e116f47b9307571e016762b3ff918445ba9e
examples/update_status.py
examples/update_status.py
import tweepy

consumer_key = ""
consumer_secret = ""
access_token = ""
access_token_secret = ""

auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)

api = tweepy.API(auth)

# Tweet / Update Status

# The app and the corresponding credentials must have the Write permission

# Check the App permissions section of the Settings tab of your app, under the
# Twitter Developer Portal Projects & Apps page at
# https://developer.twitter.com/en/portal/projects-and-apps

# Make sure to reauthorize your app / regenerate your access token and secret
# after setting the Write permission

api.update_status("This Tweet was Tweeted using Tweepy!")
Add Tweet / update status example
Add Tweet / update status example
Python
mit
tweepy/tweepy,svven/tweepy
c6d2a791a9984855dbaad026e741723ad129c137
gsw/utilities/mat2npz.py
gsw/utilities/mat2npz.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# mat2npz.py
#
# purpose:  Convert matlab file from TEOS-10 group to a npz file
# author:   Filipe P. A. Fernandes
# e-mail:   ocefpaf@gmail
# web:      http://ocefpaf.tiddlyspot.com/
# created:  06-Jun-2011
# modified: Wed 02 May 2012 04:29:32 PM EDT
#
# obs:
#

import numpy as np

# FIXME: To use the original matlab file I must use pytables.
import scipy.io as sio

data_ver = 'v3_0'
gsw_data = sio.loadmat('gsw_data_' + data_ver + '.mat', squeeze_me=True)

# Save compare values in a separate file.
gsw_cv = gsw_data['gsw_cv']
del gsw_data['gsw_cv']

cv_vars = {}
for name in gsw_cv.dtype.names:
    var = np.atleast_1d(gsw_cv[name])[0]
    cv_vars.update({name: var})

# Check values.
np.savez("gsw_cv_" + data_ver, **cv_vars)

# Delta SA Atlas.
ref_table = {}
for k in gsw_data:
    if '__' not in k:
        if 'deltaSA_ref' in k:
            name = 'delta_SA_ref'
        else:
            name = k
        var = np.atleast_1d(gsw_data[k])
        ref_table.update({name: var})

np.savez("gsw_data_" + data_ver, **ref_table)
Convert the data from a Matlab -v6 file to numpy "npz".
Convert the data from a Matlab -v6 file to numpy "npz".
Python
mit
castelao/python-gsw,lukecampbell/python-gsw,rabernat/python-gsw,TEOS-10/python-gsw,ocefpaf/python-gsw
23b80f5a5bd5f418e8da229f39d9912b0b4e8e77
git-lang-guesser/guesser.py
git-lang-guesser/guesser.py
import argparse
import http.client
import pprint

from . import exceptions
from . import git_requester
from . import guess_lang


def main():
    parser = argparse.ArgumentParser("git-lang-guesser")
    parser.add_argument("username")
    parser.add_argument("--list-all", action="store_true")
    args = parser.parse_args()

    username = args.username
    list_all = args.list_all

    try:
        repos = git_requester.get_public_repos_for_user(username)
    except exceptions.RequestFailed as exc:
        if exc.response.status_code == http.client.NOT_FOUND:
            print("User {0} not found".format(username))
            exit(1)
        else:
            print("HTTP error code {0}: {1}".format(exc.response.status_code,
                                                    exc.response.text))
            exit(1)
        repos = []  # silence PyCharm warning

    if not repos:
        print("User does not have any public repos")
        exit(1)

    if list_all:
        counter = guess_lang.count_languages(repos, filter_none=False)
        pprint.pprint(dict(counter))
    else:
        favourite = guess_lang.guess_favourite(repos)
        print(favourite)


if __name__ == "__main__":
    main()
Add a main function with top-level logic
Add a main function with top-level logic This is now at the point where I'd give it to a developer for in-house usage, however before being "done" I need to check the edge cases.
Python
mit
robbie-c/git-lang-guesser
0ea33e658cdbf0605ee4a2f28a1e0cd24a57b608
examples/ags_rockstar.py
examples/ags_rockstar.py
from rockstar import RockStar

ags_code = 'Display("Hello, world!");'
rock_it_bro = RockStar(days=777, file_name='helloworld.asc', code=ags_code)
rock_it_bro.make_me_a_rockstar()
Add AGS (Adventure Game Studio) example
Add AGS (Adventure Game Studio) example
Python
mit
yask123/rockstar,varunparkhe/rockstar,monsterwater/rockstar,gokaygurcan/rockstar,haosdent/rockstar,jrajath94/RockStar,ActuallyACat/rockstar,Endika/rockstar,jehb/rockstar,avinassh/rockstar
7b527400c162ad6810f938c38e06fdb68e488467
multiplication_table.py
multiplication_table.py
#! /usr/bin/env python
# multiplication_table.py -- Creates a multiplication table in excel

import sys
import openpyxl
from openpyxl.styles import Font

# A program that takes a number N from the command line
# and creates an NxN multiplication table in an Excel spreadsheet.

try:
    usr_input = int(sys.argv[1])
except (IndexError, ValueError):
    print "Usage: multiplication_table.py [number]"
    print "[number] means an integer only."
    sys.exit()

# Create a workbook
work_book = openpyxl.Workbook()
# Create worksheet
sheet = work_book.active

# Create header row
sheet.cell(row=1, column=1).value = ''
for header_column in range(1, usr_input + 1):
    sheet.cell(row=1, column=header_column + 1).value = header_column

# Create side column
for side_column in range(1, usr_input + 1):
    sheet.cell(row=side_column + 1, column=1).value = side_column

# Make header row and side column bold
bold12font = Font(size=12, bold=True)
for row in range(1, usr_input + 1):
    sheet.cell(row=row + 1, column=1).font = bold12font
# cell A1 is blank
for column in range(1, usr_input + 1):
    sheet.cell(row=1, column=column + 1).font = bold12font

# Create multiplication-table rows and columns.
for row_num in range(1, usr_input + 1):
    for column_ in range(1, usr_input + 1):
        sheet.cell(
            row=row_num + 1, column=column_ + 1).value = column_ * row_num

work_book.save('mult_table.xlsx')
Create Multiplication table in Excel
Create Multiplication table in Excel
Python
mit
terrameijar/useful_scripts
39147b41147cb4551e60d1356be1795822ae7d72
migrations/versions/12113d3fcab1_.py
migrations/versions/12113d3fcab1_.py
"""empty message Revision ID: 12113d3fcab1 Revises: 85ee128c8304 Create Date: 2016-05-26 16:35:36.442452 """ # revision identifiers, used by Alembic. revision = '12113d3fcab1' down_revision = '85ee128c8304' from datetime import datetime from alembic import op import sqlalchemy as sa from sqlalchemy.sql import text comment="placeholder - entry predates audit" def upgrade(): ### commands auto generated by Alembic - please adjust! ### # Backfill any existing observations with a placeholder audit conn = op.get_bind() result = conn.execute(text("""SELECT id FROM users WHERE email = '[email protected]'""")) user_id = result.fetchone()[0] now = datetime.utcnow() conn.execute(text("""INSERT INTO audit (user_id, timestamp, version, comment) VALUES (:user_id, :timestamp, :version, :comment)"""), user_id=user_id, timestamp=now, version='16.5.16', comment=comment) result = conn.execute(text("""SELECT id FROM audit WHERE comment = :comment"""), comment=comment) audit_id = result.fetchone()[0] result = conn.execute(text("""SELECT id FROM observations WHERE audit_id IS NULL""")) for item in result.fetchall(): conn.execute( text("""UPDATE observations SET audit_id = :audit_id WHERE id = :observation_id"""), audit_id=audit_id, observation_id=item[0]) # op.alter_column('observations', 'audit_id', existing_type=sa.INTEGER(), nullable=False) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.alter_column('observations', 'audit_id', existing_type=sa.INTEGER(), nullable=True) conn = op.get_bind() result = conn.execute(text("""SELECT id FROM audit WHERE comment = :comment"""), comment=comment) audit_id = result.fetchone()[0] conn.execute(text("""UPDATE observations SET audit_id = NULL WHERE audit_id = :audit_id"""), audit_id=audit_id) ### end Alembic commands ###
Upgrade to backfill existing observations with a dummy audit row so we can require audits for all observations.
Upgrade to backfill existing observations with a dummy audit row so we can require audits for all observations.
Python
bsd-3-clause
uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal,uwcirg/true_nth_usa_portal
ede47b64d892878e21178aeda2a16f24db59567b
setup.py
setup.py
"""Python Bindings for Psynteract. """ # This is built on the demo package as described at # https://packaging.python.org/en/latest/distributing.html # Always prefer setuptools over distutils from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() setup( name='Psynteract', # Versions should comply with PEP440. For a discussion on single-sourcing # the version across setup.py and the project code, see # https://packaging.python.org/en/latest/single_source_version.html version='0.6.1', description='Psynteract Python bindings', long_description=long_description, # The project's main homepage. url='https://github.com/psynteract/psynteract-py', # Author details author='Felix Henninger & Pascal Kieslich', author_email='[email protected]', # Choose your license license='Apache 2.0', # See https://pypi.python.org/pypi?%3Aaction=list_classifiers classifiers=[ # How mature is this project? Common values are # 3 - Alpha # 4 - Beta # 5 - Production/Stable 'Development Status :: 4 - Beta', # Indicate who your project is intended for 'Intended Audience :: Developers', # Pick your license as you wish (should match "license" above) 'License :: OSI Approved :: Apache 2.0', # Specify the Python versions you support here. In particular, ensure # that you indicate whether you support Python 2, Python 3 or both. 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', ], # List run-time dependencies here. These will be installed by pip when # your project is installed. For an analysis of "install_requires" vs pip's # requirements files see: # https://packaging.python.org/en/latest/requirements.html install_requires=['requests', 'pycouchdb'], )
Add tentative support for installation via pip
Add tentative support for installation via pip
Python
apache-2.0
psynteract/psynteract-py
9c762d01b6dafd48d227c0ef927b844a257ff1b9
joommf/energies/test_demag.py
joommf/energies/test_demag.py
from demag import Demag


def test_demag_mif():
    demag = Demag()
    mif_string = demag.get_mif()
    assert 'Specify Oxs_Demag {}' in mif_string


def test_demag_formatting():
    demag = Demag()
    mif_string = demag.get_mif()
    assert mif_string[0] == 'S'
    assert mif_string[-1] == '\n'
    assert mif_string[-2] == '\n'
from demag import Demag


def test_demag_mif():
    demag = Demag()
    mif_string = demag.get_mif()
    assert 'Specify Oxs_Demag {}' in mif_string
    assert demag.__repr__() == "This is the energy class of type Demag"


def test_demag_formatting():
    demag = Demag()
    mif_string = demag.get_mif()
    assert mif_string[0] == 'S'
    assert mif_string[-1] == '\n'
    assert mif_string[-2] == '\n'
Increase test coverage for energy classes
Increase test coverage for energy classes
Python
bsd-2-clause
ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python
bd303aa652cd071a612060e7c0e6b0b11dc91018
php4dvd/test_php4dvd.py
php4dvd/test_php4dvd.py
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest


class TestPhp4dvd(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(10)
        self.base_url = "http://hub.wart.ru/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def test_number3(self):
        driver = self.driver
        driver.get(self.base_url + "php4dvd/")
        driver.find_element_by_id("username").clear()
        driver.find_element_by_id("username").send_keys("admin")
        driver.find_element_by_name("password").clear()
        driver.find_element_by_name("password").send_keys("admin")
        driver.find_element_by_name("submit").click()

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException, e:
            return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally:
            self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)


if __name__ == "__main__":
    unittest.main()
Add a first test script.
Add a first test script.
Python
bsd-2-clause
bsamorodov/selenium-py-training-samorodov
01428c6eca0351ad6a1767ac39969408c961aec1
examples/push_tx.py
examples/push_tx.py
from bitcoinrpc.authproxy import AuthServiceProxy

#################################################
# Sending a transaction to the network with RPC #
#################################################

# ---------------------------------------------------------------------------------------------------------------------
# The following piece of code serves as an example of how to send a transaction created with any of the pieces of
# code of the other examples to the P2P network. The code assumes you have access to a Bitcoin node with RPC
# enabled. RPC authentication details have to be provided in the proper variables.
#
# Alternatively, you can also push the transaction to the network with a 3rd party service.
#
# Note that this code has an additional dependency: bitcoinrpc
# This dependency is not required by bitcoin_tools, since it is only used in this example, and most users
# will not need to use it.
# ---------------------------------------------------------------------------------------------------------------------

# Set RPC configuration
rpc_user = ""  # set rpc user
rpc_password = ""  # set rpc password
rpc_server = ""  # set rpc server
rpc_port = 18332

# Test connection
rpc_connection = AuthServiceProxy("http://%s:%s@%s:%s" % (rpc_user, rpc_password, rpc_server, rpc_port))
get_info = rpc_connection.getinfo()
print get_info

# Send transaction
# raw_transaction = ...
# rpc_connection.sendrawtransaction(raw_transaction)
Add an example to push raw transaction to the network via RPC
Add an example to push raw transaction to the network via RPC
Python
bsd-3-clause
sr-gi/bitcoin_tools
51965dc3b26abfd0f9fb730c3076ee16b13612bc
dadi/__init__.py
dadi/__init__.py
""" For examples of dadi's usage, see the examples directory in the source distribution. Documentation of all methods can be found in doc/api/index.html of the source distribution. """ import logging logging.basicConfig() import Demographics1D import Demographics2D import Inference import Integration import Misc import Numerics import PhiManip # Protect import of Plotting in case matplotlib not installed. try: import Plotting except ImportError: pass # We do it this way so it's easier to reload. import Spectrum_mod Spectrum = Spectrum_mod.Spectrum try: # This is to try and ensure we have a nice __SVNVERSION__ attribute, so # when we get bug reports, we know what version they were using. The # svnversion file is created by setup.py. import os _directory = os.path.dirname(Integration.__file__) _svn_file = os.path.join(_directory, 'svnversion') __SVNVERSION__ = file(_svn_file).read().strip() except: __SVNVERSION__ = 'Unknown'
""" For examples of dadi's usage, see the examples directory in the source distribution. Documentation of all methods can be found in doc/api/index.html of the source distribution. """ import logging logging.basicConfig() import Demographics1D import Demographics2D import Inference import Integration import Misc import Numerics import PhiManip # Protect import of Plotting in case matplotlib not installed. try: import Plotting except ImportError: pass # We do it this way so it's easier to reload. import Spectrum_mod Spectrum = Spectrum_mod.Spectrum try: # This is to try and ensure we have a nice __SVNVERSION__ attribute, so # when we get bug reports, we know what version they were using. The # svnversion file is created by setup.py. import os _directory = os.path.dirname(Integration.__file__) _svn_file = os.path.join(_directory, 'svnversion') __SVNVERSION__ = file(_svn_file).read().strip() except: __SVNVERSION__ = 'Unknown' # When doing arithmetic with Spectrum objects (which are masked arrays), we # often have masked values which generate annoying arithmetic warnings. Here # we tell numpy to ignore such warnings. This puts greater onus on the user to # check results, but for our use case I think it's the better default. import numpy numpy.seterr(all='ignore')
Hide spurious numpy warnings about divide by zeros and nans.
Hide spurious numpy warnings about divide by zeros and nans. git-svn-id: 4c7b13231a96299fde701bb5dec4bd2aaf383fc6@429 979d6bd5-6d4d-0410-bece-f567c23bd345
Python
bsd-3-clause
yangjl/dadi,ChenHsiang/dadi,cheese1213/dadi,ChenHsiang/dadi,RyanGutenkunst/dadi,cheese1213/dadi,niuhuifei/dadi,paulirish/dadi,beni55/dadi,yangjl/dadi,RyanGutenkunst/dadi,niuhuifei/dadi,beni55/dadi,paulirish/dadi
01db1d6f591858b52b6a75ac72b5bdcb49aca708
python/extract_logs_from_nextflow.py
python/extract_logs_from_nextflow.py
#!/usr/bin/env python
import os
import sys
import pandas as pd
from glob import glob
import subprocess


def main():
    if len(sys.argv) != 3:
        print("Usage: extract_logs_from_nextflow.py <nextflow_trace_file> <task_tag>:<stderr|stdout|both>", file=sys.stderr)

    # Load Nextflow trace
    nextflow_trace = pd.read_csv(sys.argv[1], sep="\t")

    # Get task you are interested in and whether you want stderr/stdout/both
    # Format <task>:<stderr|stdout|both>
    task_id, output_type = sys.argv[2].split(":")

    # Create dirs
    out_dir = "nextflow_logs/{}".format(task_id)
    os.makedirs(out_dir)

    # Subset tasks of interest
    my_tasks = list(nextflow_trace[
        (nextflow_trace.process == task_id)
    ][["hash", "tag", "status"]].itertuples(index=False, name=None))

    if len(my_tasks) == 0:
        print("No tasks were found", file=sys.stderr)

    # Iterate through tasks
    for t_hash, t_tag, t_status in my_tasks:
        task_dir = get_hash_directory(t_hash)
        if not task_dir:
            print("Error: work/{}* directory was not found".format(t_hash))
            continue
        print("{}: {}".format(t_tag, task_dir))

        out_prefix = "{}_{}_{}".format(t_tag, t_status[0], t_hash.replace("/", "_"))
        if output_type != "stderr":
            copy_file_into_dir("{}/.command.out".format(task_dir), out_dir, prefix=out_prefix)
        if output_type != "stdout":
            copy_file_into_dir("{}/.command.err".format(task_dir), out_dir, prefix=out_prefix)


# Helping functions
def get_hash_directory(h):
    my_task_dir = None
    matching_dirs = glob("work/{}*".format(h))
    if len(matching_dirs) == 1:
        my_task_dir = matching_dirs[0]
    return my_task_dir


def copy_file_into_dir(my_file, my_dir, prefix=""):
    print("\t{}".format(my_file))
    subprocess.check_call(["cp", my_file, "{}/{}.{}".format(my_dir, prefix, my_file[-3:])])


if __name__ == '__main__':
    main()
Add script to extract stderr and stdout from nextflow
Add script to extract stderr and stdout from nextflow
Python
apache-2.0
maubarsom/biotico-tools,maubarsom/biotico-tools,maubarsom/biotico-tools,maubarsom/biotico-tools,maubarsom/biotico-tools
9be2e5927487dd1400d635d40f85192371a0d1ba
tests/test_eight_schools_large.py
tests/test_eight_schools_large.py
"""Test a "large" version of the schools model. Introduced in response to a macOS bug that only triggered when a larger number of parameters were used. """ import pytest import stan program_code = """ data { int<lower=0> J; // number of schools real y[J]; // estimated treatment effects real<lower=0> sigma[J]; // s.e. of effect estimates } parameters { real mu; real<lower=0> tau; real eta[J]; } transformed parameters { real theta[J]; for (j in 1:J) theta[j] = mu + tau * eta[j]; } model { target += normal_lpdf(eta | 0, 1); target += normal_lpdf(y | theta, sigma); } """ schools_data = { "J": 8 * 20, "y": (28, 8, -3, 7, -1, 1, 18, 12) * 20, "sigma": (15, 10, 16, 11, 9, 11, 10, 18) * 20, } @pytest.fixture(scope="module") def posterior(): """Build (compile) a simple model.""" return stan.build(program_code, data=schools_data) def test_eight_schools_large_sample(posterior): num_chains, num_samples = 2, 200 fit = posterior.sample(num_chains=num_chains, num_samples=num_samples, num_warmup=num_samples) num_flat_params = schools_data["J"] * 2 + 2 assert fit.values.shape == (len(fit.sample_and_sampler_param_names) + num_flat_params, num_samples, num_chains,) df = fit.to_frame() assert "eta.1" in df.columns assert len(df["eta.1"]) == num_samples * num_chains assert fit["eta"].shape == (schools_data["J"], num_chains * num_samples)
Verify a model with many parameters works
test: Verify a model with many parameters works Check that a schools model with many parameters (160) works. Prompted by an unusual macOS bug in late 2020 (#163).
Python
isc
stan-dev/pystan,stan-dev/pystan
f5d9ce7ffef618e00252cd65775aef5c42404f8f
DataTag/management/commands/collecttags.py
DataTag/management/commands/collecttags.py
# -*- coding: utf-8 -*-
# vim: set ts=

from __future__ import unicode_literals

from django.conf import settings
from django.core.management.base import BaseCommand

import os
import yaml


class Command(BaseCommand):
    args = None
    help = 'Collect the tags from the sub-directories'
    option_list = BaseCommand.option_list

    def handle(self, *args, **kwargs):
        # Find the new tags
        used_tags = set()
        for root, dirs, files in os.walk(settings.MEDIA_ROOT, followlinks=True):
            # Skip the root configuration file
            if root == settings.MEDIA_ROOT:
                continue
            # Parse the local configuration file
            if '.DataTag.yaml' in files:
                with open(os.path.join(root, '.DataTag.yaml'), 'r') as local_conf:
                    local_conf = yaml.load(local_conf)
                    for key in local_conf:
                        for tag in local_conf[key]['tags']:
                            used_tags.add(tag)

        # Load the tags from the root configuration
        root_tags = set()
        try:
            with open(os.path.join(settings.MEDIA_ROOT, '.DataTag.yaml'), 'r') as root_conf:
                root_conf = yaml.load(root_conf)
                for tag in root_conf:
                    root_tags.add(tag)
        except IOError:
            pass

        # Add the tags that are missing in the root_tags
        missing_tags = used_tags - root_tags
        if missing_tags:
            print("Adding missing tags")
            print("===================")
            with open(os.path.join(settings.MEDIA_ROOT, '.DataTag.yaml'), 'a+') as root_conf:
                for tag in missing_tags:
                    print(" - %s" % (tag))
                    root_conf.write("%s:\n" % (tag))
        else:
            print("No missing tags")
Add a helper to collect all tags
Add a helper to collect all tags This helper will walk through all sub-directories to find the tags that are missing in the root configuration.
Python
agpl-3.0
ivoire/DataTag,ivoire/DataTag,ivoire/DataTag
4493b6d42d025ac79f6e89c40ed3b583802a2330
2048/test_value_2048.py
2048/test_value_2048.py
from __future__ import print_function
import numpy as np
import math
np.random.seed(1337)  # for reproducibility

from keras.datasets import mnist
from keras.models import Sequential, model_from_json
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.convolutional import Convolution2D, MaxPooling2D
from keras.utils import np_utils
from sklearn.metrics import *
import csv
import sys

nb_train_samples = 48355


def load_data(csvf):
    X = np.zeros((nb_train_samples, 1, 4, 4), dtype="uint16")
    Y = []
    i = 0
    with open(csvf, 'rb') as f:
        for l in csv.reader(f):
            if len(l) < 3:
                continue
            Y.append(int(l[0]))
            X[i, 0, :, :] = np.reshape([int(j) for j in l[2:]], (4, 4))
            i += 1
    Y = np.reshape(Y, (len(Y), 1))
    return (X, Y)


# the data, shuffled and split between train and test sets
(X_train, y_train) = load_data(sys.argv[1])
ll = np.vectorize(lambda x: math.log(x + 1))
#X_train = X_train.reshape(X_train.shape[0], 1, 4, 4)
X_train = ll(X_train.astype('float32'))
print('X_train shape:', X_train.shape)
print(X_train.shape[0], 'train samples')

# convert class vectors to binary class matrices
#Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_train = y_train.astype('float32')

model = model_from_json(open('value_2048_model.json', 'rb').read())
model.load_weights('value_2048_weights.h5')

print(X_train)
y_predict = model.predict(X_train, batch_size=1)
print(y_predict)
print(mean_absolute_error(y_train, y_predict))
print(mean_squared_error(y_train, y_predict))
Test Keras DNN for 2048 value
Test Keras DNN for 2048 value
Python
mit
choupi/NDHUDLWorkshop
fbd49abfb5a3d32f30de0e85c70f08c873813755
cpro/migrations/0014_auto_20170129_2236.py
cpro/migrations/0014_auto_20170129_2236.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import cpro.models


class Migration(migrations.Migration):

    dependencies = [
        ('cpro', '0013_auto_20161025_0249'),
    ]

    operations = [
        migrations.AddField(
            model_name='card',
            name='art_hd',
            field=models.ImageField(upload_to=cpro.models.uploadItem(b'c/art_hd'), null=True, verbose_name='Art (HD)'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='card',
            name='art_hd_awakened',
            field=models.ImageField(upload_to=cpro.models.uploadItem(b'c/art_hd/a'), null=True, verbose_name='Art_Hd (Awakened)'),
            preserve_default=True,
        ),
    ]
Store HD version of the art images (made with waifux2 by @meiponnyan on Twitter), use it when it exists on the homepage and when using the download link
Store HD version of the art images (made with waifux2 by @meiponnyan on Twitter), use it when it exists on the homepage and when using the download link
Python
apache-2.0
SchoolIdolTomodachi/CinderellaProducers,SchoolIdolTomodachi/CinderellaProducers
d1e1e682122a66e76c70052a5fec3e6d9a00ab46
babel_util/scripts/log_to_json.py
babel_util/scripts/log_to_json.py
import json
from parsers.infomap_log import InfomapLog

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser(description="Creates JSON file from Infomap Logs")
    parser.add_argument('outfile')
    parser.add_argument('infiles', nargs='+')
    args = parser.parse_args()

    logs = []
    for filename in args.infiles:
        with open(filename) as f:
            parser = InfomapLog(f)
            logs.append(parser.parse())

    with open(args.outfile, 'w') as f:
        json.dump(logs, f)
Convert infomap logs to JSON
Convert infomap logs to JSON
Python
agpl-3.0
jevinw/rec_utilities,jevinw/rec_utilities
22c96540eb5cbf33d9ea868458769031dc57102a
lib/logSupport.py
lib/logSupport.py
#
# Description: log support module
#
# Author:
#   Igor Sfiligoi (Oct 25th 2006)
#

import os
import time


# this class can be used instead of a file for writing
class DayLogFile:
    def __init__(self, base_fname):
        self.base_fname = base_fname
        return

    def close(self):
        return  # nothing to do, just a placeholder

    def write(self, msg):
        now = time.time()
        fname = self.get_fname(now)
        try:
            fd = open(fname, "a")
        except:
            self.write_on_exception("Cannot open %s" % fname, msg)
        try:
            try:
                fd.write(self.format_msg(now, msg))
            except:
                self.write_on_exception("Cannot write to %s" % fname, msg)
        finally:
            fd.close()

        return

    ##########################
    # these can be customized
    ##########################

    def write_on_exception(self, exception_msg, msg):
        return  # do nothing

    def get_fname(self, timestamp):
        return "%s.%s.log" % (self.base_fname, time.strftime("%Y%m%d", time.localtime(timestamp)))

    def format_msg(self, timestamp, msg):
        return "[%s %s] %s" % (self.format_time(timestamp), os.getpid(), msg)

    def format_time(self, timestamp):
        return "%li" % timestamp
Support library for managing log files
Support library for managing log files
Python
bsd-3-clause
holzman/glideinwms-old,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS,bbockelm/glideinWMS,bbockelm/glideinWMS,bbockelm/glideinWMS
70fa6d750fbbdae405b0012b9aa53f066008ba5f
setup.py
setup.py
# coding=utf-8
from setuptools import setup, find_packages

setup(
    name="tinydb",
    version="2.1.0",
    packages=find_packages(),

    # development metadata
    zip_safe=True,

    # metadata for upload to PyPI
    author="Markus Siemens",
    author_email="[email protected]",
    description="TinyDB is a tiny, document oriented database optimized for "
                "your happiness :)",
    license="MIT",
    keywords="database nosql",
    url="https://github.com/msiemens/tinydb",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: MIT License",
        "Topic :: Database",
        "Topic :: Database :: Database Engines/Servers",
        "Topic :: Utilities",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Operating System :: OS Independent"
    ],

    long_description=open('README.rst', 'r').read(),
)
# coding=utf-8
from setuptools import setup, find_packages
from codecs import open

setup(
    name="tinydb",
    version="2.1.0",
    packages=find_packages(),

    # development metadata
    zip_safe=True,

    # metadata for upload to PyPI
    author="Markus Siemens",
    author_email="[email protected]",
    description="TinyDB is a tiny, document oriented database optimized for "
                "your happiness :)",
    license="MIT",
    keywords="database nosql",
    url="https://github.com/msiemens/tinydb",
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Intended Audience :: System Administrators",
        "License :: OSI Approved :: MIT License",
        "Topic :: Database",
        "Topic :: Database :: Database Engines/Servers",
        "Topic :: Utilities",
        "Programming Language :: Python :: 2.6",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: PyPy",
        "Operating System :: OS Independent"
    ],

    long_description=open('README.rst', encoding='utf-8').read(),
)
Fix decode error of README.rst during installation
Fix decode error of README.rst during installation
Python
mit
ivankravets/tinydb,Callwoola/tinydb,raquel-ucl/tinydb,cagnosolutions/tinydb,msiemens/tinydb
8c2aa6409358b4b5830e9a6242194030307f2aa6
makemigrations.py
makemigrations.py
#!/usr/bin/env python
import decimal
import os
import sys

import django

from django.conf import settings


DEFAULT_SETTINGS = dict(
    DEBUG=True,
    USE_TZ=True,
    TIME_ZONE='UTC',
    DATABASES={
        "default": {
            "ENGINE": "django.db.backends.sqlite3",
        }
    },
    MIDDLEWARE_CLASSES=[
        "django.contrib.sessions.middleware.SessionMiddleware",
        "django.contrib.auth.middleware.AuthenticationMiddleware",
        "django.contrib.messages.middleware.MessageMiddleware"
    ],
    ROOT_URLCONF="pinax.stripe.urls",
    INSTALLED_APPS=[
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sessions",
        "django.contrib.sites",
        "django_forms_bootstrap",
        "jsonfield",
        "pinax.stripe",
    ],
    SITE_ID=1,
    PINAX_STRIPE_PUBLIC_KEY="",
    PINAX_STRIPE_SECRET_KEY="",
    PINAX_STRIPE_PLANS={
        "free": {
            "name": "Free Plan"
        },
        "entry": {
            "stripe_plan_id": "entry-monthly",
            "name": "Entry ($9.54/month)",
            "description": "The entry-level monthly subscription",
            "price": 9.54,
            "interval": "month",
            "currency": "usd"
        },
        "pro": {
            "stripe_plan_id": "pro-monthly",
            "name": "Pro ($19.99/month)",
            "description": "The pro-level monthly subscription",
            "price": 19.99,
            "interval": "month",
            "currency": "usd"
        },
        "premium": {
            "stripe_plan_id": "premium-monthly",
            "name": "Gold ($59.99/month)",
            "description": "The premium-level monthly subscription",
            "price": decimal.Decimal("59.99"),
            "interval": "month",
            "currency": "usd"
        }
    },
    PINAX_STRIPE_SUBSCRIPTION_REQUIRED_EXCEPTION_URLS=["pinax_stripe_subscribe"],
    PINAX_STRIPE_SUBSCRIPTION_REQUIRED_REDIRECT="pinax_stripe_subscribe",
    PINAX_STRIPE_HOOKSET="pinax.stripe.tests.hooks.TestHookSet"
)


def run(*args):
    if not settings.configured:
        settings.configure(**DEFAULT_SETTINGS)

    django.setup()

    parent = os.path.dirname(os.path.abspath(__file__))
    sys.path.insert(0, parent)

    django.core.management.call_command(
        "makemigrations",
        "pinax_stripe",
        *args
    )


if __name__ == "__main__":
    run(*sys.argv[1:])
Add a helper script for creating migrations
Add a helper script for creating migrations
Python
mit
pinax/django-stripe-payments
3dce48f228caec9b317f263af3c5e7a71372f0be
project/members/tests/test_application.py
project/members/tests/test_application.py
# -*- coding: utf-8 -*-
import pytest

from members.tests.fixtures.memberlikes import MembershipApplicationFactory
from members.tests.fixtures.types import MemberTypeFactory
from members.models import Member


@pytest.mark.django_db
def test_application_approve():
    mtypes = [MemberTypeFactory(label='Normal member')]
    application = MembershipApplicationFactory()
    email = application.email
    application.approve(set_mtypes=mtypes)
    Member.objects.get(email=email)
# -*- coding: utf-8 -*-
import pytest

from django.core.urlresolvers import reverse

from members.tests.fixtures.memberlikes import MembershipApplicationFactory
from members.tests.fixtures.types import MemberTypeFactory
from members.models import Member


@pytest.mark.django_db
def test_application_approve():
    mtypes = [MemberTypeFactory(label='Normal member')]
    application = MembershipApplicationFactory()
    email = application.email
    application.approve(set_mtypes=mtypes)
    Member.objects.get(email=email)


@pytest.mark.django_db
def test_get_application_form(client):
    response = client.get(reverse('members-apply'))
    assert b'Apply for membership' in response.content
Test that we can get the application form
Test that we can get the application form
Python
mit
hacklab-fi/asylum,HelsinkiHacklab/asylum,rambo/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,hacklab-fi/asylum,rambo/asylum,rambo/asylum,rambo/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,jautero/asylum,jautero/asylum,HelsinkiHacklab/asylum
a094b0978034869a32be1c541a4d396843819cfe
project/management/commands/generatesecretkey.py
project/management/commands/generatesecretkey.py
from django.core.management.templates import BaseCommand
from django.utils.crypto import get_random_string

import fileinput

from django.conf import settings


class Command(BaseCommand):
    help = ("Replaces the SECRET_KEY VALUE in settings.py with a new one.")

    def handle(self, *args, **options):
        # Create a random SECRET_KEY hash to put it in the main settings.
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        secret_key = get_random_string(50, chars)

        file_path = "{}/settings.py".format(settings.PROJECT_DIR)

        for line in fileinput.input(file_path, inplace=True):
            if line.startswith("SECRET_KEY = "):
                print("SECRET_KEY = '{}'".format(secret_key))
            else:
                print(line, end='')
Add management command to generate a random secret key
Add management command to generate a random secret key
Python
mit
Angoreher/xcero,Angoreher/xcero,Angoreher/xcero,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3,Angoreher/xcero,magnet-cl/django-project-template-py3,magnet-cl/django-project-template-py3
7941202bf8c7f3c553e768405b72b0b5b499dee6
tfidf.py
tfidf.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import nlp.tokenizer
import nlp.TextVectorizer

import sys

data = []
filenames = sys.argv[1:]
for filename in filenames:
    t = open(filename).read().decode('utf-8')
    data.append(t)
data = "\n".join(data)

print 'word,count'
v = nlp.TextVectorizer.TfVectorizer(tokenizer=nlp.tokenizer.tokenizeJp, texts=data)
tf = v.vectorize()
for t, f in sorted(tf.items(), key=lambda x: -x[1]):
    print "%s,%d" % (t.encode('utf-8'), f)
Change output format to csv
Change output format to csv
Python
mit
otknoy/ExTAT,otknoy/ExTAT
067fa81cf56b7f8ab29060ec8a1409c0c07c797e
greedy/Job_sequencing_problem/python/jobsequencing.py
greedy/Job_sequencing_problem/python/jobsequencing.py
class Job:
    def __init__(self, name, deadline, duration, profit):
        self.name = name
        self.deadline = deadline
        self.duration = duration
        self.profit = profit

    def __lt__(self, other):
        return self.profit < other.profit

    def __str__(self):
        return self.name


# Greedy algorithm
def JobSequencing(jobs, time_slots):
    jobs = sorted(jobs)
    sequence = [jobs.pop()]
    while len(jobs) > 0 and sum([j.duration for j in sequence]) < time_slots:
        job = jobs.pop()
        finish_time = sum([j.duration for j in sequence]) + job.duration
        if finish_time < time_slots and finish_time < job.deadline:
            sequence.append(job)
    return sequence


# Example execution
# jobs = [Job('a', 3, 1, 2), Job('b', 2, 2, 5), Job('c', 4, 1, 3)]
# print([str(j) for j in JobSequencing(jobs, 3)])
Add job sequencing greedy Python
Add job sequencing greedy Python
Python
cc0-1.0
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
f4b5dafbb9a1a9af00b8981dbb442fa8f8385203
pupa/migrations/0002_auto_20150906_1458.py
pupa/migrations/0002_auto_20150906_1458.py
# -*- coding: utf-8 -*-
# Generated by Django 1.9.dev20150906080247 on 2015-09-06 14:58
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('pupa', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='importobjects',
            name='report',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='imported_objects', to='pupa.RunPlan'),
        ),
        migrations.AlterField(
            model_name='runplan',
            name='jurisdiction',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='runs', to='opencivicdata.Jurisdiction'),
        ),
        migrations.AlterField(
            model_name='scrapeobjects',
            name='report',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scraped_objects', to='pupa.ScrapeReport'),
        ),
        migrations.AlterField(
            model_name='scrapereport',
            name='plan',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='scrapers', to='pupa.RunPlan'),
        ),
    ]
Revert "Revert "add migration for new fields""
Revert "Revert "add migration for new fields"" This reverts commit 31b01725ae23624358c7da819a6c6e4c9a41b0c4.
Python
bsd-3-clause
datamade/pupa,mileswwatkins/pupa,opencivicdata/pupa,datamade/pupa,mileswwatkins/pupa,opencivicdata/pupa
587011702ffca5d02263736dcae04f31ce8c38d4
test/models/test_output_type.py
test/models/test_output_type.py
from test.base import ApiDBTestCase

from zou.app.models.output_type import OutputType

from zou.app.utils import fields


class OutputTypeTestCase(ApiDBTestCase):

    def setUp(self):
        super(OutputTypeTestCase, self).setUp()
        self.generate_data(OutputType, 3)

    def test_get_output_types(self):
        output_types = self.get("data/output-types")
        self.assertEquals(len(output_types), 3)

    def test_get_output_type(self):
        output_type = self.get_first("data/output-types")
        output_type_again = self.get(
            "data/output-types/%s" % output_type["id"])
        self.assertEquals(output_type, output_type_again)
        self.get_404("data/output-types/%s" % fields.gen_uuid())

    def test_create_output_type(self):
        data = {
            "name": "geometry",
            "short_name": "geo"
        }
        self.output_type = self.post("data/output-types", data)
        self.assertIsNotNone(self.output_type["id"])

        output_types = self.get("data/output-types")
        self.assertEquals(len(output_types), 4)

    def test_update_output_type(self):
        output_type = self.get_first("data/output-types")
        data = {
            "name": "point cache"
        }
        self.put("data/output-types/%s" % output_type["id"], data)
        output_type_again = self.get(
            "data/output-types/%s" % output_type["id"])
        self.assertEquals(data["name"], output_type_again["name"])
        self.put_404("data/output-types/%s" % fields.gen_uuid(), data)

    def test_delete_output_type(self):
        output_types = self.get("data/output-types")
        self.assertEquals(len(output_types), 3)

        output_type = output_types[0]
        self.delete("data/output-types/%s" % output_type["id"])
        output_types = self.get("data/output-types")
        self.assertEquals(len(output_types), 2)

        self.delete_404("data/output-types/%s" % fields.gen_uuid())
Add tests for output type crud routes
Add tests for output type crud routes
Python
agpl-3.0
cgwire/zou
4aef9a5bca4927db0ff7b661850de1c8d3c61274
seqseek/format_fasta.py
seqseek/format_fasta.py
import argparse


def run(path):
    print "Formatting %s" % path
    with open(path) as fasta:
        header = ''
        first_line = fasta.readline()
        if not first_line.startswith('>'):
            header = '> ' + path.split('/')[-1].split('.')[0] + '\n'
        first_line = first_line.replace('\n', '')
        clean = fasta.read().replace('\n', '')

    with open(path + '.seqseek', 'w') as formatted:
        formatted.write(header)
        formatted.write(first_line)
        formatted.write(clean)


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument("fasta_path")
    args = parser.parse_args()
    run(args.fasta_path)
Add utility for formatting fasta files for seqseek
Add utility for formatting fasta files for seqseek
Python
mit
23andMe/seqseek
531d1c868b36580a8423d71c3ee324c8669ab98b
pycelle/misc.py
pycelle/misc.py
from __future__ import division

from math import ceil, log

__all__ = [
    'base_expansion',
]


def base_expansion(n, to_base, from_base=10, zfill=None):
    """
    Converts the number `n` from base `from_base` to base `to_base`.

    Parameters
    ----------
    n : list | int
        The number to convert. If an integer, then each digit is treated as
        a value in base `from_base`. That is, 30 is broken up into [3, 0].
        Note that [3, 10] is a valid input that cannot be passed in as an
        integer, since it requires a `from_base` of at least 11.
    to_base : int
        The desired base.
    from_base : int
        The base of `n`.
    zfill : int | None
        If an integer, then the expansion is padded with zeros on the left
        until the length of the expansion is equal to `zfill`.

    Returns
    -------
    out : list
        The `to_base` expansion of `n`.

    Examples
    --------
    >>> base_expansion(31, 2)
    [1, 1, 1, 1, 1]
    >>> base_expansion(31, 3)
    [1, 0, 1, 1]
    >>> base_expansion([1, 1, 1, 1, 1], 3, from_base=2)
    [1, 0, 1, 1]
    >>> base_expansion([1, 1, 1, 1, 1], 3, from_base=10)
    [1, 2, 0, 0, 2, 0, 1, 1, 2]

    """
    # Based on: http://code.activestate.com/recipes/577939-base-expansionconversion-algorithm-python/
    try:
        len(n)
    except TypeError:
        n = map(int, str(n))

    if n == [0]:
        return n

    if max(n) >= from_base:
        raise Exception('Input `n` is not consistent with `from_base`.')

    L = len(n)
    base10 = sum([(from_base ** (L - k - 1)) * n[k] for k in range(L)])
    j = int(ceil(log(base10 + 1, to_base)))
    out = [(base10 // (to_base ** (j - p))) % to_base for p in range(1, j + 1)]
    if zfill is not None:
        out = [0] * (zfill - len(out)) + out

    return out
Add function to convert between number bases.
Add function to convert between number bases.
Python
bsd-3-clause
ComSciCtr/pycelle
628c092b2fd8b53e116e04b240f04b7508ab6118
enthought/traits/ui/wx/array_view_editor.py
enthought/traits/ui/wx/array_view_editor.py
#-------------------------------------------------------------------------------
#  Imports:
#-------------------------------------------------------------------------------

from enthought.traits.ui.ui_editors.array_view_editor \
    import _ArrayViewEditor as BaseArrayViewEditor

from ui_editor \
    import UIEditor

#-------------------------------------------------------------------------------
#  '_ArrayViewEditor' class:
#-------------------------------------------------------------------------------

class _ArrayViewEditor ( BaseArrayViewEditor, UIEditor ):
    pass

#--EOF-------------------------------------------------------------------------
Add wx implementation of the ArrayViewEditor.
Add wx implementation of the ArrayViewEditor.
Python
bsd-3-clause
geggo/pyface,brett-patterson/pyface,geggo/pyface,pankajp/pyface
cf160d8259fb98ef4003283063529be8c4c087fb
python/helpers/pycharm_generator_utils/test/data/SkeletonCaching/binaries/compile_binaries.py
python/helpers/pycharm_generator_utils/test/data/SkeletonCaching/binaries/compile_binaries.py
import os

from Cython.Build.Cythonize import main as cythonize

_binaries_dir = os.path.dirname(os.path.abspath(__file__))
_cythonize_options = ['--inplace']


def main():
    for dir_path, _, file_names in os.walk(_binaries_dir):
        for file_name in file_names:
            mod_name, ext = os.path.splitext(file_name)
            if ext == '.pyx':
                cythonize(_cythonize_options + [os.path.join(dir_path, file_name)])
                os.remove(os.path.join(dir_path, mod_name + '.c'))


if __name__ == '__main__':
    main()
Add a script to compile all binary modules in test data directory
Add a script to compile all binary modules in test data directory GitOrigin-RevId: b4da703157f1397dac2760ca844d43143bae5cdc
Python
apache-2.0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community