Dataset columns:
  commit         stringlengths   40 to 40
  old_file       stringlengths   4 to 264
  new_file       stringlengths   4 to 264
  old_contents   stringlengths   0 to 3.26k
  new_contents   stringlengths   1 to 4.43k
  subject        stringlengths   15 to 624
  message        stringlengths   15 to 4.7k
  lang           stringclasses   3 values
  license        stringclasses   13 values
  repos          stringlengths   5 to 91.5k
511abf77f16a7a92dde93a9f1318967b1d237635
go_doc_get.py
go_doc_get.py
import sublime import sublime_plugin import webbrowser def cleanPackage(pkgURI): pkg = pkgURI.split('.com/')[1] return pkg class GoDocGetCommand(sublime_plugin.TextCommand): def run(self, edit): view = self.view for region in view.sel(): selected = view.substr(region) if "github.corp" in selected: # if corporate go to page pkg = cleanPackage(selected) webbrowser.open('https://github.corp.dyndns.com/' + pkg) elif "github" in selected: # if public package go to doc pkg = cleanPackage(selected) webbrowser.open('https://godoc.org/github.com/' + pkg) else: # default to golang proper webbrowser.open('https://golang.org/pkg/' + selected)
import sublime import sublime_plugin import webbrowser def cleanPackage(pkgURI): pkg = pkgURI.split('.com/')[1] return pkg class GoDocGetCommand(sublime_plugin.TextCommand): def run(self, edit): view = self.view for region in view.sel(): selected = view.substr(region) if "github.corp" in selected: # if corporate go to page on master branch pkg = cleanPackage(selected) res = pkg.split('/') res.insert(2, 'tree/master') pkg = '/'.join(res) webbrowser.open('https://github.corp.dyndns.com/' + pkg) elif "github" in selected: # if public package go to doc pkg = cleanPackage(selected) webbrowser.open('https://godoc.org/github.com/' + pkg) else: # default to golang proper webbrowser.open('https://golang.org/pkg/' + selected)
Set specific branch to go to in GitHub
Set specific branch to go to in GitHub
Python
mit
lowellmower/go_doc_get
078a4d36c1dc088937b242ca63b88b4c03f33fa0
isitup/main.py
isitup/main.py
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import requests def check(url): try: response = requests.get( "https://isitup.org/{0}.json".format(url), headers={'User-Agent': 'https://github.com/lord63/isitup'}) except requests.exceptions.ConnectionError: return ("A network problem(e.g. you're offline; refused connection)," "can't check the site right now.") except requests.exceptions.Timeout: return "The request timed out." status_code = response.json()["status_code"] if status_code == 1: return ("Yay, {0} is up.\nIt took {1[response_time]} ms " "for a {1[response_code]} response code with " "an ip of {1[response_ip]}".format(url, response.json())) if status_code == 2: return "{0} seems to be down!".format(url) if status_code == 3: return "We need a valid domain to check! Try again."
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals import requests def check(url): try: response = requests.get( "https://isitup.org/{0}.json".format(url), headers={'User-Agent': 'https://github.com/lord63/isitup'}) except requests.exceptions.ConnectionError: return ("A network problem(e.g. you're offline; refused connection)," "can't check the site right now.") except requests.exceptions.Timeout: return "The request timed out." except requests.exceptions.RequestException as error: return "Something bad happened:\n{0}".format(error) status_code = response.json()["status_code"] if status_code == 1: return ("Yay, {0} is up.\nIt took {1[response_time]} ms " "for a {1[response_code]} response code with " "an ip of {1[response_ip]}".format(url, response.json())) if status_code == 2: return "{0} seems to be down!".format(url) if status_code == 3: return "We need a valid domain to check! Try again."
Make sure handle all the exceptions
Make sure handle all the exceptions
Python
mit
lord63/isitup
a65eaeaef60492bfc6319fb9c810155d62c1a3b3
luigi/tasks/export/ftp/go_annotations.py
luigi/tasks/export/ftp/go_annotations.py
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import luigi from tasks.config import db from tasks.config import export from tasks.utils.files import atomic_output class GoAnnotation(luigi.Task): def output(self): return luigi.LocalTarget(export().go('rnacentral_annotations.tsv')) def run(self): with atomic_output(self.output()) as out: export(db(), out)
# -*- coding: utf-8 -*- """ Copyright [2009-2017] EMBL-European Bioinformatics Institute Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import luigi from tasks.config import db from tasks.config import export from rnacentral.export.ftp import go_terms from tasks.utils.files import atomic_output class GoAnnotationExport(luigi.Task): def output(self): return luigi.LocalTarget(export().go('rnacentral_annotations.tsv')) def run(self): with atomic_output(self.output()) as out: go_terms.export(db(), out)
Update name and call correct export
Update name and call correct export This now calls the correct export function. Additionally, the class name is changed to reflect it does export.
Python
apache-2.0
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
e74dcb6d19b737a0ec069a9bdda689731a0f295b
sqlalchemy_seed/mixin.py
sqlalchemy_seed/mixin.py
# -*- coding: utf-8 -*- """ sqlalchemy_seed.seed_mixin ~~~~~~~~~~~~~~~~~~~~~~~~~~ Mixin class for unittest. :copyright: (c) 2017 Shinya Ohyanagi, All rights reserved. :license: BSD, see LICENSE for more details. """ from . import ( create_table, drop_table, load_fixture_files, load_fixtures, ) class SeedMixin(object): base = None session = None fixtures = [] fixtures_setup_class = False fixtures_paths = None def _create_fixtures(self): if self.base is None: return if self.session is None: return if self.fixtures_paths is None: return create_table(self.base, self.session) fixtures = load_fixture_files(self.fixtures_paths, self.fixtures) load_fixtures(self.session, fixtures) def _drop_fixtures(self): drop_table(self.base, self.session) @classmethod def setUpClass(cls): if cls.fixtures_setup_class is True: cls._create_fixtures(cls) @classmethod def teatDownClass(cls): if cls.fixtures_setup_class is True: cls._drop_fixtures(cls) def setUp(self): if self.fixtures_setup_class is True: return self._create_fixtures() def tearDown(self): if self.fixtures_setup_class is True: return self._drop_fixtures()
# -*- coding: utf-8 -*- """ sqlalchemy_seed.seed_mixin ~~~~~~~~~~~~~~~~~~~~~~~~~~ Mixin class for unittest. :copyright: (c) 2017 Shinya Ohyanagi, All rights reserved. :license: BSD, see LICENSE for more details. """ from . import ( create_table, drop_table, load_fixture_files, load_fixtures, ) class SeedMixin(object): base = None session = None fixtures = [] fixtures_setup_class = False fixtures_paths = None def _create_fixtures(self): if self.base is None: return if self.session is None: return if self.fixtures_paths is None: return create_table(self.base, self.session) fixtures = load_fixture_files(self.fixtures_paths, self.fixtures) load_fixtures(self.session, fixtures) def _drop_fixtures(self): drop_table(self.base, self.session) @classmethod def setUpClass(cls): if cls.fixtures_setup_class is True: cls._create_fixtures(cls) @classmethod def tearDownClass(cls): if cls.fixtures_setup_class is True: cls._drop_fixtures(cls) def setUp(self): if self.fixtures_setup_class is True: return self._create_fixtures() def tearDown(self): if self.fixtures_setup_class is True: return self._drop_fixtures()
Fix typo in SeedMixin.tearDownClass name
Fix typo in SeedMixin.tearDownClass name
Python
bsd-3-clause
heavenshell/py-sqlalchemy_seed
a74fe1e0d72821afd643142ae283634bd4e3cc71
masters/master.client.syzygy/master_source_cfg.py
masters/master.client.syzygy/master_source_cfg.py
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from buildbot.changes import svnpoller from master import build_utils def SyzygyFileSplitter(path): """split_file for Syzygy.""" projects = ['trunk'] return build_utils.SplitPath(projects, path) def Update(config, active_master, c): syzygy_url = config.Master.syzygy_url syzygy_revlinktmpl = config.Master.googlecode_revlinktmpl % ('syzygy', '%s') syzygy_poller = svnpoller.SVNPoller(svnurl=syzygy_url, split_file=SyzygyFileSplitter, pollinterval=30, revlinktmpl=syzygy_revlinktmpl) c['change_source'].append(syzygy_poller)
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from buildbot.changes import svnpoller from master import build_utils def SyzygyFileSplitter(path): """split_file for Syzygy.""" projects = ['trunk'] return build_utils.SplitPath(projects, path) def Update(config, active_master, c): syzygy_url = config.Master.syzygy_url syzygy_revlinktmpl = config.Master.googlecode_revlinktmpl % ('sawbuck', '%s') syzygy_poller = svnpoller.SVNPoller(svnurl=syzygy_url, split_file=SyzygyFileSplitter, pollinterval=30, revlinktmpl=syzygy_revlinktmpl) c['change_source'].append(syzygy_poller)
Fix link to revision to use 'sawbuck' rather than 'syzygy'
Fix link to revision to use 'sawbuck' rather than 'syzygy' Review URL: http://codereview.chromium.org/6992029 git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@86449 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
eunchong/build,eunchong/build,eunchong/build,eunchong/build
d3cea746432b1bfd1b5f2d38972c1b761b96e8eb
fetchroots.py
fetchroots.py
import os import base64 from requests import Session, Request from OpenSSL import crypto #url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots' url = 'https://ct.api.venafi.com/ct/v1/get-roots' s = Session() r = Request('GET', url) prepped = r.prepare() r = s.send(prepped) if r.status_code == 200: roots = r.json() # RFC 6962 defines the certificate objects as base64 encoded certs. # Importantly, these are not PEM formatted certs but base64 encoded # ASN.1 (DER) encoded for i in roots: certs = roots[i] for k in certs: try: certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k)) subject = certobj.get_subject() print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName, subject.organizationalUnitName, subject.organizationName, subject.localityName, subject.stateOrProvinceName, subject.countryName) except: print subject.get_components()
import os import base64 from requests import Session, Request from OpenSSL import crypto url = 'http://ct.googleapis.com/aviator/ct/v1/get-roots' s = Session() r = Request('GET', url) prepped = r.prepare() r = s.send(prepped) if r.status_code == 200: roots = r.json() # RFC 6962 defines the certificate objects as base64 encoded certs. # Importantly, these are not PEM formatted certs but base64 encoded # ASN.1 (DER) encoded for i in roots: certs = roots[i] for k in certs: try: certobj = crypto.load_certificate(crypto.FILETYPE_ASN1,base64.b64decode(k)) subject = certobj.get_subject() print 'CN={},OU={},O={},L={},S={},C={}'.format(subject.commonName, subject.organizationalUnitName, subject.organizationName, subject.localityName, subject.stateOrProvinceName, subject.countryName) except: print subject.get_components()
Update to use Google Aviator test log
Update to use Google Aviator test log
Python
apache-2.0
wgoulet/CTPyClient
95b90325b1dfa535fc802ad2a06f15e30010bf3a
fore/hotswap.py
fore/hotswap.py
import os import logging import threading log = logging.getLogger(__name__) class Hotswap(threading.Thread): def __init__(self, out, mod, *args, **kwargs): self.out = out self.gen = mod.generate(*args, **kwargs) threading.Thread.__init__(self) self.daemon = True def run(self): while True: self.out(self.gen.next())
import os import logging import threading log = logging.getLogger(__name__) class Hotswap(threading.Thread): def __init__(self, out, mod, *args, **kwargs): self.out = out self.gen = mod.generate(*args, **kwargs) threading.Thread.__init__(self) self.daemon = True def run(self): while True: self.out(next(self.gen))
Use next(it) instead of it.next()
Hotswap: Use next(it) instead of it.next()
Python
artistic-2.0
MikeiLL/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,MikeiLL/appension,Rosuav/appension,Rosuav/appension
a727161f67edff10bb94785e70add7c42ba99dcc
morepath/tests/test_app.py
morepath/tests/test_app.py
from morepath.app import App, global_app import morepath def setup_module(module): morepath.disable_implicit() def test_global_app(): assert global_app.extends == [] assert global_app.name == 'global_app' def test_app_without_extends(): myapp = App() assert myapp.extends == [global_app] assert myapp.name == '' def test_app_with_extends(): parentapp = App() myapp = App('myapp', extends=parentapp) assert myapp.extends == [parentapp] assert myapp.name == 'myapp' def test_app_caching_lookup(): class MockClassLookup(object): called = 0 def all(self, key, classes): self.called += 1 return ["answer"] class MockApp(MockClassLookup, App): pass myapp = MockApp() lookup = myapp.lookup answer = lookup.component('foo', []) assert answer == 'answer' assert myapp.called == 1 # after this the answer will be cached for those parameters answer = lookup.component('foo', []) assert myapp.called == 1 answer = myapp.lookup.component('foo', []) assert myapp.called == 1 # but different parameters does trigger another call lookup.component('bar', []) assert myapp.called == 2
from morepath.app import App, global_app import morepath def setup_module(module): morepath.disable_implicit() def test_global_app(): assert global_app.extends == [] assert global_app.name == 'global_app' def test_app_without_extends(): myapp = App() assert myapp.extends == [global_app] assert myapp.name == '' def test_app_with_extends(): parentapp = App() myapp = App('myapp', extends=parentapp) assert myapp.extends == [parentapp] assert myapp.name == 'myapp' def test_app_caching_lookup(): class MockClassLookup(object): called = 0 def all(self, key, classes): self.called += 1 return ["answer"] class MockApp(MockClassLookup, App): pass myapp = MockApp() lookup = myapp.lookup answer = lookup.component('foo', []) assert answer == 'answer' assert myapp.called == 1 # after this the answer will be cached for those parameters answer = lookup.component('foo', []) assert myapp.called == 1 answer = myapp.lookup.component('foo', []) assert myapp.called == 1 # but different parameters does trigger another call lookup.component('bar', []) assert myapp.called == 2 def test_app_name(): app = morepath.App(name='foo') assert repr(app) == "<morepath.App 'foo'>"
Add coverage of __repr__ of app.
Add coverage of __repr__ of app.
Python
bsd-3-clause
morepath/morepath,faassen/morepath,taschini/morepath
7e00b8a4436ee4bdad4d248a29985b1cef741a53
nimbus/apps/media/utils.py
nimbus/apps/media/utils.py
def bsd_rand(seed): return (1103515245 * seed + 12345) & 0x7fffffff def baseconv(v1, a1, a2): n1 = {c: i for i, c in dict(enumerate(a1)).items()} b1 = len(a1) b2 = len(a2) d1 = 0 for i, c in enumerate(v1): d1 += n1[c] * pow(b1, b1 - i - 1) v2 = "" while d1: v2 = a2[d1 % b2] + v2 d1 //= b2 return v2 def url_hash_from_pk(pk): b10 = "0123456789" b62 = "abcdefghijklmnopqrstuvwxyz0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ" return baseconv(str(bsd_rand(pk)), b10, b62)
from nimbus.settings import SECRET_KEY import hashlib def baseconv(v1, a1, a2): n1 = {c: i for i, c in enumerate(a1)} b1 = len(a1) b2 = len(a2) d1 = 0 for i, c in enumerate(v1): d1 += n1[c] * pow(b1, len(v1) - i - 1) v2 = "" while d1: v2 = a2[d1 % b2] + v2 d1 //= b2 return v2 m = hashlib.md5() m.update(SECRET_KEY) c = int(baseconv(m.hexdigest(), "0123456789abcdef", "0123456789")) c = c - (c % 2) + 1 def lcg(seed): return (1103515245 * seed + c) & 0x7fffffff def url_hash_from_pk(pk): b10 = "0123456789" b62 = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz" return baseconv(str(lcg(pk)), b10, b62)
Patch bug and security vulnerability
Patch bug and security vulnerability
Python
mit
ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus,ethanal/Nimbus
a646bf7d791e84f4fa258e0c258e598c8d3f43d2
code/python/seizures/prediction/PredictorBase.py
code/python/seizures/prediction/PredictorBase.py
from abc import abstractmethod class PredictorBase(object): """" Abstract base class that implement the interface that we use for our predictors. Classic supervised learning. @author: Heiko """ @abstractmethod def fit(self, X, y): """ Method to fit the model. Parameters: X - 2d numpy array of training data y - 1d numpy array of training labels """ raise NotImplementedError() @abstractmethod def predict(self, X, y): """ Method to apply the model data Parameters: X - 2d numpy array of test data """ raise NotImplementedError()
from abc import abstractmethod class PredictorBase(object): """" Abstract base class that implement the interface that we use for our predictors. Classic supervised learning. @author: Heiko """ @abstractmethod def fit(self, X, y): """ Method to fit the model. Parameters: X - 2d numpy array of training data y - 1d numpy array of training labels """ raise NotImplementedError() @abstractmethod def predict(self, X): """ Method to apply the model data Parameters: X - 2d numpy array of test data """ raise NotImplementedError()
Remove y from base predictor.
Remove y from base predictor.
Python
bsd-2-clause
vincentadam87/gatsby-hackathon-seizure,vincentadam87/gatsby-hackathon-seizure
8a4a8cc351ae7fecd53932d0fb6ca0a7f9a83fbc
falcom/api/test/test_uris.py
falcom/api/test/test_uris.py
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from hamcrest import * import unittest from .hamcrest import ComposedAssertion from ..uri import URI # There are three URIs that I need to use: # # http://catalog.hathitrust.org/api/volumes/brief/oclc/[OCLC].json # http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta?id=[BARCODE]&type=bc&schema=marcxml # http://www.worldcat.org/webservices/catalog/content/libraries/[OCLC]?wskey=[WC_KEY]&format=json&maximumLibraries=50 class URITest (unittest.TestCase): def test_null_uri_yields_empty_string (self): uri = URI(None) assert_that(uri(), is_(equal_to(""))) def test_empty_uri_yields_empty_string (self): uri = URI("") assert_that(uri(), is_(equal_to(""))) def test_simple_uri_yields_itself (self): uri = URI("hello") assert_that(uri(), is_(equal_to("hello")))
# Copyright (c) 2017 The Regents of the University of Michigan. # All Rights Reserved. Licensed according to the terms of the Revised # BSD License. See LICENSE.txt for details. from hamcrest import * import unittest from .hamcrest import ComposedAssertion from ..uri import URI # There are three URIs that I need to use: # # http://catalog.hathitrust.org/api/volumes/brief/oclc/[OCLC].json # http://mirlyn-aleph.lib.umich.edu/cgi-bin/bc2meta?id=[BARCODE]&type=bc&schema=marcxml # http://www.worldcat.org/webservices/catalog/content/libraries/[OCLC]?wskey=[WC_KEY]&format=json&maximumLibraries=50 class URITest (unittest.TestCase): def test_null_uri_yields_empty_string (self): uri = URI(None) assert_that(uri(), is_(equal_to(""))) def test_simple_uri_yields_itself (self): uri = URI("hello") assert_that(uri(), is_(equal_to("hello"))) class GivenEmptyStrURI (unittest.TestCase): def setUp (self): self.uri = URI("") def test_when_called_without_args_yields_empty_str (self): assert_that(self.uri(), is_(equal_to("")))
Refactor a test into its own "given" test class
Refactor a test into its own "given" test class
Python
bsd-3-clause
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
c84aef2acef68d5feadb23aa045d9aa6e2f8512d
tests/app/dao/test_fees_dao.py
tests/app/dao/test_fees_dao.py
from app.dao.fees_dao import dao_update_fee, dao_get_fees, dao_get_fee_by_id from app.models import Fee from tests.db import create_fee class WhenUsingFeesDAO(object): def it_creates_a_fee(self, db_session): fee = create_fee() assert Fee.query.count() == 1 fee_from_db = Fee.query.filter(Fee.id == fee.id).first() assert fee == fee_from_db def it_updates_a_fee_dao(self, db, db_session, sample_fee): dao_update_fee(sample_fee.id, fee=10) fee_from_db = Fee.query.filter(Fee.id == sample_fee.id).first() assert sample_fee.fee == fee_from_db.fee def it_gets_all_fees(self, db, db_session, sample_fee): fees = [create_fee(fee=100, conc_fee=80), sample_fee] fees_from_db = dao_get_fees() assert Fee.query.count() == 2 assert set(fees) == set(fees_from_db) def it_gets_a_fee_by_id(self, db, db_session, sample_fee): fee = create_fee(fee=100, conc_fee=80) fetched_fee = dao_get_fee_by_id(fee.id) assert fetched_fee == fee
from app.dao.fees_dao import dao_update_fee, dao_get_fees, dao_get_fee_by_id from app.models import Fee from tests.db import create_fee class WhenUsingFeesDAO(object): def it_creates_a_fee(self, db_session): fee = create_fee() assert Fee.query.count() == 1 fee_from_db = Fee.query.filter(Fee.id == fee.id).first() assert fee == fee_from_db def it_updates_a_fee_dao(self, db, db_session, sample_fee): dao_update_fee(sample_fee.id, fee=10) fee_from_db = Fee.query.filter(Fee.id == sample_fee.id).first() assert fee_from_db.fee == 10 def it_gets_all_fees(self, db, db_session, sample_fee): fees = [create_fee(fee=100, conc_fee=80), sample_fee] fees_from_db = dao_get_fees() assert Fee.query.count() == 2 assert set(fees) == set(fees_from_db) def it_gets_a_fee_by_id(self, db, db_session, sample_fee): fee = create_fee(fee=100, conc_fee=80) fetched_fee = dao_get_fee_by_id(fee.id) assert fetched_fee == fee
Make fees dao test clearer
Make fees dao test clearer
Python
mit
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
0fa938a459293849761fe344c963c503e59e24df
tests/test_cross_validation.py
tests/test_cross_validation.py
import pytest from lightfm.cross_validation import random_train_test_split from lightfm.datasets import fetch_movielens def _assert_disjoint(x, y): x = x.tocsr() y = y.tocoo() for (i, j) in zip(y.row, y.col): assert x[i, j] == 0.0 @pytest.mark.parametrize('test_percentage', [0.2, 0.5, 0.7]) def test_random_train_test_split(test_percentage): data = fetch_movielens()['train'] train, test = random_train_test_split(data, test_percentage=test_percentage) assert test.nnz / data.nnz == test_percentage _assert_disjoint(train, test)
import pytest from lightfm.cross_validation import random_train_test_split from lightfm.datasets import fetch_movielens def _assert_disjoint(x, y): x = x.tocsr() y = y.tocoo() for (i, j) in zip(y.row, y.col): assert x[i, j] == 0.0 @pytest.mark.parametrize('test_percentage', [0.2, 0.5, 0.7]) def test_random_train_test_split(test_percentage): data = fetch_movielens()['train'] train, test = random_train_test_split(data, test_percentage=test_percentage) assert test.nnz / float(data.nnz) == test_percentage _assert_disjoint(train, test)
Make tests work with Python 2.7.
Make tests work with Python 2.7.
Python
apache-2.0
lyst/lightfm,maciejkula/lightfm
5eb8297b6da0b0cfd885975d5b9993a07acca426
importlib_metadata/__init__.py
importlib_metadata/__init__.py
import os import sys import glob import email import itertools import contextlib class Distribution: def __init__(self, path): """ Construct a distribution from a path to the metadata dir """ self.path = path @classmethod def for_name(cls, name, path=sys.path): for path_item in path: glob_specs = ( os.path.join(path_item, f'{name}-*.*-info'), os.path.join(path_item, f'{name}.*-info'), ) globs = itertools.chain.from_iterable(map(glob.iglob, glob_specs)) match = next(globs) return cls(os.path.join(path_item, match)) @classmethod def for_module(cls, mod): return cls.for_name(cls.name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__) @property def metadata(self): return email.message_from_string( self.load_metadata('METADATA') or self.load_metadata('PKG-INFO') ) def load_metadata(self, name): fn = os.path.join(self.path, name) with contextlib.suppress(FileNotFoundError): with open(fn, encoding='utf-8') as strm: return strm.read() @property def version(self): return self.metadata['Version']
import os import sys import glob import email import itertools import contextlib class Distribution: def __init__(self, path): """ Construct a distribution from a path to the metadata dir """ self.path = path @classmethod def for_name(cls, name, path=sys.path): glob_groups = map(glob.iglob, cls._search_globs(name, path)) globs = itertools.chain.from_iterable(glob_groups) return cls(next(globs)) @staticmethod def _search_globs(name, path): """ Generate search globs for locating distribution metadata in path """ for path_item in path: yield os.path.join(path_item, f'{name}-*.*-info') # in develop install, no version is present yield os.path.join(path_item, f'{name}.*-info') @classmethod def for_module(cls, mod): return cls.for_name(cls.name_for_module(mod)) @staticmethod def name_for_module(mod): return getattr(mod, '__dist_name__', mod.__name__) @property def metadata(self): return email.message_from_string( self.load_metadata('METADATA') or self.load_metadata('PKG-INFO') ) def load_metadata(self, name): fn = os.path.join(self.path, name) with contextlib.suppress(FileNotFoundError): with open(fn, encoding='utf-8') as strm: return strm.read() @property def version(self): return self.metadata['Version']
Fix logic in path search.
Fix logic in path search.
Python
apache-2.0
python/importlib_metadata
694575e2707bdf7a2e042e2dd443a46481bc9d39
source/segue/__init__.py
source/segue/__init__.py
# :coding: utf-8 # :copyright: Copyright (c) 2013 Martin Pengelly-Phillips # :license: See LICENSE.txt.
# :coding: utf-8 # :copyright: Copyright (c) 2013 Martin Pengelly-Phillips # :license: See LICENSE.txt. import os import imp import uuid def discover_processors(paths=None, options=None): '''Return processor plugins discovered on *paths*. If *paths* is None will try to use environment variable :envvar:`SEGUE_PROCESSOR_PLUGIN_PATH` Each discovered plugin should have a register function that can be called to return a processor instance. The register function should accept arbitrary keyword arguments. *options* will be passed to the register functions as keyword arguments. ''' processors = [] if paths is None: plugin_path = os.environ.get('SEGUE_PROCESSOR_PLUGIN_PATH') if plugin_path: paths = plugin_path.split(os.pathsep) else: paths = [] if options is None: options = {} for path in paths: for base, directories, filenames in os.walk(path): for filename in filenames: name, extension = os.path.splitext(filename) if extension != '.py': continue module_path = os.path.join(base, filename) module_name = uuid.uuid4().hex module = imp.load_source(module_name, module_path) processor = module.register(**options) return processors
Add helper function for discovering processor plugins.
Add helper function for discovering processor plugins.
Python
apache-2.0
4degrees/segue
8754f8b73b140fa597de1f70a0cf636d198fadb2
extension_course/tests/conftest.py
extension_course/tests/conftest.py
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa location_id, minimal_event_dict, municipality, organization, place, user, user_api_client, django_db_modify_db_settings, django_db_setup)
from events.tests.conftest import (administrative_division, administrative_division_type, data_source, event, # noqa location_id, minimal_event_dict, municipality, organization, place, user, user_api_client, django_db_modify_db_settings, django_db_setup, make_minimal_event_dict, make_keyword_id, make_keyword)
Add new make_* fixtures to extension_course tests
Add new make_* fixtures to extension_course tests
Python
mit
City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents
e0d909e25fbf47ebad35756032c9230fe3d3bdaa
example/example/tasksapp/run_tasks.py
example/example/tasksapp/run_tasks.py
import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result result1 = netcdf_save.delay(14, '') print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
import os import time from dj_experiment.tasks.tasks import longtime_add, netcdf_save from example.settings import (DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) if __name__ == '__main__': result = longtime_add.delay(1, 2) # at this time, our task is not finished, so it will return False print 'Task finished? ', result.ready() print 'Task result: ', result.result # sleep 10 seconds to ensure the task has been finished time.sleep(10) # now the task should be finished and ready method will return True print 'Task finished? ', result.ready() print 'Task result: ', result.result rcmdatadir = os.path.join(DJ_EXPERIMENT_BASE_DATA_DIR, DJ_EXPERIMENT_DATA_DIR) result1 = netcdf_save.delay(14, rcmdatadir) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result time.sleep(10) print 'Task netcdf finished? ', result1.ready() print 'Task result1: ', result1.result
Fix parameters in task call
Fix parameters in task call
Python
mit
francbartoli/dj-experiment,francbartoli/dj-experiment
21e15235b2cd767e0da56a2a0d224824fda58c42
Tools/idle/ZoomHeight.py
Tools/idle/ZoomHeight.py
# Sample extension: zoom a window to maximum height import re import sys class ZoomHeight: menudefs = [ ('windows', [ ('_Zoom Height', '<<zoom-height>>'), ]) ] windows_keydefs = { '<<zoom-height>>': ['<Alt-F2>'], } unix_keydefs = { '<<zoom-height>>': ['<Control-x><Control-z>'], } def __init__(self, editwin): self.editwin = editwin def zoom_height_event(self, event): top = self.editwin.top geom = top.wm_geometry() m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom) if not m: top.bell() return width, height, x, y = map(int, m.groups()) newheight = top.winfo_screenheight() if sys.platform == 'win32': newy = 0 newheight = newheight - 72 else: newy = 24 newheight = newheight - 96 if height >= newheight: newgeom = "" else: newgeom = "%dx%d+%d+%d" % (width, newheight, x, newy) top.wm_geometry(newgeom)
# Sample extension: zoom a window to maximum height import re import sys class ZoomHeight: menudefs = [ ('windows', [ ('_Zoom Height', '<<zoom-height>>'), ]) ] windows_keydefs = { '<<zoom-height>>': ['<Alt-F2>'], } unix_keydefs = { '<<zoom-height>>': ['<Control-x><Control-z>'], } def __init__(self, editwin): self.editwin = editwin def zoom_height_event(self, event): top = self.editwin.top zoom_height(top) def zoom_height(top): geom = top.wm_geometry() m = re.match(r"(\d+)x(\d+)\+(-?\d+)\+(-?\d+)", geom) if not m: top.bell() return width, height, x, y = map(int, m.groups()) newheight = top.winfo_screenheight() if sys.platform == 'win32': newy = 0 newheight = newheight - 72 else: newy = 24 newheight = newheight - 96 if height >= newheight: newgeom = "" else: newgeom = "%dx%d+%d+%d" % (width, newheight, x, newy) top.wm_geometry(newgeom)
Move zoom height functionality to separate function.
Move zoom height functionality to separate function.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
280c81a3990116f66de9af8e6fd6e71d0215a386
client.py
client.py
#!/usr/bin/env python from configReader import ConfigReader import sys import os, os.path import os.path from time import time from math import floor import hashlib import random import requests f = open('adjectives.txt','r') adjectives = [line.rstrip() for line in f] f.close() configReader = ConfigReader(name="clientConfig.txt") path = sys.argv[1] timeHash = hashlib.md5(str(time())).hexdigest()[0:6] adjective = random.choice(adjectives) keys=configReader.getKeys() endpoint=keys['endpoint'] username=keys['username'] password=keys['password'] finalLocation=keys['finalLocation'] urlPath = adjective+timeHash+".png" print "Uploading",path,"as",urlPath,"to",endpoint r = requests.post(endpoint,params={'name':urlPath},files={'file':open(path,'rb')}) print r.status_code if r.status_code==200: print os.path.join(finalLocation,urlPath) os.system("echo '"+os.path.join(finalLocation,urlPath)+"'|pbcopy")
#!/usr/bin/env python from configReader import ConfigReader import sys import os, os.path import os.path from time import time from math import floor import hashlib import random import requests f = open('adjectives.txt','r') adjectives = [line.rstrip() for line in f] f.close() configReader = ConfigReader(name="clientConfig.txt") path = sys.argv[1] timeHash = hashlib.md5(str(time())).hexdigest()[0:6] adjective = random.choice(adjectives) keys=configReader.getKeys() endpoint=keys['endpoint'] username=keys['username'] password=keys['password'] finalLocation=keys['finalLocation'] urlPath = adjective+timeHash+".png" print "Uploading",path,"as",urlPath,"to",endpoint r = requests.post(endpoint,auth=(username,password),params={'name':urlPath},files={'file':open(path,'rb')}) print r.status_code if r.status_code==200: print os.path.join(finalLocation,urlPath) os.system("echo '"+os.path.join(finalLocation,urlPath)+"'|pbcopy")
Add authentication to the serverside
Add authentication to the serverside
Python
mit
ollien/Screenshot-Uploader,ollien/Screenshot-Uploader
a68f3ea83c191478f6a7b0dc6a4b49ff6c297ae2
imports.py
imports.py
#!/usr/bin/env python # vim: set fileencoding=utf-8 : from flask import flash from old_xml_import import old_xml_import from sml_import import sml_import import gzip from model import db, Sample from sqlalchemy.sql import func def move_import(xmlfile, filename, user): if filename.endswith('.gz'): xmlfile = gzip.GzipFile(fileobj=xmlfile, mode='rb', filename=filename) filename = filename[:-len('.gz')] if filename.endswith('.xml'): move = old_xml_import(xmlfile, user) elif filename.endswith('.sml'): move = sml_import(xmlfile, user) else: flash("unknown fileformat: '%s'" % xmlfile.filename, 'error') if move: move.temperature_avg, = db.session.query(func.avg(Sample.temperature)).filter(Sample.move == move, Sample.temperature > 0).one() stroke_count = 0 for events, in db.session.query(Sample.events).filter(Sample.move == move, Sample.events != None): if 'swimming' in events and events['swimming']['type'] == 'Stroke': stroke_count += 1 if 'swimming' in move.activity: assert stroke_count > 0 if stroke_count > 0: move.stroke_count = stroke_count db.session.commit() return move
#!/usr/bin/env python # vim: set fileencoding=utf-8 : from flask import flash from old_xml_import import old_xml_import from sml_import import sml_import import gzip from model import db, Sample from sqlalchemy.sql import func def move_import(xmlfile, filename, user): move = None if filename.endswith('.gz'): xmlfile = gzip.GzipFile(fileobj=xmlfile, mode='rb', filename=filename) filename = filename[:-len('.gz')] if filename.endswith('.xml'): move = old_xml_import(xmlfile, user) elif filename.endswith('.sml'): move = sml_import(xmlfile, user) else: flash("unknown fileformat: '%s'" % xmlfile.filename, 'error') if move: move.temperature_avg, = db.session.query(func.avg(Sample.temperature)).filter(Sample.move == move, Sample.temperature > 0).one() stroke_count = 0 for events, in db.session.query(Sample.events).filter(Sample.move == move, Sample.events != None): if 'swimming' in events and events['swimming']['type'] == 'Stroke': stroke_count += 1 if 'swimming' in move.activity: assert stroke_count > 0 if stroke_count > 0: move.stroke_count = stroke_count db.session.commit() return move
Fix exception 'local variable 'move' referenced before assignment' in case of upload of unknown file formats
Fix exception 'local variable 'move' referenced before assignment' in case of upload of unknown file formats
Python
mit
bwaldvogel/openmoves,marguslt/openmoves,marguslt/openmoves,bwaldvogel/openmoves,mourningsun75/openmoves,mourningsun75/openmoves,mourningsun75/openmoves,marguslt/openmoves,bwaldvogel/openmoves
317cf766f3fe1c55e5a57b7e38fb94222c6525d8
grow/submodules/__init__.py
grow/submodules/__init__.py
import os import sys def fix_imports(): here = os.path.dirname(__file__) dirs = [ os.path.normpath(os.path.join(here, '..', '..')), os.path.normpath(os.path.join(here, 'babel')), os.path.normpath(os.path.join(here, 'dulwich')), os.path.normpath(os.path.join(here, 'google-apputils-python')), os.path.normpath(os.path.join(here, 'httplib2', 'python2')), os.path.normpath(os.path.join(here, 'pytz')), os.path.normpath(os.path.join(here, 'pyyaml', 'lib3')), os.path.normpath(os.path.join(here, 'requests')), ] sys.path.extend(dirs)
import os import sys def fix_imports(): here = os.path.dirname(__file__) dirs = [ os.path.normpath(os.path.join(here, '..', '..')), os.path.normpath(os.path.join(here, 'babel')), os.path.normpath(os.path.join(here, 'dulwich')), os.path.normpath(os.path.join(here, 'google-apputils-python')), os.path.normpath(os.path.join(here, 'httplib2', 'python2')), os.path.normpath(os.path.join(here, 'pytz')), os.path.normpath(os.path.join(here, 'pyyaml', 'lib2')), os.path.normpath(os.path.join(here, 'requests')), ] sys.path.extend(dirs)
Use python2 pyyaml instead of python3.
Use python2 pyyaml instead of python3.
Python
mit
grow/grow,vitorio/pygrow,grow/grow,grow/pygrow,grow/pygrow,vitorio/pygrow,codedcolors/pygrow,grow/grow,codedcolors/pygrow,denmojo/pygrow,denmojo/pygrow,denmojo/pygrow,denmojo/pygrow,vitorio/pygrow,grow/pygrow,codedcolors/pygrow,grow/grow
3d86b4473f66a9311a94b1def4c40189eae23990
lancet/git.py
lancet/git.py
import sys import click from slugify import slugify class SlugBranchGetter(object): def __init__(self, base_branch='master'): self.base_branch = base_branch def __call__(self, repo, issue): discriminator = 'features/{}'.format(issue.key) slug = slugify(issue.fields.summary[:30]) full_name = '{}_{}'.format(discriminator, slug) branches = [b for b in repo.listall_branches() if b.startswith(discriminator)] if len(branches) > 1: click.secho('Multiple matching branches found!', fg='red', bold=True) click.echo() click.echo('The prefix {} matched the following branches:' .format(discriminator)) click.echo() for b in branches: click.echo(' {} {}'.format(click.style('*', fg='red'), b)) click.echo() click.echo('Please remove all but one in order to continue.') sys.exit(1) elif branches: branch = repo.lookup_branch(branches[0]) if branch.branch_name != full_name: branch.rename(full_name) branch = repo.lookup_branch(full_name) else: base = repo.lookup_branch(self.base_branch) if not base: click.secho('Base branch not found: "{}", aborting.' .format(self.base_branch), fg='red', bold=True) sys.exit(1) branch = repo.create_branch(full_name, base.get_object()) return branch
import sys import click from slugify import slugify class SlugBranchGetter(object): prefix = 'feature/' def __init__(self, base_branch='master'): self.base_branch = base_branch def __call__(self, repo, issue): discriminator = '{}{}'.format(self.prefix, issue.key) slug = slugify(issue.fields.summary[:30]) full_name = '{}_{}'.format(discriminator, slug) branches = [b for b in repo.listall_branches() if b.startswith(discriminator)] if len(branches) > 1: click.secho('Multiple matching branches found!', fg='red', bold=True) click.echo() click.echo('The prefix {} matched the following branches:' .format(discriminator)) click.echo() for b in branches: click.echo(' {} {}'.format(click.style('*', fg='red'), b)) click.echo() click.echo('Please remove all but one in order to continue.') sys.exit(1) elif branches: branch = repo.lookup_branch(branches[0]) if branch.branch_name != full_name: branch.rename(full_name) branch = repo.lookup_branch(full_name) else: base = repo.lookup_branch(self.base_branch) if not base: click.secho('Base branch not found: "{}", aborting.' .format(self.base_branch), fg='red', bold=True) sys.exit(1) branch = repo.create_branch(full_name, base.get_object()) return branch
Change the prefix from features/ to feature/.
Change the prefix from features/ to feature/.
Python
mit
GaretJax/lancet,GaretJax/lancet
8816d06381625938137d9fbf8aaee3d9ddabae72
src/sentry/api/endpoints/organization_projects.py
src/sentry/api/endpoints/organization_projects.py
from __future__ import absolute_import from rest_framework.response import Response from sentry.api.base import DocSection from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize from sentry.models import Project, Team class OrganizationProjectsEndpoint(OrganizationEndpoint): doc_section = DocSection.ORGANIZATIONS def get(self, request, organization): """ List an organization's projects Return a list of projects bound to a organization. {method} {path} """ team_list = Team.objects.get_for_user( organization=organization, user=request.user, ) project_list = [] for team in team_list: project_list.extend(Project.objects.get_for_user( team=team, user=request.user, )) project_list.sort(key=lambda x: x.name) team_map = dict( (t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)), ) context = [] for project, pdata in zip(project_list, serialize(project_list, request.user)): pdata['team'] = team_map[project.team_id] context.append(pdata) return Response(context)
from __future__ import absolute_import from rest_framework.response import Response from sentry.api.base import DocSection from sentry.api.bases.organization import OrganizationEndpoint from sentry.api.serializers import serialize from sentry.models import Project class OrganizationProjectsEndpoint(OrganizationEndpoint): doc_section = DocSection.ORGANIZATIONS def get(self, request, organization): """ List an organization's projects Return a list of projects bound to a organization. {method} {path} """ if request.auth and hasattr(request.auth, 'project'): team_list = [request.auth.project.team] project_list = [request.auth.project] else: team_list = list(request.access.teams) project_list = list(Project.objects.filter( team__in=team_list, ).order_by('name')) team_map = dict( (t.id, c) for (t, c) in zip(team_list, serialize(team_list, request.user)), ) context = [] for project, pdata in zip(project_list, serialize(project_list, request.user)): pdata['team'] = team_map[project.team_id] context.append(pdata) return Response(context)
Support API keys on organization project list (fixes GH-1666)
Support API keys on organization project list (fixes GH-1666)
Python
bsd-3-clause
ifduyue/sentry,ngonzalvez/sentry,jean/sentry,Kryz/sentry,nicholasserra/sentry,daevaorn/sentry,BayanGroup/sentry,mvaled/sentry,1tush/sentry,looker/sentry,mvaled/sentry,ngonzalvez/sentry,mvaled/sentry,zenefits/sentry,beeftornado/sentry,JackDanger/sentry,hongliang5623/sentry,daevaorn/sentry,JamesMura/sentry,gencer/sentry,jean/sentry,BuildingLink/sentry,Kryz/sentry,ewdurbin/sentry,ifduyue/sentry,mitsuhiko/sentry,BayanGroup/sentry,Natim/sentry,wong2/sentry,ifduyue/sentry,BuildingLink/sentry,felixbuenemann/sentry,fuziontech/sentry,1tush/sentry,felixbuenemann/sentry,ngonzalvez/sentry,fuziontech/sentry,ifduyue/sentry,Natim/sentry,ewdurbin/sentry,gencer/sentry,songyi199111/sentry,looker/sentry,wong2/sentry,looker/sentry,BuildingLink/sentry,JamesMura/sentry,zenefits/sentry,kevinlondon/sentry,BuildingLink/sentry,korealerts1/sentry,BuildingLink/sentry,mvaled/sentry,JamesMura/sentry,nicholasserra/sentry,fotinakis/sentry,hongliang5623/sentry,felixbuenemann/sentry,nicholasserra/sentry,ifduyue/sentry,beeftornado/sentry,pauloschilling/sentry,kevinlondon/sentry,songyi199111/sentry,mvaled/sentry,zenefits/sentry,alexm92/sentry,daevaorn/sentry,Natim/sentry,pauloschilling/sentry,korealerts1/sentry,fuziontech/sentry,imankulov/sentry,beeftornado/sentry,JamesMura/sentry,fotinakis/sentry,fotinakis/sentry,mitsuhiko/sentry,JackDanger/sentry,jean/sentry,kevinlondon/sentry,Kryz/sentry,gencer/sentry,mvaled/sentry,1tush/sentry,alexm92/sentry,BayanGroup/sentry,alexm92/sentry,songyi199111/sentry,looker/sentry,pauloschilling/sentry,fotinakis/sentry,jean/sentry,daevaorn/sentry,JamesMura/sentry,zenefits/sentry,jean/sentry,ewdurbin/sentry,imankulov/sentry,hongliang5623/sentry,korealerts1/sentry,gencer/sentry,imankulov/sentry,JackDanger/sentry,gencer/sentry,looker/sentry,zenefits/sentry,wong2/sentry
9616b026894327eb7171f978f3856cdae7c9e06b
child_sync_typo3/wizard/delegate_child_wizard.py
child_sync_typo3/wizard/delegate_child_wizard.py
# -*- encoding: utf-8 -*- ############################################################################## # # Copyright (C) 2014 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: David Coninckx <[email protected]> # # The licence is in the file __openerp__.py # ############################################################################## from openerp.osv import orm from ..model.sync_typo3 import Sync_typo3 class delegate_child_wizard(orm.TransientModel): _inherit = 'delegate.child.wizard' def delegate(self, cr, uid, ids, context=None): child_ids = self._default_child_ids(cr, uid, context) child_obj = self.pool.get('compassion.child') typo3_to_remove_ids = list() for child in child_obj.browse(cr, uid, child_ids, context): if (child.state == 'I'): typo3_to_remove_ids.append(child.id) if typo3_to_remove_ids: res = child_obj.child_remove_from_typo3( cr, uid, typo3_to_remove_ids, context) res = super(delegate_child_wizard, self).delegate( cr, uid, ids, context) return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
# -*- encoding: utf-8 -*- ############################################################################## # # Copyright (C) 2014 Compassion CH (http://www.compassion.ch) # Releasing children from poverty in Jesus' name # @author: David Coninckx <[email protected]> # # The licence is in the file __openerp__.py # ############################################################################## from openerp.osv import orm from ..model.sync_typo3 import Sync_typo3 class delegate_child_wizard(orm.TransientModel): _inherit = 'delegate.child.wizard' def delegate(self, cr, uid, ids, context=None): child_ids = self._default_child_ids(cr, uid, context) child_obj = self.pool.get('compassion.child') typo3_to_remove_ids = list() for child in child_obj.browse(cr, uid, child_ids, context): if (child.state == 'I'): typo3_to_remove_ids.append(child.id) if typo3_to_remove_ids: res = child_obj.child_remove_from_typo3( cr, uid, typo3_to_remove_ids, context) res = super(delegate_child_wizard, self).delegate( cr, uid, ids, context) and res return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
Fix res returned on delegate
Fix res returned on delegate
Python
agpl-3.0
MickSandoz/compassion-switzerland,ecino/compassion-switzerland,CompassionCH/compassion-switzerland,ndtran/compassion-switzerland,eicher31/compassion-switzerland,eicher31/compassion-switzerland,ndtran/compassion-switzerland,MickSandoz/compassion-switzerland,Secheron/compassion-switzerland,CompassionCH/compassion-switzerland,CompassionCH/compassion-switzerland,ecino/compassion-switzerland,ecino/compassion-switzerland,eicher31/compassion-switzerland,Secheron/compassion-switzerland
306e0e38c148fed14cffd82ae0ede9b20ab30853
corehq/ex-submodules/casexml/apps/phone/utils.py
corehq/ex-submodules/casexml/apps/phone/utils.py
def delete_sync_logs(before_date, limit=1000): from casexml.apps.phone.dbaccessors.sync_logs_by_user import get_synclog_ids_before_date from casexml.apps.phone.models import SyncLog from dimagi.utils.couch.database import iter_bulk_delete_with_doc_type_verification sync_log_ids = get_synclog_ids_before_date(before_date, limit) return iter_bulk_delete_with_doc_type_verification(SyncLog.get_db(), sync_log_ids, 'SyncLog', chunksize=5)
def delete_sync_logs(before_date, limit=1000): from casexml.apps.phone.dbaccessors.sync_logs_by_user import get_synclog_ids_before_date from casexml.apps.phone.models import SyncLog from dimagi.utils.couch.database import iter_bulk_delete_with_doc_type_verification sync_log_ids = get_synclog_ids_before_date(before_date, limit) return iter_bulk_delete_with_doc_type_verification(SyncLog.get_db(), sync_log_ids, 'SyncLog', chunksize=25)
Delete sync logs 25 at a time
Delete sync logs 25 at a time
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
cc5cf942cc56f12e09c50c29b488f71504387b7f
avalonstar/apps/api/serializers.py
avalonstar/apps/api/serializers.py
# -*- coding: utf-8 -*- from rest_framework import serializers from apps.broadcasts.models import Broadcast, Raid, Series from apps.games.models import Game class BroadcastSerializer(serializers.ModelSerializer): class Meta: depth = 1 model = Broadcast class RaidSerializer(serializers.ModelSerializer): class Meta: model = Raid class SeriesSerializer(serializers.ModelSerializer): class Meta: model = Series class GameSerializer(serializers.ModelSerializer): class Meta: model = Game
# -*- coding: utf-8 -*- from rest_framework import serializers from apps.broadcasts.models import Broadcast, Raid, Series from apps.games.models import Game class BroadcastSerializer(serializers.ModelSerializer): class Meta: model = Broadcast class RaidSerializer(serializers.ModelSerializer): class Meta: model = Raid class SeriesSerializer(serializers.ModelSerializer): class Meta: model = Series class GameSerializer(serializers.ModelSerializer): class Meta: model = Game
Remove the depth for now.
Remove the depth for now.
Python
apache-2.0
bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv
b8770a85e11c048fb0dc6c46f799b17add07568d
productController.py
productController.py
from endpoints import Controller, CorsMixin import sqlite3 from datetime import datetime conn = sqlite3.connect('CIUK.db') cur = conn.cursor() class Default(Controller, CorsMixin): def GET(self): return "CIUK" def POST(self, **kwargs): return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price']) class Products(Controller, CorsMixin): def GET(self): cur.execute("select * from products") return cur.fetchall() class Product(Controller, CorsMixin): def GET(self, id): cur.execute("select * from products where id=?", (id,)) return cur.fetchone() def POST(self, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()] cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row)) conn.commit() return "New product added!" def PUT(self, id, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id] cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row)) conn.commit() return "Product updated!" def DELETE(self, id): cur.execute("delete from products where id=?", (id,)) conn.commit() return "Product deleted!"
from endpoints import Controller, CorsMixin import sqlite3 from datetime import datetime conn = sqlite3.connect('databaseForTest.db') cur = conn.cursor() class Default(Controller, CorsMixin): def GET(self): return "CIUK" def POST(self, **kwargs): return '{}, {}, {}'.format(kwargs['title'], kwargs['desc'], kwargs['price']) class Products(Controller, CorsMixin): def GET(self): cur.execute("select * from products") return cur.fetchall() class Product(Controller, CorsMixin): def GET(self, id): cur.execute("select * from products where id=?", (id,)) return cur.fetchone() def POST(self, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), datetime.now()] cur.execute("insert into products values (null, ?, ?, ?, ?, ?);", (row)) conn.commit() return "New product added!" def PUT(self, id, **kwargs): row =[kwargs['title'], kwargs['desc'], kwargs['price'], datetime.now(), id] cur.execute("update products set title=?, description=?, price=?, created_at=? where id=?", (row)) conn.commit() return "Product updated!" def DELETE(self, id): cur.execute("delete from products where id=?", (id,)) conn.commit() return "Product deleted!"
Change name of database for test
Change name of database for test
Python
mit
joykuotw/python-endpoints,joykuotw/python-endpoints,joykuotw/python-endpoints
c129b435a7759104feaaa5b828dc2f2ac46d5ab1
src/cmdlinetest/afp_mock.py
src/cmdlinetest/afp_mock.py
#!/usr/bin/env python from bottle import route from textwrap import dedent from bottledaemon import daemon_run """ Simple AFP mock to allow testing the afp-cli. """ @route('/account') def account(): return """{"test_account": ["test_role"]}""" @route('/account/<account>/<role>') def credentials(account, role): return dedent(""" {"Code": "Success", "LastUpdated": "1970-01-01T00:00:00Z", "AccessKeyId": "XXXXXXXXXXXX", "SecretAccessKey": "XXXXXXXXXXXX", "Token": "XXXXXXXXXXXX", "Expiration": "2032-01-01T00:00:00Z", "Type": "AWS-HMAC"}""").strip() daemon_run(host='localhost', port=5555)
#!/usr/bin/env python """ Simple AFP mock to allow testing the afp-cli. """ from bottle import route from textwrap import dedent from bottledaemon import daemon_run @route('/account') def account(): return """{"test_account": ["test_role"]}""" @route('/account/<account>/<role>') def credentials(account, role): return dedent(""" {"Code": "Success", "LastUpdated": "1970-01-01T00:00:00Z", "AccessKeyId": "XXXXXXXXXXXX", "SecretAccessKey": "XXXXXXXXXXXX", "Token": "XXXXXXXXXXXX", "Expiration": "2032-01-01T00:00:00Z", "Type": "AWS-HMAC"}""").strip() daemon_run(host='localhost', port=5555)
Move string above the imports so it becomes a docstring
Move string above the imports so it becomes a docstring
Python
apache-2.0
ImmobilienScout24/afp-cli,ImmobilienScout24/afp-cli,ImmobilienScout24/afp-cli
fbfeaaad959cd4ed9ef91cebbef847c5f1bf3fdb
src/ggrc/models/cache.py
src/ggrc/models/cache.py
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] class Cache: """ Tracks modified objects in the session distinguished by type of modification: new, dirty and deleted. """ def __init__(self): self.clear() def update(self, session): self.new.update(session.new) self.deleted.update(session.deleted) modified = {o for o in session.dirty if session.is_modified(o)} # When intermediate flushes happen, new and dirty may overlap self.dirty.update(modified - self.new - self.deleted) def clear(self): self.new = set() self.dirty = set() self.deleted = set() def copy(self): copied_cache = Cache() copied_cache.new = set(self.new) copied_cache.dirty = set(self.dirty) copied_cache.deleted = set(self.deleted) return copied_cache
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file> # Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file> # Created By: [email protected] # Maintained By: [email protected] class Cache: """ Tracks modified objects in the session distinguished by type of modification: new, dirty and deleted. """ def __init__(self): self.clear() def update(self, session): self.new.update(session.new) self.deleted.update(session.deleted) modified = set(o for o in session.dirty if session.is_modified(o)) # When intermediate flushes happen, new and dirty may overlap self.dirty.update(modified - self.new - self.deleted) def clear(self): self.new = set() self.dirty = set() self.deleted = set() def copy(self): copied_cache = Cache() copied_cache.new = set(self.new) copied_cache.dirty = set(self.dirty) copied_cache.deleted = set(self.deleted) return copied_cache
Fix to keep build-chain Python 2.6 compatible
Fix to keep build-chain Python 2.6 compatible
Python
apache-2.0
uskudnik/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,vladan-m/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,hyperNURb/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,uskudnik/ggrc-core,jmakov/ggrc-core,selahssea/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,hasanalom/ggrc-core
0ee0650dfacf648982615be49cefd57f928a73ee
holonet/core/list_access.py
holonet/core/list_access.py
# -*- coding: utf8 -*- from django.conf import settings from holonet.mappings.helpers import clean_address, split_address from .models import DomainBlacklist, DomainWhitelist, SenderBlacklist, SenderWhitelist def is_blacklisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) try: DomainBlacklist.objects.get(domain=domain) return True except DomainBlacklist.DoesNotExist: pass try: SenderBlacklist.objects.get(sender=sender) return True except SenderBlacklist.DoesNotExist: pass return False def is_not_whitelisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) if settings.SENDER_WHITELIST_ENABLED: try: SenderWhitelist.objects.get(sender=sender) return False except SenderWhitelist.DoesNotExist: pass if settings.DOMAIN_WHITELIST_ENABLED: try: DomainWhitelist.objects.get(domain=domain) return False except DomainWhitelist.DoesNotExist: pass return bool(settings.SENDER_WHITELIST_ENABLED or settings.DOMAIN_WHITELIST_ENABLED)
# -*- coding: utf8 -*- from django.conf import settings from holonet.mappings.helpers import clean_address, split_address from .models import DomainBlacklist, DomainWhitelist, SenderBlacklist, SenderWhitelist def is_blacklisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) if DomainBlacklist.objects.filter(domain=domain).exists(): return True if SenderBlacklist.objects.filter(sender=sender).exists(): return True return False def is_not_whitelisted(sender): sender = clean_address(sender) prefix, domain = split_address(sender) if settings.SENDER_WHITELIST_ENABLED: if SenderWhitelist.objects.filter(sender=sender).exists(): return False if settings.DOMAIN_WHITELIST_ENABLED: if DomainWhitelist.objects.filter(domain=domain).exists(): return False return bool(settings.SENDER_WHITELIST_ENABLED or settings.DOMAIN_WHITELIST_ENABLED)
Change to exists instead of catching DoesNotExist exception.
Change to exists instead of catching DoesNotExist exception.
Python
mit
webkom/holonet,webkom/holonet,webkom/holonet
f68b4b9b133d3c8ecb9826af9736c8c1fca64e49
maxims/credentials.py
maxims/credentials.py
from axiom import attributes, item from twisted.cred import credentials class UsernamePassword(item.Item): """ A stored username and password. """ username = attributes.bytes(allowNone=False) password = attributes.bytes(allowNone=False) def instantiate(self): return credentials.UsernamePassword(self.username, self.password)
from axiom import attributes, item from twisted.cred import credentials class UsernamePassword(item.Item): """ A stored username and password. Note that although this class is an ``IUsernamePassword`` implementation, you should still use the ``instantiate`` method to get independent ``IUsernamePassword`` providers. """ username = attributes.bytes(allowNone=False) password = attributes.bytes(allowNone=False) def instantiate(self): return credentials.UsernamePassword(self.username, self.password)
Add caveat about UsernamePassword already being an IUsernamePassword implementation
Add caveat about UsernamePassword already being an IUsernamePassword implementation
Python
isc
lvh/maxims
214511a6fbdd0763667e740735d0876f78a3b244
derpibooru/query.py
derpibooru/query.py
from .request import url class Search(object): def __init__(self, key=None, q=[], sf="created_at", sd="desc"): self._parameters = { "key": key, "q": q, "sf": sf, "sd": sd } @property def parameters(self): return self._parameters @property def url(self): return url(**self.parameters) def key(self, key=None): self._parameters["key"] = key return Search(**self._parameters) def query(self, *q): self._parameters["q"] = [str(tag).strip() for tag in q] return Search(**self._parameters) def descending(self): self._parameters["sd"] = "desc" return Search(**self._parameters) def ascending(self): self._parameters["sd"] = "asc" return Search(**self._parameters) def sort_by(self, sf): self._parameters["sf"] = sf return Search(**self._parameters)
from .request import url class Search(object): def __init__(self, key=None, q=[], sf="created_at", sd="desc"): self._parameters = { "key": key, "q": [str(tag).strip() for tag in q if tag], "sf": sf, "sd": sd } @property def parameters(self): return self._parameters @property def url(self): return url(**self.parameters) def key(self, key=None): self._parameters["key"] = key return Search(**self._parameters) def query(self, *q): self._parameters["q"] = [str(tag).strip() for tag in q if tag] return Search(**self._parameters) def descending(self): self._parameters["sd"] = "desc" return Search(**self._parameters) def ascending(self): self._parameters["sd"] = "asc" return Search(**self._parameters) def sort_by(self, sf): self._parameters["sf"] = sf return Search(**self._parameters)
Add check for empty tags
Add check for empty tags
Python
bsd-2-clause
joshua-stone/DerPyBooru
ab4d640923e0e556ba3a9f64cc122c95ba4fc52c
settings.py
settings.py
import os PROJECT_NAME = "lightning-talks" MONGO_DATABASE = 'lightningtalk-dev' VOTING = False ENVIRONMENTS = { "prd": { "hosts": ['104.236.202.196'] } }
import os PROJECT_NAME = "lightning-talks" MONGO_DATABASE = 'lightningtalk' VOTING = False ENVIRONMENTS = { "prd": { "hosts": ['104.236.202.196'] } }
Change DB location to production.
Change DB location to production.
Python
mit
ireapps/lightning-talks,ireapps/lightning-talks,ireapps/lightning-talks,ireapps/lightning-talks
68636bfcf95163e9764860b09a713d59464e3419
conda/linux_dev/get_freecad_version.py
conda/linux_dev/get_freecad_version.py
import sys import os import subprocess import platform platform_dict = {} platform_dict["Darwin"] = "OSX" sys_n_arch = platform.platform() sys_n_arch = sys_n_arch.split("-") system, arch = sys_n_arch[0], sys_n_arch[4] if system in platform_dict: system = platform_dict[system] version_info = subprocess.check_output("freecadcmd --version", shell=True) version_info = version_info.decode("utf-8").split(" ") dev_version = version_info[1] revision = version_info[3] print("FreeCAD_{}-{}-{}-glibc2.12-{}-conda".format(dev_version, revision, system, arch))
import sys import os import subprocess import platform platform_dict = {} platform_dict["Darwin"] = "OSX" sys_n_arch = platform.platform() sys_n_arch = sys_n_arch.split("-") system, arch = sys_n_arch[0], sys_n_arch[4] if system in platform_dict: system = platform_dict[system] version_info = subprocess.check_output("freecadcmd --version", shell=True) version_info = version_info.decode("utf-8").split(" ") dev_version = version_info[1] revision = version_info[3] print("FreeCAD_{}-{}-{}-Conda_glibc2.12-x86_64".format(dev_version, revision, system))
Revert to using current AppImage update info
Revert to using current AppImage update info: https://github.com/FreeCAD/FreeCAD-AppImage/issues/35
Python
lgpl-2.1
FreeCAD/FreeCAD-AppImage,FreeCAD/FreeCAD-AppImage
956ad502766eddbaf3c81672a30e58c814ba8437
test/test_api_classes.py
test/test_api_classes.py
import pytest from jedi import api def make_definitions(): return api.defined_names(""" import sys class C: pass x = C() def f(): pass """) @pytest.mark.parametrize('definition', make_definitions()) def test_basedefinition_type(definition): assert definition.type in ('module', 'class', 'instance', 'function', 'statement', 'import')
import textwrap import pytest from jedi import api def make_definitions(): """ Return a list of definitions for parametrized tests. :rtype: [jedi.api_classes.BaseDefinition] """ source = textwrap.dedent(""" import sys class C: pass x = C() def f(): pass """) definitions = [] definitions += api.defined_names(source) source += textwrap.dedent(""" variable = sys or C or x or f""") lines = source.splitlines() script = api.Script(source, len(lines), len('variable'), None) definitions += script.definition() script2 = api.Script(source, 4, len('class C'), None) definitions += script2.related_names() return definitions @pytest.mark.parametrize('definition', make_definitions()) def test_basedefinition_type(definition): assert definition.type in ('module', 'class', 'instance', 'function', 'statement', 'import')
Make more examples in make_definitions
Make more examples in make_definitions
Python
mit
WoLpH/jedi,jonashaag/jedi,tjwei/jedi,flurischt/jedi,dwillmer/jedi,jonashaag/jedi,mfussenegger/jedi,flurischt/jedi,tjwei/jedi,mfussenegger/jedi,dwillmer/jedi,WoLpH/jedi
d16d3d9b74f4fdfe06b660fa3d5221614beb2eed
mezzanine/core/management.py
mezzanine/core/management.py
from django.conf import settings from django.contrib.auth.models import User from django.contrib.auth import models as auth_app from django.db.models.signals import post_syncdb def create_demo_user(app, created_models, verbosity, **kwargs): if settings.DEBUG and User in created_models: if verbosity >= 2: print "Creating demo User object" User.objects.create_superuser("demo", "[email protected]", "demo") post_syncdb.connect(create_demo_user, sender=auth_app)
from django.conf import settings from django.contrib.auth.models import User from django.contrib.auth import models as auth_app from django.db.models.signals import post_syncdb def create_demo_user(app, created_models, verbosity, **kwargs): if settings.DEBUG and User in created_models and not kwargs.get("interactive"): print print "Creating default account (username: admin / password: default)" print User.objects.create_superuser("admin", "[email protected]", "default") post_syncdb.connect(create_demo_user, sender=auth_app)
Update creation of default user to only run with ``--noinput`` passed to ``syncdb``.
Update creation of default user to only run with ``--noinput`` passed to ``syncdb``.
Python
bsd-2-clause
nikolas/mezzanine,sjuxax/mezzanine,biomassives/mezzanine,dekomote/mezzanine-modeltranslation-backport,saintbird/mezzanine,guibernardino/mezzanine,jjz/mezzanine,christianwgd/mezzanine,gbosh/mezzanine,agepoly/mezzanine,wyzex/mezzanine,vladir/mezzanine,viaregio/mezzanine,sjdines/mezzanine,christianwgd/mezzanine,wbtuomela/mezzanine,agepoly/mezzanine,vladir/mezzanine,batpad/mezzanine,frankier/mezzanine,gbosh/mezzanine,stbarnabas/mezzanine,molokov/mezzanine,douglaskastle/mezzanine,joshcartme/mezzanine,tuxinhang1989/mezzanine,SoLoHiC/mezzanine,promil23/mezzanine,Skytorn86/mezzanine,webounty/mezzanine,dekomote/mezzanine-modeltranslation-backport,frankchin/mezzanine,eino-makitalo/mezzanine,readevalprint/mezzanine,Cajoline/mezzanine,mush42/mezzanine,christianwgd/mezzanine,fusionbox/mezzanine,frankier/mezzanine,adrian-the-git/mezzanine,frankchin/mezzanine,promil23/mezzanine,webounty/mezzanine,Skytorn86/mezzanine,orlenko/sfpirg,adrian-the-git/mezzanine,wrwrwr/mezzanine,stbarnabas/mezzanine,dsanders11/mezzanine,theclanks/mezzanine,gradel/mezzanine,biomassives/mezzanine,damnfine/mezzanine,jerivas/mezzanine,AlexHill/mezzanine,geodesign/mezzanine,orlenko/plei,PegasusWang/mezzanine,stephenmcd/mezzanine,stephenmcd/mezzanine,dovydas/mezzanine,SoLoHiC/mezzanine,fusionbox/mezzanine,ZeroXn/mezzanine,industrydive/mezzanine,vladir/mezzanine,industrydive/mezzanine,nikolas/mezzanine,mush42/mezzanine,dovydas/mezzanine,jjz/mezzanine,sjuxax/mezzanine,wbtuomela/mezzanine,Cicero-Zhao/mezzanine,dsanders11/mezzanine,douglaskastle/mezzanine,spookylukey/mezzanine,PegasusWang/mezzanine,frankier/mezzanine,scarcry/snm-mezzanine,guibernardino/mezzanine,ryneeverett/mezzanine,gradel/mezzanine,saintbird/mezzanine,wbtuomela/mezzanine,dustinrb/mezzanine,Kniyl/mezzanine,AlexHill/mezzanine,nikolas/mezzanine,readevalprint/mezzanine,jjz/mezzanine,jerivas/mezzanine,theclanks/mezzanine,scarcry/snm-mezzanine,Cajoline/mezzanine,Skytorn86/mezzanine,ZeroXn/mezzanine,webounty/mezzanine,tuxinhang1989/mezzanine,molokov/mezzanine,emile2016/mezzanine,industrydive/mezzanine,geodesign/mezzanine,cccs-web/mezzanine,geodesign/mezzanine,damnfine/mezzanine,jerivas/mezzanine,SoLoHiC/mezzanine,viaregio/mezzanine,gbosh/mezzanine,ZeroXn/mezzanine,damnfine/mezzanine,tuxinhang1989/mezzanine,batpad/mezzanine,biomassives/mezzanine,orlenko/sfpirg,joshcartme/mezzanine,molokov/mezzanine,dustinrb/mezzanine,sjdines/mezzanine,theclanks/mezzanine,emile2016/mezzanine,promil23/mezzanine,Kniyl/mezzanine,ryneeverett/mezzanine,ryneeverett/mezzanine,mush42/mezzanine,eino-makitalo/mezzanine,orlenko/plei,wyzex/mezzanine,cccs-web/mezzanine,spookylukey/mezzanine,spookylukey/mezzanine,sjdines/mezzanine,wyzex/mezzanine,wrwrwr/mezzanine,gradel/mezzanine,douglaskastle/mezzanine,orlenko/sfpirg,frankchin/mezzanine,emile2016/mezzanine,orlenko/plei,Kniyl/mezzanine,dovydas/mezzanine,viaregio/mezzanine,adrian-the-git/mezzanine,PegasusWang/mezzanine,saintbird/mezzanine,scarcry/snm-mezzanine,Cicero-Zhao/mezzanine,stephenmcd/mezzanine,eino-makitalo/mezzanine,Cajoline/mezzanine,sjuxax/mezzanine,dsanders11/mezzanine,agepoly/mezzanine,joshcartme/mezzanine,dustinrb/mezzanine,dekomote/mezzanine-modeltranslation-backport,readevalprint/mezzanine
460a2430fbd8832f3fada1a74b754d71a27ac282
mockingjay/matcher.py
mockingjay/matcher.py
import abc import re class Matcher(object): __metaclass__ = abc.ABCMeta @abc.abstractmethod def assert_request_matched(self, request): """ Assert that the request matched the spec in this matcher object. """ class HeaderMatcher(Matcher): """ Matcher for the request's header. :param key: the name of the header :param value: the value of the header """ def __init__(self, key, value): self.key = key self.value = value def assert_request_matched(self, request): assert request.headers.get(self.key) == self.value class BodyMatcher(Matcher): """ Matcher for the request body. :param body: can either be a string or a :class:`_sre.SRE_Pattern`: object """ def __init__(self, body): self.body = body def assert_request_matched(self, request): if isinstance(self.body, re._pattern_type): assert self.body.search(request.body) is not None else: assert request.body == self.body
import abc import re class StringOrPattern(object): """ A decorator object that wraps a string or a regex pattern so that it can be compared against another string either literally or using the pattern. """ def __init__(self, subject): self.subject = subject def __eq__(self, other_str): if isinstance(self.subject, re._pattern_type): return self.subject.search(other_str) is not None else: return self.subject == other_str def __hash__(self): return self.subject.__hash__() class Matcher(object): __metaclass__ = abc.ABCMeta @abc.abstractmethod def assert_request_matched(self, request): """ Assert that the request matched the spec in this matcher object. """ class HeaderMatcher(Matcher): """ Matcher for the request's header. :param key: the name of the header :param value: the value of the header """ def __init__(self, key, value): self.key = key self.value = StringOrPattern(value) def assert_request_matched(self, request): assert request.headers.get(self.key) == self.value class BodyMatcher(Matcher): """ Matcher for the request body. :param body: can either be a string or a :class:`_sre.SRE_Pattern`: object """ def __init__(self, body): self.body = StringOrPattern(body) def assert_request_matched(self, request): assert request.body == self.body
Allow all values to be compared with either literally or with a pattern
Allow all values to be compared with either literally or with a pattern
Python
bsd-3-clause
kevinjqiu/mockingjay
da69fff2d104c9cccd285078c40de05ea46fdb4d
halaqat/urls.py
halaqat/urls.py
"""halaqat URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin from back_office import urls as back_office_url from students import urls as students_url urlpatterns = [ url(r'^back_office/', include(back_office_url)), url(r'^students/', include(students_url)), url(r'^admin/', include(admin.site.urls)), ]
"""halaqat URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/1.8/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: url(r'^$', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home') Including another URLconf 1. Add an import: from blog import urls as blog_urls 2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls)) """ from django.conf.urls import include, url from django.contrib import admin from back_office import urls as back_office_url from students import urls as students_url urlpatterns = [ url(r'^back_office/', include(back_office_url)), url(r'^back-office/students/', include(students_url)), url(r'^admin/', include(admin.site.urls)), ]
Add back-office to student URL
Add back-office to student URL
Python
mit
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
80b50733a01c70058353815f6db7c621e7868a73
docs/source/conf.py
docs/source/conf.py
# vim:fileencoding=utf-8:noet from __future__ import (unicode_literals, division, absolute_import, print_function) import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(os.getcwd())))) sys.path.insert(0, os.path.abspath(os.getcwd())) extensions = ['powerline_autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode'] source_suffix = '.rst' master_doc = 'index' project = 'Powerline' version = 'beta' release = 'beta' exclude_patterns = ['_build'] pygments_style = 'sphinx' html_theme = 'default' html_static_path = ['_static'] html_show_copyright = False on_rtd = os.environ.get('READTHEDOCS', None) == 'True' if not on_rtd: # only import and set the theme if we’re building docs locally try: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] except ImportError: pass
# vim:fileencoding=utf-8:noet from __future__ import (unicode_literals, division, absolute_import, print_function) import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(os.getcwd())))) sys.path.insert(0, os.path.abspath(os.getcwd())) extensions = ['powerline_autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage', 'sphinx.ext.viewcode'] source_suffix = '.rst' master_doc = 'index' project = 'Powerline' version = 'beta' release = 'beta' exclude_patterns = ['_build'] pygments_style = 'sphinx' html_theme = 'default' html_static_path = ['_static'] html_show_copyright = False latex_show_urls = 'footnote' latex_elements = { 'preamble': ''' \\DeclareUnicodeCharacter{22EF}{$\\cdots$} % Dots \\DeclareUnicodeCharacter{2665}{\\ding{170}} % Heart \\DeclareUnicodeCharacter{2746}{\\ding{105}} % Snow \\usepackage{pifont} ''', } on_rtd = os.environ.get('READTHEDOCS', None) == 'True' if not on_rtd: # only import and set the theme if we’re building docs locally try: import sphinx_rtd_theme html_theme = 'sphinx_rtd_theme' html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] except ImportError: pass
Fix latex build: make some unicode characters found in help work
Fix latex build: make some unicode characters found in help work
Python
mit
Liangjianghao/powerline,bartvm/powerline,junix/powerline,wfscheper/powerline,areteix/powerline,EricSB/powerline,blindFS/powerline,dragon788/powerline,dragon788/powerline,prvnkumar/powerline,s0undt3ch/powerline,russellb/powerline,dragon788/powerline,s0undt3ch/powerline,xxxhycl2010/powerline,xfumihiro/powerline,Liangjianghao/powerline,EricSB/powerline,firebitsbr/powerline,IvanAli/powerline,bartvm/powerline,areteix/powerline,seanfisk/powerline,prvnkumar/powerline,bezhermoso/powerline,russellb/powerline,Luffin/powerline,lukw00/powerline,blindFS/powerline,seanfisk/powerline,bartvm/powerline,IvanAli/powerline,Luffin/powerline,areteix/powerline,darac/powerline,firebitsbr/powerline,Liangjianghao/powerline,QuLogic/powerline,lukw00/powerline,xxxhycl2010/powerline,S0lll0s/powerline,russellb/powerline,xfumihiro/powerline,cyrixhero/powerline,kenrachynski/powerline,IvanAli/powerline,xfumihiro/powerline,prvnkumar/powerline,kenrachynski/powerline,S0lll0s/powerline,darac/powerline,DoctorJellyface/powerline,wfscheper/powerline,xxxhycl2010/powerline,cyrixhero/powerline,QuLogic/powerline,QuLogic/powerline,junix/powerline,lukw00/powerline,firebitsbr/powerline,DoctorJellyface/powerline,bezhermoso/powerline,darac/powerline,seanfisk/powerline,EricSB/powerline,blindFS/powerline,wfscheper/powerline,cyrixhero/powerline,bezhermoso/powerline,junix/powerline,Luffin/powerline,DoctorJellyface/powerline,s0undt3ch/powerline,kenrachynski/powerline,S0lll0s/powerline
20929dd2e1ddd0909afc3e25b040bfdcdc2c9b00
src/opencmiss/neon/core/problems/biomeng321lab1.py
src/opencmiss/neon/core/problems/biomeng321lab1.py
''' Copyright 2015 University of Auckland Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import json from opencmiss.neon.core.problems.base import BaseProblem BOUNDARY_CONDITIONS = ['Type 1', 'Type 2', 'Type 3', 'Type 4', 'Type 5'] class Biomeng321Lab1(BaseProblem): def __init__(self): super(Biomeng321Lab1, self).__init__() self.setName('Biomeng321 Lab1') self._boundary_condition = None def setBoundaryCondition(self, boundary_condition): self._boundary_condition = boundary_condition def getBoundaryCondition(self): return self._boundary_condition def serialise(self): d = {} d['boundary_condition'] = self._boundary_condition return json.dumps(d) def deserialise(self, string): d = json.loads(string) self._boundary_condition = d['boundary_condition'] if 'boundary_condition' in d else None def validate(self): return True
''' Copyright 2015 University of Auckland Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import json from opencmiss.neon.core.problems.base import BaseProblem BOUNDARY_CONDITIONS = ['Model 1', 'Model 2', 'Model 3', 'Model 4', 'Model 5'] class Biomeng321Lab1(BaseProblem): def __init__(self): super(Biomeng321Lab1, self).__init__() self.setName('Biomeng321 Lab1') self._boundary_condition = None def setBoundaryCondition(self, boundary_condition): self._boundary_condition = boundary_condition def getBoundaryCondition(self): return self._boundary_condition def serialise(self): d = {} d['boundary_condition'] = self._boundary_condition return json.dumps(d) def deserialise(self, string): d = json.loads(string) self._boundary_condition = d['boundary_condition'] if 'boundary_condition' in d else None def validate(self): return True
Change name of boundary conditions for Biomeng321 Lab1.
Change name of boundary conditions for Biomeng321 Lab1.
Python
apache-2.0
alan-wu/neon
8c551fe51ed142305945c0cef530ac84ed3e7eb9
nodeconductor/logging/perms.py
nodeconductor/logging/perms.py
from nodeconductor.core.permissions import StaffPermissionLogic PERMISSION_LOGICS = ( ('logging.Alert', StaffPermissionLogic(any_permission=True)), ('logging.SystemNotification', StaffPermissionLogic(any_permission=True)), )
from nodeconductor.core.permissions import StaffPermissionLogic PERMISSION_LOGICS = ( ('logging.Alert', StaffPermissionLogic(any_permission=True)), ('logging.WebHook', StaffPermissionLogic(any_permission=True)), ('logging.PushHook', StaffPermissionLogic(any_permission=True)), ('logging.EmailHook', StaffPermissionLogic(any_permission=True)), ('logging.SystemNotification', StaffPermissionLogic(any_permission=True)), )
Allow staff user to manage hooks.
Allow staff user to manage hooks.
Python
mit
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
13df4b7ba5c706e1fddbd17ac9edf3894e9a7206
nymms/tests/test_registry.py
nymms/tests/test_registry.py
import unittest from nymms import registry from nymms.resources import Command, MonitoringGroup from weakref import WeakValueDictionary class TestRegistry(unittest.TestCase): def test_empty_registry(self): self.assertEqual(Command.registry, WeakValueDictionary()) def test_register_object(self): # First test it's empty self.assertEqual(Command.registry, WeakValueDictionary()) # Add a command command = Command('test_command', '/bin/true') # verify that there is only a single command in the registry self.assertEqual(len(Command.registry), 1) # Verify that the registered command is the same as command self.assertIs(Command.registry[command.name], command) def test_duplicate_register(self): # add a command command = Command('test_command', '/bin/true') with self.assertRaises(registry.DuplicateEntryError): Command('test_command', '/bin/true') def test_invalid_resource_register(self): with self.assertRaises(TypeError): Command.registry['test'] = MonitoringGroup('test_group')
import unittest from nymms import registry from nymms.resources import Command, MonitoringGroup from weakref import WeakValueDictionary class TestRegistry(unittest.TestCase): def tearDown(self): # Ensure we have a fresh registry after every test Command.registry.clear() def test_empty_registry(self): self.assertEqual(Command.registry, WeakValueDictionary()) def test_register_object(self): # First test it's empty self.assertEqual(Command.registry, WeakValueDictionary()) # Add a command command = Command('test_command', '/bin/true') # verify that there is only a single command in the registry self.assertEqual(len(Command.registry), 1) # Verify that the registered command is the same as command self.assertIs(Command.registry[command.name], command) def test_duplicate_register(self): # add a command command = Command('test_command', '/bin/true') with self.assertRaises(registry.DuplicateEntryError): Command('test_command', '/bin/true') def test_invalid_resource_register(self): with self.assertRaises(TypeError): Command.registry['test'] = MonitoringGroup('test_group')
Clear registry between each test
Clear registry between each test
Python
bsd-2-clause
cloudtools/nymms
6f50381e2e14ab7c1c90e52479ffcfc7748329b3
UI/resources/constants.py
UI/resources/constants.py
# -*- coding: utf-8 -*- SAVE_PASSWORD_HASHED = True MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER = 3 MAX_RETRIES_UPLOAD_TO_SAME_FARMER = 3 MAX_RETRIES_NEGOTIATE_CONTRACT = 10 MAX_RETRIES_GET_FILE_POINTERS = 10 FILE_POINTERS_REQUEST_DELAY = 1 # int: file pointers request delay, in seconds. MAX_DOWNLOAD_REQUEST_BLOCK_SIZE = 32 * 1024 MAX_UPLOAD_REQUEST_BLOCK_SIZE = 4096 MAX_UPLOAD_CONNECTIONS_AT_SAME_TIME = 4 MAX_DOWNLOAD_CONNECTIONS_AT_SAME_TIME = 4 DEFAULT_MAX_BRIDGE_REQUEST_TIMEOUT = 5 # int: maximum bridge request timeout, in seconds. DEFAULT_BRIDGE_API_URL = 'api.storj.io'
# -*- coding: utf-8 -*- SAVE_PASSWORD_HASHED = True MAX_RETRIES_DOWNLOAD_FROM_SAME_FARMER = 3 MAX_RETRIES_UPLOAD_TO_SAME_FARMER = 3 MAX_RETRIES_NEGOTIATE_CONTRACT = 10 MAX_RETRIES_GET_FILE_POINTERS = 10 FILE_POINTERS_REQUEST_DELAY = 1 # int: file pointers request delay, in seconds. MAX_DOWNLOAD_REQUEST_BLOCK_SIZE = 32 * 1024 MAX_UPLOAD_REQUEST_BLOCK_SIZE = 4096 MAX_UPLOAD_CONNECTIONS_AT_SAME_TIME = 4 MAX_DOWNLOAD_CONNECTIONS_AT_SAME_TIME = 4 DEFAULT_MAX_BRIDGE_REQUEST_TIMEOUT = 5 DEFAULT_MAX_FARMER_CONNECTION_TIMEOUT = 7 # int: maximum bridge request timeout, in seconds. DEFAULT_BRIDGE_API_URL = 'api.storj.io'
Add farmer max timeout constant
Add farmer max timeout constant
Python
mit
lakewik/storj-gui-client
be9d58ffcf23e4fb47d2c09e869368ab9ec738c9
localore/localore/embeds.py
localore/localore/embeds.py
from urllib.parse import urlparse from django.conf import settings from wagtail.wagtailembeds.finders.embedly import embedly from wagtail.wagtailembeds.finders.oembed import oembed def get_default_finder(): if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'): return embedly return oembed def finder(url, max_width=None): domain = urlparse(url).netloc # work around Embedly missing embedding HTML for Twitter and Instagram URLs if domain.endswith(( 'instagram.com', 'twitter.com', )): return oembed(url, max_width) embed_dict = get_default_finder()(url, max_width) if domain.endswith('soundcloud.com'): embed_dict['html'] = ( embed_dict['html'] .replace('visual%3Dtrue', 'visual%3Dfalse') .replace('width="500"', 'width="100%"') .replace('height="500"', 'height="166"') ) embed_dict['width'] = '100%' embed_dict['height'] = '166' return embed_dict
from urllib.parse import urlparse from django.conf import settings from wagtail.wagtailembeds.finders.embedly import embedly from wagtail.wagtailembeds.finders.oembed import oembed def get_default_finder(): if hasattr(settings, 'WAGTAILEMBEDS_EMBEDLY_KEY'): return embedly return oembed def finder(url, max_width=None): domain = urlparse(url).netloc # work around Embedly missing embedding HTML for Twitter and Instagram URLs if domain.endswith(( 'instagram.com', 'twitter.com', )): return oembed(url, max_width) embed_dict = get_default_finder()(url, max_width) if domain.endswith('soundcloud.com'): embed_dict['html'] = ( embed_dict['html'] .replace('visual%3Dtrue', 'visual%3Dfalse') .replace('width="%s"' % embed_dict['width'], 'width="100%"') .replace('height="%s"' % embed_dict['height'], 'height="166"') ) embed_dict['width'] = None embed_dict['height'] = 166 return embed_dict
Fix SoundCloud embed width/height replacement.
Fix SoundCloud embed width/height replacement. SoundCloud embeds aren't always 500x500. Also, don't set the "width" embed dict key to '100%': "width"/"height" keys expect integers only.
Python
mpl-2.0
ghostwords/localore,ghostwords/localore,ghostwords/localore
9ca260d508a8d7ed742251cc7f80541fbd32882f
mpld3/test_plots/test_tickformat_str_method.py
mpld3/test_plots/test_tickformat_str_method.py
import matplotlib import matplotlib.pyplot as plt import mpld3 from mpld3 import plugins def create_plot(): fig, ax = plt.subplots() ax.plot([1, 3], [1, 2]) fmtr = matplotlib.ticker.StrMethodFormatter("{x:.2f}") ax.xaxis.set_major_formatter(fmtr) ax.set_title('Tickformat str method test', size=14) return fig def test_date(): fig = create_plot() _ = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
import matplotlib import matplotlib.pyplot as plt import mpld3 from mpld3 import plugins def create_plot(): fig, ax = plt.subplots() ax.plot([1, 3], [1, 2]) fmtr = matplotlib.ticker.StrMethodFormatter("{x:.2f} :)") ax.xaxis.set_major_formatter(fmtr) ax.set_title('Tickformat str method test', size=14) return fig def test_date(): fig = create_plot() _ = mpld3.fig_to_html(fig) plt.close(fig) if __name__ == "__main__": mpld3.show(create_plot())
Clarify purpose of test by...adding a smiley!
Clarify purpose of test by...adding a smiley!
Python
bsd-3-clause
mpld3/mpld3,jakevdp/mpld3,jakevdp/mpld3,mpld3/mpld3
8184354179bf6cf88304ebd743b2236258e46522
unicornclient/routine.py
unicornclient/routine.py
import threading import queue class Routine(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.queue = queue.Queue() self.manager = None self.no_wait = False self.is_stopping = False self.sleeper = threading.Event() def run(self): while True: got_task = False data = None if self.no_wait: try: data = self.queue.get_nowait() got_task = True except queue.Empty: data = None got_task = False else: data = self.queue.get() got_task = True if data: index = 'routine_command' routine_command = data[index] if index in data else None if routine_command == 'stop': self.is_stopping = True self.process(data) if got_task: self.queue.task_done() if self.is_stopping: break def process(self, data): pass def sleep(self, seconds): while not self.sleeper.is_set(): self.sleeper.wait(timeout=seconds) self.sleeper.clear() def stop_signal(self): while not self.queue.empty(): try: self.queue.get_nowait() except queue.Empty: continue self.queue.task_done() self.queue.put({'routine_command': 'stop'}) self.sleeper.set()
import threading import queue class Routine(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.queue = queue.Queue() self.manager = None self.no_wait = False self.is_stopping = False self.sleeper = threading.Event() def run(self): while True: got_task = False data = None if self.no_wait: try: data = self.queue.get_nowait() got_task = True except queue.Empty: data = None got_task = False else: data = self.queue.get() got_task = True if data: index = 'routine_command' routine_command = data[index] if index in data else None if routine_command == 'stop': self.is_stopping = True self.process(data) if got_task: self.queue.task_done() if self.is_stopping: break def process(self, data): pass def sleep(self, seconds): if self.is_stopping: return while not self.sleeper.is_set(): self.sleeper.wait(timeout=seconds) self.sleeper.clear() def stop_signal(self): while not self.queue.empty(): try: self.queue.get_nowait() except queue.Empty: continue self.queue.task_done() self.queue.put({'routine_command': 'stop'}) self.sleeper.set()
Disable sleep function when stopping
Disable sleep function when stopping
Python
mit
amm0nite/unicornclient,amm0nite/unicornclient
d260eefb8dc8ca8bc71c548c1389853e49eafd28
scripts/manage_db.py
scripts/manage_db.py
"""Database management and migration functionality.""" # pylint: disable=no-name-in-module,import-error from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand # pylint: enable=no-name-in-module,import-error from flask_forecaster.flask_app import app, db migrate = Migrate(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) if __name__ == '__main__': manager.run()
"""Database management and migration functionality.""" import logging import sys # pylint: disable=no-name-in-module,import-error from flask.ext.script import Manager from flask.ext.migrate import Migrate, MigrateCommand # pylint: enable=no-name-in-module,import-error from flask_forecaster.flask_app import app, db logging.basicConfig( datefmt='%Y/%m/%d %H.%M.%S', format='%(levelname)s : %(name)s : %(message)s', level=logging.DEBUG, stream=sys.stdout, ) logger = logging.getLogger('manage_db') migrate = Migrate(app, db) manager = Manager(app) manager.add_command('db', MigrateCommand) if __name__ == '__main__': logger.info('managing the database') manager.run()
Add logging to database management script
Add logging to database management script
Python
isc
textbook/flask-forecaster,textbook/flask-forecaster
42db9ceae490152040651a23d397e7ad4c950712
flask/flask/tests/test_template.py
flask/flask/tests/test_template.py
from flask import Flask, render_template_string import jinja2 def test_undefined_variable__no_error(): app = Flask(__name__) assert issubclass(app.jinja_env.undefined, jinja2.Undefined) @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') # http://jinja.pocoo.org/docs/2.10/templates/#variables # If a variable or attribute does not exist, you will get back an undefined # value. What you can do with that kind of value depends on the application # configuration: the default behavior is to evaluate to an empty string if # printed or iterated over, and to fail for every other operation. assert resp.data == 'foo = []' def test_undefined_variable__strict__raise_error(capsys): app = Flask(__name__) # http://jinja.pocoo.org/docs/2.10/api/#undefined-types # The closest to regular Python behavior is the StrictUndefined which # disallows all operations beside testing if it’s an undefined object. app.jinja_env.undefined = jinja2.StrictUndefined @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') assert resp.status_code == 500 assert "UndefinedError: 'bar' is undefined" in capsys.readouterr()[1]
# -*- coding: utf-8 -*- from flask import Flask, render_template_string import jinja2 def test_undefined_variable__no_error(): app = Flask(__name__) assert issubclass(app.jinja_env.undefined, jinja2.Undefined) @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') # http://jinja.pocoo.org/docs/2.10/templates/#variables # If a variable or attribute does not exist, you will get back an undefined # value. What you can do with that kind of value depends on the application # configuration: the default behavior is to evaluate to an empty string if # printed or iterated over, and to fail for every other operation. assert resp.data == 'foo = []' def test_undefined_variable__strict__raise_error(capsys): app = Flask(__name__) # http://jinja.pocoo.org/docs/2.10/api/#undefined-types # The closest to regular Python behavior is the StrictUndefined which # disallows all operations beside testing if it’s an undefined object. app.jinja_env.undefined = jinja2.StrictUndefined @app.route('/') def endpoint(): return render_template_string('foo = [{{bar}}]', foo='blabla') resp = app.test_client().get('/') assert resp.status_code == 500 assert "UndefinedError: 'bar' is undefined" in capsys.readouterr()[1]
Fix source code encoding error
[flask] Fix source code encoding error
Python
mit
imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning,imsardine/learning
6838e21d03060d23eefaaf4336214d04d98afe96
install.py
install.py
#!/usr/bin/env python import os cdir = os.path.dirname(os.path.abspath(__file__)) sourcedir = os.path.join(cdir, 'src') targetdir = '~' for dotfile in os.listdir(sourcedir): source = os.path.join(sourcedir, dotfile) target = os.path.join(targetdir, dotfile) ##TODO## :: Look at why os.symlink doesn't like relative paths os.system("ln -s %s %s" % (source, target))
#!/usr/bin/env python import os cdir = os.path.dirname(os.path.abspath(__file__)) sourcedir = os.path.join(cdir, 'src') targetdir = os.path.expanduser('~') for dotfile in os.listdir(sourcedir): source = os.path.join(sourcedir, dotfile) target = os.path.join(targetdir, dotfile) ##TODO## :: Look at why os.symlink doesn't like relative paths os.system("ln -s %s %s" % (source, target))
Make home directory selection more portable
Make home directory selection more portable
Python
mit
jaylynch/dotfiles,jaylynch/dotfiles
49c60d069da48cd83939a4e42e933e9a28e21dd2
tests/cupy_tests/cuda_tests/test_nccl.py
tests/cupy_tests/cuda_tests/test_nccl.py
import unittest from cupy import cuda from cupy.testing import attr @unittest.skipUnless(cuda.nccl_enabled, 'nccl is not installed') class TestNCCL(unittest.TestCase): @attr.gpu def test_single_proc_ring(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) assert 0 == comm.rank_id() comm.destroy() @attr.gpu @unittest.skipUnless(cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_abort(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.abort() @attr.gpu @unittest.skipUnless(cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_check_async_error(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.check_async_error() comm.destroy()
import unittest from cupy import cuda from cupy.testing import attr @unittest.skipUnless(cuda.nccl_enabled, 'nccl is not installed') class TestNCCL(unittest.TestCase): @attr.gpu def test_single_proc_ring(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) assert 0 == comm.rank_id() comm.destroy() @attr.gpu @unittest.skipUnless(cuda.nccl_enabled and cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_abort(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.abort() @attr.gpu @unittest.skipUnless(cuda.nccl_enabled and cuda.nccl.get_version() >= 2400, "Using old NCCL") def test_check_async_error(self): id = cuda.nccl.get_unique_id() comm = cuda.nccl.NcclCommunicator(1, id, 0) comm.check_async_error() comm.destroy()
Check NCCL existence in test decorators
Check NCCL existence in test decorators
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
5ca84f89d08ab4b31c47753ce74129ce06f8ed3a
apps/bluebottle_utils/models.py
apps/bluebottle_utils/models.py
from django.db import models from django_countries import CountryField class Address(models.Model): """ A postal address. """ address_line1 = models.CharField(max_length=100, blank=True) address_line2 = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=100, blank=True) state = models.CharField(max_length=100, blank=True) country = CountryField() zip_code = models.CharField(max_length=20, blank=True) class Meta: abstract = True
from django.db import models from django_countries import CountryField class Address(models.Model): """ A postal address. """ address_line1 = models.CharField(max_length=100, blank=True) address_line2 = models.CharField(max_length=100, blank=True) city = models.CharField(max_length=100, blank=True) state = models.CharField(max_length=100, blank=True) country = CountryField() zip_code = models.CharField(max_length=20, blank=True) def __unicode__(self): return self.address_line1[:80] class Meta: abstract = True
Add a __unicode__ method to the Address model in utils.
Add a __unicode__ method to the Address model in utils.
Python
bsd-3-clause
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
a9cebe11642b41a8c0b277e09bf273b52dbb63f9
apps/careeropportunity/views.py
apps/careeropportunity/views.py
# -*- coding: utf-8 -*- from django.shortcuts import render from django.utils import timezone # API v1 from rest_framework import mixins, viewsets from rest_framework.permissions import AllowAny from apps.careeropportunity.models import CareerOpportunity from apps.careeropportunity.serializers import CareerSerializer def index(request, id=None): return render(request, 'careeropportunity/index.html') class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin): """ Viewset for Career serializer """ queryset = CareerOpportunity.objects.filter( start__lte=timezone.now(), end__gte=timezone.now() ).order_by('-featured', '-start') serializer_class = CareerSerializer permission_classes = (AllowAny,)
# -*- coding: utf-8 -*- from django.shortcuts import render from django.utils import timezone # API v1 from rest_framework import mixins, viewsets from rest_framework.permissions import AllowAny from rest_framework.pagination import PageNumberPagination from apps.careeropportunity.models import CareerOpportunity from apps.careeropportunity.serializers import CareerSerializer def index(request, id=None): return render(request, 'careeropportunity/index.html') class HundredItemsPaginator(PageNumberPagination): page_size = 100 class CareerViewSet(viewsets.GenericViewSet, mixins.RetrieveModelMixin, mixins.ListModelMixin): """ Viewset for Career serializer """ queryset = CareerOpportunity.objects.filter( start__lte=timezone.now(), end__gte=timezone.now() ).order_by('-featured', '-start') serializer_class = CareerSerializer permission_classes = (AllowAny,) pagination_class = HundredItemsPaginator
Increase pagination size for careeropportunity api
Increase pagination size for careeropportunity api
Python
mit
dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4
453b6a8697b066174802257156ac364aed2c650a
emission/storage/timeseries/aggregate_timeseries.py
emission/storage/timeseries/aggregate_timeseries.py
import logging import pandas as pd import pymongo import emission.core.get_database as edb import emission.storage.timeseries.builtin_timeseries as bits class AggregateTimeSeries(bits.BuiltinTimeSeries): def __init__(self): super(AggregateTimeSeries, self).__init__(None) self.user_query = {}
import logging import pandas as pd import pymongo import emission.core.get_database as edb import emission.storage.timeseries.builtin_timeseries as bits class AggregateTimeSeries(bits.BuiltinTimeSeries): def __init__(self): super(AggregateTimeSeries, self).__init__(None) self.user_query = {} def _get_sort_key(self, time_query = None): return None
Implement a sort key method for the aggregate timeseries
Implement a sort key method for the aggregate timeseries. This should return null because we want to mix up the identifying information from the timeseries and sorting will re-impose some order. Also sorting takes too much time!
Python
bsd-3-clause
shankari/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server
6577b521ac8fd0f1c9007f819dc0c7ee27ef4955
numba/typesystem/tests/test_type_properties.py
numba/typesystem/tests/test_type_properties.py
from numba.typesystem import * assert int_.is_int assert int_.is_numeric assert long_.is_int assert long_.is_numeric assert not long_.is_long assert float_.is_float assert float_.is_numeric assert double.is_float assert double.is_numeric assert not double.is_double assert object_.is_object assert list_.is_list assert list_.is_object assert list_type(int_, 2).is_list assert list_type(int_, 2).is_object assert function(void, [double]).is_function
from numba.typesystem import * assert int_.is_int assert int_.is_numeric assert long_.is_int assert long_.is_numeric assert not long_.is_long assert float_.is_float assert float_.is_numeric assert double.is_float assert double.is_numeric assert not double.is_double assert object_.is_object assert list_(int_, 2).is_list assert list_(int_, 2).is_object assert function(void, [double]).is_function
Update test for rename of list type
Update test for rename of list type
Python
bsd-2-clause
gdementen/numba,GaZ3ll3/numba,stuartarchibald/numba,pitrou/numba,jriehl/numba,stefanseefeld/numba,ssarangi/numba,sklam/numba,IntelLabs/numba,gdementen/numba,jriehl/numba,stuartarchibald/numba,GaZ3ll3/numba,GaZ3ll3/numba,seibert/numba,numba/numba,pombredanne/numba,jriehl/numba,pitrou/numba,cpcloud/numba,gmarkall/numba,stefanseefeld/numba,pitrou/numba,gmarkall/numba,pitrou/numba,sklam/numba,pombredanne/numba,ssarangi/numba,jriehl/numba,gdementen/numba,pombredanne/numba,jriehl/numba,sklam/numba,cpcloud/numba,sklam/numba,numba/numba,gmarkall/numba,gdementen/numba,numba/numba,numba/numba,stonebig/numba,GaZ3ll3/numba,cpcloud/numba,IntelLabs/numba,GaZ3ll3/numba,ssarangi/numba,seibert/numba,gdementen/numba,sklam/numba,seibert/numba,pombredanne/numba,pitrou/numba,seibert/numba,stuartarchibald/numba,stonebig/numba,gmarkall/numba,seibert/numba,ssarangi/numba,stuartarchibald/numba,cpcloud/numba,cpcloud/numba,stefanseefeld/numba,stuartarchibald/numba,gmarkall/numba,IntelLabs/numba,stefanseefeld/numba,stonebig/numba,stonebig/numba,numba/numba,pombredanne/numba,stefanseefeld/numba,ssarangi/numba,stonebig/numba,IntelLabs/numba,IntelLabs/numba
3d435421fd3680d4b5e84a4ca69e4d294c3e01e0
example/__init__.py
example/__init__.py
from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example' name = 'Example Legislature' url = 'http://example.com' terms = [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }] provides = ['people'], parties = [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ] session_details = { '2013': {'_scraped_name': '2013'} } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
from pupa.scrape import Jurisdiction from .people import PersonScraper class Example(Jurisdiction): jurisdiction_id = 'ocd-jurisdiction/country:us/state:ex/place:example' name = 'Example Legislature' url = 'http://example.com' terms = [{ 'name': '2013-2014', 'sessions': ['2013'], 'start_year': 2013, 'end_year': 2014 }] provides = ['people'] parties = [ {'name': 'Independent' }, {'name': 'Green' }, {'name': 'Bull-Moose'} ] session_details = { '2013': {'_scraped_name': '2013'} } def get_scraper(self, term, session, scraper_type): if scraper_type == 'people': return PersonScraper def scrape_session_list(self): return ['2013']
Remove trailing comma from example
Remove trailing comma from example
Python
bsd-3-clause
datamade/pupa,rshorey/pupa,influence-usa/pupa,mileswwatkins/pupa,influence-usa/pupa,opencivicdata/pupa,rshorey/pupa,opencivicdata/pupa,mileswwatkins/pupa,datamade/pupa
c9f5bee80dfb0523050afc6cb72eea096a2e3b95
ir/util.py
ir/util.py
import os import stat import time def updateModificationTime(path): accessTime = os.stat(path)[stat.ST_ATIME] modificationTime = time.time() os.utime(path, (accessTime, modificationTime))
import os import stat import time from PyQt4.QtCore import SIGNAL from PyQt4.QtGui import QAction, QKeySequence, QMenu, QShortcut from aqt import mw def addMenu(name): if not hasattr(mw, 'customMenus'): mw.customMenus = {} if name not in mw.customMenus: menu = QMenu('&' + name, mw) mw.customMenus[name] = menu mw.form.menubar.insertMenu(mw.form.menuTools.menuAction(), mw.customMenus[name]) def addMenuItem(menuName, text, function, keys=None): action = QAction(text, mw) if keys: action.setShortcut(QKeySequence(keys)) mw.connect(action, SIGNAL('triggered()'), function) if menuName == 'File': mw.form.menuCol.addAction(action) elif menuName == 'Edit': mw.form.menuEdit.addAction(action) elif menuName == 'Tools': mw.form.menuTools.addAction(action) elif menuName == 'Help': mw.form.menuHelp.addAction(action) else: addMenu(menuName) mw.customMenus[menuName].addAction(action) def addShortcut(function, keys): shortcut = QShortcut(QKeySequence(keys), mw) mw.connect(shortcut, SIGNAL('activated()'), function) def updateModificationTime(path): accessTime = os.stat(path)[stat.ST_ATIME] modificationTime = time.time() os.utime(path, (accessTime, modificationTime))
Add helper functions for adding menu items & shortcuts
Add helper functions for adding menu items & shortcuts
Python
isc
luoliyan/incremental-reading-for-anki,luoliyan/incremental-reading-for-anki
eff7f0bf52507013859788eec29eea819af6ce63
grow/preprocessors/routes_cache.py
grow/preprocessors/routes_cache.py
from . import base class RoutesCachePreprocessor(base.BasePreprocessor): KIND = '_routes_cache' def __init__(self, pod): self.pod = pod def run(self, build=True): self.pod.routes.reset_cache(rebuild=True) def list_watched_dirs(self): return ['/content/', '/static/']
import datetime from . import base class RoutesCachePreprocessor(base.BasePreprocessor): KIND = '_routes_cache' LIMIT = datetime.timedelta(seconds=1) def __init__(self, pod): self.pod = pod self._last_run = None def run(self, build=True): # Avoid rebuilding routes cache more than once per second. now = datetime.datetime.now() limit = RoutesCachePreprocessor.LIMIT if not self._last_run or (now - self._last_run) > limit: self.pod.routes.reset_cache(rebuild=True, inject=False) self._last_run = now def list_watched_dirs(self): return ['/content/']
Implement ratelimit on routes cache.
Implement ratelimit on routes cache.
Python
mit
denmojo/pygrow,grow/pygrow,grow/grow,denmojo/pygrow,denmojo/pygrow,grow/pygrow,grow/grow,grow/grow,grow/grow,grow/pygrow,denmojo/pygrow
e2479e3f8748fbfa34c89ecda7d2f3e72e94fa57
pydata/urls.py
pydata/urls.py
from django.conf.urls import url, include from . import views urlpatterns = [ url(r'^events/import/?$', views.ConferenceImport.as_view(), name='event_import'), url(r'^persons/import/?$', views.PersonImport.as_view(), name='person_import'), url(r'^tasks/import/?$', views.TaskImport.as_view(), name='task_import'), url(r'^sponsorships/import/?$', views.SponsorshipImport.as_view(), name='sponsorship_import'), url(r'^bulk-import/?', include([ url(r'^$', views.BulkImportEventSelect.as_view(), name='bulk_import_select'), url(r'^(?P<slug>[\w-]+)/person/?$', views.PersonBulkImport.as_view(), name='bulk_import_person'), url(r'^(?P<slug>[\w-]+)/task/?$', views.TaskBulkImport.as_view(), name='bulk_import_task'), url(r'^(?P<slug>[\w-]+)/sponsorship/?$', views.SponsorshipBulkImport.as_view(), name='bulk_import_sponsorship'), ])), ]
from django.conf.urls import url, include from . import views urlpatterns = [ url(r'^events/import/?$', views.ConferenceImport.as_view(), name='event_import'), url(r'^persons/import/?$', views.PersonImport.as_view(), name='person_import'), url(r'^tasks/import/?$', views.TaskImport.as_view(), name='task_import'), url(r'^sponsorships/import/?$', views.SponsorshipImport.as_view(), name='sponsorship_import'), url(r'^bulk-import/', include([ url(r'^$', views.BulkImportEventSelect.as_view(), name='bulk_import_select'), url(r'^(?P<slug>[\w-]+)/person/?$', views.PersonBulkImport.as_view(), name='bulk_import_person'), url(r'^(?P<slug>[\w-]+)/task/?$', views.TaskBulkImport.as_view(), name='bulk_import_task'), url(r'^(?P<slug>[\w-]+)/sponsorship/?$', views.SponsorshipBulkImport.as_view(), name='bulk_import_sponsorship'), ])), ]
Fix malformed URLs in bulk import
Fix malformed URLs in bulk import
Python
mit
swcarpentry/amy,vahtras/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,swcarpentry/amy,vahtras/amy,swcarpentry/amy,vahtras/amy,pbanaszkiewicz/amy
38d16da934503a964ae5e16aafd65c0642970472
pysocialids.py
pysocialids.py
# # define overloading of ids for each social site # to be customized for your accounts # # # flickr # def flickr_api_secret(): return "" def flickr_api_key(): return "" def flickr_user_id(): return "" # # twitter # def twitter_consumer_key(): return "" def twitter_consumer_secret(): return "" def twitter_access_token(): return "" def twitter_access_token_secret(): return "" def twitter_screenname(): return "" # # tumblr # def tumblr_consumer_key(): return "" def tumblr_secret_key(): return "" def tumblr_access_token(): return "" def tumblr_access_token_secret(): return "" def tumblr_userid(): return ""
# # define overloading of ids for each social site # to be customized for your accounts # # # flickr # def flickr_api_secret(): return "" def flickr_api_key(): return "" def flickr_user_id(): return "" # # twitter # def twitter_consumer_key(): return "" def twitter_consumer_secret(): return "" def twitter_access_token(): return "" def twitter_access_token_secret(): return "" def twitter_screenname(): return "" # # tumblr # def tumblr_consumer_key(): return "" def tumblr_secret_key(): return "" def tumblr_access_token(): return "" def tumblr_access_token_secret(): return "" def tumblr_userid(): return "" # # faa # def faa_username(): return "" def faa_password(): return "" def faa_profile(): return "" # # wordpress # def wordpress_blogid(): return ""
Complete social ids for wordpress and faa
Complete social ids for wordpress and faa
Python
mit
JulienLeonard/socialstats
4e1b76db16658a01d3f8cf99f8b5d58e63b5e343
project_generator/builders/builder.py
project_generator/builders/builder.py
# Copyright 2014 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import logging

class Builder:
    """ Template to be subclassed """

    def build_project(self, project, project_path):
        raise NotImplementedError

    def build(self, projects_path, project_list, env_settings, root):
        # Loop through each of the projects and build them.
        logging.debug("Building projects.")
        for i, project_name in enumerate(project_list):
            logging.debug("Building project %i of %i: %s" % (i + 1, len(project_list), project_name))
            self.build_project(project_name, projects_path[i], env_settings, root)
# Copyright 2014 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import logging

class Builder:
    """ Template to be subclassed """

    def build_project(self, project, project_path):
        raise NotImplementedError

    def build(self, projects_path, project_list, env_settings, root):
        # Loop through each of the projects and build them.
        logging.debug("Building projects.")
        for i, project_name in enumerate(project_list):
            logging.debug("Building project %i of %i: %s" % (i + 1, len(project_list), project_name))
            self.build_project(project_name[0], projects_path[i], env_settings, root)
Fix - the project_path is the first member of tuple
Fix - the project_path is the first member of tuple
Python
apache-2.0
project-generator/project_generator,sarahmarshy/project_generator,0xc0170/project_generator,hwfwgrp/project_generator,ohagendorf/project_generator,sg-/project_generator,sg-/project_generator,molejar/project_generator
b555137fa7c7e84353daa1d12e29ba636bb9fd77
post_office/test_settings.py
post_office/test_settings.py
# -*- coding: utf-8 -*-
INSTALLED_APPS = ['post_office']

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    },
}

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
        'LOCATION': '127.0.0.1:11211',
        'TIMEOUT': 36000,
        'KEY_PREFIX': 'stamps:',
    },
    'post_office': {
        'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache',
        'LOCATION': '127.0.0.1:11211',
    }
}
# -*- coding: utf-8 -*-
INSTALLED_APPS = ['post_office']

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    },
}

CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'TIMEOUT': 36000,
        'KEY_PREFIX': 'post-office',
    },
    'post_office': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'TIMEOUT': 36000,
        'KEY_PREFIX': 'post-office',
    }
}
Use locmem cache for tests.
Use locmem cache for tests.
Python
mit
JostCrow/django-post_office,ekohl/django-post_office,CasherWest/django-post_office,ui/django-post_office,carrerasrodrigo/django-post_office,CasherWest/django-post_office,fapelhanz/django-post_office,jrief/django-post_office,RafRaf/django-post_office,LeGast00n/django-post_office,yprez/django-post_office,ui/django-post_office
f712a03fea451b846e6f8b3e33a685dc5794f923
framework/transactions/commands.py
framework/transactions/commands.py
# -*- coding: utf-8 -*-

import logging

from framework.mongo import database as proxy_database
from website import settings as osfsettings

logger = logging.getLogger(__name__)

def begin(database=None):
    database = database or proxy_database
    database.command('beginTransaction')

def rollback(database=None):
    database = database or proxy_database
    database.command('rollbackTransaction')

def commit(database=None):
    database = database or proxy_database
    database.command('commitTransaction')

def show_live(database=None):
    database = database or proxy_database
    return database.command('showLiveTransactions')

def disconnect(database=None):
    database = database or proxy_database
    try:
        database.connection.close()
    except AttributeError:
        if not osfsettings.DEBUG_MODE:
            logger.error('MongoDB client not attached to request.')
# -*- coding: utf-8 -*-

import contextlib
import logging

from framework.mongo import database as proxy_database
from website import settings as osfsettings

logger = logging.getLogger(__name__)

@contextlib.contextmanager
def handle_missing_client():
    try:
        yield
    except AttributeError:
        if not osfsettings.DEBUG_MODE:
            logger.error('MongoDB client not attached to request.')

def begin(database=None):
    database = database or proxy_database
    with handle_missing_client():
        database.command('beginTransaction')

def rollback(database=None):
    database = database or proxy_database
    with handle_missing_client():
        database.command('rollbackTransaction')

def commit(database=None):
    database = database or proxy_database
    with handle_missing_client():
        database.command('commitTransaction')

def show_live(database=None):
    database = database or proxy_database
    with handle_missing_client():
        return database.command('showLiveTransactions')

def disconnect(database=None):
    database = database or proxy_database
    with handle_missing_client():
        database.connection.close()
Handle when database is None
Handle when database is None
Python
apache-2.0
baylee-d/osf.io,baylee-d/osf.io,mluo613/osf.io,hmoco/osf.io,caneruguz/osf.io,Nesiehr/osf.io,hmoco/osf.io,alexschiller/osf.io,chrisseto/osf.io,leb2dg/osf.io,saradbowman/osf.io,mattclark/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,felliott/osf.io,aaxelb/osf.io,pattisdr/osf.io,TomBaxter/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,felliott/osf.io,chrisseto/osf.io,chennan47/osf.io,aaxelb/osf.io,Nesiehr/osf.io,sloria/osf.io,binoculars/osf.io,laurenrevere/osf.io,mfraezz/osf.io,acshi/osf.io,acshi/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,erinspace/osf.io,alexschiller/osf.io,chrisseto/osf.io,saradbowman/osf.io,icereval/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,aaxelb/osf.io,mluo613/osf.io,cwisecarver/osf.io,felliott/osf.io,alexschiller/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,icereval/osf.io,adlius/osf.io,mfraezz/osf.io,pattisdr/osf.io,crcresearch/osf.io,mattclark/osf.io,acshi/osf.io,laurenrevere/osf.io,monikagrabowska/osf.io,adlius/osf.io,Nesiehr/osf.io,leb2dg/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,felliott/osf.io,cwisecarver/osf.io,chennan47/osf.io,TomBaxter/osf.io,caseyrollins/osf.io,cslzchen/osf.io,mluo613/osf.io,acshi/osf.io,binoculars/osf.io,monikagrabowska/osf.io,mattclark/osf.io,caseyrollins/osf.io,alexschiller/osf.io,adlius/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,binoculars/osf.io,laurenrevere/osf.io,chennan47/osf.io,hmoco/osf.io,monikagrabowska/osf.io,sloria/osf.io,HalcyonChimera/osf.io,erinspace/osf.io,cwisecarver/osf.io,adlius/osf.io,cslzchen/osf.io,hmoco/osf.io,crcresearch/osf.io,CenterForOpenScience/osf.io,sloria/osf.io,Johnetordoff/osf.io,acshi/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,mluo613/osf.io,caneruguz/osf.io,Nesiehr/osf.io,pattisdr/osf.io,icereval/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,baylee-d/osf.io,brianjgeiger/osf.io
96856fc267ec99de6e83a997346c853dbdb1cfd5
reddit_adzerk/lib/validator.py
reddit_adzerk/lib/validator.py
import re

from r2.lib.errors import errors
from r2.lib.validator import (
    VMultiByPath,
    Validator,
)
from r2.models import (
    NotFound,
    Subreddit,
)

is_multi_rx = re.compile(r"\A/?(user|r)/[^\/]+/m/(?P<name>.*?)/?\Z")

class VSite(Validator):
    def __init__(self, param, required=True, *args, **kwargs):
        super(VSite, self).__init__(param, *args, **kwargs)
        self.required = required

    def run(self, path):
        if not self.required and not path:
            return

        if is_multi_rx.match(path):
            return VMultiByPath(self.param, kinds=("m")).run(path)
        else:
            try:
                return Subreddit._by_name(path)
            except NotFound:
                self.set_error(errors.INVALID_SITE_PATH)
import re

from r2.lib.errors import errors
from r2.lib.validator import (
    VMultiByPath,
    Validator,
)
from r2.models import (
    NotFound,
    Subreddit,
    MultiReddit,
)

is_multi_rx = re.compile(r"\A/?(user|r)/[^\/]+/m/(?P<name>.*?)/?\Z")
is_adhoc_multi_rx = re.compile(r"\A\/r\/((?:[0-z]+\+)+(?:[0-z])+)\Z")

class VSite(Validator):
    def __init__(self, param, required=True, *args, **kwargs):
        super(VSite, self).__init__(param, *args, **kwargs)
        self.required = required

    def run(self, path):
        if not self.required and not path:
            return

        adhoc_multi_rx = is_adhoc_multi_rx.match(path)

        if is_multi_rx.match(path):
            return VMultiByPath(self.param, kinds=("m")).run(path)
        elif adhoc_multi_rx:
            sr_strings = adhoc_multi_rx.groups()[0].split("+")
            srs = Subreddit._by_name(sr_strings, stale=True).values()
            return MultiReddit(path, srs)
        else:
            try:
                return Subreddit._by_name(path)
            except NotFound:
                self.set_error(errors.INVALID_SITE_PATH)
Fix adhoc multisubreddit promo_request network request
Fix adhoc multisubreddit promo_request network request
Python
bsd-3-clause
madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk
06d271da251d3c85266629197d6b31b2ff617623
sympy/matrices/expressions/tests/test_hadamard.py
sympy/matrices/expressions/tests/test_hadamard.py
from sympy.matrices.expressions import MatrixSymbol, HadamardProduct
from sympy.matrices import ShapeError
from sympy import symbols
from sympy.utilities.pytest import raises

def test_HadamardProduct():
    n, m, k = symbols('n,m,k')
    Z = MatrixSymbol('Z', n, n)
    A = MatrixSymbol('A', n, m)
    B = MatrixSymbol('B', n, m)
    C = MatrixSymbol('C', m, k)

    assert HadamardProduct(A, B, A).shape == A.shape

    raises(ShapeError, lambda: HadamardProduct(A, B.T))
    raises(TypeError, lambda: A + 1)
    raises(TypeError, lambda: 5 + A)
    raises(TypeError, lambda: 5 - A)

    assert HadamardProduct(A, 2*B, -A)[1, 1] == -2 * A[1, 1]**2 * B[1, 1]

    mix = HadamardProduct(Z*A, B)*C
    assert mix.shape == (n, k)
from sympy.matrices.expressions import MatrixSymbol, HadamardProduct
from sympy.matrices import ShapeError
from sympy import symbols
from sympy.utilities.pytest import raises

def test_HadamardProduct():
    n, m, k = symbols('n,m,k')
    Z = MatrixSymbol('Z', n, n)
    A = MatrixSymbol('A', n, m)
    B = MatrixSymbol('B', n, m)
    C = MatrixSymbol('C', m, k)

    assert HadamardProduct(A, B, A).shape == A.shape

    raises(ShapeError, lambda: HadamardProduct(A, B.T))
    raises(TypeError, lambda: A + 1)
    raises(TypeError, lambda: 5 + A)
    raises(TypeError, lambda: 5 - A)

    assert HadamardProduct(A, 2*B, -A)[1, 1] == -2 * A[1, 1]**2 * B[1, 1]

    mix = HadamardProduct(Z*A, B)*C
    assert mix.shape == (n, k)

def test_mixed_indexing():
    X = MatrixSymbol('X', 2, 2)
    Y = MatrixSymbol('Y', 2, 2)
    Z = MatrixSymbol('Z', 2, 2)

    assert (X*HadamardProduct(Y, Z))[0, 0] == \
            X[0, 0]*Y[0, 0]*Z[0, 0] + X[0, 1]*Y[1, 0]*Z[1, 0]
Add index test for Hadamard+MatMul mix
Add index test for Hadamard+MatMul mix
Python
bsd-3-clause
kaichogami/sympy,Sumith1896/sympy,sahmed95/sympy,MridulS/sympy,Gadal/sympy,yashsharan/sympy,Shaswat27/sympy,chaffra/sympy,beni55/sympy,drufat/sympy,MechCoder/sympy,souravsingh/sympy,kaushik94/sympy,abhiii5459/sympy,liangjiaxing/sympy,iamutkarshtiwari/sympy,Gadal/sympy,grevutiu-gabriel/sympy,vipulroxx/sympy,moble/sympy,kevalds51/sympy,atsao72/sympy,atreyv/sympy,lindsayad/sympy,beni55/sympy,asm666/sympy,jbbskinny/sympy,lindsayad/sympy,Designist/sympy,wanglongqi/sympy,lidavidm/sympy,madan96/sympy,meghana1995/sympy,wanglongqi/sympy,kumarkrishna/sympy,cswiercz/sympy,drufat/sympy,farhaanbukhsh/sympy,amitjamadagni/sympy,cccfran/sympy,hrashk/sympy,yashsharan/sympy,cswiercz/sympy,AkademieOlympia/sympy,kmacinnis/sympy,saurabhjn76/sympy,asm666/sympy,hargup/sympy,ahhda/sympy,mafiya69/sympy,liangjiaxing/sympy,VaibhavAgarwalVA/sympy,mafiya69/sympy,moble/sympy,madan96/sympy,emon10005/sympy,debugger22/sympy,Curious72/sympy,garvitr/sympy,ChristinaZografou/sympy,saurabhjn76/sympy,pandeyadarsh/sympy,sahilshekhawat/sympy,shipci/sympy,yukoba/sympy,jamesblunt/sympy,kaushik94/sympy,garvitr/sympy,MechCoder/sympy,mafiya69/sympy,abloomston/sympy,Vishluck/sympy,kmacinnis/sympy,emon10005/sympy,Shaswat27/sympy,madan96/sympy,aktech/sympy,maniteja123/sympy,cccfran/sympy,VaibhavAgarwalVA/sympy,meghana1995/sympy,souravsingh/sympy,cswiercz/sympy,emon10005/sympy,debugger22/sympy,Arafatk/sympy,hrashk/sympy,saurabhjn76/sympy,lindsayad/sympy,Arafatk/sympy,shikil/sympy,yukoba/sympy,farhaanbukhsh/sympy,skidzo/sympy,abloomston/sympy,postvakje/sympy,ahhda/sympy,skidzo/sympy,dqnykamp/sympy,sahmed95/sympy,jerli/sympy,ga7g08/sympy,oliverlee/sympy,sahilshekhawat/sympy,toolforger/sympy,kevalds51/sympy,mcdaniel67/sympy,wanglongqi/sympy,MridulS/sympy,souravsingh/sympy,rahuldan/sympy,Davidjohnwilson/sympy,skidzo/sympy,kaichogami/sympy,rahuldan/sympy,drufat/sympy,Vishluck/sympy,pandeyadarsh/sympy,moble/sympy,jamesblunt/sympy,AunShiLord/sympy,Davidjohnwilson/sympy,MechCoder/sympy,grevutiu-gabriel/sympy,postvakje/sympy,wyom/sympy,jamesblunt/sympy,vipulroxx/sympy,Davidjohnwilson/sympy,Shaswat27/sympy,atsao72/sympy,beni55/sympy,yashsharan/sympy,aktech/sympy,pbrady/sympy,Titan-C/sympy,maniteja123/sympy,chaffra/sympy,jaimahajan1997/sympy,garvitr/sympy,dqnykamp/sympy,amitjamadagni/sympy,lidavidm/sympy,diofant/diofant,Gadal/sympy,ga7g08/sympy,toolforger/sympy,yukoba/sympy,bukzor/sympy,kevalds51/sympy,vipulroxx/sympy,pbrady/sympy,sampadsaha5/sympy,hargup/sympy,bukzor/sympy,jbbskinny/sympy,oliverlee/sympy,abloomston/sympy,AunShiLord/sympy,AkademieOlympia/sympy,sahmed95/sympy,mcdaniel67/sympy,iamutkarshtiwari/sympy,sunny94/temp,hargup/sympy,grevutiu-gabriel/sympy,mcdaniel67/sympy,Sumith1896/sympy,chaffra/sympy,shikil/sympy,meghana1995/sympy,rahuldan/sympy,kumarkrishna/sympy,cccfran/sympy,Curious72/sympy,lidavidm/sympy,toolforger/sympy,kaushik94/sympy,atreyv/sympy,Sumith1896/sympy,atsao72/sympy,postvakje/sympy,dqnykamp/sympy,hrashk/sympy,farhaanbukhsh/sympy,AkademieOlympia/sympy,Mitchkoens/sympy,MridulS/sympy,skirpichev/omg,pandeyadarsh/sympy,Titan-C/sympy,atreyv/sympy,VaibhavAgarwalVA/sympy,Curious72/sympy,bukzor/sympy,sampadsaha5/sympy,Vishluck/sympy,shipci/sympy,kaichogami/sympy,abhiii5459/sympy,jbbskinny/sympy,debugger22/sympy,asm666/sympy,sahilshekhawat/sympy,jaimahajan1997/sympy,shipci/sympy,wyom/sympy,jerli/sympy,liangjiaxing/sympy,iamutkarshtiwari/sympy,abhiii5459/sympy,sunny94/temp,Mitchkoens/sympy,ChristinaZografou/sympy,Designist/sympy,sampadsaha5/sympy,shikil/sympy,sunny94/temp,kmacinnis/sympy,ChristinaZografou/sympy,jerli/sympy,wyom/sympy,
ahhda/sympy,oliverlee/sympy,ga7g08/sympy,aktech/sympy,Mitchkoens/sympy,Titan-C/sympy,maniteja123/sympy,jaimahajan1997/sympy,pbrady/sympy,Arafatk/sympy,AunShiLord/sympy,Designist/sympy,kumarkrishna/sympy
24d2b9620af40395c66bd8d93c443fddfe74b5cf
hs_core/tests/api/rest/__init__.py
hs_core/tests/api/rest/__init__.py
from test_create_resource import *
from test_resource_file import *
from test_resource_list import *
from test_resource_meta import *
from test_resource_types import *
from test_set_access_rules import *
from test_user_info import *
# Do not import tests here as this will cause
# some tests to be discovered and run twice
Remove REST test imports to avoid some tests being run twice
Remove REST test imports to avoid some tests being run twice
Python
bsd-3-clause
ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,ResearchSoftwareInstitute/MyHPOM,hydroshare/hydroshare,FescueFungiShare/hydroshare,ResearchSoftwareInstitute/MyHPOM,RENCI/xDCIShare,hydroshare/hydroshare,RENCI/xDCIShare,RENCI/xDCIShare,FescueFungiShare/hydroshare,hydroshare/hydroshare,RENCI/xDCIShare,FescueFungiShare/hydroshare,FescueFungiShare/hydroshare
d328129a2f2909c1b8769f1edb94746c4a88dd28
test_project/test_models.py
test_project/test_models.py
from django.db import models

class TestUser0(models.Model):
    username = models.CharField()
    test_field = models.CharField('My title')

    class Meta:
        app_label = 'controlcenter'

    def foo(self):
        return 'original foo value'
    foo.short_description = 'original foo label'

    def bar(self):
        return 'original bar value'
    bar.short_description = 'original bar label'

    def baz(self):
        pass
    baz.short_description = ''

    def egg(self):
        return 'original egg value'

class TestUser1(models.Model):
    primary = models.AutoField(primary_key=True)
    username = models.CharField()

    class Meta:
        app_label = 'controlcenter'
from django.db import models

class TestUser0(models.Model):
    username = models.CharField(max_length=255)
    test_field = models.CharField('My title', max_length=255)

    class Meta:
        app_label = 'controlcenter'

    def foo(self):
        return 'original foo value'
    foo.short_description = 'original foo label'

    def bar(self):
        return 'original bar value'
    bar.short_description = 'original bar label'

    def baz(self):
        pass
    baz.short_description = ''

    def egg(self):
        return 'original egg value'

class TestUser1(models.Model):
    primary = models.AutoField(primary_key=True)
    username = models.CharField(max_length=255)

    class Meta:
        app_label = 'controlcenter'
Add `max_length` to char fields
Add `max_length` to char fields
Python
bsd-3-clause
byashimov/django-controlcenter,byashimov/django-controlcenter,byashimov/django-controlcenter
36e6e2bedcc37a48097ccf0abd544ca095748412
build/strip-po-charset.py
build/strip-po-charset.py
#
# strip-po-charset.py
#

import sys, string

def strip_po_charset(inp, out):
    out.write(string.replace(inp.read(), "\"Content-Type: text/plain; charset=UTF-8\\n\"\n",""))

def main():
    if len(sys.argv) != 3:
        print "Usage: %s <input (po) file> <output (spo) file>" % sys.argv[0]
        print
        print "Unsupported number of arguments; 2 required."
        sys.exit(1)

    strip_po_charset(open(sys.argv[1],'r'), open(sys.argv[2],'w'))

if __name__ == '__main__':
    main()
#
# strip-po-charset.py
#

import sys, string

def strip_po_charset(inp, out):
    out.write(string.replace(inp.read(), "\"Content-Type: text/plain; charset=UTF-8\\n\"\n",""))

def main():
    if len(sys.argv) != 3:
        print "Usage: %s <input (po) file> <output (spo) file>" % sys.argv[0]
        print
        print "Unsupported number of arguments; 2 required."
        sys.exit(1)

    strip_po_charset(open(sys.argv[1],'r'), open(sys.argv[2],'w'))

if __name__ == '__main__':
    main()
Set svn:eol-style='native' on some text files that were lacking it.
Set svn:eol-style='native' on some text files that were lacking it. git-svn-id: f8a4e5e023278da1e04e203c7fe051e3c4285d88@855475 13f79535-47bb-0310-9956-ffa450edef68
Python
apache-2.0
wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion
a00f9c56671c028c69638f61d3d4c1fd022c0430
cinspect/tests/test_patching.py
cinspect/tests/test_patching.py
from __future__ import absolute_import, print_function

# Standard library
import inspect
import unittest

from cinspect import getfile, getsource

class TestHelloModule(unittest.TestCase):

    def test_patching_inspect_should_work(self):
        # Given
        inspect.getsource = getsource
        inspect.getfile = getfile

        # When
        t = getfile(unittest)
        s = getsource(unittest.main)

        # Then
        self.assertGreater(len(t), 0)
        self.assertGreater(len(s), 0)
from __future__ import absolute_import, print_function

# Standard library
import inspect
import unittest

from cinspect import getfile, getsource

class TestPatching(unittest.TestCase):

    def test_patching_inspect_should_work(self):
        # Given
        inspect.getsource = getsource
        inspect.getfile = getfile

        # When
        t = getfile(unittest)
        s = getsource(unittest.main)

        # Then
        self.assertGreater(len(t), 0)
        self.assertGreater(len(s), 0)
Fix copy paste bug in test class name.
Fix copy paste bug in test class name.
Python
bsd-3-clause
punchagan/cinspect,punchagan/cinspect
97e39ec9e03728384ad00a7e011194412521631e
tests/test_containers.py
tests/test_containers.py
try:
    from http.server import SimpleHTTPRequestHandler
except ImportError:
    from SimpleHTTPServer import SimpleHTTPRequestHandler

try:
    from socketserver import TCPServer
except ImportError:
    from SocketServer import TCPServer

import os
import threading
import unittest

import containers

PORT = 8080

class TestServer(TCPServer):
    allow_reuse_address = True

handler = SimpleHTTPRequestHandler
httpd = TestServer(('', PORT), handler)

httpd_thread = threading.Thread(target=httpd.serve_forever)
httpd_thread.setDaemon(True)
httpd_thread.start()

class TestDiscovery(unittest.TestCase):
    def test_get_etcd(self):
        containers.simple_discovery('localhost:8080/tests/etc/etcd-v2.0.0-linux-amd64', var='/tmp', secure=False)

if __name__ == '__main__':
    unittest.main()
try:
    from http.server import SimpleHTTPRequestHandler
except ImportError:
    from SimpleHTTPServer import SimpleHTTPRequestHandler

try:
    from socketserver import TCPServer
except ImportError:
    from SocketServer import TCPServer

import os
import threading
import unittest
import glob, os

import containers

PORT = 8080

class TestServer(TCPServer):
    allow_reuse_address = True

handler = SimpleHTTPRequestHandler
httpd = TestServer(('', PORT), handler)

httpd_thread = threading.Thread(target=httpd.serve_forever)
httpd_thread.setDaemon(True)
httpd_thread.start()

class TestDiscovery(unittest.TestCase):
    def tearDown(self):
        filelist = glob.glob('/tmp/*.aci')
        for f in filelist:
            os.remove(f)

    def test_get_etcd(self):
        containers.simple_discovery('localhost:8080/tests/etc/etcd-v2.0.0-linux-amd64', var='/tmp', secure=False)

if __name__ == '__main__':
    unittest.main()
Remove aci files after tests have run
Remove aci files after tests have run
Python
mit
kragniz/containers
daf4a6fd35811210c546782a771c6ddef8641f25
opps/images/templatetags/images_tags.py
opps/images/templatetags/images_tags.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings

from ..generate import image_url as url

register = template.Library()

@register.simple_tag
def image_url(image_url, **kwargs):
    return url(image_url=image_url, **kwargs)

@register.simple_tag
def image_obj(image, **kwargs):
    HALIGN_VALUES = ("left", "center", "right")
    VALIGN_VALUES = ("top", "middle", "bottom")

    if image == "":
        return ""

    if settings.THUMBOR_ENABLED:
        new = {}
        new['flip'] = image.flip
        new['flop'] = image.flop

        if image.halign and image.halign in HALIGN_VALUES:
            new['halign'] = image.halign
        if image.valign and image.valign in VALIGN_VALUES:
            new['valign'] = image.valign

        new['fit_in'] = image.fit_in
        new['smart'] = image.smart

        if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
                image.crop_y2 > 0:
            new['crop'] = ((image.crop_x1, image.crop_y1), (image.crop_x2, image.crop_y2))

        kwargs = dict(new, **kwargs)

    return url(image_url=image.archive.url, **kwargs)
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings

from ..generate import image_url as url

register = template.Library()

@register.simple_tag
def image_url(image_url, **kwargs):
    return url(image_url=image_url, **kwargs)

@register.simple_tag
def image_obj(image, **kwargs):
    HALIGN_VALUES = ("left", "center", "right")
    VALIGN_VALUES = ("top", "middle", "bottom")

    if image == "" or not image:
        return ""

    if settings.THUMBOR_ENABLED:
        new = {}
        new['flip'] = image.flip
        new['flop'] = image.flop

        if image.halign and image.halign in HALIGN_VALUES:
            new['halign'] = image.halign
        if image.valign and image.valign in VALIGN_VALUES:
            new['valign'] = image.valign

        new['fit_in'] = image.fit_in
        new['smart'] = image.smart

        if image.crop_x1 > 0 or image.crop_x2 > 0 or image.crop_y1 > 0 or \
                image.crop_y2 > 0:
            new['crop'] = ((image.crop_x1, image.crop_y1), (image.crop_x2, image.crop_y2))

        kwargs = dict(new, **kwargs)

    return url(image_url=image.archive.url, **kwargs)
Fix image_obj template tag when sending Nonetype image
Fix image_obj template tag when sending Nonetype image
Python
mit
YACOWS/opps,YACOWS/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,williamroot/opps,opps/opps,jeanmask/opps
bb896ed1723ca01ed18d1110eb51ca1661135db6
rapport/plugins/launchpad.py
rapport/plugins/launchpad.py
# Copyright (c) 2013, Sascha Peilicke <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA

""" Launchpad plugin. """

from launchpadlib.launchpad import Launchpad

import rapport.plugin

class LaunchpadPlugin(rapport.plugin.Plugin):
    def __init__(self, *args, **kwargs):
        super(LaunchpadPlugin, self).__init__(*args, **kwargs)
        self.lp = Launchpad.login_anonymously(self.login, 'production')

    def _get_json(url):
        return json.loads(requests.get(url).text)

    def collect(self, timeframe):
        bug_tasks = self.lp.people["saschpe"].searchTasks()
        #TODO: Try to find some useful info
        return self._results()

rapport.plugin.register("launchpad", LaunchpadPlugin)
# Copyright (c) 2013, Sascha Peilicke <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program (see the file COPYING); if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA

""" Launchpad plugin. """

import warnings
warnings.filterwarnings('ignore', 'Module argparse was already imported')  # Filter a UserWarning from Jinja2

from launchpadlib.launchpad import Launchpad

import rapport.plugin

class LaunchpadPlugin(rapport.plugin.Plugin):
    def __init__(self, *args, **kwargs):
        super(LaunchpadPlugin, self).__init__(*args, **kwargs)
        self.lp = Launchpad.login_anonymously(self.login, 'production')

    def _get_json(url):
        return json.loads(requests.get(url).text)

    def collect(self, timeframe):
        bug_tasks = self.lp.people["saschpe"].searchTasks()
        #TODO: Try to find some useful info
        return self._results()

rapport.plugin.register("launchpad", LaunchpadPlugin)
Add filter for "argparse" warning
Add filter for "argparse" warning
Python
apache-2.0
saschpe/rapport
53d25950eb1ff21bb4488b60e802cb243735681f
cmsplugin_zinnia/placeholder.py
cmsplugin_zinnia/placeholder.py
"""Placeholder model for Zinnia""" import inspect from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if 'context' in args: return alocals['context'] finally: del frame @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True
"""Placeholder model for Zinnia""" import inspect from django.template.context import Context, RequestContext from cms.models.fields import PlaceholderField from cms.plugin_rendering import render_placeholder from zinnia.models.entry import EntryAbstractClass class EntryPlaceholder(EntryAbstractClass): """Entry with a Placeholder to edit content""" content_placeholder = PlaceholderField('content') def acquire_context(self): """ Inspect the stack to acquire the current context used, to render the placeholder. I'm really sorry for this, but if you have a better way, you are welcome ! """ frame = None request = None try: for f in inspect.stack()[1:]: frame = f[0] args, varargs, keywords, alocals = inspect.getargvalues(frame) if not request and 'request' in args: request = alocals['request'] if 'context' in args: return alocals['context'] finally: del frame if request is not None: return RequestContext(request) else: return Context() @property def html_content(self): """ Render the content_placeholder field dynamicly. https://github.com/Fantomas42/cmsplugin-zinnia/issues/3 """ context = self.acquire_context() return render_placeholder(self.content_placeholder, context) class Meta(EntryAbstractClass.Meta): """EntryPlaceholder's Meta""" abstract = True
Make acquire_context always return some Context
Make acquire_context always return some Context
Python
bsd-3-clause
django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia,bittner/cmsplugin-zinnia,django-blog-zinnia/cmsplugin-zinnia
caf245e14421472adb0668e57adf5a3e3ae68424
scuba/utils.py
scuba/utils.py
try:
    from shlex import quote as shell_quote
except ImportError:
    from pipes import quote as shell_quote

def format_cmdline(args, maxwidth=80):
    def lines():
        line = ''
        for a in (shell_quote(a) for a in args):
            if len(line) + len(a) > maxwidth:
                yield line
                line = ''
            line += ' ' + a
    return ' \\\n'.join(lines())[1:]
try:
    from shlex import quote as shell_quote
except ImportError:
    from pipes import quote as shell_quote

def format_cmdline(args, maxwidth=80):
    '''Format args into a shell-quoted command line.

    The result will be wrapped to maxwidth characters where possible,
    not breaking a single long argument.
    '''

    # Leave room for the space and backslash at the end of each line
    maxwidth -= 2

    def lines():
        line = ''
        for a in (shell_quote(a) for a in args):
            # If adding this argument will make the line too long,
            # yield the current line, and start a new one.
            if len(line) + len(a) + 1 > maxwidth:
                yield line
                line = ''

            # Append this argument to the current line, separating
            # it by a space from the existing arguments.
            if line:
                line += ' ' + a
            else:
                line = a

        yield line

    return ' \\\n'.join(lines())
Fix missing final line from format_cmdline()
Fix missing final line from format_cmdline() The previous code was missing 'yield line' after the for loop. This commit fixes that, as well as the extra space at the beginning of each line. Normally, we'd use str.join() to avoid such a problem, but this code is accumulating the line manually, so we can't just join the args together. This fixes #41.
Python
mit
JonathonReinhart/scuba,JonathonReinhart/scuba,JonathonReinhart/scuba
2c5c04fd0bb1dc4f5bf54af2e2739fb6a0f1d2c4
survey/urls.py
survey/urls.py
from django.conf.urls import patterns, include, url

from .views import IndexView, SurveyDetail, ConfirmView, SurveyCompleted

urlpatterns = patterns('',
    # Examples:
    url(r'^survey/$', IndexView.as_view(), name='survey-list'),
    url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/', SurveyDetail.as_view(), name='survey-detail'),
    url(r'^survey/(?P<id>[a-zA-Z0-9-]+)/completed/', SurveyCompleted.as_view(), name='survey-completed'),
    url(r'^survey/(?P<id>[a-zA-Z0-9-]+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'),
    url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'),
)
from django.conf.urls import patterns, include, url

from .views import IndexView, SurveyDetail, ConfirmView, SurveyCompleted

urlpatterns = patterns('',
    url(r'^survey/$', IndexView.as_view(), name='survey-list'),
    url(r'^survey/(?P<id>\d+)/', SurveyDetail.as_view(), name='survey-detail'),
    url(r'^survey/(?P<id>\d+)/completed/', SurveyCompleted.as_view(), name='survey-completed'),
    url(r'^survey/(?P<id>\d+)-(?P<step>\d+)/', SurveyDetail.as_view(), name='survey-detail-step'),
    url(r'^confirm/(?P<uuid>\w+)/', ConfirmView.as_view(), name='survey-confirmation'),
)
Fix - No more crash when entering an url with letter
Fix - No more crash when entering an url with letter
Python
agpl-3.0
Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey,Pierre-Sassoulas/django-survey
3e07e509fe0b1dd7c02b39c490c994e3c0bb94b5
bot.py
bot.py
import discord

client = discord.Client()

@client.event
async def on_message(message):
    if message.author == client.user:
        return
    if message.content.startswith('!ping'):
        msg = 'Pong! This bot is alive.'
        await client.send_message(message.channel, msg)

@client.event
async def on_ready():
    print('Logged in as')
    print('User:', client.user.name)
    print('ID', client.user.id)
    print('------')

tokenfile = open('token')
token = tokenfile.read().replace('\n', '')
client.run(token)
# TODO Put an enum matching unit pairs, will make code cleaner
import discord
import re
from unitconverter import *

client = discord.Client()

def construct_response(messageregex):
    string = messageregex.string + ' is '
    currentvalue = int(messageregex.string.replace(messageregex.group(2), ''))
    convertedvalue = feet_to_meters(currentvalue)
    return string + str(convertedvalue)

@client.event
async def on_message(message):
    if message.author == client.user:
        return
    if message.content.startswith('!ping'):
        msg = 'Pong! This bot is alive.'
        await client.send_message(message.channel, msg)
    # this technique can be used for the other units
    if re.search('[0-9]+(| )(ft|feet)', message.content) is not None:
        msg = construct_response(re.search('[0-9]+(| )(ft|feet)', message.content)) + ' meters'
        await client.send_message(message.channel, msg)

@client.event
async def on_ready():
    print('Logged in as')
    print('User:', client.user.name)
    print('ID', client.user.id)
    print('------')

tokenfile = open('token')
token = tokenfile.read().replace('\n', '')
client.run(token)
Add unit conversion for ft
Add unit conversion for ft
Python
mit
suclearnub/scubot
0381fe32664e246011d5917a81c81fce936ae364
tests/tangelo-verbose.py
tests/tangelo-verbose.py
import fixture

def test_standard_verbosity():
    stderr = fixture.start_tangelo(stderr=True)
    stderr = '\n'.join(stderr)

    assert 'TANGELO Server is running' in stderr
    assert 'TANGELO Hostname' in stderr

    fixture.stop_tangelo()

def test_lower_verbosity():
    stderr = fixture.start_tangelo("-q", stderr=True)
    stderr = '\n'.join(stderr)

    assert 'TANGELO Server is running' in stderr
    assert 'TANGELO Hostname' not in stderr

    fixture.stop_tangelo()
import fixture

def test_standard_verbosity():
    stderr = fixture.start_tangelo(stderr=True)
    stderr = '\n'.join(stderr)

    fixture.stop_tangelo()

    assert 'TANGELO Server is running' in stderr
    assert 'TANGELO Hostname' in stderr

def test_lower_verbosity():
    stderr = fixture.start_tangelo("-q", stderr=True)
    stderr = '\n'.join(stderr)

    fixture.stop_tangelo()

    assert 'TANGELO Server is running' in stderr
    assert 'TANGELO Hostname' not in stderr
Reorder when the tangelo instance gets shut down in a test so that if an assert fails, other tests will still be able to run.
Reorder when the tangelo instance gets shut down in a test so that if an assert fails, other tests will still be able to run.
Python
apache-2.0
Kitware/tangelo,Kitware/tangelo,Kitware/tangelo
e2954d74b77046d3dee8134128f122a09dff3c7d
clowder_server/emailer.py
clowder_server/emailer.py
from django.core.mail import send_mail

from clowder_account.models import ClowderUser

ADMIN_EMAIL = '[email protected]'

def send_alert(company, name):
    for user in ClowderUser.objects.filter(company=company):
        subject = 'FAILURE: %s' % (name)
        body = subject
        send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
import os

import requests

from django.core.mail import send_mail

from clowder_account.models import ClowderUser

ADMIN_EMAIL = '[email protected]'

def send_alert(company, name):
    for user in ClowderUser.objects.filter(company=company):
        subject = 'FAILURE: %s' % (name)
        body = subject

        slack_token = os.getenv('PARKME_SLACK_TOKEN')
        url = 'https://hooks.slack.com/services/%s' % (slack_token)
        payload = {"username": "devopsbot", "text": body, "icon_emoji": ":robot_face:"}
        requests.post(url, json=payload)

        send_mail(subject, body, ADMIN_EMAIL, [user.email], fail_silently=True)
Add support for slack messaging
Add support for slack messaging
Python
agpl-3.0
keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server,keithhackbarth/clowder_server
247c1fc0af2556a5bd421488430d97f45c533771
kaggle/titanic/categorical_and_scaler_prediction.py
kaggle/titanic/categorical_and_scaler_prediction.py
import pandas

def main():
    train_all = pandas.DataFrame.from_csv('train.csv')
    train = train_all[['Survived', 'Sex', 'Fare']]
    print(train)

if __name__ == '__main__':
    main()
import pandas
from sklearn.naive_bayes import MultinomialNB
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import LabelEncoder

def main():
    train_all = pandas.DataFrame.from_csv('train.csv')
    train = train_all[['Survived', 'Sex', 'Fare']][:20]

    gender_label = LabelEncoder()
    train.Sex = gender_label.fit_transform(train.Sex)

    X = train[['Sex', 'Fare']]
    y = train['Survived']
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=0.33, random_state=42)

    clf = MultinomialNB()
    clf.fit(X_train, y_train)
    print(clf.predict(X_test))

if __name__ == '__main__':
    main()
Make predictions with gender and ticket price
Make predictions with gender and ticket price
Python
mit
noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit
70259a9f9ce5647f9c36b70c2eb20b51ba447eda
middleware.py
middleware.py
#!/usr/bin/env python3

class Routes:
    '''Define the feature of route for URIs.'''
    def __init__(self):
        self._Routes = []

    def AddRoute(self, uri, callback):
        '''Add an URI into the route table.'''
        self._Routes.append([uri, callback])

    def Dispatch(self, req, res):
        '''Dispatch an URI according to the route table.'''
        uri = ""
        for fv in req.Header:
            if fv[0] == "URI":
                uri = fv[1]
                found = 1
                break

        found = 0
        for r in self._Routes:
            if r[0] == uri:
                r[1](req, res)
                found = 1
                break

        if found != 1:
            self._NotFound(req, res)

    def _NotFound(self, req, res):
        '''Define the default error page for not found URI.'''
        res.Header.append(["Status", "404 Not Found"])
#!/usr/bin/env python3

class Routes:
    '''Define the feature of route for URIs.'''
    def __init__(self):
        self._Routes = []

    def AddRoute(self, uri, callback):
        '''Add an URI into the route table.'''
        self._Routes.append([uri, callback])

    def Dispatch(self, req, res):
        '''Dispatch an URI according to the route table.'''
        uri = ""
        for fv in req.Header:
            if fv[0] == "URI":
                uri = fv[1]
                found = 1
                break

        found = 0
        # Check the route
        for r in self._Routes:
            if r[0] == uri:
                r[1](req, res)
                found = 1
                break

        # Check static files
        if found != 1:
            found = self._ReadStaticFiles(uri, res)

        # It is really not found
        if found != 1:
            self._NotFound(req, res)

    def _ReadStaticFiles(self, uri, res):
        found = 0
        try:
            f = open("static/{}".format(uri), "r")
            res.Body = f.read()
            f.close()
            found = 1
        except:
            pass
        return found

    def _NotFound(self, req, res):
        '''Define the default error page for not found URI.'''
        res.Header.append(["Status", "404 Not Found"])
Add read static files feature.
Add read static files feature.
Python
bsd-3-clause
starnight/MicroHttpServer,starnight/MicroHttpServer,starnight/MicroHttpServer,starnight/MicroHttpServer
2ef97501b15a9369d21953312115ea36355f251c
minimax.py
minimax.py
class Heuristic:
    def heuristic(self, board, color):
        raise NotImplementedError('Dont override this class')
class Heuristic:
    def heuristic(self, board, color):
        raise NotImplementedError('Dont override this class')

class Minimax:
    def __init__(self, color_me, h_me, h_challenger):
        self.h_me = h_me
        self.h_challenger = h_challenger
        self.color_me = color_me

    def heuristic(self, board, color):
        if color == self.color_me:
            return self.h_me.heuristic(board, color)
        else:
            return self.h_challenger.heuristic(board, color)
Create the minimal class MiniMax
Create the minimal class MiniMax
Python
apache-2.0
frila/agente-minimax
8a573dae750b1b9415df0c9e2c019750171e66f0
migrations.py
migrations.py
import os
import json

from dateutil.parser import parse

from scrapi.util import safe_filename

def migrate_from_old_scrapi():
    for dirname, dirs, filenames in os.walk('archive'):
        for filename in filenames:
            oldpath = os.path.join(dirname, filename)
            source, sid, dt = dirname.split('/')[1:]
            dt = parse(dt).isoformat()
            sid = safe_filename(sid)
            newpath = os.path.join('archive', source, sid, dt, filename)

            if filename == 'manifest.json':
                with open(oldpath) as old:
                    old_json = json.load(old)
                    new_json = {
                        'consumerVersion': old_json['version'],
                        'normalizeVersion': old_json['version'],
                        'timestamp': dt,
                        'source': source,
                        'id': sid
                    }
                    old_json = json.dumps(old_json, indent=4, sort_keys=True)
                    new_json = json.dumps(new_json, indent=4, sort_keys=True)

            print '{} -> {}'.format(oldpath, newpath)
            print old_json
            print new_json
import os
import json

from dateutil.parser import parse

from scrapi.util import safe_filename

def migrate_from_old_scrapi():
    for dirname, dirs, filenames in os.walk('archive'):
        for filename in filenames:
            oldpath = os.path.join(dirname, filename)
            source, sid, dt = dirname.split('/')[1:]
            dt = parse(dt).isoformat()
            sid = safe_filename(sid)
            newpath = os.path.join('archive', source, sid, dt, filename)

            if filename == 'manifest.json':
                with open(oldpath) as old:
                    old_json = json.load(old)
                    new_json = {
                        'consumerVersion': old_json['version'],
                        'normalizeVersion': old_json['version'],
                        'timestamp': dt,
                        'source': source,
                        'id': sid
                    }
                    old_json = json.dumps(old_json, indent=4, sort_keys=True)
                    new_json = json.dumps(new_json, indent=4, sort_keys=True)
                    print old_json
                    print new_json

            print '{} -> {}'.format(oldpath, newpath)
Move json print methods into if statement
Move json print methods into if statement
Python
apache-2.0
erinspace/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,fabianvf/scrapi,fabianvf/scrapi,ostwald/scrapi,mehanig/scrapi,alexgarciac/scrapi,jeffreyliu3230/scrapi,felliott/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,erinspace/scrapi
c668aaa0f22f5a61094c2028291b65c781733a54
mojapi/api.py
mojapi/api.py
import json
import requests
import time

def get_statuses():
    return requests.get('https://status.mojang.com/check/').json()

def get_uuid(username, unix_timestamp=None):
    if unix_timestamp is None:
        unix_timestamp = int(time.time())
    return requests.get(
        'https://api.mojang.com/users/profiles/minecraft/{}?at={}'.format(username, unix_timestamp)
    ).json()

def get_usernames(uuid):
    return requests.get('https://api.mojang.com/user/profiles/{}/names'.format(uuid)).json()

def get_profiles(*usernames):
    return requests.post(
        url='https://api.mojang.com/profiles/minecraft',
        headers={
            b'Content-Type': b'application/json'
        },
        data=json.dumps(list(usernames))
    ).json()
import json
import requests
import time

def get_statuses():
    return requests.get('https://status.mojang.com/check/').json()

def get_uuid(username, unix_timestamp=None):
    if unix_timestamp is None:
        unix_timestamp = int(time.time())
    return requests.get(
        'https://api.mojang.com/users/profiles/minecraft/{}?at={}'.format(username, unix_timestamp)
    ).json()

def get_usernames(uuid):
    return requests.get('https://api.mojang.com/user/profiles/{}/names'.format(uuid)).json()

def get_profiles(*usernames):
    return requests.post(
        url='https://api.mojang.com/profiles/minecraft',
        headers={
            b'Content-Type': b'application/json'
        },
        data=json.dumps(list(usernames))
    ).json()

def get_blocked_server_hashes():
    response = requests.get('https://sessionserver.mojang.com/blockedservers')
    response.raise_for_status()

    sha1_hashes = response.content.split(b'\n')
    return sha1_hashes
Add get blocked server hashes call
Add get blocked server hashes call
Python
mit
zugmc/mojapi
6845c56edc315f5ce07f0bf1101d59ee04036024
pydir/daemon-rxcmd.py
pydir/daemon-rxcmd.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<[email protected]>
# See LICENSE for details.

import bluetooth
import os
import logging
import time
from daemon import runner

class RxCmdDaemon():
    def __init__(self):
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/tty'
        self.stderr_path = '/dev/tty'
        self.pidfile_path = '/tmp/RxCmdDaemon.pid'
        self.pidfile_timeout = 5

    def run(self):
        while True:
            server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
            port = 1
            server_sock.bind(("",port))
            server_sock.listen(1)

            client_sock,address = server_sock.accept()
            print "Accepted connection from ",address

            try:
                while True:
                    data = client_sock.recv(1024)
                    print "received [%s]" % data
                    os.system(data)
            except Exception as e:
                logging.exception(e)

rxCmdDaemon = RxCmdDaemon()
daemon_runner = runner.DaemonRunner(rxCmdDaemon)
daemon_runner.do_action()
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2016 F Dou<[email protected]>
# See LICENSE for details.

import bluetooth
import os
import logging
import time
from daemon import runner

class RxCmdDaemon():
    def __init__(self):
        self.stdin_path = '/dev/null'
        self.stdout_path = '/dev/tty'
        self.stderr_path = '/dev/tty'
        self.pidfile_path = '/tmp/RxCmdDaemon.pid'
        self.pidfile_timeout = 5

    def run(self):
        while True:
            server_sock=bluetooth.BluetoothSocket( bluetooth.RFCOMM )
            port = 1
            server_sock.bind(("",port))
            server_sock.listen(1)

            client_sock,address = server_sock.accept()
            print "Accepted connection from ",address

            try:
                while True:
                    data = client_sock.recv(1024)
                    print "received [%s]" % data
                    os.system(data)
            except Exception as e:
                logging.exception(e)

while True:
    try:
        rxCmdDaemon = RxCmdDaemon()
        daemon_runner = runner.DaemonRunner(rxCmdDaemon)
        daemon_runner.do_action()
    except Exception as e:
        logging.exception(e)
Add try/catch to improve error handling
Add try/catch to improve error handling
Python
apache-2.0
javatechs/RxCmd,javatechs/RxCmd,javatechs/RxCmd
e45c3a759d56dc70907b2169ece9da2415ab1ffa
resync/resource_container.py
resync/resource_container.py
"""ResourceSync Resource Container object Both ResourceList and Change Set objects are collections of Resource objects with additional metadata regarding capabilities and discovery information. This is a superclass for the ResourceList and ChangeSet classes which contains common functionality. """ class ResourceContainer(object): """Class containing resource-like objects Core functionality:: - resources property that is the set/list of resources -- add() to add a resource-like object to self.resources -- iter() to get iterator over self.resource in appropriate order - capabilities property that is a dict of capabilities Derived classes may add extra functionality such as len() etc.. However, any code designed to work with any ResourceContainer should use only the core functionality. """ def __init__(self, resources=None, capabilities=None): self.resources=resources self.capabilities=(capabilities if (capabilities is not None) else {}) def __iter__(self): """Iterator over all the resources in this resourcelist Baseline implementation use iterator given by resources property """ return(iter(self.resources)) def add(self, resource): """Add a resource or an iterable collection of resources to this container Must be implemented in derived class """ raise NotImplemented("add() not implemented") def __str__(self): """Return string of all resources in order given by interator""" s = '' for resource in self: s += str(resource) + "\n" return(s)
"""ResourceSync Resource Container object Both ResourceList and ChangeList objects are collections of Resource objects with additional metadata regarding capabilities and discovery information. This is a superclass for the ResourceList and ChangeList classes which contains common functionality. """ class ResourceContainer(object): """Class containing resource-like objects Core functionality:: - resources property that is the set/list of resources -- add() to add a resource-like object to self.resources -- iter() to get iterator over self.resource in appropriate order - capabilities property that is a dict of capabilities Derived classes may add extra functionality such as len() etc.. However, any code designed to work with any ResourceContainer should use only the core functionality. """ def __init__(self, resources=None, capabilities=None): self.resources=resources self.capabilities=(capabilities if (capabilities is not None) else {}) def __iter__(self): """Iterator over all the resources in this resourcelist Baseline implementation use iterator given by resources property """ return(iter(self.resources)) def add(self, resource): """Add a resource or an iterable collection of resources to this container Must be implemented in derived class """ raise NotImplemented("add() not implemented") def __str__(self): """Return string of all resources in order given by interator""" s = '' for resource in self: s += str(resource) + "\n" return(s)
Fix comments for beta spec language
Fix comments for beta spec language
Python
apache-2.0
dans-er/resync,resync/resync,lindareijnhoudt/resync,lindareijnhoudt/resync,dans-er/resync
96df077d5485979af256fe7b95708ace658fb8e2
test/mitmproxy/test_examples.py
test/mitmproxy/test_examples.py
import glob

from mitmproxy import utils, script
from mitmproxy.proxy import config
from netlib import tutils as netutils
from netlib.http import Headers
from . import tservers, tutils

from examples import (
    modify_form,
)

def test_load_scripts():
    example_dir = utils.Data(__name__).path("../../examples")
    scripts = glob.glob("%s/*.py" % example_dir)

    tmaster = tservers.TestMaster(config.ProxyConfig())

    for f in scripts:
        if "har_extractor" in f:
            continue
        if "flowwriter" in f:
            f += " -"
        if "iframe_injector" in f:
            f += " foo"  # one argument required
        if "filt" in f:
            f += " ~a"
        if "modify_response_body" in f:
            f += " foo bar"  # two arguments required

        try:
            s = script.Script(f, script.ScriptContext(tmaster))  # Loads the script file.
        except Exception as v:
            if "ImportError" not in str(v):
                raise
        else:
            s.unload()

def test_modify_form():
    form_header = Headers(content_type="application/x-www-form-urlencoded")
    flow = tutils.tflow(req=netutils.treq(headers=form_header))
    modify_form.request({}, flow)
    assert flow.request.urlencoded_form["mitmproxy"] == ["rocks"]
import glob

from mitmproxy import utils, script
from mitmproxy.proxy import config
from netlib import tutils as netutils
from netlib.http import Headers
from . import tservers, tutils

from examples import (
    add_header,
    modify_form,
)

def test_load_scripts():
    example_dir = utils.Data(__name__).path("../../examples")
    scripts = glob.glob("%s/*.py" % example_dir)

    tmaster = tservers.TestMaster(config.ProxyConfig())

    for f in scripts:
        if "har_extractor" in f:
            continue
        if "flowwriter" in f:
            f += " -"
        if "iframe_injector" in f:
            f += " foo"  # one argument required
        if "filt" in f:
            f += " ~a"
        if "modify_response_body" in f:
            f += " foo bar"  # two arguments required

        try:
            s = script.Script(f, script.ScriptContext(tmaster))  # Loads the script file.
        except Exception as v:
            if "ImportError" not in str(v):
                raise
        else:
            s.unload()

def test_add_header():
    flow = tutils.tflow(resp=netutils.tresp())
    add_header.response({}, flow)
    assert flow.response.headers["newheader"] == "foo"

def test_modify_form():
    form_header = Headers(content_type="application/x-www-form-urlencoded")
    flow = tutils.tflow(req=netutils.treq(headers=form_header))
    modify_form.request({}, flow)
    assert flow.request.urlencoded_form["mitmproxy"] == ["rocks"]
Add tests for add_header example
Add tests for add_header example
Python
mit
mitmproxy/mitmproxy,jvillacorta/mitmproxy,tdickers/mitmproxy,dufferzafar/mitmproxy,mosajjal/mitmproxy,cortesi/mitmproxy,tdickers/mitmproxy,dwfreed/mitmproxy,laurmurclar/mitmproxy,gzzhanghao/mitmproxy,ddworken/mitmproxy,mosajjal/mitmproxy,mhils/mitmproxy,mhils/mitmproxy,fimad/mitmproxy,mitmproxy/mitmproxy,ujjwal96/mitmproxy,dwfreed/mitmproxy,zlorb/mitmproxy,dufferzafar/mitmproxy,dufferzafar/mitmproxy,laurmurclar/mitmproxy,zlorb/mitmproxy,jvillacorta/mitmproxy,vhaupert/mitmproxy,Kriechi/mitmproxy,ujjwal96/mitmproxy,fimad/mitmproxy,dufferzafar/mitmproxy,mitmproxy/mitmproxy,jvillacorta/mitmproxy,mosajjal/mitmproxy,ddworken/mitmproxy,gzzhanghao/mitmproxy,vhaupert/mitmproxy,tdickers/mitmproxy,zlorb/mitmproxy,xaxa89/mitmproxy,mitmproxy/mitmproxy,dwfreed/mitmproxy,mhils/mitmproxy,fimad/mitmproxy,xaxa89/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,ddworken/mitmproxy,fimad/mitmproxy,laurmurclar/mitmproxy,MatthewShao/mitmproxy,gzzhanghao/mitmproxy,laurmurclar/mitmproxy,Kriechi/mitmproxy,ujjwal96/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,ddworken/mitmproxy,Kriechi/mitmproxy,MatthewShao/mitmproxy,StevenVanAcker/mitmproxy,mosajjal/mitmproxy,dwfreed/mitmproxy,cortesi/mitmproxy,cortesi/mitmproxy,mitmproxy/mitmproxy,gzzhanghao/mitmproxy,xaxa89/mitmproxy,cortesi/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,Kriechi/mitmproxy,xaxa89/mitmproxy,tdickers/mitmproxy,jvillacorta/mitmproxy,MatthewShao/mitmproxy,MatthewShao/mitmproxy,StevenVanAcker/mitmproxy,vhaupert/mitmproxy,vhaupert/mitmproxy
6f45e82af789586baf7354b562bbb1587d94b28c
qual/tests/test_calendar.py
qual/tests/test_calendar.py
import unittest
from datetime import date

import qual

class TestProlepticGregorianCalendar(unittest.TestCase):
    def setUp(self):
        self.calendar = qual.ProlepticGregorianCalendar()

    def check_valid_date(self, year, month, day):
        d = self.calendar.date(year, month, day)
        self.assertIsNotNone(d)

    def check_invalid_date(self, year, month, day):
        self.assertRaises(Exception, lambda : self.calendar(year, month, day))

    def test_leap_year_from_before_1582(self):
        """Pope Gregory introduced the calendar in 1582"""
        self.check_valid_date(1200, 2, 29)

    def test_Julian_leap_day_is_not_a_valid_date(self):
        """This day /was/ a leap day contemporaneously,
           but is not a valid date of the Gregorian calendar."""
        self.check_invalid_date(1300, 2, 29)
import unittest
from datetime import date

import qual

class TestProlepticGregorianCalendar(unittest.TestCase):
    def setUp(self):
        self.calendar = qual.ProlepticGregorianCalendar()

    def check_valid_date(self, year, month, day):
        d = self.calendar.date(year, month, day)
        self.assertIsNotNone(d)

    def check_invalid_date(self, year, month, day):
        self.assertRaises(Exception, lambda : self.calendar(year, month, day))

    def test_leap_year_from_before_1582(self):
        """Pope Gregory introduced the calendar in 1582"""
        self.check_valid_date(1200, 2, 29)

    def test_day_missed_out_in_British_calendar_change(self):
        """This date never happened in English law:
           It was missed when changing from the Julian to
           Gregorian. This test proves that we are not
           using a historical British calendar."""
        self.check_valid_date(1752, 9, 3)

    def test_Julian_leap_day_is_not_a_valid_date(self):
        """This day /was/ a leap day contemporaneously,
           but is not a valid date of the Gregorian calendar."""
        self.check_invalid_date(1300, 2, 29)
Add a test for a date missing from English historical calendars.
Add a test for a date missing from English historical calendars.
Python
apache-2.0
jwg4/qual,jwg4/calexicon
d4c168cc552a444ecb3ee3059f12fa1c34c4419c
test_sempai.py
test_sempai.py
import jsonsempai

import os
import shutil
import sys
import tempfile

TEST_FILE = '''{
    "three": 3,
    "one": {
        "two": {
            "three": 3
        }
    }
}'''

class TestSempai(object):
    def setup(self):
        self.direc = tempfile.mkdtemp(prefix='jsonsempai')
        sys.path.append(self.direc)
        with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
            f.write(TEST_FILE)

    def teardown(self):
        sys.path.remove(self.direc)
        shutil.rmtree(self.direc)

    def test_import(self):
        import sempai
        assert sempai

    def test_access(self):
        import sempai
        assert sempai.three == 3

    def test_access_nested(self):
        import sempai
        assert sempai.one.two.three == 3

    def test_acts_like_dict(self):
        import sempai
        assert sempai.one.two == {"three": 3}

    def test_set(self):
        import sempai
        sempai.one.two.three = 4
        assert sempai.one.two.three == 4

    def test_location(self):
        import sempai
        assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
import jsonsempai

import os
import shutil
import sys
import tempfile

TEST_FILE = '''{
    "three": 3,
    "one": {
        "two": {
            "three": 3
        }
    }
}'''

class TestSempai(object):
    def setup(self):
        self.direc = tempfile.mkdtemp(prefix='jsonsempai')
        sys.path.append(self.direc)
        with open(os.path.join(self.direc, 'sempai.json'), 'w') as f:
            f.write(TEST_FILE)

    def teardown(self):
        sys.path.remove(self.direc)
        shutil.rmtree(self.direc)

    def test_import(self):
        import sempai
        assert sempai

    def test_access(self):
        import sempai
        assert sempai.three == 3

    def test_access_nested(self):
        import sempai
        assert sempai.one.two.three == 3

    def test_acts_like_dict(self):
        import sempai
        assert sempai.one.two == {"three": 3}

    def test_set(self):
        import sempai
        sempai.one.two.three = 4
        assert sempai.one.two.three == 4

    def test_del(self):
        import sempai
        del sempai.one.two.three
        assert sempai.one.two.get('three', 'not at home') == 'not at home'

    def test_location(self):
        import sempai
        assert sempai.__file__ == os.path.join(self.direc, 'sempai.json')
Add test for removing item
Add test for removing item
Python
mit
kragniz/json-sempai
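
Illustrative aside, not the library's actual implementation: the test_del case added above relies on attribute access mapping straight onto dictionary operations, roughly as in this minimal sketch.

    class AttrDict(dict):
        # route attribute access, assignment and deletion to the dict API
        __getattr__ = dict.__getitem__
        __setattr__ = dict.__setitem__
        __delattr__ = dict.__delitem__

    d = AttrDict(three=3)
    del d.three
    print(d.get('three', 'not at home'))   # not at home
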
004326064c87184e4373ab0b2d8d7ef9b46d94f9
tokens/conf.py
tokens/conf.py
PHASES = ( ('PHASE_01', 'In review',), ('PHASE_02', 'Active',), ('PHASE_02', 'Inactive',), )
PHASES = ( ('PHASE_01', 'In review',), ('PHASE_02', 'Active',), ('PHASE_02', 'Inactive',), ) TOKEN_TYPES = ( ('MintableToken', 'Mintable Token'), )
Add MintableToken as new token type
Add MintableToken as new token type
Python
apache-2.0
onyb/ethane,onyb/ethane,onyb/ethane,onyb/ethane
76ec25090ece865d67f63c07c32aff7cebf105c1
ynr/apps/people/migrations/0034_get_birth_year.py
ynr/apps/people/migrations/0034_get_birth_year.py
# Generated by Django 3.2.4 on 2021-10-27 14:41 from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") for person in Person.objects.all(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
# Generated by Django 3.2.4 on 2021-10-27 14:41 from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") for person in Person.objects.exclude(birth_date="").iterator(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
Improve performance of birth date data migration
Improve performance of birth date data migration
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
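
Aside on the pattern used in the fixed migration above, shown outside a migration with a hypothetical app and model name: exclude() keeps rows without a birth date out of the loop entirely, and iterator() streams the queryset instead of caching every Person in memory.

    from myapp.models import Person   # hypothetical app and model

    def truncate_birth_dates():
        for person in Person.objects.exclude(birth_date="").iterator():
            person.birth_date = person.birth_date.split("-")[0]
            # update_fields limits the UPDATE to the single changed column
            person.save(update_fields=["birth_date"])
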
1782b15b244597d56bff18c465237c7e1f3ab482
wikked/commands/users.py
wikked/commands/users.py
import logging import getpass from wikked.bcryptfallback import generate_password_hash from wikked.commands.base import WikkedCommand, register_command logger = logging.getLogger(__name__) @register_command class UsersCommand(WikkedCommand): def __init__(self): super(UsersCommand, self).__init__() self.name = 'users' self.description = "Lists users of this wiki." def setupParser(self, parser): pass def run(self, ctx): logger.info("Users:") for user in ctx.wiki.auth.getUsers(): logger.info(" - " + user.username) @register_command class NewUserCommand(WikkedCommand): def __init__(self): super(NewUserCommand, self).__init__() self.name = 'newuser' self.description = ( "Generates the entry for a new user so you can " "copy/paste it in your `.wikirc`.") def setupParser(self, parser): parser.add_argument('username', nargs=1) parser.add_argument('password', nargs='?') def run(self, ctx): username = ctx.args.username password = ctx.args.password or getpass.getpass('Password: ') password = generate_password_hash(password) logger.info("%s = %s" % (username[0], password))
import logging import getpass from wikked.bcryptfallback import generate_password_hash from wikked.commands.base import WikkedCommand, register_command logger = logging.getLogger(__name__) @register_command class UsersCommand(WikkedCommand): def __init__(self): super(UsersCommand, self).__init__() self.name = 'users' self.description = "Lists users of this wiki." def setupParser(self, parser): pass def run(self, ctx): logger.info("Users:") for user in ctx.wiki.auth.getUsers(): logger.info(" - " + user.username) @register_command class NewUserCommand(WikkedCommand): def __init__(self): super(NewUserCommand, self).__init__() self.name = 'newuser' self.description = ( "Generates the entry for a new user so you can " "copy/paste it in your `.wikirc`.") def setupParser(self, parser): parser.add_argument('username', nargs=1) parser.add_argument('password', nargs='?') def run(self, ctx): username = ctx.args.username password = ctx.args.password or getpass.getpass('Password: ') password = generate_password_hash(password) logger.info("%s = %s" % (username[0], password)) logger.info("") logger.info("(copy this into your .wikirc file)")
Add some explanation as to what to do with the output.
newuser: Add some explanation as to what to do with the output.
Python
apache-2.0
ludovicchabant/Wikked,ludovicchabant/Wikked,ludovicchabant/Wikked
2342cd5ede9fac66007d2b15025feeff52c2400b
flexget/plugins/operate/verify_ssl_certificates.py
flexget/plugins/operate/verify_ssl_certificates.py
from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin import logging from flexget import plugin from flexget.event import event log = logging.getLogger('verify_ssl') class VerifySSLCertificates(object): """ Plugin that can off SSL certificate verification. Example:: verify_ssl_certificates: no """ schema = {'type': 'boolean'} @plugin.priority(253) def on_task_start(self, task, config): if config is False: task.requests.verify = False @event('plugin.register') def register_plugin(): plugin.register(VerifySSLCertificates, 'verify_ssl_certificates', api_ver=2)
from __future__ import unicode_literals, division, absolute_import from builtins import * # pylint: disable=unused-import, redefined-builtin import logging from requests.packages import urllib3 from flexget import plugin from flexget.event import event log = logging.getLogger('verify_ssl') class VerifySSLCertificates(object): """ Plugin that can off SSL certificate verification. Example:: verify_ssl_certificates: no """ schema = {'type': 'boolean'} @plugin.priority(253) def on_task_start(self, task, config): if config is False: task.requests.verify = False # Disabling verification results in a warning for every HTTPS # request: # "InsecureRequestWarning: Unverified HTTPS request is being made. # Adding certificate verification is strongly advised. See: # https://urllib3.readthedocs.io/en/latest/security.html" # Disable those warnings because the user has explicitly disabled # verification and the warning is not beneficial. # This change is permanent rather than task scoped, but there won't # be any warnings to disable when verification is enabled. urllib3.disable_warnings() @event('plugin.register') def register_plugin(): plugin.register(VerifySSLCertificates, 'verify_ssl_certificates', api_ver=2)
Disable warnings about disabling SSL verification.
Disable warnings about disabling SSL verification. Disabling SSL certificate verification results in a warning for every HTTPS request: "InsecureRequestWarning: Unverified HTTPS request is being made. Adding certificate verification is strongly advised. See: https://urllib3.readthedocs.io/en/latest/security.html" Disable those warnings because the user has explicitly disabled verification and so the warning is not beneficial.
Python
mit
OmgOhnoes/Flexget,qk4l/Flexget,jacobmetrick/Flexget,jacobmetrick/Flexget,Flexget/Flexget,LynxyssCZ/Flexget,crawln45/Flexget,Flexget/Flexget,ianstalk/Flexget,OmgOhnoes/Flexget,poulpito/Flexget,drwyrm/Flexget,malkavi/Flexget,jawilson/Flexget,malkavi/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,ianstalk/Flexget,gazpachoking/Flexget,sean797/Flexget,crawln45/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,drwyrm/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,JorisDeRieck/Flexget,sean797/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,tobinjt/Flexget,tobinjt/Flexget,jacobmetrick/Flexget,qk4l/Flexget,qk4l/Flexget,ianstalk/Flexget,tobinjt/Flexget,malkavi/Flexget,jawilson/Flexget,Danfocus/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,poulpito/Flexget,drwyrm/Flexget,malkavi/Flexget,sean797/Flexget,Danfocus/Flexget,crawln45/Flexget,poulpito/Flexget,crawln45/Flexget,OmgOhnoes/Flexget,tobinjt/Flexget,LynxyssCZ/Flexget,Flexget/Flexget,gazpachoking/Flexget
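
Aside showing the same idea as the commit above in a standalone script (the URL is only a placeholder): once certificate verification is switched off, urllib3's per-request InsecureRequestWarning can be silenced up front.

    import requests
    from requests.packages import urllib3

    # silence InsecureRequestWarning for the unverified requests below
    urllib3.disable_warnings()

    # verify=False skips certificate validation for this request
    response = requests.get("https://example.com/", verify=False)
    print(response.status_code)
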
3ca30011794143785955792e391902823427ef77
registration/views.py
registration/views.py
# Create your views here. from django.http import HttpResponse from registration.models import Team from django.core import serializers def get_teams(request): return_data = serializers.serialize("json", Team.objects.all()) return HttpResponse(return_data, content_type="application/json")
# Create your views here. from django.http import HttpResponse from registration.models import Team from django.core import serializers from django.views.decorators.cache import cache_page @cache_page(60 * 5) def get_teams(request): return_data = serializers.serialize("json", Team.objects.all()) return HttpResponse(return_data, content_type="application/json")
Add caching for getTeams API call
Add caching for getTeams API call
Python
bsd-3-clause
hgrimberg01/esc,hgrimberg01/esc
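
Aside with a hypothetical view, illustrating the decorator added above: cache_page stores the rendered response for the given number of seconds, so repeated calls within that window are served from the cache instead of hitting the database.

    from django.http import JsonResponse
    from django.views.decorators.cache import cache_page

    @cache_page(60 * 5)           # cache the response for five minutes
    def team_list(request):       # hypothetical view
        return JsonResponse({"teams": []})
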
33fbc424d725836355c071593042953fb195cff6
server/project/apps/core/serializers.py
server/project/apps/core/serializers.py
from rest_framework import serializers from .models import Playlist, Track, Favorite class TrackSerializer(serializers.ModelSerializer): class Meta: model = Track fields = '__all__' class PlaylistSerializer(serializers.ModelSerializer): tracks = TrackSerializer(many=True) class Meta: model = Playlist fields = ('id', 'playlist_name', 'user_id', 'tracks') def create(self, validated_data): tracks_data = validated_data.pop('tracks') playlist = Playlist.objects.create(**validated_data) for track_data in tracks_data: Track.objects.create(**track_data) return playlist def update(self, instance, validated_data): tracks_data = validated_data.pop('tracks') instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name) instance.save() Track.objects.filter(playlist=instance.id).delete() for track_data in tracks_data: Track.objects.create(**track_data) instance.tracks.add(track_id) instance.save() return Playlist.objects.get(pk=instance.id) class FavoriteSerializer(serializers.ModelSerializer): class Meta: model = Favorite fields = '__all__'
from rest_framework import serializers from .models import Playlist, Track, Favorite class TrackSerializer(serializers.ModelSerializer): class Meta: model = Track fields = '__all__' class PlaylistSerializer(serializers.ModelSerializer): tracks = TrackSerializer(many=True) class Meta: model = Playlist fields = ('id', 'playlist_name', 'user_id', 'tracks') def create(self, validated_data): tracks_data = validated_data.pop('tracks') playlist = Playlist.objects.create(**validated_data) for track_data in tracks_data: Track.objects.create(**track_data) return playlist def update(self, instance, validated_data): tracks_data = validated_data.pop('tracks') instance.playlist_name = validated_data.get('playlist_name', instance.playlist_name) instance.save() Track.objects.filter(playlist=instance.id).delete() for track_data in tracks_data: track_id = Track.objects.create(**track_data) instance.tracks.add(track_id) instance.save() return Playlist.objects.get(pk=instance.id) class FavoriteSerializer(serializers.ModelSerializer): class Meta: model = Favorite fields = '__all__'
Add tracks to playlist on update
Add tracks to playlist on update
Python
mit
hrr20-over9000/9001,SoundMoose/SoundMoose,SoundMoose/SoundMoose,douvaughn/9001,douvaughn/9001,hxue920/9001,hrr20-over9000/9001,hxue920/9001,CalHoll/SoundMoose,CalHoll/SoundMoose,douvaughn/9001,CalHoll/SoundMoose,hrr20-over9000/9001,hxue920/9001,douvaughn/9001,hxue920/9001,SoundMoose/SoundMoose,SoundMoose/SoundMoose,CalHoll/SoundMoose
3bc3a9c29e448e6ca1eaa3c962d144bd1b5f874e
migrations/versions/d596dc9b53d9_create_redmine_tables.py
migrations/versions/d596dc9b53d9_create_redmine_tables.py
"""create redmine tables Revision ID: d596dc9b53d9 Revises: 2ffb0d589280 Create Date: 2017-08-14 14:43:31.234637 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'd596dc9b53d9' down_revision = '2ffb0d589280' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('redmine_users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Unicode(length=16), nullable=False), sa.Column('api_key', sa.Unicode(length=40), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('redmine_projectchannel', sa.Column('id', sa.Integer(), nullable=False), sa.Column('project_id', sa.Integer(), nullable=False), sa.Column('channels', sa.Unicode(length=255), nullable=False), sa.PrimaryKeyConstraint('id') ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_table('remine_projectroom') op.drop_table('redmine_users') ### end Alembic commands ###
"""create redmine tables Revision ID: d596dc9b53d9 Revises: 2ffb0d589280 Create Date: 2017-08-14 14:43:31.234637 """ from alembic import op import sqlalchemy as sa # revision identifiers, used by Alembic. revision = 'd596dc9b53d9' down_revision = '2ffb0d589280' branch_labels = None depends_on = None def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('redmine_users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Unicode(length=16), nullable=False), sa.Column('api_key', sa.Unicode(length=40), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('redmine_projectchannel', sa.Column('id', sa.Integer(), nullable=False), sa.Column('project_id', sa.Integer(), nullable=False), sa.Column('channels', sa.Unicode(length=255), nullable=False), sa.PrimaryKeyConstraint('id') ) ### end Alembic commands ### def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_table('redmine_projectchannel') op.drop_table('redmine_users') ### end Alembic commands ###
Downgrade migration table name updated is current
Downgrade migration table name updated is current
Python
mit
beproud/beproudbot,beproud/beproudbot
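
Aside with throwaway table names, showing the symmetry the fix above restores: downgrade() should drop exactly the tables upgrade() created, using the same names and in reverse order of creation.

    from alembic import op
    import sqlalchemy as sa

    def upgrade():
        op.create_table('example_parent',
            sa.Column('id', sa.Integer(), primary_key=True),
        )
        op.create_table('example_child',
            sa.Column('id', sa.Integer(), primary_key=True),
        )

    def downgrade():
        # reverse order of creation, names matching upgrade() exactly
        op.drop_table('example_child')
        op.drop_table('example_parent')
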
193831b6ee8b49674e32413e71819f2451bfc844
situational/apps/quick_history/forms.py
situational/apps/quick_history/forms.py
from django import forms from . import widgets class HistoryDetailsForm(forms.Form): CIRCUMSTANCE_CHOICES = [ ("full_time", "Full time"), ("part_time", "Part time"), ("work_programme", "Work programme"), ("unemployed", "Unemployed"), ("sick", "Off sick"), ("training", "In full time training"), ("caring", "Caring full time for others"), ("none", "None of these"), ] circumstances = forms.ChoiceField( widget=forms.RadioSelect(), choices=CIRCUMSTANCE_CHOICES ) date = forms.DateField( widget=widgets.MonthYearWidget(years=range(2000, 2016)) ) description = forms.CharField(required=False) def clean(self): cleaned_data = super(HistoryDetailsForm, self).clean() return cleaned_data
from django import forms from . import widgets class HistoryDetailsForm(forms.Form): CIRCUMSTANCE_CHOICES = [ ("full_time", "Full time"), ("part_time", "Part time"), ("unemployed", "Unemployed"), ("sick", "Off sick"), ("training", "In full time training"), ("caring", "Caring full time for others"), ("none", "None of these"), ] circumstances = forms.ChoiceField( widget=forms.RadioSelect(), choices=CIRCUMSTANCE_CHOICES ) date = forms.DateField( widget=widgets.MonthYearWidget(years=range(2000, 2016)) ) description = forms.CharField(required=False) def clean(self): cleaned_data = super(HistoryDetailsForm, self).clean() return cleaned_data
Remove "work programme" option from quick history
Remove "work programme" option from quick history
Python
bsd-3-clause
lm-tools/situational,lm-tools/sectors,lm-tools/situational,lm-tools/situational,lm-tools/situational,lm-tools/sectors,lm-tools/situational,lm-tools/sectors,lm-tools/sectors
bf8d2b33794f676f27b9a22bca57a06b94f7c2ce
smart/accesscontrol/rules/helper_app.py
smart/accesscontrol/rules/helper_app.py
""" Rules for PHAs, AccessTokens, ReqTokens """ from smart.views import * def grant(happ, permset): """ grant the permissions of an account to this permset """ def need_admin(*a,**b): return happ.admin_p permset.grant(get_first_record_tokens, None) permset.grant(get_next_record_tokens, None) permset.grant(get_record_tokens, None) permset.grant(download_ontology, None) permset.grant(record_search, [need_admin]) permset.grant(record_post_objects, [need_admin]) permset.grant(put_demographics, [need_admin])
""" Rules for PHAs, AccessTokens, ReqTokens """ from smart.views import * def grant(happ, permset): """ grant the permissions of an account to this permset """ def need_admin(*a,**b): return happ.admin_p permset.grant(get_first_record_tokens, None) permset.grant(get_next_record_tokens, None) permset.grant(get_record_tokens, None) permset.grant(download_ontology, None) permset.grant(record_search, [need_admin]) permset.grant(record_post_objects, [need_admin]) # permset.grant(put_demographics, [need_admin])
Remove put_demographics from the permset of the background apps
Remove put_demographics from the permset of the background apps
Python
apache-2.0
smart-classic/smart_server,smart-classic/smart_server
1ca9052a989ad0c1642875c7f29b8ba2130011fa
south/introspection_plugins/__init__.py
south/introspection_plugins/__init__.py
# This module contains built-in introspector plugins for various common # Django apps. # These imports trigger the lower-down files import south.introspection_plugins.geodjango import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions
# This module contains built-in introspector plugins for various common # Django apps. # These imports trigger the lower-down files import south.introspection_plugins.geodjango import south.introspection_plugins.django_tagging import south.introspection_plugins.django_taggit import south.introspection_plugins.django_objectpermissions import south.introspection_plugins.annoying_autoonetoone
Add import of django-annoying patch
Add import of django-annoying patch
Python
apache-2.0
smartfile/django-south,smartfile/django-south
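
Aside sketching what such a plugin module typically contains (an assumption about annoying_autoonetoone.py, not its verified contents): South is told it may introspect django-annoying's AutoOneToOneField with its default rules.

    from south.modelsinspector import add_introspection_rules

    # no extra rules needed; just register the field's import path pattern
    add_introspection_rules([], [r"^annoying\.fields\.AutoOneToOneField"])
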
93373242eab8d387a9b13c567239fa2e36b10ffa
mqtt_logger/management/commands/runmqttlistener.py
mqtt_logger/management/commands/runmqttlistener.py
from django.core.management.base import BaseCommand, CommandError from mqtt_logger.models import * class Command(BaseCommand): help = 'Start listening to mqtt subscriptions and save messages in database.' def add_arguments(self, parser): pass def handle(self, *args, **options): self.stdout.write("Starting MQTT listener...") clients = MQTTSubscription.subscribe_all(start_loop=True) for c in clients: self.stdout.write(" %s:%s %s"%(c.host, c.port, c.topics)) self.stdout.write("MQTT listener started.") self.stdout.write("Hit <ENTER> to quit.") wait = raw_input()
from django.core.management.base import BaseCommand, CommandError from mqtt_logger.models import * import time class Command(BaseCommand): help = 'Start listening to mqtt subscriptions and save messages in database.' def add_arguments(self, parser): pass def handle(self, *args, **options): self.stdout.write("Starting MQTT listener...") subs = list(MQTTSubscription.objects.filter(active=True)) for s in subs: self.stdout.write(" Connecting to %s:%s %s"%(s.server, s.port, s.topic)) s.client = s.subscribe(start_loop=True) while(True): time.sleep(10) newsubs = MQTTSubscription.objects.filter(active=True) for s in subs: if s not in newsubs: self.stdout.write(" Disconnecting from %s:%s %s"%(s.server, s.port, s.topic)) s.client.disconnect() subs.remove(s) for s in newsubs: if s not in subs: self.stdout.write(" Connecting to %s:%s %s"%(s.server, s.port, s.topic)) s.client = s.subscribe(start_loop=True) subs.append(s)
Make the listener automatically update the subscriptions.
Make the listener automatically update the subscriptions.
Python
mit
ast0815/mqtt-hub,ast0815/mqtt-hub
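
Aside generalising the loop added above into a plain-Python sketch (all names are invented): poll the desired state on an interval, then connect or disconnect whatever differs from what is currently running.

    import time

    def reconcile_forever(current, fetch_desired, connect, disconnect, interval=10):
        while True:
            time.sleep(interval)
            desired = fetch_desired()
            for item in list(current):      # copy, since current is mutated below
                if item not in desired:
                    disconnect(item)
                    current.remove(item)
            for item in desired:
                if item not in current:
                    connect(item)
                    current.append(item)
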
e019ce982325a6284e844df3c9a5f8172f494ba3
run_mandel.py
run_mandel.py
import fractal import bmp pixels = fractal.mandelbrot(488, 256) bmp.write_grayscale('mandel.bmp', pixels)
import fractal import bmp def main(): pixels = fractal.mandelbrot(488, 256) bmp.write_grayscale('mandel.bmp', pixels) if __name__ == '__main__': main()
Add a main runner for mandel
Add a main runner for mandel
Python
mit
kentoj/python-fundamentals
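
Aside on the idiom introduced above: the __main__ guard lets the module be imported (for tests or reuse) without side effects, while still working as a script.

    def main():
        print("running as a script")

    if __name__ == '__main__':
        # executes only when the file is run directly, not on import
        main()
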
f7f489369fa675e6efe0fa5b164b7ee1fc25f3fd
test/test_legend_labels.py
test/test_legend_labels.py
# -*- coding: utf-8 -*- # import helpers def plot(): from matplotlib import pyplot as plt import numpy as np fig = plt.figure() x = np.ma.arange(0, 2*np.pi, 0.02) y1 = np.sin(1*x) y2 = np.sin(2*x) y3 = np.sin(3*x) plt.plot(x, y1, label='y1') plt.plot(x, y2, label=None) plt.plot(x, y3, label='y4') plt.legend() return fig def test(): phash = helpers.Phash(plot()) assert phash.phash == 'eb785e0aaed6c190', phash.get_details() return if __name__ == '__main__': # print(helpers.Phash(plot()).phash) helpers.compare_with_latex(plot())
# -*- coding: utf-8 -*- # import helpers def plot(): from matplotlib import pyplot as plt import numpy as np fig = plt.figure() x = np.ma.arange(0, 2*np.pi, 0.02) y1 = np.sin(1*x) y2 = np.sin(2*x) y3 = np.sin(3*x) plt.plot(x, y1, label='y1') plt.plot(x, y2, label=None) plt.plot(x, y3, label='y4') plt.legend() return fig def test(): phash = helpers.Phash(plot()) assert phash.phash == 'eb785e0aaed68194', phash.get_details() return if __name__ == '__main__': # print(helpers.Phash(plot()).phash) helpers.compare_with_latex(plot())
Test hash needed to change.
Test hash needed to change.
Python
mit
nschloe/matplotlib2tikz,m-rossi/matplotlib2tikz,danielhkl/matplotlib2tikz