commit: stringlengths (min 40, max 40)
old_file: stringlengths (min 4, max 264)
new_file: stringlengths (min 4, max 264)
old_contents: stringlengths (min 0, max 3.26k)
new_contents: stringlengths (min 1, max 4.43k)
subject: stringlengths (min 15, max 624)
message: stringlengths (min 15, max 4.7k)
lang: stringclasses (3 values)
license: stringclasses (13 values)
repos: stringlengths (min 5, max 91.5k)
91229ab93609f66af866f7ef87b576a84546aeab
api/base/parsers.py
api/base/parsers.py
from rest_framework.parsers import JSONParser from api.base.renderers import JSONAPIRenderer from api.base.exceptions import JSONAPIException class JSONAPIParser(JSONParser): """ Parses JSON-serialized data. Overrides media_type. """ media_type = 'application/vnd.api+json' renderer_class = JSONAPIRenderer def parse(self, stream, media_type=None, parser_context=None): """ Parses the incoming bytestream as JSON and returns the resulting data """ result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context) data = result.get('data', {}) if data: if 'attributes' not in data: raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.') id = data.get('id') type = data.get('type') attributes = data.get('attributes') parsed = {'id': id, 'type': type} parsed.update(attributes) return parsed else: raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.') class JSONAPIParserForRegularJSON(JSONAPIParser): media_type = 'application/json'
from rest_framework.parsers import JSONParser from rest_framework.exceptions import ParseError from api.base.renderers import JSONAPIRenderer from api.base.exceptions import JSONAPIException class JSONAPIParser(JSONParser): """ Parses JSON-serialized data. Overrides media_type. """ media_type = 'application/vnd.api+json' renderer_class = JSONAPIRenderer def parse(self, stream, media_type=None, parser_context=None): """ Parses the incoming bytestream as JSON and returns the resulting data """ result = super(JSONAPIParser, self).parse(stream, media_type=media_type, parser_context=parser_context) if not isinstance(result, dict): raise ParseError() data = result.get('data', {}) if data: if 'attributes' not in data: raise JSONAPIException(source={'pointer': '/data/attributes'}, detail='This field is required.') id = data.get('id') object_type = data.get('type') attributes = data.get('attributes') parsed = {'id': id, 'type': object_type} parsed.update(attributes) return parsed else: raise JSONAPIException(source={'pointer': '/data'}, detail='This field is required.') class JSONAPIParserForRegularJSON(JSONAPIParser): media_type = 'application/json'
Raise Parse Error if request data is not a dictionary
Raise Parse Error if request data is not a dictionary
Python
apache-2.0
mluo613/osf.io,adlius/osf.io,alexschiller/osf.io,samanehsan/osf.io,doublebits/osf.io,Ghalko/osf.io,rdhyee/osf.io,GageGaskins/osf.io,erinspace/osf.io,samanehsan/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,doublebits/osf.io,caseyrygt/osf.io,adlius/osf.io,caneruguz/osf.io,kwierman/osf.io,erinspace/osf.io,icereval/osf.io,cslzchen/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,zamattiac/osf.io,zachjanicki/osf.io,asanfilippo7/osf.io,chennan47/osf.io,KAsante95/osf.io,caneruguz/osf.io,abought/osf.io,RomanZWang/osf.io,leb2dg/osf.io,acshi/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,chennan47/osf.io,brianjgeiger/osf.io,caseyrygt/osf.io,haoyuchen1992/osf.io,cslzchen/osf.io,laurenrevere/osf.io,rdhyee/osf.io,mluke93/osf.io,billyhunt/osf.io,mattclark/osf.io,TomBaxter/osf.io,KAsante95/osf.io,baylee-d/osf.io,asanfilippo7/osf.io,mluo613/osf.io,caseyrollins/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,Ghalko/osf.io,chrisseto/osf.io,doublebits/osf.io,kwierman/osf.io,mluo613/osf.io,mluke93/osf.io,amyshi188/osf.io,crcresearch/osf.io,zamattiac/osf.io,jnayak1/osf.io,aaxelb/osf.io,crcresearch/osf.io,cwisecarver/osf.io,emetsger/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,GageGaskins/osf.io,zamattiac/osf.io,wearpants/osf.io,doublebits/osf.io,samchrisinger/osf.io,kch8qx/osf.io,mattclark/osf.io,aaxelb/osf.io,ticklemepierce/osf.io,rdhyee/osf.io,mfraezz/osf.io,zachjanicki/osf.io,mfraezz/osf.io,adlius/osf.io,haoyuchen1992/osf.io,caseyrygt/osf.io,KAsante95/osf.io,njantrania/osf.io,emetsger/osf.io,emetsger/osf.io,saradbowman/osf.io,hmoco/osf.io,caseyrollins/osf.io,binoculars/osf.io,abought/osf.io,ticklemepierce/osf.io,monikagrabowska/osf.io,brandonPurvis/osf.io,pattisdr/osf.io,chrisseto/osf.io,samchrisinger/osf.io,cosenal/osf.io,KAsante95/osf.io,acshi/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,crcresearch/osf.io,chennan47/osf.io,acshi/osf.io,pattisdr/osf.io,RomanZWang/osf.io,alexschiller/osf.io,saradbowman/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,alexschiller/osf.io,mfraezz/osf.io,monikagrabowska/osf.io,cosenal/osf.io,danielneis/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,danielneis/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,alexschiller/osf.io,RomanZWang/osf.io,jnayak1/osf.io,caneruguz/osf.io,acshi/osf.io,caseyrollins/osf.io,mattclark/osf.io,hmoco/osf.io,billyhunt/osf.io,samchrisinger/osf.io,jnayak1/osf.io,kch8qx/osf.io,cosenal/osf.io,rdhyee/osf.io,SSJohns/osf.io,chrisseto/osf.io,pattisdr/osf.io,amyshi188/osf.io,KAsante95/osf.io,billyhunt/osf.io,zachjanicki/osf.io,petermalcolm/osf.io,asanfilippo7/osf.io,GageGaskins/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,GageGaskins/osf.io,njantrania/osf.io,TomHeatwole/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,kwierman/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,hmoco/osf.io,TomHeatwole/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,kwierman/osf.io,DanielSBrown/osf.io,SSJohns/osf.io,sloria/osf.io,Ghalko/osf.io,kch8qx/osf.io,danielneis/osf.io,laurenrevere/osf.io,zamattiac/osf.io,binoculars/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,felliott/osf.io,abought/osf.io,sloria/osf.io,brandonPurvis/osf.io,petermalcolm/osf.io,abought/osf.io,felliott/osf.io,TomBaxter/osf.io,chrisseto/osf.io,caseyrygt/osf.io,billyhunt/osf.io,GageGaskins/osf.io,petermalcolm/osf.io,acshi/osf.io,HalcyonChimera/osf.io,cosenal/osf.io,emetsger/osf.io,monikagrabowska/osf.io,erinspace/osf.io,Johnetordoff/osf.io,wearpants/osf.io,icereval/osf.io,hmoco/osf.io,njantrania/osf.io,mluo613/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,jnayak1/osf.io,wearpants/osf.io,aaxelb/osf.io,haoyuchen1992/osf.io,Johnetordoff/osf.io,danielneis/osf.io,ticklemepierce/osf.io,TomHeatwole/osf.io,njantrania/osf.io,adlius/osf.io,felliott/osf.io,amyshi188/osf.io,leb2dg/osf.io,sloria/osf.io,brandonPurvis/osf.io,mluo613/osf.io,samanehsan/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,leb2dg/osf.io,ZobairAlijan/osf.io,DanielSBrown/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,Ghalko/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,Nesiehr/osf.io,SSJohns/osf.io,mluke93/osf.io,mluke93/osf.io,leb2dg/osf.io,alexschiller/osf.io,binoculars/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,aaxelb/osf.io,samanehsan/osf.io,CenterForOpenScience/osf.io,asanfilippo7/osf.io
bdbff5ea5548067713951a85b05f3818e537c8d4
streamparse/bootstrap/project/src/bolts/wordcount.py
streamparse/bootstrap/project/src/bolts/wordcount.py
from __future__ import absolute_import, print_function, unicode_literals from collections import Counter from streamparse.bolt import Bolt class WordCounter(Bolt): def initialize(self, conf, ctx): self.counts = Counter() def process(self, tup): word = tup.values[0] self.counts[word] += 1 self.emit([word, self.counts[word]]) self.log('%s: %d' % (word, self.counts[word]))
from __future__ import absolute_import, print_function, unicode_literals from collections import Counter from streamparse.bolt import Bolt class WordCounter(Bolt): AUTO_ACK = True # automatically acknowledge tuples after process() AUTO_ANCHOR = True # automatically anchor tuples to current tuple AUTO_FAIL = True # automatically fail tuples when exceptions occur def initialize(self, conf, ctx): self.counts = Counter() def process(self, tup): word = tup.values[0] self.counts[word] += 1 self.emit([word, self.counts[word]]) self.log('%s: %d' % (word, self.counts[word]))
Update quickstart project to use AUTO_*
Update quickstart project to use AUTO_*
Python
apache-2.0
crohling/streamparse,petchat/streamparse,petchat/streamparse,eric7j/streamparse,msmakhlouf/streamparse,codywilbourn/streamparse,Parsely/streamparse,codywilbourn/streamparse,msmakhlouf/streamparse,scrapinghub/streamparse,Parsely/streamparse,petchat/streamparse,phanib4u/streamparse,petchat/streamparse,scrapinghub/streamparse,msmakhlouf/streamparse,scrapinghub/streamparse,scrapinghub/streamparse,crohling/streamparse,msmakhlouf/streamparse,phanib4u/streamparse,petchat/streamparse,msmakhlouf/streamparse,hodgesds/streamparse,hodgesds/streamparse,scrapinghub/streamparse,eric7j/streamparse
95988fc4e5d7b5b5fa3235000ad9680c168c485c
aiospamc/__init__.py
aiospamc/__init__.py
#!/usr/bin/env python3 '''aiospamc package. An asyncio-based library to communicate with SpamAssassin's SPAMD service.''' from aiospamc.client import Client __all__ = ('Client', 'MessageClassOption', 'ActionOption') __author__ = 'Michael Caley' __copyright__ = 'Copyright 2016, 2017 Michael Caley' __license__ = 'MIT' __version__ = '0.3.0' __email__ = '[email protected]'
#!/usr/bin/env python3 '''aiospamc package. An asyncio-based library to communicate with SpamAssassin's SPAMD service.''' from aiospamc.client import Client from aiospamc.options import ActionOption, MessageClassOption __all__ = ('Client', 'MessageClassOption', 'ActionOption') __author__ = 'Michael Caley' __copyright__ = 'Copyright 2016, 2017 Michael Caley' __license__ = 'MIT' __version__ = '0.3.0' __email__ = '[email protected]'
Add import ActionOption and MessageClassOption to __all__
Add import ActionOption and MessageClassOption to __all__
Python
mit
mjcaley/aiospamc
f1793ed8a494701271b4a4baff8616e9c6202e80
message_view.py
message_view.py
import sublime import sublime_plugin PANEL_NAME = "SublimeLinter Messages" OUTPUT_PANEL = "output." + PANEL_NAME def plugin_unloaded(): for window in sublime.windows(): window.destroy_output_panel(PANEL_NAME) class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): panel_view = self.window.create_output_panel(PANEL_NAME) panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(0) self.window.run_command("show_panel", {"panel": OUTPUT_PANEL}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand): def run(self): self.window.destroy_output_panel(PANEL_NAME)
import sublime import sublime_plugin PANEL_NAME = "SublimeLinter Messages" OUTPUT_PANEL = "output." + PANEL_NAME def plugin_unloaded(): for window in sublime.windows(): window.destroy_output_panel(PANEL_NAME) class SublimeLinterDisplayPanelCommand(sublime_plugin.WindowCommand): def run(self, msg=""): window = self.window if is_panel_active(window): panel_view = window.find_output_panel(PANEL_NAME) else: panel_view = window.create_output_panel(PANEL_NAME) scroll_to = panel_view.size() msg = msg.rstrip() + '\n\n\n' panel_view.set_read_only(False) panel_view.run_command('append', {'characters': msg}) panel_view.set_read_only(True) panel_view.show(scroll_to) window.run_command("show_panel", {"panel": OUTPUT_PANEL}) class SublimeLinterRemovePanelCommand(sublime_plugin.WindowCommand): def run(self): self.window.destroy_output_panel(PANEL_NAME) def is_panel_active(window): return window.active_panel() == OUTPUT_PANEL
Append messages if message view is currently open
Append messages if message view is currently open
Python
mit
SublimeLinter/SublimeLinter3,SublimeLinter/SublimeLinter3
4803578ebb306e2e142b629d98cb82899c0b0270
authorize/__init__.py
authorize/__init__.py
import xml.etree.ElementTree as E from authorize.configuration import Configuration from authorize.address import Address from authorize.bank_account import BankAccount from authorize.batch import Batch from authorize.credit_card import CreditCard from authorize.customer import Customer from authorize.environment import Environment from authorize.exceptions import AuthorizeError from authorize.exceptions import AuthorizeConnectionError from authorize.exceptions import AuthorizeResponseError from authorize.exceptions import AuthorizeInvalidError from authorize.recurring import Recurring from authorize.transaction import Transaction # Monkeypatch the ElementTree module so that we can use CDATA element types E._original_serialize_xml = E._serialize_xml def _serialize_xml(write, elem, *args): if elem.tag == '![CDATA[': write('<![CDATA[%s]]>' % elem.text) return return E._original_serialize_xml(write, elem, *args) E._serialize_xml = E._serialize['xml'] = _serialize_xml
import xml.etree.ElementTree as E from authorize.configuration import Configuration from authorize.address import Address from authorize.bank_account import BankAccount from authorize.batch import Batch from authorize.credit_card import CreditCard from authorize.customer import Customer from authorize.environment import Environment from authorize.exceptions import AuthorizeError from authorize.exceptions import AuthorizeConnectionError from authorize.exceptions import AuthorizeResponseError from authorize.exceptions import AuthorizeInvalidError from authorize.recurring import Recurring from authorize.transaction import Transaction # Monkeypatch the ElementTree module so that we can use CDATA element types E._original_serialize_xml = E._serialize_xml def _serialize_xml(write, elem, *args, **kwargs): if elem.tag == '![CDATA[': write('<![CDATA[%s]]>' % elem.text) return return E._original_serialize_xml(write, elem, *args, **kwargs) E._serialize_xml = E._serialize['xml'] = _serialize_xml
Fix monkey patching to pass kwargs required by Python 3.4
Fix monkey patching to pass kwargs required by Python 3.4
Python
mit
aryeh/py-authorize,uglycitrus/py-authorize,vcatalano/py-authorize
052a29030c0665f5724b0fe83550ce7d81202002
incuna_test_utils/testcases/urls.py
incuna_test_utils/testcases/urls.py
import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLTestMixin(object): def assert_url_matches_view(self, view, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the expected view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) resolved_view = resolve(expected_url).func if hasattr(view, 'cls'): self.assertEqual(resolved_view.cls, view) else: self.assertEqual(resolved_view.__name__, view.__name__) class URLTestCase(URLTestMixin, TestCase): pass
import warnings from django.core.urlresolvers import resolve, reverse from django.test import TestCase class URLTestMixin(object): def assert_url_matches_view(self, view, expected_url, url_name, url_args=None, url_kwargs=None): """ Assert a view's url is correctly configured Check the url_name reverses to give a correctly formated expected_url. Check the expected_url resolves to the expected view. """ reversed_url = reverse(url_name, args=url_args, kwargs=url_kwargs) self.assertEqual(reversed_url, expected_url) resolved_view = resolve(expected_url).func if hasattr(resolved_view, 'cls'): self.assertEqual(resolved_view.cls, view) else: self.assertEqual(resolved_view.__name__, view.__name__) class URLTestCase(URLTestMixin, TestCase): pass
Fix check for cls attribute
Fix check for cls attribute * If resolved_view has a cls attribute then view should already be a class.
Python
bsd-2-clause
incuna/incuna-test-utils,incuna/incuna-test-utils
e8dd4ca8bd51b84d5d7d5a6a1c4144475e066bf1
zabbix.py
zabbix.py
import requests class Api(object): def __init__(self, server='http://localhost/zabbix'): self.session = requests.Session() self.session.headers.update({ 'Content-Type': 'application/json' }) self.url = server + '/api_jsonrpc.php' self.auth = '' self.id = 0
import requests class ZabbixError(Exception): pass class Api(object): def __init__(self, server='http://localhost/zabbix'): self.session = requests.Session() self.session.headers.update({ 'Content-Type': 'application/json' }) self.url = server + '/api_jsonrpc.php' self.auth = '' self.id = 0 def do_request(self, method, params=None): json_payload = { 'jsonrpc': '2.0', 'method': method, 'params': params or {}, 'auth': self.auth, 'id': self.id, } self.id += 1 response = self.session.post(self.url, data = json.dumps(json_payload)) if response.status_code != 200: raise ZabbixError("HTTP ERROR %S: %S" % (response.status, response.reason)) if response.text == '': raise ZabbixError("Received empty response") return response.json()
Create do_requestion function to be used by other methods.
Create do_requestion function to be used by other methods.
Python
apache-2.0
supasate/PythonZabbixApi
1b20116059b21905688b7fd6153ecd7c42bdc4a1
parseSAMOutput.py
parseSAMOutput.py
#!python # Load libraries import sys, getopt import pysam import libPipeline # Set constants helpMsg =''' SYNOPSIS parseSAMOutput parseSAMOutput [OPTIONS] SAMFILE # DESCRIPTION parseSAMOutput.py Parses SAM alignments into paired-end read summaries. Prints results to stdout. OPTIONS --rmdup Remove duplicate reads (reduces PCR effects) -h/--help Print help message and exit ''' if __name__ == "__main__": # Set defaults rmdup = False # Parse arguments options, args = getopt.getopt(sys.argv[1:], 'h', ["help", "rmdup"]) for opt, value in options: if opt in ("-h", "--help"): print >> sys.stderr, helpMsg sys.exit(2) elif opt == "--rmdup": rmdup = True else: print >> sys.stderr, "Error -- option %s not recognized" % opt sys.exit(1) # Parse arguments & options if len(args) > 0: alignmentPath = args[0] else: print >> sys.stderr, "Error -- need path to SAM file" sys.exit(1) libPipeline.processSAMOutput(alignmentPath, sys.stdout)
#!python # Load libraries import sys, getopt import pysam import libPipeline # Set constants helpMsg =''' SYNOPSIS parseSAMOutput parseSAMOutput [OPTIONS] SAMFILE # DESCRIPTION parseSAMOutput.py Parses SAM alignments into paired-end read summaries. Prints results to stdout. OPTIONS --rmdup Remove duplicate reads (reduces PCR effects) -h/--help Print help message and exit ''' if __name__ == "__main__": # Set defaults rmdup = False # Parse arguments options, args = getopt.getopt(sys.argv[1:], 'h', ["help", "rmdup"]) for opt, value in options: if opt in ("-h", "--help"): print >> sys.stderr, helpMsg sys.exit(2) elif opt == "--rmdup": rmdup = True else: print >> sys.stderr, "Error -- option %s not recognized" % opt sys.exit(1) # Parse arguments & options if len(args) > 0: alignmentPath = args[0] else: print >> sys.stderr, "Error -- need path to SAM file" sys.exit(1) libPipeline.processSAMOutput(alignmentPath, sys.stdout, rmdup=rmdup)
Fix rmdup handling in wrapper script.
Fix rmdup handling in wrapper script.
Python
apache-2.0
awblocker/paired-end-pipeline,awblocker/paired-end-pipeline
ddf0ff2c72a8026c6cf90bc51b1c9fc8d68a003f
apicore/oasschema.py
apicore/oasschema.py
import jsonschema def validate(data, schema): try: jsonschema.validate(data, schema) except jsonschema.exceptions.ValidationError: return False if schema["type"] == "object": allowed = list(schema["properties"].keys()) for key in data.keys(): if key not in allowed: return False return True
import jsonschema def validate(data, schema): try: jsonschema.validate(data, schema) except jsonschema.exceptions.ValidationError: return False # TODO if $ref or if "type" in schema and schema["type"] == "object": allowed = list(schema["properties"].keys()) for key in data.keys(): if key not in allowed: return False return True
FIX schema type not object
FIX schema type not object
Python
mit
meezio/apicore,meezio/apicore
613a6f7947b46b9a6c4c679b638d1c4d946b644d
neutron/conf/policies/network_ip_availability.py
neutron/conf/policies/network_ip_availability.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_policy import policy from neutron.conf.policies import base rules = [ policy.DocumentedRuleDefault( 'get_network_ip_availability', base.RULE_ADMIN_ONLY, 'Get network IP availability', [ { 'method': 'GET', 'path': '/network-ip-availabilities', }, { 'method': 'GET', 'path': '/network-ip-availabilities/{network_id}', }, ] ), ] def list_rules(): return rules
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_log import versionutils from oslo_policy import policy from neutron.conf.policies import base DEPRECATED_REASON = """ The network IP availability API now support system scope and default roles. """ rules = [ policy.DocumentedRuleDefault( name='get_network_ip_availability', check_str=base.SYSTEM_READER, scope_types=['system'], description='Get network IP availability', operations=[ { 'method': 'GET', 'path': '/network-ip-availabilities', }, { 'method': 'GET', 'path': '/network-ip-availabilities/{network_id}', }, ], deprecated_rule=policy.DeprecatedRule( name='get_network_ip_availability', check_str=base.RULE_ADMIN_ONLY), deprecated_reason=DEPRECATED_REASON, deprecated_since=versionutils.deprecated.WALLABY ), ] def list_rules(): return rules
Implement secure RBAC for the network IP availability
Implement secure RBAC for the network IP availability This commit updates the network IP availability policies to understand scope checking and account for a read-only role. This is part of a broader series of changes across OpenStack to provide a consistent RBAC experience and improve security. Change-Id: Ia965e549ec5d8b23e837b41c304004d8e57785e9
Python
apache-2.0
openstack/neutron,mahak/neutron,mahak/neutron,mahak/neutron,openstack/neutron,openstack/neutron
3f9623766b58c02b21abb967315bfe30a2b3974f
tests/TestTransaction.py
tests/TestTransaction.py
import Transaction import unittest class TestTransaction(unittest.TestCase) : def setUp(self) : self.test_object = Transaction.Transaction() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_can_assign_data(self) : self.test_object['foo'] = 'bar' self.assertIn('foo', self.test_object) self.assertEqual(self.test_object['foo'], 'bar')
import Transaction import unittest class TestTransaction(unittest.TestCase) : def setUp(self) : self.test_object = Transaction.Transaction() def tearDown(self) : pass def test_not_None(self) : self.assertIsNotNone(self.test_object) def test_can_assign_data(self) : self.test_object['foo'] = 'bar' self.assertIn('foo', self.test_object) self.assertEqual(self.test_object['foo'], 'bar') def test_different_transactions_are_not_each_other(self) : emptyTransaction = Transaction.Transaction() self.assertIsNot(self.test_object, emptyTransaction) def test_different_transactions_with_same_data_are_equal(self) : self.test_object['foo'] = 'bar' newTransaction = Transaction.Transaction() newTransaction['foo'] = 'bar' self.assertEqual(self.test_object, newTransaction) def test_transaction_is_itself(self) : self.assertIs(self.test_object, self.test_object) def test_different_transactions_with_same_data_are_equal(self) : self.test_object['foo'] = 'bar' newTransaction = Transaction.Transaction() newTransaction['foo'] = 'baz' self.assertNotEqual(self.test_object, newTransaction)
Add test to clarify equality/is behavior
Add test to clarify equality/is behavior
Python
apache-2.0
mattdeckard/wherewithal
2728f33a0c8477d75b3716ea39fe2e3c8db9378d
tests/test_OrderedSet.py
tests/test_OrderedSet.py
from twisted.trial import unittest from better_od import OrderedSet class TestOrderedSet(unittest.TestCase): def setUp(self): self.values = 'abcddefg' self.s = OrderedSet(self.values) def test_order(self): expected = list(enumerate('abcdefg')) self.assertEquals(list(enumerate(self.s)), expected) def test_index(self): self.assertEquals(self.s.key_index('c'), 2) class TestOrderedSetMutations(unittest.TestCase): def test_add_new_value(self): prev = len(self.s) self.s.add('z') self.assertEqual(len(self.s), prev + 1)
from twisted.trial import unittest from better_od import OrderedSet class TestOrderedSet(unittest.TestCase): def setUp(self): self.s = OrderedSet('abcdefg') def test_order(self): expected = list(enumerate('abcdefg')) self.assertEquals(list(enumerate(self.s)), expected) def test_reorder(self): new_order = 'gdcbaef' self.s.reorder_keys(new_order) self.assertEquals(list(enumerate(self.s)), list(enumerate(new_order))) def test_index(self): self.assertEquals(self.s.key_index('c'), 2) class TestOrderedSetMutations(unittest.TestCase): def test_add_new_value(self): s = OrderedSet('abcdef') prev = len(s) s.add('z') self.assertEqual(len(s), prev + 1) def test_add_existing_value(self): s = OrderedSet('abcdef') prev = len(s) s.add('a') self.assertEqual(len(s), prev) def test_discard_existing_value(self): s = OrderedSet('abcdef') self.assertIs(s.discard('a'), None) def test_discard_nonexistent_value(self): s = OrderedSet('abcdef') self.assertIs(s.discard('z'), None)
Add OrderedSet mutation tests. Refactor tests.
Add OrderedSet mutation tests. Refactor tests. Refactored the tests to rely less on setUp because I've got to test mutating the objects.
Python
mit
JustusW/BetterOrderedDict,therealfakemoot/collections2
756860e325edd06eb98bed7c6fd5fa6c4a78243e
tests/test_migrations.py
tests/test_migrations.py
""" Tests that migrations are not missing """ try: from io import StringIO except ImportError: from StringIO import StringIO import pytest from django.core.management import call_command def test_no_missing_migrations(): """Check no model changes have been made since the last `./manage.py makemigrations`. Pulled from mozilla/treeherder #dd53914, subject to MPL """ with pytest.raises(SystemExit) as e: # Replace with `check_changes=True` once we're using a Django version that includes: # https://code.djangoproject.com/ticket/25604 # https://github.com/django/django/pull/5453 call_command('makemigrations', interactive=False, dry_run=True, exit_code=True) assert str(e.value) == '1'
""" Tests that migrations are not missing """ try: from io import StringIO except ImportError: from StringIO import StringIO import pytest from django.core.management import call_command @pytest.mark.django_db def test_no_missing_migrations(): """Check no model changes have been made since the last `./manage.py makemigrations`. Pulled from mozilla/treeherder #dd53914, subject to MPL """ with pytest.raises(SystemExit) as e: # Replace with `check_changes=True` once we're using a Django version that includes: # https://code.djangoproject.com/ticket/25604 # https://github.com/django/django/pull/5453 call_command('makemigrations', interactive=False, dry_run=True, exit_code=True) assert str(e.value) == '1'
Add missing pytest DB marker
Add missing pytest DB marker
Python
bsd-2-clause
bennylope/django-organizations,bennylope/django-organizations
d4003a3b07e4ead9bccbc6a9c8ff835970ad99a3
pymatgen/core/design_patterns.py
pymatgen/core/design_patterns.py
# coding: utf-8 from __future__ import division, unicode_literals """ This module defines some useful design patterns. """ __author__ = "Shyue Ping Ong" __copyright__ = "Copyright 2011, The Materials Project" __version__ = "1.0" __maintainer__ = "Shyue Ping Ong" __email__ = "[email protected]" __status__ = "Production" __date__ = "Sep 23, 2011" class Enum(set): """ Creates an enum out of a set. """ def __getattr__(self, name): if name in self: return name raise AttributeError class NullFile(object): """A file object that is associated to /dev/null.""" def __new__(cls): import os return open(os.devnull, 'w') def __init__(self): """no-op""" class NullStream(object): """A fake stream with a no-op write..""" def write(*args): """no-op"""
# coding: utf-8 from __future__ import division, unicode_literals """ This module defines some useful design patterns. """ __author__ = "Shyue Ping Ong" __copyright__ = "Copyright 2011, The Materials Project" __version__ = "1.0" __maintainer__ = "Shyue Ping Ong" __email__ = "[email protected]" __status__ = "Production" __date__ = "Sep 23, 2011" class Enum(set): """ Creates an enum out of a set. """ def __getattr__(self, name): if name in self: return name raise AttributeError
Move NullFile and NullStream to monty
Move NullFile and NullStream to monty Former-commit-id: 5492c3519fdfc444fd8cbb92cbf4b9c67c8a0883 [formerly 4aa6714284cb45a2747cea8e0f38e8fbcd8ec0bc] Former-commit-id: e6119512027c605a8277d0a99f37a6ab0d73b6c7
Python
mit
xhqu1981/pymatgen,tallakahath/pymatgen,fraricci/pymatgen,aykol/pymatgen,mbkumar/pymatgen,ndardenne/pymatgen,czhengsci/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,czhengsci/pymatgen,gVallverdu/pymatgen,nisse3000/pymatgen,dongsenfo/pymatgen,aykol/pymatgen,johnson1228/pymatgen,montoyjh/pymatgen,gpetretto/pymatgen,Bismarrck/pymatgen,fraricci/pymatgen,gmatteo/pymatgen,mbkumar/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,ndardenne/pymatgen,mbkumar/pymatgen,vorwerkc/pymatgen,setten/pymatgen,gVallverdu/pymatgen,tallakahath/pymatgen,vorwerkc/pymatgen,montoyjh/pymatgen,setten/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,tallakahath/pymatgen,montoyjh/pymatgen,tschaume/pymatgen,xhqu1981/pymatgen,vorwerkc/pymatgen,dongsenfo/pymatgen,blondegeek/pymatgen,blondegeek/pymatgen,czhengsci/pymatgen,johnson1228/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,gpetretto/pymatgen,fraricci/pymatgen,dongsenfo/pymatgen,tschaume/pymatgen,blondegeek/pymatgen,johnson1228/pymatgen,matk86/pymatgen,montoyjh/pymatgen,matk86/pymatgen,gpetretto/pymatgen,aykol/pymatgen,nisse3000/pymatgen,davidwaroquiers/pymatgen,xhqu1981/pymatgen,gpetretto/pymatgen,dongsenfo/pymatgen,richardtran415/pymatgen,gVallverdu/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,matk86/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,ndardenne/pymatgen,setten/pymatgen,richardtran415/pymatgen,Bismarrck/pymatgen,tschaume/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,setten/pymatgen,johnson1228/pymatgen,davidwaroquiers/pymatgen,nisse3000/pymatgen,nisse3000/pymatgen,fraricci/pymatgen,matk86/pymatgen
e69a23abc438b1f527cf9247bc210874eb227253
pyshelf/artifact_list_manager.py
pyshelf/artifact_list_manager.py
from pyshelf.cloud.stream_iterator import StreamIterator from flask import Response class ArtifactListManager(object): def __init__(self, container): self.container = container def get_artifact(self, path): """ Gets artifact or artifact list information. Args: path(string): path or name of artifact. Returns: flask.Response """ with self.container.create_master_bucket_storage() as storage: if path[-1] == "/": self.container.logger.debug("Artifact with path {} is a directory.".format(path)) child_list = storage.get_directory_contents(path) links = [] for child in child_list: title = child.name path = "/artifact/" + title rel = "child" if child.name == path: rel = "self" links.append(self._format_link(path=path, rel=rel, title=title)) response = Response() response.headers["Link"] = ",".join(links) response.status_code = 204 else: stream = storage.get_artifact(path) response = Response(stream) response.headers["Content-Type"] = stream.headers["content-type"] return response def _format_link(self, **kwargs): link = "<{path}>; rel={rel}; title={title}".format(**kwargs) return link
from pyshelf.cloud.stream_iterator import StreamIterator from flask import Response class ArtifactListManager(object): def __init__(self, container): self.container = container def get_artifact(self, path): """ Gets artifact or artifact list information. Args: path(string): path or name of artifact. Returns: flask.Response """ with self.container.create_master_bucket_storage() as storage: if path[-1] == "/": self.container.logger.debug("Artifact with path {} is a directory.".format(path)) child_list = storage.get_directory_contents(path) links = [] for child in child_list: title = child.name url = "/artifact/" + title rel = "child" if child.name == path: rel = "self" links.append(self._format_link(url=url, rel=rel, title=title)) response = Response() response.headers["Link"] = ",".join(links) response.status_code = 204 else: stream = storage.get_artifact(path) response = Response(stream) response.headers["Content-Type"] = stream.headers["content-type"] return response def _format_link(self, **kwargs): link = "<{url}>; rel={rel}; title={title}".format(**kwargs) return link
Fix relative portion of link.
Fix relative portion of link.
Python
mit
kyle-long/pyshelf,kyle-long/pyshelf,not-nexus/shelf,not-nexus/shelf
c4903f5b631bba21e17be1b7deb118c0c9571432
Lab3/PalindromeExercise.py
Lab3/PalindromeExercise.py
# Asks the user for input of the word and makes it lower case. normStr = raw_input("Enter the word:\n").lower(); # Inverts the string so it can compare it with the original input. invertStr = normStr[::-1];
# Asks the user for input of the word and makes it lower case. normStr = raw_input("Enter the word:\n").lower(); # Inverts the string so it can compare it with the original input. invertStr = normStr[::-1]; # Tests if the string is a palindrome. If so, it prints True. Else, prints False. if normStr == invertStr: print 'True'; else: print 'False';
Test added. Program should be complete.
Test added. Program should be complete.
Python
mit
lgomie/dt228-3B-cloud-repo
98eead2549f4a2793011ffe8107e64530ddbf782
runtests.py
runtests.py
#!/usr/bin/env python import sys from os.path import abspath, dirname import django from django.conf import settings sys.path.insert(0, abspath(dirname(__file__))) if not settings.configured: settings.configure( INSTALLED_APPS=( 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.auth', 'relatives', 'relatives.tests', 'django.contrib.admin', ), DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, ROOT_URLCONF='relatives.tests.urls', STATIC_URL='/static/', ) def runtests(*test_args): if hasattr(django, 'setup'): django.setup() if not test_args: test_args = ['tests'] from django.test.simple import DjangoTestSuiteRunner failures = DjangoTestSuiteRunner(failfast=False).run_tests(test_args) sys.exit(failures) if __name__ == "__main__": runtests(*sys.argv[1:])
#!/usr/bin/env python import sys from os.path import abspath, dirname import django from django.conf import settings sys.path.insert(0, abspath(dirname(__file__))) if not settings.configured: settings.configure( INSTALLED_APPS=( 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.auth', 'relatives', 'relatives.tests', 'django.contrib.admin', ), DATABASES={ 'default': { 'ENGINE': 'django.db.backends.sqlite3', } }, MIDDLEWARE_CLASSES=[ 'django.contrib.sessions.middleware.SessionMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ], ROOT_URLCONF='relatives.tests.urls', STATIC_URL='/static/', ) def runtests(*test_args): if hasattr(django, 'setup'): django.setup() if not test_args: test_args = ['tests'] from django.test.simple import DjangoTestSuiteRunner failures = DjangoTestSuiteRunner(failfast=False).run_tests(test_args) sys.exit(failures) if __name__ == "__main__": runtests(*sys.argv[1:])
Add missing auth middleware for 1.7 tests
Add missing auth middleware for 1.7 tests
Python
mit
treyhunner/django-relatives,treyhunner/django-relatives
de02f354e406e5b9a3f742697d3979d54b9ee581
fvserver/urls.py
fvserver/urls.py
from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^macnamer/', include('macnamer.foo.urls')), url(r'^login/$', 'django.contrib.auth.views.login'), url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'), url(r'^changepassword/$', 'django.contrib.auth.views.password_change'), url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done'), url(r'^', include('server.urls')), # Uncomment the admin/doc line below to enable admin documentation: url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), #url(r'^$', 'namer.views.index', name='home'), )
from django.conf.urls import patterns, include, url # Uncomment the next two lines to enable the admin: from django.contrib import admin admin.autodiscover() urlpatterns = patterns('', # Examples: # url(r'^macnamer/', include('macnamer.foo.urls')), url(r'^login/$', 'django.contrib.auth.views.login'), url(r'^logout/$', 'django.contrib.auth.views.logout_then_login'), url(r'^changepassword/$', 'django.contrib.auth.views.password_change', name='password_change'), url(r'^changepassword/done/$', 'django.contrib.auth.views.password_change_done', name='password_change_done'), url(r'^', include('server.urls')), # Uncomment the admin/doc line below to enable admin documentation: url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # Uncomment the next line to enable the admin: url(r'^admin/', include(admin.site.urls)), #url(r'^$', 'namer.views.index', name='home'), )
Update password functions for Django 1.8
Update password functions for Django 1.8
Python
apache-2.0
grahamgilbert/Crypt-Server,squarit/Crypt-Server,arubdesu/Crypt-Server,grahamgilbert/Crypt-Server,squarit/Crypt-Server,arubdesu/Crypt-Server,squarit/Crypt-Server,arubdesu/Crypt-Server,squarit/Crypt-Server,grahamgilbert/Crypt-Server,grahamgilbert/Crypt-Server
cc8f0760aa5497d2285dc85c6f3c17c6ce327c35
core/__init__.py
core/__init__.py
# Offer forward compatible imports of datastore_rpc and datastore_query. import logging try: from google.appengine.datastore import datastore_rpc from google.appengine.datastore import datastore_query logging.info('Imported official google datastore_{rpc,query}') except ImportError: logging.warning('Importing local datastore_{rpc,query}') from . import datastore_rpc from . import datastore_query from . import monkey
# Offer forward compatible imports of datastore_rpc and datastore_query. import logging import sys try: from google.appengine.datastore import datastore_rpc from google.appengine.datastore import datastore_query sys.modules['core.datastore_rpc'] = datastore_rpc sys.modules['core.datastore_query'] = datastore_query logging.info('Imported official google datastore_{rpc,query}') except ImportError: logging.warning('Importing local datastore_{rpc,query}') from . import datastore_rpc from . import datastore_query from . import monkey
Make official google imports actually work.
Make official google imports actually work.
Python
apache-2.0
GoogleCloudPlatform/datastore-ndb-python,GoogleCloudPlatform/datastore-ndb-python
c566fa8f49ea826b29937c9c128350494eb10bf6
rrd/__init__.py
rrd/__init__.py
#-*- coding:utf-8 -*- import os from flask import Flask #-- create app -- app = Flask(__name__) app.config.from_object("rrd.config") @app.errorhandler(Exception) def all_exception_handler(error): print "exception: %s" %error return u'dashboard 暂时无法访问,请联系管理员', 500 from view import api, chart, screen, index
#-*- coding:utf-8 -*- import os from flask import Flask from flask import request from flask import redirect #-- create app -- app = Flask(__name__) app.config.from_object("rrd.config") @app.errorhandler(Exception) def all_exception_handler(error): print "exception: %s" %error return u'dashboard 暂时无法访问,请联系管理员', 500 from view import api, chart, screen, index @app.before_request def before_request(): sig = request.cookies.get('sig') if not sig: return redirect(config.JSONCFG['redirectUrl'], code=302)
Add before_request and it works for this bug
Add before_request and it works for this bug Check if the signature exists. Redirect to the login page if it doesn't. I took `rrd/view/index.py` as the reference
Python
apache-2.0
Cepave/dashboard,Cepave/dashboard,Cepave/dashboard,Cepave/dashboard
22d08aa11216d8ee367f8b4a11a14e08b3917dfd
flask_perm/admin.py
flask_perm/admin.py
# -*- coding: utf-8 -*- from flask import Blueprint, render_template, current_app, url_for, redirect, request, flash bp = Blueprint('perm-admin', __name__, template_folder='templates', static_folder='static') @bp.route('/') def index(): if not bp.perm.has_perm_admin_logined(): return redirect(url_for('perm-admin.login')) render_data = { 'base_api_url': current_app.config.get('PERM_ADMIN_API_PREFIX'), 'base_web_url': current_app.config.get('PERM_ADMIN_PREFIX'), 'debug': current_app.config.get('DEBUG'), } return render_template('/perm-admin/index.html', **render_data) @bp.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': username = request.form['username'] password = request.form['password'] if bp.perm.check_perm_admin_auth(username, password): bp.perm.login_perm_admin() return redirect(url_for('perm-admin.index')) else: flash(u'Invalid Password', 'error') return redirect(url_for('perm-admin.login')) return render_template('/perm-admin/login.html') @bp.route('/logout') def logout(): bp.perm.logout_perm_admin() return redirect(url_for('perm-admin.login'))
# -*- coding: utf-8 -*- from flask import Blueprint, render_template, current_app, url_for, redirect, request, flash bp = Blueprint('perm-admin', __name__, template_folder='templates', static_folder='static') @bp.route('/') def index(): if not current_app.extensions['perm'].has_perm_admin_logined(): return redirect(url_for('perm-admin.login')) render_data = { 'base_api_url': current_app.config.get('PERM_ADMIN_API_PREFIX'), 'base_web_url': current_app.config.get('PERM_ADMIN_PREFIX'), 'debug': current_app.config.get('DEBUG'), } return render_template('/perm-admin/index.html', **render_data) @bp.route('/login', methods=['GET', 'POST']) def login(): if request.method == 'POST': username = request.form['username'] password = request.form['password'] admin_id = current_app.extensions['perm'].get_perm_admin_id_by_auth(username, password) if admin_id: current_app.extensions['perm'].login_perm_admin(admin_id) return redirect(url_for('perm-admin.index')) else: flash(u'Invalid Password', 'error') return redirect(url_for('perm-admin.login')) return render_template('/perm-admin/login.html') @bp.route('/logout') def logout(): current_app.extensions['perm'].logout_perm_admin() return redirect(url_for('perm-admin.login'))
Use app.extensions to get perm reference.
Use app.extensions to get perm reference.
Python
mit
soasme/flask-perm,soasme/flask-perm,soasme/flask-perm
614a5de89a5746e95dc14f371d001c5a9056f646
passwordhash.py
passwordhash.py
#!/usr/bin/env python # Password Hashing Module for Linux # Author: Dave Russell Jr (drussell393) from getpass import getpass import crypt # If you like Python 2, please to be importing. import os import binascii password = getpass('Enter your desired password, Harry: ') passwordConfirm = getpass('Confirm your password: ') if (password == passwordConfirm): # Python 2 alternative, os.urandom() passwordHash = crypt.crypt(password, '$6$' + binascii.hexlify(os.urandom(4))) # Python 3 likes my crypt (mksalt doesn't work in Python 2) #passwordHash = crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512)) print('You\'re a wizard, Harry: ' + passwordHash) else: print('Dobby has heard of your greatness, sir. But of your goodness, Dobby never knew.') print('Your confirmation password didn\'t match, Oh Great One.')
#!/usr/bin/env python3 # Password Hashing Module for Linux # Author: Dave Russell Jr (drussell393) from getpass import getpass import crypt # If you like Python 2, please to be importing. #import os #import binascii password = getpass('Enter your desired password, Harry: ') passwordConfirm = getpass('Confirm your password: ') if (password == passwordConfirm): # Python 2 alternative, os.urandom() #passwordHash = crypt.crypt(password, '$6$' + binascii.hexlify(os.urandom(4))) # Python 3 likes my crypt (mksalt doesn't work in Python 2) passwordHash = crypt.crypt(password, crypt.mksalt(crypt.METHOD_SHA512)) print('You\'re a wizard, Harry: ' + passwordHash) else: print('Dobby has heard of your greatness, sir. But of your goodness, Dobby never knew.') print('Your confirmation password didn\'t match, Oh Great One.')
Make changes to default to Python3
Make changes to default to Python3 Since we have undoubtedly moved to Python3 for the most part within the community, this should be Python3 by default. We make Python2 users work harder now.
Python
mit
drussell393/Linux-Password-Hash
4735804f4951835e4e3c7d116628344bddf45aa3
atomicpress/admin.py
atomicpress/admin.py
# -*- coding: utf-8 -*- from flask import current_app from flask_admin.contrib.fileadmin import FileAdmin from flask_admin import AdminIndexView, expose, Admin from flask_admin.contrib.sqla import ModelView from atomicpress import models from atomicpress.app import db class HomeView(AdminIndexView): @expose("/") def index(self): return self.render('admin/home.html') def create_admin(): app = current_app._get_current_object() admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home')) admin.add_view(ModelView(models.Blog, db.session, category="Blog")) admin.add_view(ModelView(models.Author, db.session, category="Blog")) admin.add_view(ModelView(models.Post, db.session, category="Post")) admin.add_view(ModelView(models.Tag, db.session, category="Post")) admin.add_view(ModelView(models.Category, db.session, category="Post")) admin.add_view(FileAdmin(app.config["UPLOADS_PATH"], app.config["UPLOADS_URL"], name='Upload files'))
# -*- coding: utf-8 -*- from flask import current_app from flask_admin.contrib.fileadmin import FileAdmin from flask_admin import AdminIndexView, expose, Admin from flask_admin.contrib.sqla import ModelView from atomicpress import models from atomicpress.app import db class HomeView(AdminIndexView): @expose("/") def index(self): return self.render('admin/home.html') class PostView(ModelView): column_default_sort = ('date', True) def create_admin(): app = current_app._get_current_object() admin = Admin(app, "AtomicPress", index_view=HomeView(name='Home')) admin.add_view(ModelView(models.Blog, db.session, category="Blog")) admin.add_view(ModelView(models.Author, db.session, category="Blog")) admin.add_view(PostView(models.Post, db.session, category="Post")) admin.add_view(ModelView(models.Tag, db.session, category="Post")) admin.add_view(ModelView(models.Category, db.session, category="Post")) admin.add_view(FileAdmin(app.config["UPLOADS_PATH"], app.config["UPLOADS_URL"], name='Upload files'))
Update post view sorting (so latest comes first)
Update post view sorting (so latest comes first)
Python
mit
marteinn/AtomicPress,marteinn/AtomicPress,marteinn/AtomicPress,marteinn/AtomicPress
3a204de33589de943ff09525895812530baac0b2
saylua/modules/pets/models/db.py
saylua/modules/pets/models/db.py
from google.appengine.ext import ndb # This is to store alternate linart versions of the same pets class SpeciesVersion(ndb.Model): name = ndb.StringProperty() base_image = ndb.StringProperty() base_psd = ndb.StringProperty() default_image = ndb.StringProperty() # Pets are divided into species and species are divided into variations class Species(ndb.Model): name = ndb.StringProperty(indexed=True) versions = ndb.StructuredProperty(SpeciesVersion, repeated=True) description = ndb.StringProperty() class SpeciesVariation(ndb.Model): species_key = ndb.KeyProperty(indexed=True) name = ndb.StringProperty(indexed=True) description = ndb.StringProperty() class Pet(ndb.Model): user_key = ndb.KeyProperty(indexed=True) variation_key = ndb.KeyProperty(indexed=True) # Only set if the pet is a variation species_name = ndb.StringProperty(indexed=True) # Note the denormalization # Personal profile information for the pet name = ndb.StringProperty() css = ndb.StringProperty() description = ndb.StringProperty() # If either of these is set to a number other than 0, the pet is for sale ss_price = ndb.IntegerProperty(default=0, indexed=True) cc_price = ndb.IntegerProperty(default=0, indexed=True)
from google.appengine.ext import ndb # This is to store alternate linart versions of the same pets class SpeciesVersion(ndb.Model): name = ndb.StringProperty() base_image = ndb.StringProperty() base_psd = ndb.StringProperty() default_image = ndb.StringProperty() # Pets are divided into species and species are divided into variations class Species(ndb.Model): name = ndb.StringProperty() versions = ndb.StructuredProperty(SpeciesVersion) description = ndb.TextProperty() class SpeciesVariation(ndb.Model): species_id = ndb.StringProperty() name = ndb.StringProperty() description = ndb.TextProperty() class Pet(ndb.Model): pet_id = ndb.StringProperty() owner_id = ndb.IntegerProperty() variation_key = ndb.KeyProperty() # Only set if the pet is a variation species_name = ndb.StringProperty() # Note the denormalization # Personal profile information for the pet name = ndb.StringProperty() css = ndb.TextProperty() description = ndb.TextProperty() # If either of these is set to a number other than 0, the pet is for sale ss_price = ndb.IntegerProperty(default=0) cc_price = ndb.IntegerProperty(default=0)
Update to pet model for provisioner
Update to pet model for provisioner
Python
agpl-3.0
saylua/SayluaV2,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua,saylua/SayluaV2,LikeMyBread/Saylua,LikeMyBread/Saylua
a77ead1975050938c8557979f54683829747bf0f
addons/sale_stock/migrations/8.0.1.0/pre-migration.py
addons/sale_stock/migrations/8.0.1.0/pre-migration.py
# -*- coding: utf-8 -*- ############################################################################## # # Odoo, a suite of business apps # This module Copyright (C) 2014 Therp BV (<http://therp.nl>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.openupgrade import openupgrade column_renames = { 'sale.order.line': [('procurement_id', None)]} @openupgrade.migrate() def migrate(cr, version): openupgrade.rename_columns(cr, column_renames)
# -*- coding: utf-8 -*- ############################################################################## # # Odoo, a suite of business apps # This module Copyright (C) 2014 Therp BV (<http://therp.nl>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.openupgrade import openupgrade column_renames = { 'sale_order_line': [('procurement_id', None)]} @openupgrade.migrate() def migrate(cr, version): openupgrade.rename_columns(cr, column_renames)
Fix table name error in sale_stock column renames
Fix table name error in sale_stock column renames
Python
agpl-3.0
blaggacao/OpenUpgrade,sebalix/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hifly/OpenUpgrade,kirca/OpenUpgrade,sebalix/OpenUpgrade,blaggacao/OpenUpgrade,kirca/OpenUpgrade,bwrsandman/OpenUpgrade,hifly/OpenUpgrade,Endika/OpenUpgrade,kirca/OpenUpgrade,OpenUpgrade/OpenUpgrade,pedrobaeza/OpenUpgrade,grap/OpenUpgrade,damdam-s/OpenUpgrade,bwrsandman/OpenUpgrade,pedrobaeza/OpenUpgrade,kirca/OpenUpgrade,Endika/OpenUpgrade,sebalix/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,bwrsandman/OpenUpgrade,pedrobaeza/OpenUpgrade,OpenUpgrade/OpenUpgrade,csrocha/OpenUpgrade,kirca/OpenUpgrade,damdam-s/OpenUpgrade,pedrobaeza/OpenUpgrade,sebalix/OpenUpgrade,blaggacao/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,sebalix/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,mvaled/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,csrocha/OpenUpgrade,kirca/OpenUpgrade,OpenUpgrade/OpenUpgrade,hifly/OpenUpgrade,grap/OpenUpgrade,mvaled/OpenUpgrade,OpenUpgrade/OpenUpgrade,csrocha/OpenUpgrade,hifly/OpenUpgrade,grap/OpenUpgrade,kirca/OpenUpgrade,sebalix/OpenUpgrade,sebalix/OpenUpgrade,mvaled/OpenUpgrade,bwrsandman/OpenUpgrade,0k/OpenUpgrade,Endika/OpenUpgrade,csrocha/OpenUpgrade,damdam-s/OpenUpgrade,Endika/OpenUpgrade,hifly/OpenUpgrade,0k/OpenUpgrade,hifly/OpenUpgrade,blaggacao/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,hifly/OpenUpgrade,0k/OpenUpgrade,grap/OpenUpgrade,grap/OpenUpgrade,bwrsandman/OpenUpgrade,OpenUpgrade/OpenUpgrade,0k/OpenUpgrade,damdam-s/OpenUpgrade,csrocha/OpenUpgrade,damdam-s/OpenUpgrade,blaggacao/OpenUpgrade,grap/OpenUpgrade,csrocha/OpenUpgrade,pedrobaeza/OpenUpgrade,Endika/OpenUpgrade,grap/OpenUpgrade,Endika/OpenUpgrade,blaggacao/OpenUpgrade,bwrsandman/OpenUpgrade,bwrsandman/OpenUpgrade,Endika/OpenUpgrade,mvaled/OpenUpgrade,0k/OpenUpgrade,pedrobaeza/OpenUpgrade,mvaled/OpenUpgrade,OpenUpgrade/OpenUpgrade,mvaled/OpenUpgrade,0k/OpenUpgrade,damdam-s/OpenUpgrade,blaggacao/OpenUpgrade,pedrobaeza/OpenUpgrade,damdam-s/OpenUpgrade,OpenUpgrade/OpenUpgrade,csrocha/OpenUpgrade,mvaled/OpenUpgrade
8eb936799a320e9b68bffe58b04da941eba2268e
setup.py
setup.py
from setuptools import find_packages, setup setup( version='4.0.2', name='incuna-groups', packages=find_packages(), include_package_data=True, install_requires=[ 'django_crispy_forms>=1.4.0,<2', 'django-polymorphic>=1.2,<1.3', 'incuna-pagination>=0.1.1,<1', ], description='Generic group/forum framework.', author='Incuna Ltd', author_email='[email protected]', url='https://github.com/incuna/incuna-groups', )
from setuptools import find_packages, setup setup( version='4.0.2', name='incuna-groups', packages=find_packages(), include_package_data=True, install_requires=[ 'django_crispy_forms>=1.6.1,<2', 'django-polymorphic>=1.2,<1.3', 'incuna-pagination>=0.1.1,<1', ], description='Generic group/forum framework.', author='Incuna Ltd', author_email='[email protected]', url='https://github.com/incuna/incuna-groups', )
Use a newer crispy_forms version.
Use a newer crispy_forms version.
Python
bsd-2-clause
incuna/incuna-groups,incuna/incuna-groups
4aeb2e57a05491973c761eb169a42cb5e1e32737
gtr/__init__.py
gtr/__init__.py
__all__ = [ "gtr.services.funds.Funds" ] __version__ = "0.1.0" from gtr.services.base import _Service from gtr.services.funds import Funds from gtr.services.organisations import Organisations from gtr.services.persons import Persons from gtr.services.projects import Projects
__all__ = [ "gtr.services.funds.Funds", "gtr.services.organisations.Organisations", "gtr.services.persons.Persons", "gtr.services.projects.Projects" ] __version__ = "0.1.0" from gtr.services.base import _Service from gtr.services.funds import Funds from gtr.services.organisations import Organisations from gtr.services.persons import Persons from gtr.services.projects import Projects
Add all service Classes to import
Add all service Classes to import
Python
apache-2.0
nestauk/gtr
1d84a3b58aa752834aed31123dd16e3bfa723609
tests/storage_adapter_tests/test_storage_adapter.py
tests/storage_adapter_tests/test_storage_adapter.py
from unittest import TestCase from chatterbot.storage import StorageAdapter class StorageAdapterTestCase(TestCase): """ This test case is for the StorageAdapter base class. Although this class is not intended for direct use, this test case ensures that exceptions requiring basic functionality are triggered when needed. """ def setUp(self): super(StorageAdapterTestCase, self).setUp() self.adapter = StorageAdapter() def test_count(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.count() def test_find(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.find('') def test_filter(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.filter() def test_remove(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.remove('') def test_create(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.create() def test_update(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.update('') def test_get_random(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.get_random() def test_get_response_statements(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.get_response_statements() def test_drop(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.drop()
from unittest import TestCase from chatterbot.storage import StorageAdapter class StorageAdapterTestCase(TestCase): """ This test case is for the StorageAdapter base class. Although this class is not intended for direct use, this test case ensures that exceptions requiring basic functionality are triggered when needed. """ def setUp(self): super(StorageAdapterTestCase, self).setUp() self.adapter = StorageAdapter() def test_count(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.count() def test_filter(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.filter() def test_remove(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.remove('') def test_create(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.create() def test_update(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.update('') def test_get_random(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.get_random() def test_drop(self): with self.assertRaises(StorageAdapter.AdapterMethodNotImplementedError): self.adapter.drop()
Remove tests for storage adapter methods being removed.
Remove tests for storage adapter methods being removed.
Python
bsd-3-clause
vkosuri/ChatterBot,gunthercox/ChatterBot
2d95f8fe9c9e9edf5b1a0b5dee2992187b0d89ed
src/pytest_django_lite/plugin.py
src/pytest_django_lite/plugin.py
import os import pytest try: from django.conf import settings except ImportError: settings = None # NOQA def is_configured(): if settings is None: return False return settings.configured or os.environ.get('DJANGO_SETTINGS_MODULE') @pytest.fixture(autouse=True, scope='session') def _django_runner(request): if not is_configured(): return from django.test.simple import DjangoTestSuiteRunner runner = DjangoTestSuiteRunner(interactive=False) runner.setup_test_environment() request.addfinalizer(runner.teardown_test_environment) config = runner.setup_databases() def teardown_database(): runner.teardown_databases(config) request.addfinalizer(teardown_database) return runner
import os import pytest try: from django.conf import settings except ImportError: settings = None # NOQA def is_configured(): if settings is None: return False return settings.configured or os.environ.get('DJANGO_SETTINGS_MODULE') @pytest.fixture(autouse=True, scope='session') def _django_runner(request): if not is_configured(): return from django.test.simple import DjangoTestSuiteRunner try: import django django.setup() except AttributeError: pass runner = DjangoTestSuiteRunner(interactive=False) runner.setup_test_environment() request.addfinalizer(runner.teardown_test_environment) config = runner.setup_databases() def teardown_database(): runner.teardown_databases(config) request.addfinalizer(teardown_database) return runner
Deal with the Django app refactoring.
Deal with the Django app refactoring.
Python
apache-2.0
pombredanne/pytest-django-lite,dcramer/pytest-django-lite
2f152c5036d32a780741edd8fb6ce75684728824
singleuser/user-config.py
singleuser/user-config.py
import os mylang = 'test' family = 'wikipedia' # Not defining any extra variables here at all since that causes pywikibot # to issue a warning about potential misspellings if os.path.exists(os.path.expanduser('~/user-config.py')): with open(os.path.expanduser('~/user-config.py'), 'r') as f: exec( compile(f.read(), os.path.expanduser('~/user-config.py'), 'exec'), globals()) # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
import os mylang = 'test' family = 'wikipedia' custom_path = os.path.expanduser('~/user-config.py') if os.path.exists(custom_path): with open(custom_path, 'r') as f: exec(compile(f.read(), custom_path, 'exec'), globals()) # Things that should be non-easily-overridable usernames['*']['*'] = os.environ['JPY_USER']
Revert "Do not introduce extra variables"
Revert "Do not introduce extra variables" Since the 'f' is considered an extra variable and introduces a warning anyway :( Let's fix this the right way This reverts commit a03de68fb772d859098327d0e54a219fe4507072.
Python
mit
yuvipanda/paws,yuvipanda/paws
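The commit above comes down to one pattern: ship module-level defaults, then let an optional per-user file override them by exec-ing it into the module's globals. The following is a hedged, standalone sketch of that pattern only; the throwaway override file and the 'de' value are invented for the demo and are not taken from the paws repository.

import os
import tempfile

# Defaults that a user-supplied config may override.
mylang = 'test'
family = 'wikipedia'

# Write a throwaway override file purely to exercise the pattern.
custom_path = os.path.join(tempfile.mkdtemp(), 'user-config.py')
with open(custom_path, 'w') as f:
    f.write("mylang = 'de'\n")

if os.path.exists(custom_path):
    with open(custom_path, 'r') as f:
        # Compiling with the real path keeps tracebacks pointing at the user's
        # file; executing in globals() lets its assignments override the defaults.
        exec(compile(f.read(), custom_path, 'exec'), globals())

print(mylang)  # 'de'        -- overridden by the user file
print(family)  # 'wikipedia' -- default kept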
2d4016d8e4245a6e85c2bbea012d13471718b1b0
journal/views.py
journal/views.py
from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.views.generic import View from django.http import JsonResponse from rest_framework.parsers import JSONParser from .models import Entry from .serializers import EntrySerializer @method_decorator(csrf_exempt, name='dispatch') class RestView(View): def get(self, request): last = request.GET.get('last', None) if last is None: entries = Entry.objects.all() else: last_entry = Entry.objects.get(uuid=last) entries = Entry.objects.filter(id__gt=last_entry.id) serializer = EntrySerializer(entries, many=True) return JsonResponse({'entries': serializer.data}) @csrf_exempt def put(self, request): body = JSONParser().parse(request) serializer = EntrySerializer(data=body['entries'], many=True) if serializer.is_valid(): serializer.save() return JsonResponse({}, status=201) return JsonResponse(serializer.errors, status=400)
from django.utils.decorators import method_decorator from django.views.decorators.csrf import csrf_exempt from django.views.generic import View from django.http import JsonResponse from rest_framework.parsers import JSONParser from .models import Entry from .serializers import EntrySerializer @method_decorator(csrf_exempt, name='dispatch') class RestView(View): def get(self, request): last = request.GET.get('last', None) tag = request.GET.get('tag', None) entries = Entry.objects.filter(tag=tag) if last is not None: last_entry = entries.get(uuid=last) entries = entries.filter(id__gt=last_entry.id) serializer = EntrySerializer(entries, many=True) return JsonResponse({'entries': serializer.data}) @csrf_exempt def put(self, request): tag = request.GET.get('tag', None) body = JSONParser().parse(request) serializer = EntrySerializer(data=body['entries'], many=True) if serializer.is_valid(): serializer.save(tag=tag) return JsonResponse({}, status=201) return JsonResponse(serializer.errors, status=400)
Add a way to specify a tag.
Add a way to specify a tag.
Python
agpl-3.0
etesync/journal-manager
a4c9dd451062b83b907a350ea30f2d36badb6522
parsers/__init__.py
parsers/__init__.py
import importlib parsers = """ singtao.STParser apple.AppleParser """.split() parser_dict = {} # Import the parser and fill in parser_dict: domain -> parser for parser_name in parsers: module, class_name = parser_name.rsplit('.', 1) parser = getattr(importlib.import_module('parsers.' + module), class_name) for domain in parser.domains: parser_dict[domain] = parser def get_parser(url): return parser_dict[url.split('/')[2]] # Each feeder places URLs into the database to be checked periodically. parsers = [parser for parser in parser_dict.values()] __all__ = ['parsers', 'get_parser']
import importlib parsers = """ singtao.STParser apple.AppleParser tvb.TVBParser """.split() parser_dict = {} # Import the parser and fill in parser_dict: domain -> parser for parser_name in parsers: module, class_name = parser_name.rsplit('.', 1) parser = getattr(importlib.import_module('parsers.' + module), class_name) for domain in parser.domains: parser_dict[domain] = parser def get_parser(url): return parser_dict[url.split('/')[2]] # Each feeder places URLs into the database to be checked periodically. parsers = [parser for parser in parser_dict.values()] __all__ = ['parsers', 'get_parser']
Add tvb Parser to the init
Add tvb Parser to the init
Python
mit
code4hk/hk-news-scrapper
a90889b773010d2fe2ed1dff133f951c0b5baea4
demo/__init__.py
demo/__init__.py
"""Package for PythonTemplateDemo.""" __project__ = 'PythonTemplateDemo' __version__ = '0.0.0' VERSION = __project__ + '-' + __version__ PYTHON_VERSION = 2, 7 import sys if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test) exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
"""Package for PythonTemplateDemo.""" import sys __project__ = 'PythonTemplateDemo' __version__ = '0.0.0' VERSION = __project__ + '-' + __version__ PYTHON_VERSION = 2, 7 if not sys.version_info >= PYTHON_VERSION: # pragma: no cover (manual test) exit("Python {}.{}+ is required.".format(*PYTHON_VERSION))
Deploy Travis CI build 387 to GitHub
Deploy Travis CI build 387 to GitHub
Python
mit
jacebrowning/template-python-demo
7e738ddbc1a4585f92e605369f8d6dc1d986dbec
scripts/get_cuda_version.py
scripts/get_cuda_version.py
import os nvcc_version_cmd = 'nvcc -V > output.txt' os.system(nvcc_version_cmd) with open('output.txt') as f: lines = f.readlines() for line in lines: if ", release" in line: start = line.index(', release') + 10 end = line.index('.', start) result = line[start:end] print(result) quit()
import os nvcc_version_cmd = 'nvcc -V > output.txt' os.system(nvcc_version_cmd) with open('output.txt') as f: lines = f.readlines() for line in lines: if ", release" in line: start = line.index(', release') + 10 end = line.index('.', start) result = line[start:end] print(result) os.remove("output.txt") quit()
Remove output.txt file after done
Remove output.txt file after done After we are done detecting nvcc version, let's delete the temporary output.txt file.
Python
mit
GOMC-WSU/GOMC,GOMC-WSU/GOMC,GOMC-WSU/GOMC,GOMC-WSU/GOMC,GOMC-WSU/GOMC
7a37e7ab531c151b6426dcf04647e063f3c0b0d6
service/urls.py
service/urls.py
from django.conf.urls import url, include from rest_framework import routers import service.authors.views import service.friendrequest.views import service.users.views import service.nodes.views import service.posts.views router = routers.DefaultRouter() router.register(r'users', service.users.views.UserViewSet) router.register(r'nodes', service.nodes.views.NodeViewSet) router.register(r'author', service.authors.views.AuthorViewSet, base_name="author") # Wire up our API using automatic URL routing. # Additionally, we include login URLs for the browseable API. urlpatterns = [ url(r'^author/posts/', service.posts.views.AllPostsViewSet.as_view({"get": "list"}), name='all-posts-list'), url(r'^', include(router.urls)), url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^friendrequest/', service.friendrequest.views.friendrequest, name='friend-request'), url(r'^posts/', service.posts.views.PublicPostsList.as_view(), name='public-posts-list'), url(r'^posts/(?P<pk>[0-9a-z\\-]+)/', service.posts.views.AllPostsViewSet.as_view({"get": "retrieve"}), name='all-posts-detail'), url(r'^author/(?P<pk>[0-9a-z\\-]+)/posts/', service.posts.views.AuthorPostsList.as_view(), name='author-posts-list'), ]
from django.conf.urls import url, include from rest_framework import routers import service.authors.views import service.friendrequest.views import service.users.views import service.nodes.views import service.posts.views router = routers.DefaultRouter() router.register(r'users', service.users.views.UserViewSet) router.register(r'nodes', service.nodes.views.NodeViewSet) router.register(r'author', service.authors.views.AuthorViewSet, base_name="author") # Wire up our API using automatic URL routing. # Additionally, we include login URLs for the browseable API. urlpatterns = [ url(r'^author/posts/', service.posts.views.AllPostsViewSet.as_view({"get": "list"}), name='all-posts-list'), url(r'^', include(router.urls)), url(r'^auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^friendrequest/', service.friendrequest.views.friendrequest, name='friend-request'), url(r'^posts/', service.posts.views.PublicPostsList.as_view(), name='public-posts-list'), url(r'^posts/(?P<pk>[0-9a-z\\-]+)/', service.posts.views.AllPostsViewSet.as_view({"get": "retrieve"}), name='post-detail'), url(r'^author/(?P<pk>[0-9a-z\\-]+)/posts/', service.posts.views.AuthorPostsList.as_view(), name='author-posts-list'), ]
Fix bug caused by giving post detail view a new name
Fix bug caused by giving post detail view a new name
Python
apache-2.0
TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution,TeamAADGT/CMPUT404-project-socialdistribution
a6f0b0db3e32c71e89d73db8997308e67aae294f
setup_cython.py
setup_cython.py
from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext core = Extension( 'geopy.core', ["geopy/core.pyx"], language='c++', libraries=['stdc++'], ) setup( cmdclass = {'build_ext': build_ext}, include_dirs = [], ext_modules = [core] )
import os from distutils.core import setup from distutils.extension import Extension from Cython.Distutils import build_ext core = Extension( 'geometry.core', [os.path.join("geometry", "core.pyx")], language='c++', libraries=['stdc++'], include_dirs = ['.'], ) setup( cmdclass = {'build_ext': build_ext}, include_dirs = [], ext_modules = [core] )
Make module path OS independent by using os.path.join
Make module path OS independent by using os.path.join
Python
bsd-3-clause
FRidh/python-geometry
0571864eb2d99b746386ace721b8e218f127c6ac
email_obfuscator/templatetags/email_obfuscator.py
email_obfuscator/templatetags/email_obfuscator.py
from django import template from django.template.defaultfilters import stringfilter from django.utils.safestring import mark_safe register = template.Library() def obfuscate_string(value): return ''.join(['&#%s;'.format(str(ord(char))) for char in value]) @register.filter @stringfilter def obfuscate(value): return mark_safe(obfuscate_string(value)) @register.filter @stringfilter def obfuscate_mailto(value, text=False): mail = obfuscate_string(value) if text: link_text = text else: link_text = mail return mark_safe('<a href="%s%s">%s</a>'.format( obfuscate_string('mailto:'), mail, link_text))
from django import template from django.template.defaultfilters import stringfilter from django.utils.safestring import mark_safe register = template.Library() def obfuscate_string(value): return ''.join(['&#{0:s};'.format(str(ord(char))) for char in value]) @register.filter @stringfilter def obfuscate(value): return mark_safe(obfuscate_string(value)) @register.filter @stringfilter def obfuscate_mailto(value, text=False): mail = obfuscate_string(value) if text: link_text = text else: link_text = mail return mark_safe('<a href="{0:s}{1:s}">{2:s}</a>'.format( obfuscate_string('mailto:'), mail, link_text))
Fix mixup of old and new-style string formatting
Fix mixup of old and new-style string formatting
Python
mit
morninj/django-email-obfuscator
41ac7e2d85126c2fe5dd16230ed678d72a8d048f
jax/__init__.py
jax/__init__.py
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1') version_file = os.path.join(os.path.abspath(os.path.dirname(__file__)), "version.py") with open(version_file) as f: exec(f.read(), globals()) from jax.api import * import jax.numpy as np # side-effecting import sets up operator overloads
# Copyright 2018 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os os.environ.setdefault('TF_CPP_MIN_LOG_LEVEL', '1') from jax.version import __version__ from jax.api import * import jax.numpy as np # side-effecting import sets up operator overloads
Use a regular import to add jax.__version__ rather than exec() trickery.
Use a regular import to add jax.__version__ rather than exec() trickery. (The exec() trickery is needed for setup.py, but not for jax/__init__.py.)
Python
apache-2.0
tensorflow/probability,google/jax,google/jax,google/jax,google/jax,tensorflow/probability
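The message above draws a useful line: a package's own __init__ can simply import its version, while setup.py has to read the version file without importing the package, since the package's dependencies may not be installed at build time; that is where the exec() approach earns its keep. Below is a minimal sketch of the setup.py side only, under the assumption that the version file contains nothing but a __version__ assignment; the demo file is created on the fly and this is not jax's actual build code.

import os
import tempfile

def read_version(version_file):
    """Read __version__ from a file without importing the package."""
    scope = {}
    with open(version_file) as f:
        exec(f.read(), scope)      # executes: __version__ = "..."
    return scope["__version__"]

# Throwaway version.py for the demo.
pkg_dir = tempfile.mkdtemp()
version_path = os.path.join(pkg_dir, "version.py")
with open(version_path, "w") as f:
    f.write('__version__ = "1.2.3"\n')

print(read_version(version_path))   # -> 1.2.3

# Inside the installed package itself, a plain import is all that is needed:
#     from mypackage.version import __version__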
e1c6b7c369395208b467fcf169b6e3d0eb8c8dd9
src/rlib/string_stream.py
src/rlib/string_stream.py
from rpython.rlib.streamio import Stream, StreamError class StringStream(Stream): def __init__(self, string): self._string = string self.pos = 0 self.max = len(string) - 1 def write(self, data): raise StreamError("StringStream is not writable") def truncate(self, size): raise StreamError("StringStream is immutable") def peek(self): if self.pos < self.max: return self._string[self.pos:] else: return '' def tell(self): return self.pos def seek(self, offset, whence): if whence == 0: self.pos = max(0, offset) elif whence == 1: self.pos = max(0, self.pos + offset) elif whence == 2: self.pos = max(0, self.max + offset) else: raise StreamError("seek(): whence must be 0, 1 or 2") def read(self, n): assert isinstance(n, int) end = self.pos + n data = self._string[self.pos:end] self.pos += len(data) return data
from rpython.rlib.streamio import Stream, StreamError class StringStream(Stream): def __init__(self, string): self._string = string self.pos = 0 self.max = len(string) - 1 def write(self, data): raise StreamError("StringStream is not writable") def truncate(self, size): raise StreamError("StringStream is immutable") def tell(self): return self.pos def seek(self, offset, whence): if whence == 0: self.pos = max(0, offset) elif whence == 1: self.pos = max(0, self.pos + offset) elif whence == 2: self.pos = max(0, self.max + offset) else: raise StreamError("seek(): whence must be 0, 1 or 2") def read(self, n): assert isinstance(n, int) end = self.pos + n assert end >= 0 data = self._string[self.pos:end] self.pos += len(data) return data
Fix StringStream to conform to latest pypy
Fix StringStream to conform to latest pypy Signed-off-by: Stefan Marr <[email protected]>
Python
mit
SOM-st/RPySOM,smarr/RTruffleSOM,smarr/RTruffleSOM,smarr/PySOM,SOM-st/RPySOM,SOM-st/RTruffleSOM,SOM-st/PySOM,SOM-st/RTruffleSOM,SOM-st/PySOM,smarr/PySOM
98925a82dfb45a4c76496cd11af8d1483a678e6e
sigh/views/api.py
sigh/views/api.py
import json from functools import wraps from flask import Blueprint from flask import Response from ..models import Tag api_views = Blueprint('api', __name__, url_prefix='/api/') def jsonify(func): @wraps(func) def _(*args, **kwargs): result = func(*args, **kwargs) return Response(json.dumps(result), mimetype='application/json') return _ @api_views.route('tag/autocompletion/<q>') @jsonify def autocomplete_tag(q): tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags
import json from functools import wraps from flask import Blueprint from flask import Response from ..models import Tag from ..models import User api_views = Blueprint('api', __name__, url_prefix='/api/') def jsonify(func): @wraps(func) def _(*args, **kwargs): result = func(*args, **kwargs) return Response(json.dumps(result), mimetype='application/json') return _ @api_views.route('tag/autocompletion/<q>') @jsonify def autocomplete_tag(q): tags = Tag.query.filter(Tag.searchable_name.ilike(u'%{}%'.format(q.lower()))).all() tags = [tag.to_dict('id_', 'display_name') for tag in tags] return tags @api_views.route('user/autocompletion/<q>') @jsonify def autocomplete_user(q): users = User.query.filter(User.username.ilike(u'%{}%'.format(q.lower()))).all() users = [user.to_dict('id_', 'name', 'username', 'avatar') for user in users] return users
Create a new API for User autocompletion
Create a new API for User autocompletion
Python
mit
kxxoling/Programmer-Sign,kxxoling/Programmer-Sign,kxxoling/Programmer-Sign
44b0634b387c3856aa26b711bf38862380ab1ffe
setup.py
setup.py
import os from setuptools import setup, find_packages ROOT = os.path.abspath(os.path.dirname(__file__)) setup( name='Flask-Mobility', version='0.1', url='http://github.com/rehandalal/flask-mobility/', license='BSD', author='Rehan Dalal', author_email='[email protected]', description='A Flask extension to simplify building mobile-friendly sites.', long_description=open(os.path.join(ROOT, 'README.rst')).read(), py_modules=['flask_mobility'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'setuptools', 'Flask' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
import os from setuptools import setup, find_packages ROOT = os.path.abspath(os.path.dirname(__file__)) setup( name='Flask-Mobility', version='0.1', url='http://github.com/rehandalal/flask-mobility/', license='BSD', author='Rehan Dalal', author_email='[email protected]', description='A Flask extension to simplify building mobile-friendly sites.', long_description=open(os.path.join(ROOT, 'README.rst')).read(), py_modules=['flask_mobility'], zip_safe=False, include_package_data=True, platforms='any', install_requires=[ 'setuptools', 'Flask' ], classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ] )
Add python versions to classifiers
Add python versions to classifiers
Python
bsd-3-clause
rehandalal/flask-mobility
f22945907bafb189645800db1e9ca804104b06db
setup.py
setup.py
""" The setup package to install TensorPy dependencies *> This does NOT include TensorFlow installation *> To install TensorFlow, use "./install_tensorflow.sh" """ from setuptools import setup, find_packages # noqa setup( name='tensorpy', version='1.0.1', url='http://tensorpy.com', author='Michael Mintz', author_email='@mintzworld', maintainer='Michael Mintz', description='The fast & easy way to get started with Tensorflow', license='The MIT License', install_requires=[ 'requests==2.11.1', 'six>=1.10.0', 'Pillow==3.4.1', 'BeautifulSoup==3.2.1', ], packages=['tensorpy'], )
""" The setup package to install TensorPy dependencies *> This does NOT include TensorFlow installation *> To install TensorFlow, use "./install_tensorflow.sh" """ from setuptools import setup, find_packages # noqa setup( name='tensorpy', version='1.0.1', url='http://tensorpy.com', author='Michael Mintz', author_email='@mintzworld', maintainer='Michael Mintz', description='The fast & easy way to get started with Tensorflow', license='The MIT License', install_requires=[ 'requests==2.11.1', 'six==1.10.0', 'Pillow==3.4.1', 'BeautifulSoup==3.2.1', ], packages=['tensorpy'], )
Update "six" to force version 1.10.0
Update "six" to force version 1.10.0
Python
mit
TensorPy/TensorPy,TensorPy/TensorPy
17e774c1c59411fd8b33d597b54f872466ec4fe7
setup.py
setup.py
#!/usr/bin/env python import os import setuptools setuptools.setup( name='remoteconfig', version='0.2.4', author='Max Zheng', author_email='maxzheng.os @t gmail.com', description=open('README.rst').read(), install_requires=[ 'localconfig>=0.4', 'requests', ], license='MIT', package_dir={'': 'src'}, packages=setuptools.find_packages('src'), include_package_data=True, setup_requires=['setuptools-git'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Topic :: Software Development :: Configuration', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ], keywords='configuration remote http config', )
#!/usr/bin/env python import os import setuptools setuptools.setup( name='remoteconfig', version='0.2.4', author='Max Zheng', author_email='maxzheng.os @t gmail.com', description='A simple wrapper for localconfig that allows for reading config from a remote server', long_description=open('README.rst').read(), url='https://github.com/maxzheng/remoteconfig', install_requires=[ 'localconfig>=0.4', 'requests', ], license='MIT', package_dir={'': 'src'}, packages=setuptools.find_packages('src'), include_package_data=True, setup_requires=['setuptools-git'], classifiers=[ 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 'Topic :: Software Development', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', ], keywords='configuration remote http config', )
Add long description / url
Add long description / url
Python
mit
maxzheng/remoteconfig
83cc9a5304e41e4ce517cfc739238a37f13f626a
matchzoo/data_pack/build_unit_from_data_pack.py
matchzoo/data_pack/build_unit_from_data_pack.py
from tqdm import tqdm from .data_pack import DataPack from matchzoo import processor_units def build_unit_from_data_pack( unit: processor_units.StatefulProcessorUnit, data_pack: DataPack, flatten: bool = True, verbose: int = 1 ) -> processor_units.StatefulProcessorUnit: """ Build a :class:`StatefulProcessorUnit` from a :class:`DataPack` object. :param unit: :class:`StatefulProcessorUnit` object to be built. :param data_pack: The input :class:`DataPack` object. :param flatten: Flatten the datapack or not. `True` to organize the :class:`DataPack` text as a list, and `False` to organize :class:`DataPack` text as a list of list. :param verbose: Verbosity. :return: A built :class:`StatefulProcessorUnit` object. """ corpus = [] if flatten: data_pack.apply_on_text(corpus.extend, verbose=verbose) else: data_pack.apply_on_text(corpus.append, verbose=verbose) if verbose: description = 'Building ' + unit.__class__.__name__ + \ ' from a datapack.' corpus = tqdm(corpus, desc=description) unit.fit(corpus) return unit
"""Build unit from data pack.""" from tqdm import tqdm from matchzoo import processor_units from .data_pack import DataPack def build_unit_from_data_pack( unit: processor_units.StatefulProcessorUnit, data_pack: DataPack, flatten: bool = True, verbose: int = 1 ) -> processor_units.StatefulProcessorUnit: """ Build a :class:`StatefulProcessorUnit` from a :class:`DataPack` object. :param unit: :class:`StatefulProcessorUnit` object to be built. :param data_pack: The input :class:`DataPack` object. :param flatten: Flatten the datapack or not. `True` to organize the :class:`DataPack` text as a list, and `False` to organize :class:`DataPack` text as a list of list. :param verbose: Verbosity. :return: A built :class:`StatefulProcessorUnit` object. """ corpus = [] if flatten: data_pack.apply_on_text(corpus.extend, verbose=verbose) else: data_pack.apply_on_text(corpus.append, verbose=verbose) if verbose: description = 'Building ' + unit.__class__.__name__ + \ ' from a datapack.' corpus = tqdm(corpus, desc=description) unit.fit(corpus) return unit
Update docs for build unit.
Update docs for build unit.
Python
apache-2.0
faneshion/MatchZoo,faneshion/MatchZoo
fac395f86a1fbe9a8c64a0f178b4dcaa3a218fb1
setup.py
setup.py
#!/usr/bin/env python """ Logan ====== Logan is a toolkit for running standalone Django applications. It provides you with tools to create a CLI runner, manage settings, and the ability to bootstrap the process. :copyright: (c) 2012 David Cramer. :license: Apache License 2.0, see LICENSE for more details. """ from setuptools import setup, find_packages setup( name='logan', version='0.2.2', author='David Cramer', author_email='[email protected]', url='http://github.com/dcramer/logan', description='Logan is a toolkit for building standalone Django applications.', packages=find_packages(exclude=["tests"]), long_description=__doc__, zip_safe=False, install_requires=[], tests_require=[ 'django>=1.2.5,<1.4', 'nose>=1.1.2', 'unittest2', ], test_suite='unittest2.collector', license='Apache License 2.0', include_package_data=True, classifiers=[ 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: OS Independent', 'Topic :: Software Development' ], )
#!/usr/bin/env python """ Logan ====== Logan is a toolkit for running standalone Django applications. It provides you with tools to create a CLI runner, manage settings, and the ability to bootstrap the process. :copyright: (c) 2012 David Cramer. :license: Apache License 2.0, see LICENSE for more details. """ from setuptools import setup, find_packages setup( name='logan', version='0.2.2', author='David Cramer', author_email='[email protected]', url='http://github.com/dcramer/logan', description='Logan is a toolkit for building standalone Django applications.', packages=find_packages(exclude=["tests"]), long_description=__doc__, zip_safe=False, install_requires=[], tests_require=[ 'django>=1.2.5,<1.5', 'nose>=1.1.2', 'unittest2', ], test_suite='unittest2.collector', license='Apache License 2.0', include_package_data=True, classifiers=[ 'Framework :: Django', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'Operating System :: OS Independent', 'Topic :: Software Development' ], )
Support Django 1.4 in the test suite
Support Django 1.4 in the test suite
Python
apache-2.0
dcramer/logan
880b5257d549c2150d8888a2f062acd9cc948480
array/is-crypt-solution.py
array/is-crypt-solution.py
# You have an array of strings crypt, the cryptarithm, and an an array containing the mapping of letters and digits, solution. The array crypt will contain three non-empty strings that follow the structure: [word1, word2, word3], which should be interpreted as the word1 + word2 = word3 cryptarithm # Write a solution where if crypt, when it is decoded by replacing all of the letters in the cryptarithm with digits using the mapping in solution, becomes a valid arithmetic equation containing no numbers with leading zeroes, the answer is true. If it does not become a valid arithmetic solution, the answer is false def isCryptSolution(crypt, solution): # map letters to given numbers dic = {} for key in solution: dic[key[0]] = int(key[1]) # generate input strings into numbers arr = [] for string in crypt: arr.append(0) for letter in string: arr[-1] = arr[-1]*10 + dic[letter]
# You have an array of strings crypt, the cryptarithm, and an an array containing the mapping of letters and digits, solution. The array crypt will contain three non-empty strings that follow the structure: [word1, word2, word3], which should be interpreted as the word1 + word2 = word3 cryptarithm # Write a solution where if crypt, when it is decoded by replacing all of the letters in the cryptarithm with digits using the mapping in solution, becomes a valid arithmetic equation containing no numbers with leading zeroes, the answer is true. If it does not become a valid arithmetic solution, the answer is false def isCryptSolution(crypt, solution): # map letters to given numbers dic = {} for key in solution: dic[key[0]] = int(key[1]) # generate input strings into numbers arr = [] for string in crypt: arr.append(0) for letter in string: arr[-1] = arr[-1]*10 + dic[letter] # check if sum of decoded numbers of first and second strings equal to decoded number of third string if arr[0] + arr[1] == arr[2]: if len(`arr[0]`) == len(crypt[0]): # check if decoded number of first string has any leading zero if len(`arr[1]`) == len(crypt[1]): # check if decoded number of second string has any leading zero if len(`arr[2]`) == len(crypt[2]): # check if decoded number of third string has any leading zero return True return False
Check if the sum of the decoded numbers of the first and second strings equals the decoded number of the third string
Check if the sum of the decoded numbers of the first and second strings equals the decoded number of the third string
Python
mit
derekmpham/interview-prep,derekmpham/interview-prep
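The subject above names the two checks the solver needs: the decoded first and second words must sum to the decoded third, and no decoded word may carry a leading zero. The record's own code tests the leading-zero condition with Python 2 backtick repr; the sketch below is an equivalent Python 3 rewrite of the same idea, with function and variable names of my own choosing rather than the repository's.

def is_crypt_solution(crypt, solution):
    # Map each letter to its digit.
    mapping = {letter: int(digit) for letter, digit in solution}

    # Decode each word into an integer.
    numbers = []
    for word in crypt:
        value = 0
        for letter in word:
            value = value * 10 + mapping[letter]
        numbers.append(value)

    # A decoded word has a leading zero exactly when it has fewer digits than
    # the word has letters (e.g. "OB" decoding to 3 gives 1 digit vs 2 letters).
    no_leading_zeros = all(
        len(str(number)) == len(word) for number, word in zip(numbers, crypt)
    )
    return numbers[0] + numbers[1] == numbers[2] and no_leading_zeros


print(is_crypt_solution(
    ["SEND", "MORE", "MONEY"],
    [["S", "9"], ["E", "5"], ["N", "6"], ["D", "7"],
     ["M", "1"], ["O", "0"], ["R", "8"], ["Y", "2"]]))   # True (9567 + 1085 == 10652)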
fef260c3731408592fd88e73817fe0f0cd7fe769
telemetry/telemetry/core/chrome/inspector_memory_unittest.py
telemetry/telemetry/core/chrome/inspector_memory_unittest.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os from telemetry.test import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): def testGetDOMStats(self): unittest_data_dir = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'unittest_data') self._browser.SetHTTPServerDirectories(unittest_data_dir) self._tab.Navigate( self._browser.http_server.UrlOf('dom_counter_sample.html')) self._tab.WaitForDocumentReadyStateToBeComplete() counts = self._tab.dom_stats self.assertEqual(counts['document_count'], 1) self.assertEqual(counts['node_count'], 14) self.assertEqual(counts['event_listener_count'], 2)
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import os from telemetry.test import tab_test_case class InspectorMemoryTest(tab_test_case.TabTestCase): def testGetDOMStats(self): unittest_data_dir = os.path.join(os.path.dirname(__file__), '..', '..', '..', 'unittest_data') self._browser.SetHTTPServerDirectories(unittest_data_dir) # Due to an issue with CrOS, we create a new tab here rather than # using self._tab to get a consistent starting page on all platforms tab = self._browser.tabs.New() tab.Navigate( self._browser.http_server.UrlOf('dom_counter_sample.html')) tab.WaitForDocumentReadyStateToBeComplete() counts = tab.dom_stats self.assertEqual(counts['document_count'], 2) self.assertEqual(counts['node_count'], 18) self.assertEqual(counts['event_listener_count'], 2)
Fix InspectorMemoryTest.testGetDOMStats to have consistent behaviour on CrOS and desktop versions of Chrome. Starting the browser in CrOS requires navigating through an initial setup that does not leave us with a tab at "chrome://newtab". This workaround runs the test in a new tab on all platforms for consistency.
Fix InspectorMemoryTest.testGetDOMStats to have consistent behaviour on CrOS and desktop versions of Chrome. Starting the browser in CrOS requires navigating through an initial setup that does not leave us with a tab at "chrome://newtab". This workaround runs the test in a new tab on all platforms for consistency. BUG=235634 TEST=InspectorMemoryTest.testGetDOMStats passes on cros and system NOTRY=true Review URL: https://chromiumcodereview.appspot.com/14672002 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@197490 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult,catapult-project/catapult-csm
ec91dc2fec8da044737c08db257f621d75016d3d
setup.py
setup.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import sys import os from setuptools import setup from pathlib import Path this_dir = Path(__file__).absolute().parent if sys.argv[-1].startswith('publish'): if os.system("pip list | grep wheel"): print("wheel not installed.\nUse `pip install wheel`.\nExiting.") sys.exit() if os.system("pip list | grep twine"): print("twine not installed.\nUse `pip install twine`.\nExiting.") sys.exit() os.system("python setup.py sdist bdist_wheel") if sys.argv[-1] == 'publishtest': os.system("twine upload -r test dist/*") else: os.system("twine upload dist/*") print("You probably want to also tag the version now.") sys.exit() if __name__ == "__main__": setup(use_scm_version={ "write_to": str(this_dir / "parglare" / "version.py"), "write_to_template": '__version__ = "{version}"\n', })
#!/usr/bin/env python3 # -*- coding: utf-8 -*- import sys import os from setuptools import setup from pathlib import Path this_dir = Path(__file__).absolute().parent if sys.argv[-1].startswith('publish'): if os.system("pip list | grep wheel"): print("wheel not installed.\nUse `pip install wheel`.\nExiting.") sys.exit() if os.system("pip list | grep twine"): print("twine not installed.\nUse `pip install twine`.\nExiting.") sys.exit() os.system("python setup.py sdist bdist_wheel") if sys.argv[-1] == 'publishtest': os.system("twine upload -r test dist/*") else: os.system("twine upload dist/*") sys.exit() if __name__ == "__main__": setup(use_scm_version={ "write_to": str(this_dir / "parglare" / "version.py"), "write_to_template": '__version__ = "{version}"\n', })
Remove print. Tag should be made before publish.
Remove print. Tag should be made before publish.
Python
mit
igordejanovic/parglare,igordejanovic/parglare
3b59809abe755954787482fd3112862dd54019eb
setup.py
setup.py
from setuptools import setup, find_packages install_requires = open('requirements.txt').read().split() setup( name='mocurly', version='0.0.1', description='A library that allows your python tests to easily mock out the recurly library', author='Yoriyasu Yano', author_email='[email protected]', url='https://github.com/Captricity/mocurly', packages=find_packages(exclude=("tests", "tests.*")), install_requires=install_requires, test_suite = 'tests' )
from setuptools import setup, find_packages install_requires = open('requirements.txt').read().split() setup( name='mocurly', version='0.0.1', description='A library that allows your python tests to easily mock out the recurly library', author='Yoriyasu Yano', author_email='[email protected]', url='https://github.com/Captricity/mocurly', packages=find_packages(exclude=("tests", "tests.*")), package_data={'mocurly': ['templates/*.xml']}, install_requires=install_requires, test_suite = 'tests' )
Install should include xml files
Install should include xml files
Python
mit
Captricity/mocurly
c6d81ce7eede6db801d4e9a92b27ec5d409d0eab
setup.py
setup.py
from setuptools import setup setup( name='autograd', version='1.4', description='Efficiently computes derivatives of numpy code.', author='Dougal Maclaurin and David Duvenaud and Matthew Johnson', author_email="[email protected], [email protected], [email protected]", packages=['autograd', 'autograd.numpy', 'autograd.scipy', 'autograd.scipy.stats', 'autograd.misc'], install_requires=['numpy>=1.12', 'future>=0.15.2'], keywords=['Automatic differentiation', 'backpropagation', 'gradients', 'machine learning', 'optimization', 'neural networks', 'Python', 'Numpy', 'Scipy'], url='https://github.com/HIPS/autograd', license='MIT', classifiers=['Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5'], )
from setuptools import setup setup( name='autograd', version='1.5', description='Efficiently computes derivatives of numpy code.', author='Dougal Maclaurin and David Duvenaud and Matthew Johnson', author_email="[email protected], [email protected], [email protected]", packages=['autograd', 'autograd.numpy', 'autograd.scipy', 'autograd.scipy.stats', 'autograd.misc'], install_requires=['numpy>=1.12', 'future>=0.15.2'], keywords=['Automatic differentiation', 'backpropagation', 'gradients', 'machine learning', 'optimization', 'neural networks', 'Python', 'Numpy', 'Scipy'], url='https://github.com/HIPS/autograd', license='MIT', classifiers=['Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5'], )
Increase version number for pypi
Increase version number for pypi
Python
mit
HIPS/autograd,HIPS/autograd
fa874329f57899eee34182f20b0621bf488cae5e
setup.py
setup.py
from setuptools import setup, find_packages setup( name='jupyterhub-kubespawner', version='0.5.1', install_requires=[ 'jupyterhub', 'pyYAML', 'kubernetes==2.*', 'escapism', 'jupyter', ], setup_requires=['pytest-runner'], tests_require=['pytest'], description='JupyterHub Spawner targeting Kubernetes', url='http://github.com/jupyterhub/kubespawner', author='Yuvi Panda', author_email='[email protected]', license='BSD', packages=find_packages(), )
from setuptools import setup, find_packages setup( name='jupyterhub-kubespawner', version='0.5.1', install_requires=[ 'jupyterhub', 'pyYAML', 'kubernetes==3.*', 'escapism', 'jupyter', ], setup_requires=['pytest-runner'], tests_require=['pytest'], description='JupyterHub Spawner targeting Kubernetes', url='http://github.com/jupyterhub/kubespawner', author='Yuvi Panda', author_email='[email protected]', license='BSD', packages=find_packages(), )
Switch to newer version of kubernetes client library
Switch to newer version of kubernetes client library Has a bunch of fixes for us that are useful!
Python
bsd-3-clause
yuvipanda/jupyterhub-kubernetes-spawner,ktong/kubespawner,jupyterhub/kubespawner
9d53f0b11c53127d556bc1027b82491d71a1a381
setup.py
setup.py
from setuptools import setup setup( name = 'PyFVCOM', packages = ['PyFVCOM'], version = '2.0.0', description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."), author = 'Pierre Cazenave', author_email = '[email protected]', url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM', download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=1.6.2', keywords = ['fvcom', 'unstructured grid', 'mesh'], license = 'MIT', platforms = 'any', install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide'], classifiers = [] )
from setuptools import setup setup( name = 'PyFVCOM', packages = ['PyFVCOM'], version = '2.1.0', description = ("PyFVCOM is a collection of various tools and utilities which can be used to extract, analyse and plot input and output files from FVCOM."), author = 'Pierre Cazenave', author_email = '[email protected]', url = 'https://gitlab.ecosystem-modelling.pml.ac.uk/fvcom/PyFVCOM', download_url = 'http://gitlab.em.pml.ac.uk/fvcom/PyFVCOM/repository/archive.tar.gz?ref=2.1.0', keywords = ['fvcom', 'unstructured grid', 'mesh'], license = 'MIT', platforms = 'any', install_requires = ['pyshp', 'jdcal', 'scipy', 'numpy', 'matplotlib', 'netCDF4', 'lxml', 'pyproj', 'pytz', 'networkx', 'UTide'], classifiers = [] )
Prepare for the next release.
Prepare for the next release.
Python
mit
pwcazenave/PyFVCOM
6a84b18f584c7f9b8a3d7d53605bce5be919b056
setup.py
setup.py
from setuptools import setup from pybib import __version__ setup(name='pybib', version=__version__, description='Fetch citation information, given a Digital Object Identifier', url='https://github.com/jgilchrist/pybib', author='Jonny Gilchrist', packages=['pybib'], install_requires=[ 'requests', 'python-termstyle', ], scripts=['bin/bib'])
from setuptools import setup from pybib import __version__ setup(name='pybib', version=__version__, description='Fetch citation information, given a Digital Object Identifier', long_description=open('README.rst').read(), url='https://github.com/jgilchrist/pybib', author='Jonny Gilchrist', packages=['pybib'], install_requires=[ 'requests', 'python-termstyle', ], scripts=['bin/bib'])
Fix the README for PyPi
Fix the README for PyPi
Python
bsd-3-clause
jgilchrist/pybib
d78916f43b289ff56d5a5d87f8db6fbf5f9d7436
setup.py
setup.py
from setuptools import setup setup( name="img2txt", version="2.0", author="hit9", author_email="[email protected]", description="Image to Ascii Text, can output to html or ansi terminal.", license="BSD", url="http://hit9.org/img2txt", install_requires=['docopt', 'Pillow'], scripts=['img2txt.py'] )
from setuptools import setup setup( name="img2txt.py", version="2.0", author="hit9", author_email="[email protected]", description="Image to Ascii Text, can output to html or ansi terminal.", license="BSD", url="http://hit9.org/img2txt", install_requires=['docopt', 'Pillow'], scripts=['img2txt.py'] )
Rename the package to img2txt.py
Rename the package to img2txt.py
Python
bsd-3-clause
hit9/img2txt,hit9/img2txt
16a6783a5671b58836d7c81395ff09b68acf1cb1
setup.py
setup.py
__author__ = 'yalnazov' from setuptools import setup setup( name='paymill-wrapper', version='2.0.0', description='Python wrapper for PAYMILL API', author='Aleksandar Yalnazov', author_email='[email protected]', url='https://github.com/paymill/paymill-python', license='MIT', packages=['paymill'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: MIT License', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=['requests >= 2.1.0', 'paymill-jsonobject>=0.7.1beta'] )
__author__ = 'yalnazov' from setuptools import setup setup( name='paymill-wrapper', version='2.1.0', description='Python wrapper for PAYMILL API', author='Aleksandar Yalnazov', author_email='[email protected]', url='https://github.com/paymill/paymill-python', license='MIT', packages=['paymill', 'paymill.models', 'paymill.services', 'paymill.utils'], classifiers=[ 'Development Status :: 4 - Beta', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'License :: OSI Approved :: MIT License', 'Topic :: Internet :: WWW/HTTP', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=['requests >= 2.1.0', 'paymill-jsonobject>=0.7.1beta'] )
Fix distribution and bump version.
Fix distribution and bump version.
Python
mit
lukasklein/paymill-python,paymill/paymill-python
3a0c7caadb46a69fb29fe34bd64de28c9b263fd6
restconverter.py
restconverter.py
# -*- coding: utf-8 -*- """ flaskjk.restconverter ~~~~~~~~~~~~~~~~~~~~~ Helper functions for converting RestructuredText This class heavily depends on the functionality provided by the docutils package. :copyright: (c) 2010 by Jochem Kossen. :license: BSD, see LICENSE for more details. """ from docutils import core from docutils.writers.html4css1 import Writer, HTMLTranslator class HTMLFragmentTranslator(HTMLTranslator): def __init__(self, document): HTMLTranslator.__init__(self, document) self.head_prefix = ['','','','',''] self.body_prefix = [] self.body_suffix = [] self.stylesheet = [] def astext(self): return ''.join(self.body) html_fragment_writer = Writer() html_fragment_writer.translator_class = HTMLFragmentTranslator def rest_to_html(s): """Convert ReST input to HTML output""" return core.publish_string(s, writer=html_fragment_writer)
# -*- coding: utf-8 -*- """ flaskjk.restconverter ~~~~~~~~~~~~~~~~~~~~~ Helper functions for converting RestructuredText This class heavily depends on the functionality provided by the docutils package. See http://wiki.python.org/moin/ReStructuredText for more information :copyright: (c) 2010 by Jochem Kossen. :license: BSD, see LICENSE for more details. """ from docutils import core from docutils.writers.html4css1 import Writer, HTMLTranslator class HTMLFragmentTranslator(HTMLTranslator): def __init__(self, document): HTMLTranslator.__init__(self, document) self.head_prefix = ['','','','',''] self.body_prefix = [] self.body_suffix = [] self.stylesheet = [] def astext(self): return ''.join(self.body) html_fragment_writer = Writer() html_fragment_writer.translator_class = HTMLFragmentTranslator def rest_to_html(s): """Convert ReST input to HTML output""" return core.publish_string(s, writer=html_fragment_writer) def rest_to_html_fragment(s): parts = core.publish_parts( source=s, writer_name='html') return parts['body_pre_docinfo']+parts['fragment']
Add rest_to_html_fragment to be able to convert just the body part
Add rest_to_html_fragment to be able to convert just the body part
Python
bsd-2-clause
jkossen/flaskjk
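The helper added above leans on docutils' publish_parts, which renders the source once and hands back named pieces of the result so a caller can keep just the body. A small usage sketch follows, assuming docutils is installed; the sample ReST input is invented for the example.

from docutils import core

rest_source = """
Hello ReST
==========

*emphasis*, **strong**, and a `link <https://example.com>`_.
"""

parts = core.publish_parts(source=rest_source, writer_name='html')

# 'body_pre_docinfo' + 'fragment' is just the rendered content, with no
# <html>/<head> wrapper, so it can be dropped into an existing page template.
print(parts['body_pre_docinfo'] + parts['fragment'])

# parts['whole'], by contrast, is the complete standalone HTML document.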
fe79e799f2d3862e4764c69e76ed5a7d0a132002
setup.py
setup.py
#!/usr/bin/env python from setuptools import setup import os __doc__ = """ Command line tool and library wrappers around iwlist and /etc/network/interfaces. """ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() install_requires = [ 'setuptools', 'pbkdf2', ] try: import argparse except: install_requires.append('argparse') version = '1.0.0' setup( name='wifi', version=version, author='Rocky Meza, Gavin Wahl', author_email='[email protected]', description=__doc__, long_description=read('README.rst'), packages=['wifi'], scripts=['bin/wifi'], test_suite='tests', platforms=["Debian"], license='BSD', install_requires=install_requires, classifiers=[ "License :: OSI Approved :: BSD License", "Topic :: System :: Networking", "Operating System :: POSIX :: Linux", "Environment :: Console", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", ], data_files=[ ('/etc/bash_completion.d/', ['extras/wifi-completion.bash']), ] )
#!/usr/bin/env python from setuptools import setup import os __doc__ = """ Command line tool and library wrappers around iwlist and /etc/network/interfaces. """ def read(fname): return open(os.path.join(os.path.dirname(__file__), fname)).read() install_requires = [ 'setuptools', 'pbkdf2', ] try: import argparse except: install_requires.append('argparse') version = '1.0.0' data_files = [ ('/etc/bash_completion.d/', ['extras/wifi-completion.bash']), ] for entry in data_files: # make sure we actually have write access to the target folder and if not don't # include it in data_files if not os.access(entry[0], os.W_OK): print("Skipping copying files to %s, no write access" % entry[0]) data_files.remove(entry) setup( name='wifi', version=version, author='Rocky Meza, Gavin Wahl', author_email='[email protected]', description=__doc__, long_description=read('README.rst'), packages=['wifi'], scripts=['bin/wifi'], test_suite='tests', platforms=["Debian"], license='BSD', install_requires=install_requires, classifiers=[ "License :: OSI Approved :: BSD License", "Topic :: System :: Networking", "Operating System :: POSIX :: Linux", "Environment :: Console", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3.3", ], data_files=data_files )
Check for write access for bashcompletion via os.access
Check for write access for bashcompletion via os.access Alternative version of pull request rockymeza/wifi#41 (which apparently isn't being worked on any more) that checks for write access before attempting to install the bashcompletion addition during installation -- when installed only as a library into a virtualenv, the installation will now complete without an issue.
Python
bsd-2-clause
rockymeza/wifi,cangelis/wifi,nicupavel/wifi,foosel/wifi,rockymeza/wifi,cangelis/wifi,foosel/wifi,simudream/wifi,nicupavel/wifi,simudream/wifi
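The message above is really about one guard: before asking setuptools to install files into a system directory, check with os.access whether the current user can write there (a virtualenv install usually cannot), and drop the entry otherwise. Here is a hedged sketch of that filter; the second target directory is made up for illustration, and building a new list sidesteps the pitfall of removing items from a list while iterating over it.

import os

candidate_data_files = [
    ('/etc/bash_completion.d/', ['extras/wifi-completion.bash']),
    ('/etc/hypothetical-other-dir/', ['extras/other-file.conf']),   # invented entry
]

# Keep only the targets the current user can actually write to.
data_files = []
for target_dir, files in candidate_data_files:
    if os.access(target_dir, os.W_OK):
        data_files.append((target_dir, files))
    else:
        print("Skipping copying files to %s, no write access" % target_dir)

# 'data_files' can then be passed to setuptools.setup(..., data_files=data_files).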
b0d54c11ea76c6485982e274e8226dfc6da25ceb
setup.py
setup.py
#!/usr/bin/env python # coding: utf-8 from distribute_setup import use_setuptools use_setuptools() try: from setuptools import setup except ImportError: from distutils.core import setup import sys extra = {} if sys.version_info >= (3,): extra['use_2to3'] = True setup(name='mockito', version='0.3.0', packages=['mockito', 'mockito_test', 'mockito_util'], url='http://code.google.com/p/mockito/wiki/MockitoForPython', download_url='http://bitbucket.org/szczepiq/mockito-python/downloads/', maintainer='mockito maintainers', maintainer_email='[email protected]', license='MIT', description='Spying framework', long_description='Mockito is a spying framework based on Java library with the same name.', classifiers=['Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Testing', 'Programming Language :: Python :: 3' ], test_loader = 'mockito_util.test:TestLoader', test_suite = 'mockito_test', **extra )
#!/usr/bin/env python # coding: utf-8 from distribute_setup import use_setuptools use_setuptools() try: from setuptools import setup except ImportError: from distutils.core import setup import sys extra = {} if sys.version_info >= (3,): extra['use_2to3'] = True setup(name='mockito', version='0.3.0', packages=['mockito', 'mockito_test', 'mockito_util'], url='http://code.google.com/p/mockito-python', download_url='http://code.google.com/p/mockito-python/downloads/list', maintainer='Mockito Maintainers', maintainer_email='[email protected]', license='MIT', description='Spying framework', long_description='Mockito is a spying framework based on Java library with the same name.', classifiers=['Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Topic :: Software Development :: Testing', 'Programming Language :: Python :: 3' ], test_loader = 'mockito_util.test:TestLoader', test_suite = 'mockito_test', **extra )
Change download and documentation URLs.
Change download and documentation URLs.
Python
mit
zhilts/pymockito,zhilts/pymockito
c64f9ebe17d9076958db502401567add4116b431
setup.py
setup.py
# -*- coding: utf-8 -*- from setuptools import setup setup( name='flake8-coding', version='0.1.0', description='Adds coding magic comment checks to flake8', long_description=open("README.rst").read(), classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Software Development', ], author='Takeshi KOMIYA', author_email='i.tkomiya at gmail.com', url='https://github.com/tk0miya/flake8-coding', license='Apache License 2.0', keywords='pep8 flake8 coding', py_modules=['flake8_coding'], install_requires=[ 'flake8', ], entry_points={ 'flake8.extension': ['C10 = flake8_coding:CodingChecker'], }, )
# -*- coding: utf-8 -*- from setuptools import setup from flake8_coding import __version__ setup( name='flake8-coding', version=__version__, description='Adds coding magic comment checks to flake8', long_description=open("README.rst").read(), classifiers=[ 'Development Status :: 3 - Alpha', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Topic :: Software Development', ], author='Takeshi KOMIYA', author_email='i.tkomiya at gmail.com', url='https://github.com/tk0miya/flake8-coding', license='Apache License 2.0', keywords='pep8 flake8 coding', py_modules=['flake8_coding'], install_requires=[ 'flake8', ], entry_points={ 'flake8.extension': ['C10 = flake8_coding:CodingChecker'], }, )
Load package version from flake8_coding.py
Load package version from flake8_coding.py
Python
apache-2.0
tk0miya/flake8-coding
17db9de51b816210728db7f58685b7d8e5545c65
src/__init__.py
src/__init__.py
from pkg_resources import get_distribution import codecs import json __version__ = get_distribution('rasa_nlu').version class Interpreter(object): def parse(self, text): raise NotImplementedError() @staticmethod def load_synonyms(entity_synonyms): if entity_synonyms: with codecs.open(entity_synonyms, encoding='utf-8') as infile: return json.loads(infile.read()) @staticmethod def replace_synonyms(entities, entity_synonyms): for i in range(len(entities)): entity_value = entities[i]["value"] if (type(entity_value) == unicode and type(entity_synonyms) == unicode and entity_value.lower() in entity_synonyms): entities[i]["value"] = entity_synonyms[entity_value]
from pkg_resources import get_distribution import codecs import json __version__ = get_distribution('rasa_nlu').version class Interpreter(object): def parse(self, text): raise NotImplementedError() @staticmethod def load_synonyms(entity_synonyms): if entity_synonyms: with codecs.open(entity_synonyms, encoding='utf-8') as infile: return json.loads(infile.read()) @staticmethod def replace_synonyms(entities, entity_synonyms): for i in range(len(entities)): entity_value = entities[i]["value"] if entity_value.lower() in entity_synonyms: entities[i]["value"] = entity_synonyms[entity_value.lower()]
Fix entity dict access key
Fix entity dict access key
Python
apache-2.0
RasaHQ/rasa_nlu,beeva-fernandocerezal/rasa_nlu,beeva-fernandocerezal/rasa_nlu,verloop/rasa_nlu,PHLF/rasa_nlu,verloop/rasa_nlu,PHLF/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu
32508c310075d779e368d09f8642db6ba9029a44
setup.py
setup.py
from distutils.core import setup setup( name = 'mustache', packages = ['mustache'], version = '0.1.3', description = 'Mustache templating in Python', author = 'Peter Downs', author_email = '[email protected]', url = 'https://github.com/peterldowns/python-mustache', download_url = 'https://github.com/peterldowns/python-mustache/tarball/v0.1.3', install_requirements = open('requirements.txt').read(), extras_require = { 'test' : open('tests/requirements.txt').read(), } keywords = [ 'templating', 'template', 'mustache', 'web'], classifiers = [ 'Programming Language :: Python', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Markup'], )
from distutils.core import setup setup( name = 'mustache', packages = ['mustache'], version = '0.1.3', description = 'Mustache templating in Python', author = 'Peter Downs', author_email = '[email protected]', url = 'https://github.com/peterldowns/python-mustache', download_url = 'https://github.com/peterldowns/python-mustache/tarball/v0.1.3', install_requirements = open('requirements.txt').read(), extras_require = { 'test' : open('tests/requirements.txt').read(), }, keywords = [ 'templating', 'template', 'mustache', 'web'], classifiers = [ 'Programming Language :: Python', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: Text Processing :: Markup'], )
Fix typo breaking PyPI push.
Fix typo breaking PyPI push.
Python
mit
peterldowns/python-mustache,peterldowns/python-mustache
de7f7dd544f41ccb38d4e132fe0c994728ec8efe
setup.py
setup.py
"""setup.py""" #pylint:disable=line-too-long from codecs import open as codecs_open try: from setuptools import setup except ImportError: from distutils.core import setup #pylint:disable=import-error,no-name-in-module with codecs_open('README.rst', 'r', 'utf-8') as f: readme = f.read() with codecs_open('HISTORY.rst', 'r', 'utf-8') as f: history = f.read() setup( name='jsonrpcclient', version='2.0.1', description='JSON-RPC client library.', long_description=readme + '\n\n' + history, author='Beau Barker', author_email='[email protected]', url='https://jsonrpcclient.readthedocs.org/', packages=['jsonrpcclient'], package_data={'jsonrpcclient': ['response-schema.json']}, include_package_data=True, install_requires=['jsonschema', 'future', 'requests', 'pyzmq'], tests_require=['tox'], classifiers=[ 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
"""setup.py""" #pylint:disable=line-too-long from codecs import open as codecs_open try: from setuptools import setup except ImportError: from distutils.core import setup #pylint:disable=import-error,no-name-in-module with codecs_open('README.rst', 'r', 'utf-8') as f: readme = f.read() with codecs_open('HISTORY.rst', 'r', 'utf-8') as f: history = f.read() setup( name='jsonrpcclient', version='2.0.1', description='JSON-RPC client library.', long_description=readme + '\n\n' + history, author='Beau Barker', author_email='[email protected]', url='https://jsonrpcclient.readthedocs.org/', packages=['jsonrpcclient'], package_data={'jsonrpcclient': ['response-schema.json']}, include_package_data=True, install_requires=['jsonschema', 'future'], tests_require=['tox'], classifiers=[ 'License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', ], )
Remove requests and pyzmq from package dependencies
Remove requests and pyzmq from package dependencies These will now have to be installed separately by the user depending on the transport protocol required.
Python
mit
bcb/jsonrpcclient
4cae30c31369ee840dd79fb30fa3023711415012
setup.py
setup.py
import backdropsend from setuptools import setup, find_packages setup( name='backdropsend', version=backdropsend.__VERSION__, packages=find_packages(exclude=['test*']), scripts=['backdrop-send'], # metadata for upload to PyPI author=backdropsend.__AUTHOR__, author_email=backdropsend.__AUTHOR_EMAIL__, maintainer='Government Digital Service', url='https://github.com/alphagov/backdrop-send', description='backdrop-send: CLI tool for sending data to Backdrop', license='MIT', keywords='api data performance_platform', data_files=[('/usr/share/man/man1', ['docs/backdrop-send.1.gz'])], install_requires=['requests', 'argparse'], )
import backdropsend from setuptools import setup, find_packages setup( name='backdropsend', version=backdropsend.__VERSION__, packages=find_packages(exclude=['test*']), scripts=['backdrop-send'], # metadata for upload to PyPI author=backdropsend.__AUTHOR__, author_email=backdropsend.__AUTHOR_EMAIL__, maintainer='Government Digital Service', url='https://github.com/alphagov/backdrop-send', description='backdrop-send: CLI tool for sending data to Backdrop', license='MIT', keywords='api data performance_platform', data_files=[('/usr/local/share/man/man1', ['docs/backdrop-send.1.gz'])], install_requires=['requests', 'argparse'], )
Install man pages to /usr/local/share for permissions
Install man pages to /usr/local/share for permissions @YolinaS @gtrogers
Python
mit
alphagov/backdropsend,alphagov/backdropsend
2a950c91416d3b92a91f4f245a37a95b418b4bab
custom/uth/tasks.py
custom/uth/tasks.py
from custom.uth.utils import create_case, match_case, attach_images_to_case, submit_error_case from custom.uth.models import SonositeUpload, VscanUpload from celery.task import task import io def get_files_from_doc(doc): files = {} for f in doc._attachments.keys(): files[f] = io.BytesIO(doc.fetch_attachment(f)) return files @task def async_create_case(upload_id): upload_doc = SonositeUpload.get(upload_id) files = get_files_from_doc(upload_doc) create_case(upload_doc.related_case_id, files) # TODO delete doc if processing is successful @task def async_find_and_attach(upload_id): try: upload_doc = VscanUpload.get(upload_id) files = get_files_from_doc(upload_doc) case = match_case( upload_doc.scanner_serial, upload_doc.scan_id, # upload_doc.date ) if case: files = {} for f in upload_doc._attachments.keys(): files[f] = io.BytesIO(upload_doc.fetch_attachment(f)) attach_images_to_case(case._id, files) else: return -1 # TODO delete doc if successful except: # mark the case as having errored (if we know what it is) # but reraise the error since we don't want to hide it if case: submit_error_case(case._id) raise
from custom.uth.utils import create_case, match_case, attach_images_to_case, submit_error_case from custom.uth.models import SonositeUpload, VscanUpload from celery.task import task import io def get_files_from_doc(doc): files = {} for f in doc._attachments.keys(): files[f] = io.BytesIO(doc.fetch_attachment(f)) return files @task def async_create_case(upload_id): upload_doc = SonositeUpload.get(upload_id) files = get_files_from_doc(upload_doc) create_case(upload_doc.related_case_id, files) upload_doc.delete() @task def async_find_and_attach(upload_id): case = None try: upload_doc = VscanUpload.get(upload_id) files = get_files_from_doc(upload_doc) case = match_case( upload_doc.scanner_serial, upload_doc.scan_id, ) if case: files = {} for f in upload_doc._attachments.keys(): files[f] = io.BytesIO(upload_doc.fetch_attachment(f)) attach_images_to_case(case._id, files) else: return -1 upload_doc.delete() except: # mark the case as having errored (if we know what it is) # but reraise the error since we don't want to hide it if case: submit_error_case(case._id) raise
Delete docs on task completion
Delete docs on task completion
Python
bsd-3-clause
puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq
5bb84d5eac353cd4bbe1843fccaca64161830591
savu/__init__.py
savu/__init__.py
# Copyright 2014 Diamond Light Source Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The Base level for Savu use with : import savu .. moduleauthor:: Mark Basham <[email protected]> """
# Copyright 2014 Diamond Light Source Ltd. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The Base level for Savu use with : import savu .. moduleauthor:: Mark Basham <[email protected]> """ from . import core from . import data from . import plugins
Update to make import of savu a little more useful
Update to make import of savu a little more useful
Python
apache-2.0
mjn19172/Savu,swtp1v07/Savu,swtp1v07/Savu,swtp1v07/Savu,mjn19172/Savu,swtp1v07/Savu,mjn19172/Savu,mjn19172/Savu,mjn19172/Savu
95ba22a3f8e4a8084fd19071a713be550158a569
setup.py
setup.py
from setuptools import setup, find_packages import sys import versioneer project_name = 'menpo3d' # Versioneer allows us to automatically generate versioning from # our git tagging system which makes releases simpler. versioneer.VCS = 'git' versioneer.versionfile_source = '{}/_version.py'.format(project_name) versioneer.versionfile_build = '{}/_version.py'.format(project_name) versioneer.tag_prefix = 'v' # tags are like v1.2.0 versioneer.parentdir_prefix = project_name + '-' # dirname like 'menpo-v1.2.0' install_requires = ['menpo==0.4.0', 'cyassimp==0.2.0', 'cyrasterize==0.2.0'] # These dependencies currently don't work on Python 3 if sys.version_info.major == 2: install_requires.append('mayavi==4.3.1') install_requires.append('menpo-pyvrml97==2.3.0a4') setup(name=project_name, version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='MenpoKit providing tools for 3D Computer Vision research', author='James Booth', author_email='[email protected]', packages=find_packages(), install_requires=install_requires )
from setuptools import setup, find_packages import sys import versioneer project_name = 'menpo3d' # Versioneer allows us to automatically generate versioning from # our git tagging system which makes releases simpler. versioneer.VCS = 'git' versioneer.versionfile_source = '{}/_version.py'.format(project_name) versioneer.versionfile_build = '{}/_version.py'.format(project_name) versioneer.tag_prefix = 'v' # tags are like v1.2.0 versioneer.parentdir_prefix = project_name + '-' # dirname like 'menpo-v1.2.0' install_requires = ['menpo==0.4.0', 'cyassimp==0.2.0', 'cyrasterize==0.2.0'] # These dependencies currently don't work on Python 3 if sys.version_info.major == 2: install_requires.append('mayavi==4.3.1') install_requires.append('menpo-pyvrml97==2.3.0a4') setup(name=project_name, version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), description='MenpoKit providing tools for 3D Computer Vision research', author='James Booth', author_email='[email protected]', packages=find_packages(), package_data={'menpo3d': ['data/*']}, install_requires=install_requires, tests_require=['nose==1.3.4', 'mock==1.0.1'] )
Include data folder and specific test dependencies
Include data folder and specific test dependencies
Python
bsd-3-clause
grigorisg9gr/menpo3d,nontas/menpo3d,nontas/menpo3d,grigorisg9gr/menpo3d
4531017c7c9e96a7a1108f39a906ddcac25ebd59
setup.py
setup.py
import os from setuptools import setup from setuptools import find_packages here = os.path.abspath(os.path.dirname(__file__)) try: with open(os.path.join(here, 'README.rst')) as f: README = f.read() with open(os.path.join(here, 'CHANGES.rst')) as f: CHANGES = f.read() except: README = '' CHANGES = '' setup( name='importscan', version='0.2.dev0', description='Recursively import modules and sub-packages', long_description=README + '\n\n' + CHANGES, classifiers=[ "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], keywords='decorator import package', author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools' ], extras_require=dict( test=['pytest >= 2.5.2', 'py >= 1.4.20', 'pytest-cov', 'pytest-remove-stale-bytecode'], ) )
import io from setuptools import setup, find_packages long_description = '\n'.join(( io.open('README.rst', encoding='utf-8').read(), io.open('CHANGES.txt', encoding='utf-8').read() )) setup( name='importscan', version='0.2.dev0', description='Recursively import modules and sub-packages', long_description=long_description, classifiers=[ "Intended Audience :: Developers", "Programming Language :: Python", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ], keywords='decorator import package', author="Martijn Faassen", author_email="[email protected]", license="BSD", packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=[ 'setuptools' ], extras_require=dict( test=['pytest >= 2.5.2', 'py >= 1.4.20', 'pytest-cov', 'pytest-remove-stale-bytecode'], ) )
Use io.open with encoding='utf-8' and flake8 compliance
Use io.open with encoding='utf-8' and flake8 compliance
Python
bsd-3-clause
faassen/importscan
ed400cd2ad3a98af3ea02e41e54758c5d42e72d4
setup.py
setup.py
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.md')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() requires = [ 'pyramid', 'pyramid_debugtoolbar', 'waitress', ] test_requires = requires + [ 'webtest', 'mock', ] setup(name='topdfserver', version='0.0', description='topdfserver', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="topdfserver", entry_points = """\ [paste.app_factory] main = topdfserver:main """, )
import os from setuptools import setup, find_packages here = os.path.abspath(os.path.dirname(__file__)) README = open(os.path.join(here, 'README.md')).read() CHANGES = open(os.path.join(here, 'CHANGES.txt')).read() requires = [ 'pyramid', 'pyramid_debugtoolbar', 'waitress', ] test_requires = requires + [ 'webtest', 'mock', 'coverage', ] setup(name='topdfserver', version='0.0', description='topdfserver', long_description=README + '\n\n' + CHANGES, classifiers=[ "Programming Language :: Python", "Framework :: Pylons", "Topic :: Internet :: WWW/HTTP", "Topic :: Internet :: WWW/HTTP :: WSGI :: Application", ], author='', author_email='', url='', keywords='web pyramid pylons', packages=find_packages(), include_package_data=True, zip_safe=False, install_requires=requires, tests_require=test_requires, test_suite="topdfserver", entry_points = """\ [paste.app_factory] main = topdfserver:main """, )
Add coverage to test dependencies
Add coverage to test dependencies
Python
agpl-3.0
makinacorpus/convertit,makinacorpus/convertit
6e71b0de777bf516d376397961ec232ec39ea195
setup.py
setup.py
from setuptools import setup try: from pypandoc import convert read_md = lambda f: convert(f, 'rst') except ImportError: print("warning: pypandoc module not found, could not convert Markdown to RST") read_md = lambda f: open(f, 'r').read() setup(name='centerline', version='0.1', description='Calculate the centerline of a polygon', long_description=read_md('README.md'), classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: GIS' ], url='https://github.com/fitodic/centerline.git', author='Filip Todic', author_email='[email protected]', license='MIT', packages=['centerline'], install_requires=['numpy', 'scipy', 'Shapely', 'GDAL', 'click', 'cligj', 'six', 'Fiona'], scripts=['bin/shp2centerline'], include_package_data=True, zip_safe=False)
from setuptools import setup try: from pypandoc import convert def read_md(): return lambda f: convert(f, 'rst') except ImportError: print( "warning: pypandoc module not found, could not convert Markdown to RST" ) def read_md(): return lambda f: open(f, 'r').read() setup(name='centerline', version='0.1', description='Calculate the centerline of a polygon', long_description=read_md('README.md'), classifiers=[ 'Development Status :: 4 - Beta', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 2.7', 'Intended Audience :: Science/Research', 'Topic :: Scientific/Engineering :: GIS' ], url='https://github.com/fitodic/centerline.git', author='Filip Todic', author_email='[email protected]', license='MIT', packages=['centerline'], install_requires=['numpy', 'scipy', 'Shapely', 'GDAL', 'click', 'cligj', 'six', 'Fiona'], scripts=['bin/shp2centerline'], include_package_data=True, zip_safe=False)
Define a MD->RST conversion function
Define a MD->RST conversion function
Python
mit
fitodic/centerline,fitodic/polygon-centerline,fitodic/centerline
7cd2249d231e8afc1384bc3757856e8fdbb234bf
setup.py
setup.py
from setuptools import setup from subprocess import check_output, CalledProcessError try: num_gpus = len(check_output(['nvidia-smi', '--query-gpu=gpu_name', '--format=csv']).decode().strip().split('\n')) tf = 'tensorflow-gpu' if num_gpus > 1 else 'tensorflow' except CalledProcessError: tf = 'tensorflow' except FileNotFoundError: tf = 'tensorflow' setup( name='autoencoder', version='0.1', description='An autoencoder implementation', author='Gokcen Eraslan', author_email="[email protected]", packages=['autoencoder'], install_requires=[tf, 'numpy>=1.7', 'keras>=1.2', 'six>=1.10.0', 'scikit-learn', 'pandas' #for preprocessing ], url='https://github.com/gokceneraslan/autoencoder', entry_points={ 'console_scripts': [ 'autoencoder = autoencoder.__main__:main' ]}, license='Apache License 2.0', classifiers=['License :: OSI Approved :: Apache Software License', 'Topic :: Scientific/Engineering :: Artificial Intelligence', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5'], )
from setuptools import setup from subprocess import check_output, CalledProcessError try: num_gpus = len(check_output(['nvidia-smi', '--query-gpu=gpu_name', '--format=csv']).decode().strip().split('\n')) tf = 'tensorflow-gpu' if num_gpus > 1 else 'tensorflow' except CalledProcessError: tf = 'tensorflow>=1.0.0' except FileNotFoundError: tf = 'tensorflow>=1.0.0' setup( name='autoencoder', version='0.1', description='An autoencoder implementation', author='Gokcen Eraslan', author_email="[email protected]", packages=['autoencoder'], install_requires=[tf, 'numpy>=1.7', 'keras>=1.2.2', 'six>=1.10.0', 'scikit-learn', 'pandas' #for preprocessing ], url='https://github.com/gokceneraslan/autoencoder', entry_points={ 'console_scripts': [ 'autoencoder = autoencoder.__main__:main' ]}, license='Apache License 2.0', classifiers=['License :: OSI Approved :: Apache Software License', 'Topic :: Scientific/Engineering :: Artificial Intelligence', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.5'], )
Make dependency check more stringent
Make dependency check more stringent
Python
apache-2.0
theislab/dca,theislab/dca,theislab/dca
43ae9bdec900081d6ff91fc3847a4d8d9a42eaeb
contrib/plugins/w3cdate.py
contrib/plugins/w3cdate.py
""" Add a 'w3cdate' key to every entry -- this contains the date in ISO8601 format WARNING: you must have PyXML installed as part of your python installation in order for this plugin to work Place this plugin early in your load_plugins list, so that the w3cdate will be available to subsequent plugins """ __author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(entry['timetuple']))
""" Add a 'w3cdate' key to every entry -- this contains the date in ISO8601 format WARNING: you must have PyXML installed as part of your python installation in order for this plugin to work Place this plugin early in your load_plugins list, so that the w3cdate will be available to subsequent plugins """ __author__ = "Ted Leung <[email protected]>" __version__ = "$Id:" __copyright__ = "Copyright (c) 2003 Ted Leung" __license__ = "Python" import xml.utils.iso8601 import time def cb_prepare(args): request = args["request"] form = request.getHttp()['form'] config = request.getConfiguration() data = request.getData() entry_list = data['entry_list'] for i in range(len(entry_list)): entry = entry_list[i] t = entry['timetuple'] # adjust for daylight savings time t = t[0],t[1],t[2],t[3]+time.localtime()[-1],t[4],t[5],t[6],t[7],t[8] entry['w3cdate'] = xml.utils.iso8601.ctime(time.mktime(t))
Fix daylight savings time bug
Fix daylight savings time bug
Python
mit
daitangio/pyblosxom,daitangio/pyblosxom,willkg/douglas,willkg/douglas
e0a6ea3d48691bedfb39a0a92d569ea4aaf61810
pavement.py
pavement.py
import paver.doctools import paver.setuputils from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file)
from schevo.release import setup_meta options( setup=setup_meta, sphinx=Bunch( docroot='doc', builddir='build', sourcedir='source', ), ) try: import paver.doctools except ImportError: pass else: @task @needs('paver.doctools.html') def openhtml(): index_file = path('doc/build/html/index.html') sh('open ' + index_file)
Make paver.doctools optional, to allow for downloading of ==dev eggs
Make paver.doctools optional, to allow for downloading of ==dev eggs Signed-off-by: Matthew R. Scott <[email protected]>
Python
mit
Schevo/schevo,Schevo/schevo
910d848f9c7ceb9133fe52c0c3f2df6c8ed4e4aa
phi/flow.py
phi/flow.py
# pylint: disable-msg = unused-import """ *Main PhiFlow import:* `from phi.flow import *` Imports important functions and classes from `math`, `geom`, `field`, `physics` and `vis` (including sub-modules) as well as the modules and sub-modules themselves. See `phi.tf.flow`, `phi.torch.flow`, `phi.jax.flow`. """ # Modules import numpy import numpy as np import phi from . import math, geom, field, physics, vis from .math import extrapolation, backend from .physics import fluid, flip, advect, diffuse # Classes from .math import DType, Solve from .geom import Geometry, Sphere, Box, Cuboid from .field import Grid, CenteredGrid, StaggeredGrid, GeometryMask, SoftGeometryMask, HardGeometryMask, Noise, PointCloud, Scene from .vis import view, Viewer, control from .physics._boundaries import Obstacle # Constants from .math import PI, INF, NAN # Functions from .math import wrap, tensor, spatial, channel, batch, instance from .geom import union from .vis import show # Exceptions from .math import ConvergenceException, NotConverged, Diverged
# pylint: disable-msg = unused-import """ *Main PhiFlow import:* `from phi.flow import *` Imports important functions and classes from `math`, `geom`, `field`, `physics` and `vis` (including sub-modules) as well as the modules and sub-modules themselves. See `phi.tf.flow`, `phi.torch.flow`, `phi.jax.flow`. """ # Modules import numpy import numpy as np import phi from . import math, geom, field, physics, vis from .math import extrapolation, backend from .physics import fluid, flip, advect, diffuse # Classes from .math import Tensor, DType, Solve from .geom import Geometry, Sphere, Box, Cuboid from .field import Grid, CenteredGrid, StaggeredGrid, GeometryMask, SoftGeometryMask, HardGeometryMask, Noise, PointCloud, Scene from .vis import view, Viewer, control from .physics._boundaries import Obstacle # Constants from .math import PI, INF, NAN # Functions from .math import wrap, tensor, spatial, channel, batch, instance from .geom import union from .vis import show # Exceptions from .math import ConvergenceException, NotConverged, Diverged
Add Tensor to standard imports
[Φ] Add Tensor to standard imports
Python
mit
tum-pbs/PhiFlow,tum-pbs/PhiFlow
f408d7e61753ecdeb280e59ecb35485385ec3f6a
Tools/compiler/compile.py
Tools/compiler/compile.py
import sys import getopt from compiler import compile, visitor ##import profile def main(): VERBOSE = 0 DISPLAY = 0 CONTINUE = 0 opts, args = getopt.getopt(sys.argv[1:], 'vqdc') for k, v in opts: if k == '-v': VERBOSE = 1 visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1 if k == '-q': if sys.platform[:3]=="win": f = open('nul', 'wb') # /dev/null fails on Windows... else: f = open('/dev/null', 'wb') sys.stdout = f if k == '-d': DISPLAY = 1 if k == '-c': CONTINUE = 1 if not args: print "no files to compile" else: for filename in args: if VERBOSE: print filename try: compile(filename, DISPLAY) ## profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`), ## filename + ".prof") except SyntaxError, err: print err print err.lineno if not CONTINUE: sys.exit(-1) if __name__ == "__main__": main()
import sys import getopt from compiler import compile, visitor import profile def main(): VERBOSE = 0 DISPLAY = 0 PROFILE = 0 CONTINUE = 0 opts, args = getopt.getopt(sys.argv[1:], 'vqdcp') for k, v in opts: if k == '-v': VERBOSE = 1 visitor.ASTVisitor.VERBOSE = visitor.ASTVisitor.VERBOSE + 1 if k == '-q': if sys.platform[:3]=="win": f = open('nul', 'wb') # /dev/null fails on Windows... else: f = open('/dev/null', 'wb') sys.stdout = f if k == '-d': DISPLAY = 1 if k == '-c': CONTINUE = 1 if k == '-p': PROFILE = 1 if not args: print "no files to compile" else: for filename in args: if VERBOSE: print filename try: if PROFILE: profile.run('compile(%s, %s)' % (`filename`, `DISPLAY`), filename + ".prof") else: compile(filename, DISPLAY) except SyntaxError, err: print err print err.lineno if not CONTINUE: sys.exit(-1) if __name__ == "__main__": main()
Add -p option to invoke Python profiler
Add -p option to invoke Python profiler
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
a67176ba0ba06d1a7cfff5d8e21446bb78a30518
subscription/api.py
subscription/api.py
from tastypie import fields from tastypie.resources import ModelResource, ALL from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from subscription.models import Subscription, MessageSet from djcelery.models import PeriodicTask class PeriodicTaskResource(ModelResource): class Meta: queryset = PeriodicTask.objects.all() resource_name = 'periodic_task' list_allowed_methods = ['get'] include_resource_uri = True always_return_data = True authentication = ApiKeyAuthentication() class MessageSetResource(ModelResource): class Meta: queryset = MessageSet.objects.all() resource_name = 'message_set' list_allowed_methods = ['get'] include_resource_uri = True always_return_data = True authentication = ApiKeyAuthentication() class SubscriptionResource(ModelResource): schedule = fields.ToOneField(PeriodicTaskResource, 'schedule') message_set = fields.ToOneField(MessageSetResource, 'message_set') class Meta: queryset = Subscription.objects.all() resource_name = 'subscription' list_allowed_methods = ['post', 'get'] include_resource_uri = True always_return_data = True authentication = ApiKeyAuthentication() authorization = Authorization() filtering = { 'to_addr': ALL, 'user_account': ALL }
from tastypie import fields from tastypie.resources import ModelResource, ALL from tastypie.authentication import ApiKeyAuthentication from tastypie.authorization import Authorization from subscription.models import Subscription, MessageSet from djcelery.models import PeriodicTask class PeriodicTaskResource(ModelResource): class Meta: queryset = PeriodicTask.objects.all() resource_name = 'periodic_task' list_allowed_methods = ['get'] include_resource_uri = True always_return_data = True authentication = ApiKeyAuthentication() class MessageSetResource(ModelResource): class Meta: queryset = MessageSet.objects.all() resource_name = 'message_set' list_allowed_methods = ['get'] include_resource_uri = True always_return_data = True authentication = ApiKeyAuthentication() class SubscriptionResource(ModelResource): schedule = fields.ToOneField(PeriodicTaskResource, 'schedule') message_set = fields.ToOneField(MessageSetResource, 'message_set') class Meta: queryset = Subscription.objects.all() resource_name = 'subscription' list_allowed_methods = ['post', 'get', 'put', 'patch'] include_resource_uri = True always_return_data = True authentication = ApiKeyAuthentication() authorization = Authorization() filtering = { 'to_addr': ALL, 'user_account': ALL }
Update tastypie methods allowed for subscriptions
Update tastypie methods allowed for subscriptions
Python
bsd-3-clause
praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control,praekelt/ndoh-control
dfb6d41be3acf5fc4d4d0f3d8a7fb9d3507e9ae7
labware/microplates.py
labware/microplates.py
from .grid import GridContainer, GridItem from .liquids import LiquidWell class Microplate(GridContainer): rows = 12 cols = 8 volume = 100 min_vol = 50 max_vol = 90 height = 14.45 length = 127.76 width = 85.47 diameter = 7.15 depth = 3.25 a1_x = 14.38 a1_y = 11.24 spacing = 9 child_class = LiquidWell def well(self, position): return self.get_child(position) def calibrate(self, **kwargs): """ Coordinates should represent the center and near-bottom of well A1 with the pipette tip in place. """ super(Microplate, self).calibrate(**kwargs)
from .grid import GridContainer, GridItem from .liquids import LiquidWell class Microplate(GridContainer): rows = 12 cols = 8 volume = 100 min_vol = 50 max_vol = 90 height = 14.45 length = 127.76 width = 85.47 diameter = 7.15 depth = 3.25 a1_x = 14.38 a1_y = 11.24 spacing = 9 child_class = LiquidWell def well(self, position): return self.get_child(position) def calibrate(self, **kwargs): """ Coordinates should represent the center and near-bottom of well A1 with the pipette tip in place. """ super(Microplate, self).calibrate(**kwargs) class Microplate_96(Microplate): pass class Microplate_96_Deepwell(Microplate_96): volume = 400 min_vol = 50 max_vol = 380 height = 14.6 depth = 10.8
Revert of af99d4483acb36eda65b; Microplate subsets are special and important.
Revert of af99d4483acb36eda65b; Microplate subsets are special and important.
Python
apache-2.0
OpenTrons/opentrons-api,OpenTrons/opentrons-api,Opentrons/labware,OpenTrons/opentrons-api,OpenTrons/opentrons-api,OpenTrons/opentrons_sdk,OpenTrons/opentrons-api
abd4859f8bac46fd6d114352ffad4ee9af28aa5f
common/lib/xmodule/xmodule/tests/test_mongo_utils.py
common/lib/xmodule/xmodule/tests/test_mongo_utils.py
"""Tests for methods defined in mongo_utils.py""" import os from unittest import TestCase from uuid import uuid4 from pymongo import ReadPreference from django.conf import settings from xmodule.mongo_utils import connect_to_mongodb class MongoUtilsTests(TestCase): """ Tests for methods exposed in mongo_utils """ def test_connect_to_mongo_read_preference(self): """ Test that read_preference parameter gets converted to a valid pymongo read preference. """ host = 'edx.devstack.mongo' if 'BOK_CHOY_HOSTNAME' in os.environ else 'localhost' db = 'test_read_preference_%s' % uuid4().hex # Support for read_preference given in constant name form (ie. PRIMARY, SECONDARY_PREFERRED) connection = connect_to_mongodb(db, host, read_preference='SECONDARY_PREFERRED') self.assertEqual(connection.client.read_preference, ReadPreference.SECONDARY_PREFERRED) # Support for read_preference given as mongos name. connection = connect_to_mongodb(db, host, read_preference='secondaryPreferred') self.assertEqual(connection.client.read_preference, ReadPreference.SECONDARY_PREFERRED)
""" Tests for methods defined in mongo_utils.py """ import ddt import os from unittest import TestCase from uuid import uuid4 from pymongo import ReadPreference from django.conf import settings from xmodule.mongo_utils import connect_to_mongodb @ddt.ddt class MongoUtilsTests(TestCase): """ Tests for methods exposed in mongo_utils """ @ddt.data( ('PRIMARY', 'primary', ReadPreference.PRIMARY), ('SECONDARY_PREFERRED', 'secondaryPreferred', ReadPreference.SECONDARY_PREFERRED), ('NEAREST', 'nearest', ReadPreference.NEAREST), ) @ddt.unpack def test_connect_to_mongo_read_preference(self, enum_name, mongos_name, expected_read_preference): """ Test that read_preference parameter gets converted to a valid pymongo read preference. """ host = 'edx.devstack.mongo' if 'BOK_CHOY_HOSTNAME' in os.environ else 'localhost' db = 'test_read_preference_%s' % uuid4().hex # Support for read_preference given in constant name form (ie. PRIMARY, SECONDARY_PREFERRED) connection = connect_to_mongodb(db, host, read_preference=enum_name) self.assertEqual(connection.client.read_preference, expected_read_preference) # Support for read_preference given as mongos name. connection = connect_to_mongodb(db, host, read_preference=mongos_name) self.assertEqual(connection.client.read_preference, expected_read_preference)
Convert test to DDT and test for primary, nearest modes.
Convert test to DDT and test for primary, nearest modes.
Python
agpl-3.0
teltek/edx-platform,arbrandes/edx-platform,kmoocdev2/edx-platform,CredoReference/edx-platform,Stanford-Online/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,appsembler/edx-platform,ahmedaljazzar/edx-platform,appsembler/edx-platform,gsehub/edx-platform,gymnasium/edx-platform,jolyonb/edx-platform,a-parhom/edx-platform,stvstnfrd/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,jolyonb/edx-platform,ahmedaljazzar/edx-platform,msegado/edx-platform,ESOedX/edx-platform,ESOedX/edx-platform,philanthropy-u/edx-platform,appsembler/edx-platform,kmoocdev2/edx-platform,gsehub/edx-platform,a-parhom/edx-platform,gymnasium/edx-platform,hastexo/edx-platform,ESOedX/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,lduarte1991/edx-platform,gymnasium/edx-platform,kmoocdev2/edx-platform,a-parhom/edx-platform,msegado/edx-platform,Edraak/edraak-platform,procangroup/edx-platform,jolyonb/edx-platform,jolyonb/edx-platform,teltek/edx-platform,edx/edx-platform,cpennington/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,gymnasium/edx-platform,CredoReference/edx-platform,hastexo/edx-platform,procangroup/edx-platform,ahmedaljazzar/edx-platform,BehavioralInsightsTeam/edx-platform,lduarte1991/edx-platform,mitocw/edx-platform,procangroup/edx-platform,eduNEXT/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,lduarte1991/edx-platform,mitocw/edx-platform,hastexo/edx-platform,angelapper/edx-platform,CredoReference/edx-platform,proversity-org/edx-platform,edx-solutions/edx-platform,edx/edx-platform,msegado/edx-platform,philanthropy-u/edx-platform,eduNEXT/edx-platform,BehavioralInsightsTeam/edx-platform,teltek/edx-platform,gsehub/edx-platform,eduNEXT/edunext-platform,Edraak/edraak-platform,TeachAtTUM/edx-platform,Stanford-Online/edx-platform,philanthropy-u/edx-platform,EDUlib/edx-platform,cpennington/edx-platform,stvstnfrd/edx-platform,appsembler/edx-platform,mitocw/edx-platform,arbrandes/edx-platform,gsehub/edx-platform,proversity-org/edx-platform,eduNEXT/edx-platform,Stanford-Online/edx-platform,TeachAtTUM/edx-platform,EDUlib/edx-platform,edx/edx-platform,ESOedX/edx-platform,procangroup/edx-platform,cpennington/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,eduNEXT/edunext-platform,proversity-org/edx-platform,Stanford-Online/edx-platform,arbrandes/edx-platform,teltek/edx-platform,ahmedaljazzar/edx-platform,mitocw/edx-platform,BehavioralInsightsTeam/edx-platform,Edraak/edraak-platform,philanthropy-u/edx-platform,BehavioralInsightsTeam/edx-platform,TeachAtTUM/edx-platform,Edraak/edraak-platform,CredoReference/edx-platform,a-parhom/edx-platform,msegado/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,edx/edx-platform,eduNEXT/edunext-platform,edx-solutions/edx-platform,hastexo/edx-platform,stvstnfrd/edx-platform,stvstnfrd/edx-platform,lduarte1991/edx-platform,TeachAtTUM/edx-platform,edx-solutions/edx-platform
956cb919554c8103149fa6442254bdfed0ce32d1
lms/djangoapps/experiments/factories.py
lms/djangoapps/experiments/factories.py
import factory from experiments.models import ExperimentData, ExperimentKeyValue from student.tests.factories import UserFactory class ExperimentDataFactory(factory.DjangoModelFactory): class Meta(object): model = ExperimentData user = factory.SubFactory(UserFactory) experiment_id = factory.fuzzy.FuzzyInteger(0) key = factory.Sequence(lambda n: n) value = factory.Faker('word') class ExperimentKeyValueFactory(factory.DjangoModelFactory): class Meta(object): model = ExperimentKeyValue experiment_id = factory.fuzzy.FuzzyInteger(0) key = factory.Sequence(lambda n: n) value = factory.Faker('word')
import factory import factory.fuzzy from experiments.models import ExperimentData, ExperimentKeyValue from student.tests.factories import UserFactory class ExperimentDataFactory(factory.DjangoModelFactory): class Meta(object): model = ExperimentData user = factory.SubFactory(UserFactory) experiment_id = factory.fuzzy.FuzzyInteger(0) key = factory.Sequence(lambda n: n) value = factory.Faker('word') class ExperimentKeyValueFactory(factory.DjangoModelFactory): class Meta(object): model = ExperimentKeyValue experiment_id = factory.fuzzy.FuzzyInteger(0) key = factory.Sequence(lambda n: n) value = factory.Faker('word')
Add an import of a submodule to make pytest less complainy
Add an import of a submodule to make pytest less complainy
Python
agpl-3.0
angelapper/edx-platform,TeachAtTUM/edx-platform,a-parhom/edx-platform,CredoReference/edx-platform,gsehub/edx-platform,eduNEXT/edunext-platform,eduNEXT/edx-platform,stvstnfrd/edx-platform,eduNEXT/edx-platform,TeachAtTUM/edx-platform,Stanford-Online/edx-platform,a-parhom/edx-platform,eduNEXT/edx-platform,ahmedaljazzar/edx-platform,msegado/edx-platform,lduarte1991/edx-platform,kmoocdev2/edx-platform,edx-solutions/edx-platform,kmoocdev2/edx-platform,a-parhom/edx-platform,Edraak/edraak-platform,cpennington/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,gymnasium/edx-platform,hastexo/edx-platform,stvstnfrd/edx-platform,lduarte1991/edx-platform,hastexo/edx-platform,appsembler/edx-platform,gsehub/edx-platform,jolyonb/edx-platform,eduNEXT/edunext-platform,msegado/edx-platform,jolyonb/edx-platform,a-parhom/edx-platform,Edraak/edraak-platform,gymnasium/edx-platform,kmoocdev2/edx-platform,teltek/edx-platform,teltek/edx-platform,TeachAtTUM/edx-platform,lduarte1991/edx-platform,msegado/edx-platform,ESOedX/edx-platform,angelapper/edx-platform,edx-solutions/edx-platform,cpennington/edx-platform,teltek/edx-platform,mitocw/edx-platform,appsembler/edx-platform,arbrandes/edx-platform,jolyonb/edx-platform,edx-solutions/edx-platform,philanthropy-u/edx-platform,arbrandes/edx-platform,stvstnfrd/edx-platform,EDUlib/edx-platform,proversity-org/edx-platform,gsehub/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,gymnasium/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,angelapper/edx-platform,kmoocdev2/edx-platform,gymnasium/edx-platform,cpennington/edx-platform,teltek/edx-platform,hastexo/edx-platform,CredoReference/edx-platform,procangroup/edx-platform,arbrandes/edx-platform,Stanford-Online/edx-platform,Edraak/edraak-platform,EDUlib/edx-platform,proversity-org/edx-platform,Stanford-Online/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,angelapper/edx-platform,procangroup/edx-platform,ahmedaljazzar/edx-platform,ahmedaljazzar/edx-platform,CredoReference/edx-platform,BehavioralInsightsTeam/edx-platform,gsehub/edx-platform,edx/edx-platform,EDUlib/edx-platform,mitocw/edx-platform,appsembler/edx-platform,Edraak/edraak-platform,mitocw/edx-platform,arbrandes/edx-platform,appsembler/edx-platform,proversity-org/edx-platform,eduNEXT/edunext-platform,philanthropy-u/edx-platform,ESOedX/edx-platform,edx/edx-platform,ESOedX/edx-platform,jolyonb/edx-platform,cpennington/edx-platform,kmoocdev2/edx-platform,ahmedaljazzar/edx-platform,procangroup/edx-platform,msegado/edx-platform,TeachAtTUM/edx-platform,philanthropy-u/edx-platform,procangroup/edx-platform,hastexo/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,proversity-org/edx-platform,ESOedX/edx-platform,BehavioralInsightsTeam/edx-platform,edx/edx-platform,BehavioralInsightsTeam/edx-platform,CredoReference/edx-platform,lduarte1991/edx-platform,philanthropy-u/edx-platform
ea3e9270788b251440b5f6fab1605361e0dc2ade
inonemonth/challenges/tests/test_forms.py
inonemonth/challenges/tests/test_forms.py
import unittest import django.test from django.core.exceptions import ValidationError from core.tests.setups import RobrechtSocialUserFactory from ..validators import RepoExistanceValidator ############################################################################### # Forms # ############################################################################### ''' from ..forms import InvestmentModelForm class InvestmentModelFormTestCase(TestCase): """ Tests for InvestmentModelForm """ def test_initial_value_of_investor_type(self): """ Verify initial value of investor_type field of InvestmentModelForm. """ investor_type_initial = InvestmentModelForm().fields["investor_type"].initial self.assertEqual(investor_type_initial, "PERSON") ''' ############################################################################### # Validators # ############################################################################### class RepoExistanceValidatorTestCase(django.test.TestCase): def test_name(self): user_rob = RobrechtSocialUserFactory() self.assertRaises(ValidationError, RepoExistanceValidator(user_rob), "asiakas/non_existing_branch")
import unittest import django.test from django.core.exceptions import ValidationError from core.tests.setups import RobrechtSocialUserFactory from ..validators import RepoExistanceValidator ############################################################################### # Forms # ############################################################################### ''' from ..forms import InvestmentModelForm class InvestmentModelFormTestCase(TestCase): """ Tests for InvestmentModelForm """ def test_initial_value_of_investor_type(self): """ Verify initial value of investor_type field of InvestmentModelForm. """ investor_type_initial = InvestmentModelForm().fields["investor_type"].initial self.assertEqual(investor_type_initial, "PERSON") ''' ############################################################################### # Validators # ############################################################################### # Test takes longer than average test because of requests call #@unittest.skip("") class RepoExistanceValidatorTestCase(django.test.TestCase): def test_repo_existance_validator(self): user_rob = RobrechtSocialUserFactory() self.assertRaises(ValidationError, RepoExistanceValidator(user_rob), "asiakas/non_existing_branch")
Add Comment to RepoExistanceValidator test and correct test name
Add Comment to RepoExistanceValidator test and correct test name
Python
mit
robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth,robrechtdr/inonemonth
0bdcb1c36432cfa0506c6dd667e4e1910edcd371
ixprofile_client/management/commands/createsuperuser.py
ixprofile_client/management/commands/createsuperuser.py
""" A management command to create a user with a given email. """ from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError from ixprofile_client.webservice import UserWebService from optparse import make_option class Command(BaseCommand): """ The command to create a superuser with a given email. """ option_list = BaseCommand.option_list + ( make_option('--email', default=None, help='Specifies the email for the superuser.'), make_option('--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user for input of ' + 'any kind. You must use --email with --noinput.'), ) def handle(self, *args, **options): interactive = options.get('interactive') email = options.get('email') verbosity = int(options.get('verbosity', 1)) if interactive and not email: email = raw_input("Email: ") if not email: raise CommandError("No email given.") user = User() user.email = email user.set_password(None) user.is_active = True user.is_staff = True user.is_superuser = True user_ws = UserWebService() user_ws.connect(user) if verbosity >= 1: self.stdout.write("Superuser created successfully.")
""" A management command to create a user with a given email. """ from django.contrib.auth.models import User from django.core.management.base import BaseCommand, CommandError from django.db import transaction from ixprofile_client.webservice import UserWebService from optparse import make_option class Command(BaseCommand): """ The command to create a superuser with a given email. """ option_list = BaseCommand.option_list + ( make_option('--email', default=None, help='Specifies the email for the superuser.'), make_option('--noinput', action='store_false', dest='interactive', default=True, help='Tells Django to NOT prompt the user for input of ' + 'any kind. You must use --email with --noinput.'), ) def handle(self, *args, **options): interactive = options.get('interactive') email = options.get('email') verbosity = int(options.get('verbosity', 1)) if interactive and not email: email = raw_input("Email: ") if not email: raise CommandError("No email given.") with transaction.atomic(): user, created = User.objects.get_or_create(email=email) user.set_password(None) user.is_active = True user.is_staff = True user.is_superuser = True user_ws = UserWebService() user_ws.connect(user) if verbosity >= 1: if created: self.stdout.write("Superuser created successfully.") else: self.stdout.write("Superuser flag added successfully.")
Handle the case where the user may already exist in the database
Handle the case where the user may already exist in the database
Python
mit
infoxchange/ixprofile-client,infoxchange/ixprofile-client
1cd3096322b5d4b4c4df0f1fba6891e29c911c53
spaces/utils.py
spaces/utils.py
import re import os def normalize_path(path): """ Normalizes a path: * Removes extra and trailing slashes * Converts special characters to underscore """ path = re.sub(r'/+', '/', path) # repeated slash path = re.sub(r'/*$', '', path) # trailing slash path = [to_slug(p) for p in path.split(os.sep)] return os.sep.join(path) # preserves leading slash def to_slug(value): """ Convert a string to a URL slug """ # Space to dashes value = re.sub(r'[\s_]+', '-', value) # Special characters value = re.sub(r'[^a-z0-9\-]+', '', value, flags=re.I) # Extra dashes value = re.sub(r'\-{2,}', '-', value) value = re.sub(r'(^\-)|(\-$)', '', value) return value
import re import os def normalize_path(path): """ Normalizes a path: * Removes extra and trailing slashes * Converts special characters to underscore """ if path is None: return "" path = re.sub(r'/+', '/', path) # repeated slash path = re.sub(r'/*$', '', path) # trailing slash path = [to_slug(p) for p in path.split(os.sep)] return os.sep.join(path) # preserves leading slash def to_slug(value): """ Convert a string to a URL slug. """ value = value.lower() # Space to dashes value = re.sub(r'[\s_]+', '-', value) # Special characters value = re.sub(r'[^a-z0-9\-]+', '', value, flags=re.I) # Extra dashes value = re.sub(r'\-{2,}', '-', value) value = re.sub(r'(^\-)|(\-$)', '', value) return value
Convert path to lowercase when normalizing
Convert path to lowercase when normalizing
Python
mit
jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces,jgillick/Spaces
33c03c8d50524dca3b9c5990958a0b44e9fe399e
isserviceup/services/models/statuspage.py
isserviceup/services/models/statuspage.py
import requests from bs4 import BeautifulSoup from isserviceup.services.models.service import Service, Status class StatusPagePlugin(Service): def get_status(self): r = requests.get(self.status_url) if r.status_code != 200: return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') status = next(x for x in b.find(class_='page-status').attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok elif status == 'status-critical': return Status.critical elif status == 'status-major': return Status.major elif status == 'status-minor': return Status.minor elif status == 'status-maintenance': return Status.maintenance else: raise Exception('unexpected status')
import requests from bs4 import BeautifulSoup from isserviceup.services.models.service import Service, Status class StatusPagePlugin(Service): def get_status(self): r = requests.get(self.status_url) if r.status_code != 200: return Status.unavailable b = BeautifulSoup(r.content, 'html.parser') page_status = b.find(class_='page-status') if page_status is None: if b.find(class_='unresolved-incidents'): return Status.major status = next(x for x in page_status.attrs['class'] if x.startswith('status-')) if status == 'status-none': return Status.ok elif status == 'status-critical': return Status.critical elif status == 'status-major': return Status.major elif status == 'status-minor': return Status.minor elif status == 'status-maintenance': return Status.maintenance else: raise Exception('unexpected status')
Use unresolved-incidents when page-status is empty
Use unresolved-incidents when page-status is empty
Python
apache-2.0
marcopaz/is-service-up,marcopaz/is-service-up,marcopaz/is-service-up
e49ac8daeabf82708f2ba7bb623d7db73e1fcaff
readthedocs/core/subdomain_urls.py
readthedocs/core/subdomain_urls.py
from django.conf.urls.defaults import url, patterns from urls import urlpatterns as main_patterns urlpatterns = patterns('', url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^projects/(?P<project_slug>[\w.-]+)', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.serve_docs', name='docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$', 'core.views.serve_docs', {'filename': 'index.html'}, name='docs_detail' ), url(r'^$', 'core.views.subdomain_handler'), ) urlpatterns += main_patterns
from django.conf.urls.defaults import url, patterns from urls import urlpatterns as main_patterns urlpatterns = patterns('', url(r'^projects/(?P<project_slug>[\w.-]+)/(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^projects/(?P<project_slug>[\w.-]+)', 'core.views.subproject_serve_docs', name='subproject_docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>[\w.-]+)/(?P<filename>.*)$', 'core.views.serve_docs', name='docs_detail' ), url(r'^(?P<lang_slug>\w{2})/(?P<version_slug>.*)/$', 'core.views.serve_docs', {'filename': 'index.html'}, name='docs_detail' ), url(r'^(?P<version_slug>.*)/$', 'core.views.subdomain_handler', name='version_subdomain_handler' ), url(r'^$', 'core.views.subdomain_handler'), ) urlpatterns += main_patterns
Add version_slug redirection back in for now.
Add version_slug redirection back in for now.
Python
mit
agjohnson/readthedocs.org,kdkeyser/readthedocs.org,istresearch/readthedocs.org,davidfischer/readthedocs.org,KamranMackey/readthedocs.org,espdev/readthedocs.org,ojii/readthedocs.org,singingwolfboy/readthedocs.org,michaelmcandrew/readthedocs.org,SteveViss/readthedocs.org,hach-que/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,dirn/readthedocs.org,sunnyzwh/readthedocs.org,GovReady/readthedocs.org,rtfd/readthedocs.org,titiushko/readthedocs.org,emawind84/readthedocs.org,CedarLogic/readthedocs.org,tddv/readthedocs.org,gjtorikian/readthedocs.org,espdev/readthedocs.org,mhils/readthedocs.org,kenshinthebattosai/readthedocs.org,CedarLogic/readthedocs.org,SteveViss/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,cgourlay/readthedocs.org,VishvajitP/readthedocs.org,singingwolfboy/readthedocs.org,mhils/readthedocs.org,laplaceliu/readthedocs.org,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org,attakei/readthedocs-oauth,cgourlay/readthedocs.org,kenwang76/readthedocs.org,safwanrahman/readthedocs.org,ojii/readthedocs.org,soulshake/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,fujita-shintaro/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,agjohnson/readthedocs.org,emawind84/readthedocs.org,cgourlay/readthedocs.org,CedarLogic/readthedocs.org,mrshoki/readthedocs.org,mrshoki/readthedocs.org,atsuyim/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,pombredanne/readthedocs.org,gjtorikian/readthedocs.org,mhils/readthedocs.org,kenwang76/readthedocs.org,michaelmcandrew/readthedocs.org,KamranMackey/readthedocs.org,pombredanne/readthedocs.org,royalwang/readthedocs.org,davidfischer/readthedocs.org,wijerasa/readthedocs.org,nyergler/pythonslides,wanghaven/readthedocs.org,jerel/readthedocs.org,nikolas/readthedocs.org,KamranMackey/readthedocs.org,Carreau/readthedocs.org,sid-kap/readthedocs.org,sid-kap/readthedocs.org,emawind84/readthedocs.org,LukasBoersma/readthedocs.org,asampat3090/readthedocs.org,stevepiercy/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,d0ugal/readthedocs.org,Carreau/readthedocs.org,wanghaven/readthedocs.org,sunnyzwh/readthedocs.org,d0ugal/readthedocs.org,clarkperkins/readthedocs.org,clarkperkins/readthedocs.org,hach-que/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,Tazer/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,kdkeyser/readthedocs.org,wijerasa/readthedocs.org
0484d3f14f29aa489bc848f1d83a9fb20183532e
plaidml/keras/tile_sandbox.py
plaidml/keras/tile_sandbox.py
from collections import OrderedDict

import numpy as np

import plaidml
import plaidml.keras
plaidml.keras.install_backend()
import keras.backend as K


def main(code, tensor_A, tensor_B, output_shape):
    print(K.backend())
    op = K._Op('sandbox_op', A.dtype, output_shape, code,
               OrderedDict([('A', tensor_A), ('B', tensor_B)]), ['O'])
    print(op.eval())


if __name__ == '__main__':
    plaidml._internal_set_vlog(3)
    A = K.variable(np.array([[1., 2., 3.], [4., 5., 6.]]))
    B = K.variable(np.array([-7., -1., 2.]))

    # code = """function (A[N, M], B[M]) -> (O) {
    #     O[i, j: N, M] = =(A[i, j] + B[j]), i/2 + j/2 + 1/2 < 2;
    # }"""
    # out_shape = (2, 3)

    code = """function (A[N, M], B[M]) -> (O) {
        O[i: N] = +(A[i - j, 0] + B[0]), j < N;
    }"""
    out_shape = (3,)

    main(code, A, B, out_shape)
from collections import OrderedDict

import numpy as np

import plaidml
import plaidml.tile as tile
import plaidml.keras
plaidml.keras.install_backend()
import keras.backend as K


class SandboxOp(tile.Operation):
    def __init__(self, code, a, b, output_shape):
        super(SandboxOp, self).__init__(code, [('A', a), ('B', b)], [('O', output_shape)])


def main(code, tensor_A, tensor_B, output_shape):
    print(K.backend())
    op = SandboxOp(code, tensor_A, tensor_B, tile.Shape(plaidml.DType.FLOAT32, output_shape))
    print(op.sole_output().shape)
    print(op.sole_output().eval())


if __name__ == '__main__':
    plaidml._internal_set_vlog(1)
    A = K.variable(np.arange(12).reshape(4, 3))
    B = K.variable(np.arange(3).reshape(3))

    code = """function (A[N, M], B[M]) -> (O) {
        O[i, j: N, M] = =(A[i, j] + B[j]), i/2 + j/2 + 1/2 < 2;
    }"""
    out_shape = (2, 3)

    main(code, A, B, out_shape)
Update Tile sandbox for op lib
Update Tile sandbox for op lib
Python
apache-2.0
plaidml/plaidml,plaidml/plaidml,plaidml/plaidml,plaidml/plaidml
583c946061f8af815c32254655f4aed8f0c18dc9
watcher/tests/api/test_config.py
watcher/tests/api/test_config.py
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import imp

from oslo_config import cfg

from watcher.api import config as api_config
from watcher.tests.api import base


class TestRoot(base.FunctionalTest):

    def test_config_enable_webhooks_auth(self):
        acl_public_routes = ['/']
        cfg.CONF.set_override('enable_webhooks_auth', True, 'api')
        imp.reload(api_config)
        self.assertEqual(acl_public_routes,
                         api_config.app['acl_public_routes'])

    def test_config_disable_webhooks_auth(self):
        acl_public_routes = ['/', '/v1/webhooks/.*']
        cfg.CONF.set_override('enable_webhooks_auth', False, 'api')
        imp.reload(api_config)
        self.assertEqual(acl_public_routes,
                         api_config.app['acl_public_routes'])
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import importlib

from oslo_config import cfg

from watcher.api import config as api_config
from watcher.tests.api import base


class TestRoot(base.FunctionalTest):

    def test_config_enable_webhooks_auth(self):
        acl_public_routes = ['/']
        cfg.CONF.set_override('enable_webhooks_auth', True, 'api')
        importlib.reload(api_config)
        self.assertEqual(acl_public_routes,
                         api_config.app['acl_public_routes'])

    def test_config_disable_webhooks_auth(self):
        acl_public_routes = ['/', '/v1/webhooks/.*']
        cfg.CONF.set_override('enable_webhooks_auth', False, 'api')
        importlib.reload(api_config)
        self.assertEqual(acl_public_routes,
                         api_config.app['acl_public_routes'])
Use importlib to take place of im module
Use importlib to take place of im module The imp module is deprecated[1] since version 3.4, use importlib to instead 1: https://docs.python.org/3/library/imp.html#imp.reload Change-Id: Ic126bc8e0936e5d7a2c7a910b54b7348026fedcb
Python
apache-2.0
openstack/watcher,openstack/watcher
ed5a151942ff6aeddeaab0fb2e23428821f89fc4
rovercode/drivers/grovepi_ultrasonic_ranger_binary.py
rovercode/drivers/grovepi_ultrasonic_ranger_binary.py
""" Class for communicating with the GrovePi ultrasonic ranger. Here we treat it as a binary sensor. """ import logging logging.basicConfig() LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.getLevelName('INFO')) try: from GrovePi.Software.Python.grovepi import ultrasonicRead except ImportError: LOGGER.warning("GrovePi lib unavailable. Using dummy.") from drivers.dummy_grovepi_interface import ultrasonicRead class GrovePiUltrasonicRangerBinary: """A module to read from the GrovePi Ultrasonic as a binary sensor.""" def __init__(self, port, binary_threshold): """Create a GrovePi Ultrasonic Ranger (Binary) driver module.""" self.port = int(port) self.binary_threshold = binary_threshold print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}") def is_high(self): """HIGH, meaning "not seeing something".""" # to match the old GPIO sensors, we'll make this sensor active low # False output means object detected # True output means no object detected return ultrasonicRead(self.port) > self.binary_threshold
""" Class for communicating with the GrovePi ultrasonic ranger. Here we treat it as a binary sensor. """ import logging logging.basicConfig() LOGGER = logging.getLogger(__name__) LOGGER.setLevel(logging.getLevelName('INFO')) try: from grovepi import ultrasonicRead except ImportError: LOGGER.warning("GrovePi lib unavailable. Using dummy.") from drivers.dummy_grovepi_interface import ultrasonicRead class GrovePiUltrasonicRangerBinary: """A module to read from the GrovePi Ultrasonic as a binary sensor.""" def __init__(self, port, binary_threshold): """Create a GrovePi Ultrasonic Ranger (Binary) driver module.""" self.port = int(port) self.binary_threshold = binary_threshold print(f"Setting up GrovePi Ultrasonic Ranger (Binary) on port {port}") def is_high(self): """HIGH, meaning "not seeing something".""" # to match the old GPIO sensors, we'll make this sensor active low # False output means object detected # True output means no object detected return ultrasonicRead(self.port) > self.binary_threshold
Fix grovepi import in sensor driver
Fix grovepi import in sensor driver
Python
apache-2.0
aninternetof/rover-code,aninternetof/rover-code,aninternetof/rover-code
95788f09949e83cf39588444b44eda55e13c6071
wluopensource/accounts/models.py
wluopensource/accounts/models.py
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save


class UserProfile(models.Model):
    user = models.ForeignKey(User, blank=True, unique=True)
    url = models.URLField("Website", blank=True, verify_exists=False)

    def __unicode__(self):
        return self.user.username


def profile_creation_handler(sender, **kwargs):
    if kwargs.get('created', False):
        UserProfile.objects.get_or_create(user=kwargs['instance'])

post_save.connect(profile_creation_handler, sender=User)
from django.contrib.auth.models import User
from django.db import models
from django.db.models.signals import post_save


class UserProfile(models.Model):
    user = models.ForeignKey(User, blank=True, unique=True)
    url = models.URLField("Website", blank=True)

    def __unicode__(self):
        return self.user.username


def profile_creation_handler(sender, **kwargs):
    if kwargs.get('created', False):
        UserProfile.objects.get_or_create(user=kwargs['instance'])

post_save.connect(profile_creation_handler, sender=User)
Remove verify false from user URL to match up with comment URL
Remove verify false from user URL to match up with comment URL
Python
bsd-3-clause
jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website,jeffcharles/Open-Source-at-Laurier-Website
d3bc714478c3f7a665b39dfb1b8d65e7bc59ccd0
utuputki-webui/utuputki/handlers/logout.py
utuputki-webui/utuputki/handlers/logout.py
# -*- coding: utf-8 -*-

from handlers.handlerbase import HandlerBase
from db import db_session, Session


class LogoutHandler(HandlerBase):
    def handle(self, packet_msg):
        # Remove session
        s = db_session()
        s.query(Session).filter_by(key=self.sock.sid).delete()
        s.commit()
        s.close()

        # Dump out log
        self.log.info("Logged out.")
        self.log.set_sid(None)

        # Deauthenticate & clear session ID
        self.sock.authenticated = False
        self.sock.sid = None
# -*- coding: utf-8 -*-

from handlers.handlerbase import HandlerBase
from db import db_session, Session


class LogoutHandler(HandlerBase):
    def handle(self, packet_msg):
        # Remove session
        s = db_session()
        s.query(Session).filter_by(key=self.sock.sid).delete()
        s.commit()
        s.close()

        # Dump out log
        self.log.info("Logged out.")
        self.log.set_sid(None)

        # Deauthenticate & clear session ID
        self.sock.authenticated = False
        self.sock.sid = None
        self.sock.uid = None
        self.sock.level = 0
Clear all session data from websocket obj
Clear all session data from websocket obj
Python
mit
katajakasa/utuputki2,katajakasa/utuputki2,katajakasa/utuputki2,katajakasa/utuputki2
5bcd0d538bad1393d2ecbc1f91556a7b873343c8
subprocrunner/_logger/_logger.py
subprocrunner/_logger/_logger.py
""" .. codeauthor:: Tsuyoshi Hombashi <[email protected]> """ from typing import Callable, Optional from ._null_logger import NullLogger MODULE_NAME = "subprocrunner" DEFAULT_ERROR_LOG_LEVEL = "WARNING" try: from loguru import logger LOGURU_INSTALLED = True logger.disable(MODULE_NAME) except ImportError: LOGURU_INSTALLED = False logger = NullLogger() # type: ignore def get_logging_method(log_level: Optional[str] = None) -> Callable: if not LOGURU_INSTALLED: return logger.debug if log_level is None: log_level = "DEBUG" method_table = { "QUIET": lambda _x: None, "TRACE": logger.trace, "DEBUG": logger.debug, "INFO": logger.info, "SUCCESS": logger.success, "WARNING": logger.warning, "ERROR": logger.error, "CRITICAL": logger.critical, } method = method_table.get(log_level) if method is None: raise ValueError("unknown log level: {}".format(log_level)) return method def set_logger(is_enable: bool, propagation_depth: int = 1) -> None: if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) def set_log_level(log_level): # deprecated return
""" .. codeauthor:: Tsuyoshi Hombashi <[email protected]> """ from typing import Callable, Optional from ._null_logger import NullLogger MODULE_NAME = "subprocrunner" DEFAULT_ERROR_LOG_LEVEL = "WARNING" try: from loguru import logger LOGURU_INSTALLED = True logger.disable(MODULE_NAME) except ImportError: LOGURU_INSTALLED = False logger = NullLogger() # type: ignore def get_logging_method(log_level: Optional[str] = None) -> Callable: if not LOGURU_INSTALLED: return logger.debug if log_level is None: log_level = "DEBUG" method_table = { "QUIET": lambda _x: None, "TRACE": logger.trace, "DEBUG": logger.debug, "INFO": logger.info, "SUCCESS": logger.success, "WARNING": logger.warning, "ERROR": logger.error, "CRITICAL": logger.critical, } method = method_table.get(log_level.upper()) if method is None: raise ValueError("unknown log level: {}".format(log_level)) return method def set_logger(is_enable: bool, propagation_depth: int = 1) -> None: if is_enable: logger.enable(MODULE_NAME) else: logger.disable(MODULE_NAME) def set_log_level(log_level): # deprecated return
Apply upper to the argument value
Apply upper to the argument value
Python
mit
thombashi/subprocrunner,thombashi/subprocrunner
5d5b59bde655fbeb2d07bd5539c2ff9b29879d1d
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
# This program uses the csv module to manipulate .csv files
import csv

# Writer Objects
outputFile = open("output.csv", "w", newline='')
outputWriter = csv.writer(outputFile)
print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham']))
print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham']))
print(outputWriter.writerow([1, 2, 3.141592, 4]))
outputFile.close()

# Delimiter and lineterminator Keyword Arguments
csvFile = open("example.tsv", 'w', newline='')
csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n')
print(csvWriter.writerow(['apples', 'oranges', 'grapes']))
print(csvWriter.writerow(['eggs', 'bacon', 'ham']))
print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam']))
csvFile.close()
"""Write CSV This program uses :py:mod:`csv` to write .csv files. Note: Creates 'output.csv' and 'example.tsv' files. """ def main(): import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() if __name__ == '__main__': main()
Update P1_writeCSV.py added docstring and wrapped in main function
Update P1_writeCSV.py added docstring and wrapped in main function
Python
mit
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
20d94336b163c1e98458f14ab44651e2df8ed659
web/social/management/commands/stream_twitter.py
web/social/management/commands/stream_twitter.py
import logging

from django.core.management.base import BaseCommand, CommandError
from django.conf import settings

from social.models import *
from social.utils import *

from tweetstream import FilterStream


class Command(BaseCommand):
    help = "Start Twitter streaming"

    def handle(self, *args, **options):
        self.logger = logging.getLogger(__name__)
        self.updater = FeedUpdater(self.logger)
        feed_ids = Feed.objects.filter(type='TW').values_list('origin_id', flat=True)
        stream = FilterStream(settings.TWITTER_USERNAME, settings.TWITTER_PASSWORD,
                              follow=feed_ids)
        self.logger.info("Waiting for tweets for %d feeds" % len(feed_ids))
        for tweet in stream:
            self.updater.process_tweet(tweet)
import logging
import time

from django.core.management.base import BaseCommand, CommandError
from django.conf import settings

from social.models import *
from social.utils import *

from tweetstream import FilterStream, ConnectionError


class Command(BaseCommand):
    help = "Start Twitter streaming"

    def handle(self, *args, **options):
        self.logger = logging.getLogger(__name__)
        self.updater = FeedUpdater(self.logger)
        feed_ids = Feed.objects.filter(type='TW').values_list('origin_id', flat=True)
        self.logger.info("Waiting for tweets for %d feeds" % len(feed_ids))
        reconnect_timeout = 1
        while True:
            stream = FilterStream(settings.TWITTER_USERNAME, settings.TWITTER_PASSWORD,
                                  follow=feed_ids)
            try:
                for tweet in stream:
                    reconnect_timeout = 1
                    self.updater.process_tweet(tweet)
            except ConnectionError as e:
                self.logger.error("%s" % e)
                reconnect_timeout = 2 * reconnect_timeout
                time.sleep(reconnect_timeout)
Add ConnectionError handling and reconnection to Twitter streamer
Add ConnectionError handling and reconnection to Twitter streamer
Python
agpl-3.0
kansanmuisti/datavaalit,kansanmuisti/datavaalit
72919640ac70da7f05ba36e345666909eb002187
python/ramldoc/django_urls.py
python/ramldoc/django_urls.py
import re as regex

from django.conf.urls import patterns, url


def build_patterns(modules, version):
    pattern_list = []
    for module in modules:
        url_string = r'^'
        url_string += str(version) + r'/'
        # NOTE, the assumption here is that get_path() is an instance of the AnnotationBaseClass:
        url_string += module.get_path_abstract() + r'$'
        url_string = regex.sub(r'{', r'(?P<', url_string)
        url_string = regex.sub(r'}', r'>[\w\s.@-]+)', url_string)
        url_string = regex.sub('[\?]?[/]?\$$', '/?$', url_string)
        pattern_list.append(url(url_string, module()))
    return patterns('', *pattern_list)
import re as regex

from django.conf.urls import patterns, url


def build_patterns(modules, version):
    pattern_list = []
    for module in modules:
        url_string = r'^'
        url_string += str(version) + r'/'
        # NOTE, the assumption here is that get_path() is an instance of the AnnotationBaseClass:
        url_string += module.get_path_abstract() + r'$'
        url_string = regex.sub(r'{', r'(?P<', url_string)
        url_string = regex.sub(r'}', r'>[^/]+)', url_string)
        url_string = regex.sub('[\?]?[/]?\$$', '/?$', url_string)
        pattern_list.append(url(url_string, module()))
    return patterns('', *pattern_list)
Fix for supporting special characters in the url.
Fix for supporting special characters in the url.
Python
apache-2.0
SungardAS-CloudDevelopers/ramldoc
e2eab36586652b2c7fe37ca96fedea89760665fc
kpcc_backroom_handshakes/urls.py
kpcc_backroom_handshakes/urls.py
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.views.generic import RedirectView
from django.contrib import admin
import os
import logging

logger = logging.getLogger("kpcc_backroom_handshakes")

admin.autodiscover()

urlpatterns = [
    url(r"^admin/doc/", include("django.contrib.admindocs.urls")),
    url(r"^admin/", include(admin.site.urls)),
    url(r"^admin/", include("massadmin.urls")),
    url(r"^elections/", include("newscast.urls", namespace="newscast")),
    url(r"^elections/", include("ballot_box.urls", namespace="ballot-box")),
    url(r"^elections/", include("measure_finance.urls", namespace="campaign-finance")),
    url(r"^elections/", include("election_registrar.urls", namespace="elections")),
    url(r"^", RedirectView.as_view(url="elections/", permanent=False)),
]
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.core.urlresolvers import reverse
from django.views.generic import TemplateView
from django.views.generic import RedirectView
from django.contrib import admin
import os
import logging

logger = logging.getLogger("kpcc_backroom_handshakes")

admin.autodiscover()

urlpatterns = [
    url(r"^admin/doc/", include("django.contrib.admindocs.urls")),
    url(r"^admin/", include(admin.site.urls)),
    url(r"^admin/", include("massadmin.urls")),
    url(r"^elections/", include("newscast.urls", namespace="newscast")),
    url(r"^elections/", include("ballot_box.urls", namespace="ballot-box")),
    url(r"^elections/", include("measure_finance.urls", namespace="campaign-finance")),
    url(r"^elections/", include("election_registrar.urls", namespace="elections")),
    # url(r"^", RedirectView.as_view(url="elections/", permanent=False)),
]
Disable redirect that prevented access to the admin panel.
Disable redirect that prevented access to the admin panel.
Python
mit
SCPR/kpcc_backroom_handshakes,SCPR/kpcc_backroom_handshakes,SCPR/kpcc_backroom_handshakes
415e34fb913feaf623320827fb7680ee56c9d335
gunicorn.conf.py
gunicorn.conf.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

bind = '127.0.0.1:8001'
workers = 6
proc_name = 'lastuser'
#!/usr/bin/env python
# -*- coding: utf-8 -*-

bind = '127.0.0.1:8002'
workers = 6
proc_name = 'lastuser'
Use a different port for lastuser
Use a different port for lastuser
Python
bsd-2-clause
hasgeek/lastuser,hasgeek/lastuser,sindhus/lastuser,hasgeek/lastuser,sindhus/lastuser,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/lastuser,sindhus/lastuser,sindhus/lastuser,sindhus/lastuser,hasgeek/funnel,hasgeek/lastuser
6bf762b7aeabcb47571fe4d23fe13ae8e4b3ebc3
editorsnotes/main/views.py
editorsnotes/main/views.py
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required

from models import Term, Reference

@login_required
def index(request):
    o = {}
    o['term_list'] = Term.objects.all()
    return render_to_response('index.html', o)

@login_required
def term(request, slug):
    o = {}
    o['contact'] = { 'name': settings.ADMINS[0][0], 'email': settings.ADMINS[0][1] }
    o['term'] = Term.objects.get(slug=slug)
    o['note_list'] = list(o['term'].note_set.filter(type__exact='N'))
    o['query_list'] = list(o['term'].note_set.filter(type__exact='Q'))
    o['note_dict'] = [ (n, n.references.all()) for n in o['note_list'] ]
    o['query_dict'] = [ (q, q.references.all()) for q in o['query_list'] ]
    for note in o['note_list'] + o['query_list']:
        if ('last_updated' not in o) or (note.last_updated > o['last_updated']):
            o['last_updated'] = note.last_updated
            o['last_updater'] = note.last_updater.username
            o['last_updated_display'] = note.last_updated_display()
    return render_to_response('term.html', o)
from django.conf import settings
from django.http import HttpResponse
from django.shortcuts import render_to_response, get_object_or_404
from django.contrib.auth.decorators import login_required

from models import Term, Reference

@login_required
def index(request):
    o = {}
    o['term_list'] = Term.objects.all()
    return render_to_response('index.html', o)

@login_required
def term(request, slug):
    o = {}
    o['term'] = get_object_or_404(Term, slug=slug)
    o['contact'] = { 'name': settings.ADMINS[0][0], 'email': settings.ADMINS[0][1] }
    o['note_list'] = list(o['term'].note_set.filter(type__exact='N'))
    o['query_list'] = list(o['term'].note_set.filter(type__exact='Q'))
    o['note_dict'] = [ (n, n.references.all()) for n in o['note_list'] ]
    o['query_dict'] = [ (q, q.references.all()) for q in o['query_list'] ]
    for note in o['note_list'] + o['query_list']:
        if ('last_updated' not in o) or (note.last_updated > o['last_updated']):
            o['last_updated'] = note.last_updated
            o['last_updater'] = note.last_updater.username
            o['last_updated_display'] = note.last_updated_display()
    return render_to_response('term.html', o)
Throw 404 for non-existent terms.
Throw 404 for non-existent terms.
Python
agpl-3.0
editorsnotes/editorsnotes,editorsnotes/editorsnotes
2bf3e59e5ec0ca5d1003cd52f06a8f12a5ee7caf
tools/metrics/apk_size.py
tools/metrics/apk_size.py
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# Check APK file size for limit

from os import path, listdir, stat
from sys import exit

SIZE_LIMIT = 4194304

PATH = path.join(path.dirname(path.abspath(__file__)), '../../app/build/outputs/apk/')

files = []
try:
    files = [f for f in listdir(PATH)
             if path.isfile(path.join(PATH, f)) and f.endswith('.apk') and "release" in f]
except OSError as e:
    if e.errno == 2:
        print("Directory is missing, build apk first!")
        exit(1)
    print("Unknown error: {err}".format(err=str(e)))
    exit(2)

for apk_file in files:
    file_size = stat(path.join(PATH, apk_file)).st_size

    if file_size > SIZE_LIMIT:
        print(" * [TOOBIG] {filename} ({filesize} > {sizelimit})".format(
            filename=apk_file, filesize=file_size, sizelimit=SIZE_LIMIT
        ))
        exit(27)

    print(" * [OKAY] {filename} ({filesize} <= {sizelimit})".format(
        filename=apk_file, filesize=file_size, sizelimit=SIZE_LIMIT
    ))
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# Check APK file size for limit

from os import path, listdir, stat
from sys import exit

SIZE_LIMIT = 4500000

PATH = path.join(path.dirname(path.abspath(__file__)), '../../app/build/outputs/apk/')

files = []
try:
    files = [f for f in listdir(PATH)
             if path.isfile(path.join(PATH, f)) and f.endswith('.apk') and "release" in f]
except OSError as e:
    if e.errno == 2:
        print("Directory is missing, build apk first!")
        exit(1)
    print("Unknown error: {err}".format(err=str(e)))
    exit(2)

for apk_file in files:
    file_size = stat(path.join(PATH, apk_file)).st_size

    if file_size > SIZE_LIMIT:
        print(" * [TOOBIG] {filename} ({filesize} > {sizelimit})".format(
            filename=apk_file, filesize=file_size, sizelimit=SIZE_LIMIT
        ))
        exit(27)

    print(" * [OKAY] {filename} ({filesize} <= {sizelimit})".format(
        filename=apk_file, filesize=file_size, sizelimit=SIZE_LIMIT
    ))
Increase apk size limit to 4.5MB
Increase apk size limit to 4.5MB
Python
mpl-2.0
pocmo/focus-android,mozilla-mobile/focus-android,ekager/focus-android,pocmo/focus-android,mastizada/focus-android,liuche/focus-android,pocmo/focus-android,pocmo/focus-android,jonalmeida/focus-android,mozilla-mobile/focus-android,jonalmeida/focus-android,Benestar/focus-android,mozilla-mobile/focus-android,mastizada/focus-android,liuche/focus-android,Benestar/focus-android,liuche/focus-android,liuche/focus-android,jonalmeida/focus-android,ekager/focus-android,mastizada/focus-android,pocmo/focus-android,ekager/focus-android,pocmo/focus-android,jonalmeida/focus-android,liuche/focus-android,jonalmeida/focus-android,mastizada/focus-android,mozilla-mobile/focus-android,ekager/focus-android,liuche/focus-android,ekager/focus-android,Benestar/focus-android,Benestar/focus-android,ekager/focus-android,mozilla-mobile/focus-android,Benestar/focus-android,mozilla-mobile/focus-android,mastizada/focus-android,jonalmeida/focus-android
1fa22ca68394d4ce55a4e10aa7c23f7bcfa02f79
zc_common/remote_resource/mixins.py
zc_common/remote_resource/mixins.py
""" Class Mixins. """ from django.db import IntegrityError from django.http import Http404 class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'ids' in self.request.query_params: query_param_ids = self.request.query_params.get('ids') ids = [] if not query_param_ids else query_param_ids.split(',') try: self.queryset = self.queryset.filter(pk__in=ids) except (ValueError, IntegrityError): raise Http404 return self.queryset
""" Class Mixins. """ from django.db import IntegrityError from django.http import Http404 class MultipleIDMixin(object): """ Override get_queryset for multiple id support """ def get_queryset(self): """ Override :meth:``get_queryset`` """ if hasattr(self.request, 'query_params') and 'filter[id]' in self.request.query_params: query_param_ids = self.request.query_params.get('filter[id]') ids = [] if not query_param_ids else query_param_ids.split(',') try: self.queryset = self.queryset.filter(pk__in=ids) except (ValueError, IntegrityError): raise Http404 return self.queryset
Update query param for mixin
Update query param for mixin
Python
mit
ZeroCater/zc_common,ZeroCater/zc_common
c5f6a9632b6d996fc988bfc9317915208ff69a42
domain/companies.py
domain/companies.py
# -*- coding: utf-8 -*-

"""
    'companies' resource and schema settings.

    :copyright: (c) 2014 by Nicola Iarocci and CIR2000.
    :license: BSD, see LICENSE for more details.
"""
from common import required_string

_schema = {
    # company id ('id')
    'n': required_string,                       # name
    'p': {'type': 'string', 'nullable': True},  # password
}

definition = {
    'url': 'companies',
    'item_title': 'company',
    # 'additional_lookup': company_lookup,
    'schema': _schema,
}
# -*- coding: utf-8 -*-

"""
    'companies' resource and schema settings.

    :copyright: (c) 2014 by Nicola Iarocci and CIR2000.
    :license: BSD, see LICENSE for more details.
"""
from common import required_string

_schema = {
    # company id ('id')
    'name': required_string,
    'password': {'type': 'string', 'nullable': True},
    'state_or_province': {'type': 'string', 'nullable': True},
}

definition = {
    'url': 'companies',
    'item_title': 'company',
    # 'additional_lookup': company_lookup,
    'schema': _schema,
}
Add a snake_cased field to the test document.
Add a snake_cased field to the test document.
Python
bsd-3-clause
nicolaiarocci/Eve.NET-testbed
9502de0e6be30e4592f4f0cf141abc27db64ccf4
dependencies.py
dependencies.py
import os
import pkgutil
import site

if pkgutil.find_loader("gi"):
    try:
        import gi
        print('Found gi:', os.path.abspath(gi.__file__))
        gi.require_version('Gst', '1.0')
        # from gi.repository import GLib, Gst
    except ValueError:
        print('Couldn\'t find Gst')
        print('Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'')
        return False
    print('Environment seems to be ok.')
else:
    print('No gi installed', '\n',
          'Please run \'sudo apt-get install python3-gi\'', '\n',
          'A virtual environment might need extra actions like symlinking, ', '\n',
          'you might need to do a symlink looking similar to this:', '\n',
          'ln -s /usr/lib/python3/dist-packages/gi ',
          '/srv/homeassistant/lib/python3.4/site-packages', '\n',
          'run this script inside and outside of the virtual environment to find the paths needed')
    print(site.getsitepackages())
import os
import pkgutil
import site
from sys import exit

if pkgutil.find_loader('gi'):
    try:
        import gi
        print("Found gi at:", os.path.abspath(gi.__file__))
        gi.require_version('Gst', '1.0')
        # from gi.repository import Gst
    except ValueError:
        print("Couldn\'t find Gst", '\n',
              "Please run \'sudo apt-get install gir1.2-gstreamer-1.0\'")
        exit(False)
    print("Environment seems to be ok.")
else:
    print("No gi available in this environment", '\n',
          "Please run \'sudo apt-get install python3-gi\'", '\n',
          "A virtual environment might need extra actions like symlinking,", '\n',
          "you might need to do a symlink looking similar to this:", '\n',
          "ln -s /usr/lib/python3/dist-packages/gi",
          "/srv/homeassistant/lib/python3.4/site-packages", '\n',
          "run this script inside and outside of the virtual environment",
          "to find the paths needed")
    print(site.getsitepackages())
Clean up of text Proper exit when exception has been raised
Clean up of text Proper exit when exception has been raised
Python
mit
Kane610/axis
099892e56f02c683879d05625b1215212fda7c9e
links/views.py
links/views.py
from django.shortcuts import render, redirect
from django.http import Http404

from .models import Link, LinkForm

from urlparse import urlparse


def catchall(request, id):
    try:
        link = Link.objects.get(id=id)
        return redirect(link.url)
    except:
        parsed = urlparse(id)
        if parsed.netloc:
            link = Link(url=id)
            link.save();
            context = {'form': LinkForm}
            context['short_url'] = "http://" + str(request.get_host()) + "/" + str(link.id)
            return render(request, 'index.html', context)
        raise Http404("Link does not exist")


def home(request):
    context = {'form': LinkForm}
    if 'link' in request.POST:
        link = Link(link=request.POST['url'])
        link.save();
        context['short_url'] = "http://" + str(request.get_host()) + "/" + str(link.id)
    return render(request, 'index.html', context)
from django.shortcuts import render, redirect
from django.http import Http404

from .models import Link, LinkForm

from urlparse import urlparse


def catchall(request, id):
    try:
        link = Link.objects.get(id=id)
        return redirect(link.url)
    except:
        parsed = urlparse(id)
        if parsed.netloc:
            link = Link(url=id)
            link.save();
            context = {'form': LinkForm}
            context['short_url'] = "http://" + str(request.get_host()) + "/" + str(link.id)
            return render(request, 'index.html', context)
        raise Http404("Link does not exist")


def home(request):
    context = {'form': LinkForm}
    if 'url' in request.POST:
        link = Link(url=request.POST['url'])
        link.save();
        context['short_url'] = "http://" + str(request.get_host()) + "/" + str(link.id)
    return render(request, 'index.html', context)
Update post handling after migration
Update post handling after migration
Python
mit
kudos/min.ie,kudos/min.ie