commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
562a0868b3648e3ba40c29289ba7f4ebd4c75800
|
pyinfra/api/__init__.py
|
pyinfra/api/__init__.py
|
# pyinfra
# File: pyinfra/api/__init__.py
# Desc: import some stuff
from .config import Config # noqa: F401
from .deploy import deploy # noqa: F401
from .exceptions import ( # noqa: F401
DeployError,
InventoryError,
OperationError,
)
from .facts import FactBase # noqa: F401
from .inventory import Inventory # noqa: F401
from .operation import operation # noqa: F401
from .state import State # noqa: F401
|
# pyinfra
# File: pyinfra/api/__init__.py
# Desc: import some stuff
from .config import Config # noqa: F401
from .deploy import deploy # noqa: F401
from .exceptions import ( # noqa: F401
DeployError,
InventoryError,
OperationError,
)
from .facts import FactBase, ShortFactBase # noqa: F401
from .inventory import Inventory # noqa: F401
from .operation import operation # noqa: F401
from .state import State # noqa: F401
|
Add `ShortFactBase` import to `pyinfra.api`.
|
Add `ShortFactBase` import to `pyinfra.api`.
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
eebb025f4466a5c26bc99f9a1f24a5f14ca14387
|
pycom/tests/test_logging_dict_config.py
|
pycom/tests/test_logging_dict_config.py
|
# encoding: utf-8
import unittest
from pycom.logging_dict_config import get_config
class LoggingTests(unittest.TestCase):
def test_get_config_project(self):
config = get_config("project")
left = "/var/log/project/project.log"
right = config["handlers"]["file"]["filename"]
self.assertEqual(left, right)
def test_get_config_config(self):
conf = {
"root": {
"level": "INFO",
}
}
config = get_config("project", config=conf)
self.assertEqual("INFO", config["root"]["level"])
|
Add the test of `logging_dict_config`.
|
Add the test of `logging_dict_config`.
|
Python
|
mit
|
xgfone/xutils,xgfone/pycom
|
|
6854f128461c5e94e90e12183ab91c4cc4a8724b
|
tools/grit/grit/extern/FP.py
|
tools/grit/grit/extern/FP.py
|
#!/usr/bin/python2.2
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import md5
"""64-bit fingerprint support for strings.
Usage:
from extern import FP
print 'Fingerprint is %ld' % FP.FingerPrint('Hello world!')
"""
def UnsignedFingerPrint(str, encoding='utf-8'):
"""Generate a 64-bit fingerprint by taking the first half of the md5
of the string."""
hex128 = md5.new(str).hexdigest()
int64 = long(hex128[:16], 16)
return int64
def FingerPrint(str, encoding='utf-8'):
fp = UnsignedFingerPrint(str, encoding=encoding)
# interpret fingerprint as signed longs
if fp & 0x8000000000000000L:
fp = - ((~fp & 0xFFFFFFFFFFFFFFFFL) + 1)
return fp
|
#!/usr/bin/python
# Copyright (c) 2006-2008 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import md5
"""64-bit fingerprint support for strings.
Usage:
from extern import FP
print 'Fingerprint is %ld' % FP.FingerPrint('Hello world!')
"""
def UnsignedFingerPrint(str, encoding='utf-8'):
"""Generate a 64-bit fingerprint by taking the first half of the md5
of the string."""
hex128 = md5.new(str).hexdigest()
int64 = long(hex128[:16], 16)
return int64
def FingerPrint(str, encoding='utf-8'):
fp = UnsignedFingerPrint(str, encoding=encoding)
# interpret fingerprint as signed longs
if fp & 0x8000000000000000L:
fp = - ((~fp & 0xFFFFFFFFFFFFFFFFL) + 1)
return fp
|
Remove version number from Python shebang.
|
Remove version number from Python shebang.
On special request from someone trying to purge python2.2 from code indexed
internally at Google.
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@7071 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
hgl888/chromium-crosswalk,zcbenz/cefode-chromium,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,timopulkkinen/BubbleFish,mogoweb/chromium-crosswalk,keishi/chromium,nacl-webkit/chrome_deps,dednal/chromium.src,ondra-novak/chromium.src,robclark/chromium,ltilve/chromium,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,dednal/chromium.src,rogerwang/chromium,M4sse/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,rogerwang/chromium,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,robclark/chromium,rogerwang/chromium,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,hujiajie/pa-chromium,junmin-zhu/chromium-rivertrail,zcbenz/cefode-chromium,dushu1203/chromium.src,M4sse/chromium.src,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,ondra-novak/chromium.src,robclark/chromium,jaruba/chromium.src,Jonekee/chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,rogerwang/chromium,fujunwei/chromium-crosswalk,Jonekee/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,hgl888/chromium-crosswalk,timopulkkinen/BubbleFish,anirudhSK/chromium,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,keishi/chromium,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,junmin-zhu/chromium-rivertrail,dushu1203/chromium.src,dednal/chromium.src,hujiajie/pa-chromium,jaruba/chromium.src,junmin-zhu/chromium-r
ivertrail,patrickm/chromium.src,markYoungH/chromium.src,ChromiumWebApps/chromium,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hujiajie/pa-chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,keishi/chromium,littlstar/chromium.src,hgl888/chromium-crosswalk,keishi/chromium,ChromiumWebApps/chromium,mogoweb/chromium-crosswalk,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,markYoungH/chromium.src,dednal/chromium.src,dushu1203/chromium.src,Just-D/chromium-1,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,zcbenz/cefode-chromium,krieger-od/nwjs_chromium.src,robclark/chromium,robclark/chromium,rogerwang/chromium,axinging/chromium-crosswalk,Just-D/chromium-1,patrickm/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,rogerwang/chromium,hgl888/chromium-crosswalk,rogerwang/chromium,dushu1203/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,crosswalk-project/chromium-crosswalk-efl,robclark/chromium,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,keishi/chromium,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,littlstar/chromium.src,nacl-webkit/chrome_deps,Fireblend/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,patrickm
/chromium.src,Chilledheart/chromium,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,ltilve/chromium,nacl-webkit/chrome_deps,chuan9/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,mogoweb/chromium-crosswalk,rogerwang/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,Chilledheart/chromium,mogoweb/chromium-crosswalk,dednal/chromium.src,Chilledheart/chromium,littlstar/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,ChromiumWebApps/chromium,Just-D/chromium-1,nacl-webkit/chrome_deps,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,zcbenz/cefode-chromium,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,M4sse/chromium.src,littlstar/chromium.src,robclark/chromium,Jonekee/chromium.src,M4sse/chromium.src,littlstar/chromium.src,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,ChromiumWebApps/chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,dushu1203/chromium.src,fujunwei/chromium-crosswalk,mogoweb/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,Pluto-tv/chromium-crosswalk,robclark/chromium,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,keishi/chromium,dednal/chromium.src,junmin-zhu/chromium-rivertrail,PeterWangIntel/chromium-crosswalk,crosswalk-project/ch
romium-crosswalk-efl,rogerwang/chromium,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,rogerwang/chromium,Pluto-tv/chromium-crosswalk,ltilve/chromium,ltilve/chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,nacl-webkit/chrome_deps,littlstar/chromium.src,Just-D/chromium-1,zcbenz/cefode-chromium,dednal/chromium.src,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,ltilve/chromium,keishi/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,robclark/chromium,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,hujiajie/pa-chromium,M4sse/chromium.src,M4sse/chromium.src,anirudhSK/chromium,axinging/chromium-crosswalk,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,littlstar/chromium.src,jaruba/chromium.src,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk,Just-D/chromium-1,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,hujiajie/pa-chromium,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,patrickm/chromium.src,keishi/chromium,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,junmin-zhu/chromium-rivertrail,chuan9/chromium-crosswalk,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,anirudhSK/chromium,zcbenz/cefode-chromium,keishi/chromium,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,anirudhSK/chromium,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,PeterWangIntel/chromium-crosswa
lk,mogoweb/chromium-crosswalk,dednal/chromium.src,axinging/chromium-crosswalk,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,dednal/chromium.src,pozdnyakov/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,jaruba/chromium.src,keishi/chromium,jaruba/chromium.src,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,anirudhSK/chromium,Jonekee/chromium.src,keishi/chromium,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,junmin-zhu/chromium-rivertrail,nacl-webkit/chrome_deps,timopulkkinen/BubbleFish,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,ltilve/chromium,axinging/chromium-crosswalk,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,M4sse/chromium.src,patrickm/chromium.src,timopulkkinen/BubbleFish,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,robclark/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,anirudhSK/chromium,Jonekee/chromium.src,dushu1203/chromium.src,junmin-zhu/chromium-rivertrail,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,M4sse/chromium.src
|
be1199ac1f6c086c17c4c39d54ca1e160d0508cf
|
test/win/gyptest-link-pdb.py
|
test/win/gyptest-link-pdb.py
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the 'Profile' attribute in VCLinker is extracted properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('program-database.gyp', chdir=CHDIR)
test.build('program-database.gyp', test.ALL, chdir=CHDIR)
def FindFile(pdb):
full_path = test.built_file_path(pdb, chdir=CHDIR)
return os.path.isfile(full_path)
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
if not FindFile('name_set.pdb'):
test.fail_test()
else:
test.pass_test()
|
Insert empty line at to fix patch.
|
Insert empty line at to fix patch.
gyptest-link-pdb.py was checked in without a blank line. This appears
to cause a patch issue with the try bots. This CL is only a whitespace
change to attempt to fix that problem.
SEE:
patching file test/win/gyptest-link-pdb.py
Hunk #1 FAILED at 26.
1 out of 1 hunk FAILED -- saving rejects to file test/win/gyptest-link-pdb.py.rej
===================================================================
--- test/win/gyptest-link-pdb.py (revision 1530)
+++ test/win/gyptest-link-pdb.py (working copy)
@@ -26,7 +26,9 @@
# Verify the specified PDB is created when ProgramDatabaseFile
# is provided.
- if not FindFile('name_set.pdb'):
+ if not FindFile('name_outdir.pdb'):
test.fail_test()
- else:
- test.pass_test()
\ No newline at end of file
+ if not FindFile('name_proddir.pdb'):
+ test.fail_test()
+
+ test.pass_test()
Index: test/win/linker-flags/program-database.gyp
[email protected]
Review URL: https://codereview.chromium.org/11368061
|
Python
|
bsd-3-clause
|
csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp
|
663a61362c30b737f2532de42b5b680795ccf608
|
quran_text/models.py
|
quran_text/models.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
class Sura(models.Model):
"""
Model to hold the Quran Chapters "Sura"
"""
index = models.PositiveIntegerField(primary_key=True)
name = models.CharField(max_length=20, unique=True, verbose_name=_('Sura'))
def __str__(self):
return self.name
class Meta:
ordering = ['index']
class Ayah(models.Model):
"""
Model to hold chapters' text ot Verse "Ayat"
"""
number = models.PositiveIntegerField(verbose_name=_('Number'))
sura = models.ForeignKey(Sura, related_name='ayat')
text = models.TextField()
def __str__(self):
return '{} - {}'.format(self.sura.index, self.number)
class Meta:
unique_together = ['number', 'sura']
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
from django.db import models
class Sura(models.Model):
"""
Model to hold the Quran Chapters "Sura"
"""
index = models.PositiveIntegerField(primary_key=True)
name = models.CharField(max_length=20, unique=True, verbose_name=_('Sura'))
def __str__(self):
return self.name
class Meta:
ordering = ['index']
class Ayah(models.Model):
"""
Model to hold chapters' text ot Verse "Ayat"
"""
number = models.PositiveIntegerField(verbose_name=_('Number'))
sura = models.ForeignKey(Sura, related_name='ayat')
text = models.TextField()
def __str__(self):
return '{} - {}'.format(self.sura.index, self.number)
class Meta:
unique_together = ['number', 'sura']
ordering = ['sura', 'number']
|
Add ordering to Ayah model
|
Add ordering to Ayah model
|
Python
|
mit
|
EmadMokhtar/tafseer_api
|
ddabef55b9dde75af422d4dedb2d5578d7019905
|
tests/test_authentication.py
|
tests/test_authentication.py
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def test_registration_with_missing_dredentials(self):
"""Should throw error for missing credentials"""
user = json.dumps({
'name': '',
'email': '',
'password': ''
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
import unittest
from flask import json
from api import db
from api.BucketListAPI import app
from instance.config import application_config
class AuthenticationTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
def test_index_route(self):
response = self.client.get('/')
self.assertEqual(response.status_code, 201)
self.assertIn('Welcome Message', response.data.decode())
def test_registration_with_missing_dredentials(self):
"""Should throw error for missing credentials"""
user = json.dumps({
'name': '',
'email': '',
'password': ''
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Missing', response.data.decode())
def test_registration_with_invalid_email(self):
"""Should return invalid email"""
user = json.dumps({
'name': 'Patrick',
'email': 'pato',
'password': 'pat'
})
response = self.client.post('/auth/register', data=user)
self.assertEqual(response.status_code, 400)
self.assertIn('Invalid Email', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
|
Add test for invalid email
|
Add test for invalid email
|
Python
|
mit
|
patlub/BucketListAPI,patlub/BucketListAPI
|
adcba0285ef700738a63986c7657bd9e5ac85d85
|
wikipendium/user/forms.py
|
wikipendium/user/forms.py
|
from django.forms import Form, CharField, ValidationError
from django.contrib.auth.models import User
class UserChangeForm(Form):
username = CharField(max_length=30, label='New username')
def clean(self):
cleaned_data = super(UserChangeForm, self).clean()
if User.objects.filter(username=cleaned_data['username']).count():
raise ValidationError('Username already taken!')
return cleaned_data
class EmailChangeForm(Form):
email = CharField(max_length=75, label='New email')
|
from django.forms import Form, CharField, EmailField, ValidationError
from django.contrib.auth.models import User
class UserChangeForm(Form):
username = CharField(max_length=30, label='New username')
def clean(self):
cleaned_data = super(UserChangeForm, self).clean()
if User.objects.filter(username=cleaned_data['username']).count():
raise ValidationError('Username already taken!')
return cleaned_data
class EmailChangeForm(Form):
email = EmailField(max_length=75, label='New email')
|
Use EmailField for email validation
|
Use EmailField for email validation
|
Python
|
apache-2.0
|
stianjensen/wikipendium.no,stianjensen/wikipendium.no,stianjensen/wikipendium.no
|
945baec1540ff72b85b3d0563511d93cb33d660e
|
nbgrader/tests/formgrader/fakeuser.py
|
nbgrader/tests/formgrader/fakeuser.py
|
import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# don't forward signals
os.setpgrp()
# start in the cwd
os.chdir(home)
return preexec
|
import os
from jupyterhub.auth import LocalAuthenticator
from jupyterhub.spawner import LocalProcessSpawner
from tornado import gen
class FakeUserAuth(LocalAuthenticator):
"""Authenticate fake users"""
@gen.coroutine
def authenticate(self, handler, data):
"""If the user is on the whitelist, authenticate regardless of password.
If not, then don't authenticate.
"""
username = data['username']
if not self.check_whitelist(username):
return
return username
@staticmethod
def system_user_exists(user):
return True
class FakeUserSpawner(LocalProcessSpawner):
def user_env(self, env):
env['USER'] = self.user.name
env['HOME'] = os.getcwd()
env['SHELL'] = '/bin/bash'
return env
def make_preexec_fn(self, name):
home = os.getcwd()
def preexec():
# start in the cwd
os.chdir(home)
return preexec
|
Remove os.setpgrp() from fake spawner
|
Remove os.setpgrp() from fake spawner
|
Python
|
bsd-3-clause
|
jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,ellisonbg/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,jhamrick/nbgrader,jupyter/nbgrader
|
1bbf986cbde2d0ec8add3ac845cb10fcd061e46d
|
nodeconductor/server/test_settings.py
|
nodeconductor/server/test_settings.py
|
# Django test settings for nodeconductor project.
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
|
# Django test settings for nodeconductor project.
from nodeconductor.server.doc_settings import *
INSTALLED_APPS += (
'nodeconductor.quotas.tests',
'nodeconductor.structure.tests',
)
ROOT_URLCONF = 'nodeconductor.structure.tests.urls'
# XXX: This option should be removed after itacloud assembly creation.
NODECONDUCTOR['IS_ITACLOUD'] = True
|
Add "IS_ITACLOUD" flag to settings
|
Add "IS_ITACLOUD" flag to settings
- itacloud-7125
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
cdfd622f4e7017ab1860e1f7420d6f26424a69f1
|
dashboard_app/extension.py
|
dashboard_app/extension.py
|
from lava_server.extension import LavaServerExtension
class DashboardExtension(LavaServerExtension):
@property
def app_name(self):
return "dashboard_app"
@property
def name(self):
return "Dashboard"
@property
def main_view_name(self):
return "dashboard_app.views.bundle_stream_list"
@property
def description(self):
return "Validation Dashboard"
@property
def version(self):
import versiontools
import dashboard_app
return versiontools.format_version(dashboard_app.__version__)
def contribute_to_settings(self, settings):
super(DashboardExtension, self).contribute_to_settings(settings)
settings['INSTALLED_APPS'].extend([
"linaro_django_pagination",
"south",
])
settings['MIDDLEWARE_CLASSES'].append(
'linaro_django_pagination.middleware.PaginationMiddleware')
settings['RESTRUCTUREDTEXT_FILTER_SETTINGS'] = {
"initial_header_level": 4}
def contribute_to_settings_ex(self, settings_module, settings_object):
settings_module['DATAVIEW_DIRS'] = settings_object._settings.get(
"DATAVIEW_DIRS", [])
settings_module['DATAREPORT_DIRS'] = settings_object._settings.get(
"DATAREPORT_DIRS", [])
|
from lava_server.extension import LavaServerExtension
class DashboardExtension(LavaServerExtension):
@property
def app_name(self):
return "dashboard_app"
@property
def name(self):
return "Dashboard"
@property
def main_view_name(self):
return "dashboard_app.views.bundle_stream_list"
@property
def description(self):
return "Validation Dashboard"
@property
def version(self):
import versiontools
import dashboard_app
return versiontools.format_version(dashboard_app.__version__)
def contribute_to_settings(self, settings_module):
super(DashboardExtension, self).contribute_to_settings(settings_module)
settings_module['INSTALLED_APPS'].extend([
"linaro_django_pagination",
"south",
])
settings_module['MIDDLEWARE_CLASSES'].append(
'linaro_django_pagination.middleware.PaginationMiddleware')
def contribute_to_settings_ex(self, settings_module, settings_object):
settings_module['DATAVIEW_DIRS'] = settings_object._settings.get(
"DATAVIEW_DIRS", [])
settings_module['DATAREPORT_DIRS'] = settings_object._settings.get(
"DATAREPORT_DIRS", [])
# Enable constrained dataview database if requested
if settings_object._settings.get("use_dataview_database"):
# Copy everything from the default database and append _dataview to user
# name. The rest is out of scope (making sure it's actually setup
# properly, having permissions to login, permissions to view proper data)
settings_module['DATABASES']['dataview'] = dict(settings_module['DATABASES']['default'])
settings_module['DATABASES']['dataview']['USER'] += "_dataview"
|
Move support for dataview-specific database from lava-server
|
Move support for dataview-specific database from lava-server
|
Python
|
agpl-3.0
|
Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server
|
419d2ca4d53e33c58d556b45bcc6910bd28ef91a
|
djangae/apps.py
|
djangae/apps.py
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from .patches.contenttypes import patch
patch()
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context)
request_started.connect(reset_context)
|
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _
class DjangaeConfig(AppConfig):
name = 'djangae'
verbose_name = _("Djangae")
def ready(self):
from .patches.contenttypes import patch
patch()
from djangae.db.backends.appengine.caching import reset_context
from django.core.signals import request_finished, request_started
request_finished.connect(reset_context, dispatch_uid="request_finished_context_reset")
request_started.connect(reset_context, dispatch_uid="request_started_context_reset")
|
Make sure we only connect to the signals onces
|
Make sure we only connect to the signals onces
|
Python
|
bsd-3-clause
|
kirberich/djangae,asendecka/djangae,asendecka/djangae,SiPiggles/djangae,wangjun/djangae,potatolondon/djangae,kirberich/djangae,SiPiggles/djangae,SiPiggles/djangae,leekchan/djangae,armirusco/djangae,chargrizzle/djangae,trik/djangae,grzes/djangae,armirusco/djangae,jscissr/djangae,trik/djangae,jscissr/djangae,wangjun/djangae,asendecka/djangae,leekchan/djangae,chargrizzle/djangae,wangjun/djangae,grzes/djangae,trik/djangae,potatolondon/djangae,grzes/djangae,jscissr/djangae,chargrizzle/djangae,kirberich/djangae,armirusco/djangae,leekchan/djangae
|
b7bd7a0412566509f55d0cadc422aa8cfc502975
|
product_configurator_mrp/models/product.py
|
product_configurator_mrp/models/product.py
|
# -*- coding: utf-8 -*-
from openerp import models, api
class ProductTemplate(models.Model):
_inherit = 'product.template'
@api.multi
def create_variant(self, value_ids, custom_values=None):
"""Add bill of matrials to the configured variant."""
if custom_values is None:
custom_values = {}
variant = super(ProductTemplate, self).create_variant(
value_ids, custom_values=custom_values
)
attr_products = variant.attribute_value_ids.mapped('product_id')
line_vals = [
(0, 0, {'product_id': product.id}) for product in attr_products
]
values = {
'product_tmpl_id': self.id,
'product_id': variant.id,
'bom_line_ids': line_vals
}
self.env['mrp.bom'].create(values)
return variant
|
# -*- coding: utf-8 -*-
from openerp import models, api
class ProductTemplate(models.Model):
_inherit = 'product.template'
@api.multi
def create_get_variant(self, value_ids, custom_values=None):
"""Add bill of matrials to the configured variant."""
if custom_values is None:
custom_values = {}
variant = super(ProductTemplate, self).create_get_variant(
value_ids, custom_values=custom_values
)
attr_products = variant.attribute_value_ids.mapped('product_id')
line_vals = [
(0, 0, {'product_id': product.id}) for product in attr_products
]
values = {
'product_tmpl_id': self.id,
'product_id': variant.id,
'bom_line_ids': line_vals
}
self.env['mrp.bom'].create(values)
return variant
|
Fix creation of BOM when create Variant
|
Fix creation of BOM when create Variant
|
Python
|
agpl-3.0
|
pledra/odoo-product-configurator,pledra/odoo-product-configurator,pledra/odoo-product-configurator
|
d81a68a46fbdc98f803c94a2123b48cca6f5da31
|
tests/aqdb/test_rebuild.py
|
tests/aqdb/test_rebuild.py
|
#!/ms/dist/python/PROJ/core/2.5.4-0/bin/python
"""Test module for rebuilding the database."""
import os
import __init__
import aquilon.aqdb.depends
import nose
import unittest
from subprocess import Popen, PIPE
class TestRebuild(unittest.TestCase):
def testrebuild(self):
env = {}
for (key, value) in os.environ.items():
env[key] = value
cmd = ['./build_db.py', '--delete', '--populate']
_DIR = os.path.dirname(os.path.realpath(__file__))
p = Popen(cmd, stdout=1, stderr=2, env=env, cwd=_DIR)
(out, err) = p.communicate()
self.assertEqual(p.returncode, 0, "Database rebuild failed:\n%s" % err)
if __name__=='__main__':
nose.runmodule()
# Copyright (C) 2008 Morgan Stanley
# This module is part of Aquilon
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
|
#!/ms/dist/python/PROJ/core/2.5.4-0/bin/python
"""Test module for rebuilding the database."""
import os
import __init__
import aquilon.aqdb.depends
import nose
import unittest
from subprocess import Popen, PIPE
from aquilon.config import Config
class TestRebuild(unittest.TestCase):
def testrebuild(self):
env = {}
for (key, value) in os.environ.items():
env[key] = value
env["AQDCONF"] = Config().baseconfig
cmd = ['./build_db.py', '--delete', '--populate']
_DIR = os.path.dirname(os.path.realpath(__file__))
p = Popen(cmd, stdout=1, stderr=2, env=env, cwd=_DIR)
(out, err) = p.communicate()
self.assertEqual(p.returncode, 0, "Database rebuild failed:\n%s" % err)
if __name__=='__main__':
nose.runmodule()
# Copyright (C) 2008 Morgan Stanley
# This module is part of Aquilon
# ex: set expandtab softtabstop=4 shiftwidth=4: -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
|
Fix aqdb rebuild to work when not using AQDCONF env variable.
|
Fix aqdb rebuild to work when not using AQDCONF env variable.
|
Python
|
apache-2.0
|
guillaume-philippon/aquilon,quattor/aquilon,quattor/aquilon,stdweird/aquilon,stdweird/aquilon,guillaume-philippon/aquilon,quattor/aquilon,stdweird/aquilon,guillaume-philippon/aquilon
|
2db81321de1c506d6b61d8851de9ad4794deba3e
|
lmj/sim/base.py
|
lmj/sim/base.py
|
# Copyright (c) 2013 Leif Johnson <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''Base classes for simulations.'''
class World(object):
'''World is a small base class for simulation worlds.'''
def needs_reset(self):
'''Return True iff the world needs to be reset.'''
return False
def reset(self):
'''Reset the world state.'''
pass
def trace(self):
'''Return a string containing world state for later analysis.'''
return None
def step(self):
'''Advance the world simulation by one time step.'''
raise NotImplementedError
|
# Copyright (c) 2013 Leif Johnson <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
'''Base classes for simulations.'''
class World(object):
'''World is a small base class for simulation worlds.'''
def needs_reset(self):
'''Return True iff the world needs to be reset.'''
return False
def reset(self):
'''Reset the world state.'''
pass
def trace(self):
'''Return a string containing world state for later analysis.'''
return None
def step(self):
'''Advance the world simulation by one time step.'''
raise NotImplementedError
def on_key_press(self, key, keys):
'''Handle an otherwise-unhandled keypress event.'''
self.reset()
|
Allow for World to handle key presses.
|
Allow for World to handle key presses.
|
Python
|
mit
|
EmbodiedCognition/pagoda,EmbodiedCognition/pagoda
|
df58b36b6f62c39030d6ff28c6fb67c11f112df0
|
pyxrf/gui_module/main_window.py
|
pyxrf/gui_module/main_window.py
|
from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
|
from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
self.setWindowTitle("PyXRF window title")
|
Test window title on Mac
|
Test window title on Mac
|
Python
|
bsd-3-clause
|
NSLS-II-HXN/PyXRF,NSLS-II/PyXRF,NSLS-II-HXN/PyXRF
|
3005b947312c0219c6754e662496c876e46aafc4
|
model/openacademy_session.py
|
model/openacademy_session.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="instructor")
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
# -*- coding: utf-8 -*-
from openerp import fields, models
class Session(models.Model):
_name = 'openacademy.session'
name = fields.Char(required=True)
start_date = fields.Date()
duration = fields.Float(digits=(6, 2), help="Duration in days")
seats = fields.Integer(string="Number of seats")
instructor_id = fields.Many2one('res.partner', string="instructor",
domain=['|', ('instructor', '=', True),
('category_id.name', 'ilike', "Teacher")])
course_id = fields.Many2one('openacademy.course',
ondelete='cascade', string="Course", required=True)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
|
Add domain or and ilike
|
[REF] openacademy: Add domain or and ilike
|
Python
|
apache-2.0
|
luisll-vauxoo/openacademy
|
7371244c00c94fdc552c5d146ab1a245b643427e
|
reeprotocol/ip.py
|
reeprotocol/ip.py
|
"""IP Physical Layer
"""
from __future__ import absolute_import
import socket
from .protocol import PhysicalLayer
class Ip(PhysicalLayer):
"""IP Physical Layer"""
def __init__(self, addr):
"""Create an IP Physical Layer.
:addr tuple: Address tuple (host, port)
"""
self.addr = addr
self.connection = None
def connect(self):
"""Connect to `self.addr`
"""
self.connection = socket.create_connection(self.addr)
def disconnect(self):
"""Disconnects
"""
if self.connection:
self.connection.close()
def send_byte(self, byte):
"""Send a byte"""
assert isinstance(self.connection, socket.socket)
self.connection.send(byte)
def get_byte(self, timeout):
"""Read a byte"""
assert isinstance(self.connection, socket.socket)
self.connection.recv(1)
|
"""IP Physical Layer
"""
from __future__ import absolute_import
import socket
import queue
import threading
from .protocol import PhysicalLayer
class Ip(PhysicalLayer):
"""IP Physical Layer"""
def __init__(self, addr):
"""Create an IP Physical Layer.
:addr tuple: Address tuple (host, port)
"""
self.addr = addr
self.connection = None
self.connected = False
self.queue = queue.Queue()
self.thread = threading.Thread(target=self.read_port)
def connect(self):
"""Connect to `self.addr`
"""
self.connection = socket.create_connection(self.addr)
self.connected = True
self.thread.start()
def disconnect(self):
"""Disconnects
"""
if self.connection:
self.connection.close()
self.connected = False
def read_port(self):
"""Read bytes from socket
"""
while self.connected:
response = self.connection.recv(16)
if not response:
continue
for byte_resp in response:
self.queue.put(byte_resp)
def send_byte(self, byte):
"""Send a byte"""
assert isinstance(self.connection, socket.socket)
self.connection.send(byte)
def get_byte(self, timeout=60):
"""Read a byte"""
return self.queue.get(True, timeout=timeout)
|
Implement reading socket with threading
|
Implement reading socket with threading
|
Python
|
agpl-3.0
|
javierdelapuente/reeprotocol
|
e2b691810f9d9a33f054bf245f1429d6999338a6
|
dataproperty/_interface.py
|
dataproperty/_interface.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import abc
import six
from ._function import is_nan
from ._typecode import Typecode
@six.add_metaclass(abc.ABCMeta)
class DataPeropertyInterface(object):
__slots__ = ()
@abc.abstractproperty
def align(self): # pragma: no cover
pass
@abc.abstractproperty
def decimal_places(self): # pragma: no cover
pass
@abc.abstractproperty
def typecode(self): # pragma: no cover
pass
@property
def format_str(self):
if self.typecode == Typecode.INT:
return "d"
if self.typecode == Typecode.FLOAT:
if is_nan(self.decimal_places):
return "f"
return ".%df" % (self.decimal_places)
return "s"
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
import abc
import six
from ._function import is_nan
from ._typecode import Typecode
@six.add_metaclass(abc.ABCMeta)
class DataPeropertyInterface(object):
__slots__ = ()
@abc.abstractproperty
def align(self): # pragma: no cover
pass
@abc.abstractproperty
def decimal_places(self): # pragma: no cover
pass
@abc.abstractproperty
def typecode(self): # pragma: no cover
pass
|
Delete property from the interface class
|
Delete property from the interface class
|
Python
|
mit
|
thombashi/DataProperty
|
367d8415773a44356ce604ecfc839117798f7d3a
|
tests/test_pytestplugin.py
|
tests/test_pytestplugin.py
|
from io import FileIO
from six import next
from pkg_resources import resource_filename, working_set
from wex.readable import EXT_WEXIN
from wex.output import EXT_WEXOUT
from wex import pytestplugin
def pytest_funcarg__parent(request):
return request.session
response = b"""HTTP/1.1 200 OK\r
Content-type: application/json\r
\r
{"args":{"this":"that"}}"""
def setup_module():
entry = resource_filename(__name__, 'fixtures/TestMe.egg')
working_set.add_entry(entry)
def test_pytest_collect_file(tmpdir, parent):
# FTM just to see how to coverage test the plugin
r0_wexin = tmpdir.join('0' + EXT_WEXIN)
r0_wexout = tmpdir.join('0' + EXT_WEXOUT)
with FileIO(r0_wexin.strpath, 'w') as fp:
fp.write(response)
with FileIO(r0_wexout.strpath, 'w') as fp:
fp.write(b'this\t"that"\n')
fileobj = pytestplugin.pytest_collect_file(parent, r0_wexin)
item = next(fileobj.collect())
item.runtest()
|
from io import FileIO
from six import next
import pytest
from pkg_resources import resource_filename, working_set
from wex.readable import EXT_WEXIN
from wex.output import EXT_WEXOUT
from wex import pytestplugin
@pytest.fixture
def parent(request):
return request.session
response = b"""HTTP/1.1 200 OK\r
Content-type: application/json\r
\r
{"args":{"this":"that"}}"""
def setup_module():
entry = resource_filename(__name__, 'fixtures/TestMe.egg')
working_set.add_entry(entry)
def test_pytest_collect_file(tmpdir, parent):
# FTM just to see how to coverage test the plugin
r0_wexin = tmpdir.join('0' + EXT_WEXIN)
r0_wexout = tmpdir.join('0' + EXT_WEXOUT)
with FileIO(r0_wexin.strpath, 'w') as fp:
fp.write(response)
with FileIO(r0_wexout.strpath, 'w') as fp:
fp.write(b'this\t"that"\n')
fileobj = pytestplugin.pytest_collect_file(parent, r0_wexin)
item = next(fileobj.collect())
item.runtest()
|
Replace funcarg with fixture for pytestplugin
|
Replace funcarg with fixture for pytestplugin
|
Python
|
bsd-3-clause
|
gilessbrown/wextracto,eBay/wextracto,eBay/wextracto,gilessbrown/wextracto
|
a0fbf7c19aeebbf1451beab74904ec872c01c9b4
|
us_ignite/settings/production.py
|
us_ignite/settings/production.py
|
# Production settings for us_ignite
import os
from us_ignite.settings import *
# Sensitive values are saved as env variables:
env = os.getenv
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))
# settings is one directory up now
here = lambda *x: os.path.join(PROJECT_ROOT, '..', *x)
SITE_URL = 'http://us-ignite.herokuapp.com'
# Make this unique, and don't share it with anybody.
SECRET_KEY = env('SECRET_KEY')
# Basic authentication for Heroku
BASIC_WWW_AUTHENTICATION_USERNAME = env('WWW_USERNAME')
BASIC_WWW_AUTHENTICATION_PASSWORD = env('WWW_PASSWORD')
BASIC_WWW_AUTHENTICATION = False
# Email
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = env('EMAIL_HOST')
EMAIL_PORT = env('EMAIL_PORT')
EMAIL_HOST_USER = env('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = env('EMAIL_HOST_PASSWORD')
# Settings to use the filesystem
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
|
# Production settings for us_ignite
import os
from us_ignite.settings import *
# Sensitive values are saved as env variables:
env = os.getenv
PROJECT_ROOT = os.path.dirname(os.path.realpath(__file__))
# settings is one directory up now
here = lambda *x: os.path.join(PROJECT_ROOT, '..', *x)
SITE_URL = 'http://us-ignite.herokuapp.com'
# Make this unique, and don't share it with anybody.
SECRET_KEY = env('SECRET_KEY')
# Basic authentication for Heroku
BASIC_WWW_AUTHENTICATION_USERNAME = env('WWW_USERNAME')
BASIC_WWW_AUTHENTICATION_PASSWORD = env('WWW_PASSWORD')
BASIC_WWW_AUTHENTICATION = False
# Email
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = env('EMAIL_HOST')
EMAIL_PORT = env('EMAIL_PORT')
EMAIL_HOST_USER = env('HOST_USER')
EMAIL_HOST_PASSWORD = env('EMAIL_HOST_PASSWORD')
# Settings to use the filesystem
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
|
Update email username to use the right variable name.
|
Update email username to use the right variable name.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
26eaaffb872d7046be9417ae53302e59dbc7b808
|
TrainingDataGenerator/Scripts/generateNumberImage.py
|
TrainingDataGenerator/Scripts/generateNumberImage.py
|
# -*- coding: utf-8 -*-
import threading
import os
import shutil
from PIL import Image, ImageDraw2, ImageDraw, ImageFont
import random
count = range(0, 200)
path = './generatedNumberImages'
text = '0123456789X'
def start():
if os.path.exists(path):
shutil.rmtree(path)
os.mkdir(path)
for idx in count:
t = threading.Thread(target=create_image, args=([idx]))
t.start()
def create_image(idx):
o_image = Image.open('background.png')
drawBrush = ImageDraw.Draw(o_image)
drawBrush.text((100 + random.randint(-30, 30), 20 + random.randint(-5, 5)), text, fill='black', font=ImageFont.truetype('./OCR-B 10 BT.ttf', 20 + random.randint(-5, 5)))
o_image.rotate(random.randint(-2, 2)).save(path + '/%d.png' % idx)
if __name__ == '__main__':
start()
|
# -*- coding: utf-8 -*-
import threading
import os
import shutil
from PIL import Image, ImageDraw2, ImageDraw, ImageFont, ImageEnhance
import random
count = range(0, 200)
path = './generatedNumberImages'
text = '0123456789X'
def start():
if os.path.exists(path):
shutil.rmtree(path)
os.mkdir(path)
for idx in count:
t = threading.Thread(target=create_image, args=([idx]))
t.start()
def create_image(idx):
o_image = Image.open('background.png')
drawBrush = ImageDraw.Draw(o_image)
drawBrush.text((100 + random.randint(-30, 30), 20 + random.randint(-5, 5)), text, fill='black', font=ImageFont.truetype('./OCR-B 10 BT.ttf', 20 + random.randint(-5, 5)))
o_image = ImageEnhance.Color(o_image).enhance(random.uniform(0, 2)) # 着色
o_image = ImageEnhance.Brightness(o_image).enhance(random.uniform(0.3, 2)) #亮度
o_image = ImageEnhance.Contrast(o_image).enhance(random.uniform(0.2, 2)) # 对比度
o_image = ImageEnhance.Sharpness(o_image).enhance(random.uniform(0.2, 3.0)) #对比度
o_image = o_image.rotate(random.randint(-2, 2))
o_image.save(path + '/%d.png' % idx)
if __name__ == '__main__':
start()
|
Add Image Enhance for generated image.
|
Add Image Enhance for generated image.
|
Python
|
apache-2.0
|
KevinGong2013/ChineseIDCardOCR,KevinGong2013/ChineseIDCardOCR,KevinGong2013/ChineseIDCardOCR
|
cb321e7cd20123209fbaca610b9511a166cb5c62
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.4.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.3.4'
|
Update dsub version to 0.3.4
|
Update dsub version to 0.3.4
PiperOrigin-RevId: 272279225
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
4f84482803049b40d7b7da26d9d624a6a63b4820
|
core/utils.py
|
core/utils.py
|
# -*- coding: utf-8 -*-
from django.utils import timezone
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = '{} hour{}'.format(h, 's' if h > 1 else '')
if m > 0 and precision != 'h':
duration += '{}{} minute{}'.format(
'' if duration == '' else ', ', m, 's' if m > 1 else '')
if s > 0 and precision != 'h' and precision != 'm':
duration += '{}{} second{}'.format(
'' if duration == '' else ', ', s, 's' if s > 1 else '')
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
# -*- coding: utf-8 -*-
from django.utils import timezone
from django.utils.translation import ngettext
def duration_string(duration, precision='s'):
"""Format hours, minutes and seconds as a human-friendly string (e.g. "2
hours, 25 minutes, 31 seconds") with precision to h = hours, m = minutes or
s = seconds.
"""
h, m, s = duration_parts(duration)
duration = ''
if h > 0:
duration = ngettext('%(hours)s hour', '%(hours)s hours', h) % {
'hours': h
}
if m > 0 and precision != 'h':
if duration != '':
duration += ', '
duration += ngettext('%(minutes)s minute', '%(minutes)s minutes', m) % {
'minutes': m
}
if s > 0 and precision != 'h' and precision != 'm':
if duration != '':
duration += ', '
duration += ngettext('%(seconds)s second', '%(seconds)s seconds', s) % {
'seconds': s
}
return duration
def duration_parts(duration):
"""Get hours, minutes and seconds from a timedelta.
"""
if not isinstance(duration, timezone.timedelta):
raise TypeError('Duration provided must be a timedetla')
h, remainder = divmod(duration.seconds, 3600)
h += duration.days * 24
m, s = divmod(remainder, 60)
return h, m, s
|
Add translation support to `duration_string` utility
|
Add translation support to `duration_string` utility
|
Python
|
bsd-2-clause
|
cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy
|
786ebc992ac09cd4b25e90ee2a243447e39c237f
|
director/accounts/forms.py
|
director/accounts/forms.py
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
"name", "logo", Submit("submit", "Update", css_class="button is-primary")
)
class Meta:
model = Account
fields = ("name", "logo")
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Layout, Div, HTML, Submit
from django import forms
from accounts.models import Account, Team
from lib.data_cleaning import clean_slug, SlugType
from lib.forms import ModelFormWithCreate
from assets.thema import themes
class CleanNameMixin:
def clean_name(self):
return clean_slug(self.cleaned_data["name"], SlugType.ACCOUNT)
class AccountCreateForm(CleanNameMixin, ModelFormWithCreate):
helper = FormHelper()
helper.layout = Layout("name", "logo",)
class Meta:
model = Account
fields = ("name", "logo")
class AccountSettingsForm(CleanNameMixin, forms.ModelForm):
helper = FormHelper()
helper.layout = Layout(
Div(
HTML(
'<p class="title is-4">Identity</p>'
'<p class="subtitle is-5">Settings for your account\'s public profile.</p>'
),
"name",
"logo",
css_class="section",
),
Div(
HTML(
'<p class="title is-4">Content</p>'
'<p class="subtitle is-5">Settings affecting how content is served for your projects.</p>'
),
"theme",
"hosts",
css_class="section",
),
Submit("submit", "Update", css_class="button is-primary"),
)
class Meta:
model = Account
fields = ("name", "logo", "theme", "hosts")
widgets = {
"theme": forms.Select(choices=[(theme, theme) for theme in themes]),
"hosts": forms.TextInput(),
}
class TeamForm(forms.ModelForm):
helper = FormHelper()
helper.form_tag = False
class Meta:
model = Team
fields = ("name", "description")
widgets = {"name": forms.TextInput}
|
Add theme and hosts to settings
|
feat(Accounts): Add theme and hosts to settings
|
Python
|
apache-2.0
|
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
|
c3c1b9c6a1d13f38cd50762b451ca19eb0a05ff2
|
run_deploy_job_wr.py
|
run_deploy_job_wr.py
|
#!/usr/bin/env python
import json
import os
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
os.environ['revision_build'],
os.environ['JOB_NAME'],
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({'command': command, 'install': {}}, config_file)
config_file.flush()
subprocess.check_call(['workspace-run', config_file.name, sys.argv[1]])
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import json
import os
import subprocess
import sys
from tempfile import NamedTemporaryFile
def main():
revision_build = os.environ['revision_build']
job_name = os.environ['JOB_NAME']
build_number = os.environ['BUILD_NUMBER']
prefix='juju-ci/products/version-{}/{}/build-{}'.format(
revision_build, job_name, build_number)
s3_config = join(os.environ['HOME'], 'cloud-city/juju-qa.s3cfg')
command = [
'$HOME/juju-ci-tools/run-deploy-job-remote.bash',
revision_build,
job_name,
]
command.extend(sys.argv[2:])
with NamedTemporaryFile() as config_file:
json.dump({
'command': command, 'install': {},
'artifacts': {'artifacts': ['*']},
'bucket': 'juju-qa-data',
}, config_file)
config_file.flush()
subprocess.check_call([
'workspace-run', config_file.name, sys.argv[1], prefix,
'--s3-config', s3_config,
])
if __name__ == '__main__':
main()
|
Update for more artifact support.
|
Update for more artifact support.
|
Python
|
agpl-3.0
|
mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju,mjs/juju
|
41236c2be66b6f790308cba321cb482807814323
|
ubersmith/calls/device.py
|
ubersmith/calls/device.py
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
|
"""Device call classes.
These classes implement any response cleaning and validation needed. If a
call class isn't defined for a given method then one is created using
ubersmith.calls.BaseCall.
"""
from ubersmith.calls import BaseCall, GroupCall
from ubersmith.utils import prepend_base
__all__ = [
'GetCall',
'ListCall',
]
_ = prepend_base(__name__.split('.')[-1])
class GetCall(BaseCall):
method = _('get')
required_fields = ['device_id']
class ListCall(GroupCall):
method = _('list')
rename_fields = {'clientid': 'client_id'}
int_fields = ['client_id']
class ModuleGraphCall(FileCall):
method = _('module_graph')
|
Make module graph call return a file.
|
Make module graph call return a file.
|
Python
|
mit
|
jasonkeene/python-ubersmith,jasonkeene/python-ubersmith,hivelocity/python-ubersmith,hivelocity/python-ubersmith
|
61a6d057302767aa49633d6d010f7da583035533
|
web/templatetags/getattribute.py
|
web/templatetags/getattribute.py
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
import re
from django import template
from django.conf import settings
numeric_test = re.compile("^\d+$")
register = template.Library()
def getattribute(value, arg):
"""Gets an attribute of an object dynamically from a string name"""
if hasattr(value, str(arg)):
if callable(getattr(value, arg)):
return getattr(value, arg)()
return getattr(value, arg)
elif hasattr(value, 'has_key') and value.has_key(arg):
return value[arg]
elif numeric_test.match(str(arg)) and len(value) > int(arg):
return value[int(arg)]
else:
return settings.TEMPLATE_STRING_IF_INVALID
register.filter('getattribute', getattribute)
# Then, in template:
# {% load getattribute %}
# {{ object|getattribute:dynamic_string_var }}
|
Call objects methods directly from the templates yay
|
web: Call objects methods directly from the templates yay
|
Python
|
apache-2.0
|
SchoolIdolTomodachi/SchoolIdolAPI,laurenor/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI,rdsathene/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,SchoolIdolTomodachi/SchoolIdolAPI,rdsathene/SchoolIdolAPI,rdsathene/SchoolIdolAPI,dburr/SchoolIdolAPI,dburr/SchoolIdolAPI,laurenor/SchoolIdolAPI
|
f4c989567fa77002541c5e5199f2fc3f8e53d6da
|
test_htmlgen/image.py
|
test_htmlgen/image.py
|
from unittest import TestCase
from asserts import assert_equal
from htmlgen import Image
from test_htmlgen.util import parse_short_tag
class ImageTest(TestCase):
def test_attributes(self):
image = Image("my-image.png", "Alternate text")
assert_equal("my-image.png", image.url)
assert_equal("Alternate text", image.alternate_text)
def test_attributes_default_alt(self):
image = Image("my-image.png")
assert_equal("", image.alternate_text)
def test_with_alt(self):
image = Image("my-image.png", "Alternate text")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image._attributes["src"])
assert_equal("Alternate text", image._attributes["alt"])
def test_without_alt(self):
image = Image("my-image.png")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image._attributes["src"])
assert_equal("", image._attributes["alt"])
|
from unittest import TestCase
from asserts import assert_equal
from htmlgen import Image
from test_htmlgen.util import parse_short_tag
class ImageTest(TestCase):
def test_attributes(self):
image = Image("my-image.png", "Alternate text")
assert_equal("my-image.png", image.url)
assert_equal("Alternate text", image.alternate_text)
def test_attributes_default_alt(self):
image = Image("my-image.png")
assert_equal("", image.alternate_text)
def test_with_alt(self):
image = Image("my-image.png", "Alternate text")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image.get_attribute("src"))
assert_equal("Alternate text", image.get_attribute("alt"))
def test_without_alt(self):
image = Image("my-image.png")
tag = parse_short_tag(str(image))
assert_equal("img", tag.name)
assert_equal("my-image.png", image.get_attribute("src"))
assert_equal("", image.get_attribute("alt"))
|
Use public API in tests
|
[tests] Use public API in tests
|
Python
|
mit
|
srittau/python-htmlgen
|
8e2b9e1c80e3a91df7e2cce775c19208aa9d4839
|
exam/asserts.py
|
exam/asserts.py
|
IRRELEVANT = object()
class ChangeWatcher(object):
def __init__(self, thing, *args, **kwargs):
self.thing = thing
self.args = args
self.kwargs = kwargs
self.expected_before = kwargs.pop('before', IRRELEVANT)
self.expected_after = kwargs.pop('after', IRRELEVANT)
def __enter__(self):
self.before = self.__apply()
if not self.expected_before is IRRELEVANT:
check = self.before == self.expected_before
assert check, self.__precondition_failure_msg_for('before')
def __exit__(self, type, value, traceback):
self.after = self.__apply()
if not self.expected_after is IRRELEVANT:
check = self.after == self.expected_after
assert check, self.__precondition_failure_msg_for('after')
assert self.before != self.after, self.__equality_failure_message
def __apply(self):
return self.thing(*self.args, **self.kwargs)
@property
def __equality_failure_message(self):
return 'Expected before %r != %r after' % (self.before, self.after)
def __precondition_failure_msg_for(self, condition):
return '%s value did not change (%s)' % (
condition,
getattr(self, condition)
)
class AssertsMixin(object):
assertChanges = ChangeWatcher
|
IRRELEVANT = object()
class ChangeWatcher(object):
def __init__(self, thing, *args, **kwargs):
self.thing = thing
self.args = args
self.kwargs = kwargs
self.expected_before = kwargs.pop('before', IRRELEVANT)
self.expected_after = kwargs.pop('after', IRRELEVANT)
def __enter__(self):
self.before = self.__apply()
if not self.expected_before is IRRELEVANT:
check = self.before == self.expected_before
assert check, self.__precondition_failure_msg_for('before')
def __exit__(self, exec_type, exac_value, traceback):
self.after = self.__apply()
if not self.expected_after is IRRELEVANT:
check = self.after == self.expected_after
assert check, self.__precondition_failure_msg_for('after')
assert self.before != self.after, self.__equality_failure_message
def __apply(self):
return self.thing(*self.args, **self.kwargs)
@property
def __equality_failure_message(self):
return 'Expected before %r != %r after' % (self.before, self.after)
def __precondition_failure_msg_for(self, condition):
return '%s value did not change (%s)' % (
condition,
getattr(self, condition)
)
class AssertsMixin(object):
assertChanges = ChangeWatcher
|
Change __exit__arg names to not be built ins
|
Change __exit__arg names to not be built ins
|
Python
|
mit
|
gterzian/exam,Fluxx/exam,Fluxx/exam,gterzian/exam
|
24d67552f1ae16179fb1aa21a06c191c6d596fb1
|
akhet/urlgenerator.py
|
akhet/urlgenerator.py
|
"""
Contributed by Michael Mericikel.
"""
from pyramid.decorator import reify
import pyramid.url as url
class URLGenerator(object):
def __init__(self, context, request):
self.context = context
self.request = request
@reify
def context(self):
return url.resource_url(self.context, self.request)
@reify
def app(self):
return self.request.application_url
def route(self, route_name, *elements, **kw):
return url.route_url(route_name, self.request, *elements, **kw)
# sugar for calling url('home')
__call__ = route
def current(self, *elements, **kw):
return url.current_route_url(self.request, *elements, **kw)
@reify
def static(self):
return url.static_url('baseline:static/', self.request)
@reify
def deform(self):
return url.static_url('deform:static/', self.request)
|
"""
Contributed by Michael Mericikel.
"""
from pyramid.decorator import reify
import pyramid.url as url
class URLGenerator(object):
def __init__(self, context, request):
self.context = context
self.request = request
@reify
def context(self):
return url.resource_url(self.context, self.request)
@reify
def app(self):
return self.request.application_url
def route(self, route_name, *elements, **kw):
return url.route_url(route_name, self.request, *elements, **kw)
# sugar for calling url('home')
__call__ = route
def current(self, *elements, **kw):
return url.current_route_url(self.request, *elements, **kw)
## Commented because I'm unsure of the long-term API.
## If you want to use this, or a more particular one for your
## static package(s), define it in a subclass.
##
# A future version might make 'path' optional, defaulting to
# a value passed to the constructor ("myapp:static/").
#
#def static(self, path, **kw):
# return url.static_url(path, self.request, **kw)
## If you're using the Deform package you may find this useful.
#
#@reify
#def deform(self):
# return url.static_url("deform:static/", self.request)
|
Comment out 'static' and 'deform' methods; disagreements on long-term API.
|
Comment out 'static' and 'deform' methods; disagreements on long-term API.
|
Python
|
mit
|
hlwsmith/akhet,hlwsmith/akhet,Pylons/akhet,hlwsmith/akhet,Pylons/akhet
|
d7bec88009b73a57124dbfacc91446927328abf9
|
src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
|
src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py
|
# pylint: disable=no-self-use,too-many-arguments
from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None, name=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
|
# pylint: disable=no-self-use,too-many-arguments
from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=security_rule_name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
|
Fix broken NSG create command (duplicate --name parameter)
|
Fix broken NSG create command (duplicate --name parameter)
|
Python
|
mit
|
QingChenmsft/azure-cli,samedder/azure-cli,BurtBiel/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli
|
cde815fd3c87cbe000620060f41bf2b29976555d
|
kindergarten-garden/kindergarten_garden.py
|
kindergarten-garden/kindergarten_garden.py
|
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
rows = garden.split()
patches = [rows[0][i:i+2] + rows[1][i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
CHILDREN = ["Alice", "Bob", "Charlie", "David", "Eve", "Fred",
"Ginny", "Harriet", "Ileana", "Joseph", "Kincaid", "Larry"]
PLANTS = {"C": "Clover", "G": "Grass", "R": "Radishes", "V": "Violets"}
class Garden(object):
def __init__(self, garden, students=CHILDREN):
self.students = sorted(students)
row1, row2 = garden.split()
patches = [row1[i:i+2] + row2[i:i+2]
for i in range(0,2*len(self.students),2)]
self._garden = {s: [PLANTS[ch] for ch in p]
for s, p in zip(self.students, patches)}
def plants(self, student):
return self._garden[student]
|
Use unpacking for simpler code
|
Use unpacking for simpler code
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
a23edc2bb3caf15160d9b0d311a40cfb7f75131c
|
tests/test_system.py
|
tests/test_system.py
|
import datetime
from decimal import Decimal
from mock import Mock
import ubersmith.order
# TODO: setup/teardown module with default request handler
# TODO: mock out requests library vs mocking out request handler
def test_order_list():
handler = Mock()
response = {
"60": {
"client_id": "50",
"activity": "1272400333",
"ts": "1272400333",
"total": "33.22",
"order_id": "60",
},
}
handler.process_request.return_value = response
expected = {
60: {
u'client_id': 50,
u'activity': datetime.datetime(2010, 4, 27, 16, 32, 13),
u'ts': datetime.datetime(2010, 4, 27, 16, 32, 13),
u'total': Decimal('33.22'),
u'order_id': 60,
}
}
result = ubersmith.order.list(client_id=50, request_handler=handler)
assert expected == result
|
import datetime
from decimal import Decimal
from mock import Mock
import ubersmith.order
# TODO: setup/teardown module with default request handler
# TODO: mock out requests library vs mocking out request handler
def test_order_list():
handler = Mock()
response = {
"60": {
"client_id": "50",
"activity": "1272400333",
"ts": "1272400333",
"total": "33.22",
"order_id": "60",
},
}
handler.process_request.return_value = response
expected = {
60: {
u'client_id': 50,
u'activity': datetime.datetime.fromtimestamp(float("1272400333")),
u'ts': datetime.datetime.fromtimestamp(float("1272400333")),
u'total': Decimal('33.22'),
u'order_id': 60,
}
}
result = ubersmith.order.list(client_id=50, request_handler=handler)
assert expected == result
|
Fix timezone difference with travis.
|
Fix timezone difference with travis.
|
Python
|
mit
|
jasonkeene/python-ubersmith,hivelocity/python-ubersmith,hivelocity/python-ubersmith,jasonkeene/python-ubersmith
|
31f6cc777054f4b48a37bb93453bcf405a9101a3
|
examples/example_import.py
|
examples/example_import.py
|
import xkcdpass.xkcd_password as xp
import random
def random_capitalisation(s, chance):
new_str = []
for i, c in enumerate(s):
new_str.append(c.upper() if random.random() < chance else c)
return "".join(new_str)
words = xp.locate_wordfile()
mywords = xp.generate_wordlist(wordfile=words, min_length=5, max_length=8)
raw_password = xp.generate_xkcdpassword(mywords)
for i in range(5):
print(random_capitalisation(raw_password, i/10.0))
|
import xkcdpass.xkcd_password as xp
import random
def random_capitalisation(s, chance):
new_str = []
for i, c in enumerate(s):
new_str.append(c.upper() if random.random() < chance else c)
return "".join(new_str)
def capitalize_first_letter(s):
new_str = []
s = s.split(" ")
for i, c in enumerate(s):
new_str.append(c.capitalize())
return "".join(new_str)
words = xp.locate_wordfile()
mywords = xp.generate_wordlist(wordfile=words, min_length=5, max_length=8)
raw_password = xp.generate_xkcdpassword(mywords)
for i in range(5):
print(random_capitalisation(raw_password, i/10.0))
print(capitalize_first_letter(raw_password))
|
Add "capitalize first letter" function to examples
|
Add "capitalize first letter" function to examples
|
Python
|
bsd-3-clause
|
amiryal/XKCD-password-generator,amiryal/XKCD-password-generator
|
520487df7b9612e18dc06764ba8632b0ef28aad2
|
solvent/bring.py
|
solvent/bring.py
|
from solvent import config
from solvent import run
from solvent import requirementlabel
import logging
import os
class Bring:
def __init__(self, repositoryBasename, product, hash, destination):
self._repositoryBasename = repositoryBasename
self._product = product
self._hash = hash
self._destination = destination
def go(self):
requirementLabel = requirementlabel.RequirementLabel(
basename=self._repositoryBasename, product=self._product, hash=self._hash)
label = requirementLabel.matching()
self.label(label=label, destination=self._destination)
@classmethod
def label(cls, label, destination):
logging.info("Checking out '%(label)s'", dict(label=label))
if not os.path.isdir(destination):
os.makedirs(destination)
run.run([
"osmosis", "checkout", destination, label,
"--MD5", "--removeUnknownFiles", "--putIfMissing",
"--objectStores=" + config.objectStoresOsmosisParameter()])
|
from solvent import config
from solvent import run
from solvent import requirementlabel
import logging
import os
class Bring:
def __init__(self, repositoryBasename, product, hash, destination):
self._repositoryBasename = repositoryBasename
self._product = product
self._hash = hash
self._destination = destination
def go(self):
requirementLabel = requirementlabel.RequirementLabel(
basename=self._repositoryBasename, product=self._product, hash=self._hash)
label = requirementLabel.matching()
self.label(label=label, destination=self._destination)
@classmethod
def label(cls, label, destination):
logging.info("Checking out '%(label)s'", dict(label=label))
if not os.path.isdir(destination):
os.makedirs(destination)
myUIDandGID = ["--myUIDandGIDcheckout"] if os.getuid() != 0 else []
run.run([
"osmosis", "checkout", destination, label,
"--MD5", "--removeUnknownFiles", "--putIfMissing",
"--objectStores=" + config.objectStoresOsmosisParameter()] + myUIDandGID)
|
Bring now uses --myUIDandGIDCheckout if root is not the invoker
|
Bring now uses --myUIDandGIDCheckout if root is not the invoker
|
Python
|
apache-2.0
|
Stratoscale/solvent,Stratoscale/solvent
|
cea212a73e76c1b1e5b4795687e5b2a394614227
|
hkisaml/urls.py
|
hkisaml/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.http import HttpResponse
from django.conf.urls.static import static
from django.contrib.staticfiles import views as static_views
from .api import UserView, GetJWTView
from users.views import LoginView
def show_login(request):
html = "<html><body>"
if request.user.is_authenticated:
html += "%s" % request.user
else:
html += "not logged in"
html += "</body></html>"
return HttpResponse(html)
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^saml2/', include('djangosaml2.urls')),
url(r'^accounts/profile/', show_login),
url(r'^accounts/', include('allauth.urls')),
url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')),
url(r'^user/(?P<username>[\w.@+-]+)/?$', UserView.as_view()),
url(r'^user/$', UserView.as_view()),
url(r'^jwt-token/$', GetJWTView.as_view()),
url(r'^login/$', LoginView.as_view()),
)
if settings.DEBUG:
urlpatterns += [url(r'^static/(?P<path>.*)$', static_views.serve)]
|
from django.conf import settings
from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.http import HttpResponse
from django.conf.urls.static import static
from django.contrib.staticfiles import views as static_views
from django.views.defaults import permission_denied
from .api import UserView, GetJWTView
from users.views import LoginView
def show_login(request):
html = "<html><body>"
if request.user.is_authenticated:
html += "%s" % request.user
else:
html += "not logged in"
html += "</body></html>"
return HttpResponse(html)
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^saml2/', include('djangosaml2.urls')),
url(r'^accounts/profile/', show_login),
url(r'^accounts/', include('allauth.urls')),
url(r'^oauth2/applications/', permission_denied),
url(r'^oauth2/', include('oauth2_provider.urls', namespace='oauth2_provider')),
url(r'^user/(?P<username>[\w.@+-]+)/?$', UserView.as_view()),
url(r'^user/$', UserView.as_view()),
url(r'^jwt-token/$', GetJWTView.as_view()),
url(r'^login/$', LoginView.as_view()),
)
if settings.DEBUG:
urlpatterns += [url(r'^static/(?P<path>.*)$', static_views.serve)]
|
Disable application views from oauth2-toolkit
|
Disable application views from oauth2-toolkit
|
Python
|
mit
|
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
|
1be7ac84b951a1e5803bd46de931235e44e40d9a
|
2018/covar/covar-typecheck.py
|
2018/covar/covar-typecheck.py
|
from typing import TypeVar, List
class Mammal:
pass
class Cat(Mammal):
pass
T = TypeVar('T')
def count_mammals(seq : List[Mammal]) -> int:
return len(seq)
lst = [1, 2, 3]
mlst = [Mammal(), Mammal()]
clst = [Cat(), Cat()]
print(count_mammals(clst))
|
# Sample of using typing.TypeVar with covariant settings.
# Run with python3.6+
#
# For type-checking with mypy:
#
# > mypy covar-typecheck.py
#
# Eli Bendersky [https://eli.thegreenplace.net]
# This code is in the public domain.
from typing import List, TypeVar, Iterable, Generic
class Mammal:
pass
class Cat(Mammal):
pass
def count_mammals_list(seq : List[Mammal]) -> int:
return len(seq)
mlst = [Mammal(), Mammal()]
print(count_mammals_list(mlst))
# This will fail a mypy check, because List is not covariant.
clst = [Cat(), Cat()]
print(count_mammals_list(clst))
# Now we define a simplistic immutable list wrapper with a covariant type
# parameter. This will pass type checking
T_co = TypeVar('T_co', covariant=True)
class ImmutableList(Generic[T_co]):
def __init__(self, items: Iterable[T_co]) -> None:
self.lst = list(items)
def __len__(self) -> int:
return len(self.lst)
def count_mammals_ilist(seq : ImmutableList[Mammal]) -> int:
return len(seq)
mimmlst = ImmutableList([Mammal(), Mammal()])
print(count_mammals_ilist(mimmlst))
cimmlst = ImmutableList([Cat(), Cat()])
print(count_mammals_ilist(cimmlst))
|
Update the sample with covariant markings
|
Update the sample with covariant markings
|
Python
|
unlicense
|
eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog,eliben/code-for-blog
|
7405bda939632a5f8cac93413b4e99939ef716c2
|
ideas/models.py
|
ideas/models.py
|
from __future__ import unicode_literals
from django.db import models
class Idea(models.Model):
name = models.CharField(max_length=200)
description = models.TextField()
votes = models.IntegerField(default=0)
def __unicode__(self):
return self.name
|
from __future__ import unicode_literals
from django.db import models
class Idea(models.Model):
name = models.CharField(max_length=200, unique=True)
description = models.TextField()
votes = models.IntegerField(default=0)
def __unicode__(self):
return self.name
|
Add unique parameter to idea name
|
Add unique parameter to idea name
|
Python
|
mit
|
neosergio/vote_hackatrix_backend
|
18a03c7bdb433e9f75fd93a39735ccb935e0072c
|
pavement.py
|
pavement.py
|
# -*- coding: utf-8 -*-
"""Config-like for paver tool."""
from paver.easy import task, sh, path # noqa
# pylint: disable=invalid-name
cli_command_name = 'serverauditor'
@task
def lint():
"""Check code style and conventions."""
sh('prospector')
@task
def bats():
"""Run tests on CLI usage."""
sh('bats --tap tests/integration')
@task
def nosetests():
"""Run unit tests."""
sh('nosetests')
@task
def completion_tests():
"""Run integration tests for bash completion."""
sh('nosetests tests/integration/completion/bash/')
@task
def coverage():
"""Run test and collect coverage."""
sh('nosetests --with-coverage')
sh('coverage xml')
@task
def create_compeletion(info):
"""Generate bash completion."""
completion_dir = path('contrib/completion/bash')
if not completion_dir.exists():
completion_dir.makedirs_p()
completion_path = completion_dir / cli_command_name
if completion_path.exists():
info('Completion exists')
else:
sh('{} complete > {}'.format(cli_command_name, completion_path))
@task
def clean_compeletion(info):
"""Generate bash completion."""
completion_path = path('contrib/bash/complete') / cli_command_name
completion_path.remove()
info('Completion exists')
|
# -*- coding: utf-8 -*-
"""Config-like for paver tool."""
from paver.easy import task, sh, path # noqa
# pylint: disable=invalid-name
cli_command_name = 'serverauditor'
@task
def lint():
"""Check code style and conventions."""
sh('prospector')
@task
def bats():
"""Run tests on CLI usage."""
sh('bats --tap tests/integration')
@task
def nosetests():
"""Run unit tests."""
sh('nosetests')
@task
def completion_tests():
"""Run integration tests for bash completion."""
sh('nosetests tests/integration/completion/bash/')
@task
def coverage():
"""Run test and collect coverage."""
sh('nosetests --with-coverage')
sh('coverage xml')
@task
def create_compeletion(info):
"""Generate bash completion."""
completion_dir = path('contrib/completion/bash')
if not completion_dir.exists():
completion_dir.makedirs_p()
completion_path = completion_dir / cli_command_name
if completion_path.exists():
info('Completion exists')
else:
sh('{} complete > {}'.format(cli_command_name, completion_path))
@task
def clean_compeletion(info):
"""Generate bash completion."""
completion_path = path('contrib/bash/complete') / cli_command_name
completion_path.remove()
info('Completion exists')
|
Add newline between func defs.
|
Add newline between func defs.
|
Python
|
bsd-3-clause
|
EvgeneOskin/termius-cli,EvgeneOskin/termius-cli,Crystalnix/serverauditor-sshconfig,Crystalnix/serverauditor-sshconfig,EvgeneOskin/serverauditor-sshconfig
|
7f79645182de6fed4d7f09302cbc31351defe467
|
snippet_parser/fr.py
|
snippet_parser/fr.py
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = template.params[0]
if len(template.params) == 2:
ret += template.params[1]
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
#-*- encoding: utf-8 -*-
import base
def handle_date(template):
year = None
if len(template.params) >= 3:
try:
year = int(unicode(template.params[2]))
except ValueError:
pass
if isinstance(year, int):
# assume {{date|d|m|y|...}}
return ' '.join(map(unicode, template.params[:3]))
else:
# assume {{date|d m y|...}}
return unicode(template.params[0])
def handle_s(template):
ret = unicode(template.params[0])
if len(template.params) == 2:
ret += unicode(template.params[1])
if template.name.matches('-s'):
ret += ' av. J.-C'
return ret
class SnippetParser(base.SnippetParserBase):
def strip_template(self, template, normalize, collapse):
if template.name.matches('unité'):
return ' '.join(map(unicode, template.params[:2]))
elif template.name.matches('date'):
return handle_date(template)
elif template.name.matches('s') or template.name.matches('-s'):
return handle_s(template)
elif self.is_citation_needed(template):
repl = [base.CITATION_NEEDED_MARKER]
if template.params:
repl = [template.params[0].value.strip_code()] + repl
return ''.join(repl)
return ''
|
Fix params handling in {{s}}.
|
Fix params handling in {{s}}.
Former-commit-id: 15eae70c91cd08f9028944f8b6a3990d3170aa28
|
Python
|
mit
|
guilherme-pg/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,guilherme-pg/citationhunt,eggpi/citationhunt,eggpi/citationhunt
|
53bb27bd88cb59424e231e7cadbbabcc91cc44e2
|
pywikibot/families/commons_family.py
|
pywikibot/families/commons_family.py
|
# -*- coding: utf-8 -*-
"""Family module for Wikimedia Commons."""
#
# (C) Pywikibot team, 2005-2017
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
"""Family class for Wikimedia Commons."""
name = 'commons'
def __init__(self):
"""Constructor."""
super(Family, self).__init__()
self.langs = {
'commons': 'commons.wikimedia.org',
'beta': 'commons.wikimedia.beta.wmflabs.org'
}
self.interwiki_forward = 'wikipedia'
# Templates that indicate a category redirect
# Redirects to these templates are automatically included
self.category_redirect_templates = {
'_default': (
u'Category redirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
),
}
# Subpages for documentation.
self.doc_subpages = {
'_default': ((u'/doc', ), ['commons']),
}
|
# -*- coding: utf-8 -*-
"""Family module for Wikimedia Commons."""
#
# (C) Pywikibot team, 2005-2018
#
# Distributed under the terms of the MIT license.
#
from __future__ import absolute_import, unicode_literals
__version__ = '$Id$'
from pywikibot import family
# The Wikimedia Commons family
class Family(family.WikimediaFamily):
"""Family class for Wikimedia Commons."""
name = 'commons'
def __init__(self):
"""Constructor."""
super(Family, self).__init__()
self.langs = {
'commons': 'commons.wikimedia.org',
'beta': 'commons.wikimedia.beta.wmflabs.org'
}
self.interwiki_forward = 'wikipedia'
# Templates that indicate a category redirect
# Redirects to these templates are automatically included
self.category_redirect_templates = {
'_default': (
u'Category redirect',
u'Synonym taxon category redirect',
u'Invalid taxon category redirect',
u'Monotypic taxon category redirect',
'Endashcatredirect',
),
}
# Subpages for documentation.
self.doc_subpages = {
'_default': ((u'/doc', ), ['commons']),
}
|
Add Endashcatredirect as a new item in list of category redirect templates
|
Add Endashcatredirect as a new item in list of category redirect templates
Bug: T183987
Change-Id: I72b6ded1ccab48d5d905f4edd4ce6b9485703563
|
Python
|
mit
|
magul/pywikibot-core,PersianWikipedia/pywikibot-core,wikimedia/pywikibot-core,wikimedia/pywikibot-core,magul/pywikibot-core
|
17fbd2f3fa24da128cb5cabef4a8c94b59b50b0c
|
sqrl/client/crypt.py
|
sqrl/client/crypt.py
|
#!/usr/bin/env python
import ed25519
import hmac
from sqrl.utils import baseconv
class Crypt:
"""
Crypt
- Creating site specific key pair
- Signing SRQL response
- Providing public key
"""
def __init__(self, masterkey):
self.masterkey = masterkey
def _site_key_pair(self, domain):
seed = self._site_seed(domain)
sk = ed25519.SigningKey(seed)
vk = sk.get_verifying_key()
return sk, vk
def _site_seed(self, domain):
"""
Generates a seed to based on the masterkey
and the current site you authenicating with
The seed is used to generate the key pair
used for signing the request body
"""
key = self.masterkey
local_hmac = hmac.new(key)
local_hmac.update(domain)
return local_hmac.hexdigest()
def sign(self, value):
signed = self.sk.sign(value)
return baseconv.encode(signed)
def getPublicKey(self, domain):
self.sk, self.vk = self._site_key_pair(domain)
key = self.vk.to_bytes()
return baseconv.encode(key)
|
#!/usr/bin/env python
import ed25519
import hmac
import baseconv
class Crypt:
"""
Crypt
- Creating site specific key pair
- Signing SRQL response
- Providing public key
"""
def __init__(self, masterkey):
self.masterkey = masterkey
def _site_key_pair(self, domain):
seed = self._site_seed(domain)
sk = ed25519.SigningKey(seed)
vk = sk.get_verifying_key()
return sk, vk
def _site_seed(self, domain):
"""
Generates a seed to based on the masterkey
and the current site you authenicating with
The seed is used to generate the key pair
used for signing the request body
"""
key = self.masterkey
local_hmac = hmac.new(key)
local_hmac.update(domain)
return local_hmac.hexdigest()
def sign(self, value):
signed = self.sk.sign(value)
return baseconv.encode(signed)
def getPublicKey(self, domain):
self.sk, self.vk = self._site_key_pair(domain)
key = self.vk.to_bytes()
return baseconv.encode(key)
|
Fix up imports after module has moved
|
Fix up imports after module has moved
|
Python
|
mit
|
vegarwe/sqrl,vegarwe/sqrl,vegarwe/sqrl,vegarwe/sqrl
|
a5ce8febd35795a06288291ae67df1a92b4ba664
|
test_knot.py
|
test_knot.py
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
import unittest
from flask import Flask
from flask.ext.knot import Knot, get_container
def create_app():
app = Flask(__name__)
app.config['TESTING'] = True
return app
class TestKnot(unittest.TestCase):
def test_acts_like_container(self):
app = create_app()
dic = Knot(app)
def foo(c):
return 'bar'
dic.add_factory(foo)
self.assertEqual(dic.provide('foo'), 'bar')
def test_does_use_app_config_on_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot(app)
self.assertEqual(dic['foo'], 'bar')
def test_does_not_use_app_config_after_initialization(self):
app = create_app()
app.config['foo'] = 'bar'
dic = Knot()
dic.init_app(app)
self.assertRaises(KeyError, lambda: dic['foo'])
def test_container_is_shared(self):
app1 = create_app()
app2 = create_app()
dic = Knot()
dic.init_app(app1)
dic.init_app(app2)
dic1 = get_container(app1)
dic2 = get_container(app2)
assert dic1 is dic2
def test_registration_is_required(self):
app = create_app()
self.assertRaises(RuntimeError, lambda: get_container(app))
if __name__ == '__main__':
unittest.main()
|
Add test for required registration.
|
Add test for required registration.
|
Python
|
mit
|
jaapverloop/flask-knot
|
116e9107f8ef9d4e074f17a4445f7b3ecceb7ab1
|
sympy/interactive/tests/test_ipython.py
|
sympy/interactive/tests/test_ipython.py
|
"""Tests of tools for setting up interactive IPython sessions. """
from sympy.interactive.session import init_ipython_session, enable_automatic_symbols
from sympy.core import Symbol
from sympy.external import import_module
from sympy.utilities.pytest import raises
# TODO: The code below could be made more granular with something like:
#
# @requires('IPython', version=">=0.11")
# def test_automatic_symbols(ipython):
ipython = import_module("IPython", min_module_version="0.11")
if not ipython:
#bin/test will not execute any tests now
disabled = True
# TODO: Add tests that would verify that enable_automatic_symbols() doesn't
# break anything. For example typing `factorial` or `all` in an interpreter
# shouldn't result in a new symbol.
def test_automatic_symbols():
    """Undefined names typed at the IPython prompt become SymPy Symbols."""
    app = init_ipython_session()
    app.run_cell("from sympy import *")
    enable_automatic_symbols(app)
    symbol = "verylongsymbolname"
    # The name must not pre-exist in the namespace, or the test proves nothing.
    assert symbol not in app.user_ns
    app.run_cell(symbol, False)
    assert symbol in app.user_ns
    assert isinstance(app.user_ns[symbol], Symbol)
|
"""Tests of tools for setting up interactive IPython sessions. """
from sympy.interactive.session import init_ipython_session, enable_automatic_symbols
from sympy.core import Symbol
from sympy.external import import_module
from sympy.utilities.pytest import raises
# TODO: The code below could be made more granular with something like:
#
# @requires('IPython', version=">=0.11")
# def test_automatic_symbols(ipython):
ipython = import_module("IPython", min_module_version="0.11")
if not ipython:
#bin/test will not execute any tests now
disabled = True
# TODO: Add tests that would verify that enable_automatic_symbols() doesn't
# break anything. For example typing `factorial` or `all` in an interpreter
# shouldn't result in a new symbol.
def test_automatic_symbols():
    """Undefined names become Symbols, but existing names are preserved.

    Covers three cases: a brand-new name is auto-symbolified; Python
    builtins (``all``) are left untouched; SymPy names (``factorial``)
    are left untouched.
    """
    app = init_ipython_session()
    app.run_cell("from sympy import *")
    enable_automatic_symbols(app)
    symbol = "verylongsymbolname"
    assert symbol not in app.user_ns
    app.run_cell(symbol, False)
    assert symbol in app.user_ns
    assert isinstance(app.user_ns[symbol], Symbol)
    # Check that built-in names aren't overridden
    app.run_cell("a = all == __builtin__.all", False)
    assert "all" not in app.user_ns
    assert app.user_ns['a'] == True
    # Check that sympy names aren't overridden
    app.run_cell("import sympy")
    app.run_cell("a = factorial == sympy.factorial")
    assert app.user_ns['a'] == True
|
Add some more tests for isympy -a
|
Add some more tests for isympy -a
|
Python
|
bsd-3-clause
|
atreyv/sympy,wyom/sympy,Curious72/sympy,jerli/sympy,yashsharan/sympy,dqnykamp/sympy,debugger22/sympy,iamutkarshtiwari/sympy,cswiercz/sympy,AkademieOlympia/sympy,drufat/sympy,kaichogami/sympy,postvakje/sympy,ahhda/sympy,srjoglekar246/sympy,abhiii5459/sympy,cswiercz/sympy,meghana1995/sympy,Titan-C/sympy,ga7g08/sympy,pandeyadarsh/sympy,debugger22/sympy,Curious72/sympy,madan96/sympy,MridulS/sympy,AunShiLord/sympy,mcdaniel67/sympy,jaimahajan1997/sympy,kaushik94/sympy,asm666/sympy,lindsayad/sympy,AkademieOlympia/sympy,kaushik94/sympy,sampadsaha5/sympy,lidavidm/sympy,lindsayad/sympy,grevutiu-gabriel/sympy,aktech/sympy,bukzor/sympy,Arafatk/sympy,amitjamadagni/sympy,drufat/sympy,emon10005/sympy,oliverlee/sympy,shikil/sympy,shikil/sympy,grevutiu-gabriel/sympy,Mitchkoens/sympy,jerli/sympy,aktech/sympy,yukoba/sympy,debugger22/sympy,hargup/sympy,mafiya69/sympy,cccfran/sympy,bukzor/sympy,asm666/sympy,atreyv/sympy,garvitr/sympy,MechCoder/sympy,rahuldan/sympy,moble/sympy,oliverlee/sympy,meghana1995/sympy,shikil/sympy,oliverlee/sympy,Sumith1896/sympy,abhiii5459/sympy,vipulroxx/sympy,MechCoder/sympy,sampadsaha5/sympy,sahmed95/sympy,AunShiLord/sympy,Sumith1896/sympy,saurabhjn76/sympy,shipci/sympy,toolforger/sympy,MridulS/sympy,atreyv/sympy,Gadal/sympy,beni55/sympy,shipci/sympy,mcdaniel67/sympy,VaibhavAgarwalVA/sympy,abloomston/sympy,jamesblunt/sympy,liangjiaxing/sympy,kaichogami/sympy,yashsharan/sympy,vipulroxx/sympy,saurabhjn76/sympy,Vishluck/sympy,flacjacket/sympy,sunny94/temp,wyom/sympy,wanglongqi/sympy,souravsingh/sympy,MridulS/sympy,dqnykamp/sympy,Shaswat27/sympy,Designist/sympy,kmacinnis/sympy,Vishluck/sympy,ahhda/sympy,souravsingh/sympy,ga7g08/sympy,farhaanbukhsh/sympy,kaichogami/sympy,kevalds51/sympy,Arafatk/sympy,yashsharan/sympy,drufat/sympy,atsao72/sympy,kumarkrishna/sympy,rahuldan/sympy,farhaanbukhsh/sympy,kaushik94/sympy,sahilshekhawat/sympy,MechCoder/sympy,jbbskinny/sympy,diofant/diofant,skidzo/sympy,jamesblunt/sympy,jamesblunt/sympy,pbrady/sympy,bukzor/sympy,beni55/symp
y,farhaanbukhsh/sympy,atsao72/sympy,lidavidm/sympy,pandeyadarsh/sympy,dqnykamp/sympy,wyom/sympy,mafiya69/sympy,hrashk/sympy,garvitr/sympy,iamutkarshtiwari/sympy,kmacinnis/sympy,yukoba/sympy,souravsingh/sympy,wanglongqi/sympy,skidzo/sympy,skirpichev/omg,Shaswat27/sympy,Titan-C/sympy,saurabhjn76/sympy,atsao72/sympy,pandeyadarsh/sympy,Davidjohnwilson/sympy,asm666/sympy,Mitchkoens/sympy,pbrady/sympy,liangjiaxing/sympy,vipulroxx/sympy,hargup/sympy,garvitr/sympy,yukoba/sympy,jaimahajan1997/sympy,jerli/sympy,Curious72/sympy,Vishluck/sympy,sahmed95/sympy,kmacinnis/sympy,ChristinaZografou/sympy,emon10005/sympy,aktech/sympy,sahilshekhawat/sympy,jbbskinny/sympy,kumarkrishna/sympy,AkademieOlympia/sympy,ga7g08/sympy,beni55/sympy,sahmed95/sympy,pbrady/sympy,meghana1995/sympy,sunny94/temp,cccfran/sympy,sahilshekhawat/sympy,lidavidm/sympy,ChristinaZografou/sympy,ChristinaZografou/sympy,Designist/sympy,chaffra/sympy,mcdaniel67/sympy,kevalds51/sympy,VaibhavAgarwalVA/sympy,maniteja123/sympy,sampadsaha5/sympy,iamutkarshtiwari/sympy,lindsayad/sympy,Gadal/sympy,mafiya69/sympy,cswiercz/sympy,moble/sympy,amitjamadagni/sympy,kumarkrishna/sympy,Titan-C/sympy,sunny94/temp,hrashk/sympy,jbbskinny/sympy,rahuldan/sympy,madan96/sympy,Sumith1896/sympy,abloomston/sympy,Mitchkoens/sympy,abhiii5459/sympy,toolforger/sympy,skidzo/sympy,cccfran/sympy,ahhda/sympy,AunShiLord/sympy,maniteja123/sympy,liangjiaxing/sympy,Davidjohnwilson/sympy,wanglongqi/sympy,madan96/sympy,chaffra/sympy,chaffra/sympy,moble/sympy,Shaswat27/sympy,hargup/sympy,VaibhavAgarwalVA/sympy,Designist/sympy,kevalds51/sympy,Davidjohnwilson/sympy,Arafatk/sympy,hrashk/sympy,grevutiu-gabriel/sympy,maniteja123/sympy,emon10005/sympy,postvakje/sympy,abloomston/sympy,toolforger/sympy,jaimahajan1997/sympy,Gadal/sympy,shipci/sympy,postvakje/sympy
|
3e1f5adf1402d6e9ddd4ef6a08f4a667be950e1d
|
src/ansible/admin.py
|
src/ansible/admin.py
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
|
from django.contrib import admin
from .models import Playbook, Registry, Repository
admin.site.register(Playbook)
admin.site.register(Registry)
admin.site.register(Repository)
admin.site.site_header = 'Ansible Admin'
admin.site.site_title = 'Ansible Admin'
admin.site.index_title = 'Admin Tool'
|
Add ansible app site title
|
Add ansible app site title
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
98f7c1080765e00954d0c38a98ab1bb3e207c059
|
podcoder.py
|
podcoder.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) Ubuntu Podcast
# http://www.ubuntupodcast.org
# See the file "LICENSE" for the full license governing this code.
from podpublish import configuration
from podpublish import encoder
from podpublish import uploader
def main():
config = configuration.Configuration('podcoder.ini')
encoder.audio_encode(config, 'mp3')
encoder.mp3_tag(config)
encoder.mp3_coverart(config)
encoder.audio_encode(config, 'ogg')
encoder.ogg_tag(config)
encoder.ogg_coverart(config)
encoder.png_header(config)
encoder.png_poster(config)
encoder.mkv_encode(config)
uploader.youtube_upload(config)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) Ubuntu Podcast
# http://www.ubuntupodcast.org
# See the file "LICENSE" for the full license governing this code.
from podpublish import configuration
from podpublish import encoder
def main():
    """Encode and tag the podcast media described by ``podcoder.ini``.

    Each output format (mp3, ogg, YouTube video assets) can be disabled
    via the corresponding ``skip`` option in its configuration section.
    """
    config = configuration.Configuration('podcoder.ini')
    if not config.mp3['skip']:
        encoder.audio_encode(config, 'mp3')
        encoder.mp3_tag(config)
        encoder.mp3_coverart(config)
    if not config.ogg['skip']:
        encoder.audio_encode(config, 'ogg')
        encoder.ogg_tag(config)
        encoder.ogg_coverart(config)
    if not config.youtube['skip']:
        # The video upload needs still images plus an MKV container.
        encoder.png_header(config)
        encoder.png_poster(config)
        encoder.mkv_encode(config)

if __name__ == '__main__':
    main()
|
Determine what to encode based in skip options.
|
Determine what to encode based in skip options.
|
Python
|
lgpl-2.1
|
rikai/podpublish
|
1ea51baec10ebc76bfb2be88270df2050a29fbb5
|
http-error-static-pages/5xx-static-html-generator.py
|
http-error-static-pages/5xx-static-html-generator.py
|
import os, errno
# Create build folder if it doesn't exist
try:
os.makedirs('build')
except OSError as e:
if e.errno != errno.EEXIST:
raise
template = open('./5xx.template.html', 'r')
templateString = template.read()
template.close()
# We only use 0-11 according to
# https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#5xx_Server_error
for i in range(12):
numString = str(i) if i >= 10 else '0{}'.format(i)
fileName = './build/5{}.html'.format(numString)
outputFile = open(fileName, 'w')
htmlString = templateString.replace('{ERROR_CODE_PLACEHOLDER}', '5{}'.format(numString))
outputFile.write(htmlString)
outputFile.close()
|
import os, errno
# Create build folder if it doesn't exist
def get_path(relative_path):
    """Resolve *relative_path* against this script's own directory,
    so the generator works regardless of the current working directory."""
    return os.path.join(os.path.dirname(__file__), relative_path)
try:
os.makedirs(get_path('build'))
except OSError as e:
if e.errno != errno.EEXIST:
raise
template = open(get_path('./5xx.template.html'), 'r')
templateString = template.read()
template.close()
# We only use 0-11 according to
# https://en.wikipedia.org/wiki/List_of_HTTP_status_codes#5xx_Server_error
for i in range(12):
numString = str(i) if i >= 10 else '0{}'.format(i)
fileName = './build/5{}.html'.format(numString)
outputFile = open(get_path(fileName), 'w')
htmlString = templateString.replace('{ERROR_CODE_PLACEHOLDER}', '5{}'.format(numString))
outputFile.write(htmlString)
outputFile.close()
|
Make static http error code generator directory agnostic
|
Make static http error code generator directory agnostic
|
Python
|
mit
|
thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-front-end,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-server,thegazelle-ad/gazelle-front-end
|
c7ef5d2c049beba4bd1b12ec2e62a61446746a8a
|
unsubscribe/views.py
|
unsubscribe/views.py
|
from django import http
from mailgun import utils
import models as unsubscribe_model
def unsubscribe_webhook(request):
    """Mailgun webhook endpoint that unsubscribes the POSTed recipient.

    Verifies the Mailgun signature before acting. Returns 403 when the
    signature check fails, otherwise an empty 200 response.
    """
    verified = utils.verify_webhook(
        request.POST.get('token'),
        request.POST.get('timestamp'),
        request.POST.get('signature')
    )
    if not verified:
        return http.HttpResponseForbidden()
    address = request.POST.get('recipient')
    if request.POST.get('mailing-list'):
        # A mailing-list field means the mail was sent from a sequence.
        unsubscribe_model.unsubscribe_from_sequence(address)
    else:
        unsubscribe_model.unsubscribe_completely(address)
    # Bug fix: a Django view must return an HttpResponse; falling off the
    # end returned None. Answer 200 so the webhook sender stops retrying.
    return http.HttpResponse('')
|
from django import http
from django.views.decorators.csrf import csrf_exempt
from mailgun import utils
import models as unsubscribe_model
@csrf_exempt
def unsubscribe_webhook(request):
    """Mailgun webhook endpoint that unsubscribes the POSTed recipient.

    CSRF is exempted because the webhook sender POSTs directly with no
    CSRF token; authenticity is checked via the Mailgun signature instead.
    Returns 403 when the signature check fails, otherwise an empty 200.
    """
    verified = utils.verify_webhook(
        request.POST.get('token'),
        request.POST.get('timestamp'),
        request.POST.get('signature')
    )
    if not verified:
        return http.HttpResponseForbidden()
    address = request.POST.get('recipient')
    try:
        if request.POST.get('mailing-list'):
            # A mailing-list field means the mail was sent from a sequence.
            unsubscribe_model.unsubscribe_from_sequence(address)
        else:
            unsubscribe_model.unsubscribe_user(address)
    except:
        # NOTE(review): a bare ``except: raise`` is a no-op -- every
        # exception is re-raised unchanged; consider removing the try block.
        raise
    return http.HttpResponse('')
|
Return http 200 for webhooks
|
Return http 200 for webhooks
|
Python
|
mit
|
p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc
|
c3479ba8d8486ae9a274367b4601e9e4b6699a1a
|
prj/urls.py
|
prj/urls.py
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
)
|
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
url(r'^djadmin/', include(admin.site.urls)),
# Root
url( r'^$', 'wishlist.views.index' ),
)
|
Add root URL (to serve public wishlist)
|
Add root URL (to serve public wishlist)
|
Python
|
mit
|
cgarvey/django-mywishlist,cgarvey/django-mywishlist
|
465fd8892c177925d8da3080d08676daad866195
|
core/urls.py
|
core/urls.py
|
from django.conf.urls import url
from core import views
urlpatterns = [
url(r'^sensors/$', views.SensorList.as_view()),
url(r'^sensors/(?P<pk>[0-9]+)/$', views.SensorDetail.as_view()),
url(r'^stations/$', views.StationList.as_view()),
url(r'^stations/(?P<pk>[0-9]+)/$', views.StationDetail.as_view()),
url(r'^readings/$', views.ReadingList.as_view()),
url(r'^readings/(?P<pk>[0-9]+)/$', views.ReadingDetail.as_view()),
]
|
from django.conf.urls import url
from core import views
urlpatterns = [
url(r'^$', views.api_root),
url(r'^sensors/$', views.SensorList.as_view(), name='sensor-list'),
url(r'^sensors/(?P<pk>[0-9]+)/$', views.SensorDetail.as_view(), name='sensor-detail'),
url(r'^sensors/(?P<pk>[0-9]+)/data/$', views.SensorData.as_view(), name='sensor-data'),
url(r'^stations/$', views.StationList.as_view(), name='station-list'),
url(r'^stations/(?P<pk>[0-9]+)/$', views.StationDetail.as_view(), name='station-detail'),
url(r'^readings/$', views.ReadingList.as_view(), name='reading-list'),
url(r'^readings/(?P<pk>[0-9]+)/$', views.ReadingDetail.as_view(), name='reading-detail'),
url(r'^messages/$', views.MessageList.as_view(), name='message-list'),
url(r'^messages/(?P<pk>[0-9]+)/$', views.MessageDetail.as_view(), name='message-detail'),
url(r'^users/$', views.UserList.as_view(), name='user-list'),
url(r'^users/(?P<pk>[0-9]+)/$', views.UserDetail.as_view(), name='user-detail'),
]
|
Add URLs for previous views.
|
Add URLs for previous views.
|
Python
|
apache-2.0
|
qubs/climate-data-api,qubs/data-centre,qubs/climate-data-api,qubs/data-centre
|
8255fd2fdee3a7d6b96859eb7b8d1297431c730b
|
utils/00-cinspect.py
|
utils/00-cinspect.py
|
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
    """Resolve the source file for *obj*, falling back to cinspect.

    IPython's own ``find_file`` returns None for objects it cannot
    locate (e.g. C-implemented ones); in that case try cinspect's
    ``getfile`` instead.
    """
    fname = old_find_file(obj)
    if fname is None:
        try:
            fname = cast_unicode(getfile(obj))
        except:
            # getfile raises when no source can be located; keep the
            # None result rather than propagate the error.
            pass
    return fname
# Monkey-patch IPython's inspector so ``obj?`` uses the fallback above.
OI.find_file = patch_find_file
|
""" A startup script for IPython to patch it to 'inspect' using cinspect. """
# Place this file in ~/.ipython/<PROFILE_DIR>/startup to patch your IPython to
# use cinspect for the code inspection.
import inspect
from cinspect import getsource, getfile
import IPython.core.oinspect as OI
from IPython.utils.py3compat import cast_unicode
old_find_file = OI.find_file
old_getsource = inspect.getsource
inspect.getsource = getsource
def patch_find_file(obj):
fname = old_find_file(obj)
if fname is None:
try:
fname = cast_unicode(getfile(obj))
except:
pass
return fname
OI.find_file = patch_find_file
|
Add a note for the utility script.
|
Add a note for the utility script.
|
Python
|
bsd-3-clause
|
punchagan/cinspect,punchagan/cinspect
|
41c7d60556dff4be1c5f39cf694470d3af4869f0
|
qual/iso.py
|
qual/iso.py
|
from datetime import date, timedelta
def iso_to_gregorian(year, week, weekday):
jan_8 = date(year, 1, 8).isocalendar()
offset = (week - jan_8[1]) * 7 + (weekday - jan_8[2])
return date(year, 1, 8) + timedelta(days=offset)
|
from datetime import date, timedelta
def iso_to_gregorian(year, week, weekday):
    """Map an ISO (year, week, weekday) triple onto a Gregorian date.

    Raises ValueError when the week number does not exist in the given
    ISO year.
    """
    if not 1 <= week <= 54:
        raise ValueError("Week number %d is invalid for an ISO calendar." % (week, ))
    # January 8 is a fixed anchor: its ISO week and weekday are known,
    # so the target date is a plain day-offset from it.
    anchor = date(year, 1, 8)
    anchor_cal = anchor.isocalendar()
    delta = (week - anchor_cal[1]) * 7 + (weekday - anchor_cal[2])
    result = anchor + timedelta(days=delta)
    # A nominally in-range week may still overflow into the next ISO year.
    if result.isocalendar()[0] != year:
        raise ValueError("Week number %d is invalid for ISO year %d." % (week, year))
    return result
|
Add checks for a reasonable week number.
|
Add checks for a reasonable week number.
|
Python
|
apache-2.0
|
jwg4/calexicon,jwg4/qual
|
09dd2e16ef29b6c79ee344a55bea5bd0e59c7a59
|
fireplace/cards/gvg/shaman.py
|
fireplace/cards/gvg/shaman.py
|
from ..utils import *
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
|
from ..utils import *
##
# Minions
# Vitality Totem
class GVG_039:
OWN_TURN_END = [Heal(FRIENDLY_HERO, 4)]
# Siltfin Spiritwalker
class GVG_040:
def OWN_MINION_DESTROY(self, minion):
if minion.race == Race.MURLOC:
return [Draw(CONTROLLER, 1)]
##
# Spells
# Ancestor's Call
class GVG_029:
action = [
ForcePlay(CONTROLLER, RANDOM(CONTROLLER_HAND + MINION)),
ForcePlay(OPPONENT, RANDOM(OPPONENT_HAND + MINION)),
]
# Crackle
class GVG_038:
def action(self, target):
return [Hit(TARGET, random.randint(3, 6))]
##
# Weapons
# Powermace
class GVG_036:
action = [Buff(RANDOM(FRIENDLY_MINIONS + MECH), "GVG_036e")]
|
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
|
Implement Powermace, Crackle, Vitality Totem and Siltfin Spiritwalker
|
Python
|
agpl-3.0
|
oftc-ftw/fireplace,Meerkov/fireplace,Ragowit/fireplace,butozerca/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,amw2104/fireplace,oftc-ftw/fireplace,NightKev/fireplace,butozerca/fireplace,amw2104/fireplace,beheh/fireplace,liujimj/fireplace,jleclanche/fireplace,Ragowit/fireplace,Meerkov/fireplace,liujimj/fireplace
|
0d37a94593a7749dca4b2553334f1b67c946d3f8
|
ambassador/tests/t_lua_scripts.py
|
ambassador/tests/t_lua_scripts.py
|
from kat.harness import Query
from abstract_tests import AmbassadorTest, ServiceType, HTTP
class LuaTest(AmbassadorTest):
target: ServiceType
def init(self):
self.target = HTTP()
def manifests(self) -> str:
return super().manifests() + self.format('''
---
apiVersion: getambassador.io/v1
kind: Module
metadata:
name: ambassador
spec:
ambassador_id: {self.ambassador_id}
config:
lua_scripts: |
function envoy_on_response(response_handle)
response_handle: headers():add("Lua-Scripts-Enabled", "Processed")
end
---
apiVersion: getambassador.io/v1
kind: Mapping
metadata:
name: lua-target-mapping
spec:
ambassador_id: {self.ambassador_id}
prefix: /target/
service: {self.target.path.fqdn}
''')
def queries(self):
yield Query(self.url("target/"))
def check(self):
for r in self.results:
assert r.headers.get('Lua-Scripts-Enabled', None) == ['Processed']
|
from kat.harness import Query
from abstract_tests import AmbassadorTest, ServiceType, HTTP
class LuaTest(AmbassadorTest):
target: ServiceType
def init(self):
self.target = HTTP()
self.env = ["LUA_SCRIPTS_ENABLED=Processed"]
def manifests(self) -> str:
return super().manifests() + self.format('''
---
apiVersion: getambassador.io/v1
kind: Module
metadata:
name: ambassador
spec:
ambassador_id: {self.ambassador_id}
config:
lua_scripts: |
function envoy_on_response(response_handle)
response_handle: headers():add("Lua-Scripts-Enabled", "${LUA_SCRIPTS_ENABLED}")
end
---
apiVersion: getambassador.io/v1
kind: Mapping
metadata:
name: lua-target-mapping
spec:
ambassador_id: {self.ambassador_id}
prefix: /target/
service: {self.target.path.fqdn}
''')
def queries(self):
yield Query(self.url("target/"))
def check(self):
for r in self.results:
assert r.headers.get('Lua-Scripts-Enabled', None) == ['Processed']
|
Update LUA test to perform interpolation
|
Update LUA test to perform interpolation
|
Python
|
apache-2.0
|
datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador,datawire/ambassador
|
5c7c1155a6bfe0e1dda8def877bcef9d8c528ee3
|
vaux/api/__init__.py
|
vaux/api/__init__.py
|
import os
from flask import Flask, abort, request
from flask.ext import restful
from vaux.storage import LibreDB
from datetime import datetime
from werkzeug import secure_filename
from cors import crossdomain
app = Flask(__name__)
database = LibreDB('../data', 'localhost', 28015, 'docliber')
from peer import PeerResource, PeerInstance
from document import DocumentResource, DocumentInstance
api = restful.Api(app)
api.decorators=[crossdomain(origin='*')]
api.add_resource(PeerResource, '/peers/')
api.add_resource(PeerInstance, '/peers/<string:id>/')
api.add_resource(DocumentResource, '/documents/')
api.add_resource(DocumentInstance, '/documents/<string:id>/')
|
import os
import ConfigParser
from flask import Flask, abort, request
from flask.ext import restful
from vaux.storage import LibreDB
from datetime import datetime
from werkzeug import secure_filename
from cors import crossdomain
app = Flask(__name__)
config = ConfigParser.SafeConfigParser()
config.read('/etc/vaux.ini')
database = LibreDB(
config.get('data', 'path'),
config.get('database', 'host'),
config.getint('database', 'port'),
config.get('database', 'database'))
from peer import PeerResource, PeerInstance
from document import DocumentResource, DocumentInstance
api = restful.Api(app)
api.decorators=[crossdomain(origin='*')]
api.add_resource(PeerResource, '/peers/')
api.add_resource(PeerInstance, '/peers/<string:id>/')
api.add_resource(DocumentResource, '/documents/')
api.add_resource(DocumentInstance, '/documents/<string:id>/')
|
Read in the database and data options from a config file
|
Read in the database and data options from a config file
I hope this works.
|
Python
|
mit
|
VauxIo/core
|
0948eced6cd551df7f136614b136378e9864b4eb
|
forms.py
|
forms.py
|
from flask import flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, Length
def flash_errors(form):
    """ Universal interface to handle form error.

    Handles form error with the help of flash message
    """
    # One flash message per failed validator, labelled with the field's
    # human-readable label text.
    for field, errors in form.errors.items():
        for error in errors:
            flash(u'Error in the %s field - %s' % (
                getattr(form, field).label.text,
                error
            ))
class Login(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(max=16)])
password = PasswordField('Password',
validators=[DataRequired()])
|
from flask import flash
from flask_wtf import FlaskForm
from wtforms import StringField, PasswordField
from wtforms.validators import DataRequired, Email, Length, EqualTo
def flash_errors(form):
""" Universal interface to handle form error.
Handles form error with the help of flash message
"""
for field, errors in form.errors.items():
for error in errors:
flash(u'Error in the %s field - %s' % (
getattr(form, field).label.text,
error
))
class Login(FlaskForm):
username = StringField('Username',
validators=[DataRequired(), Length(max=16)])
password = PasswordField('Password',
validators=[DataRequired()])
class AddEmployee(FlaskForm):
    """Form used to register a new employee account."""
    username = StringField(
        'Username',
        validators=[
            DataRequired(),
            Length(max=16)
        ]
    )
    fullname = StringField(
        'Full name',
        validators=[DataRequired()]
    )
    nip = StringField(
        'Nip',
        validators=[DataRequired()]
    )
    # The password must be typed twice; EqualTo enforces that both
    # entries match before the form validates.
    password = PasswordField(
        'password',
        validators=[
            DataRequired(),
            EqualTo('verifyPassword', message='Password must match.')
        ]
    )
    verifyPassword = PasswordField('Verify password')
|
Add input rule for adding employee
|
Add input rule for adding employee
|
Python
|
mit
|
openedoo/module_employee,openedoo/module_employee,openedoo/module_employee
|
848384b0283538556a231a32e4128d52ba9e1407
|
direlog.py
|
direlog.py
|
#!/usr/bin/env python
# encoding: utf-8
import sys
import re
import argparse
import fileinput
from argparse import RawDescriptionHelpFormatter
from patterns import pre_patterns
def prepare(infile, outfile=sys.stdout):
    """Apply each (pattern, replacement) pair from ``pre_patterns`` to
    every line of *infile* and write the rewritten line to *outfile*.

    :param infile: iterable of input lines
    :param outfile: writable stream (defaults to stdout)
    """
    try:
        for line in infile:
            result = line
            for pattern in pre_patterns:
                # Bug fix: the fourth positional argument of re.sub() is
                # ``count``, not ``flags``. Passing re.VERBOSE there capped
                # substitutions at 64 per line and never enabled verbose
                # pattern syntax; it must be passed as flags=.
                result = re.sub(pattern[0], pattern[1], result, flags=re.VERBOSE)
            outfile.write(result)
    except KeyboardInterrupt:
        # Allow Ctrl-C to end a streaming run quietly.
        pass
def main():
    """Parse command-line arguments and stream the input through prepare().

    With no file arguments fileinput falls back to reading stdin, e.g.:
        cat error_log | tail -n 1000 | ./direlog.py
    """
    parser = argparse.ArgumentParser(description=\
        """
        Parse file[s]\n\n
        example: cat error_log | tail -n 1000 | ./direlog.py
        """, formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('file', nargs='*', default=[],
                        help='file[s] to do some work')
    parser.add_argument('-s', '--stat', action='store_const', const=True,
                        help='get statistics')
    args = parser.parse_args()
    # Bug fix: fileinput.input() takes the list of input file names
    # (args.file); args.stat is the boolean -s flag and is not a valid
    # file list, so any invocation crashed inside fileinput.
    input_stream = fileinput.input(args.file)
    prepare(input_stream)

if __name__ == '__main__':
    main()
|
#!/usr/bin/env python
# encoding: utf-8
import sys
import re
import argparse
import fileinput
from argparse import RawDescriptionHelpFormatter
from patterns import pre_patterns
def prepare(infile, outfile=sys.stdout):
    """Apply each (pattern, replacement) pair from ``pre_patterns`` to
    every line of *infile* and write the rewritten line to *outfile*.

    :param infile: iterable of input lines
    :param outfile: writable stream (defaults to stdout)
    """
    try:
        for line in infile:
            result = line
            for pattern in pre_patterns:
                # Bug fix: the fourth positional argument of re.sub() is
                # ``count``, not ``flags``. Passing re.VERBOSE there capped
                # substitutions at 64 per line and never enabled verbose
                # pattern syntax; it must be passed as flags=.
                result = re.sub(pattern[0], pattern[1], result, flags=re.VERBOSE)
            outfile.write(result)
    except KeyboardInterrupt:
        # Allow Ctrl-C to end a streaming run quietly.
        pass
def main():
    """Parse command-line arguments and stream the input through prepare()."""
    parser = argparse.ArgumentParser(description=\
        """
        Parse file[s]\n\n
        examlpe: cat error_log | tail -n 1000 | ./direlog.py
        """, formatter_class=RawDescriptionHelpFormatter)
    parser.add_argument('file', nargs='*', default=[],
                        help='file[s] to do some work')
    args = parser.parse_args()
    # fileinput falls back to reading stdin when the file list is empty.
    input_stream = fileinput.input(args.file)
    prepare(input_stream)
    pass
if __name__ == '__main__':
    main()
|
Fix args.file and remove stat from argparse
|
Fix args.file and remove stat from argparse
|
Python
|
mit
|
abcdw/direlog,abcdw/direlog
|
65d9fcaecaa51d61d330b911342d372c277b1783
|
openfisca_web_site/templates/tree/graphe-formules.py
|
openfisca_web_site/templates/tree/graphe-formules.py
|
# -*- coding: utf-8 -*-
# OpenFisca -- A versatile microsimulation software
# By: OpenFisca Team <[email protected]>
#
# Copyright (C) 2011, 2012, 2013, 2014 OpenFisca Team
# https://github.com/openfisca
#
# This file is part of OpenFisca.
#
# OpenFisca is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# OpenFisca is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openfisca_web_site import model, urls
class Node(model.Redirect):
    """Site node that redirects to the socio-fiscal formula graph example."""
    # Page title (French site content -- part of runtime output, not changed).
    title = u"Graphe des formules socio-fiscales"

    def get_location(self, ctx):
        """Return the URL this node redirects to."""
        return urls.get_url(ctx, 'exemples', 'graphe-formules')
|
Add missing redirection in empty file.
|
Add missing redirection in empty file.
|
Python
|
agpl-3.0
|
openfisca/openfisca-web-site,openfisca/openfisca-web-site,openfisca/openfisca-web-site,openfisca/openfisca-web-site
|
|
ef89d3608b9ab54aef105528f2c15fa9cc437bcd
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(failfast=False).run_tests(['tests'])
sys.exit(failures)
if __name__ == "__main__":
runtests()
|
#!/usr/bin/env python
import sys
from os.path import abspath, dirname
from django.conf import settings
import django
sys.path.insert(0, abspath(dirname(__file__)))
if not settings.configured:
settings.configure(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.auth',
'django.contrib.admin',
'email_log',
'email_log.tests',
),
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
EMAIL_LOG_BACKEND = 'django.core.mail.backends.locmem.EmailBackend',
ROOT_URLCONF='email_log.tests.urls',
)
def runtests():
    """Run the email_log test suite and exit with the failure count."""
    # Django >= 1.7 requires explicit app-registry setup before testing.
    if hasattr(django, 'setup'):
        django.setup()
    try:
        # Django >= 1.6: discovery-based runner, dotted-path test labels.
        from django.test.runner import DiscoverRunner
        runner_class = DiscoverRunner
        test_args = ['email_log.tests']
    except ImportError:
        # Older Django: the legacy suite runner with app-label arguments.
        from django.test.simple import DjangoTestSuiteRunner
        runner_class = DjangoTestSuiteRunner
        test_args = ['tests']
    failures = runner_class(failfast=False).run_tests(test_args)
    sys.exit(failures)
if __name__ == "__main__":
runtests()
|
Fix tests for Django 1.7
|
Fix tests for Django 1.7
|
Python
|
mit
|
treyhunner/django-email-log,treyhunner/django-email-log
|
da39bc268e3fe94af348690262fc116e3e0b2c9c
|
attachments/admin.py
|
attachments/admin.py
|
from attachments.models import Attachment
from django.contrib.contenttypes import generic
class AttachmentInlines(generic.GenericStackedInline):
model = Attachment
extra = 1
|
from attachments.models import Attachment
from django.contrib.contenttypes import admin
class AttachmentInlines(admin.GenericStackedInline):
model = Attachment
extra = 1
|
Fix deprecated modules for content types
|
Fix deprecated modules for content types
|
Python
|
bsd-3-clause
|
leotrubach/django-attachments,leotrubach/django-attachments
|
969334fec0822a30d1e5f10a458f79556053836a
|
fabfile.py
|
fabfile.py
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test')
def deploy():
local('git push origin master')
with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
print(green('Pulling latest revision...'))
run('git pull origin master')
print(green('Installing dependencies...'))
run('pip install -qr requirements.txt')
print(green('Creating database tables...'))
run('python sana_builder/manage.py syncdb --noinput')
print(green('Importing fixtures...'))
run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
print(green('Collecting static files...'))
run('python sana_builder/manage.py collectstatic --noinput')
print(green('Restarting gunicorn...'))
run('supervisorctl restart gunicorn')
|
from fabric.api import *
from fabric.colors import *
env.colorize_errors = True
env.hosts = ['sanaprotocolbuilder.me']
env.user = 'root'
env.virtualenv = 'source /usr/local/bin/virtualenvwrapper.sh'
env.project_root = '/opt/sana.protocol_builder'
def test():
local('python sana_builder/manage.py syncdb')
local('python sana_builder/manage.py test')
def update_host():
    """Update the remote deployment to the latest master revision.

    Runs inside the project virtualenv on the remote host: pulls master,
    installs requirements, migrates the database, loads fixtures,
    collects static files, and restarts the gunicorn service.
    """
    with cd(env.project_root), prefix(env.virtualenv), prefix('workon sana_protocol_builder'):
        print(green('Pulling latest revision...'))
        run('git pull origin master')
        print(green('Installing dependencies...'))
        run('pip install -qr requirements.txt')
        print(green('Creating database tables...'))
        run('python sana_builder/manage.py syncdb --noinput')
        print(green('Importing fixtures...'))
        run('python sana_builder/manage.py loaddata sana_builder/fixtures/pages.json')
        print(green('Collecting static files...'))
        run('python sana_builder/manage.py collectstatic --noinput')
        print(green('Restarting gunicorn...'))
        run('supervisorctl restart gunicorn')
def travis_deploy():
update_host()
def local_deploy():
local('git push origin master')
update_host()
|
Update deploy script to support Travis.
|
Update deploy script to support Travis.
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
8e8370a76c67d7905c73bcb808f89e3cd4b994a3
|
runtests.py
|
runtests.py
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'permissions',
'permissions.tests',
],
MIDDLEWARE_CLASSES=[],
ROOT_URLCONF='permissions.tests.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}],
TEST_RUNNER='django.test.runner.DiscoverRunner',
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
#!/usr/bin/env python
import django
from django.conf import settings
from django.core.management import call_command
settings.configure(
DATABASES={
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
},
ALLOWED_HOSTS=[
'testserver',
],
INSTALLED_APPS=[
'django.contrib.auth',
'django.contrib.contenttypes',
'permissions',
'permissions.tests',
],
MIDDLEWARE_CLASSES=[],
PERMISSIONS={
'allow_staff': False,
},
ROOT_URLCONF='permissions.tests.urls',
TEMPLATES=[{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'APP_DIRS': True,
}],
TEST_RUNNER='django.test.runner.DiscoverRunner',
)
if django.VERSION[:2] >= (1, 7):
from django import setup
else:
setup = lambda: None
setup()
call_command("test")
|
Add PERMISSIONS setting to test settings
|
Add PERMISSIONS setting to test settings
|
Python
|
mit
|
wylee/django-perms,PSU-OIT-ARC/django-perms
|
40a2faa67a5852ae347ab62f6239edfa6c666a1e
|
docs/conf.py
|
docs/conf.py
|
source_suffix = '.rst'
master_doc = 'index'
html_theme = 'alabaster'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
]
|
import sys
sys.path.append('..')
source_suffix = '.rst'
master_doc = 'index'
html_theme = 'alabaster'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
]
|
Add root path to sys.path
|
Add root path to sys.path
|
Python
|
mit
|
bureaucratic-labs/yargy
|
926731b05f22566e98a02737d673cca3fc0b28ec
|
docs/conf.py
|
docs/conf.py
|
# -*- coding: utf-8 -*-
### General settings
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Firebase Admin SDK for PHP'
author = u'Jérôme Gamez'
copyright = u'Jérôme Gamez'
version = u'4.x'
html_title = u'Firebase Admin SDK for PHP Documentation'
html_short_title = u'Firebase Admin SDK for PHP'
exclude_patterns = ['_build']
html_static_path = ['_static']
suppress_warnings = ['image.nonlocal_uri']
### Theme settings
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
### Syntax Highlighting
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
### Integrations
html_context = {
"display_github": True,
"github_user": "kreait",
"github_repo": "firebase-php",
"github_version": "master",
"conf_py_path": "/docs/",
"source_suffix": ".rst",
}
|
# -*- coding: utf-8 -*-
### General settings
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Firebase Admin SDK for PHP'
author = u'Jérôme Gamez'
copyright = u'Jérôme Gamez'
version = u'4.x'
html_title = u'Firebase Admin SDK for PHP Documentation'
html_short_title = u'Firebase Admin SDK for PHP'
exclude_patterns = ['_build']
html_static_path = ['_static']
suppress_warnings = ['image.nonlocal_uri']
### Theme settings
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'canonical_url': 'https://firebase-php.readthedocs.io',
'analytics_id': 'UA-82654714-3'
}
### Syntax Highlighting
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
### Integrations
html_context = {
"display_github": True,
"github_user": "kreait",
"github_repo": "firebase-php",
"github_version": "",
"conf_py_path": "/docs/",
"source_suffix": ".rst",
}
|
Fix "Edit on GitHub" links
|
Fix "Edit on GitHub" links
Using "master" seems to mess it up, see
https://github.com/readthedocs/readthedocs.org/issues/5518
|
Python
|
mit
|
kreait/firebase-php
|
7aaa385da78bef57c8b6339f6db04044ace08807
|
api/taxonomies/serializers.py
|
api/taxonomies/serializers.py
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, JSONAPIListField
class TaxonomyField(ser.Field):
def to_representation(self, obj):
if obj is not None:
return {'id': obj._id,
'text': obj.text}
return None
def to_internal_value(self, data):
return data
class TaxonomySerializer(JSONAPISerializer):
filterable_fields = frozenset([
'text',
'parents',
'id'
])
id = ser.CharField(source='_id', required=True)
text = ser.CharField(max_length=200)
parents = JSONAPIListField(child=TaxonomyField())
links = LinksField({
'parents': 'get_parent_urls',
'self': 'get_absolute_url',
})
def get_parent_urls(self, obj):
return [p.get_absolute_url() for p in obj.parents]
def get_absolute_url(self, obj):
return obj.get_absolute_url()
class Meta:
type_ = 'taxonomies'
|
from rest_framework import serializers as ser
from api.base.serializers import JSONAPISerializer, LinksField, JSONAPIListField
class TaxonomyField(ser.Field):
def to_representation(self, obj):
if obj is not None:
return {'id': obj._id,
'text': obj.text}
return None
def to_internal_value(self, data):
return data
class TaxonomySerializer(JSONAPISerializer):
filterable_fields = frozenset([
'text',
'parents',
'id'
])
id = ser.CharField(source='_id', required=True)
text = ser.CharField(max_length=200)
parents = JSONAPIListField(child=TaxonomyField())
child_count = ser.IntegerField()
links = LinksField({
'parents': 'get_parent_urls',
'self': 'get_absolute_url',
})
def get_parent_urls(self, obj):
return [p.get_absolute_url() for p in obj.parents]
def get_absolute_url(self, obj):
return obj.get_absolute_url()
class Meta:
type_ = 'taxonomies'
|
Add child_count taken from new Subject property
|
Add child_count taken from new Subject property
|
Python
|
apache-2.0
|
adlius/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,rdhyee/osf.io,sloria/osf.io,brianjgeiger/osf.io,sloria/osf.io,binoculars/osf.io,mattclark/osf.io,saradbowman/osf.io,aaxelb/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,binoculars/osf.io,aaxelb/osf.io,rdhyee/osf.io,alexschiller/osf.io,HalcyonChimera/osf.io,mfraezz/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,acshi/osf.io,cslzchen/osf.io,mattclark/osf.io,crcresearch/osf.io,TomBaxter/osf.io,acshi/osf.io,samchrisinger/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,adlius/osf.io,acshi/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,felliott/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,felliott/osf.io,mattclark/osf.io,saradbowman/osf.io,erinspace/osf.io,hmoco/osf.io,emetsger/osf.io,alexschiller/osf.io,icereval/osf.io,mfraezz/osf.io,TomBaxter/osf.io,HalcyonChimera/osf.io,hmoco/osf.io,chrisseto/osf.io,erinspace/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,Nesiehr/osf.io,acshi/osf.io,cslzchen/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,emetsger/osf.io,adlius/osf.io,caneruguz/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,mluo613/osf.io,Nesiehr/osf.io,sloria/osf.io,cslzchen/osf.io,icereval/osf.io,caseyrollins/osf.io,binoculars/osf.io,aaxelb/osf.io,caseyrollins/osf.io,cslzchen/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,acshi/osf.io,brianjgeiger/osf.io,adlius/osf.io,caneruguz/osf.io,pattisdr/osf.io,cwisecarver/osf.io,mluo613/osf.io,chrisseto/osf.io,leb2dg/osf.io,mluo613/osf.io,alexschiller/osf.io,samchrisinger/osf.io,chennan47/osf.io,caneruguz/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,rdhyee/osf.io,chrisseto/osf.io,mluo613/osf.io,Johnetordoff/osf.io,rdhyee/osf.io,felliott/osf.io,mfraezz/osf.io,aaxelb/osf.io,chennan47/osf.io,crcresearch/osf.io,alexschiller/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,emetsge
r/osf.io,leb2dg/osf.io,emetsger/osf.io,cwisecarver/osf.io,alexschiller/osf.io,Nesiehr/osf.io,hmoco/osf.io,hmoco/osf.io,felliott/osf.io,chennan47/osf.io,samchrisinger/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,TomBaxter/osf.io,baylee-d/osf.io
|
6c349621dd3331bf92f803d2d66c96868f8e94c6
|
src/geelweb/django/editos/runtests.py
|
src/geelweb/django/editos/runtests.py
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
test_dir = os.path.dirname(__file__)
sys.path.insert(0, test_dir)
from django.test.utils import get_runner
from django.conf import settings
def runtests():
TestRunner = get_runner(settings)
test_runner = TestRunner(verbosity=1, interactive=True)
failures = test_runner.run_tests(['geelweb.django.editos'])
sys.exit(bool(failures))
|
import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'test_settings'
test_dir = os.path.dirname(__file__)
sys.path.insert(0, test_dir)
import django
from django.test.utils import get_runner
from django.conf import settings
def runtests():
if django.VERSION[0] == 1 and django.VERSION[1] < 7:
from django.test.utils import setup_test_environment
setup_test_environment()
if django.VERSION[0] == 1 and django.VERSION[1] >= 7:
django.setup()
TestRunner = get_runner(settings)
test_runner = TestRunner()
failures = test_runner.run_tests(['geelweb.django.editos'])
sys.exit(bool(failures))
|
Upgrade to test using django 1.7 and 1.8
|
Upgrade to test using django 1.7 and 1.8
|
Python
|
mit
|
geelweb/django-editos,geelweb/django-editos
|
de441445dbdade4d937783626f1beeb9f439ee11
|
helpers.py
|
helpers.py
|
import feedparser
import datetime
from .models import RssEntry
class RssSyncHelper(object):
def __init__(self, feed):
self.feed = feed
def save_entry(self, result):
pub_date = result.updated_parsed
published = datetime.date(pub_date[0], pub_date[1], pub_date[2])
return RssEntry.objects.get_or_create(
title=result.title,
feed=self.feed,
summary=result.content[0]['value'],
link=result.link,
date=published,
)
def sync(self):
feed = feedparser.parse(self.feed.url)
for entry in feed.entries:
self.save_entry(entry)
def sync_wordpress_paginated(self, page):
"""Sync a Wordpress paginated feed"""
feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))
for entry in feed.entries:
self.save_entry(entry)
|
import feedparser
import datetime
from .models import RssEntry
def add_custom_acceptable_elements(elements):
"""
Add custom acceptable elements so iframes and other potential video
elements will get synched.
"""
elements += list(feedparser._HTMLSanitizer.acceptable_elements)
feedparser._HTMLSanitizer.acceptable_elements = set(elements)
custom_acceptable_elements = ['iframe', 'embed', 'object',]
add_custom_acceptable_elements(custom_acceptable_elements)
class RssSyncHelper(object):
def __init__(self, feed):
self.feed = feed
def save_entry(self, result):
pub_date = result.updated_parsed
published = datetime.date(pub_date[0], pub_date[1], pub_date[2])
return RssEntry.objects.get_or_create(
title=result.title,
feed=self.feed,
summary=result.content[0]['value'],
link=result.link,
date=published,
)
def sync(self):
feed = feedparser.parse(self.feed.url)
for entry in feed.entries:
self.save_entry(entry)
def sync_wordpress_paginated(self, page):
"""Sync a Wordpress paginated feed"""
feed = feedparser.parse('%s&paged=%d' % (self.feed.url, page))
for entry in feed.entries:
self.save_entry(entry)
|
Allow iframes to be synched
|
Allow iframes to be synched
|
Python
|
bsd-3-clause
|
ebrelsford/django-rsssync
|
fe5eb7db52725f8d136cbeba4341f5c3a33cf199
|
tensorflow_model_optimization/python/core/api/quantization/keras/quantizers/__init__.py
|
tensorflow_model_optimization/python/core/api/quantization/keras/quantizers/__init__.py
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module containing Quantization abstraction and quantizers."""
# quantize with custom quantization parameterization or implementation, or
# handle custom Keras layers.
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import LastValueQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import MovingAverageQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import Quantizer
|
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module containing Quantization abstraction and quantizers."""
# quantize with custom quantization parameterization or implementation, or
# handle custom Keras layers.
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import AllValuesQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import LastValueQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import MovingAverageQuantizer
from tensorflow_model_optimization.python.core.quantization.keras.quantizers import Quantizer
|
Include AllValuesQuantizer in external APIs
|
Include AllValuesQuantizer in external APIs
PiperOrigin-RevId: 320104499
|
Python
|
apache-2.0
|
tensorflow/model-optimization,tensorflow/model-optimization
|
589e2df8c9af8ce8102904c9cfebbf87ee2df744
|
ckanext/orgdashboards/tests/helpers.py
|
ckanext/orgdashboards/tests/helpers.py
|
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
|
''' Helper methods for tests '''
import string
import random
from ckan.tests import factories
def create_mock_data(**kwargs):
mock_data = {}
mock_data['organization'] = factories.Organization()
mock_data['organization_name'] = mock_data['organization']['name']
mock_data['organization_id'] = mock_data['organization']['id']
mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
mock_data['dataset_name'] = mock_data['dataset']['name']
mock_data['package_id'] = mock_data['dataset']['id']
mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
mock_data['resource_name'] = mock_data['resource']['name']
mock_data['resource_id'] = mock_data['resource']['id']
mock_data['resource_view'] = factories.ResourceView(
resource_id=mock_data['resource_id'])
mock_data['resource_view_title'] = mock_data['resource_view']['title']
mock_data['context'] = {
'user': factories._get_action_user_name(kwargs)
}
return mock_data
def id_generator(size=6, chars=string.ascii_lowercase + string.digits):
''' Create random id which is a combination of letters and numbers '''
return ''.join(random.choice(chars) for _ in range(size))
|
Add function for generating random id
|
Add function for generating random id
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards
|
4efbc3278c89355df9076f3caccf1032aae9d930
|
setup.py
|
setup.py
|
"""colorise module setup script for distribution."""
from setuptools import setup
import os
def get_version(filename):
with open(filename) as fh:
for line in fh:
if line.startswith('__version__'):
return line.split('=')[-1].strip()[1:-1]
setup(
name='colorise',
version=get_version(os.path.join('colorise', '__init__.py')),
author='Alexander Asp Bock',
author_email='[email protected]',
platforms='Platform independent',
description=('Easily print colored text to the console'),
license='BSD 3-Clause License',
keywords='text, color, colorise, colorize, console, terminal',
packages=['colorise', 'colorise.win', 'colorise.nix'],
package_data={'colorise': ['tests', 'examples']},
url='https://github.com/MisanthropicBit/colorise',
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Utilities',
'Topic :: Terminals',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: PyPy'
]
)
|
"""colorise module setup script for distribution."""
from setuptools import setup
import os
def get_version(filename):
with open(filename) as fh:
for line in fh:
if line.startswith('__version__'):
return line.split('=')[-1].strip()[1:-1]
setup(
name='colorise',
version=get_version(os.path.join('colorise', '__init__.py')),
author='Alexander Asp Bock',
author_email='[email protected]',
platforms='Platform independent',
description=('Easily print colored text to the console'),
license='BSD 3-Clause License',
keywords='text, color, colorise, colorize, console, terminal',
packages=['colorise', 'colorise.win', 'colorise.nix'],
package_data={'colorise': ['tests', 'examples']},
url='https://github.com/MisanthropicBit/colorise',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Topic :: Utilities',
'Topic :: Terminals',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: PyPy'
]
)
|
Make twine check happy
|
Make twine check happy [ci skip]
|
Python
|
bsd-3-clause
|
MisanthropicBit/colorise
|
ff05e249e2ad2d479f6d630049f8693d0ef661e9
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
import sys, os
def read(*path):
"""
Read and return content from ``path``
"""
f = open(
os.path.join(
os.path.dirname(__file__),
*path
),
'r'
)
try:
return f.read().decode('UTF-8')
finally:
f.close()
setup(
name='flea',
version='2',
description="Test WSGI applications using lxml",
long_description=read('README'),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Testing',
],
keywords='',
author='Oliver Cope',
author_email='[email protected]',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'pesto ==12, ==13',
'lxml',
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
from setuptools import setup, find_packages
import sys, os
def read(*path):
"""
Read and return content from ``path``
"""
f = open(
os.path.join(
os.path.dirname(__file__),
*path
),
'r'
)
try:
return f.read().decode('UTF-8')
finally:
f.close()
setup(
name='flea',
version='2',
description="Test WSGI applications using lxml",
long_description=read('README'),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Testing',
],
keywords='',
author='Oliver Cope',
author_email='[email protected]',
url='',
license='BSD',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'pesto ==12, ==13, ==14',
'lxml',
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
Update for compatibility with pesto==14
|
Update for compatibility with pesto==14
Ignore-this: 7a596766eb3deedefb2c9ba36ce5ecfc
darcs-hash:20100419210717-8e352-6286dc43ea83998229ccdcd619ff1f4990ff82ee.gz
|
Python
|
mit
|
Singletoned/testino
|
8145850506ba238682f189403f4545183db9bec7
|
setup.py
|
setup.py
|
from setuptools import setup
import os
PROJECT_ROOT, _ = os.path.split(__file__)
DESCRIPTION = open( os.path.join(PROJECT_ROOT, "README") ).read()
VERSION = REVISION = '0.1.4'
PROJECT_NAME = 'JenkinsAPI'
PROJECT_AUTHORS = "Salim Fadhley, Ramon van Alteren, Ruslan Lutsenko"
PROJECT_EMAILS = '[email protected], [email protected], [email protected]'
PROJECT_URL = "https://github.com/salimfadhley/jenkinsapi"
SHORT_DESCRIPTION = 'A Python API for accessing resources on a Jenkins continuous-integration server.'
GLOBAL_ENTRY_POINTS = {
"console_scripts":[ "jenkins_invoke=jenkinsapi.command_line.jenkins_invoke:main"]
}
# Actual setup
setup(name=PROJECT_NAME.lower(),
version=VERSION,
author=PROJECT_AUTHORS,
author_email=PROJECT_EMAILS,
packages=["jenkinsapi",'jenkinsapi.utils','jenkinsapi.command_line'],
zip_safe=True,
include_package_data = False,
entry_points = GLOBAL_ENTRY_POINTS,
url=PROJECT_URL,
description=SHORT_DESCRIPTION,
long_description=DESCRIPTION,
)
|
from setuptools import setup
import os
PROJECT_ROOT, _ = os.path.split(__file__)
DESCRIPTION = open(os.path.join(PROJECT_ROOT, "README.rst")).read()
VERSION = REVISION = '0.1.4'
PROJECT_NAME = 'JenkinsAPI'
PROJECT_AUTHORS = "Salim Fadhley, Ramon van Alteren, Ruslan Lutsenko"
PROJECT_EMAILS = '[email protected], [email protected], [email protected]'
PROJECT_URL = "https://github.com/salimfadhley/jenkinsapi"
SHORT_DESCRIPTION = 'A Python API for accessing resources on a Jenkins continuous-integration server.'
GLOBAL_ENTRY_POINTS = {
"console_scripts": ["jenkins_invoke=jenkinsapi.command_line.jenkins_invoke:main"]
}
# Actual setup
setup(name=PROJECT_NAME.lower(),
version=VERSION,
author=PROJECT_AUTHORS,
author_email=PROJECT_EMAILS,
packages=["jenkinsapi", 'jenkinsapi.utils', 'jenkinsapi.command_line'],
zip_safe=True,
include_package_data=False,
entry_points=GLOBAL_ENTRY_POINTS,
url=PROJECT_URL,
description=SHORT_DESCRIPTION,
long_description=DESCRIPTION,
)
|
Fix pep8 and open file description
|
Fix pep8 and open file description
|
Python
|
mit
|
mistermocha/jenkinsapi,jduan/jenkinsapi,mistermocha/jenkinsapi,zaro0508/jenkinsapi,imsardine/jenkinsapi,JohnLZeller/jenkinsapi,mistermocha/jenkinsapi,aerickson/jenkinsapi,EwoutVDC/jenkinsapi,JohnLZeller/jenkinsapi,imsardine/jenkinsapi,salimfadhley/jenkinsapi,imsardine/jenkinsapi,zaro0508/jenkinsapi,zaro0508/jenkinsapi,domenkozar/jenkinsapi,JohnLZeller/jenkinsapi,domenkozar/jenkinsapi,jduan/jenkinsapi,aerickson/jenkinsapi,salimfadhley/jenkinsapi,ramonvanalteren/jenkinsapi
|
ce365ef62d26e0555e9a38d152ab6a4f0f96626d
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='gdcdatamodel',
packages=find_packages(),
install_requires=[
'pytz==2016.4',
'graphviz==0.4.2',
'jsonschema==2.5.1',
'python-dateutil==2.4.2',
'psqlgraph',
'gdcdictionary',
'dictionaryutils>=2.0.0,<3.0.0',
'cdisutils',
],
package_data={
"gdcdatamodel": [
"xml_mappings/*.yaml",
]
},
dependency_links=[
'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils',
'git+https://github.com/NCI-GDC/[email protected]#egg=psqlgraph',
'git+https://github.com/NCI-GDC/gdcdictionary.git@release/jibboo#egg=gdcdictionary',
],
entry_points={
'console_scripts': [
'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main'
]
},
)
|
from setuptools import setup, find_packages
setup(
name='gdcdatamodel',
packages=find_packages(),
install_requires=[
'pytz==2016.4',
'graphviz==0.4.2',
'jsonschema==2.5.1',
'python-dateutil==2.4.2',
'psqlgraph',
'gdcdictionary',
'dictionaryutils>=2.0.0,<3.0.0',
'cdisutils',
],
package_data={
"gdcdatamodel": [
"xml_mappings/*.yaml",
]
},
dependency_links=[
'git+https://github.com/NCI-GDC/cdisutils.git@863ce13772116b51bcf5ce7e556f5df3cb9e6f63#egg=cdisutils',
'git+https://github.com/NCI-GDC/[email protected]#egg=psqlgraph',
'git+https://github.com/NCI-GDC/[email protected]#egg=gdcdictionary',
],
entry_points={
'console_scripts': [
'gdc_postgres_admin=gdcdatamodel.gdc_postgres_admin:main'
]
},
)
|
Update to dictionary release tag
|
chore(pins): Update to dictionary release tag
- Update to dictionary release tag
|
Python
|
apache-2.0
|
NCI-GDC/gdcdatamodel,NCI-GDC/gdcdatamodel
|
57829ac0344df841d6600294dfcd5c745b7ff00b
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='[email protected]',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
import os
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='pyaavso',
version=__import__('pyaavso').__version__,
description='A Python library for working with AAVSO data.',
long_description=read('README.rst'),
author='Zbigniew Siciarz',
author_email='[email protected]',
url='http://github.com/zsiciarz/pyaavso',
download_url='http://pypi.python.org/pypi/pyaavso',
license='MIT',
packages=find_packages(exclude=['tests']),
include_package_data=True,
install_requires=['lxml>=2.0', 'requests>=1.0'],
tests_require=['pytest'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Scientific/Engineering :: Astronomy',
'Topic :: Utilities'
],
)
|
Add classifier for Python 3-only compatibility.
|
Add classifier for Python 3-only compatibility.
|
Python
|
mit
|
zsiciarz/pyaavso
|
f3c9550ee6719278cba7f4edda26274220e954f3
|
setup.py
|
setup.py
|
#!/usr/bin/python
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import re
from setuptools import setup, find_packages
data_files = {
"share/osbs": [
"inputs/prod.json",
"inputs/prod_inner.json",
"inputs/simple.json",
"inputs/simple_inner.json",
],
}
def _get_requirements(path):
try:
with open(path) as f:
packages = f.read().splitlines()
except (IOError, OSError) as ex:
raise RuntimeError("Can't open file with requirements: %s", repr(ex))
return [p.strip() for p in packages if not re.match(r"^\s*#", p)]
def _install_requirements():
requirements = _get_requirements('requirements.txt')
return requirements
setup(
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Tomas Tomecek',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts': ['osbs=osbs.cli.main:main'],
},
install_requires=_install_requirements(),
data_files=data_files.items(),
)
|
#!/usr/bin/python
"""
Copyright (c) 2015 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import re
from setuptools import setup, find_packages
data_files = {
"share/osbs": [
"inputs/prod.json",
"inputs/prod_inner.json",
"inputs/simple.json",
"inputs/simple_inner.json",
],
}
def _get_requirements(path):
try:
with open(path) as f:
packages = f.read().splitlines()
except (IOError, OSError) as ex:
raise RuntimeError("Can't open file with requirements: %s", repr(ex))
return [p.strip() for p in packages if not re.match(r"^\s*#", p)]
def _install_requirements():
requirements = _get_requirements('requirements.txt')
return requirements
setup(
name="osbs-client",
description='Python module and command line client for OpenShift Build Service',
version="0.14",
author='Red Hat, Inc.',
author_email='[email protected]',
url='https://github.com/projectatomic/osbs-client',
license="BSD",
packages=find_packages(exclude=["tests"]),
entry_points={
'console_scripts': ['osbs=osbs.cli.main:main'],
},
install_requires=_install_requirements(),
data_files=data_files.items(),
)
|
Change author and email to the same as atomic-reactor has
|
Change author and email to the same as atomic-reactor has
|
Python
|
bsd-3-clause
|
bfontecc007/osbs-client,vrutkovs/osbs-client,twaugh/osbs-client,vrutkovs/osbs-client,jpopelka/osbs-client,DBuildService/osbs-client,projectatomic/osbs-client,bfontecc007/osbs-client,twaugh/osbs-client,pombredanne/osbs-client,jpopelka/osbs-client,DBuildService/osbs-client,projectatomic/osbs-client,pombredanne/osbs-client
|
e151a6f85c79821a0e56b6dbdb1e6f2155b725a0
|
setup.py
|
setup.py
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="accelerometer",
version="2.0",
author="Aiden Doherty",
author_email="[email protected]",
description="A package to extract meaningful health information from large accelerometer datasets e.g. how much time individuals spend in sleep, sedentary behaviour, walking and moderate intensity physical activity",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/activityMonitoring/biobankAccelerometerAnalysis",
packages=setuptools.find_packages(),
install_requires=[
'argparse',
'joblib',
'matplotlib',
'numpy',
'scipy',
'pandas>=0.24',
'scikit-learn==0.21.2',
'sphinx',
'sphinx-rtd-theme',
'statsmodels',
],
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Unix",
],
)
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="accelerometer",
version="2.0",
author="Aiden Doherty",
author_email="[email protected]",
description="A package to extract meaningful health information from large accelerometer datasets e.g. how much time individuals spend in sleep, sedentary behaviour, walking and moderate intensity physical activity",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/activityMonitoring/biobankAccelerometerAnalysis",
packages=setuptools.find_packages(),
install_requires=[
'argparse',
'joblib',
'matplotlib',
'numpy',
'scipy',
'pandas>=0.24',
'scikit-learn>=0.21.2',
'sphinx',
'sphinx-rtd-theme',
'statsmodels',
],
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: MacOS :: MacOS X",
"Operating System :: Unix",
],
)
|
Allow newer versions of sklearn - this will now allow software to be compiled again on OSX. Also tested on Linux.
|
Allow newer versions of sklearn - this will now allow software to be compiled again on OSX. Also tested on Linux.
|
Python
|
bsd-2-clause
|
aidendoherty/biobankAccelerometerAnalysis,aidendoherty/biobankAccelerometerAnalysis,computationalEpidemiology/biobankAccelerometerAnalysis,computationalEpidemiology/biobankAccelerometerAnalysis
|
6fcc0d294f6c501d0926d53c386ddb11cff7db25
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name="PyTumblr",
version="0.0.5",
description="A Python API v2 wrapper for Tumblr",
author="John Bunting",
author_email="[email protected]",
url="https://github.com/tumblr/pytumblr",
packages = ['pytumblr'],
license = "LICENSE",
test_suite='nose.collector',
install_requires = [
'oauth2',
'httpretty'
],
setup_requires=[
'nose',
'nose-cov',
'mock'
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name="PyTumblr",
version="0.0.5",
description="A Python API v2 wrapper for Tumblr",
author="John Bunting",
author_email="[email protected]",
url="https://github.com/tumblr/pytumblr",
packages = ['pytumblr'],
license = "LICENSE",
test_suite='nose.collector',
install_requires = [
'oauth2',
'httpretty'
],
tests_require=[
'nose',
'nose-cov',
'mock'
]
)
|
Move test dependencies to tests_require.
|
Move test dependencies to tests_require.
Move nose, nose-cov, and mock to tests_require, since they are only
required for tests and not needed for general package setup.
|
Python
|
apache-2.0
|
gcd0318/pytumblr,philgroshens/pytumblr,megacoder/pytumblr,tumblr/pytumblr,dianakhuang/pytumblr,PegasusWang/pytumblr,socrateslee/pytumblr
|
146d71c86c5e58b0ed41e66de7a5c94e937fc6a9
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
VERSION = "1.0.0"
with open("requirements.txt", "rt") as f:
requirements= f.read().splitlines()
setup(name="sacad",
version=VERSION,
author="desbma",
packages=find_packages(),
entry_points={"console_scripts": ["sacad = sacad:cl_main"]},
package_data={"": ["LICENSE", "README.md", "requirements.txt"]},
test_suite="tests",
install_requires=requirements,
description="Search and download music album covers",
url="https://github.com/desbma/sacad",
download_url="https://github.com/desbma/sacad/tarball/%s" % (VERSION),
keywords=["dowload", "album", "cover", "art", "albumart", "music"],
classifiers=["Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Graphics",
"Topic :: Utilities"])
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import find_packages, setup
VERSION = "1.0.0"
with open("requirements.txt", "rt") as f:
requirements= f.read().splitlines()
setup(name="sacad",
version=VERSION,
author="desbma",
packages=find_packages(),
entry_points={"console_scripts": ["sacad = sacad:cl_main"]},
package_data={"": ["LICENSE", "README.md", "requirements.txt"]},
test_suite="tests",
install_requires=requirements,
description="Search and download music album covers",
url="https://github.com/desbma/sacad",
download_url="https://github.com/desbma/sacad/archive/%s.tar.gz" % (VERSION),
keywords=["dowload", "album", "cover", "art", "albumart", "music"],
classifiers=["Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Multimedia :: Graphics",
"Topic :: Utilities"])
|
Use alternate GitHub download URL
|
Use alternate GitHub download URL
|
Python
|
mpl-2.0
|
desbma/sacad,desbma/sacad
|
77e06add286e2d4544031fd1d9baf4cffcb16359
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name="pylast",
version="2.2.0.dev0",
author="Amr Hassan <[email protected]>",
install_requires=['six'],
tests_require=['mock', 'pytest', 'coverage', 'pycodestyle', 'pyyaml',
'pyflakes', 'flaky'],
description="A Python interface to Last.fm and Libre.fm",
author_email="[email protected]",
url="https://github.com/pylast/pylast",
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: Apache Software License",
"Topic :: Internet",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Software Development :: Libraries :: Python Modules",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
python_requires='>=2.7.10, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
keywords=["Last.fm", "music", "scrobble", "scrobbling"],
packages=find_packages(exclude=('tests*',)),
license="Apache2"
)
# End of file
|
#!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name="pylast",
version="2.2.0.dev0",
author="Amr Hassan <[email protected]> and Contributors",
install_requires=['six'],
tests_require=['mock', 'pytest', 'coverage', 'pycodestyle', 'pyyaml',
'pyflakes', 'flaky'],
description="A Python interface to Last.fm and Libre.fm",
author_email="[email protected]",
url="https://github.com/pylast/pylast",
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: Apache Software License",
"Topic :: Internet",
"Topic :: Multimedia :: Sound/Audio",
"Topic :: Software Development :: Libraries :: Python Modules",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
python_requires='>=2.7.10, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
keywords=["Last.fm", "music", "scrobble", "scrobbling"],
packages=find_packages(exclude=('tests*',)),
license="Apache2"
)
# End of file
|
Add 'and Contributors' to Author
|
Add 'and Contributors' to Author
|
Python
|
apache-2.0
|
pylast/pylast,hugovk/pylast
|
efb8b12eb92d51cddee8e24d569a4c85227cd15a
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename,'r').read().split('\n'))
def remove_externals(requirements):
return filter(lambda e: not e.startswith('-e'), requirements)
setup(
name = "vumi",
version = "0.1.0",
url = 'http://github.com/praekelt/vumi',
license = 'BSD',
description = "Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description = open('README.rst','r').read(),
author = 'Praekelt Foundation',
author_email = '[email protected]',
packages = find_packages(),
install_requires = ['setuptools'] + remove_externals(listify('config/requirements.pip')),
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking'
]
)
|
from setuptools import setup, find_packages
def listify(filename):
return filter(None, open(filename,'r').read().split('\n'))
def remove_externals(requirements):
return filter(lambda e: not e.startswith('-e'), requirements)
setup(
name = "vumi",
version = "0.2.0a",
url = 'http://github.com/praekelt/vumi',
license = 'BSD',
description = "Super-scalable messaging engine for the delivery of SMS, "
"Star Menu and chat messages to diverse audiences in "
"emerging markets and beyond.",
long_description = open('README.rst','r').read(),
author = 'Praekelt Foundation',
author_email = '[email protected]',
packages = find_packages(),
install_requires = ['setuptools'] + remove_externals(listify('config/requirements.pip')),
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Networking'
]
)
|
Set initial version for series 0.2.x
|
Set initial version for series 0.2.x
|
Python
|
bsd-3-clause
|
vishwaprakashmishra/xmatrix,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,harrissoerja/vumi
|
b3e14ce837cf9b90f5f7633e028408e21cc48bb6
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='osmviz',
version='1.1.0',
description='OSMViz is a small set of Python tools for retrieving '
'and using Mapnik tiles from a Slippy Map server '
'(you may know these as OpenStreetMap images).',
long_description=open('README.md', 'r').read(),
classifiers=[
# 'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Documentation'],
keywords='osm openstreetmap tiles visualization',
author='Colin Bick',
author_email='[email protected]',
url='https://hugovk.github.io/osmviz',
license='MIT',
package_dir={'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
# osmviz actually only requires either PyGame or PIL, not necessarily both
requires=(
'PyGame',
'PIL',
)
)
|
from setuptools import setup, find_packages
setup(
name='osmviz',
version='1.1.0',
description='OSMViz is a small set of Python tools for retrieving '
'and using Mapnik tiles from a Slippy Map server '
'(you may know these as OpenStreetMap images).',
long_description=open('README.md', 'r').read(),
python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*',
classifiers=[
# 'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Documentation'],
keywords='osm openstreetmap tiles visualization',
author='Colin Bick',
author_email='[email protected]',
url='https://hugovk.github.io/osmviz',
license='MIT',
package_dir={'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
# osmviz actually only requires either PyGame or PIL, not necessarily both
requires=(
'PyGame',
'PIL',
)
)
|
Add python_requires to help pip
|
Add python_requires to help pip
|
Python
|
mit
|
hugovk/osmviz,hugovk/osmviz
|
9fe6b64f83b17e1fe8f9a8d70cddcd8bf438e385
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name = "cmdparse",
version = "0.9",
author = "Peter Teichman",
license = "MIT",
packages = find_packages(exclude="tests"),
test_suite = "tests.cmdparse_suite",
)
|
#!/usr/bin/env python
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(
name = "cmdparse",
version = "0.9",
author = "Peter Teichman",
license = "MIT",
packages = find_packages(exclude=["tests"]),
test_suite = "tests.cmdparse_suite",
)
|
Fix find_packages excludes to contain a list rather than a bare string
|
Fix find_packages excludes to contain a list rather than a bare string
|
Python
|
mit
|
pteichman/python-cmdparse
|
b8abb57f7a0e822ba32be2d379bb967f4abfbc21
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name='Zuice',
version='0.2-dev',
description='A dependency injection framework for Python',
author='Michael Williamson',
author_email='[email protected]',
url='http://gitorious.org/zuice',
packages=['zuice'],
)
|
from distutils.core import setup
setup(
name='Zuice',
version='0.2-dev',
description='A dependency injection framework for Python',
author='Michael Williamson',
author_email='[email protected]',
url='https://github.com/mwilliamson/zuice',
packages=['zuice'],
)
|
Update package URL to use GitHub
|
Update package URL to use GitHub
|
Python
|
bsd-2-clause
|
mwilliamson/zuice
|
f3ecf3000f6291bac711857e51e4d894d52ad24a
|
setup.py
|
setup.py
|
import os.path
import sys
from setuptools import setup, find_packages
from build_manpage import build_manpage
HOME=os.path.expanduser('~')
setup(
name='popup',
version='0.2.0',
author='Jay Edwards',
cmdclass={'build_manpage': build_manpage},
author_email='[email protected]',
packages=['PopupServer', 'PopupServer.test'],
package_data={'PopupServer': ['playbooks/*/*.yaml']},
data_files=[('%s/.popup/config/ssh_configs' % HOME, []),
('%s/.popup/config/ssh_control' % HOME, []), ('%s/.popup/keys' % HOME, []),
('%s/.popup/manifests' % HOME, []), ('%s/share/man/man1' % sys.prefix, ['doc/popup.1'])],
url="http://pypi.python.org/pypi/popup",
license='BSD',
description='Quickly setup an EC2 server running OpenVPN and other useful tools',
long_description=open('README.txt').read(),
install_requires=[
"ansible == 0.9",
"boto >= 2.7.0",
],
setup_requires=[
"github-distutils >= 0.1.0",
],
entry_points = {
'console_scripts': [
'popup = PopupServer.popup:main',
],
'distutils.commands': [
'build_manpage = build_manpage.build_manpage'
]
}
)
|
import os.path
import sys
from setuptools import setup, find_packages
from build_manpage import build_manpage
HOME=os.path.expanduser('~')
setup(
name='popup',
version='0.1.0',
author='Jay Edwards',
cmdclass={'build_manpage': build_manpage},
author_email='[email protected]',
packages=['PopupServer', 'PopupServer.test'],
package_data={'PopupServer': ['playbooks/*/*.yaml']},
data_files=[('%s/.popup/config/ssh_configs' % HOME, []),
('%s/.popup/config/ssh_control' % HOME, []), ('%s/.popup/keys' % HOME, []),
('%s/.popup/manifests' % HOME, []), ('%s/share/man/man1' % sys.prefix, ['doc/popup.1'])],
url="http://pypi.python.org/pypi/popup",
license='BSD',
description='Quickly setup an EC2 server running OpenVPN and other useful tools',
long_description=open('README.txt').read(),
install_requires=[
"ansible == 0.9",
"boto >= 2.7.0",
],
setup_requires=[
"github-distutils >= 0.1.0",
],
entry_points = {
'console_scripts': [
'popup = PopupServer.popup:main',
],
'distutils.commands': [
'build_manpage = build_manpage.build_manpage'
]
}
)
|
Revert "Bump version to 0.2.0"
|
Revert "Bump version to 0.2.0"
This reverts commit 189e4bc4bcd7a6764740d9974a633996f6ba1fc7.
|
Python
|
bsd-2-clause
|
jayed/popup
|
31aa3fb5aa44d43327942767b89acf0f1375f6b2
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon-cache', 'init/carbon-relay', 'init/carbon-aggregator']) ]
setup(
name='carbon',
version='0.9.10_pre3',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
#!/usr/bin/env python
import os
from glob import glob
if os.environ.get('USE_SETUPTOOLS'):
from setuptools import setup
setup_kwargs = dict(zip_safe=0)
else:
from distutils.core import setup
setup_kwargs = dict()
storage_dirs = [ ('storage/whisper',[]), ('storage/lists',[]),
('storage/log',[]), ('storage/rrd',[]) ]
conf_files = [ ('conf', glob('conf/*.example')) ]
init_scripts = [ ('/etc/init.d', ['init/carbon-cache', 'init/carbon-relay', 'init/carbon-aggregator']) ]
setup(
name='carbon',
version='0.9.10_pre4',
url='https://launchpad.net/graphite',
author='Chris Davis',
author_email='[email protected]',
license='Apache Software License 2.0',
description='Backend data caching and persistence daemon for Graphite',
packages=['carbon', 'carbon.aggregator', 'twisted.plugins'],
package_dir={'' : 'lib'},
scripts=glob('bin/*'),
package_data={ 'carbon' : ['*.xml'] },
data_files=storage_dirs + conf_files + init_scripts,
install_requires=['twisted', 'txamqp'],
**setup_kwargs
)
|
Make 0.9.10_pre4 to match webapp
|
Make 0.9.10_pre4 to match webapp
|
Python
|
apache-2.0
|
mleinart/carbon,kharandziuk/carbon,lyft/carbon,protochron/carbon,pu239ppy/carbon,JeanFred/carbon,johnseekins/carbon,xadjmerripen/carbon,pratX/carbon,criteo-forks/carbon,graphite-project/carbon,obfuscurity/carbon,benburry/carbon,piotr1212/carbon,xadjmerripen/carbon,graphite-server/carbon,kharandziuk/carbon,iain-buclaw-sociomantic/carbon,JeanFred/carbon,cbowman0/carbon,krux/carbon,lyft/carbon,criteo-forks/carbon,obfuscurity/carbon,deniszh/carbon,deniszh/carbon,piotr1212/carbon,cbowman0/carbon,krux/carbon,pu239ppy/carbon,iain-buclaw-sociomantic/carbon,pratX/carbon,graphite-server/carbon,benburry/carbon,mleinart/carbon,johnseekins/carbon,protochron/carbon,graphite-project/carbon
|
584b4f08cb1dacd57fde85b11824709e10d657ce
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(
name='hadmin',
version='0.1',
packages=find_packages(),
author='Alec Ten Harmsel',
author_email='[email protected]',
description='A Hadoop configuration manager',
url='http://github.com/trozamon/hadmin',
license='MIT',
test_suite='hadmin.test',
setup_requires=['flake8'],
entry_points={
'console_scripts': [
'hadmin = hadmin.util:run'
]
}
)
|
from setuptools import setup, find_packages
setup(
name='hadmin',
version='0.1',
packages=find_packages(),
author='Alec Ten Harmsel',
author_email='[email protected]',
description='A Hadoop configuration manager',
url='http://github.com/trozamon/hadmin',
license='MIT',
test_suite='hadmin.test',
tests_require=['flake8'],
entry_points={
'console_scripts': [
'hadmin = hadmin.util:run'
]
}
)
|
Make flake8 test only dependency
|
Make flake8 test only dependency
|
Python
|
mit
|
trozamon/hadmin
|
75a8d2ed6a3fa03ca132388182b1e7876fb6413e
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.36.0",
"flask",
"httpretty==0.8.10",
"requests",
"xmltodict",
"six",
"werkzeug",
"sure",
"freezegun"
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
setup(
name='moto',
version='0.4.28',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
|
#!/usr/bin/env python
from __future__ import unicode_literals
from setuptools import setup, find_packages
install_requires = [
"Jinja2",
"boto>=2.36.0",
"flask",
"httpretty==0.8.10",
"requests",
"xmltodict",
"six",
"werkzeug",
"sure",
"freezegun"
]
extras_require = {
# No builtin OrderedDict before 2.7
':python_version=="2.6"': ['ordereddict'],
}
setup(
name='moto',
version='0.4.27',
description='A library that allows your python tests to easily'
' mock out the boto library',
author='Steve Pulec',
author_email='[email protected]',
url='https://github.com/spulec/moto',
entry_points={
'console_scripts': [
'moto_server = moto.server:main',
],
},
packages=find_packages(exclude=("tests", "tests.*")),
install_requires=install_requires,
extras_require=extras_require,
license="Apache",
test_suite="tests",
classifiers=[
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"License :: OSI Approved :: Apache Software License",
"Topic :: Software Development :: Testing",
],
)
|
Revert "Bumping the version reflecting the bugfix"
|
Revert "Bumping the version reflecting the bugfix"
This reverts commit 7f3daf4755aff19d04acf865df39f7d188655b15.
|
Python
|
apache-2.0
|
okomestudio/moto,spulec/moto,spulec/moto,heddle317/moto,Affirm/moto,okomestudio/moto,whummer/moto,2rs2ts/moto,Affirm/moto,ZuluPro/moto,rocky4570/moto,Brett55/moto,spulec/moto,gjtempleton/moto,2rs2ts/moto,okomestudio/moto,2rs2ts/moto,gjtempleton/moto,Brett55/moto,heddle317/moto,botify-labs/moto,spulec/moto,2rs2ts/moto,heddle317/moto,whummer/moto,okomestudio/moto,gjtempleton/moto,rocky4570/moto,kefo/moto,dbfr3qs/moto,Brett55/moto,ZuluPro/moto,silveregg/moto,botify-labs/moto,whummer/moto,dbfr3qs/moto,dbfr3qs/moto,whummer/moto,spulec/moto,heddle317/moto,Brett55/moto,ZuluPro/moto,rocky4570/moto,Affirm/moto,Affirm/moto,dbfr3qs/moto,william-richard/moto,william-richard/moto,braintreeps/moto,botify-labs/moto,kefo/moto,Affirm/moto,botify-labs/moto,william-richard/moto,whummer/moto,kefo/moto,gjtempleton/moto,william-richard/moto,botify-labs/moto,Brett55/moto,kefo/moto,ZuluPro/moto,rocky4570/moto,dbfr3qs/moto,william-richard/moto,2rs2ts/moto,spulec/moto,kefo/moto,whummer/moto,rocky4570/moto,heddle317/moto,william-richard/moto,ZuluPro/moto,gjtempleton/moto,ZuluPro/moto,okomestudio/moto,Affirm/moto,botify-labs/moto,Brett55/moto,dbfr3qs/moto,rocky4570/moto,okomestudio/moto
|
4c4a7a016abed4a0710ceffc35a985210cc52ab7
|
setup.py
|
setup.py
|
from setuptools import setup
import os
version = 0.4
setup(
version=version,
description="A script allowing to setup Amazon EC2 instances through configuration files.",
long_description=open("README.txt").read() + "\n\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
name="mr.awsome",
author='Florian Schulze',
author_email='[email protected]',
url='http://github.com/fschulze/mr.awsome',
include_package_data=True,
zip_safe=False,
packages=['mr'],
namespace_packages=['mr'],
install_requires=[
'setuptools',
'boto',
'Fabric',
],
)
|
from setuptools import setup
import os
version = 0.4
setup(
version=version,
description="A script allowing to setup Amazon EC2 instances through configuration files.",
long_description=open("README.txt").read() + "\n\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
name="mr.awsome",
author='Florian Schulze',
author_email='[email protected]',
url='http://github.com/fschulze/mr.awsome',
include_package_data=True,
zip_safe=False,
packages=['mr'],
namespace_packages=['mr'],
install_requires=[
'setuptools',
'boto >= 1.9b',
'Fabric >= 0.9.0',
],
)
|
Set minimum version for dependencies.
|
Set minimum version for dependencies.
|
Python
|
bsd-3-clause
|
ployground/ploy,fschulze/ploy,ployground/ploy_fabric,ployground/ploy_ec2
|
18bd0bcc0d892aef4ea9babfc6ec2af6e40cea62
|
manager/urls.py
|
manager/urls.py
|
from django.conf.urls import url
from manager import views
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/$', views.package_detail, name='package_detail'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/build/$', views.package_build, name='package_build'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/(?P<build_number>\d+)/$', views.build_detail, name='build_detail')
]
|
from django.conf.urls import url
from manager import views
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^packages/$', views.package_list, name='package_list'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/$', views.package_detail, name='package_detail'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/build/$', views.package_build, name='package_build'),
url(r'^packages/(?P<package_name>[a-zA-Z0-9_+-]+)/(?P<build_number>\d+)/$', views.build_detail, name='build_detail')
]
|
Add alternative package list url
|
Add alternative package list url
|
Python
|
mit
|
colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager
|
646b0f8babf346f3ec21ae688453deee24fb410f
|
tests/core/tests/base_formats_tests.py
|
tests/core/tests/base_formats_tests.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.test import TestCase
from django.utils.text import force_text
from import_export.formats import base_formats
class XLSTest(TestCase):
def test_binary_format(self):
self.assertTrue(base_formats.XLS().is_binary())
class CSVTest(TestCase):
def setUp(self):
self.format = base_formats.CSV()
def test_import_dos(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-dos.csv')
in_stream = open(filename, self.format.get_read_mode()).read()
expected = 'id,name,author_email\n1,Some book,[email protected]\n'
self.assertEqual(in_stream, expected)
def test_import_unicode(self):
# importing csv UnicodeEncodeError 347
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-unicode.csv')
in_stream = open(filename, self.format.get_read_mode())
data = force_text(in_stream.read())
base_formats.CSV().create_dataset(data)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.test import TestCase
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from import_export.formats import base_formats
class XLSTest(TestCase):
def test_binary_format(self):
self.assertTrue(base_formats.XLS().is_binary())
class CSVTest(TestCase):
def setUp(self):
self.format = base_formats.CSV()
def test_import_dos(self):
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-dos.csv')
in_stream = open(filename, self.format.get_read_mode()).read()
expected = 'id,name,author_email\n1,Some book,[email protected]\n'
self.assertEqual(in_stream, expected)
def test_import_unicode(self):
# importing csv UnicodeEncodeError 347
filename = os.path.join(
os.path.dirname(__file__),
os.path.pardir,
'exports',
'books-unicode.csv')
in_stream = open(filename, self.format.get_read_mode())
data = force_text(in_stream.read())
base_formats.CSV().create_dataset(data)
|
Fix importing force_text tests for 1.4 compatibility
|
Fix importing force_text tests for 1.4 compatibility
use 1.4 compat code
|
Python
|
bsd-2-clause
|
copperleaftech/django-import-export,PetrDlouhy/django-import-export,PetrDlouhy/django-import-export,rhunwicks/django-import-export,copperleaftech/django-import-export,Apkawa/django-import-export,jnns/django-import-export,PetrDlouhy/django-import-export,daniell/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,pajod/django-import-export,daniell/django-import-export,brillgen/django-import-export,pajod/django-import-export,bmihelac/django-import-export,manelclos/django-import-export,jnns/django-import-export,brillgen/django-import-export,jnns/django-import-export,jnns/django-import-export,copperleaftech/django-import-export,PetrDlouhy/django-import-export,bmihelac/django-import-export,pajod/django-import-export,bmihelac/django-import-export,Apkawa/django-import-export,bmihelac/django-import-export,Apkawa/django-import-export,daniell/django-import-export,daniell/django-import-export,copperleaftech/django-import-export,manelclos/django-import-export,brillgen/django-import-export,django-import-export/django-import-export,rhunwicks/django-import-export,pajod/django-import-export,django-import-export/django-import-export,manelclos/django-import-export,rhunwicks/django-import-export,brillgen/django-import-export
|
913d7562a8be59561c2e7309eb51aa63bc013ad3
|
build/setup.py
|
build/setup.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
from glob import glob
import os
binary_libs = [os.path.basename(f) for f in glob('jaxlib/*.so*')]
setup(
name='jaxlib',
version='0.1.4',
description='XLA library for JAX',
author='JAX team',
author_email='[email protected]',
packages=['jaxlib'],
install_requires=['numpy>=1.12', 'six', 'protobuf>=3.6.0', 'absl-py'],
url='https://github.com/google/jax',
license='Apache-2.0',
package_data={'jaxlib': binary_libs},
)
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
from glob import glob
import os
binary_libs = [os.path.basename(f) for f in glob('jaxlib/*.so*')]
setup(
name='jaxlib',
version='0.1.4',
description='XLA library for JAX',
author='JAX team',
author_email='[email protected]',
packages=['jaxlib'],
install_requires=['scipy', 'numpy>=1.12', 'six', 'protobuf>=3.6.0', 'absl-py'],
url='https://github.com/google/jax',
license='Apache-2.0',
package_data={'jaxlib': binary_libs},
)
|
Add scipy as a dependency of jaxlib.
|
Add scipy as a dependency of jaxlib.
Jaxlib depends on LAPACK kernels provided by Scipy.
|
Python
|
apache-2.0
|
google/jax,google/jax,tensorflow/probability,google/jax,tensorflow/probability,google/jax
|
a3d655bd311161679bafbcad66f678d412e158f0
|
colour/examples/volume/examples_rgb.py
|
colour/examples/volume/examples_rgb.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Showcases RGB colourspace volume computations.
"""
from __future__ import division, unicode_literals
import colour
from colour.utilities.verbose import message_box
message_box('RGB Colourspace Volume Computations')
message_box('Computing "ProPhoto RGB" RGB colourspace limits.')
limits = colour.RGB_colourspace_limits(colour.PROPHOTO_RGB_COLOURSPACE)
print(limits)
print('\n')
samples = 10e4
message_box(('Computing "ProPhoto RGB" RGB colourspace volume using '
'{0} samples.'.format(samples)))
print(colour.RGB_colourspace_volume_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples,
limits=limits * 1.1))
print('\n')
message_box(('Computing "ProPhoto RGB" RGB colourspace coverage of Pointer\'s '
'Gamut using {0} samples.'.format(samples)))
print(colour.RGB_colourspace_pointer_gamut_coverage_MonteCarlo(
colour.PROPHOTO_RGB_COLOURSPACE,
samples=samples))
|
Add "Pointer's Gamut" coverage computation example.
|
Add "Pointer's Gamut" coverage computation example.
|
Python
|
bsd-3-clause
|
colour-science/colour
|
160f29d42086a10bc38d255d8e03a30b1eb01deb
|
medical_prescription_sale/__openerp__.py
|
medical_prescription_sale/__openerp__.py
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
# -*- coding: utf-8 -*-
# © 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'Medical Prescription Sales',
'summary': 'Create Sale Orders from Prescriptions',
'version': '9.0.0.1.0',
'author': "LasLabs, Odoo Community Association (OCA)",
'category': 'Medical',
'depends': [
'sale',
'stock',
'medical_prescription',
'medical_pharmacy',
'medical_prescription_thread',
],
"website": "https://laslabs.com",
"license": "AGPL-3",
"data": [
'data/ir_sequence.xml',
'data/product_category_data.xml',
'wizards/medical_sale_wizard_view.xml',
'wizards/medical_sale_temp_view.xml',
'views/prescription_order_line_view.xml',
'views/prescription_order_view.xml',
'views/sale_order_view.xml',
'views/medical_physician_view.xml',
'views/medical_patient_view.xml',
],
'demo': [
'demo/medical_medicament_demo.xml',
'demo/medical_medication_demo.xml',
'demo/medical_prescription_order_demo.xml',
'demo/medical_prescription_order_line_demo.xml',
],
'installable': True,
'auto_install': False,
}
|
Add dependency * Add dependency on stock to manifest file. This is needed by some of the demo data in the module, which was not installing due to its absence.
|
[FIX] medical_prescription_sale: Add dependency
* Add dependency on stock to manifest file. This is needed by some of the demo
data in the module, which was not installing due to its absence.
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,laslabs/vertical-medical
|
81b77db1a455a976a5c516decb5fdd141f10bc31
|
Lib/test/test_fork1.py
|
Lib/test/test_fork1.py
|
"""This test checks for correct fork() behavior.
"""
import os
import time
import unittest
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children
try:
os.fork
except AttributeError:
raise unittest.SkipTest, "os.fork not defined -- skipping test_fork1"
class ForkTest(ForkWait):
def wait_impl(self, cpid):
for i in range(10):
# waitpid() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status = os.waitpid(cpid, os.WNOHANG)
if spid == cpid:
break
time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
def test_main():
run_unittest(ForkTest)
reap_children()
if __name__ == "__main__":
test_main()
|
"""This test checks for correct fork() behavior.
"""
import os
import time
from test.fork_wait import ForkWait
from test.test_support import run_unittest, reap_children, import_module
import_module('os.fork')
class ForkTest(ForkWait):
def wait_impl(self, cpid):
for i in range(10):
# waitpid() shouldn't hang, but some of the buildbots seem to hang
# in the forking tests. This is an attempt to fix the problem.
spid, status = os.waitpid(cpid, os.WNOHANG)
if spid == cpid:
break
time.sleep(1.0)
self.assertEqual(spid, cpid)
self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
def test_main():
run_unittest(ForkTest)
reap_children()
if __name__ == "__main__":
test_main()
|
Convert import try/except to use test_support.import_module().
|
Convert import try/except to use test_support.import_module().
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
9ea9d111c8b6a20015f9ad6149f690c9e8c0774d
|
tools/tiny-test-fw/Utility/__init__.py
|
tools/tiny-test-fw/Utility/__init__.py
|
from __future__ import print_function
import sys
_COLOR_CODES = {
"white": '\033[0m',
"red": '\033[31m',
"green": '\033[32m',
"orange": '\033[33m',
"blue": '\033[34m',
"purple": '\033[35m',
"W": '\033[0m',
"R": '\033[31m',
"G": '\033[32m',
"O": '\033[33m',
"B": '\033[34m',
"P": '\033[35m'
}
def console_log(data, color="white", end="\n"):
"""
log data to console.
(if not flush console log, Gitlab-CI won't update logs during job execution)
:param data: data content
:param color: color
"""
if color not in _COLOR_CODES:
color = "white"
color_codes = _COLOR_CODES[color]
print(color_codes + data, end=end)
if color not in ["white", "W"]:
# reset color to white for later logs
print(_COLOR_CODES["white"] + "\r")
sys.stdout.flush()
|
from __future__ import print_function
import sys
_COLOR_CODES = {
"white": u'\033[0m',
"red": u'\033[31m',
"green": u'\033[32m',
"orange": u'\033[33m',
"blue": u'\033[34m',
"purple": u'\033[35m',
"W": u'\033[0m',
"R": u'\033[31m',
"G": u'\033[32m',
"O": u'\033[33m',
"B": u'\033[34m',
"P": u'\033[35m'
}
def console_log(data, color="white", end="\n"):
"""
log data to console.
(if not flush console log, Gitlab-CI won't update logs during job execution)
:param data: data content
:param color: color
"""
if color not in _COLOR_CODES:
color = "white"
color_codes = _COLOR_CODES[color]
if type(data) is type(b''):
data = data.decode('utf-8', 'replace')
print(color_codes + data, end=end)
if color not in ["white", "W"]:
# reset color to white for later logs
print(_COLOR_CODES["white"] + u"\r")
sys.stdout.flush()
|
Make Utility.console_log accept Unicode and byte strings as well
|
tools: Make Utility.console_log accept Unicode and byte strings as well
|
Python
|
apache-2.0
|
mashaoze/esp-idf,espressif/esp-idf,armada-ai/esp-idf,www220/esp-idf,www220/esp-idf,mashaoze/esp-idf,www220/esp-idf,www220/esp-idf,mashaoze/esp-idf,espressif/esp-idf,www220/esp-idf,espressif/esp-idf,espressif/esp-idf,armada-ai/esp-idf,mashaoze/esp-idf,armada-ai/esp-idf,armada-ai/esp-idf,mashaoze/esp-idf
|
7baaac652f74ea44817cd48eb1a4b3aa36f94e23
|
armstrong/hatband/sites.py
|
armstrong/hatband/sites.py
|
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class AdminSite(DjangoAdminSite):
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
site._registry = django_site._registry
|
from django.contrib.admin.sites import AdminSite as DjangoAdminSite
from django.contrib.admin.sites import site as django_site
class HatbandAndDjangoRegistry(object):
def __init__(self, site, default_site=None):
if default_site is None:
default_site = django_site
super(HatbandAndDjangoRegistry, self).__init__()
self._site = site
self._registry = {}
self.dicts = [self._registry, default_site._registry]
def items(self):
for d in self.dicts:
for item in d.items():
yield item
def iteritems(self):
return iter(self.items())
def __contains__(self, k):
for d in self.dicts:
if k in d:
return True
return False
class AdminSite(DjangoAdminSite):
def __init__(self, default_site=None, *args, **kwargs):
if default_site is None:
default_site = django_site
super(AdminSite, self).__init__(*args, **kwargs)
self._registry = HatbandAndDjangoRegistry(self,
default_site=default_site)
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
# Custom hatband Views here
) + super(AdminSite, self).get_urls()
site = AdminSite()
|
Revert "Simplify this code and make sure AdminSite doesn't act like a singleton"
|
Revert "Simplify this code and make sure AdminSite doesn't act like a singleton"
Unfortunately, it's not that simple. Without the runtime merging from
inside hatband.AdminSite, this doesn't seem to pick up everything else.
This reverts commit 122b4e6982fe7a74ee668c1b146c32a61c72ec7b.
|
Python
|
apache-2.0
|
texastribune/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,armstrong/armstrong.hatband,texastribune/armstrong.hatband,texastribune/armstrong.hatband
|
812d456599e1540e329a4ddc05a7541b5bfdc149
|
labonneboite/conf/__init__.py
|
labonneboite/conf/__init__.py
|
import imp
import os
from labonneboite.conf.common import settings_common
# Settings
# --------
# Default settings of the application are defined in `labonneboite/conf/common/settings_common.py`.
# A specific environment (staging, production...) can define its custom settings by:
# - creating a specific `settings` file, e.g. `lbb_staging_settings.py`
# - defining an environment variable containing the path to this specific `settings` file
#
# Specific and default settings will be merged, and values found in specific settings will take precedence.
# When no specific settings are found, `labonneboite/conf/local_settings.py` is used.
# Dynamically import LBB_SETTINGS environment variable as the `settings`
# module, or import `local_settings.py` as the `settings` module if it does not
# exist.
settings = settings_common
if settings_common.get_current_env() != settings_common.ENV_TEST:
# Don't override settings in tests
settings_module = os.path.join(os.path.dirname(__file__), 'local_settings.py')
settings_module = os.environ.get('LBB_SETTINGS', settings_module)
settings = imp.load_source('settings', settings_module)
# Iterate over each setting defined in the `settings_common` module and add them to the dynamically
# imported `settings` module if they don't already exist.
for setting in dir(settings_common):
if not hasattr(settings, setting):
setattr(settings, setting, getattr(settings_common, setting))
|
import imp
import os
from labonneboite.conf.common import settings_common
# Settings
# --------
# Default settings of the application are defined in `labonneboite/conf/common/settings_common.py`.
# A specific environment (staging, production...) can define its custom settings by:
# - creating a specific `settings` file, e.g. `lbb_staging_settings.py`
# - defining an environment variable containing the path to this specific `settings` file
#
# Specific and default settings will be merged, and values found in specific settings will take precedence.
# When no specific settings are found, `labonneboite/conf/local_settings.py` is used.
# Dynamically import LBB_SETTINGS environment variable as the `settings`
# module, or import `local_settings.py` as the `settings` module if it does not
# exist.
settings = settings_common
if settings_common.get_current_env() != settings_common.ENV_TEST:
# Don't override settings in tests
settings_module = os.path.join(os.path.dirname(__file__), 'local_settings.py')
settings_module = os.environ.get('LBB_SETTINGS', settings_module)
try:
settings = imp.load_source('settings', settings_module)
except FileNotFoundError:
pass
else:
# Iterate over each setting defined in the `settings_common` module and add them to the dynamically
# imported `settings` module if they don't already exist.
for setting in dir(settings_common):
if not hasattr(settings, setting):
setattr(settings, setting, getattr(settings_common, setting))
|
Fix FileNotFoundError on missing local_settings.py
|
Fix FileNotFoundError on missing local_settings.py
This has been broken for a long time... When running LBB without a
local_settings.py and without an LBB_ENV environment variable, importing
local_settings.py was resulting in a FileNotFoundError.
|
Python
|
agpl-3.0
|
StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite,StartupsPoleEmploi/labonneboite
|
1f4ff058d14a32e7e7b9a28daee54a2e8ea1eb02
|
media.py
|
media.py
|
# media.py
class Movie(object):
def __init__(self,
title,
storyline,
poster_image_url,
trailer_youtube_url,
lead_actors,
release_date,
mpaa_rating,
language,
runtime,
production_companies,
trivia
):
self.title = title
self.storyline = storyline
self.poster_image_url = poster_image_url
self.trailer_youtube_url = trailer_youtube_url
self.lead_actors = lead_actors
self.release_date = release_date
self.mpaa_rating = mpaa_rating
self.language = language
self.runtime = runtime
self.production_companies = production_companies
self.trivia = trivia
|
# media.py
class Movie(object):
""" Movie class for creating a movie """
def __init__(self,
title,
storyline,
poster_image_url,
trailer_youtube_url,
lead_actors,
release_date,
mpaa_rating,
language,
runtime,
production_companies,
trivia
):
"""
Args:
title (str): Title of the movie.
storyline (str): Brief storyline of the movie.
poster_image_url (str): URL of the movie's poster image.
trailer_youtube_url (str): Youtube URL of the movie's trailer.
lead_actors (List[str]): A list of the main cast.
release_date (str): Release date of the movie.
mpaa_rating (str): MPAA rating of the movie.
language (str): Language of the movie.
runtime (str): Runtime of the movie.
production_companies (List[str]): A list of the production companies.
trivia (List[str]): A list of some movie trivia.
"""
self.title = title
self.storyline = storyline
self.poster_image_url = poster_image_url
self.trailer_youtube_url = trailer_youtube_url
self.lead_actors = lead_actors
self.release_date = release_date
self.mpaa_rating = mpaa_rating
self.language = language
self.runtime = runtime
self.production_companies = production_companies
self.trivia = trivia
|
Add docstring for class Movie
|
Add docstring for class Movie
|
Python
|
mit
|
vishallama/udacity-fullstack-movie-trailer,vishallama/udacity-fullstack-movie-trailer
|
4dcb0e56627d3b801b5377d77fca721c43090ce2
|
bom_data_parser/axf_parser.py
|
bom_data_parser/axf_parser.py
|
import csv
def read_axf(axf_string):
blocks = {}
state = 'new_block'
for line in axf_string.split('\n'):
if line == '[$]' or line == '':
pass
elif line.startswith('['):
block_key = line.replace('[',"").replace(']',"")
print block_key
else:
if block_key not in blocks:
blocks[block_key] = []
blocks[block_key].append(line)
for k in blocks.keys():
if k == 'data':
is_data = False
data_block = {}
for row in csv.reader(blocks[k]):
if is_data:
data_block[row[1]] = {}
for col_name, col_value in zip(header_row, row):
data_block[row[1]][col_name] = col_value
else:
header_row = row
is_data = True
else:
# probably notice or header ... do something awesome with them
pass
return data_block
def read_axf_file(axf_file):
with open(axf_file, 'r') as f:
return read_axf(f.read())
if __name__ == "__main__":
print read_axf_file('../tests/data/IDV60700.axf')
|
import csv
def read_axf(axf_string):
blocks = {}
state = 'new_block'
for line in axf_string.split('\n'):
if line == '[$]' or line == '':
pass
elif line.startswith('['):
block_key = line.replace('[',"").replace(']',"")
else:
if block_key not in blocks:
blocks[block_key] = []
blocks[block_key].append(line)
for k in blocks.keys():
if k == 'data':
is_data = False
data_block = {}
for row in csv.reader(blocks[k]):
if is_data:
data_block[row[1]] = {}
for col_name, col_value in zip(header_row, row):
data_block[row[1]][col_name] = col_value
else:
header_row = row
is_data = True
else:
# probably notice or header ... do something awesome with them
pass
return data_block
def read_axf_file(axf_file):
with open(axf_file, 'r') as f:
return read_axf(f.read())
if __name__ == "__main__":
print(read_axf_file('../tests/data/IDV60700.axf'))
|
Fix some print statments for Python3 compatibility.
|
Fix some print statments for Python3 compatibility.
|
Python
|
bsd-3-clause
|
amacd31/bom_data_parser,amacd31/bom_data_parser
|
828d03d7a49d65e8584d4bc373ae4d429b291104
|
tests/test_tensorflow_addons.py
|
tests/test_tensorflow_addons.py
|
import unittest
import tensorflow as tf
import tensorflow_addons as tfa
class TestTensorflowAddons(unittest.TestCase):
def test_tfa_image(self):
img_raw = tf.io.read_file('/input/tests/data/dot.png')
img = tf.io.decode_image(img_raw)
img = tf.image.convert_image_dtype(img, tf.float32)
mean = tfa.image.mean_filter2d(img, filter_shape=1)
self.assertEqual(1, len(mean))
|
import unittest
import numpy as np
import tensorflow as tf
import tensorflow_addons as tfa
class TestTensorflowAddons(unittest.TestCase):
def test_tfa_image(self):
img_raw = tf.io.read_file('/input/tests/data/dot.png')
img = tf.io.decode_image(img_raw)
img = tf.image.convert_image_dtype(img, tf.float32)
mean = tfa.image.mean_filter2d(img, filter_shape=1)
self.assertEqual(1, len(mean))
# This test exercises TFA Custom Op. See: b/145555176
def test_gelu(self):
x = tf.constant([[0.5, 1.2, -0.3]])
layer = tfa.layers.GELU()
result = layer(x)
self.assertEqual((1, 3), result.shape)
|
Add a test exercising TFA custom op.
|
Add a test exercising TFA custom op.
To prevent future regression.
BUG=145555176
|
Python
|
apache-2.0
|
Kaggle/docker-python,Kaggle/docker-python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.