commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
72a07977de03d722dc6b5b5b3768202e6c6ac81f | cover coordinates detection with unit tests | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/structure/tests/unittests/test_tasks.py | nodeconductor/structure/tests/unittests/test_tasks.py | # encoding: utf-8
from django.test import TestCase
from nodeconductor.structure.tests import factories
from nodeconductor.structure.tests import models
from nodeconductor.core import utils as core_utils
from nodeconductor.structure import tasks
from mock import patch
class TestTasks(TestCase):
@patch('requests.get')
def test_detect_vm_coordinates_sets_coordinates(self, mock_request_get):
ip_address = "127.0.0.1"
expected_latitude = 20
expected_longitude = 20
instance = factories.TestInstanceFactory(external_ips=ip_address)
mock_request_get.return_value.ok = True
response = {"ip": ip_address, "latitude": expected_latitude, "longitude": expected_longitude}
mock_request_get.return_value.json.return_value = response
tasks.detect_vm_coordinates(core_utils.serialize_instance(instance))
instance_updated = models.TestInstance.objects.get(pk=instance.id)
self.assertIsNotNone(instance_updated.latitude)
self.assertEqual(instance_updated.latitude, expected_latitude)
self.assertIsNotNone(instance_updated.longitude)
self.assertEqual(instance_updated.longitude, expected_longitude)
@patch('requests.get')
def test_detect_vm_coordinates_does_not_set_coordinates_if_response_is_not_ok(self, mock_request_get):
ip_address = "127.0.0.1"
expected_latitude = 20
expected_longitude = 20
instance = factories.TestInstanceFactory(external_ips=ip_address)
mock_request_get.return_value.ok = False
response = {"ip": ip_address, "latitude": expected_latitude, "longitude": expected_longitude}
mock_request_get.return_value.json.return_value = response
tasks.detect_vm_coordinates(core_utils.serialize_instance(instance))
instance_updated = models.TestInstance.objects.get(pk=instance.id)
self.assertIsNone(instance_updated.latitude)
self.assertIsNone(instance_updated.longitude)
| mit | Python |
|
eee6b08e07e60a8ec1f3c2fa2e156344e01737d2 | clean out | ad-m/django-avatar,imgmix/django-avatar,therocode/django-avatar,barbuza/django-avatar,brajeshvit/avatarmodule,jezdez/django-avatar,imgmix/django-avatar,MachineandMagic/django-avatar,jezdez/django-avatar,grantmcconnaughey/django-avatar,grantmcconnaughey/django-avatar,MachineandMagic/django-avatar,dannybrowne86/django-avatar,ad-m/django-avatar,dannybrowne86/django-avatar,tbabej/django-avatar,tbabej/django-avatar,therocode/django-avatar,barbuza/django-avatar,brajeshvit/avatarmodule | avatar/admin.py | avatar/admin.py | from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.template.loader import render_to_string
from avatar.models import Avatar
from avatar.signals import avatar_updated
from avatar.util import get_user_model
class AvatarAdmin(admin.ModelAdmin):
list_display = ('get_avatar', 'user', 'primary', "date_uploaded")
list_filter = ('primary',)
search_fields = ('user__%s' % getattr(get_user_model(), 'USERNAME_FIELD', 'username'),)
list_per_page = 50
def get_avatar(self, avatar_in):
context = dict({
'user': avatar_in.user,
'url': avatar_in.avatar.url,
'alt': six.text_type(avatar_in.user),
'size': 80,
})
return render_to_string('avatar/avatar_tag.html', context)
get_avatar.short_description = _('Avatar')
get_avatar.allow_tags = True
def save_model(self, request, obj, form, change):
super(AvatarAdmin, self).save_model(request, obj, form, change)
avatar_updated.send(sender=Avatar, user=request.user, avatar=obj)
admin.site.register(Avatar, AvatarAdmin)
| from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.template.loader import render_to_string
from avatar.models import Avatar
from avatar.signals import avatar_updated
from avatar.util import get_user_model
class AvatarAdmin(admin.ModelAdmin):
list_display = ('get_avatar', 'user', 'primary', "date_uploaded")
list_filter = ('primary',)
search_fields = ('user__%s' % getattr(get_user_model(), 'USERNAME_FIELD', 'username'),)
list_per_page = 50
def get_avatar(self, avatar_in):
context = dict({
'user': avatar_in.user,
'url': avatar_in.avatar.url,
'alt': six.text_type(avatar_in.user),
'size': 80,
})
return render_to_string('avatar/avatar_tag.html',context)
get_avatar.short_description = _('Avatar')
get_avatar.allow_tags = True
def save_model(self, request, obj, form, change):
super(AvatarAdmin, self).save_model(request, obj, form, change)
avatar_updated.send(sender=Avatar, user=request.user, avatar=obj)
admin.site.register(Avatar, AvatarAdmin)
| bsd-3-clause | Python |
2d9300aeefc840e007d7c615ce48ad36343038f2 | Add "--optimize-autoloader" to `composer.phar install` command | hperadin/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,testn/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jamming/FrameworkBenchmarks,actframework/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,valyala/FrameworkBenchmarks,torhve/FrameworkBenchmarks,zloster/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,torhve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,torhve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,herloct/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sgml/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,leafo/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Verber/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,grob/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,saturday06/Framewo
rkBenchmarks,Eyepea/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,torhve/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,valyala/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zapov/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,actframework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,denkab/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,denkab/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,methane/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Verber/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,denkab/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,khellang/FrameworkBenchmarks,torhve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,doom369/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zapov/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,as
hawnbandy-te-tfb/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,denkab/FrameworkBenchmarks,khellang/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,dmacd/FB-try1,khellang/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jamming/FrameworkBenchmarks,doom369/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zapov/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zapov/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,methane/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sgml/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,ratpack/FrameworkBench
marks,sagenschneider/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,leafo/FrameworkBenchmarks,grob/FrameworkBenchmarks,methane/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,methane/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,grob/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,actframework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,testn/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,dmacd/FB-try1,doom369/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,herloct/FrameworkBenchmarks,khellang/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,actframework/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sgml/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,testn/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,methane/Framewor
kBenchmarks,stefanocasazza/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,testn/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zapov/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,grob/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sgml/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,testn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zloster/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,xitrum-framework/FrameworkBen
chmarks,valyala/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sxend/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zapov/FrameworkBenchmarks,torhve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,grob/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,khellang/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zloster/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,testn/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,denkab/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,grob/FrameworkBenchmarks,marko-asp
lund/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Verber/FrameworkBenchmarks,testn/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,dmacd/FB-try1,sagenschneider/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,joshk/FrameworkBenchmarks,grob/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,testn/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jamming/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zloster/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,kostya-sh/FrameworkBenchmar
ks,lcp0578/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,joshk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,leafo/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Verber/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,grob/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,joshk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,leafo/FrameworkBenchmarks,leafo/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Verber/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,herloct/Framewor
kBenchmarks,zapov/FrameworkBenchmarks,methane/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jamming/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sxend/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,methane/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,dmacd/FB-try1,seem-sky/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,dmacd/FB-try1,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zapov/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,dmacd/FB-try1,alubbe/FrameworkBenchmarks,doom369/FrameworkBenchmarks,doom369/FrameworkBenchmarks,torhve/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zapov/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,sxend/FrameworkBenchmarks,dmacd/FB-try1,saturday06/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/Frame
workBenchmarks,leafo/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,herloct/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,valyala/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,denkab/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zloster/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sxend/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,methane/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,methane/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,actframework/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,testn/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sxend/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Verber/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,dmacd/FB-try1,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,denkab/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,hper
adin/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,testn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,dmacd/FB-try1,victorbriz/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,actframework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,doom369/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,leafo/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sxend/FrameworkBenchmarks,methane/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,leafo/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,herloct/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nkasvosve/F
rameworkBenchmarks,hamiltont/FrameworkBenchmarks,Verber/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,dmacd/FB-try1,greenlaw110/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jamming/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,actframework/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jamming/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,methane/FrameworkBenchmarks,torhve/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,actframework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zloster/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,denkab/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,waiteb3/FrameworkBen
chmarks,nathana1/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,grob/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,dmacd/FB-try1,martin-g/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,herloct/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,leafo/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,doom369/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,actframework/FrameworkBenchmarks,methane/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,khellang/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,dmacd/FB-try1,nathana1/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,leafo/FrameworkB
enchmarks,victorbriz/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,doom369/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,actframework/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,actframework/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jamming/FrameworkBenchmarks,
youprofit/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,joshk/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,doom369/FrameworkBenchmarks,joshk/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,testn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,grob/FrameworkBenchmarks,sgml/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,joshk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,herloct/FrameworkBenchmarks,denkab/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,meth
ane/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sgml/FrameworkBenchmarks,julienschmidt/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,testn/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,valyala/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sxend/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,testn/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,herloct/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zapov/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,doom369/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,grob/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zloster/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,F3Community/Fr
ameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,grob/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sgml/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,valyala/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,joshk/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,testn/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zapov/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sxend/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,valyala/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jamming/FrameworkBenchmarks,zloster/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,thousandsofthem/Fra
meworkBenchmarks | php-silex/setup.py | php-silex/setup.py | import subprocess
import sys
import setup_util
from os.path import expanduser
home = expanduser("~")
def start(args):
    """Configure and launch the php-silex benchmark stack (php-fpm + nginx).

    Returns 0 on success, 1 if any setup command fails (harness convention).
    """
    # Point the app at the benchmark DB host and rewrite absolute paths in the
    # deploy configs to match this user's FrameworkBenchmarks checkout.
    setup_util.replace_text("php-silex/web/index.php", "192.168.100.102", "" + args.database_host + "")
    setup_util.replace_text("php-silex/deploy/php-silex", "\".*\/FrameworkBenchmarks", "\"" + home + "/FrameworkBenchmarks")
    setup_util.replace_text("php-silex/deploy/php-silex", "Directory .*\/FrameworkBenchmarks", "Directory " + home + "/FrameworkBenchmarks")
    setup_util.replace_text("php-silex/deploy/nginx.conf", "root .*\/FrameworkBenchmarks", "root " + home + "/FrameworkBenchmarks")
    try:
        #subprocess.check_call("sudo cp cake/deploy/cake /etc/apache2/sites-available/", shell=True)
        #subprocess.check_call("sudo a2ensite cake", shell=True)
        #subprocess.check_call("sudo chown -R www-data:www-data cake", shell=True)
        #subprocess.check_call("sudo /etc/init.d/apache2 start", shell=True)
        # Install PHP dependencies (with optimized autoloader), then start
        # php-fpm and nginx in that order — nginx proxies to php-fpm.
        subprocess.check_call("composer.phar install --optimize-autoloader", shell=True, cwd="php-silex")
        subprocess.check_call("sudo php-fpm --fpm-config config/php-fpm.conf -g " + home + "/FrameworkBenchmarks/php-silex/deploy/php-fpm.pid", shell=True)
        subprocess.check_call("sudo /usr/local/nginx/sbin/nginx -c " + home + "/FrameworkBenchmarks/php-silex/deploy/nginx.conf", shell=True)
        return 0
    except subprocess.CalledProcessError:
        return 1
def stop():
    """Shut down the php-silex stack; returns 0 on success, 1 on failure."""
    try:
        # Stop nginx first, then quit the php-fpm master via its pid file.
        # subprocess.call (not check_call) is used so a missing pid file
        # does not abort the shutdown sequence.
        subprocess.call("sudo /usr/local/nginx/sbin/nginx -s stop", shell=True)
        subprocess.call("sudo kill -QUIT $( cat php-silex/deploy/php-fpm.pid )", shell=True)
        #subprocess.check_call("sudo a2dissite cake", shell=True)
        #subprocess.check_call("sudo /etc/init.d/apache2 stop", shell=True)
        #subprocess.check_call("sudo chown -R $USER:$USER cake", shell=True)
        return 0
    except subprocess.CalledProcessError:
        return 1
|
import subprocess
import sys
import setup_util
from os.path import expanduser
home = expanduser("~")
def start(args):
    """Configure and launch the php-silex benchmark stack (php-fpm + nginx).

    Returns 0 on success, 1 if any setup command fails (harness convention).
    """
    # Point the app at the benchmark DB host and rewrite absolute paths in the
    # deploy configs to match this user's FrameworkBenchmarks checkout.
    setup_util.replace_text("php-silex/web/index.php", "192.168.100.102", "" + args.database_host + "")
    setup_util.replace_text("php-silex/deploy/php-silex", "\".*\/FrameworkBenchmarks", "\"" + home + "/FrameworkBenchmarks")
    setup_util.replace_text("php-silex/deploy/php-silex", "Directory .*\/FrameworkBenchmarks", "Directory " + home + "/FrameworkBenchmarks")
    setup_util.replace_text("php-silex/deploy/nginx.conf", "root .*\/FrameworkBenchmarks", "root " + home + "/FrameworkBenchmarks")
    try:
        #subprocess.check_call("sudo cp cake/deploy/cake /etc/apache2/sites-available/", shell=True)
        #subprocess.check_call("sudo a2ensite cake", shell=True)
        #subprocess.check_call("sudo chown -R www-data:www-data cake", shell=True)
        #subprocess.check_call("sudo /etc/init.d/apache2 start", shell=True)
        # Install PHP dependencies, then start php-fpm and nginx in that
        # order — nginx proxies to php-fpm.
        subprocess.check_call("composer.phar install", shell=True, cwd="php-silex")
        subprocess.check_call("sudo php-fpm --fpm-config config/php-fpm.conf -g " + home + "/FrameworkBenchmarks/php-silex/deploy/php-fpm.pid", shell=True)
        subprocess.check_call("sudo /usr/local/nginx/sbin/nginx -c " + home + "/FrameworkBenchmarks/php-silex/deploy/nginx.conf", shell=True)
        return 0
    except subprocess.CalledProcessError:
        return 1
def stop():
    """Shut down the php-silex stack; returns 0 on success, 1 on failure."""
    try:
        # Stop nginx first, then quit the php-fpm master via its pid file.
        # subprocess.call (not check_call) is used so a missing pid file
        # does not abort the shutdown sequence.
        subprocess.call("sudo /usr/local/nginx/sbin/nginx -s stop", shell=True)
        subprocess.call("sudo kill -QUIT $( cat php-silex/deploy/php-fpm.pid )", shell=True)
        #subprocess.check_call("sudo a2dissite cake", shell=True)
        #subprocess.check_call("sudo /etc/init.d/apache2 stop", shell=True)
        #subprocess.check_call("sudo chown -R $USER:$USER cake", shell=True)
        return 0
    except subprocess.CalledProcessError:
        return 1
| bsd-3-clause | Python |
26011563bf0880206269582a87f9fff61f262c83 | add a new migration | Johnetordoff/osf.io,icereval/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,binoculars/osf.io,brianjgeiger/osf.io,laurenrevere/osf.io,icereval/osf.io,baylee-d/osf.io,mfraezz/osf.io,cslzchen/osf.io,sloria/osf.io,crcresearch/osf.io,crcresearch/osf.io,leb2dg/osf.io,felliott/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,adlius/osf.io,leb2dg/osf.io,cslzchen/osf.io,TomBaxter/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,adlius/osf.io,crcresearch/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,adlius/osf.io,caseyrollins/osf.io,aaxelb/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,binoculars/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,TomBaxter/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,chennan47/osf.io,felliott/osf.io,icereval/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,adlius/osf.io,aaxelb/osf.io,mfraezz/osf.io,pattisdr/osf.io,erinspace/osf.io,binoculars/osf.io,mfraezz/osf.io,cslzchen/osf.io,baylee-d/osf.io,leb2dg/osf.io,mfraezz/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io,sloria/osf.io,caseyrollins/osf.io,erinspace/osf.io,laurenrevere/osf.io,pattisdr/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,sloria/osf.io,chennan47/osf.io,erinspace/osf.io,mattclark/osf.io,felliott/osf.io | osf/migrations/0056_citationstyle_has_bibliography.py | osf/migrations/0056_citationstyle_has_bibliography.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-29 14:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django migration: add the boolean ``has_bibliography``
    column (default False for existing rows) to the CitationStyle model."""
    dependencies = [
        ('osf', '0055_auto_20170823_1648'),
    ]
    operations = [
        migrations.AddField(
            model_name='citationstyle',
            name='has_bibliography',
            field=models.BooleanField(default=False),
        ),
    ]
| apache-2.0 | Python |
|
d637cbe9c904fb0f0b67fbc10f66db299d153f4e | Add basic smoke tests for doc generation | boto/boto3 | tests/functional/test_docs.py | tests/functional/test_docs.py | # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from tests import unittest
import botocore.session
from boto3.docs import docs_for
class TestDocs(unittest.TestCase):
    """Smoke tests: docs_for('s3') output mentions the expected sections."""

    def setUp(self):
        self.session = botocore.session.get_session()

    def _render_s3_docs(self):
        # Every test below inspects the same generated documentation string.
        return docs_for('s3', self.session)

    def test_resource_docs_generated(self):
        rendered = self._render_s3_docs()
        for fragment in ('Service Resource',
                         'A resource representing Amazon Simple Storage Service'):
            self.assertIn(fragment, rendered)

    def test_client_docs_generated(self):
        rendered = self._render_s3_docs()
        for fragment in ('s3.Client',
                         'A low-level client representing Amazon Simple Storage Service'):
            self.assertIn(fragment, rendered)

    def test_waiter_docs_generated(self):
        rendered = self._render_s3_docs()
        for fragment in ('Waiter', 'bucket_exists'):
            self.assertIn(fragment, rendered)
| apache-2.0 | Python |
|
8fb94de6b72847bdb618ffa60fa037d16bab443e | Add closing tests module | danforthcenter/plantcv,danforthcenter/plantcv,danforthcenter/plantcv | tests/plantcv/test_closing.py | tests/plantcv/test_closing.py | import pytest
import cv2
import numpy as np
from plantcv.plantcv import closing
def test_closing(test_data):
    """closing() on a binary image keeps its shape and stays binary (0/255)."""
    # Read in test data
    bin_img = cv2.imread(test_data.small_bin_img, -1)
    filtered_img = closing(gray_img=bin_img)
    # Assert that the output image has the dimensions of the input image and is binary
    assert bin_img.shape == filtered_img.shape and np.array_equal(np.unique(filtered_img), np.array([0, 255]))
def test_closing_grayscale(test_data):
    """closing() with an explicit 4x4 kernel on a grayscale image matches a known pixel sum."""
    # Read in test data
    gray_img = cv2.imread(test_data.small_gray_img, -1)
    filtered_img = closing(gray_img=gray_img, kernel=np.ones((4, 4), np.uint8))
    # Regression value: total intensity of the filtered image for this fixture.
    assert np.sum(filtered_img) == 33160632
def test_closing_bad_input(test_data):
    """closing() rejects a 3-channel RGB image with RuntimeError."""
    # Read in test data
    rgb_img = cv2.imread(test_data.small_rgb_img)
    with pytest.raises(RuntimeError):
        _ = closing(gray_img=rgb_img)
| mit | Python |
|
548f4f6512ced9a9c41a074a3c8382f87ccafa66 | add image resizing to python script and give it a better name | dagar/xkcd-1110,dagar/xkcd-1110 | xkcd1110_stitch.py | xkcd1110_stitch.py | #! /usr/bin/env python
"""Combines resized tiles grabbed from xkcd 1110 into one large png"""
import Image
import os
import sys
def coord(image_x, image_y, tilesize):
    """Map (x, y) tile coordinates to the tile's image file path.

    Positive x is east, non-positive x is west; positive y is north,
    non-positive y is south (xkcd 1110 tile naming). When the tile file
    does not exist, a solid filler is substituted: white above ground
    (y > 0), black otherwise. ``tilesize`` is accepted for interface
    compatibility but unused here.
    """
    image_dir = "images/"
    lng = "%se" % image_x if image_x > 0 else "%sw" % (1 - image_x)
    lat = "%sn" % image_y if image_y > 0 else "%ss" % (1 - image_y)
    tile_path = image_dir + lat + lng + ".png"
    if os.path.isfile(tile_path):
        return tile_path
    # Missing tile: sky is white, underground is black.
    filler = "white.png" if image_y > 0 else "black.png"
    return image_dir + filler
def merge_images(xmin, xmax, ymin, ymax, tilesize) :
    '''
    Stitch every tile in the inclusive coordinate range into one large
    image of (xmax-xmin+1) x (ymax-ymin+1) cells, each cell resized to
    tilesize x tilesize pixels, and save it as a PNG.
    '''
    out = Image.new('RGB', ((xmax-xmin+1) * tilesize, (ymax-ymin+1) * tilesize))
    imx = 0
    for image_x in range(xmin, xmax+1) :
        imy = 0
        for image_y in range(ymin, ymax+1) :
            # y is negated because image rows grow downward while map
            # coordinates grow upward.
            tile = Image.open(coord(image_x, -image_y, tilesize))
            # Normalize every tile to the requested cell size.
            resized_tile = tile.resize((tilesize, tilesize))
            # Bug fix: paste the *resized* tile (the original code computed
            # resized_tile but pasted the unresized one, so non-native
            # tilesize values produced a misaligned mosaic).
            out.paste(resized_tile, (imx, imy))
            imy += tilesize
        imx += tilesize
    out.save("xkcd_1110_combined_%s.png" % (tilesize))
# Script entry point: the first CLI argument selects the output tile size
# in pixels; only 1..2048 is accepted (larger sizes silently do nothing,
# matching the original behavior).
try:
    input_arg = int(sys.argv[1])
    if 0 < input_arg <= 2048:
        merge_images(-32, 48, -13, 18, input_arg)
except (IndexError, ValueError):
    # Robustness fix: a missing argument (IndexError) previously escaped as
    # an uncaught traceback; treat it like a non-integer argument.
    sys.exit(-1)
| mit | Python |
|
c32d62eb82bbe6d728d66d7544c45dfb296afcd4 | Implement regression test runner in Python | bitsuperlab/cpp-play,frrp/bitshares,camponez/bitshares,frrp/bitshares,RemitaBit/Remitabit,FollowMyVote/bitshares,bitshares/bitshares-0.x,camponez/bitshares,jakeporter/Bitshares,dacsunlimited/dac_play,FollowMyVote/bitshares,FollowMyVote/bitshares,bitshares/bitshares,frrp/bitshares,RemitaBit/Remitabit,dacsunlimited/dac_play,FollowMyVote/bitshares,jakeporter/Bitshares,bitshares/bitshares-0.x,jakeporter/Bitshares,camponez/bitshares,bitshares/devshares,bitshares/devshares,RemitaBit/Remitabit,bitshares/devshares,bitshares/bitshares,bitshares/bitshares,camponez/bitshares,jakeporter/Bitshares,bitsuperlab/cpp-play,FollowMyVote/bitshares,frrp/bitshares,jakeporter/Bitshares,bitshares/devshares,RemitaBit/Remitabit,bitshares/bitshares-0.x,jakeporter/Bitshares,camponez/bitshares,dacsunlimited/dac_play,RemitaBit/Remitabit,dacsunlimited/dac_play,bitshares/devshares,dacsunlimited/dac_play,bitshares/bitshares-0.x,bitshares/bitshares,RemitaBit/Remitabit,bitsuperlab/cpp-play,bitshares/bitshares,bitsuperlab/cpp-play,dacsunlimited/dac_play,frrp/bitshares,bitsuperlab/cpp-play,bitshares/bitshares-0.x,camponez/bitshares,frrp/bitshares,bitshares/bitshares,bitshares/bitshares-0.x,bitsuperlab/cpp-play,FollowMyVote/bitshares,bitshares/devshares | tests/run_regression_tests.py | tests/run_regression_tests.py | #!/usr/bin/env python3
# this file is an experiment to run regression tests in parallel
# using Python for cross platform to run with the same script on all platforms
# (currently .sh for Linux, .bat for Windows, ??? for Mac)
import os
import queue
import subprocess
import threading
class TestRunner(object):
    """Discovers regression tests and runs them in parallel worker threads,
    each test executed as a ./wallet_tests subprocess."""
    def __init__(self):
        # Directory scanned for test definitions.
        self.basedir = "regression_tests"
        # Work queue of test names and a result queue of (name, result) pairs.
        self.q_test = queue.Queue()
        self.q_test_result = queue.Queue()
        # Per-test subprocess timeout, in seconds.
        self.test_timeout = 5*60
        return
    def list_tests(self):
        """Return names of runnable tests under basedir.

        A runnable test is a directory not starting with "_" or ".", that
        contains a test.config file and has no "wip" marker file.
        """
        test_list = []
        print(os.listdir(self.basedir))
        for regression_test in os.listdir(self.basedir):
            if regression_test.startswith("_"):
                print("failed _")
                continue
            if regression_test.startswith("."):
                print("failed .")
                continue
            if not os.path.isdir(os.path.join(self.basedir, regression_test)):
                print("failed isdir")
                continue
            if not os.path.exists(os.path.join(self.basedir, regression_test, "test.config")):
                print("failed exists test.config")
                continue
            if os.path.exists(os.path.join(self.basedir, regression_test, "wip")):
                print("failed exists wip")
                continue
            test_list.append(regression_test)
        print("here is tests:", test_list)
        return test_list
    def run_regression_test(self, name):
        """Run one named test via the wallet_tests binary; return its exit code.

        Raises subprocess.TimeoutExpired if the test exceeds test_timeout.
        """
        result = subprocess.call(["./wallet_tests", "-t", "regression_tests_without_network/"+name],
            stdin=subprocess.DEVNULL, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
            timeout=self.test_timeout,
            )
        return result
    def run_worker_thread(self):
        """Worker loop: pull names off q_test until empty; push (name, result)
        onto q_test_result, using the string "TIMEOUT" for timed-out tests."""
        while True:
            try:
                test_name = self.q_test.get_nowait()
            except queue.Empty:
                # No work left; let this thread exit.
                break
            try:
                result = self.run_regression_test(test_name)
            except subprocess.TimeoutExpired:
                result = "TIMEOUT"
            self.q_test_result.put((test_name, result))
        return
    def run_parallel_regression_tests(self, test_list, parallel_jobs=8):
        """Queue all of test_list, start up to parallel_jobs worker threads,
        and print each test's pass/fail/timeout status as results arrive."""
        worker_threads = []
        for test_name in test_list:
            print("adding test:", test_name)
            self.q_test.put(test_name)
        for n in range(parallel_jobs):
            t = threading.Thread(target=self.run_worker_thread, name="test-worker-"+str(n))
            worker_threads.append(t)
            t.start()
        # block and show results
        while True:
            # are there any results?
            while True:
                try:
                    test_name, test_result = self.q_test_result.get(block=False)
                except queue.Empty:
                    break
                if test_result == 0:
                    condition = "passed"
                elif test_result == "TIMEOUT":
                    condition = "timed out"
                else:
                    condition = "failed"
                print("test "+test_name+" "+condition+" (rc="+repr(test_result)+")")
            # Poll workers: wait briefly on the first live thread, then loop
            # back to drain any new results; finish once all threads are done.
            for t in worker_threads:
                if t.is_alive():
                    t.join(0.050)
                    break
            else:
                # no threads were alive
                break
        return
if __name__ == "__main__":
    # Script entry point: discover all runnable tests and run them in parallel.
    runner = TestRunner()
    runner.run_parallel_regression_tests(runner.list_tests())
| unlicense | Python |
|
c226835aa56a2d5ba8583e63c4b75765cd24711d | add new package (#27971) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-zipfile-deflate64/package.py | var/spack/repos/builtin/packages/py-zipfile-deflate64/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyZipfileDeflate64(PythonPackage):
    """Extract Deflate64 ZIP archives with Python's zipfile API."""
    # Upstream project page and the sdist location on PyPI.
    homepage = "https://github.com/brianhelba/zipfile-deflate64"
    pypi = "zipfile-deflate64/zipfile-deflate64-0.2.0.tar.gz"
    version('0.2.0', sha256='875a3299de102edf1c17f8cafcc528b1ca80b62dc4814b9cb56867ec59fbfd18')
    # Needs Python 3.6+ at build and run time; setuptools and
    # setuptools-scm (with toml support) are build-only dependencies.
    depends_on('[email protected]:', type=('build', 'run'))
    depends_on('py-setuptools@42:', type='build')
    depends_on('[email protected]:+toml', type='build')
| lgpl-2.1 | Python |
|
8c6646d75ec6f9345e1582c02611984a1d953582 | add reproducing case | pabigot/pyxb,pabigot/pyxb | tests/trac/test-issue-0092.py | tests/trac/test-issue-0092.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import logging
import pyxb.binding.generate
import pyxb.utils.domutils
import xml.dom.minidom as dom
# Configure logging only when run as a script, not when imported.
if __name__ == '__main__':
    logging.basicConfig()
    _log = logging.getLogger(__name__)
# Minimal schema reproducing issue 92: a HOST element whose TEMPLATE child
# is xs:anyType (wildcard content).
xsd = '''<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema">
        <xs:element name="HOST">
          <xs:complexType>
            <xs:sequence>
              <xs:element name="ID" type="xs:integer"/>
              <xs:element name="TEMPLATE" type="xs:anyType"/>
            </xs:sequence>
          </xs:complexType>
        </xs:element>
</xs:schema>
'''
# Generate PyXB bindings from the schema and execute them in this module's
# namespace, which defines CreateFromDocument/CreateFromDOM used below.
code = pyxb.binding.generate.GeneratePython(schema_text=xsd)
rv = compile(code, 'test', 'exec')
eval(rv)
import unittest
class TestIssue0092 (unittest.TestCase):
    """Regression tests for issue 92: round-tripping wildcard (xs:anyType)
    TEMPLATE content, including CDATA sections, through toDOM()."""
    def testCreateEmptyTemplate (self):
        """An empty TEMPLATE element parses and sibling ID is still read."""
        xmlt = '<HOST><ID>1</ID><TEMPLATE/></HOST>';
        xmld = xmlt.encode('utf-8');
        doc = CreateFromDocument(xmld);
        self.assertEqual(doc.ID,1)
    def testCreateToDom (self):
        """Wildcard child content survives serialization via toDOM()."""
        xmlt = '<HOST><ID>1</ID><TEMPLATE><NODE>1</NODE></TEMPLATE></HOST>';
        xmld = xmlt.encode('utf-8');
        doc = CreateFromDocument(xmld);
        templateFragment=doc.TEMPLATE.toDOM()
        self.assertEqual(templateFragment.toxml(), '''<?xml version="1.0" ?><TEMPLATE><NODE>1</NODE></TEMPLATE>''')
    def testCreateWithCDATAToDom (self):
        """CDATA content is serialized as plain text via CreateFromDocument."""
        xmlt = '<HOST><ID>1</ID><TEMPLATE><NODE><![CDATA[text]]></NODE></TEMPLATE></HOST>';
        xmld = xmlt.encode('utf-8');
        doc = CreateFromDocument(xmld);
        templateFragment=doc.TEMPLATE.toDOM()
        self.assertEqual(templateFragment.toxml(), '''<?xml version="1.0" ?><TEMPLATE><NODE>text</NODE></TEMPLATE>''')
    def testCreateFromDOMWithCDATAToDom (self):
        """Same CDATA round-trip, but entering through a pre-parsed DOM tree."""
        xmlt = '<HOST><ID>1</ID><TEMPLATE><NODE><![CDATA[text]]></NODE></TEMPLATE></HOST>';
        xmld = xmlt.encode('utf-8');
        domDoc=dom.parseString(xmld);
        doc = CreateFromDOM(domDoc);
        templateFragment=doc.TEMPLATE.toDOM()
        self.assertEqual(templateFragment.toxml(), '''<?xml version="1.0" ?><TEMPLATE><NODE>text</NODE></TEMPLATE>''')
if __name__ == '__main__':
    # Run the regression tests when executed directly.
    unittest.main()
| apache-2.0 | Python |
|
86d8f0fd48ccb577a8300362ea9d181e63d2fa5d | Add unit tests for bandit.core.issue | stackforge/bandit,chair6/bandit,pombredanne/bandit,pombredanne/bandit,stackforge/bandit | tests/unit/core/test_issue.py | tests/unit/core/test_issue.py | # -*- coding:utf-8 -*-
#
# Copyright 2015 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import bandit
from bandit.core import issue
class IssueTests(testtools.TestCase):
    """Unit tests for bandit.core.issue.Issue construction, formatting,
    dict serialization and severity/confidence filtering."""
    def test_issue_create(self):
        """The helper builds a proper Issue instance."""
        new_issue = _get_issue_instance()
        self.assertIsInstance(new_issue, issue.Issue)
    def test_issue_str(self):
        """str(Issue) pins the exact human-readable report line."""
        test_issue = _get_issue_instance()
        self.assertEqual(
            ("Issue: 'Test issue' from bandit_plugin: Severity: MEDIUM "
             "Confidence: MEDIUM at code.py:1"),
            str(test_issue)
        )
    def test_issue_as_dict(self):
        """as_dict(with_code=False) exposes all expected keys."""
        test_issue = _get_issue_instance()
        test_issue_dict = test_issue.as_dict(with_code=False)
        self.assertIsInstance(test_issue_dict, dict)
        for attr in [
            'filename', 'test_name', 'issue_severity', 'issue_confidence',
            'issue_text', 'line_number', 'line_range'
        ]:
            self.assertIn(attr, test_issue_dict)
    def test_issue_filter(self):
        """filter(sev, conf) passes only when the issue meets BOTH thresholds.

        The fixture is MEDIUM/MEDIUM, so any HIGH threshold rejects it.
        """
        test_issue = _get_issue_instance()
        result = test_issue.filter(bandit.HIGH, bandit.HIGH)
        self.assertFalse(result)
        result = test_issue.filter(bandit.MEDIUM, bandit.MEDIUM)
        self.assertTrue(result)
        result = test_issue.filter(bandit.LOW, bandit.LOW)
        self.assertTrue(result)
        result = test_issue.filter(bandit.LOW, bandit.HIGH)
        self.assertFalse(result)
        result = test_issue.filter(bandit.HIGH, bandit.LOW)
        self.assertFalse(result)
def _get_issue_instance():
    """Build the shared test fixture: a MEDIUM/MEDIUM Issue reported by
    'bandit_plugin' at code.py line 1."""
    new_issue = issue.Issue(bandit.MEDIUM, bandit.MEDIUM, 'Test issue')
    new_issue.fname = 'code.py'
    new_issue.test = 'bandit_plugin'
    new_issue.lineno = 1
    return new_issue
| apache-2.0 | Python |
|
3d935fcc7d2d2afb004348a8839f2ec7813fe78c | Add unbound performance plugin | kadashu/satori,kadashu/satori,kadashu/satori,kadashu/satori,kadashu/satori,kadashu/satori | satori-rules/plugin/unbound/30_unbound.py | satori-rules/plugin/unbound/30_unbound.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
# -- prioritized --
import sys
import os.path
# sys.path.append(os.path.join(os.path.dirname(__file__), '../libs'))
# -- stdlib --
import json
import re
import socket
import subprocess
import time
# -- third party --
# -- own --
# -- code --
endpoint = socket.gethostname()
ts = int(time.time())
proc = subprocess.Popen(['/usr/sbin/unbound-control', 'stats'], stdout=subprocess.PIPE)
stats = {
match[0]: float(match[1])
for match in re.findall(r'(.*)\=(.*)', proc.stdout.read(), re.MULTILINE)
}
rst = {
'uptime': stats['time.up'],
'queries.total': stats['total.num.queries'],
'queries.pending': stats['total.requestlist.current.all'],
'queries.hit_rate': (stats['total.num.cachehits'] / stats['total.num.queries']) * 100,
}
print json.dumps([
{
"metric": "unbound.{}".format(k),
"endpoint": endpoint,
"timestamp": ts,
"step": 30,
"value": int(v),
"tags": {"server": endpoint},
}
for k, v in rst.items()
])
| apache-2.0 | Python |
|
a0714c8754c769c4fee868f2b449d9dc69d144a9 | Add Welcome plugin to welcome new members | itsmartin/nimbus,Brottweiler/nimbus,bcbwilla/nimbus,Plastix/nimbus | plugins/welcome.py | plugins/welcome.py | import json
from plugin import Plugin
class Welcome(Plugin):
    """
    Welcomes new members when they join the Slack team
    """
    def __init__(self):
        # Subscribe this plugin to Slack 'team_join' events.
        Plugin.__init__(self)
        self.event_type = 'team_join'
    def on_event(self, bot, event, response):
        """Post a welcome message mentioning the new user in #general.

        Silently does nothing if no general channel can be found.
        """
        # Get list of all channels (don't include archived channels)
        channel_response = bot.sc.api_call('channels.list', **{'exclude_archived': 1})
        # Convert string response to JSON
        # NOTE(review): assumes api_call returns a JSON string here — some
        # slackclient versions return an already-parsed dict; confirm.
        channel_response = json.loads(channel_response)
        # Find general channel
        general_channel = None
        if channel_response.get('ok'):
            for channel in channel_response['channels']:
                if channel.get('is_general'):
                    general_channel = channel['id']
        # Post welcome to general channel if one found
        if general_channel:
            user = event['user']['id']
            response['channel'] = general_channel
            response['link_names'] = 1 # Enables linking of names
            response['text'] = 'Welcome to the Slack team <@%s>!' % user
            bot.sc.api_call('chat.postMessage', **response)
| mit | Python |
|
3ce048f8c0346c30173b52a691bd18ece1cbc13d | Add a TensorFlow Probability sample | zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend,zettsu-t/cPlusPlusFriend | scripts/stock_price/tough_question_tfp.py | scripts/stock_price/tough_question_tfp.py | #!/usr/bin/python3
# coding: utf-8
'''
Implementation of the article below with TensorFlow Probability
'Bayesian Methods for Hackers'
https://github.com/CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Chapter2_MorePyMC/Ch2_MorePyMC_PyMC3.ipynb
Based on an example of TensorFlow Probability
https://github.com/tensorflow/probability/tree/master/tensorflow_probability/python/edward2
https://www.hellocybernetics.tech/entry/2018/11/09/231817
'''
import matplotlib.pyplot as plt
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
## from tensorflow_probability import edward2 as ed
tfd = tfp.distributions
# Mixture model from "Bayesian Methods for Hackers" ch.2: N coin flips with
# X heads; infer the unknown head probability with HMC (TF1 graph mode).
N = 1000        # total number of observed trials
X = 300         # number of "1" observations among them
N_RESULTS = 2000    # posterior samples to keep
N_BURNIN = 1000     # warm-up samples to discard
## Explanatory variable(s)
true_prob = tf.random_uniform([], minval=0.0, maxval=1.0)
## Observed data
# X ones and N-X zeros, shuffled into a single observation vector.
observations = tf.random.shuffle(tf.concat([tf.ones(X, dtype=tf.int32), tf.zeros(N-X, dtype=tf.int32)], 0))
def target_log_prob_fn(true_prob):
    """Unnormalized log posterior: each answer is honest with prob 0.5
    (Bernoulli(true_prob) data) or a fair coin flip (Bernoulli(0.5));
    the two branches are combined per-observation via logsumexp."""
    log_prob_parts = [
        tfd.Bernoulli(probs=0.5).log_prob(tf.fill([N], 1)) + tfd.Bernoulli(probs=true_prob).log_prob(observations),
        tfd.Bernoulli(probs=0.5).log_prob(tf.fill([N], 0)) + tfd.Bernoulli(probs=0.5).log_prob(observations)
    ]
    sum_log_prob = tf.reduce_sum(tf.reduce_logsumexp(log_prob_parts, 0))
    return sum_log_prob
# Hamiltonian Monte Carlo transition kernel over the scalar true_prob.
hmc_kernel = tfp.mcmc.HamiltonianMonteCarlo(
    target_log_prob_fn=target_log_prob_fn,
    step_size=0.01,
    num_leapfrog_steps=5)
states, kernels_results = tfp.mcmc.sample_chain(
    num_results=N_RESULTS,
    current_state=[true_prob],
    kernel=hmc_kernel,
    num_burnin_steps=N_BURNIN)
# TF1-style execution: build the graph above, then run it in a session.
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    states_, results_ = sess.run([states, kernels_results])
# Visualize the posterior samples of true_prob.
plt.hist(states_[0], bins=50)
plt.show()
| mit | Python |
|
e42c115f8a612b3995e30b3606913acb7e7b0f63 | Create 2-off.py | CamJam-EduKit/EduKit1 | Code/2-off.py | Code/2-off.py | import RPi.GPIO as GPIO
# Turn off the three CamJam EduKit LEDs (Python 2 script for Raspberry Pi).
GPIO.setmode(GPIO.BCM)        # address pins by Broadcom (BCM) numbering
GPIO.setwarnings(False)       # suppress "channel already in use" warnings
# The LEDs sit on GPIO pins 18, 23 and 24; configure them as outputs.
GPIO.setup(18,GPIO.OUT)
GPIO.setup(23,GPIO.OUT)
GPIO.setup(24,GPIO.OUT)
print "Lights off"
# Drive every LED pin low to switch the lights off.
GPIO.output(18,GPIO.LOW)
GPIO.output(23,GPIO.LOW)
GPIO.output(24,GPIO.LOW)
# Release the pins back to their default (input) state.
GPIO.cleanup()
| mit | Python |
|
a2214039defb1094d47b7ce0abc4e56032136508 | Add merge migration | Johnetordoff/osf.io,cslzchen/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,adlius/osf.io,mattclark/osf.io,felliott/osf.io,saradbowman/osf.io,brianjgeiger/osf.io,felliott/osf.io,aaxelb/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,baylee-d/osf.io,aaxelb/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,saradbowman/osf.io,felliott/osf.io,mattclark/osf.io,cslzchen/osf.io,adlius/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,adlius/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,adlius/osf.io,mfraezz/osf.io,cslzchen/osf.io,caseyrollins/osf.io,aaxelb/osf.io,mfraezz/osf.io,pattisdr/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io | osf/migrations/0137_merge_20181011_1525.py | osf/migrations/0137_merge_20181011_1525.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.13 on 2018-10-11 15:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated merge migration: reconciles the two divergent 0136
    branches of the osf app; introduces no schema changes itself."""
    dependencies = [
        ('osf', '0136_merge_20181010_2242'),
        ('osf', '0136_add_ember_auth_register_waffle_flag'),
    ]
    operations = [
    ]
| apache-2.0 | Python |
|
1a3b9eec2a947a8f036fdea80a4d7de4b7549211 | Add prime functions library | dimkarakostas/project-euler | prime_functions.py | prime_functions.py | import numpy as np
from math import sqrt
def get_primes_below(n):
    # http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188
    """ Input n>=6, Returns a array of primes, 2 <= p < n """
    # Wheel sieve over numbers of the form 6k±1 only (all primes > 3).
    # Fixes: floor division (//) and range() replace the Python-2-only
    # "/" int division and xrange(), and the deprecated np.bool alias
    # (removed in NumPy 1.24) is replaced by the builtin bool. Behavior
    # is unchanged on Python 2 and the function now also works on Python 3.
    sieve = np.ones(n // 3 + (n % 6 == 2), dtype=bool)
    sieve[0] = False
    for i in range(int(n**0.5) // 3 + 1):
        if sieve[i]:
            # Candidate prime represented by sieve index i.
            k = 3*i + 1 | 1
            # Strike out multiples of k in both 6k+1 and 6k-1 progressions.
            sieve[((k*k) // 3)::2*k] = False
            sieve[(k*k + 4*k - 2*k*(i & 1)) // 3::2*k] = False
    # Map surviving sieve indices back to the integers they represent.
    return np.r_[2, 3, ((3*np.nonzero(sieve)[0]+1) | 1)]
def is_prime(n):
    """Return True when *n* is a prime number, False otherwise.

    Trial division by odd candidates up to sqrt(n); values below 2 and
    even numbers other than 2 are rejected up front.
    """
    if n <= 1:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False
    # Only odd divisors need checking once 2 has been ruled out.
    return all(n % divisor for divisor in range(3, int(sqrt(n)) + 1, 2))
| mit | Python |
|
def applyPrefix( prefix, name):
    """Return *name* with *prefix* prepended unless it already starts with it.

    The original test ``test == -1 or test != 0`` was redundant: a ``find``
    result of ``-1`` is already covered by ``!= 0``.  ``str.startswith``
    states the intent directly and behaves identically (including for an
    empty prefix, which is never prepended).
    """
    if not name.startswith(prefix):
        name = prefix + name
    return name
class renamePrefixDialog(MQWidget.Dialog):
    """Modal dialog asking for a prefix string and a 'recursive' flag."""

    def __init__(self, parent):
        MQWidget.Dialog.__init__(self, parent)
        self.title = "Add Prefix to Selected Objects"

        # Row 1: prefix text field plus a "recursive" checkbox (on by default).
        self.frame0 = self.createHorizontalFrame(self)
        self.label = MQWidget.Label(self.frame0)
        self.label.text = "Prefix:"
        self.pText = MQWidget.Edit(self.frame0)
        self.pText.text = ""
        self.plabel = MQWidget.Label(self.frame0)
        self.plabel.text = "Recursive:"
        self.rCheck = MQWidget.CheckBox(self.frame0)
        self.rCheck.checked = 1

        # Row 2: OK / Cancel buttons of uniform size, centred with filler space.
        # modalResult is the string returned by dlg.execute().
        self.frame1 = self.createHorizontalFrame(self)
        self.frame1.uniformSize = True
        self.okbtn = MQWidget.Button(self.frame1)
        self.okbtn.text = MQSystem.getResourceString("OK")
        self.okbtn.modalResult = "ok"
        self.okbtn.default = 1
        self.okbtn.fillBeforeRate = 1
        self.cancelbtn = MQWidget.Button(self.frame1)
        self.cancelbtn.text = MQSystem.getResourceString("Cancel")
        self.cancelbtn.modalResult = "cancel"
        self.cancelbtn.default = 1
        self.cancelbtn.fillAfterRate = 1
# for all objects that are selected , rename them with prefix.
# if recursive is on, rename their children objects too.
# For all objects that are selected, rename them with the prefix.
# If recursive is on, rename their descendant objects too.
dlg = renamePrefixDialog(MQWidget.getMainWindow())
if dlg.execute() == "ok":
    recursiveApply = dlg.rCheck.checked
    prefix = dlg.pText.text
    # Normalise the prefix so it always ends with an underscore.
    # NOTE(review): an empty prefix raises IndexError here - confirm the
    # dialog cannot return an empty string before relying on this.
    if prefix[len(prefix) - 1] != '_':
        prefix += '_'
    doc = MQSystem.getDocument()
    for i in range(0, len(doc.object)):
        if doc.object[i] is None: continue
        if doc.object[i].select == 1:
            doc.object[i].name = applyPrefix(prefix, doc.object[i].name)
            if recursiveApply == 1:
                # Scan the remaining objects for descendants of the selected
                # one; applyPrefix skips names already carrying the prefix.
                for k in range(i, len(doc.object)):
                    if doc.object[k] is None: continue
                    if doc.isAncestorObject( doc.object[i], doc.object[k]):
                        doc.object[k].name = applyPrefix(prefix, doc.object[k].name)
# Dead code kept from an earlier child-walking implementation.
'''
currentObj.name = applyPrefix(prefix, currentObj.name)
if recursiveApply == 1:
    chd = doc.getChildObjectCount(doc.object[i])
    for j in range(0, chd):
        childObj = doc.getChildObject( doc.object[i] , j )
        childObj.name = applyPrefix(prefix, childObj.name)
'''
| mit | Python |
|
451a65d6b5cbc182418f00703e2da84b7c346a70 | Create Dictionary._((glas+python?glaskrypt)) | bhdz/glaslang,bhdz/glaslang | Dictionary.py | Dictionary.py | #!/usr/bin/env python
#
# Basic hack
#
# What?Needed??
# Bison like parser for js
# _WHY? Because I forget things easily
class Dict(dict):
    """``dict`` subclass that stashes its keyword arguments in ``_context``.

    BUG FIX: the original ``class Dict(dict)`` header was missing the
    trailing colon, which made the whole module a SyntaxError.
    """

    def __init__(self, keyd, *arguments, **context):
        # NOTE(review): `keyd` and `arguments` are accepted but unused -
        # presumably placeholders for a planned API; confirm before removing.
        self._context = context
# Elaborate on that
class Elaboration(Dict):
    """Placeholder specialisation of :class:`Dict`.

    BUG FIX: the original ``class Elaboration(Dict)`` header was missing
    the trailing colon (SyntaxError).
    """
    pass
# To bind them together... I have no idea what the hell I am doing, here
class Dictionary(Elaboration):
    # NOTE(review): swallows all constructor arguments and never calls
    # super().__init__, so the ``_context`` attribute set by Dict is never
    # created on Dictionary instances - confirm this is intentional.
    def __init__(self, *args, **kw):
        pass
#
# Read the input(STDIN), and translate it on the output(STDOUT)
#
class py(object):
    # Namespace-style holder: ``py.code`` is a bare dict subclass used as a
    # tag type for "python code" values elsewhere in this sketch.
    class code(dict):
        pass
#
#... Interface !translate
#
class Rune(py.code, dict, Dictionary):
    # NOTE(review): this base order looks inconsistent - `dict` is listed
    # before its own subclass `Dictionary`, so Python cannot linearise an
    # MRO and raises TypeError at class-creation time. Confirm the intended
    # hierarchy (likely `Dictionary` should precede `dict`, or `dict` be
    # dropped entirely since both other bases already derive from it).
    pass
def translate(outputs, *runes):
    # NOTE(review): several apparent bugs in this sketch - `runes` is a
    # tuple, so next(runes) raises TypeError (needs iter()); `output`
    # (singular) is undefined (probably meant `outputs`); and next(outputs)
    # assumes `outputs` is an iterator. Left as-is pending clarified intent.
    rune = next(runes)
    output.append(
        translate(next(outputs), rune)
    )
# Dictionary: `py; ~ translate: ``glas.cup
| bsd-3-clause | Python |
|
0ebddf569f291ceca050972fe9cfd3d9e498e87c | add timeout decorator | pyannote/pyannote-audio,pyannote/pyannote-audio,pyannote/pyannote-audio | pyannote/audio/utils/timeout.py | pyannote/audio/utils/timeout.py | #!/usr/bin/env python
# encoding: utf-8
# Shamelessly stolen from
# https://gist.github.com/TySkby/143190ad1b88c6115597c45f996b030c
"""Easily put time restrictions on things
Note: Requires Python 3.x
Usage as a context manager:
```
with timeout(10):
something_that_should_not_exceed_ten_seconds()
```
Usage as a decorator:
```
@timeout(10)
def something_that_should_not_exceed_ten_seconds():
do_stuff_with_a_timeout()
```
Handle timeouts:
```
try:
with timeout(10):
something_that_should_not_exceed_ten_seconds()
except TimeoutError:
log('Got a timeout, couldn't finish')
```
Suppress TimeoutError and just die after expiration:
```
with timeout(10, suppress_timeout_errors=True):
something_that_should_not_exceed_ten_seconds()
print('Maybe exceeded 10 seconds, but finished either way')
```
"""
import contextlib
import errno
import os
import signal
# Default message: the platform's standard "Timer expired" error string.
DEFAULT_TIMEOUT_MESSAGE = os.strerror(errno.ETIME)


class timeout(contextlib.ContextDecorator):
    """Raise ``TimeoutError`` if the guarded block runs longer than *seconds*.

    Usable both as a context manager and, via ``ContextDecorator``, as a
    function decorator.  Built on ``SIGALRM``, so it only works in the main
    thread on POSIX platforms.  When ``suppress_timeout_errors`` is true the
    ``TimeoutError`` is swallowed instead of propagating.
    """

    def __init__(self, seconds, *, timeout_message=DEFAULT_TIMEOUT_MESSAGE, suppress_timeout_errors=False):
        self.seconds = int(seconds)
        self.timeout_message = timeout_message
        self.suppress = bool(suppress_timeout_errors)

    def _timeout_handler(self, signum, frame):
        # SIGALRM handler installed by __enter__; fires when the alarm expires.
        raise TimeoutError(self.timeout_message)

    def __enter__(self):
        # Arm the alarm; any previously installed SIGALRM handler is replaced.
        signal.signal(signal.SIGALRM, self._timeout_handler)
        signal.alarm(self.seconds)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Always cancel the pending alarm, even when an exception is in flight.
        signal.alarm(0)
        if exc_type is TimeoutError and self.suppress:
            return True
| mit | Python |
|
152def3ad56af928b81586867e3c8ee85cbf5311 | Add a client application class | will-hart/blitz,will-hart/blitz | blitz/client.py | blitz/client.py | __author__ = 'Will Hart'
from blitz.io.database import DatabaseClient
from blitz.io.tcp import TcpClient
import blitz.web.api as blitz_api
import blitz.web.http as blitz_http
import logging
#import json
import os.path
import tornado.httpserver
import tornado.ioloop
import tornado.web
class Config(object):
    """
    Holds configuration for a client application.

    A thin dict wrapper: ``get``/``set`` (also reachable via ``cfg[key]``
    syntax) read and write the ``settings`` dictionary, raising ``KeyError``
    for unknown keys on read.
    """

    settings = {}

    def __init__(self):
        """
        Sets up default settings
        """
        self.settings = {
            "template_path": os.path.join(os.path.dirname(__file__), "templates"),
            "static_path": os.path.join(os.path.dirname(__file__), "static"),
            "database_path": os.path.join(os.path.dirname(__file__), "data", "app.db"),
            #"schema_path": os.path.join(
            #    os.path.dirname(__file__), "data", "client_schema.sql"),
            #"settings_path": os.path.join(
            #    os.path.dirname(__file__), "data", "settings.txt"),
            "autoescape": None,
            "debug": True
        }

    def get(self, key):
        """
        Gets an item from settings

        :raises: KeyError if the item doesn't exist
        :returns: A value corresponding to the given key
        """
        # BUG FIX: the original tested `key in self.settings.keys`, which is
        # membership against the bound *method* object and raises TypeError.
        # Test the dictionary itself instead.
        if key in self.settings:
            return self.settings[key]
        raise KeyError("Unknown configuration setting - " + key)

    def set(self, key, value):
        """
        Sets the given configuration key to value

        :param key: the key to set
        :param value: the value to set the key to
        :returns: the value that was set
        """
        self.settings[key] = value
        return value

    def __getitem__(self, item):
        """
        A custom implementation of dict getters
        """
        return self.get(item)

    def __setitem__(self, key, value):
        """
        A custom implementation of dict setters
        """
        return self.set(key, value)
class Application(object):
    """
    A basic application which exposes the Api and HTTP request handlers
    provided by Tornado.
    """

    def __init__(self):
        """
        Create a new client web application, setting defaults
        """
        # create a file logger and set it up for logging to file
        self.logger = logging.getLogger('Application')
        log_handler = logging.FileHandler(os.path.join(os.path.dirname(__file__), "log.txt"))
        # BUG FIX: Formatter lives on the logging module, not on a Logger
        # instance - `self.logger.Formatter` raised AttributeError.
        log_formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
        log_handler.setFormatter(log_formatter)
        self.logger.addHandler(log_handler)
        self.logger.setLevel(logging.INFO)

        # load configuration
        self.config = Config()

        # create a database connection
        self.data = DatabaseClient()
        self.logger.info("Initialised client database")

        # create a TCP connection
        self.socket = TcpClient()
        self.logger.info("Initialised TCP socket - not connected")

        # create an application
        # BUG FIX: the raw-string prefix was typed inside the quotes
        # ('r/categories'), so these routes matched the literal path
        # "r/categories" etc. instead of "/categories".
        self.application = tornado.web.Application([
            (r'/', blitz_http.IndexHandler),
            (r'/categories', blitz_api.CategoriesHandler),
            (r'/cache/(?P<since>[^\/]+)', blitz_api.CacheHandler),
            (r'/download/(?P<session_id>[^\/]+)', blitz_api.DownloadHandler),
            (r'/session/(?P<session_id>[^\/]+)', blitz_api.SessionHandler),
            (r'/sessions', blitz_api.SessionsHandler),
            (r'/config', blitz_api.ConfigHandler)
        ], **self.config.settings)
        self.logger.info("Initialised client application")

        # create an HTTP server
        self.http_server = tornado.httpserver.HTTPServer(self.application)
        self.logger.info("Initialised client HTTP server")

    def start(self):
        """
        Starts the application: binds the HTTP server and enters the
        Tornado IO loop (blocks until the loop is stopped).
        """
        # start listening on the configured port and IP
        self.http_server.listen(self.config['port'])
        # BUG FIX: str() guards against TypeError when the configured port
        # is stored as an int.
        self.logger.info("HTTP server started listening on port " + str(self.config['port']))

        # start the IO loop
        self.logger.info("HTTP server starting IO loop")
        tornado.ioloop.IOLoop.instance().start()
| agpl-3.0 | Python |
|
ca0bec705a6c68c7540c9b7f0a02972e1f26723c | Create py-递归设置.py | ganmk/python-prctice | py-递归设置.py | py-递归设置.py | #!/usr/bin/python
# -*- encoding:utf-8 -*-
import sys
sys.setrecursionlimit(1500) # set the maximum depth as 1500
def recursion(n):
if(n <= 0):
return
print n
recursion(n - 1)
if __name__ == "__main__":
recursion(1200)
| mit | Python |
|
5ade8c78e6ab875047ca29779dc37f9029a9f0d6 | Create set_auth.py | brunocfnba/docker-airflow,brunocfnba/docker-airflow | bluemix/set_auth.py | bluemix/set_auth.py | import airflow
from airflow import models, settings
from airflow.contrib.auth.backends.password_auth import PasswordUser
user = PasswordUser(models.User())
user.username = 'username'
user.email = '[email protected]'
user.password = 'pwd'
session = settings.Session()
session.add(user)
session.commit()
session.close()
| apache-2.0 | Python |
|
6ce3ae4ef4a274e76bf1f6d76f0675bec2391d17 | add first pass of maria DB slave nagiors monitor - slave_sql and slave_io checks are done | ordergroove/check_mariadb_slaves | check_mariadb_slaves.py | check_mariadb_slaves.py | #!/usr/bin/env python
"""MariaDB slave status checker"""
import sys
import argparse
import MySQLdb
class SlaveStatusCheck(object):
    """Run ``SHOW SLAVE ... STATUS`` against MariaDB and report the result
    in the Nagios plugin convention: print one status line and exit with
    0 (OK), 1 (UNKNOWN/error) or 2 (CRITICAL).

    Written for Python 2 (print statements, old except syntax).
    """

    # Supported check modes; each name matches a method on this class,
    # dispatched by run_check().
    REPLICATION_LAG_MODE = 'replication_lag'
    SLAVESQL_MODE = 'slave_sql'
    SLAVEIO_MODE = 'slave_io'
    MODES = (REPLICATION_LAG_MODE,
             SLAVESQL_MODE,
             SLAVEIO_MODE)

    def __init__(self, hostname, username, password, slave_conn,
                 mode, verbose=False, warning=None, critical=None):
        self.hostname = hostname
        self.username = username
        self.password = password
        # warning/critical thresholds are only meaningful for the (still
        # unimplemented) replication_lag mode.
        self.warning = warning
        self.critical = critical
        self.verbose = verbose
        self.mode = mode

        # Execute the query immediately and store the result row.
        self._result = {}
        self.get_slave_status(slave_conn)

    def run_check(self):
        """Execute the check method named by ``self.mode``."""
        check_fn = getattr(self, self.mode)
        check_fn()

    def replication_lag(self):
        # NOTE(review): not implemented - selecting this mode silently does
        # nothing and the process exits 0.
        pass

    def slave_sql(self):
        """Check that Slave_SQL_Running = Yes"""
        if self._result.get('Slave_SQL_Running') == "Yes":
            print "OK - Slave sql is running"
            sys.exit(0)
        else:
            print "CRITICAL - Slave sql is not running"
            sys.exit(2)

    def slave_io(self):
        """Check that Slave_IO_Running = Yes"""
        if self._result.get('Slave_IO_Running') == "Yes":
            print "OK - Slave io is running"
            sys.exit(0)
        else:
            print "CRITICAL - Slave io is not running"
            sys.exit(2)

    def get_slave_status(self, slave_connection):
        """Fetch the named connection's slave-status row into ``self._result``."""
        try:
            sql = 'SHOW SLAVE "{0}" STATUS'.format(slave_connection)
            conn = None
            conn = MySQLdb.Connection(
                self.hostname,
                self.username,
                self.password)
            # DictCursor so the row is keyed by column name.
            curs = conn.cursor(MySQLdb.cursors.DictCursor)
            curs.execute(sql)
            conn.commit()
            self._result = curs.fetchall()[0]
            if self.verbose:
                print self._result
        except MySQLdb.Error, exc:
            # Any driver error is reported as exit status 1.
            print "ERROR - {0}: {1}".format(exc.args[0], exc.args[1])
            sys.exit(1)
        finally:
            if conn:
                conn.close()
def main():
    """Parse command-line options, run the requested check, and exit with
    a Nagios-style status code (via SlaveStatusCheck)."""
    parser = argparse.ArgumentParser(description='MariaDB slave status checker')
    parser.add_argument('--hostname', default='localhost', type=str,
                        help="MariaDB hostname")
    parser.add_argument('--username', type=str, help="MariaDB username")
    parser.add_argument('--password', type=str, help="MariaDB password")
    parser.add_argument('--connection', required=True, type=str,
                        help="MariaDB slave connection")
    parser.add_argument('--mode', type=str, required=True,
                        choices=SlaveStatusCheck.MODES,
                        help="slave state to check")
    parser.add_argument('-w', '--warning', type=int, default=None,
                        help="warning limit")
    parser.add_argument('-c', '--critical', type=int, default=None,
                        help="critical limit")
    parser.add_argument('--verbose', action='store_true', default=False,
                        help="enable verbose mode")
    args = parser.parse_args()

    # The constructor runs the status query; run_check() prints and exits.
    ssc = SlaveStatusCheck(args.hostname, args.username, args.password,
                           args.connection, args.mode, args.verbose,
                           args.warning, args.critical)
    ssc.run_check()
| mit | Python |
|
cb5b85fc4a011f7eb9628b7099311b399f4d033d | Create born_on_a_friday.py | cclauss/Ten-lines-or-less | born_on_a_friday.py | born_on_a_friday.py | #!/usr/bin/env python3
from datetime import datetime
from typing import Tuple
def ask_month_day_year(prompt: str = "Enter your birthday") -> Tuple[int, int, int]:
    """Prompt the user for a date and return it as a (month, day, year) tuple.

    Raises ValueError if the reply does not contain exactly three
    slash-separated integers.
    """
    raw = input(f"{prompt} in the format: MM/DD/YYYY ")
    fields = [int(field.strip()) for field in raw.split("/")]
    month, day, year = fields
    return month, day, year
def day_of_the_week(year, month, day):
    """Return the full weekday name (e.g. ``"Friday"``) for the given date."""
    moment = datetime(year, month, day)
    return moment.strftime("%A")
# Script entry point: prompt for a birth date and report its weekday.
month, day, year = ask_month_day_year()
print(f"You were born on a {day_of_the_week(year, month, day)}.")
| apache-2.0 | Python |
|
576dd7270714ec63beab9ce6af22f94e20dc1dd5 | Add admin model classes to forum_tracking app | franga2000/django-machina,ellmetha/django-machina,reinbach/django-machina,reinbach/django-machina,ellmetha/django-machina,reinbach/django-machina,franga2000/django-machina,franga2000/django-machina,ellmetha/django-machina | machina/apps/forum_tracking/admin.py | machina/apps/forum_tracking/admin.py | # -*- coding: utf-8 -*-
# Standard library imports
from __future__ import unicode_literals
# Third party imports
from django.contrib import admin
# Local application / specific library imports
from machina.core.db.models import get_model
ForumReadTrack = get_model('forum_tracking', 'ForumReadTrack')
TopicReadTrack = get_model('forum_tracking', 'TopicReadTrack')
class ForumReadTrackAdmin(admin.ModelAdmin):
    """Admin options for forum read tracks: list user, forum and timestamp."""
    list_display = ('__str__', 'user', 'forum', 'mark_time',)
    list_filter = ('mark_time',)
class TopicReadTrackAdmin(admin.ModelAdmin):
    """Admin options for topic read tracks: list user, topic and timestamp."""
    list_display = ('__str__', 'user', 'topic', 'mark_time',)
    list_filter = ('mark_time',)


# Register both tracking models with the default admin site.
admin.site.register(ForumReadTrack, ForumReadTrackAdmin)
admin.site.register(TopicReadTrack, TopicReadTrackAdmin)
| bsd-3-clause | Python |
|
115d17320e9fcd3eab4e60f222f382a7d551948b | Add unittests for magnumclient.v1.client module | openstack/python-magnumclient | magnumclient/tests/v1/test_client.py | magnumclient/tests/v1/test_client.py | # Copyright (c) 2015 Thales Services SAS
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import testtools
from magnumclient.v1 import client
class ClientTest(testtools.TestCase):
    """Unit tests for ``magnumclient.v1.client.Client`` construction."""

    # Minimal fake service catalog exposing a "container" endpoint.
    ENDPOINTS = {'container': [{'publicURL': 'http://myurl/'}]}

    @mock.patch('magnumclient.common.httpclient.HTTPClient')
    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_with_token_and_url(self, keystone_client, http_client):
        # With both a token and a magnum URL supplied, keystone is bypassed.
        client.Client(input_auth_token='mytoken', magnum_url='http://myurl/')
        self.assertFalse(keystone_client.called)
        http_client.assert_called_once_with(
            'http://myurl/', token='mytoken', auth_ref=None)

    @mock.patch('magnumclient.common.httpclient.HTTPClient')
    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_with_token(self, keystone_client, http_client):
        # Token but no URL: the endpoint is resolved from the service catalog.
        mocked = mock.Mock()
        mocked.service_catalog.get_endpoints.return_value = self.ENDPOINTS
        keystone_client.return_value = mocked

        client.Client(input_auth_token='mytoken', auth_url='authurl')
        keystone_client.assert_called_once_with(
            token='mytoken', username=None, api_key=None,
            project_name=None, project_id=None,
            auth_url='authurl')
        http_client.assert_called_once_with(
            'http://myurl/', token='mytoken', auth_ref=None)

    @mock.patch('magnumclient.common.httpclient.HTTPClient')
    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_with_user(self, keystone_client, http_client):
        # Username/password credentials: keystone issues the token.
        mocked = mock.Mock()
        mocked.auth_token = 'mytoken'
        mocked.service_catalog.get_endpoints.return_value = self.ENDPOINTS
        keystone_client.return_value = mocked

        client.Client(username='user', api_key='pass', project_name='prj',
                      auth_url='authurl')
        keystone_client.assert_called_once_with(
            username='user', api_key='pass',
            project_name='prj', project_id=None,
            auth_url='authurl')
        http_client.assert_called_once_with(
            'http://myurl/', token='mytoken', auth_ref=None)

    @mock.patch.object(client.Client, 'get_keystone_client')
    def test_init_unauthorized(self, keystone_client):
        # A keystone client without an auth token means auth failed.
        mocked = mock.Mock()
        mocked.auth_token = None
        keystone_client.return_value = mocked

        self.assertRaises(
            RuntimeError, client.Client,
            username='user', api_key='pass', project_name='prj',
            auth_url='authurl')

    def _test_get_keystone_client(self, auth_url, keystone_client):
        # Helper: call get_keystone_client without running Client.__init__.
        class FakeClient(client.Client):
            def __init__(self):
                # Disable parent __init__
                pass

        FakeClient().get_keystone_client(
            username='user', api_key='pass', project_name='prj',
            auth_url=auth_url)
        self.assertTrue(keystone_client.called)

    @mock.patch('keystoneclient.v2_0.client.Client')
    def test_get_keystone_client_v2(self, keystone_client):
        # A /v2.0 auth URL selects the keystone v2 client.
        self._test_get_keystone_client(
            'http://authhost/v2.0', keystone_client)

    @mock.patch('keystoneclient.v3.client.Client')
    def test_get_keystone_client_v3(self, keystone_client):
        # A /v3 auth URL selects the keystone v3 client.
        self._test_get_keystone_client(
            'http://authhost/v3', keystone_client)

    def test_get_keystone_client_no_url(self):
        # A missing auth URL cannot be resolved to any keystone version.
        self.assertRaises(RuntimeError,
                          self._test_get_keystone_client,
                          None, None)
| apache-2.0 | Python |
|
a59f86ea4905534237f9a1e055bce6a3a3d5fb81 | add migration so edit_messaging is set to True when the role has edit_data set to True | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/users/migrations/0037_add_edit_messaging_permission.py | corehq/apps/users/migrations/0037_add_edit_messaging_permission.py |
from django.db import migrations
from corehq.apps.users.models_role import SQLPermission, UserRole
from corehq.util.django_migrations import skip_on_fresh_install
@skip_on_fresh_install
def migrate_edit_migrations_permissions(apps, schema_editor):
    # Ensure the 'edit_messaging' permission exists, then grant it
    # (allow_all) to every role that already holds 'edit_data'.
    permission, created = SQLPermission.objects.get_or_create(value='edit_messaging')
    edit_data_permission = SQLPermission.objects.get(value='edit_data')
    role_ids_with_edit_data = set(UserRole.objects.filter(rolepermission__permission_fk_id=edit_data_permission.id)
                                  .values_list("id", flat=True))
    for role in UserRole.objects.filter(id__in=role_ids_with_edit_data):
        # get_or_create keeps the migration idempotent if re-run.
        role.rolepermission_set.get_or_create(permission_fk=permission, defaults={"allow_all": True})
class Migration(migrations.Migration):

    dependencies = [
        ('users', '0036_reset_user_history_records'),
    ]

    operations = [
        # Reverse is a no-op: the granted permission rows are left in place.
        migrations.RunPython(migrate_edit_migrations_permissions, migrations.RunPython.noop)
    ]
| bsd-3-clause | Python |
|
a824039003abd693ff568d753c60bb6025bfd868 | Add cleanup_slaves.py script. | eunchong/build,eunchong/build,eunchong/build,eunchong/build | scripts/tools/cleanup_slaves.py | scripts/tools/cleanup_slaves.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Removes checkouts from try slaves."""
import os
import subprocess
import sys
ROOT_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..')
def parse_master(master):
    """Execute a buildbot master.cfg in-process and return its BuildmasterConfig.

    `master` is the directory name under masters/. The test_env import sets
    up the fake environment master.cfg expects; chdir is required because
    master.cfg resolves paths relative to its own directory.
    """
    sys.path.append(os.path.join(ROOT_DIR, 'scripts', 'master', 'unittests'))
    import test_env  # pylint: disable=F0401,W0612

    masterpath = os.path.join(ROOT_DIR, 'masters', master)
    os.chdir(masterpath)
    variables = {}
    master = os.path.join(masterpath, 'master.cfg')
    execfile(master, variables)
    # 'c' is the conventional BuildmasterConfig dict name in master.cfg.
    return variables['c']
def main():
    """It starts a fake in-process buildbot master just enough to parse
    master.cfg.

    Then it queries all the builders and all the slaves to determine the
    current configuration, and removes from each slave the build
    directories for builders it is no longer attached to (Python 2 script).
    """
    c = parse_master('master.tryserver.chromium')

    # Create a mapping of slavebuilddir with each slaves connected to it.
    slavebuilddirs = {}
    # Slaves per OS
    all_slaves = {}
    for builder in c['builders']:
        # The OS is the prefix of the builder name, e.g. "win_rel" -> "win".
        builder_os = builder['name'].split('_', 1)[0]
        if builder_os in ('cros', 'android'):
            builder_os = 'linux'
        slavenames = set(builder['slavenames'])
        all_slaves.setdefault(builder_os, set())
        all_slaves[builder_os] |= slavenames
        slavebuilddir = builder.get('slavebuilddir', builder['name'])
        slavebuilddirs.setdefault(builder_os, {})
        slavebuilddirs[builder_os].setdefault(slavebuilddir, set())
        slavebuilddirs[builder_os][slavebuilddir] |= slavenames

    # Queue of commands to run, per slave: a slave gets a builddir queued
    # for deletion when other same-OS slaves use it but this one does not.
    queue = {}
    for builder_os, slavebuilddirs in slavebuilddirs.iteritems():
        os_slaves = all_slaves[builder_os]
        for slavebuilddir, slaves in slavebuilddirs.iteritems():
            for slave in os_slaves - slaves:
                queue.setdefault((builder_os, slave), []).append(slavebuilddir)

    print 'Out of %d slaves, %d will be cleaned' % (len(c['slaves']), len(queue))

    commands = []
    for key in sorted(queue):
        slave_os, slavename = key
        dirs = queue[key]
        # Paths are hard-coded per platform: e:\b\... on Windows, /b/... elsewhere.
        if slave_os == 'win':
            cmd = 'cmd.exe /c rd /q %s' % ' '.join(
                'e:\\b\\build\\slave\\%s' % s for s in dirs)
        else:
            cmd = 'rm -rf %s' % ' '.join('/b/build/slave/%s' % s for s in dirs)
        commands.append(('ssh', slavename, cmd))

    # TODO(maruel): Use pssh.
    failed = []
    for command in commands:
        # subprocess.call returns non-zero on failure; remember the slave name.
        if subprocess.call(command):
            failed.append(command[1])
    if failed:
        print 'These slaves failed:'
        for i in failed:
            print ' %s' % i
    return 0


if __name__ == '__main__':
    sys.exit(main())
| bsd-3-clause | Python |
|
72467acd590ae5a3494e5059ce2ba99cf656baaa | Add IsAdminOrReadOnly permission class | rstens/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells,bcgov/gwells,rstens/gwells,rstens/gwells,rstens/gwells | registries/permissions.py | registries/permissions.py | from rest_framework.permissions import IsAdminUser, SAFE_METHODS
class IsAdminOrReadOnly(IsAdminUser):
    """
    Allows read-only access to all users (including anonymous users) and write access to admin users only
    """

    def has_permission(self, request, view):
        # Admins keep full access; everyone else is limited to the safe
        # (read-only) HTTP methods - GET, HEAD and OPTIONS.
        admin_allowed = super().has_permission(request, view)
        if admin_allowed:
            return True
        return request.method in SAFE_METHODS
| apache-2.0 | Python |
|
d34b2a13b454ca2c08bd5e8bc3b38d80fb5367c6 | add initial mockup of curses UI | janusnic/fzsl,janusnic/fzsl,jsbronder/fzsl,jsbronder/fzsl | pyfs/ui.py | pyfs/ui.py | import curses
import os
import sys
import pyfs
class SimplePager(object):
    """Interactive curses fuzzy-finder: type to filter, Enter to select."""

    def __init__(self):
        # Save the real stdout, then point fd 1 at stderr so curses drawing
        # does not pollute the stream the selected path is finally written to.
        self._old_stdout = sys.__stdout__
        self._old_stdout_fd = os.dup(sys.stdout.fileno())
        os.dup2(sys.stderr.fileno(), sys.stdout.fileno())

        # Rebind stdin (fd 0) to the controlling terminal so keystrokes are
        # readable even when the script is used inside a pipeline.
        tty = open('/dev/tty')
        os.dup2(tty.fileno(), 0)

        self._scr = curses.initscr()

        # Raw, unbuffered, no-echo input; keypad(1) decodes special keys.
        curses.noecho()
        curses.cbreak()
        curses.raw()
        self._scr.keypad(1)

        self._config = {
            'default': 'find ./'
        }

    def cleanup(self):
        """Restore the terminal state and the original stdout fd."""
        self._scr.keypad(0)
        curses.nocbreak()
        curses.echo()
        curses.endwin()

        os.dup2(self._old_stdout_fd, sys.stdout.fileno())
        sys.stdout = self._old_stdout

    def run(self):
        """Scan for files, run the interactive filter loop, return the top match."""
        scanner = pyfs.Scanner(self._config)
        scorer = pyfs.WeightedDistanceScore()

        self._scr.addstr("Scanning ...")
        self._scr.refresh()
        files = scanner.scan()

        # Reserve one line so drawing never scrolls the screen.
        max_y, _ = self._scr.getmaxyx()
        max_y -= 1

        self._scr.clear()
        for line, match in enumerate(files[:max_y]):
            self._scr.addstr(line, 0, match)
        self._scr.refresh()

        fm = pyfs.FuzzyMatch(files=files, scorer=scorer)

        search = ''
        while True:
            c = self._scr.getch()
            if c in (curses.KEY_ENTER, ord('\n')):
                break
            elif c in (curses.KEY_DC, curses.KEY_BACKSPACE):
                if len(search):
                    search = search[:-1]
            else:
                # NOTE(review): chr(c) on other special keys (arrows etc.)
                # appends junk to the search string - confirm intended.
                search += chr(c)

            # Re-rank and redraw the top matches after every keystroke.
            fm.update_scores(search)
            self._scr.clear()
            for line, match in enumerate(fm.top_matches(max_y)):
                self._scr.addstr(line, 0, match)
            self._scr.refresh()

        self._scr.refresh()
        self.cleanup()
        return fm.top_matches(1)[0]
def main():
    # Run the pager UI and emit the chosen path on the (restored) stdout so
    # shell wrappers can capture it.
    ui = SimplePager()
    result = ui.run()
    sys.stdout.write(result.strip())


if __name__ == '__main__':
    main()
| bsd-3-clause | Python |
|
01b9d4a491e2d732e9684d0782dcbf38df5eeec9 | Add adapters.py to new channelworm directory | gsarma/ChannelWorm,joebowen/ChannelWorm,openworm/ChannelWorm,joebowen/ChannelWorm,VahidGh/ChannelWorm,VahidGh/ChannelWorm,openworm/ChannelWorm,joebowen/ChannelWorm,VahidGh/ChannelWorm,openworm/ChannelWorm,gsarma/ChannelWorm,gsarma/ChannelWorm,cheelee/ChannelWorm,openworm/ChannelWorm,gsarma/ChannelWorm,cheelee/ChannelWorm,joebowen/ChannelWorm,cheelee/ChannelWorm,VahidGh/ChannelWorm,cheelee/ChannelWorm | channelworm/adapters.py | channelworm/adapters.py | # configure django to use default settings
# note that this can also be done using an environment variable
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
# Use DEBUG as a sentinel for "Django settings already configured".
if hasattr(settings, 'DEBUG'):
    # settings are configured already
    pass
else:
    # load default settings if they're not set
    from web_app import settings as defaults
    settings.configure(default_settings=defaults, DEBUG=True)
import ion_channel.models as C
import PyOpenWorm as P
from django.forms.models import model_to_dict
class PatchClampAdapter(object):
    """Map a channelworm patch-clamp model to a PyOpenWorm Experiment.

    Python 2 code (uses dict.iteritems). The PyOpenWorm connection is
    opened and closed within __init__.
    """

    def __init__(self, cw_obj):
        # initialize PyOpenWorm connection so we can access its API
        P.connect()

        self.channelworm_object = cw_obj
        cw_dict = model_to_dict(self.channelworm_object)
        experiment_id = cw_dict.pop('experiment')
        # NOTE(review): patch_clamp_id is popped but never used - presumably
        # only to keep it out of the conditions set below.
        patch_clamp_id = cw_dict.pop('id')

        self.pyopenworm_object = P.Experiment()

        # get the CW model's experiment
        cw_evidence = C.Experiment.objects.get(id=experiment_id)

        # make a PyOW evidence object with it
        pow_evidence = P.Evidence(doi=cw_evidence.doi)

        # add it to the PyOW experiment model
        self.pyopenworm_object.reference(pow_evidence)

        # Remaining model fields become experiment conditions.
        for key, value in cw_dict.iteritems():
            self.pyopenworm_object.conditions.set(key, value)

        # we no longer need the PyOW API so we can kill the connection
        P.disconnect()

    def get_pow(self):
        """Return the adapted PyOpenWorm Experiment object."""
        return self.pyopenworm_object

    def get_cw(self):
        """Return the original channelworm model object."""
        return self.channelworm_object
| mit | Python |
|
f4689709f55a5e3209de7221853cb35a36699dcd | add file | hydrology-tep/documentation,hydrology-tep/documentation,hydrology-tep/documentation | check_sphinx.py | check_sphinx.py | import py
import subprocess
def test_linkcheck(tmpdir):
    """Build the docs with Sphinx's linkcheck builder; -W makes warnings fatal."""
    doctrees = tmpdir.join("_build/doctrees")
    htmldir = tmpdir.join("_build/html")
    cmd = ["sphinx-build", "-W", "-blinkcheck", "-d",
           str(doctrees), "source", str(htmldir)]
    subprocess.check_call(cmd)
def test_build_docs(tmpdir):
    """Build the HTML docs; -n enables nit-picky mode and -W makes warnings fatal."""
    doctrees = tmpdir.join("_build/doctrees")
    htmldir = tmpdir.join("_build/html")
    cmd = ["sphinx-build", "-n", "-W", "-bhtml", "-d",
           str(doctrees), "source", str(htmldir)]
    subprocess.check_call(cmd)
| cc0-1.0 | Python |
|
45a1efa21162a5f1c39d8255d13d2bbca8f6c0ca | Create 7_ten_row_abacus.py | roy2020china/BingDemo | 7_ten_row_abacus.py | 7_ten_row_abacus.py | #########################################################################
# 10-row School abacus
# by
# Michael H
#########################################################################
# Description partially extracted from from wikipedia
#
# Around the world, abaci have been used in pre-schools and elementary
#
# In Western countries, a bead frame similar to the Russian abacus but
# with straight wires and a vertical frame has been common (see image).
# Helps schools as an aid in teaching the numeral system and arithmetic
#
# |00000***** | row factor 1000000000
# |00000***** | row factor 100000000
# |00000***** | row factor 10000000
# |00000***** | row factor 1000000
# |00000***** | row factor 100000
# |00000***** | row factor 10000
# |00000***** | row factor 1000
# |00000**** *| row factor 100 * 1
# |00000*** **| row factor 10 * 2
# |00000** ***| row factor 1 * 3
# -----------
# Sum 123
#
# Each row represents a different row factor, starting with x1 at the
# bottom, ascending up to x1000000000 at the top row.
######################################################################
# TASK:
# Define a procedure print_abacus(integer) that takes a positive integer and prints a visual representation (image) of an abacus setup for a given positive integer value.
# 任务:
# 定义一个函数print_abacus(整数),向它传入一个正整数,打印出与该给定值相对应算盘的图形。
# Ranking
# 1 STAR: solved the problem!
# 2 STARS: 6 < lines <= 9
# 3 STARS: 3 < lines <= 6
# 4 STARS: 0 < lines <= 3
# 评分
# 1 星: solved the problem!
# 2 星: 6 < 代码行数 <= 9
# 3 星: 3 < 代码行数 <= 6
# 4 星: 0 < 代码行数 <= 3
def print_abacus(value):
    """Print a 10-row school abacus showing *value* (Python 2 code).

    Row i (top to bottom) represents the digit for 10**(9-i): a 3-space
    gap is inserted into the bead row so that `digit` star beads sit to
    the right of the gap.
    """
    #row_list = ["|", "0", "0", "0", "0", "0", "*", "*", "*", "*", "*", " ", " ", " ", "|"]
    row_list_const = ["|", "0", "0", "0", "0", "0", "*", "*", "*", "*", "*", "|"]
    value_next = value
    for i in range(9, -1, -1):
        # Python 2 integer division: the digit for the 10**i place.
        index_0 = value_next / (10 ** i)
        # Negative insert position: the gap lands `digit` beads from the
        # right wall, splitting the stars into "unused" and "counted".
        index_1 = 0 - (index_0 + 1)
        row_list_let = row_list_const[:]
        row_list_let.insert(index_1, (" " * 3))
        row_str = "".join(row_list_let)
        # value_next = value_next % (10 ** i)
        value_next %= (10 ** i)
        print row_str
| mit | Python |
|
9207041afb78f8d36442b7ee19b95055ebbc99cd | add test forms | valdergallo/mock_django_orm | app/tests/test_form.py | app/tests/test_form.py | from django.test import TestCase
from app.forms import FormAppOne
class TestForm(TestCase):
    """Validation tests for FormAppOne (name must be text, both fields required)."""

    def test_invalid_name_form(self):
        # Numeric-only name triggers the custom name validator.
        form = FormAppOne({'name': '1234', 'description': 'validate name'})
        self.assertFalse(form.is_valid())
        self.assertEquals(form.errors, {'name': [u'Name must be only text']})

    def test_invalid_description_form(self):
        # Missing description is reported as a required-field error.
        form = FormAppOne({'name': 'asd'})
        self.assertFalse(form.is_valid())
        self.assertEquals(form.errors, {'description':
                                        [u'This field is required.']})

    def test_required_fields(self):
        # Empty payload: both fields are required.
        form = FormAppOne({})
        self.assertFalse(form.is_valid())
        self.assertEquals(form.errors, {'name': [u'This field is required.'],
                                        'description': [u'This field is required.']})

    def test_valid_form(self):
        form = FormAppOne({'name': 'valder', 'description': 'validate name'})
        self.assertTrue(form.is_valid())
| mit | Python |
|
a460b73861d406b14519b3e391190d1b8d7e57a9 | Add maximal margin classifier. | lidalei/DataMining | max_margin_classifier.py | max_margin_classifier.py | import numpy as np
import matplotlib.pylab as plt
from sklearn.svm import SVC
# Toy 2-D dataset: four red and three blue points (from ISLR chapter 9).
X = np.array([[3, 4], [2, 2], [4, 4], [1, 4], [2, 1], [4, 3], [4, 1]])
y = np.array(['Red', 'Red', 'Red', 'Red', 'Blue', 'Blue', 'Blue'])

# A very large C makes the soft-margin SVM behave like a hard-margin
# (maximal margin) classifier on separable data.
linear_svm = SVC(kernel = 'linear', C = 2 ** 15)
linear_svm.fit(X, y)

## w0 * X_1 + w1 * X_2 + b = 0 <=> X_2 = -w0 / w1 * X_1 - b / w1
w = linear_svm.coef_[0]
# Half-margin width is 1/||w|| for the canonical hyperplane.
print('Margin: %s'%(1.0 / np.linalg.norm(w)))
b = linear_svm.intercept_
slope = -w[0] / w[1]

## points in the separating line
xx = np.linspace(np.amin(X[:, 0]), np.amax(X[:, 0]))
yy = slope * xx - b / w[1]

## points in the two gutters (where w.x + b = +/-1)
yy_top = yy + 1.0 / w[1]
yy_bottom = yy - 1.0 / w[1]

## canvas
fig, ax = plt.subplots(1, 1)
ax.set_title('Maximal margin classifier')
# draw points
ax.scatter(X[:, 0], X[:, 1], c = y)
# draw separating line
ax.plot(xx, yy, 'k-')
# draw gutters
ax.plot(xx, yy_top, 'k--')
ax.plot(xx, yy_bottom, 'k--')
# draw support vectors as hollow circles around their points
ax.scatter(linear_svm.support_vectors_[:, 0], linear_svm.support_vectors_[:, 1],
           s = 100, facecolors = 'none')
# set labels
ax.set_xlabel('X_1')
ax.set_ylabel('X_2')

plt.show()
|
2e821ab48542c89ac41ebc17036bddc164506a22 | Backup of some unused code | jakelever/knowledgediscovery,jakelever/knowledgediscovery,jakelever/knowledgediscovery,jakelever/knowledgediscovery | combine_data/cartesianProductOfIDs.py | combine_data/cartesianProductOfIDs.py | import argparse
import itertools
if __name__ == '__main__':
    # CLI: read two files of integer IDs and emit every cross pair, one
    # tab-separated "min max" pair per line.
    parser = argparse.ArgumentParser(description='Generate the cartesian product of two ID files')
    parser.add_argument('--idFileA',required=True,type=str,help='First file of IDs')
    parser.add_argument('--idFileB',required=True,type=str,help='Second file of IDS')
    parser.add_argument('--outFile',required=True,type=str,help='Output file')
    args = parser.parse_args()
    with open(args.idFileA) as f:
        idsA = [ int(line.strip()) for line in f ]
    with open(args.idFileB) as f:
        idsB = [ int(line.strip()) for line in f ]
    # Deduplicate and sort so the output ordering is deterministic.
    idsA = sorted(list(set(idsA)))
    idsB = sorted(list(set(idsB)))
    with open(args.outFile,'w') as outF:
        for a,b in itertools.product(idsA,idsB):
            # Normalize pair order so (a, b) and (b, a) collapse to one form.
            outF.write("%d\t%d\n" % (min(a,b),max(a,b)))
    # Was a Python-2-only `print` statement, which is a SyntaxError under
    # Python 3; the parenthesized call works on both 2 and 3.
    print("Processing complete.")
| mit | Python |
|
178dff5f0af375f4f49416aeb41b5d7a718d69e8 | Add python script to demystify compiler output | 1blankz7/actor-framework,DavadDi/actor-framework,1blankz7/actor-framework,DavadDi/actor-framework,DavadDi/actor-framework,1blankz7/actor-framework,actor-framework/actor-framework,actor-framework/actor-framework,nq-ebaratte/actor-framework,actor-framework/actor-framework,DavadDi/actor-framework,nq-ebaratte/actor-framework,nq-ebaratte/actor-framework,nq-ebaratte/actor-framework,actor-framework/actor-framework,1blankz7/actor-framework | scripts/demystify.py | scripts/demystify.py | #!/usr/bin/env python
# This script demystifies C++ compiler output for CAF by
# replacing cryptic `typed_mpi<...>` templates with
# `replies_to<...>::with<...>` and `atom_constant<...>`
# with human-readable representation of the actual atom.
import sys
# decodes 6bit characters to ASCII
DECODING_TABLE = ' 0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz'
# CAF type strings
ATOM_CONSTANT_SUFFIX = "caf::atom_constant<"
# `pos` points to first character after '<':
# template_name<...>
# ^
# and returns the position of the closing '>'
def end_of_template(x, pos):
    """Return the index of the '>' closing the template opened just before *pos*.

    *pos* must point to the first character after the opening '<'
    (template_name<...). Nested templates are skipped.
    """
    open_templates = 1
    while open_templates > 0:
        # Bug fix: the original indexed the *global* `line` instead of the
        # parameter `x`, which only worked by accident in the main loop.
        if x[pos] == '<':
            open_templates += 1
        elif x[pos] == '>':
            open_templates -= 1
        pos += 1
    # exclude final '>'
    return pos - 1
def next_element(x, pos, last):
    """Return the index of the first top-level ',' in x[pos:last], or *last*."""
    while pos < last:
        ch = x[pos]
        if ch == ',':
            break
        if ch == '<':
            # Commas inside a nested <...> template do not count.
            pos = end_of_template(x, pos + 1)
        else:
            pos += 1
    return pos
def atom_read(x):
    """Decode a CAF atom packed into a 64-bit integer (6 bits per character)."""
    result = ""
    # If the top nibble is the 0xF marker, characters start immediately.
    read_chars = ((x & 0xF000000000000000) >> 60) == 0xF
    mask = 0x0FC0000000000000
    bitshift = 54
    # Walk the remaining 6-bit groups from most- to least-significant.
    while bitshift >= 0:
        if read_chars:
            result += DECODING_TABLE[(x & mask) >> bitshift]
        elif ((x & mask) >> bitshift) == 0xF:
            # A 0xF group marks where the encoded characters begin.
            read_chars = True
        bitshift -= 6
        mask = mask >> 6
    return result
def decompose_type_list(x, first, last):
    """Split x[first:last] into top-level comma-separated elements."""
    res = []
    i = first
    n = next_element(x, first, last)
    while i != last:
        res.append(x[i:n])
        # skip following ','
        # NOTE(review): n + 2 also skips one character after the comma
        # (presumably the space in ", ") — confirm against real input.
        i = min(n + 2, last)
        n = next_element(x, i, last)
    return res
def stringify(x):
    """Render one type string; atom constants become a quoted atom name.

    "caf::atom_constant<12345>" -> "'<decoded atom>'"; anything else is
    returned unchanged.
    """
    if x.startswith(ATOM_CONSTANT_SUFFIX):
        begin = len(ATOM_CONSTANT_SUFFIX)
        # Drop the trailing '>' of the template argument.
        end = len(x) - 1
        res = "'"
        res += atom_read(int(x[begin:end]))
        res += "'"
        return res
    return x
def stringify_list(xs):
    """Join the stringified elements of *xs* with ', ' separators."""
    return ", ".join(stringify(element.strip(' ')) for element in xs)
def decompose_typed_actor(x, first, last):
    """Rewrite a typed_mpi<...> body as 'replies_to<in>::with<out>'.

    Expects x[first:last] to contain two caf::detail::type_list<...>
    templates: the first holds the input types, the second the outputs.
    """
    needle = "caf::detail::type_list<"
    # first type list -> input types
    j = x.find(needle, first) + len(needle)
    k = end_of_template(x, j)
    inputs = decompose_type_list(x, j, k)
    # second type list -> outputs
    j = x.find(needle, k) + len(needle)
    k = end_of_template(x, j)
    outputs = decompose_type_list(x, j, k)
    # replace all 'caf::atom_constant<...>' entries in inputs
    res = "replies_to<"
    res += stringify_list(inputs)
    res += ">::with<"
    res += stringify_list(outputs)
    res += ">"
    return res
# Filter stdin -> stdout, rewriting every caf::typed_mpi<...> occurrence
# into the human-readable replies_to<...>::with<...> form and stripping
# the caf:: namespace prefix from the final output.
for line in sys.stdin:
    # replace "std::__1" with "std::" (Clang libc++)
    line = line.replace("std::__1", "std::")
    needle = "caf::typed_mpi<"
    idx = line.find(needle)
    while idx != -1:
        # find end of typed_actor<...>
        first = idx + len(needle)
        last = end_of_template(line, first)
        updated = decompose_typed_actor(line, first, last)
        prefix = line[:idx]
        # NOTE(review): suffix starts at the closing '>' itself, so the
        # original template's '>' is kept after `updated` — confirm this
        # is the intended output shape.
        suffix = line[last:]
        line = prefix + updated + suffix
        idx = line.find(needle, idx + len(updated))
    sys.stdout.write(line.replace("caf::", ""))
| bsd-3-clause | Python |
|
01e551bdfbe298cdbd30734a52305be25b65147a | add docker build.py | GregDMeyer/dynamite,GregDMeyer/dynamite | docker/build.py | docker/build.py | """
This script builds Docker images for various combinations of
parameters. Should be run from inside the git tree.
"""
import sys
from os import path
from argparse import ArgumentParser
from subprocess import run, PIPE, Popen
from time import time
import shutil
def parse_args(argv=None):
    """Parse command-line options for the Docker build script.

    Args:
        argv: list of argument strings (without the program name); defaults
            to sys.argv[1:] when None.

    Returns:
        argparse.Namespace with `targets`, `hardware` and `verbose`.
    """
    if argv is None:
        # sys.argv[0] is the program name, not an argument.
        argv = sys.argv[1:]
    # Bug fix: the original passed `argv` to ArgumentParser(), whose first
    # positional parameter is `prog`, and then ignored `argv` entirely by
    # calling parse_args() with no arguments.
    parser = ArgumentParser()
    parser.add_argument("--targets", type=lambda x: x.split(','),
                        default=['release', 'jupyter'],
                        help='Targets to build, delimited by commas.')
    parser.add_argument("--hardware", type=lambda x: x.split(','),
                        default=['cpu', 'gpu'],
                        help='Whether to build the CPU and/or GPU versions.')
    parser.add_argument("-v", "--verbose", action='store_true',
                        help='Show build output.')
    return parser.parse_args(argv)
def main():
    """Clone a clean checkout into /tmp and docker-build every
    target/hardware combination requested on the command line."""
    args = parse_args()
    # Record the current branch/commit so they can be baked into the image.
    git_branch = run(["git", "rev-parse", "--abbrev-ref", "HEAD"],
                     capture_output=True, text=True, check=True).stdout.strip()
    git_commit = run(["git", "describe", "--always"],
                     capture_output=True, text=True, check=True).stdout.strip()
    # checkout a clean version to build from
    git_root = run(["git", "rev-parse", "--show-toplevel"],
                   capture_output=True, text=True, check=True).stdout.strip()
    build_dir = "/tmp/dnm_docker_build"
    # Avoid clobbering a leftover build dir by suffixing a timestamp.
    if path.exists(build_dir):
        build_dir += '_'+str(int(time()))
    run(["git", "clone", git_root, build_dir], check=True)
    completed = []
    # run builds
    for target in args.targets:
        for hardware in args.hardware:
            dockerfile = f"Dockerfile-{hardware}"
            # Image tag encodes hardware and target: latest[-cuda][-jupyter].
            tag = "latest"
            if hardware == 'gpu':
                tag += '-cuda'
            if target == 'jupyter':
                tag += '-jupyter'
            cmd = [
                "docker", "build",
                "--build-arg", f"GIT_BRANCH={git_branch}",
                "--build-arg", f"GIT_COMMIT={git_commit}",
                "-f", f"docker/{dockerfile}",
                "--target", target,
                "-t", f"gdmeyer/dynamite:{tag}",
                "."
            ]
            print(f"Building '{tag}'...", end="")
            if args.verbose:
                print()
                print()
            build_output = ""
            prev_time = 0
            # Stream docker's stdout: echo it when verbose, otherwise
            # buffer it and print a progress dot at most once per second.
            with Popen(cmd, cwd=build_dir, stdout=PIPE, bufsize=1, text=True) as sp:
                for line in sp.stdout:
                    if args.verbose:
                        print(line, end="")
                    else:
                        build_output += line
                        if time() - prev_time > 1:
                            print('.', end="", flush=True)
                            prev_time = time()
            print()
            # Popen.__exit__ has waited for the process, so returncode is set.
            if sp.returncode != 0:
                print("Build failed!")
                if not args.verbose:
                    print("Output:")
                    print()
                    print(build_output)
                sys.exit()
            else:
                completed.append(tag)
    print("Removing build files...")
    if not build_dir.startswith("/tmp"):
        # something has gone horribly wrong
        print("not removing build files, not in /tmp")
    else:
        shutil.rmtree(build_dir)
    print("Successfully completed builds", ", ".join(completed))
if __name__ == '__main__':
main()
| mit | Python |
|
d3210b3d25a2eef7c4d066878d444b9b381243eb | add roulette | desaster/uusipuu | modules/ruletti.py | modules/ruletti.py | # -*- coding: ISO-8859-15 -*-
from twisted.internet import reactor
from core.Uusipuu import UusipuuModule
import random
class Module(UusipuuModule):
    """IRC 'Russian roulette' module: half the time the caller is banned
    and kicked for five seconds, otherwise the gun just clicks."""
    def startup(self):
        # Track pending unban timers so they can be cancelled on shutdown.
        self.scheduled['unban'] = []
    def cmd_ruletti(self, user, target, params):
        # user is an IRC prefix "nick!user@host"; take just the nick.
        nick = user.split('!', 1)[0]
        # 3 of 6 chambers are loaded: choices 0, 1, 2 fire.
        if random.choice(range(0, 6)) < 3:
            self.bot.mode(self.channel, True, 'b %s!*@*' % nick)
            self.bot.kick(self.channel, nick, 'naps!')
            self.log('%s - Nyt napsahti!' % nick)
            # Lift the ban automatically after five seconds.
            d = reactor.callLater(5, self.unban, nick)
            self.scheduled['unban'].append(d)
        else:
            self.chanmsg('%s: klik!' % nick)
            self.log('%s - Klik!' % nick)
    def unban(self, nick):
        # Remove the ban mask that cmd_ruletti set.
        self.bot.mode(self.channel, False, 'b %s!*@*' % nick)
# vim: set et sw=4:
| bsd-2-clause | Python |
|
432cbfc65ea1e6c1b9079915cce20769b88502fe | add wrapper script to run dbify module | jaeddy/bripipetools,jaeddy/bripipetools,BenaroyaResearch/bripipetools,jaeddy/bripipetools,BenaroyaResearch/bripipetools,BenaroyaResearch/bripipetools,BenaroyaResearch/bripipetools,jaeddy/bripipetools | scripts/run_dbify.py | scripts/run_dbify.py | import logging
import os
import sys
import argparse
import _mypath
from bripipetools import genlims
from bripipetools import dbify
def parse_input_args(parser=None, argv=None):
    """Populate *parser* with the dbify CLI options and parse them.

    Args:
        parser: an argparse.ArgumentParser to populate; a new one is
            created when None (the original crashed with the default).
        argv: optional list of argument strings; defaults to sys.argv[1:].

    Returns:
        argparse.Namespace with `import_path` and `debug` attributes.
    """
    if parser is None:
        parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--import_path',
                        required=True,
                        default=None,
                        help=("path to flowcell run folder - e.g., "
                              "/mnt/genomics/Illumina/"
                              "150218_D00565_0081_BC5UF5ANXX/ - "
                              "or workflow batch file"))
    parser.add_argument('-d', '--debug',
                        action='store_true',
                        help=("Set logging level to debug"))
    # Parse and collect input arguments.
    # Bug fix: the original called parser.parse_args() twice and discarded
    # the first result; parse once and honor an explicit argv.
    return parser.parse_args(argv)
def main(argv):
    """Configure logging and run the dbify import for the given path.

    NOTE(review): *argv* is accepted but never used — parse_input_args
    reads sys.argv itself; confirm whether argv should be forwarded.
    """
    parser = argparse.ArgumentParser()
    args = parse_input_args(parser)
    if args.debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)
    logger.info("importing data based on path {}"
                .format(args.import_path))
    # Hand off to the project's import manager against the GenLIMS db.
    dbify.ImportManager(path=args.import_path, db=genlims.db).run()
if __name__ == "__main__":
main(sys.argv[1:])
| mit | Python |
|
d5b3bce22aec3e84b59fad379859aa636f2d3f1a | Create ColorConvolution.py | DigitalSlideArchive/HistomicsTK,DigitalSlideArchive/HistomicsTK | ColorConvolution.py | ColorConvolution.py | import numpy
def ColorConvolution ( I, W ):
    '''
    Reconstructs a color image from the stain matrix "W" and the stain intensity
    images generated by ColorDeconvolution.
    *Inputs:
    I (rgbimage) - an RGB image of type unsigned char.
    W (matrix) - a 3x3 matrix containing the color vectors in columns.
    For two stain images the third column is zero and will be
    complemented using cross-product. Minumum two nonzero
    columns required.
    *Outputs:
    RGB (rgbimage) - a reconstructed RGB image with values ranging from
    [0, 255], suitable for display.
    *Related functions:
    ComplementStainMatrix, OpticalDensityFwd, OpticalDensityInv, ColorDeconvolution
    '''
    # NOTE(review): OpticalDensityFwd/OpticalDensityInv are defined in a
    # sibling module, not in this file — confirm they are imported.
    #transform 3D input stain image to 2D stain matrix format
    m = I.shape[0]
    n = I.shape[1]
    I = numpy.reshape(I, (m*n,3))
    #transform input stains to optical density values and convolve, tfm back to stain
    I = I.astype(dtype=numpy.float32)
    ODfwd = OpticalDensityFwd(I)
    ODdeconv = numpy.dot(ODfwd, numpy.transpose(W))
    ODinv = OpticalDensityInv(ODdeconv)
    #reshape output, transform type
    RGB = numpy.reshape(ODinv, (m,n,3))
    # Clip above 255 so the uint8 cast cannot wrap around.
    RGB[RGB > 255] = 255
    RGB = RGB.astype(numpy.uint8)
    return(RGB)
| apache-2.0 | Python |
|
67d4bc38778632b482d9c372efb41104ecabedfa | add test cases for aggregate.py | ylxdzsw/MrBam,ylxdzsw/MrBam | test/test_aggregate.py | test/test_aggregate.py | from MrBam.bam import get_reads
from MrBam.aggregate import aggregate_reads
from helper import make_bam
from argparse import Namespace
from pysam import AlignmentFile
def test_aggregate_reads_1():
    "it should aggregate pairs"
    o = Namespace(verbos=False, qual=20)
    # Two read pairs (same name = mates of a pair) with identical
    # coordinates; they should collapse into a single unique pair.
    reads = (
        ("r1", 'A', 60, 2, 11, 4, 11, False, True),
        ("r1", 'A', 60, 4, 13, 2, -11, False, True),
        ("r2", 'C', 60, 2, 11, 4, 11, False, True),
        ("r2", 'C', 60, 4, 13, 2, -11, False, True)
    )
    unique_pairs, *_ = aggregate_reads(o, reads)
    assert len(unique_pairs) == 1
def test_aggregate_reads_2():
    "it should aggregate singles"
    o = Namespace(verbos=False, qual=20)
    # Three unpaired reads; r1/r2 share coordinates on the same strand
    # while r3 is reverse-strand, so two unique singles remain.
    reads = (
        ("r1", 'A', 60, 2, 11, 0, 0, False, False),
        ("r2", 'C', 60, 2, 11, 0, 0, False, False),
        ("r3", 'C', 60, 2, 11, 0, 0, True, False)
    )
    _, unique_single, *_ = aggregate_reads(o, reads)
    assert len(unique_single) == 2
def test_aggregate_reads_3():
    "it should ignore when 3+ reads share the same name"
    o = Namespace(verbos=False, qual=20)
    # Three reads named "r1": an impossible pairing, so all three are
    # counted as errors; the lone r2 single is still kept.
    reads = (
        ("r1", 'A', 60, 2, 11, 2, 9, False, True),
        ("r1", 'C', 60, 2, 11, 2, -9, False, True),
        ("r1", 'C', 60, 2, 11, 2, 9, True, True),
        ("r2", 'C', 60, 2, 11, 0, 0, True, False)
    )
    unique_pairs, unique_single, _, nerror, *_ = aggregate_reads(o, reads)
    assert len(unique_pairs) == 0
    assert len(unique_single) == 1
    assert nerror == 3
def test_aggregate_reads_4():
    "it should ignore when base in overlap area inconsistent between two reads"
    o = Namespace(verbos=False, qual=20)
    # Pair r1 has mates reporting different bases ('A' vs 'C') in their
    # overlap, so its two reads are counted as inconsistent; pair r2 is
    # consistent and survives as the single unique pair.
    reads = (
        ("r1", 'A', 60, 2, 11, 4, 11, False, True),
        ("r1", 'C', 60, 4, 13, 2, -11, False, True),
        ("r2", 'C', 60, 3, 12, 5, 11, False, True),
        ("r2", 'C', 60, 5, 14, 3, -11, False, True)
    )
    unique_pairs, unique_single, *_, ninconsis = aggregate_reads(o, reads)
    assert len(unique_pairs) == 1
    assert ninconsis == 2
def test_aggregate_reads_5(tmpdir):
    "work with reads returned by MrBam.bam"
    # Bug fix: `tmpdir` is the pytest fixture providing a per-test temp
    # directory; the original omitted it from the signature, so every use
    # of `tmpdir` below raised NameError.
    o = Namespace(verbos=False, qual=20)
    # NOTE(review): the column alignment of this mini-alignment spec may
    # have been lost in transit — verify read offsets against helper.make_bam.
    make_bam(tmpdir.strpath, """
    123456789_123456789_12
    r1 + ...........
    r1 - ......*....
    r2 + .........*.
    r2 - .....*.......
    r3 + ...........
    r3 - ....*......
    r4 + ...........
    r4 - ...........
    123456789_123456789_12
    """)
    sam = AlignmentFile(tmpdir.join("test.bam").strpath)
    unique_pairs, unique_single, *_ = aggregate_reads(o, get_reads(o, sam, 'ref', '12'))
    assert len(unique_pairs) == 3
| mit | Python |
|
8f718c536897711663051a613e7f50d564fb4cbc | Call repair as part of upgrade | ifduyue/sentry,zenefits/sentry,zenefits/sentry,mvaled/sentry,imankulov/sentry,mvaled/sentry,mvaled/sentry,imankulov/sentry,beeftornado/sentry,BuildingLink/sentry,fotinakis/sentry,nicholasserra/sentry,nicholasserra/sentry,alexm92/sentry,BuildingLink/sentry,ifduyue/sentry,alexm92/sentry,JackDanger/sentry,jean/sentry,JamesMura/sentry,ifduyue/sentry,JackDanger/sentry,mvaled/sentry,looker/sentry,fotinakis/sentry,JamesMura/sentry,gencer/sentry,BuildingLink/sentry,BuildingLink/sentry,ifduyue/sentry,mvaled/sentry,beeftornado/sentry,BayanGroup/sentry,jean/sentry,zenefits/sentry,daevaorn/sentry,BayanGroup/sentry,beeftornado/sentry,JamesMura/sentry,gencer/sentry,gencer/sentry,jean/sentry,looker/sentry,looker/sentry,looker/sentry,JamesMura/sentry,JamesMura/sentry,daevaorn/sentry,BayanGroup/sentry,gencer/sentry,ifduyue/sentry,mvaled/sentry,BuildingLink/sentry,jean/sentry,gencer/sentry,looker/sentry,daevaorn/sentry,zenefits/sentry,alexm92/sentry,daevaorn/sentry,mitsuhiko/sentry,fotinakis/sentry,imankulov/sentry,mitsuhiko/sentry,zenefits/sentry,jean/sentry,JackDanger/sentry,nicholasserra/sentry,fotinakis/sentry | src/sentry/management/commands/upgrade.py | src/sentry/management/commands/upgrade.py | """
sentry.management.commands.upgrade
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.core.management import call_command
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
    """Django management command: run pending migrations, then repair."""
    help = 'Performs any pending database migrations and upgrades'
    option_list = BaseCommand.option_list + (
        make_option('--noinput',
            action='store_true',
            dest='noinput',
            default=False,
            help='Tells Django to NOT prompt the user for input of any kind.',
        ),
    )
    def handle(self, **options):
        # South-style syncdb with migrate=True applies schema migrations.
        call_command(
            'syncdb',
            migrate=True,
            interactive=(not options['noinput']),
            traceback=options['traceback'],
            verbosity=options['verbosity'],
        )
        # Follow up with the repair command so fixups run on every upgrade.
        call_command(
            'repair',
            interactive=(not options['noinput']),
            traceback=options['traceback'],
            verbosity=options['verbosity'],
        )
| """
sentry.management.commands.upgrade
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.core.management import call_command
from django.core.management.base import BaseCommand
from optparse import make_option
class Command(BaseCommand):
    """Django management command: run any pending database migrations."""
    help = 'Performs any pending database migrations and upgrades'
    option_list = BaseCommand.option_list + (
        make_option('--noinput',
            action='store_true',
            dest='noinput',
            default=False,
            help='Tells Django to NOT prompt the user for input of any kind.',
        ),
    )
    def handle(self, **options):
        # South-style syncdb with migrate=True applies schema migrations.
        call_command(
            'syncdb',
            migrate=True,
            interactive=(not options['noinput']),
            traceback=options['traceback'],
            verbosity=options['verbosity'],
        )
| bsd-3-clause | Python |
6569d7e36693512fdaadfb22a5aaf6f11fe0e084 | migrate dataregistry repeater | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/motech/repeaters/management/commands/migrate_dataregistrycaseupdaterepeater.py | corehq/motech/repeaters/management/commands/migrate_dataregistrycaseupdaterepeater.py | from corehq.motech.repeaters.management.commands.migrate_caserepeater import Command as MigrateCaseRepeaters
from corehq.motech.repeaters.models import SQLDataRegistryCaseUpdateRepeater
class Command(MigrateCaseRepeaters):
    """Couch-to-SQL migration command for DataRegistryCaseUpdateRepeater.

    The base class drives the migration; this subclass only names the
    couch document type and the target SQL model.
    """
    @classmethod
    def couch_doc_type(cls):
        # Couch doc_type string being migrated away from.
        return 'DataRegistryCaseUpdateRepeater'
    @classmethod
    def sql_class(cls):
        # Destination SQL model class.
        return SQLDataRegistryCaseUpdateRepeater
| bsd-3-clause | Python |
|
5aa5ac33d2b841fa1d9c707681a9d024168672c4 | Create cdbtabledef.py | ChristinaHammer/Client_Database | cdbtabledef.py | cdbtabledef.py | """cdbtabledef.py
Developer: Noelle Todd
Last Updated: June 5, 2014
This module will create 4 tables for the client database, using the
sqlalchemy module, and the sqlite database. This module is still in
early testing stages, and as such, is subject to many changes, and
probably contains bugs.
"""
from sqlalchemy import Column, DateTime, String, Integer, ForeignKey, func
from sqlalchemy.orm import relationship, backref
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
engine = create_engine('sqlite:///test_db.sqlite')
session = sessionmaker()
session.configure(bind=engine)
base = declarative_base()
class Household(base):
    """
    This class creates a table with columns for household data.
    """
    __tablename__ = 'household'
    id = Column(Integer, primary_key = True)
    street_address = Column(String)
    apt = Column(String)
    # Defaults reflect the charity's local service area (Troy, NY 12180).
    city = Column(String, default = 'Troy')
    state = Column(String, default = 'NY')
    zip = Column(Integer, default = '12180')
    #contact_ID = Column(Integer, ForeignKey('person.id'))
    # When the household's address/eligibility was last verified.
    date_verified = Column(DateTime)
class Person(base):
    """
    This class creates a table with columns for individual's data.
    """
    __tablename__ = 'person'
    id = Column(Integer, primary_key = True)
    first_name = Column(String)
    last_name = Column(String)
    DOB = Column(DateTime)
    age = Column(Integer)
    phone = Column(Integer)
    date_joined = Column(DateTime)
    # Each person belongs to one household; Household.members is the
    # reverse collection created by the backref.
    HH_ID = Column(Integer, ForeignKey('household.id'))
    household = relationship(Household, backref=backref('members',
                                                        uselist = True))
class Volunteer(base):
    """
    This class creates a table with columns for volunteer data.
    """
    __tablename__ = 'volunteer'
    id = Column(Integer, primary_key = True)
    first_name = Column(String)
    last_name = Column(String)
    phone = Column(Integer)
class Visit(base):
    """
    This class creates a table with columns tracking visit history.
    """
    __tablename__ = 'visit'
    id = Column(Integer, primary_key = True)
    # Foreign keys: who visited (person), their household, and the
    # volunteer who served them.
    I_ID = Column(Integer, ForeignKey('person.id'))
    HH_ID = Column(Integer, ForeignKey('household.id'))
    Vol_ID = Column(Integer, ForeignKey('volunteer.id'))
    # Visit timestamp defaults to the database's current time.
    date = Column(DateTime, default = func.now())
base.metadata.create_all(engine)
| mit | Python |
|
1172811d073e544d249aeba64f2b6828ee75bd5d | test geometry | leethargo/geonet | tests/test_geometry.py | tests/test_geometry.py | import numpy as np
from numpy.testing import assert_allclose
from geonet.geometry import unit_vector, angle_between
def test_unit_vector():
    """unit_vector leaves unit vectors unchanged and normalizes others."""
    v1 = np.array([1, 0, 0])
    assert_allclose(unit_vector(v1), v1)
    v2 = np.array([1, 1, 0])
    u2 = unit_vector(v2)
    # The result must have Euclidean norm 1.
    assert_allclose(np.linalg.norm(u2), 1.0)
def test_angle_between():
    """angle_between is zero for identical vectors and symmetric otherwise."""
    v1 = np.array([1, 1])
    v2 = np.array([1, 0])
    v3 = np.array([0, 1])
    # Any vector forms a zero angle with itself.
    for v in (v1, v2, v3):
        assert_allclose(angle_between(v, v), 0.0, atol=1e-6)
    # 45 degrees between the diagonal and an axis, in both argument orders.
    assert_allclose(angle_between(v1, v2), np.pi/4, atol=1e-6)
    assert_allclose(angle_between(v2, v1), np.pi/4, atol=1e-6)
    # 90 degrees between the two axes.
    assert_allclose(angle_between(v2, v3), np.pi/2, atol=1e-6)
    assert_allclose(angle_between(v3, v2), np.pi/2, atol=1e-6)
| mit | Python |
|
f5aab57e443e5b5a7c2507f10c0c6f608d677500 | Add simple unparser test | pjwerneck/pyrql | tests/test_unparser.py | tests/test_unparser.py | # -*- coding: utf-8 -*-
from pyrql.parser import parser
from pyrql.unparser import unparser
import pytest
@pytest.mark.parametrize('func', ['eq', 'lt', 'le', 'gt', 'ge', 'ne'])
def test_cmp_functions(func):
    """Each RQL comparison node unparses to 'func(args...)' syntax."""
    parsed = {'name': func, 'args': ['a', 1]}
    assert unparser.unparse(parsed) == '%s(a,1)' % func
    # A tuple argument is rendered as a parenthesized, comma-joined group.
    parsed = {'name': func, 'args': [('a', 'b', 'c'), 1]}
    assert unparser.unparse(parsed) == '%s((a,b,c),1)' % func
| mit | Python |
|
0d623e0029dc7d7d6fb0bf9634904b23f2a11732 | Add files via upload | sainzad/stackOverflowCodeIdentifier | XMLAnalyze.py | XMLAnalyze.py | # Author: Andrew Sainz
#
# Purpose: XMLParser is designed to iterate through a collection of Post data collected from Stack Overflow
# forums. Data collected to analize the code tagged information to find the language of the code
# being utilized.
#
# How to use: To run from command line input "python XMLParser.py [XML file name].xml"
import xml.etree.ElementTree as ET
import sys
import re
from nltk.util import ngrams
def parseBodyForTagCode(body):
    """Return the raw <code>...</code> fragments found in *body*.

    Args:
        body: post body HTML string, or None when the row had no Body.

    Returns:
        List of matched substrings (tags included), or None when *body*
        is not a string.
    """
    try:
        # Code is a string that contains all code tag data within the body
        # ex. code = ['<code>EXCEPT</code>, <code>LEFT JOIN</code>']
        code = [body[m.start():m.end()] for m in re.finditer('<code>(.+?)</code>', body)]
    except (AttributeError, TypeError):
        # Bug fix: re.finditer raises TypeError (not AttributeError) for a
        # None body, so the original guard never fired; catch both.
        code = None
    return code
# Known list tag fields
# Buckets for posts whose language could be identified from tags/body text.
knownJava = []
knownC = []
knownCSharp = []
knownPython = []
# Path to the Stack Overflow posts XML file, given on the command line.
xmldoc = sys.argv[1]
tree = ET.parse(xmldoc)
root = tree.getroot()
# print (root.attrib)
myList = []
# for each row in the xml document gather body information
for row in root:
    # Body holds all comment information from post
    body = row.get('Body')
    rowId = row.get('Id')
    # Tags for comment post
    tags = row.get('Tags')
    # parse body to find code tags
    code = parseBodyForTagCode(body)
    # Encode list information about code into UTF8
    codeUni = repr([x.encode('UTF8') for x in code])
    # If code isn't present ignore
    if codeUni == '[]':
        continue
    # print (codeUni)
    if tags != None:
        # Assign all known code to list
        # NOTE(review): substring checks like "java" in body will also
        # match e.g. "javascript" — confirm this looseness is intended.
        if ("<java>" in tags) or ("java" in body):
            knownJava.append(rowId+'`'+codeUni+'`'+tags)
        if ("<python>" in tags) or ("python" in body):
            knownPython.append(rowId+'`'+codeUni+'`'+tags)
        if ("<C>" in tags) or ("C" in body):
            knownC.append(rowId+'`'+codeUni+'`'+tags)
        if ("<C#>" in tags) or ("C#" in body):
            knownCSharp.append(rowId+'`'+codeUni+'`'+tags)
        myList.append(rowId+'`'+codeUni+'`'+tags)
    else:
        myList.append(rowId+'`'+codeUni)
# Ngram section
# print(myList)
############################################################################
# Emit word trigrams from each stripped code fragment for later analysis.
for item in myList:
    allCodeTags = [item[m.start():m.end()] for m in re.finditer('<code>(.+?)</code>', item)]
    for code in allCodeTags:
        cleanCode = re.sub('<code>|</code>','',code)
        # print (cleanCode)
        trigrams = ngrams(cleanCode.split(), 3)
        for grams in trigrams:
            print (grams)
# break | mit | Python |
|
5fb7d1912eda9d6381af3e0cfa7655ed2d6795f2 | Create Xclipboard.py | adyezik/Xclipboard | Xclipboard.py | Xclipboard.py | from tkinter import Tk
__all__=['copy','paste','clear']
__author__='Calvin(Martin)Adyezik [email protected]'
__doc__="""simple Module to work with clipboard based on tkinter -Python 3"""
__name__='Xclipboard'
def copy(text):
    """copy text to clipboard """
    root = None
    try:
        root = Tk()
        # Hide the helper window; we only need its clipboard handle.
        root.withdraw()
        root.clipboard_append(text)
    except Exception as e:
        print ('Error: ',e)
    finally:
        # Bug fix: when Tk() itself failed, `root` was unbound and the
        # original raised NameError here; only destroy on success.
        if root is not None:
            root.destroy()
def paste():
    """paste text from clipboad"""
    root = None
    try:
        root = Tk()
        # Hide the helper window; we only need its clipboard handle.
        root.withdraw()
        return root.clipboard_get()
    except Exception as e:
        print ('Error: ',e)
    finally:
        # Bug fix: when Tk() itself failed, `root` was unbound and the
        # original raised NameError here; only destroy on success.
        if root is not None:
            root.destroy()
def clear():
    """clear clipboard"""
    root = None
    try:
        root = Tk()
        # Hide the helper window; we only need its clipboard handle.
        root.withdraw()
        root.clipboard_clear()
    except Exception as e:
        print ('Error: ',e)
    finally:
        # Bug fix: when Tk() itself failed, `root` was unbound and the
        # original raised NameError here; only destroy on success.
        if root is not None:
            root.destroy()
| mit | Python |
|
501454e30a93b6ec706add520a6b106940b537d9 | Create card_pick.py | andrewzwicky/puzzles | FiveThirtyEightRiddler/2017-04-21/card_pick.py | FiveThirtyEightRiddler/2017-04-21/card_pick.py | import random
from collections import Counter
import matplotlib.pyplot as plt
from multiprocessing import Pool
import numpy as np
import itertools
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
def simulate_single_run(num_cards, hand_perc, stop_percentage):
    """Play one round of the card-stopping game.

    Draw hand_perc * num_cards cards one at a time; stop as soon as the
    probability that any remaining draw beats the current maximum falls
    to stop_percentage or below. Returns True when the stopped-on card is
    the highest card of the whole hand.
    """
    hand_size = int(num_cards * hand_perc)
    remaining_cards = list(range(1, num_cards + 1))
    hand = random.sample(remaining_cards, hand_size)
    seen_cards = []
    # print(hand, max(hand))
    for num_card, card in enumerate(hand, start=1):
        seen_cards.append(card)
        remaining_cards.remove(card)
        high_card_so_far = max(seen_cards)
        # Chance a single undrawn card beats the best card seen so far.
        # NOTE(review): divides by len(remaining_cards); hand_perc == 1
        # would empty the deck on the last card and divide by zero.
        prob_draw_higher_than_highest = len([c for c in remaining_cards if c > high_card_so_far]) / len(remaining_cards)
        # Chance that at least one of the remaining hand cards beats it.
        prob_any_remaining_higher = 1 - ((1 - prob_draw_higher_than_highest) ** (hand_size - num_card))
        # print(seen_cards, high_card_so_far, prob_draw_higher_than_highest, prob_any_remaining_higher)
        # On the final card this probability is 0, so the loop always returns.
        if prob_any_remaining_higher <= stop_percentage:
            return card == max(hand)
def simulate_single_percentage(num_cards, hand_perc, stop_percentage, trials):
    """Estimate the win probability of one stopping threshold by simulation."""
    wins = Counter(
        simulate_single_run(num_cards, hand_perc, stop_percentage)
        for _ in range(trials)
    )[True]
    return wins / trials
def trail_multiple_percentages(num_cards, hand_perc, stop_percentages, trials):
    """Return the best estimated win rate over all stopping thresholds."""
    result = 0
    for pct in stop_percentages:
        result = max(result, simulate_single_percentage(num_cards, hand_perc, pct, trials))
    # Progress output; this also runs inside multiprocessing workers.
    print(num_cards, hand_perc, result)
    return result
if __name__ == '__main__':
    # Sweep deck sizes and hand fractions; for each combination find the
    # best stopping threshold and plot the resulting win-rate surface.
    #NUM_CARDS = np.logspace(2, 5, num=4, dtype=int)
    NUM_CARDS = np.linspace(100, 1000, num=4, dtype=int)
    HAND_PERC = np.linspace(.2, .7, num=6, dtype=float)
    PERCENTAGES = np.linspace(0, 1, num=10, dtype=float)
    SAMPLE_SIZE = 1000
    # Fan the (deck size, hand fraction) grid out over four worker processes.
    with Pool(4) as p:
        results = p.starmap(trail_multiple_percentages,
                            [(num_cards, hand_size, PERCENTAGES, SAMPLE_SIZE) for num_cards, hand_size in
                             itertools.product(NUM_CARDS, HAND_PERC)])
    # Reshape the flat result list back onto the parameter grid.
    results = np.array(results).reshape((len(NUM_CARDS), len(HAND_PERC))).T
    NUM_CARDS, HAND_PERC = np.meshgrid(NUM_CARDS, HAND_PERC)
    fig = plt.figure()
    ax = fig.gca(projection='3d')
    surf = ax.plot_surface(NUM_CARDS, HAND_PERC, results, linewidth=0, antialiased=False, cmap=cm.coolwarm)
    plt.show()
| mit | Python |
|
1730cecbc05928e93e2dec826a67139bf6765cc6 | Add a src-side script to display a summary of host info. | chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Just-D/chromium-1,Pluto-tv/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,Chilledheart/chromium,Fireblend/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,Just-D/chromium-1,ltilve/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,Chilledheart/chromium,hgl888/chromium-crosswalk,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,Chilledheart/chromium,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,Ju
st-D/chromium-1,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk | testing/scripts/host_info.py | testing/scripts/host_info.py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import multiprocessing
import os
import platform
import subprocess
import sys
import common
def is_linux():
    """Return True when the interpreter reports a Linux platform."""
    return sys.platform[:5] == 'linux'
def get_free_disk_space(failures):
  """Returns the amount of free space on the current disk, in GiB.

  Appends a message to *failures* on unsupported (non-POSIX) platforms.

  Returns:
    The amount of free space on the current disk, measured in GiB.
  """
  if os.name == 'posix':
    # Stat the current path for info on the current disk.
    stat_result = os.statvfs('.')
    # Multiply block size by number of free blocks, express in GiB.
    # Bug fix: the original divided by (1024.0 / 1024.0 / 1024.0), which
    # equals 1/1024 and so *multiplied* the byte count by 1024 instead of
    # converting it to GiB; divide by 1024^3.
    return stat_result.f_frsize * stat_result.f_bavail / (
        1024.0 * 1024.0 * 1024.0)
  failures.append('get_free_disk_space: OS %s not supported.' % os.name)
  return 0
def get_num_cpus(failures):
  """Returns the number of logical CPUs on this machine.

  Appends 'get_num_cpus' to *failures* when the count is unavailable.

  Returns:
    The number of logical CPUs on this machine, or 'unknown' if indeterminate.
  """
  try:
    cpu_total = multiprocessing.cpu_count()
  except NotImplementedError:
    failures.append('get_num_cpus')
    return 'unknown'
  return cpu_total
def get_device_info(args, failures):
  """Parses the device info for each attached device, and returns a summary
  of the device info and any mismatches.

  Runs build/android/buildbot/bb_device_status_check.py and summarizes its
  JSON output; only meaningful on Linux hosts with attached devices.

  Returns:
    A dict indicating the result.
  """
  if not is_linux():
    return {}
  with common.temporary_file() as tempfile_path:
    rc = common.run_command([
        sys.executable,
        os.path.join(args.paths['checkout'],
                     'build',
                     'android',
                     'buildbot',
                     'bb_device_status_check.py'),
        '--json-output', tempfile_path])
    if rc:
      failures.append('bb_device_status_check')
      return {}
    with open(tempfile_path, 'r') as src:
      device_info = json.load(src)
  results = {}
  results['devices'] = sorted(v['serial'] for v in device_info)
  # build_detail is a ':'-separated "name:version:type" string per device.
  details = [v['build_detail'] for v in device_info]
  def unique_build_details(index):
    return sorted(list(set([v.split(':')[index] for v in details])))
  parsed_details = {
      'device_names': unique_build_details(0),
      'build_versions': unique_build_details(1),
      'build_types': unique_build_details(2),
  }
  # A single unique value per field is reported directly; any disagreement
  # is flagged as MISMATCH, with the full list kept under '<field>_list'.
  # (iteritems => this file targets Python 2.)
  for k, v in parsed_details.iteritems():
    if len(v) == 1:
      results[k] = v[0]
    else:
      results[k] = 'MISMATCH'
      results['%s_list' % k] = v
      failures.append(k)
  return results
def main_run(args):
  """Collect host/device info, write a JSON summary, and return pass/fail.

  Writes {'valid', 'failures', '_host_info'} to args.output; the return
  value is truthy (nonzero exit) when any probe recorded a failure.
  """
  failures = []
  host_info = {}
  host_info['os_system'] = platform.system()
  host_info['os_release'] = platform.release()
  host_info['processor'] = platform.processor()
  host_info['num_cpus'] = get_num_cpus(failures)
  host_info['free_disk_space'] = get_free_disk_space(failures)
  host_info['python_version'] = platform.python_version()
  host_info['python_path'] = sys.executable
  host_info['devices'] = get_device_info(args, failures)
  json.dump({
      'valid': True,
      'failures': failures,
      '_host_info': host_info,
  }, args.output)
  return len(failures) != 0
def main_compile_targets(args):
  """Write the (empty) list of required compile targets to args.output."""
  args.output.write(json.dumps([]))
if __name__ == '__main__':
  # Dispatch table from sub-command name to handler; common.run_script
  # parses argv and invokes the matching function.
  funcs = {
    'run': main_run,
    'compile_targets': main_compile_targets,
  }
  sys.exit(common.run_script(sys.argv[1:], funcs))
| bsd-3-clause | Python |
|
ab31fea8a0d30bc4b68813ce635880d4682cfc2f | Write base classes for collection profiles. | dseomn/cohydra | cohydra/profile.py | cohydra/profile.py | import abc
import logging
import six
class Profile(six.with_metaclass(abc.ABCMeta)):
    """Base class for all collection profiles.

    Attributes:
        top_dir: Where this profile's files will be stored.
        parent: The profile from which this profile is derived, or
            None for a root profile.
        children: List of child profiles.
    """
    def __init__(self, top_dir, parent):
        """Create a profile and register it with its parent (if any)."""
        self.top_dir = top_dir
        self.parent = parent
        self.children = []
        if self.parent is not None:
            # Link into the parent's tree so the *_all walks can reach us.
            self.parent.children.append(self)
    def __str__(self):
        # Only the parent's top_dir is shown to avoid a fully recursive repr.
        return '%s.%s(top_dir=%r, parent=%r)' % (
            self.__class__.__module__,
            self.__class__.__name__,
            self.top_dir,
            None if self.parent is None else self.parent.top_dir,
        )
    def generate_all(self, depth=0):
        """Generate this profile and all of its children.

        `depth` only controls log indentation during the recursive walk.
        """
        logging.info('%sGenerating %s', ' ' * depth, self)
        self.generate()
        # TODO: parallelize?
        for child in self.children:
            child.generate_all(depth + 1)
    def print_all(self, depth=0):
        """List all profiles, for debugging."""
        print(' ' * depth + str(self))
        for child in self.children:
            child.print_all(depth + 1)
    def log(self, level, msg, *args, **kwargs):
        """Log, with additional info about the profile prepended."""
        logging.log(
            level,
            '%s: %s' % (self, msg),
            *args,
            **kwargs)
    @abc.abstractmethod
    def generate(self):
        """Generate this profile from its parent.

        This method assumes that the parent is up-to-date.
        """
        pass
class RootProfile(Profile):
    """Profile backed by a directory of original files.

    A root has no parent profile to derive from, so generation is a
    no-op: whatever already lives in ``top_dir`` is the content.
    """
    def __init__(self, top_dir):
        super(RootProfile, self).__init__(top_dir, None)
    def generate(self):
        # Nothing to derive -- the originals are already in place.
        return None
| apache-2.0 | Python |
|
c2dac9161705e6edbf5b059fd8a442c2754577ff | Add Lambda job | tomislacker/python-mke-trash-pickup,tomislacker/python-mke-trash-pickup | refusereminder.py | refusereminder.py | #!/usr/bin/env python
from __future__ import print_function
import boto3
import json
import os
from mkerefuse.refuse import RefuseQuery
from mkerefuse.refuse import RefuseQueryAddress
DEFAULT_SNS_TOPIC = 'mke-trash-pickup'
"""Default topic to notify for pickup changes"""
DEFAULT_S3_BUCKET = 'mke-trash-pickup'
"""Default S3 bucket name for storing persistent data"""
DEFAULT_S3_PREFIX = ''
"""Default S3 key prefix for persistent data"""
DEFAULT_S3_KEY = 'mke-trash-pickup.json'
"""Default S3 key for persistent data"""
def get_sns_topic_arn(topic_name, aws_region=None, aws_account_num=None):
    """Build 'arn:aws:sns:<region>:<account>:<topic>'.

    Region and account number are resolved from the active AWS session
    (boto3) only when the caller does not supply them explicitly.
    """
    region = aws_region
    if region is None:
        region = boto3.session.Session().region_name
    account = aws_account_num
    if account is None:
        account = boto3.client('sts').get_caller_identity()['Account']
    parts = ("arn", "aws", "sns", region, account, topic_name)
    return ":".join(parts)
def notify_pickup_change(pickup, sns_topic):
    """
    Produces a notification for a garbage pickup change

    Formats the three pickup dates into a plain-text message, prints it,
    and (intended) publishes it to the given SNS topic.
    """
    print("Notifying SNS: {}".format(sns_topic.arn))
    notify_msg = """
Garbage: {garbage}
Recycle (After): {recycle_after}
Recycle (Before): {recycle_before}""".format(
        garbage=pickup.next_pickup_garbage,
        recycle_after=pickup.next_pickup_recycle_after,
        recycle_before=pickup.next_pickup_recycle_before).strip()
    print("\n{}\n".format(notify_msg))
    # NOTE(review): this early return leaves the publish() below
    # unreachable -- it looks like a debugging leftover.  Remove the
    # `return` once notifications should actually go out.
    return
    sns_topic.publish(
        Subject='Garbage Day Update',
        Message=notify_msg)
def lambda_handler(event, context):
    """
    Detects garbage day changes & updates them

    Expected `event` keys: house_number, direction, street_name,
    street_type; optional overrides: s3_bucket, s3_prefix, s3_key,
    sns_topic.  The previous schedule is persisted in S3; when the freshly
    scraped schedule differs, a change notification is produced.
    `context` is the standard Lambda context object (unused).
    """
    # Compose the address
    address = RefuseQueryAddress(
        house_number=event['house_number'],
        direction=event['direction'],
        street_name=event['street_name'],
        street_type=event['street_type'])
    print("Querying address: {num} {d} {name} {t}".format(
        num=address.house_number,
        d=address.direction,
        name=address.street_name,
        t=address.street_type))
    # Query for the collection schedule
    pickup = RefuseQuery.Execute(address)
    # Create an S3 resource for fetching/storing persistent data
    s3 = boto3.resource('s3')
    # Attempt reading the last pickup information
    s3_bucket = event.get('s3_bucket', DEFAULT_S3_BUCKET)
    s3_key = os.path.join(
        event.get('s3_prefix', DEFAULT_S3_PREFIX),
        event.get('s3_key', DEFAULT_S3_KEY)).lstrip('/')
    s3_object = s3.Object(s3_bucket, s3_key)
    last_data = json.loads('{}')
    try:
        print("Loading previous pickup data from s3://{b}/{k}".format(
            b=s3_object.bucket_name,
            k=s3_object.key))
        last_data = json.loads(s3_object.get()['Body'].read().decode('utf-8'))
    except Exception as e:
        # Failed to load old data for some reason
        # Ignore it and assume a change in dates
        # (deliberate best-effort: a missing/corrupt object simply forces
        # a re-notification rather than crashing the Lambda).
        print("Failed to load previous pickup data")
        print(e)
    # Overwrite previous pickup data with the new data
    s3_object.put(Body=json.dumps(pickup.to_dict()))
    # If the information differs, notify of the changes
    if last_data != pickup.to_dict():
        print("Pickup change detected")
        sns = boto3.resource('sns')
        notify_pickup_change(
            pickup,
            sns_topic=sns.Topic(
                get_sns_topic_arn(event.get('sns_topic', DEFAULT_SNS_TOPIC))))
| unlicense | Python |
|
8cac10350cdbc33d243a561ba06c25f5d01e9a04 | fix for lists | VirusTotal/content,demisto/content,demisto/content,VirusTotal/content,demisto/content,demisto/content,VirusTotal/content,VirusTotal/content | Scripts/SearchIncidents_5.0/SearchIncidents.py | Scripts/SearchIncidents_5.0/SearchIncidents.py | from typing import Dict, List
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
special = ['n', 't', '\\', '"', '\'', '7', 'r']
def check_if_found_incident(res: List):
    """Validate a getIncidents response, raising DemistoException on failure.

    A well-formed response is a non-empty list whose first entry carries a
    dict under 'Contents' holding a non-None 'data' key.  Returns None when
    the response looks valid.
    """
    well_formed = bool(res) and isinstance(res, list) and isinstance(res[0].get('Contents'), dict)
    if not well_formed:
        raise DemistoException(f'failed to get incidents from demisto.\nGot: {res}')
    contents = res[0]['Contents']
    if 'data' not in contents:
        raise DemistoException(res[0].get('Contents'))
    if contents['data'] is None:
        raise DemistoException("Incidents not found.")
def is_valid_args(args: Dict):
    """Reject argument values containing unsupported backslash escapes.

    Every value is scanned for backslashes; a backslash must be followed by
    one of the characters in the module-level `special` list.  List-typed
    arguments are joined into a single string before scanning.

    Returns True when every value is clean; raises DemistoException with one
    message per offending argument otherwise.
    """
    array_args: List[str] = ['id', 'name', 'status', 'notstatus', 'reason', 'level', 'owner', 'type', 'query']
    error_msg: List[str] = []
    for _key, value in args.items():
        if _key in array_args:
            value = ','.join(value)
        i = 0
        while i < len(value):
            if value[i] == '\\':
                # Bounds-check i + 1: a value ending in a lone backslash
                # previously raised IndexError here instead of reporting
                # a parse error.
                if i + 1 >= len(value) or value[i + 1] not in special:
                    error_msg.append(f'Error while parsing the argument: "{_key}" '
                                     f'\nSucceeded parsing untill:\n- "{value[0:i]}"')
                else:
                    i += 1
            i += 1
    if len(error_msg) != 0:
        raise DemistoException('\n'.join(error_msg))
    return True
def search_incidents(args: Dict):
    """Validate args, run getIncidents, and render results.

    Writes a markdown table to the war room and stores the raw incident
    list under the `foundIncidents` context key.
    """
    if is_valid_args(args):
        res: List = demisto.executeCommand('getIncidents', args)
        check_if_found_incident(res)
        data: Dict = res[0]['Contents']['data']
        context_entry: Dict = {'foundIncidents': data}
        headers: List[str] = ['id', 'name', 'severity', 'status', 'owner', 'created', 'closed']
        md: str = tableToMarkdown(name="Incidents found", t=data, headers=headers)
        return_outputs(md, context_entry, res)
def main():
    """Script entry point: surface DemistoException as a user-facing error."""
    args: Dict = demisto.args()
    try:
        search_incidents(args)
    except DemistoException as error:
        return_error(str(error), error)
# Demisto runs scripts under several module names depending on Python version.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
| from typing import Dict, List
import demistomock as demisto
from CommonServerPython import *
from CommonServerUserPython import *
special = ['n', 't', '\\', '"', '\'', '7', 'r']
def check_if_found_incident(res: List):
    """Raise DemistoException unless `res` is a non-empty getIncidents
    response whose first entry has a dict 'Contents' with non-None 'data'."""
    if res and isinstance(res, list) and isinstance(res[0].get('Contents'), dict):
        if 'data' not in res[0]['Contents']:
            raise DemistoException(res[0].get('Contents'))
        elif res[0]['Contents']['data'] is None:
            raise DemistoException("Incidents not found.")
    else:
        raise DemistoException(f'failed to get incidents from demisto.\nGot: {res}')
def is_valid_args(args: Dict):
    """Validate backslash escapes in every argument value.

    NOTE(review): `value[i + 1]` is never bounds-checked, so a value
    ending in a single backslash raises IndexError instead of producing
    a parse error message.
    """
    error_msg: List[str] = []
    for _key, value in args.items():
        i = 0
        while i < len(value):
            if value[i] == '\\':
                if value[i + 1] not in special:
                    error_msg.append(f'Error while parsing the argument: "{_key}" '
                                     f'\nSucceeded parsing untill:\n- "{value[0:i]}"')
                else:
                    i += 1
            i += 1
    if len(error_msg) != 0:
        raise DemistoException('\n'.join(error_msg))
    return True
def search_incidents(args: Dict):
    """Validate args, run getIncidents, and render results to the war room
    plus the `foundIncidents` context key."""
    if is_valid_args(args):
        res: List = demisto.executeCommand('getIncidents', args)
        check_if_found_incident(res)
        data: Dict = res[0]['Contents']['data']
        context_entry: Dict = {'foundIncidents': data}
        headers: List[str] = ['id', 'name', 'severity', 'status', 'owner', 'created', 'closed']
        md: str = tableToMarkdown(name="Incidents found", t=data, headers=headers)
        return_outputs(md, context_entry, res)
def main():
    """Entry point: report DemistoException via return_error."""
    args: Dict = demisto.args()
    try:
        search_incidents(args)
    except DemistoException as error:
        return_error(str(error), error)
# Demisto runs scripts under several module names depending on Python version.
if __name__ in ('__main__', '__builtin__', 'builtins'):
    main()
d88a473030d52529ad0abc626776bbb5d4886067 | Add tests of raw FIPS U2F commands | Yubico/yubikey-manager,Yubico/yubikey-manager | test/on_yubikey/test_fips_u2f_commands.py | test/on_yubikey/test_fips_u2f_commands.py | import unittest
from fido2.hid import (CTAPHID)
from ykman.util import (TRANSPORT)
from ykman.driver_fido import (FIPS_U2F_CMD)
from .util import (DestructiveYubikeyTestCase, is_fips, open_device)
HID_CMD = 0x03
@unittest.skipIf(not is_fips(), 'FIPS YubiKey required.')
class TestFipsU2fCommands(DestructiveYubikeyTestCase):
    """Raw vendor-specific U2F commands against a FIPS YubiKey over CTAPHID.MSG.

    Each payload is an ISO 7816-style APDU: [CLA=0, INS, P1, P2,
    Lc (3 bytes), data...]; replies end in a two-byte status word,
    with 0x9000 meaning success.
    """
    def test_echo_command(self):
        # ECHO returns the payload unchanged, followed by SW 0x9000.
        dev = open_device(transports=TRANSPORT.FIDO)
        res = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.ECHO], 0, 0, *[0, 0, 6], *b'012345'])
        self.assertEqual(res, b'012345\x90\x00')
    def test_pin_commands(self):
        # Assumes no PIN is set at beginning of test
        dev = open_device(transports=TRANSPORT.FIDO)
        # Verify before any PIN exists -> conditions not satisfied (0x6986).
        verify_res1 = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.VERIFY_PIN], 0, 0, *[0, 0, 6], *b'012345'])
        # SET_PIN payload is [pin-length, pin-bytes...].
        res = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.SET_PIN], 0, 0, *[0, 0, 7], *[6, *b'012345']])
        verify_res2 = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.VERIFY_PIN], 0, 0, *[0, 0, 6], *b'543210'])
        verify_res3 = dev.driver._dev.call(
            CTAPHID.MSG,
            [*[0, FIPS_U2F_CMD.VERIFY_PIN], 0, 0, *[0, 0, 6], *b'012345'])
        self.assertEqual(verify_res1, b'\x69\x86') # PIN not set
        self.assertEqual(res, b'\x90\x00') # Success
        self.assertEqual(verify_res2, b'\x63\xc0') # Incorrect PIN
        self.assertEqual(verify_res3, b'\x90\x00') # Success
    def test_reset_command(self):
        # RESET outcome depends on device state; all three are acceptable.
        dev = open_device(transports=TRANSPORT.FIDO)
        res = dev.driver._dev.call(
            CTAPHID.MSG, [*[0, FIPS_U2F_CMD.RESET], 0, 0])
        # 0x6985: Touch required
        # 0x6986: Power cycle required
        # 0x9000: Success
        self.assertIn(res, [b'\x69\x85', b'\x69\x86', b'\x90\x00'])
    def test_verify_fips_mode_command(self):
        dev = open_device(transports=TRANSPORT.FIDO)
        res = dev.driver._dev.call(
            CTAPHID.MSG, [*[0, FIPS_U2F_CMD.VERIFY_FIPS_MODE], 0, 0])
        # 0x6a81: Function not supported (PIN not set - not FIPS mode)
        # 0x9000: Success (PIN set - FIPS mode)
        self.assertIn(res, [b'\x6a\x81', b'\x90\x00'])
| bsd-2-clause | Python |
|
7f1f8c53508d7cbaf3513b738dff21e6ea6e5ca1 | add script to generate multiple instance catalogs in different bands | LSSTDESC/Twinkles,DarkEnergyScienceCollaboration/Twinkles,rbiswas4/Twinkles,rbiswas4/Twinkles,DarkEnergyScienceCollaboration/Twinkles,LSSTDESC/Twinkles | twinkles/InstcatGenerator.py | twinkles/InstcatGenerator.py | """
Based upon examples by Scott Daniel ([email protected]) found here:
https://stash.lsstcorp.org/projects/SIM/repos/sims_catutils/browse/python/lsst/sims/
catUtils/exampleCatalogDefinitions/phoSimCatalogExamples.py
"""
from lsst.sims.catalogs.measures.instance import CompoundInstanceCatalog
from lsst.sims.catalogs.generation.db import CatalogDBObject
from lsst.sims.catUtils.baseCatalogModels import OpSim3_61DBObject
from lsst.sims.catUtils.exampleCatalogDefinitions.phoSimCatalogExamples import \
PhoSimCatalogPoint, PhoSimCatalogSersic2D, PhoSimCatalogZPoint
from lsst.sims.catUtils.utils import ObservationMetaDataGenerator
#from sprinkler import sprinklerCompound
class InstcatGenerator(object):
    """Generate phoSim instance catalogs for OpSim visits of one field.

    Wraps ObservationMetaDataGenerator to pull visit metadata from an
    OpSim sqlite database, then writes star and galaxy catalogs for a
    chosen visit when called.
    """
    # Star catalog DB objects included in every instance catalog.
    _starObjNames = ['msstars', 'bhbstars', 'wdstars', 'rrlystars',
                     'cepheidstars']
    def __init__(self, opsim_db, fieldRA, fieldDec, boundLength=0.3):
        """Query all visits overlapping the (fieldRA, fieldDec) ranges.

        boundLength sets the search radius (degrees) for the query.
        """
        gen = ObservationMetaDataGenerator(database=opsim_db,
                                           driver='sqlite')
        self.obs_md_results = gen.getObservationMetaData(fieldRA=fieldRA,
                                                         fieldDec=fieldDec,
                                                         boundLength=boundLength)
    def find_visits(self, bandpass, nmax=None):
        """Return up to nmax visits observed through the given bandpass."""
        visits = []
        for obs_metadata in self.obs_md_results:
            if nmax is not None and len(visits) == nmax:
                break
            if obs_metadata.bandpass == bandpass:
                visits.append(obs_metadata)
        return visits
    def __call__(self, outfile, obs_metadata):
        """Write star and galaxy phoSim catalogs for one visit to outfile."""
        catalogs = []
        # Add Instance Catalogs for phoSim stars.
        for starName in self._starObjNames:
            starDBObj = CatalogDBObject.from_objid(starName)
            catalogs.append(PhoSimCatalogPoint(starDBObj,
                                               obs_metadata=obs_metadata))
        # Add phosim Galaxy Instance Catalogs to compound Instance Catalog.
        galsBulge = CatalogDBObject.from_objid('galaxyBulge')
        catalogs.append(PhoSimCatalogSersic2D(galsBulge,
                                              obs_metadata=obs_metadata))
        galsDisk = CatalogDBObject.from_objid('galaxyDisk')
        catalogs.append(PhoSimCatalogSersic2D(galsDisk,
                                              obs_metadata=obs_metadata))
        galsAGN = CatalogDBObject.from_objid('galaxyAgn')
        catalogs.append(PhoSimCatalogZPoint(galsAGN,
                                            obs_metadata=obs_metadata))
        # Write the catalogs to the output file one at a time.
        write_header = True
        for catalog in catalogs:
            # Append mode: only the first catalog emits the header block.
            catalog.write_catalog(outfile, write_mode='a',
                                  write_header=write_header,
                                  chunk_size=20000)
            write_header = False
if __name__ == '__main__':
    import os
    import pickle
    import time
    # This following is a deep drilling field ID for enigma_1189, but
    # fieldID is not one of the selection options in
    # getObservationMetaData(...), so we need to continue using
    # fieldRA, fieldDec
    fieldID = 1427
    fieldRA = (53, 54)
    fieldDec = (-29, -27)
    opsim_db = '/nfs/slac/g/ki/ki18/jchiang/DESC/Twinkles/work/enigma_1189_sqlite.db'
    pickle_file = 'instcat_generator_enigma_1189_%(fieldID)i.pickle' % locals()
    t0 = time.time()
    # Building the generator runs a slow OpSim query, so the result is
    # cached with pickle and reused on subsequent runs.
    if not os.path.isfile(pickle_file):
        print "Extracting visits from %s:" % os.path.basename(opsim_db)
        generator = InstcatGenerator(opsim_db, fieldRA, fieldDec)
        pickle.dump(generator, open(pickle_file, 'w'))
        print "execution time:", time.time() - t0
    else:
        print "Loading pickle file with visits:", pickle_file
        generator = pickle.load(open(pickle_file))
        print "execution time:", time.time() - t0
    # Write one phoSim input catalog per band for up to nmax visits each.
    nmax = 1
    for bandpass in 'ugrizy':
        print "band pass:", bandpass
        visits = generator.find_visits(bandpass, nmax=nmax)
        for visit in visits:
            obshistid = visit.phoSimMetaData['Opsim_obshistid'][0]
            outfile = 'phosim_input_%s_%07i.txt' % (bandpass, obshistid)
            print outfile
            generator(outfile, visit)
| mit | Python |
|
1675ecd5ea2d4aaf8d8b6aa76d007d081f92eba6 | add context processor for static | praekelt/nurseconnect,praekelt/nurseconnect,praekelt/nurseconnect | nurseconnect/context_processors.py | nurseconnect/context_processors.py | from django.conf import settings
def compress_settings(request):
    """Template context processor exposing STATIC_URL and ENV to templates."""
    context = {
        "STATIC_URL": settings.STATIC_URL,
        "ENV": settings.ENV,
    }
    return context
| bsd-2-clause | Python |
|
a1820a0e5f9bd891b20f70ab68dfd4bb385047a0 | Add utils to allow multiclass classification. | davidgasquez/kaggle-airbnb | utils/multiclassification.py | utils/multiclassification.py | from __future__ import division
import numpy as np
from sklearn.multiclass import OneVsOneClassifier
from sklearn.multiclass import _fit_binary
from sklearn.externals.joblib import Parallel, delayed
from unbalanced_dataset import SMOTE
def _fit_ovo_binary(estimator, X, y, i, j, sampling=None):
    """Fit a single binary estimator (one-vs-one).

    Rows belonging to classes i and j are selected and relabelled 0/1;
    when sampling == 'SMOTE' the pair is rebalanced with SMOTE before
    fitting.
    """
    cond = np.logical_or(y == i, y == j)
    y = y[cond]
    y_binary = np.empty(y.shape, np.int)
    y_binary[y == i] = 0
    y_binary[y == j] = 1
    ind = np.arange(X.shape[0])
    X_values = X[ind[cond]]
    y_values = y_binary
    if sampling == 'SMOTE':
        print 'SMOTE'
        # ratio=1: oversample the minority class up to parity.
        ratio = 1
        smote = SMOTE(ratio=ratio)
        X_values, y_values = smote.fit_transform(X_values, y_values)
    return _fit_binary(estimator, X_values, y_values, classes=[i, j])
class CustomOneVsOneClassifier(OneVsOneClassifier):
    # One-vs-one wrapper that can optionally SMOTE-rebalance each class pair.
    def __init__(self, estimator, n_jobs=1, sampling=None):
        self.estimator = estimator
        self.n_jobs = n_jobs
        self.sampling = sampling  # None or 'SMOTE'
    def predict_proba(self, X):
        # NOTE(review): this returns the pairwise decision function, not
        # calibrated probabilities -- confirm consumers only need a score
        # that ranks classes.
        return super(CustomOneVsOneClassifier, self).decision_function(X)
    def fit(self, X, y):
        """Fit underlying estimators.
        Parameters
        ----------
        X : (sparse) array-like, shape = [n_samples, n_features]
            Data.
        y : array-like, shape = [n_samples]
            Multi-class targets.
        Returns
        -------
        self
        """
        y = np.asarray(y)
        self.classes_ = np.unique(y)
        n_classes = self.classes_.shape[0]
        # One binary problem per unordered class pair (i < j), fit in parallel.
        self.estimators_ = Parallel(n_jobs=self.n_jobs)(
            delayed(_fit_ovo_binary)(
                self.estimator, X, y,
                self.classes_[i], self.classes_[j], sampling=self.sampling)
            for i in range(n_classes) for j in range(i + 1, n_classes))
        return self
| mit | Python |
|
175554c4af88e4ba634976e893510f04b43442b7 | Add tool for plotting a connection box lookahead delay matrix. | SymbiFlow/symbiflow-arch-defs,SymbiFlow/symbiflow-arch-defs | utils/plot_connection_box.py | utils/plot_connection_box.py | import argparse
import capnp
import os.path
import matplotlib.pyplot as plt
from matplotlib.colors import BoundaryNorm
from matplotlib.ticker import MaxNLocator
import numpy as np
def get_connection_box(cost_map, segment, connection_box):
    """Return (matrix, (x_offset, y_offset)) for one (segment, connection box).

    The cost map stores a flattened [segment][connection_box] grid of
    matrices alongside a parallel grid of offsets; both streams are walked
    in lockstep until the requested pair is reached.

    Raises:
        ValueError: if (segment, connection_box) lies outside the grid.
    """
    # The matrix grid and the offset grid must describe the same layout.
    assert cost_map.costMap.dims[0] == cost_map.offset.dims[0]
    assert cost_map.costMap.dims[1] == cost_map.offset.dims[1]
    nsegment = cost_map.costMap.dims[0]
    nconnection_box = cost_map.costMap.dims[1]
    m_itr = iter(cost_map.costMap.data)
    offset_itr = iter(cost_map.offset.data)
    for segment_idx in range(nsegment):
        for connection_box_idx in range(nconnection_box):
            m = next(m_itr).value
            offset = next(offset_itr).value
            if segment == segment_idx and connection_box_idx == connection_box:
                return m, (offset.x, offset.y)
    # Previously this fell off the end and implicitly returned None, which
    # made callers fail later with an opaque unpacking TypeError.
    raise ValueError(
        'segment %d / connection box %d not present in cost map (%d x %d)'
        % (segment, connection_box, nsegment, nconnection_box))
def plot_connection_box(cost_map, segment, connection_box):
    """Render the delay map of one (segment, connection box) pair.

    Shows two views of the same data: a pcolormesh with infinite-delay
    cells overlaid as scatter points, and a filled contour plot.
    Congestion is extracted alongside delay but is not currently plotted.
    """
    m, (x_off, y_off) = get_connection_box(cost_map, segment, connection_box)
    assert len(m.dims) == 2
    x_dim = m.dims[0]
    y_dim = m.dims[1]
    # generate 2 2d grids for the x & y bounds
    y, x = np.mgrid[slice(y_off, y_off + y_dim), slice(x_off, x_off + x_dim)]
    delay = np.zeros((y_dim, x_dim))
    congestion = np.zeros((y_dim, x_dim))
    itr = iter(m.data)
    # The serialized matrix is x-major; scatter each entry into the (y, x)
    # grid via a boolean mask on the coordinate arrays.
    for x_idx in range(x_dim):
        for y_idx in range(y_dim):
            value = next(itr)
            x_val = x_idx + x_off
            y_val = y_idx + y_off
            delay[(x == x_val) & (y == y_val)] = value.value.delay
            congestion[(x == x_val) & (y == y_val)] = value.value.congestion
    print(delay)
    # x and y are bounds, so z should be the value *inside* those bounds.
    # Therefore, remove the last value from the z array.
    delay_levels = MaxNLocator(nbins=50).tick_values(delay.min(), delay.max())
    # pick the desired colormap, sensible levels, and define a normalization
    # instance which takes data values and translates those into levels.
    cmap = plt.get_cmap('PiYG')
    norm = BoundaryNorm(delay_levels, ncolors=cmap.N, clip=True)
    fig, (ax0, ax1) = plt.subplots(nrows=2)
    im = ax0.pcolormesh(x, y, delay, cmap=cmap, norm=norm)
    ax0.autoscale(False)  # To avoid that the scatter changes limits
    # Mark unreachable cells (infinite delay) explicitly as scatter points.
    inf_idx = delay == float('inf')
    ax0.scatter(x[inf_idx], y[inf_idx])
    fig.colorbar(im, ax=ax0)
    ax0.set_title('pcolormesh with levels')
    # contours are *point* based plots, so convert our bound into point
    # centers
    cf = ax1.contourf(
        x + 1. / 2., y + 1. / 2., delay, levels=delay_levels, cmap=cmap
    )
    fig.colorbar(cf, ax=ax1)
    ax1.set_title('contourf with levels')
    # adjust spacing between subplots so `ax1` title and `ax0` tick labels
    # don't overlap
    fig.tight_layout()
    plt.show()
def main():
    """CLI entry point: load a capnp lookahead cost map and plot one entry."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--schema_path', help='Path to connection map schema', required=True
    )
    parser.add_argument('--lookahead_map', required=True)
    parser.add_argument('--segment', required=True, type=int)
    parser.add_argument('--connection_box', required=True, type=int)
    args = parser.parse_args()
    # Load the Cap'n Proto schema at runtime (pycapnp).
    connection_map = capnp.load(
        os.path.join(args.schema_path, 'connection_map.capnp')
    )
    with open(args.lookahead_map, 'rb') as f:
        # Cost maps can be very large; raise the traversal limit well above
        # the capnp default so the whole structure can be read.
        cost_map = connection_map.VprCostMap.read(
            f, traversal_limit_in_words=1024 * 1024 * 1024
        )
    plot_connection_box(cost_map, args.segment, args.connection_box)
if __name__ == "__main__":
    main()
| isc | Python |
|
83f0e2ae541b6b08b3eb6d5d8097bdfd85ccbd3c | Create ROUSE_simulation.py | Shirui816/FTinMS | ROUSE_simulation.py | ROUSE_simulation.py | import numpy as np
from scipy.linalg import toeplitz
from scipy.optimize import curve_fit
import scipy.linalg as sla
from matplotlib import pyplot as plt
from numba import jit
from sys import argv
from tqdm import tqdm
from numba import guvectorize, float64, jit
def rouse_mat(n):
    """Return the n x n Rouse connectivity matrix.

    Tridiagonal with 2 on the interior diagonal, 1 at the two free chain
    ends, and -1 on both off-diagonals (integer dtype).
    """
    rows = [[-1, 1] + [0] * (n - 2)]
    rows.extend([0] * (k - 1) + [1, -2, 1] + [0] * (n - 2 - k)
                for k in range(1, n - 1))
    rows.append([0] * (n - 2) + [1, -1])
    return -np.array(rows)
def zeta_mat(n, alpha, delta):
    """Return expm(-delta * T), with T the symmetric Toeplitz matrix built
    from the geometric decay profile exp(-alpha * k), k = 0..n-1."""
    decay_profile = np.exp(-alpha * np.arange(n))
    return sla.expm(-delta * toeplitz(decay_profile))
def Roessler2010_SRK2_rouse(A, B, y0, t, dW=None):
    """Integrate the linear SDE dX = A X dt + B dW on the uniform grid ``t``.

    Drift is expanded to second order in dt (SRK2-style step) while the
    noise enters additively, scaled by ``B``.  For Rouse chains, spring
    constants and mobilities are folded into ``A`` and ``B``.

    :param A: drift matrix, shape (n, n)
    :param B: noise amplitude, broadcast against the Wiener increments
    :param y0: initial configuration, shape (n, ndim)
    :param t: uniformly spaced time grid, shape (nt,)
    :param dW: optional pre-drawn increments, shape (nt - 1, n, ndim);
        drawn as N(0, sqrt(dt)) when omitted
    :returns: trajectory array of shape (nt, n, ndim)
    """
    nsteps = t.shape[0]
    A2 = A.dot(A)
    dt = t[1] - t[0]
    if dW is None:
        dW = np.random.normal(0, dt ** 0.5, (nsteps - 1, *y0.shape))
    traj = np.zeros((nsteps, *y0.shape))
    traj[0] = y0
    for step in range(nsteps - 1):
        cur = traj[step]
        traj[step + 1] = cur + A.dot(cur) * dt + 0.5 * A2.dot(cur) * dt ** 2 + dW[step] * B
    return traj
# Simulation parameters: chain lengths come from the command line; total
# time T with fixed step dt = 0.02 (nT steps).
# NOTE(review): np.int is removed in NumPy >= 1.24 -- switch to plain int
# when upgrading.
ndim = 3
ns = np.asarray(argv[1:], dtype=np.int)
T = 100
nT = int(T/0.02)
t=np.linspace(0,T,nT,endpoint=False)
for n in ns:
    ret = np.zeros((nT, ndim))
    msd = np.zeros((nT,))
    R = rouse_mat(n)
    # Average the centre-of-mass trajectory (.mean over beads) and its
    # squared displacement over 1000 independent noise realizations.
    for i in tqdm(range(1000), ascii=True, desc='Chian length of %d' % (n)):
        r = Roessler2010_SRK2_rouse(-3*R, np.ones((n,1))*np.sqrt(2), np.zeros((n,ndim)), t).mean(axis=1)
        ret += r
        msd += np.sum(r ** 2, axis=-1)
    # Columns: time followed by the averaged quantity.
    np.savetxt('traj_cm_%d.txt' % (n), np.vstack([t,ret.T/1000]).T)
    np.savetxt('msd_cm_%d.txt' % (n), np.vstack([t,msd/1000]).T)
| bsd-3-clause | Python |
|
18d11a06e1e84ece32277e0860229555aae321f0 | Add a snippet (Python). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/cross-platform_home_directory.py | python/cross-platform_home_directory.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
print(os.path.expanduser("~"))
| mit | Python |
|
c8b78cab7a32a300e418033185595fd79a290823 | add bmf study | YcheLanguageStudio/PythonStudy | qlcoder/image_processing/bmp_python3.py | qlcoder/image_processing/bmp_python3.py | import binascii
fi = open("aaa.bmp", "rb")
header = fi.read(1078)
line = fi.read()
fi.close()
binline = ''
for i in range(0,len(line)):
binline += bin(line[i])[2:].zfill(8)
newbinline = ''
for i in range(len(binline)):
if(i%7 == 0):
newbinline+='0'
newbinline+=binline[i]
newhexline = hex(int(newbinline, 2))[2:]
newhexline = '0' + newhexline
newbyteline = bytes().fromhex(newhexline)
fo = open("out.bmp", "wb")
outbmp = header + newbyteline
line = fo.write(outbmp)
fo.close() | mit | Python |
|
c9b3bd8309d3d1448823787160021a8688e8f3c1 | Add python to make vv h5 file | sot/mica,sot/mica | vv_h5_setup.py | vv_h5_setup.py | import tables
vv_desc = dict(
obsid=tables.IntCol(pos=0),
revision=tables.IntCol(pos=1),
most_recent=tables.IntCol(pos=2),
slot=tables.IntCol(pos=3),
type=tables.StringCol(10,pos=4),
n_pts=tables.IntCol(pos=5),
rad_off=tables.FloatCol(pos=6),
frac_dy_big=tables.FloatCol(pos=7),
frac_dz_big=tables.FloatCol(pos=8),
frac_mag_big=tables.FloatCol(pos=9),
mean_y =tables.FloatCol(pos=10),
mean_z =tables.FloatCol(pos=11),
dy_mean=tables.FloatCol(pos=12),
dy_med =tables.FloatCol(pos=13),
dy_rms =tables.FloatCol(pos=14),
dz_mean=tables.FloatCol(pos=15),
dz_med =tables.FloatCol(pos=16),
dz_rms =tables.FloatCol(pos=17),
mag_mean=tables.FloatCol(pos=18),
mag_med =tables.FloatCol(pos=19),
mag_rms =tables.FloatCol(pos=20),
)
h5f = tables.openFile('/data/aca/archive/vv/vv.h5', 'a')
tbl = h5f.createTable('/', 'vv', vv_desc)
tbl.cols.obsid.createIndex()
h5f.close()
| bsd-3-clause | Python |
|
836c7aa92cd9d35e7d54046e835f285410780b84 | Create nodejs.py | vadimkantorov/wigwam | wigs/nodejs.py | wigs/nodejs.py | class nodejs(Wig):
git_uri = 'https://github.com/nodejs/node'
tarball_uri = 'https://github.com/nodejs/node/archive/v$RELEASE_VERSION$.tar.gz'
last_release_version = 'v4.7.2'
| mit | Python |
|
1edf0898422c74173a9b6526c789b140938664b5 | add main module | imsky/quickstart,imsky/quickstart | server/__main__.py | server/__main__.py | import server | mit | Python |
|
d48035b06b952b9ac4d95897d08de50d5977bf9f | Add basic test for OrderedDict. | selste/micropython,mgyenik/micropython,swegener/micropython,dhylands/micropython,feilongfl/micropython,neilh10/micropython,jmarcelino/pycom-micropython,paul-xxx/micropython,tdautc19841202/micropython,blmorris/micropython,ceramos/micropython,Timmenem/micropython,noahchense/micropython,tdautc19841202/micropython,emfcamp/micropython,chrisdearman/micropython,toolmacher/micropython,ahotam/micropython,kerneltask/micropython,mianos/micropython,xyb/micropython,toolmacher/micropython,noahwilliamsson/micropython,xyb/micropython,noahchense/micropython,ernesto-g/micropython,cwyark/micropython,ganshun666/micropython,xuxiaoxin/micropython,paul-xxx/micropython,ryannathans/micropython,danicampora/micropython,selste/micropython,AriZuu/micropython,cnoviello/micropython,puuu/micropython,xyb/micropython,martinribelotta/micropython,pfalcon/micropython,supergis/micropython,cloudformdesign/micropython,micropython/micropython-esp32,utopiaprince/micropython,tuc-osg/micropython,ruffy91/micropython,MrSurly/micropython,kerneltask/micropython,vriera/micropython,cnoviello/micropython,rubencabrera/micropython,stonegithubs/micropython,cnoviello/micropython,martinribelotta/micropython,redbear/micropython,supergis/micropython,lowRISC/micropython,Timmenem/micropython,heisewangluo/micropython,slzatz/micropython,rubencabrera/micropython,suda/micropython,noahchense/micropython,xhat/micropython,tobbad/micropython,tralamazza/micropython,tdautc19841202/micropython,mgyenik/micropython,MrSurly/micropython-esp32,ericsnowcurrently/micropython,jimkmc/micropython,adafruit/micropython,ahotam/micropython,lowRISC/micropython,bvernoux/micropython,selste/micropython,toolmacher/micropython,supergis/micropython,SHA2017-badge/micropython-esp32,pfalcon/micropython,adafruit/circuitpython,matthewelse/micropython,xuxiaoxin/micropython,ruffy91/micropython,mianos/micropython,mpalomer/micropython,ganshun666/micropython,henriknelson/micropython,deshipu
/micropython,ericsnowcurrently/micropython,ChuckM/micropython,xuxiaoxin/micropython,trezor/micropython,infinnovation/micropython,torwag/micropython,AriZuu/micropython,misterdanb/micropython,ganshun666/micropython,ryannathans/micropython,selste/micropython,lbattraw/micropython,puuu/micropython,jmarcelino/pycom-micropython,heisewangluo/micropython,selste/micropython,adafruit/circuitpython,noahchense/micropython,toolmacher/micropython,adafruit/circuitpython,skybird6672/micropython,blazewicz/micropython,drrk/micropython,hosaka/micropython,alex-march/micropython,hosaka/micropython,mianos/micropython,PappaPeppar/micropython,infinnovation/micropython,tuc-osg/micropython,dxxb/micropython,dxxb/micropython,adamkh/micropython,ceramos/micropython,deshipu/micropython,vitiral/micropython,skybird6672/micropython,PappaPeppar/micropython,ernesto-g/micropython,ceramos/micropython,micropython/micropython-esp32,xyb/micropython,HenrikSolver/micropython,praemdonck/micropython,alex-march/micropython,neilh10/micropython,Peetz0r/micropython-esp32,dxxb/micropython,adamkh/micropython,paul-xxx/micropython,puuu/micropython,kerneltask/micropython,redbear/micropython,utopiaprince/micropython,rubencabrera/micropython,pramasoul/micropython,mhoffma/micropython,xyb/micropython,adamkh/micropython,turbinenreiter/micropython,infinnovation/micropython,alex-march/micropython,neilh10/micropython,adafruit/micropython,praemdonck/micropython,infinnovation/micropython,dinau/micropython,cwyark/micropython,adafruit/circuitpython,Peetz0r/micropython-esp32,MrSurly/micropython-esp32,galenhz/micropython,drrk/micropython,dmazzella/micropython,mgyenik/micropython,matthewelse/micropython,firstval/micropython,HenrikSolver/micropython,blmorris/micropython,mpalomer/micropython,ChuckM/micropython,noahwilliamsson/micropython,EcmaXp/micropython,danicampora/micropython,torwag/micropython,oopy/micropython,kostyll/micropython,orionrobots/micropython,warner83/micropython,misterdanb/micropython,mgyenik/micropython,feilongfl/micro
python,dhylands/micropython,cloudformdesign/micropython,blazewicz/micropython,matthewelse/micropython,omtinez/micropython,ganshun666/micropython,mhoffma/micropython,turbinenreiter/micropython,suda/micropython,dhylands/micropython,MrSurly/micropython,alex-robbins/micropython,jimkmc/micropython,xuxiaoxin/micropython,Timmenem/micropython,jmarcelino/pycom-micropython,kostyll/micropython,adafruit/circuitpython,skybird6672/micropython,ahotam/micropython,dmazzella/micropython,cloudformdesign/micropython,omtinez/micropython,cwyark/micropython,cloudformdesign/micropython,jimkmc/micropython,alex-robbins/micropython,adafruit/micropython,lbattraw/micropython,pramasoul/micropython,suda/micropython,TDAbboud/micropython,HenrikSolver/micropython,mpalomer/micropython,mhoffma/micropython,heisewangluo/micropython,tobbad/micropython,pfalcon/micropython,emfcamp/micropython,omtinez/micropython,jimkmc/micropython,matthewelse/micropython,blazewicz/micropython,MrSurly/micropython-esp32,noahchense/micropython,stonegithubs/micropython,dinau/micropython,praemdonck/micropython,AriZuu/micropython,ericsnowcurrently/micropython,misterdanb/micropython,EcmaXp/micropython,xhat/micropython,mpalomer/micropython,oopy/micropython,vriera/micropython,heisewangluo/micropython,redbear/micropython,cnoviello/micropython,tdautc19841202/micropython,kerneltask/micropython,trezor/micropython,hiway/micropython,deshipu/micropython,tuc-osg/micropython,lowRISC/micropython,ericsnowcurrently/micropython,paul-xxx/micropython,orionrobots/micropython,orionrobots/micropython,henriknelson/micropython,vriera/micropython,dxxb/micropython,pramasoul/micropython,pfalcon/micropython,cnoviello/micropython,ganshun666/micropython,feilongfl/micropython,dxxb/micropython,ahotam/micropython,paul-xxx/micropython,henriknelson/micropython,warner83/micropython,hosaka/micropython,vitiral/micropython,jmarcelino/pycom-micropython,TDAbboud/micropython,drrk/micropython,chrisdearman/micropython,feilongfl/micropython,MrSurly/micropython-esp32,vrier
a/micropython,stonegithubs/micropython,torwag/micropython,cwyark/micropython,hosaka/micropython,bvernoux/micropython,swegener/micropython,supergis/micropython,Timmenem/micropython,mhoffma/micropython,noahwilliamsson/micropython,firstval/micropython,ahotam/micropython,mpalomer/micropython,dmazzella/micropython,ernesto-g/micropython,warner83/micropython,vitiral/micropython,alex-robbins/micropython,kerneltask/micropython,turbinenreiter/micropython,blazewicz/micropython,ryannathans/micropython,blmorris/micropython,ceramos/micropython,jlillest/micropython,adamkh/micropython,trezor/micropython,tuc-osg/micropython,toolmacher/micropython,firstval/micropython,misterdanb/micropython,tuc-osg/micropython,supergis/micropython,SHA2017-badge/micropython-esp32,alex-march/micropython,alex-robbins/micropython,tobbad/micropython,emfcamp/micropython,pozetroninc/micropython,tralamazza/micropython,hiway/micropython,jmarcelino/pycom-micropython,kostyll/micropython,SHA2017-badge/micropython-esp32,redbear/micropython,lbattraw/micropython,ryannathans/micropython,lowRISC/micropython,chrisdearman/micropython,infinnovation/micropython,pramasoul/micropython,oopy/micropython,stonegithubs/micropython,galenhz/micropython,galenhz/micropython,xhat/micropython,vriera/micropython,slzatz/micropython,slzatz/micropython,MrSurly/micropython,warner83/micropython,adamkh/micropython,feilongfl/micropython,micropython/micropython-esp32,tralamazza/micropython,martinribelotta/micropython,TDAbboud/micropython,henriknelson/micropython,PappaPeppar/micropython,adafruit/circuitpython,ChuckM/micropython,hiway/micropython,swegener/micropython,turbinenreiter/micropython,dinau/micropython,suda/micropython,neilh10/micropython,skybird6672/micropython,ruffy91/micropython,tdautc19841202/micropython,drrk/micropython,danicampora/micropython,xhat/micropython,utopiaprince/micropython,omtinez/micropython,slzatz/micropython,lbattraw/micropython,chrisdearman/micropython,pozetroninc/micropython,jlillest/micropython,warner83/micropyth
on,HenrikSolver/micropython,AriZuu/micropython,micropython/micropython-esp32,EcmaXp/micropython,ernesto-g/micropython,martinribelotta/micropython,PappaPeppar/micropython,alex-robbins/micropython,vitiral/micropython,skybird6672/micropython,pozetroninc/micropython,jlillest/micropython,lowRISC/micropython,deshipu/micropython,jlillest/micropython,puuu/micropython,matthewelse/micropython,stonegithubs/micropython,xhat/micropython,pozetroninc/micropython,dinau/micropython,chrisdearman/micropython,AriZuu/micropython,mgyenik/micropython,ChuckM/micropython,EcmaXp/micropython,tralamazza/micropython,galenhz/micropython,tobbad/micropython,alex-march/micropython,lbattraw/micropython,cwyark/micropython,rubencabrera/micropython,bvernoux/micropython,hiway/micropython,turbinenreiter/micropython,Peetz0r/micropython-esp32,ruffy91/micropython,MrSurly/micropython,jimkmc/micropython,noahwilliamsson/micropython,praemdonck/micropython,ChuckM/micropython,oopy/micropython,orionrobots/micropython,MrSurly/micropython,xuxiaoxin/micropython,danicampora/micropython,puuu/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,MrSurly/micropython-esp32,pfalcon/micropython,ruffy91/micropython,torwag/micropython,cloudformdesign/micropython,Timmenem/micropython,vitiral/micropython,noahwilliamsson/micropython,mianos/micropython,Peetz0r/micropython-esp32,kostyll/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,hosaka/micropython,oopy/micropython,dmazzella/micropython,utopiaprince/micropython,utopiaprince/micropython,martinribelotta/micropython,drrk/micropython,torwag/micropython,tobbad/micropython,suda/micropython,orionrobots/micropython,pramasoul/micropython,matthewelse/micropython,TDAbboud/micropython,omtinez/micropython,emfcamp/micropython,blmorris/micropython,misterdanb/micropython,dinau/micropython,ernesto-g/micropython,blmorris/micropython,kostyll/micropython,praemdonck/micropython,neilh10/micropython,HenrikSolver/micropython,jlillest/micropython,ceramos/micropython,emfcamp/
micropython,EcmaXp/micropython,micropython/micropython-esp32,TDAbboud/micropython,adafruit/micropython,firstval/micropython,swegener/micropython,trezor/micropython,ryannathans/micropython,hiway/micropython,redbear/micropython,ericsnowcurrently/micropython,mhoffma/micropython,pozetroninc/micropython,firstval/micropython,blazewicz/micropython,trezor/micropython,bvernoux/micropython,PappaPeppar/micropython,slzatz/micropython,mianos/micropython,adafruit/micropython,rubencabrera/micropython,henriknelson/micropython,dhylands/micropython,galenhz/micropython,heisewangluo/micropython,bvernoux/micropython,danicampora/micropython,dhylands/micropython,Peetz0r/micropython-esp32 | tests/basics/ordereddict1.py | tests/basics/ordereddict1.py | try:
from collections import OrderedDict
except ImportError:
try:
from _collections import OrderedDict
except ImportError:
print("SKIP")
import sys
sys.exit()
d = OrderedDict([(10, 20), ("b", 100), (1, 2)])
print(list(d.keys()))
print(list(d.values()))
del d["b"]
print(list(d.keys()))
print(list(d.values()))
| mit | Python |
|
45ccdce362694f50c43828e3923fc9e3fa32c8bb | Add list_callbacks.py | Zeex/sampgdk,WopsS/sampgdk,WopsS/sampgdk,WopsS/sampgdk,Zeex/sampgdk,Zeex/sampgdk | scripts/list_callbacks.py | scripts/list_callbacks.py | #!/usr/bin/env python2
import sys
from parse_header import *
def main(argv):
for type, name, args, attrs in get_callbacks(sys.stdin.read()):
print name
if __name__ == "__main__":
main(sys.argv)
| apache-2.0 | Python |
|
f075f21b53e13d53fc26e38bcf995d55ea44df67 | Patch bump for pypi | exa-analytics/exa,tjduigna/exa,exa-analytics/exa,alexvmarch/exa,alexvmarch/exa,avmarchenko/exa,alexvmarch/exa | exa/__init__.py | exa/__init__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
'''
Exa
#########
This package creates a systematic infrastructure for an ecosystem of packages,
tailored to specific industry or academic displines, for organizing, processing,
analyzing, and visualizing data. It is built with minimal dependencies, leverages
established open-source packages, is itself extensible, and is targeted at both
industry and academic applications.
At a high level, data objects such as series or dataframes (i.e. `pandas`_
like objects) are organized into containers which track relationships between
these objects and provide methods for computation, conversion to other formats,
analysis, and visualization within the `Jupyter notebook`_ environment.
.. _pandas: http://pandas.pydata.org/pandas-docs/stable/index.html
.. _Jupyter notebook: http://jupyter.org/
'''
__exa_version__ = (0, 2, 5)
__version__ = '.'.join((str(v) for v in __exa_version__))
from exa import _config
from exa import log
from exa import test
from exa import relational
from exa import widget
from exa import math
from exa import distributed
from exa import mpl, tex
from exa import error
# User API
from exa.numerical import Series, DataFrame, Field3D, SparseSeries, SparseDataFrame
from exa.container import Container
from exa.editor import Editor
from exa.filetypes import CSV
| # -*- coding: utf-8 -*-
# Copyright (c) 2015-2016, Exa Analytics Development Team
# Distributed under the terms of the Apache License 2.0
'''
Exa
#########
This package creates a systematic infrastructure for an ecosystem of packages,
tailored to specific industry or academic displines, for organizing, processing,
analyzing, and visualizing data. It is built with minimal dependencies, leverages
established open-source packages, is itself extensible, and is targeted at both
industry and academic applications.
At a high level, data objects such as series or dataframes (i.e. `pandas`_
like objects) are organized into containers which track relationships between
these objects and provide methods for computation, conversion to other formats,
analysis, and visualization within the `Jupyter notebook`_ environment.
.. _pandas: http://pandas.pydata.org/pandas-docs/stable/index.html
.. _Jupyter notebook: http://jupyter.org/
'''
__exa_version__ = (0, 2, 4)
__version__ = '.'.join((str(v) for v in __exa_version__))
from exa import _config
from exa import log
from exa import test
from exa import relational
from exa import widget
from exa import math
from exa import distributed
from exa import mpl, tex
from exa import error
# User API
from exa.numerical import Series, DataFrame, Field3D, SparseSeries, SparseDataFrame
from exa.container import Container
from exa.editor import Editor
from exa.filetypes import CSV
| apache-2.0 | Python |
ec91d5106bfab93e0540e5dc4a0bbd9b7cea151b | add script to update old .h5 files to support normalization | pyannote/pyannote-audio,pyannote/pyannote-audio,pyannote/pyannote-audio | scripts/update_data_h5.py | scripts/update_data_h5.py | # update h5 files created by old versions of pyannote-speaker-embedding
# estimate mu/sigma and save it back to the file
# usage: update_data_h5.py /path/to/file.h5
import sys
import h5py
import numpy as np
from tqdm import tqdm
data_h5 = sys.argv[1]
with h5py.File(data_h5, mode='r') as fp:
X = fp['X']
weights, means, squared_means = zip(*(
(len(x), np.mean(x, axis=0), np.mean(x**2, axis=0))
for x in tqdm(X)))
mu = np.average(means, weights=weights, axis=0)
squared_mean = np.average(squared_means, weights=weights, axis=0)
sigma = np.sqrt(squared_mean - mu ** 2)
with h5py.File(data_h5, mode='r+') as fp:
X = fp['X']
X.attrs['mu'] = mu
X.attrs['sigma'] = sigma
| mit | Python |
|
50ba17b46c7fcc7eb42a48a5ec82e295fdbeae13 | Add missing migration | aromanovich/kozmic-ci,abak-press/kozmic-ci,artofhuman/kozmic-ci,artofhuman/kozmic-ci,abak-press/kozmic-ci,aromanovich/kozmic-ci,abak-press/kozmic-ci,artofhuman/kozmic-ci,aromanovich/kozmic-ci,aromanovich/kozmic-ci,abak-press/kozmic-ci,artofhuman/kozmic-ci | migrations/versions/25ecf1c9b3fb_introduce_deploykey_entity.py | migrations/versions/25ecf1c9b3fb_introduce_deploykey_entity.py | """Introduce DeployKey entity
Revision ID: 25ecf1c9b3fb
Revises: 1c314d48261a
Create Date: 2014-02-08 02:56:34.174597
"""
# revision identifiers, used by Alembic.
revision = '25ecf1c9b3fb'
down_revision = '1c314d48261a'
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import mysql
from kozmic.models import db, DeployKey, Project
def upgrade():
op.create_table('deploy_key',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('project_id', sa.Integer(), nullable=False),
sa.Column('gh_id', sa.Integer(), nullable=False),
sa.Column('rsa_private_key', sa.Text(), nullable=False),
sa.Column('rsa_public_key', sa.Text(), nullable=False),
sa.ForeignKeyConstraint(['project_id'], ['project.id'], ),
sa.PrimaryKeyConstraint('id')
)
select = db.select(['id', 'is_public', 'rsa_public_key', 'rsa_private_key', 'gh_key_id'],
from_obj=Project.__tablename__)
for id, is_public, rsa_public_key, rsa_private_key, gh_key_id \
in db.session.execute(select).fetchall():
if is_public:
continue
insert = DeployKey.__table__.insert().values(
project_id=id,
rsa_public_key=rsa_public_key,
rsa_private_key=rsa_private_key,
gh_id=gh_key_id)
db.session.execute(insert)
db.session.commit()
op.drop_column(u'project', 'rsa_public_key')
op.drop_column(u'project', 'rsa_private_key')
op.drop_column(u'project', 'gh_key_id')
def downgrade():
op.add_column(u'project', sa.Column('gh_key_id', mysql.INTEGER(display_width=11), nullable=False))
op.add_column(u'project', sa.Column('rsa_private_key', mysql.MEDIUMTEXT(), nullable=False))
op.add_column(u'project', sa.Column('rsa_public_key', mysql.MEDIUMTEXT(), nullable=False))
op.drop_table('deploy_key')
| bsd-3-clause | Python |
|
0e833de83903c26fb3ca04c10b140c712350a12f | Create tests.py | rmotr-curriculum-testing/learn-testing-repo | unit-3-mixed-reading-and-assignment-lessons/lesson-3-assignment-one-code-block/tests.py | unit-3-mixed-reading-and-assignment-lessons/lesson-3-assignment-one-code-block/tests.py | import unittest
class ConvertTemperatureTestCase(unittest.TestCase):
def test_fahrenheit_to_celsius(self):
self.assertEqual(convert_temperature(32, to='celsius'), 0)
def test_celsius_to_fahrenheit(self):
self.assertEqual(convert_temperature(40, to='fahrenheit'), 104)
def test_default_parameter_is_celsius(self):
self.assertEqual(convert_temperature(32), 0)
| mit | Python |
|
6c6d3d365e021918fe88450136a75bbac7a21d5c | add .percol.d | temmings/dotfiles,temmings/dotfiles,temmings/dotfiles,temmings/dotfiles,temmings/dotfiles | .percol.d/rc.py | .percol.d/rc.py | # Emacs like
percol.import_keymap({
"C-h" : lambda percol: percol.command.delete_backward_char(),
"C-d" : lambda percol: percol.command.delete_forward_char(),
"C-k" : lambda percol: percol.command.kill_end_of_line(),
"C-y" : lambda percol: percol.command.yank(),
"C-t" : lambda percol: percol.command.transpose_chars(),
"C-a" : lambda percol: percol.command.beginning_of_line(),
"C-e" : lambda percol: percol.command.end_of_line(),
"C-b" : lambda percol: percol.command.backward_char(),
"C-f" : lambda percol: percol.command.forward_char(),
"M-f" : lambda percol: percol.command.forward_word(),
"M-b" : lambda percol: percol.command.backward_word(),
"M-d" : lambda percol: percol.command.delete_forward_word(),
"M-h" : lambda percol: percol.command.delete_backward_word(),
"C-n" : lambda percol: percol.command.select_next(),
"C-p" : lambda percol: percol.command.select_previous(),
"C-v" : lambda percol: percol.command.select_next_page(),
"M-v" : lambda percol: percol.command.select_previous_page(),
"M-<" : lambda percol: percol.command.select_top(),
"M->" : lambda percol: percol.command.select_bottom(),
"C-m" : lambda percol: percol.finish(),
"C-j" : lambda percol: percol.finish(),
"C-g" : lambda percol: percol.cancel(),
})
| apache-2.0 | Python |
|
d7595d6d80468ec5f0e4bde86db8a431c4384ad3 | Solve 41. | klen/euler | 041/solution.py | 041/solution.py | # coding: utf-8
""" Project Euler problem #41. """
import math as mt
def problem():
u""" Solve the problem.
We shall say that an n-digit number is pandigital if it makes use of all
the digits 1 to n exactly once. For example, 2143 is a 4-digit pandigital
and is also prime.
What is the largest n-digit pandigital prime that exists?
Solution: any integer divisible by 3 or 9 when sum of digits is divisible
by 3 or 9. So it's mean we could check only range (4321, 7654321), because
1+2+3+4+5+6+7+8=36
Answer: 7652413
"""
for x in xrange(7654321, 4321, -2):
if is_pandigital(x) and is_prime(x):
return x
def is_pandigital(*args):
""" Check numbers is pandigital through 9. """
return '123456789'.startswith(
''.join(sorted(x for arg in args for x in str(arg))))
def is_prime(num):
""" Check number is prime. """
if is_even(num) and num != 2 or num == 1:
return False
for dd in range(3, int(mt.sqrt(num)) + 1):
if num % dd == 0:
return False
return True
def is_even(num):
""" Check for number is even. """
return num % 2 == 0
if __name__ == '__main__':
print problem()
| mit | Python |
|
b395239526a4246193aa9ce0e541538e5690f408 | Return change | bandarji/lekhan | python/reddit/least_bills.py | python/reddit/least_bills.py | def bills_needed(money):
"""Determine optimal numbers of each bill denomination for amount.
Args:
money, int: Amount of money to figure bills for
Returns:
cash, dict: Count of each type of bill needed for sum
"""
denominations = [1, 2, 5, 10, 20, 50, 100]
cash = {}
balance = money
bill_count = 0
if money > 0:
for denomination in sorted(denominations, reverse=True):
bills = balance // denomination
if bills > 0:
cash[denomination] = bills
bill_count += bills
balance = balance % denomination
return bill_count, cash
def test_bills_needed():
tests = [
1,
2,
42,
51,
123,
222,
500,
]
for test in tests:
bill_count, cash = bills_needed(test)
print('Money: {}, Bills: {} - {}'.format(test, bill_count, cash))
def main():
test_bills_needed()
main()
| apache-2.0 | Python |
|
9bcb2566afa8191e24fb4f66b3fb882724ba4083 | Test ragged getitem | explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc | thinc/tests/test_indexing.py | thinc/tests/test_indexing.py | import pytest
import numpy
from numpy.testing import assert_allclose
from thinc.types import Ragged
@pytest.fixture
def ragged():
data = numpy.zeros((20, 4), dtype="f")
lengths = numpy.array([4, 2, 8, 1, 4], dtype="i")
data[0] = 0
data[1] = 1
data[2] = 2
data[3] = 3
data[4] = 4
data[5] = 5
return Ragged(data, lengths)
def test_ragged_starts_ends(ragged):
starts = ragged._get_starts()
ends = ragged._get_ends()
assert list(starts) == [0, 4, 6, 14, 15]
assert list(ends) == [4, 6, 14, 15, 19]
def test_ragged_simple_index(ragged, i=1):
r = ragged[i]
assert_allclose(r.data, ragged.data[4:6])
assert_allclose(r.lengths, ragged.lengths[i:i+1])
def test_ragged_slice_index(ragged, start=0, end=2):
r = ragged[start:end]
size = ragged.lengths[start:end].sum()
assert r.data.shape == (size, r.data.shape[1])
assert_allclose(r.lengths, ragged.lengths[start:end])
def test_ragged_array_index(ragged):
arr = numpy.array([2, 1, 4], dtype="i")
print(arr)
r = ragged[arr]
assert r.data.shape[0] == ragged.lengths[arr].sum()
| mit | Python |
|
f433cdb41f33a7b9daeaf276bf19d2617534e781 | Add Tensor Flow | kakaba2009/MachineLearning,kakaba2009/MachineLearning,kakaba2009/MachineLearning,kakaba2009/MachineLearning | python/src/fft/fourier_nd.py | python/src/fft/fourier_nd.py | import numpy as np
import pylab as plt
import pandas as pd
from numpy import fft
import src.mylib.mfile as mfile
def bandpass_filter(x, freq, frequency_of_signal=0, band=0.1):
if (frequency_of_signal - band) < abs(freq) < (frequency_of_signal + band):
return x
else:
return 0
d1 = mfile.loadClose('JPY=X', '../db/forex.db')
d2 = mfile.loadClose('GBP=X', '../db/forex.db')
ds = pd.concat([d1, d2], axis=1, join='inner')
x = ds.values
for i in range(1):
x = x[i:]
N = len(x)
spectrum = fft.fftn(x)
feq = fft.fftfreq(N) # frequencies
ampli = np.absolute(spectrum) # amplitude
phase = np.angle(spectrum) # phase
#print(phase)
index = np.argsort(-ampli, axis = 0)
sfreq = feq[index]
sampl = ampli[index]
#print(sampl[1:10])
#sfreq = np.where(sfreq > 0)
#big = list(zip(*sfreq))
print(sfreq[1:10] * N)
plt.plot(sfreq * N, 'o')
#F_filtered = np.asanyarray([bandpass_filter(x, freq) for x, freq in zip(spectrum, feq)])
#filtered_signal = np.fft.ifft(F_filtered)
#plt.semilogy(feq[1:], ampli[1:]), 'o') #zero feq is very large
#plt.semilogy(ampli[1:])
plt.legend()
plt.show()
| apache-2.0 | Python |
|
470063b8d468394432e729e7417c88263614b5f0 | Create msub_cluster.py | crazyhottommy/ChIP-seq-analysis | snakemake_ChIPseq_pipeline/msub_cluster.py | snakemake_ChIPseq_pipeline/msub_cluster.py | mit | Python |
||
a7b0fc1effd0e68018bc3c33f1dc0b952b23003b | update nav access restrictions | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/migrations/0095_update_nav_tree_access_restrictions.py | accelerator/migrations/0095_update_nav_tree_access_restrictions.py | # Generated by Django 2.2.24 on 2022-03-29 16:24
from django.db import migrations
def update_nav_access_restrictions(apps, schema_editor):
NavTreeItem = apps.get_model('accelerator', 'NavTreeItem')
urls = ['/judging/panel/', '/judging/commitments/', '/nav/judging']
nav_items = NavTreeItem.objects.filter(url__in=urls)
# remove user_type restrictions
nav_items.update(user_type='')
# remove user_role restrictions
[nav_item.user_role.clear() for nav_item in nav_items]
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0094_alter_startup_organization_fields'),
]
operations = [
migrations.RunPython(update_nav_access_restrictions,
migrations.RunPython.noop)
]
| mit | Python |
|
645efb8ffcc3c9a3e41db2619430ffcb7a6d570f | Migrate Req/Ass to have verified/finished date | andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,josthkko/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,AleksNeStu/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,kr41/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,kr41/ggrc-core | src/ggrc/migrations/versions/20160314140338_4fd36860d196_add_finished_date_to_request_and_.py | src/ggrc/migrations/versions/20160314140338_4fd36860d196_add_finished_date_to_request_and_.py | # Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""
add finished date to request and assessment
Create Date: 2016-03-14 14:03:38.026877
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = '4fd36860d196'
down_revision = '39aec99639d5'
def upgrade_table(table):
"""Add columns finished_date and verified_date and populate them."""
op.add_column(table,
sa.Column('finished_date', sa.DateTime(), nullable=True))
op.add_column(table,
sa.Column('verified_date', sa.DateTime(), nullable=True))
op.execute("""
UPDATE {}
SET finished_date = updated_at
WHERE status in ("Finished", "Verified", "Final")
""".format(table))
op.execute("""
UPDATE {}
SET verified_date = updated_at, status = "Final"
WHERE status = "Verified"
""".format(table))
def upgrade():
upgrade_table('requests')
upgrade_table('assessments')
def downgrade():
"""Remove verified_date and finished_date columns."""
op.drop_column('assessments', 'verified_date')
op.drop_column('assessments', 'finished_date')
op.drop_column('requests', 'verified_date')
op.drop_column('requests', 'finished_date')
| apache-2.0 | Python |
|
b05ab5401e716eab305625ad369ec5d362ae491a | add andromercury tool | eight-pack-abdominals/androguard,zhakui/androguard,HackLinux/androguard,subho007/androguard,xysec/androguard,cdht/androguard,ropik/androguard,code4bones/androguard,subho007/androguard,eight-pack-abdominals/androguard,urwithajit9/androguard,Ever-Never/androguard,eight-pack-abdominals/androguard,mohsinjuni/androguard,jbaginski/androguard,palaniyappanBala/androguard,ropik/androguard,jbaginski/androguard,zyrikby/androguard,Ever-Never/androguard,drewet/androguard,cdht/androguard,lowks/androguard,dweinstein/androguard,xyzy/androguard-1,cdht/androguard,drewet/androguard,revolutionaryG/androguard,eight-pack-abdominals/androguard,benedeku/HolaMundo,cdht/androguard,ropik/androguard,sigma-random/androguard,zhakui/androguard,xwolf12/androguard,urwithajit9/androguard,sigma-random/androguard,mohsinjuni/androguard,drewet/androguard,subho007/androguard,tuusberg/androguard,code4bones/androguard,reox/androguard,drewet/androguard,zhakui/androguard,ropik/androguard,tuusberg/androguard,ropik/androguard,RoyZeng/androguard,drewet/androguard,jslhs/androguard,shuxin/androguard,draekko/androguard,jbremer/androguard,zyrikby/androguard,cdht/androguard,HackLinux/androguard,ApplauseAQI/androguard,guizos/androguard-acid,jbremer/androguard,RoyZeng/androguard,palaniyappanBala/androguard,Ever-Never/androguard,guizos/androguard-acid,ropik/androguard,cdht/androguard,palaniyappanBala/androguard,lowks/androguard,androguard/androguard,tuusberg/androguard,Fuzion24/androguard,drewet/androguard,benedeku/HolaMundo,xysec/androguard,draekko/androguard,aselims/androguard,lowks/androguard,aselims/androguard,blaquee/androguard,Fuzion24/androguard,jbremer/androguard,sigma-random/androguard,blaquee/androguard,HackLinux/androguard,jbaginski/androguard,urwithajit9/androguard,xyzy/androguard-1,jslhs/androguard,aselims/androguard,jslhs/androguard,blaquee/androguard,jbaginski/androguard,androguard/androguard,dweinstein/androguard,huangtao2003/androguard,
jslhs/androguard,jslhs/androguard,xysec/androguard,jbremer/androguard,ApplauseAQI/androguard,zyrikby/androguard,jbaginski/androguard,draekko/androguard,guizos/androguard-acid,code4bones/androguard,ApplauseAQI/androguard,eight-pack-abdominals/androguard,RoyZeng/androguard,lowks/androguard,ApplauseAQI/androguard,xyzy/androguard-1,RoyZeng/androguard,eight-pack-abdominals/androguard,revolutionaryG/androguard,dweinstein/androguard,jbaginski/androguard,sigma-random/androguard,xwolf12/androguard,benedeku/HolaMundo,sigma-random/androguard,mohsinjuni/androguard,revolutionaryG/androguard,draekko/androguard,jslhs/androguard,sigma-random/androguard,Fuzion24/androguard,xwolf12/androguard | andromercury.py | andromercury.py | #!/usr/bin/env python
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Androguard is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Androguard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Androguard. If not, see <http://www.gnu.org/licenses/>.
import sys, re, os
from optparse import OptionParser
from androguard.core.bytecodes import apk
sys.path.append("./elsim/")
from elsim.elsign import dalvik_elsign
sys.path.append("./mercury/client")
from merc.lib.common import Session
option_0 = { 'name' : ('-l', '--list'), 'help' : 'list all packages', 'nargs' : 1 }
option_1 = { 'name' : ('-i', '--input'), 'help' : 'get specific packages (a filter)', 'nargs' : 1 }
option_2 = { 'name' : ('-r', '--remotehost'), 'help' : 'specify ip of emulator/device', 'nargs' : 1 }
option_3 = { 'name' : ('-p', '--port'), 'help' : 'specify the port', 'nargs' : 1 }
option_4 = { 'name' : ('-o', '--output'), 'help' : 'output directory to write packages', 'nargs' : 1 }
option_5 = { 'name' : ('-b', '--database'), 'help' : 'database : use this database', 'nargs' : 1 }
option_6 = { 'name' : ('-c', '--config'), 'help' : 'use this configuration', 'nargs' : 1 }
option_7 = { 'name' : ('-v', '--verbose'), 'help' : 'display debug information', 'action' : 'count' }
options = [option_0, option_1, option_2, option_3, option_4, option_5, option_6, option_7]
def display(ret, debug) :
print "---->", ret[0],
def main(options, arguments) :
sessionip = "127.0.0.1"
sessionport = 31415
if options.remotehost :
sessionip = options.remotehost
if options.port :
sessionport = int(options.port)
newsession = Session(sessionip, sessionport, "bind")
# Check if connection can be established
if newsession.executeCommand("core", "ping", None).data == "pong":
if options.list :
request = {'filter': options.list, 'permissions': None }
apks_info = newsession.executeCommand("packages", "info", {}).getPaddedErrorOrData()
print apks_info
elif options.input and options.output :
s = None
if options.database != None or options.config != None :
s = dalvik_elsign.MSignature( options.database, options.config, options.verbose != None, ps = dalvik_elsign.PublicSignature)
request = {'filter': options.input, 'permissions': None }
apks_info = newsession.executeCommand("packages", "info", request).getPaddedErrorOrData()
print apks_info
for i in apks_info.split("\n") :
if re.match("APK path:", i) != None :
name_app = i.split(":")[1][1:]
print name_app,
response = newsession.downloadFile(name_app, options.output)
print response.data, response.error,
if s != None :
a = apk.APK( options.output + "/" + os.path.basename(name_app) )
if a.is_valid_APK() :
display( s.check_apk( a ), options.verbose )
print
else:
print "\n**Network Error** Could not connect to " + sessionip + ":" + str(sessionport) + "\n"
if __name__ == "__main__" :
parser = OptionParser()
for option in options :
param = option['name']
del option['name']
parser.add_option(*param, **option)
options, arguments = parser.parse_args()
sys.argv[:] = arguments
main(options, arguments)
| apache-2.0 | Python |
|
bad82f3c77dbeebdc2332d193f0c8290c5186862 | add rudimentary api | mitdbg/aurum-datadiscovery,mitdbg/aurum-datadiscovery,mitdbg/aurum-datadiscovery,mitdbg/aurum-datadiscovery,mitdbg/aurum-datadiscovery,mitdbg/aurum-datadiscovery | frontend/app.py | frontend/app.py | import os
import sys
import inspect
import json
from flask import Flask
# move to top level
currentdir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
sys.path.insert(0, parentdir)
from api.reporting import Report
from api.apiutils import Scope, Relation
from modelstore.elasticstore import StoreHandler
from knowledgerepr import fieldnetwork
from algebra import API
path_to_serialized_model = parentdir + "/test/testmodel/"
network = fieldnetwork.deserialize_network(path_to_serialized_model)
store_client = StoreHandler()
api = API(network, store_client)
keyword_search = api.keyword_search
neighbor_search = api.neighbor_search
union = api.union
intersection = api.intersection
difference = api.difference
db = Scope.DB
source = Scope.SOURCE
feld = Scope.FIELD
content = Scope.CONTENT
schema = Relation.SCHEMA
schema_sim = Relation.SCHEMA_SIM
content_sim = Relation.CONTENT_SIM
entity_sim = Relation.ENTITY_SIM
pkfk = Relation.PKFK
app = Flask(__name__)
@app.route('/query/<query>')
def query(query):
try:
res = eval(query)
res = json.dumps(res.data)
except Exception as e:
res = "error: " + str(e)
return res
@app.route('/convert/<nid>')
def convert(nid):
try:
import pdb; pdb.set_trace()
nid = int(nid)
res = api._general_to_drs(nid)
res = json.dumps(res.data)
except Exception as e:
res = "error: " + str(e)
return res
if __name__ == '__main__':
app.run()
| mit | Python |
|
37baa669ed1e00fabddd33478fa75f4047075ce3 | Create Python object detection script. | vancezuo/cs473-vision | cs473vision/ObjectDetector.py | cs473vision/ObjectDetector.py | '''
Created on Feb 28, 2014
@author: Vance Zuo
'''
import numpy
import cv2
class ObjectDetector(object):
'''
classdocs
'''
def __init__(self, params):
'''
Constructor
'''
self.bg_img = None
self.fg_img = None
return
def load_image(self, bg_path, fg_path):
self.bg_img = cv2.imread(bg_path)
self.fg_img = cv2.imread(fg_path)
return True
def subtract_background(self):
# Take simple difference
naive = cv2.absdiff(self.bg_img, self.bg_img)
cv2.imwrite("naive.png", naive)
# MOG Subtraction
bg_subtractor = cv2.BackgroundSubtractorMOG()
bg_mask = bg_subtractor.apply(self.bg_img)
fg_mask = bg_subtractor.apply(self.fg_img)
cv2.imwrite("MOG.png", fg_mask)
# MOG2 Subtraction
bg_subtractor = cv2.BackgroundSubtractorMOG2()
bg_mask = bg_subtractor.apply(self.bg_img)
fg_mask = bg_subtractor.apply(self.fg_img)
cv2.imwrite("MOG2.png", fg_mask)
return | mit | Python |
|
4aacc8d55c138c405d561bbf9ddfd9ddab483e62 | add wxPython example. | devlights/try-python | trypython/extlib/gui/wx01.py | trypython/extlib/gui/wx01.py | """
wxPython についてのサンプルです
お決まりの Hello world について
REFERENCES:: http://bit.ly/2OcHRh7
"""
# noinspection PyPackageRequirements
import wx
from trypython.common.commoncls import SampleBase
class Sample(SampleBase):
def exec(self):
app = wx.App()
frm = wx.Frame(parent=None, title='Hello World')
frm.Show()
app.MainLoop()
def go():
obj = Sample()
obj.exec()
if __name__ == '__main__':
go()
| mit | Python |
|
9566d71a267015005ad937cd92bd105d2ffff274 | print users with multiple matching urls | tapilab/twnews,tapilab/twnews | twnews/print_urls_by_user.py | twnews/print_urls_by_user.py | """
Print users who were found for multiple urls.
user url1 score1 url2 score2 ...
"""
import codecs
from collections import defaultdict
import json
import sys
from . import __data__
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
def print_urls_by_user(tweets_file=__data__ + '/tweets.json'):
user2urls = defaultdict(lambda: set())
url2score = defaultdict(lambda: 0.)
inf = codecs.open(tweets_file, 'rt', 'utf-8')
for line in inf:
js = json.loads(line)
if 'url_query' in js: # valid line
user2urls[js['user']['screen_name']].add(js['url_query'])
url2score[js['url_query']] = float(js['url_score'])
for user, urls in user2urls.iteritems():
if len(urls) > 1:
print user + '\t' + '\t'.join('%s\t%.3f' % (u, url2score[u]) for u in urls)
if __name__ == '__main__':
print_urls_by_user()
| bsd-3-clause | Python |
|
e5627134d9a2c052a523f66a2ec9867b3432fae2 | Test Issue #461: ent_iob tag incorrect after setting entities. | recognai/spaCy,spacy-io/spaCy,Gregory-Howard/spaCy,banglakit/spaCy,banglakit/spaCy,recognai/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,aikramer2/spaCy,banglakit/spaCy,explosion/spaCy,recognai/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,recognai/spaCy,explosion/spaCy,recognai/spaCy,recognai/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,explosion/spaCy,banglakit/spaCy,Gregory-Howard/spaCy,honnibal/spaCy,spacy-io/spaCy,aikramer2/spaCy,explosion/spaCy,banglakit/spaCy,raphael0202/spaCy,raphael0202/spaCy,aikramer2/spaCy,aikramer2/spaCy,spacy-io/spaCy,oroszgy/spaCy.hu,oroszgy/spaCy.hu,explosion/spaCy,oroszgy/spaCy.hu,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy | spacy/tests/tokens/test_add_entities.py | spacy/tests/tokens/test_add_entities.py | from __future__ import unicode_literals
import spacy
from spacy.vocab import Vocab
from spacy.matcher import Matcher
from spacy.tokens.doc import Doc
from spacy.attrs import *
from spacy.pipeline import EntityRecognizer
import pytest
@pytest.fixture(scope="module")
def en_vocab():
return spacy.get_lang_class('en').Defaults.create_vocab()
@pytest.fixture(scope="module")
def entity_recognizer(en_vocab):
return EntityRecognizer(en_vocab, features=[(2,), (3,)])
@pytest.fixture
def animal(en_vocab):
return nlp.vocab.strings[u"ANIMAL"]
@pytest.fixture
def doc(en_vocab, entity_recognizer):
doc = Doc(en_vocab, words=[u"this", u"is", u"a", u"lion"])
entity_recognizer(doc)
return doc
def test_set_ents_iob(doc):
assert len(list(doc.ents)) == 0
tags = [w.ent_iob_ for w in doc]
assert tags == (['O'] * len(doc))
doc.ents = [(doc.vocab.strings['ANIMAL'], 3, 4)]
tags = [w.ent_iob_ for w in doc]
assert tags == ['O', 'O', 'O', 'B']
doc.ents = [(doc.vocab.strings['WORD'], 0, 2)]
tags = [w.ent_iob_ for w in doc]
assert tags == ['B', 'I', 'O', 'O']
| mit | Python |
|
7cee0980a67b827e4cf06c15e0f1c3b412f68c22 | Create main.py to actually perform the test | rkawauchi/IHK,rkawauchi/IHK | main.py | main.py | import io
import argparse
def initialize_argument_parser():
parser = argparse.ArgumentParser(description='Simulate Indian health solutions')
parser.add_argument('-s', '--solution', dest='solution',
help='the solution to test', default='health kiosk')
return vars(parser.parse_args())
if __name__ == "__main__":
args = initialize_argument_parser()
print 'Solution to test:', args['solution']
| bsd-3-clause | Python |
|
233c7f0152a05afcef62b596f7c90abe7e9eec90 | add a base pipeline for converting json to bigquery | Jigsaw-Code/censoredplanet-analysis,Jigsaw-Code/censoredplanet-analysis | pipeline/flatten_json.py | pipeline/flatten_json.py | """Beam pipeline for converting json scan files into bigquery tables."""
from __future__ import absolute_import
import argparse
import json
import logging
from pprint import pprint
import apache_beam as beam
from apache_beam.io import ReadFromText
from apache_beam.io.gcp.internal.clients import bigquery
from apache_beam.options.pipeline_options import PipelineOptions
from apache_beam.options.pipeline_options import SetupOptions
bigquery_schema = {
'domain': 'string',
'ip': 'string',
'date': 'date',
'start_time': 'timestamp',
'end_time': 'timestamp',
'retries': 'integer',
'sent': 'string',
'received': 'string',
'error': 'string',
'blocked': 'boolean',
'success': 'boolean',
'fail_sanity': 'boolean',
'stateful_block': 'boolean'
}
# Future fields
"""
'row_number', 'integer',
'domain_category': 'string',
'netblock': 'string',
'asn': 'string',
'as_name': 'string',
'as_full_name': 'string',
'as_traffic': 'integer',
'as_class': 'string',
'country': 'string',
"""
def get_bigquery_schema():
"""Return a beam bigquery schema for the output table."""
table_schema = bigquery.TableSchema()
for (name, field_type) in bigquery_schema.items():
field_schema = bigquery.TableFieldSchema()
field_schema.name = name
field_schema.type = field_type
field_schema.mode = 'nullable' # all fields are flat
table_schema.fields.append(field_schema)
return table_schema
def flatten_measurement(line):
"""Flattens a measurement string into several roundtrip rows.
Args:
line: a json string describing a censored planet measurement. example
{'Keyword': 'test.com,
'Server': '1.2.3.4',
'Results': [{'Success': true},
{'Success': false}]}
Returns:
an array of dicts containing individual roundtrip information
[{'column_name': field_value}]
example
[{'domain': 'test.com', 'ip': '1.2.3.4', 'success': true}
{'domain': 'test.com', 'ip': '1.2.3.4', 'success': true}]
"""
scan = json.loads(line)
pprint(scan)
rows = []
for result in scan['Results']:
rows.append({
'domain': scan['Keyword'],
'ip': scan['Server'],
'date': result['StartTime'][:10],
'start_time': result['StartTime'],
'end_time': result['EndTime'],
'retries': scan['Retries'],
'sent': result['Sent'],
'received': result['Received'],
'error': result['Error'],
'blocked': scan['Blocked'],
'success': result['Success'],
'fail_sanity': scan['FailSanity'],
'stateful_block': scan['StatefulBlock'],
})
pprint(rows)
return rows
def run(argv=None, save_main_session=True):
parser = argparse.ArgumentParser()
parser.add_argument(
'--input',
dest='input',
# default='gs://firehook-scans/echo/CP_Quack-echo-2018-07-27-15-20-11/results.json',
default='gs://firehook-dataflow-test/results-short.json',
help='Input file to process.')
parser.add_argument(
'--output',
dest='output',
default='firehook-censoredplanet:test.example',
help='Output file to write results to.')
known_args, pipeline_args = parser.parse_known_args(argv)
pipeline_args.extend([
# DataflowRunner or DirectRunner
'--runner=DirectRunner',
'--project=firehook-censoredplanet',
'--region=us-east1',
'--staging_location=gs://firehook-dataflow-test/staging',
'--temp_location=gs://firehook-dataflow-test/temp',
'--job_name=flatten-json-job',
])
pipeline_options = PipelineOptions(pipeline_args)
pipeline_options.view_as(SetupOptions).save_main_session = save_main_session
with beam.Pipeline(options=pipeline_options) as p:
lines = p | ReadFromText(known_args.input)
rows = (lines | 'flatten json' >> (beam.FlatMap(flatten_measurement)))
rows | 'Write' >> beam.io.WriteToBigQuery(
known_args.output,
schema=get_bigquery_schema(),
create_disposition=beam.io.BigQueryDisposition.CREATE_IF_NEEDED,
# WRITE_TRUNCATE is slow when testing.
# write_disposition=beam.io.BigQueryDisposition.WRITE_TRUNCATE
write_disposition=beam.io.BigQueryDisposition.WRITE_APPEND)
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
run()
| apache-2.0 | Python |
|
361c3496274a960c5e927899a39618f8fee9db0a | Add Basic Trellis Histogram to Examples | jakevdp/altair,ellisonbg/altair,altair-viz/altair | altair/vegalite/v2/examples/trellis_histogram.py | altair/vegalite/v2/examples/trellis_histogram.py | """
Trellis Histogram
-----------------
This example shows how to make a basic trellis histogram.
https://vega.github.io/vega-lite/examples/trellis_bar_histogram.html
"""
import altair as alt
cars = alt.load_dataset('cars')
chart = alt.Chart(cars).mark_bar().encode(
x=alt.X("Horsepower",
type="quantitative",
bin=alt.BinTransform(
maxbins=15
)),
y='count(*):Q',
row='Origin'
)
| bsd-3-clause | Python |
|
a7e8b3a2b63ed4717a783754446b30c4f288e76a | Create channelinfo.py | appu1232/Selfbot-for-Discord | cogs/channelinfo.py | cogs/channelinfo.py | import discord
from discord.ext import commands
from random import randint, choice
class Channelinfo:
"""Shows Channel infos."""
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['channel', 'cinfo', 'ci'], pass_context=True, no_pm=True)
async def channelinfo(self, ctx, *, channel: discord.channel=None):
"""Shows channel informations"""
if not channel:
channel = ctx.message.channel
# else:
# channel = ctx.message.guild.get_channel(int(chan))
# if not channel: channel = self.bot.get_channel(int(chan))
data = discord.Embed()
content = None
if hasattr(channel, 'mention'):
content = self.bot.bot_prefix+"**Informations about Channel:** "+channel.mention
if hasattr(channel, 'changed_roles'):
if len(channel.changed_roles) > 0:
if channel.changed_roles[0].permissions.read_messages:
data.color = discord.Colour.green()
else: data.color = discord.Colour.red()
if isinstance(channel, discord.TextChannel): _type = "Text"
elif isinstance(channel, discord.VoiceChannel): _type = "Voice"
else: _type = "Unknown"
data.add_field(name="Type", value=_type)
data.add_field(name="ID", value=channel.id)
if hasattr(channel, 'position'):
data.add_field(name="Position", value=channel.position)
if isinstance(channel, discord.VoiceChannel):
if channel.user_limit != 0:
data.add_field(name="User Number", value="{}/{}".format(len(channel.voice_members), channel.user_limit))
else:
data.add_field(name="User Number", value="{}".format(len(channel.voice_members)))
userlist = [r.display_name for r in channel.members]
if not userlist:
userlist = "None"
else:
userlist = "\n".join(userlist)
data.add_field(name="Users", value=userlist)
data.add_field(name="Bitrate", value=channel.bitrate)
elif isinstance(channel, discord.TextChannel):
if channel.members:
data.add_field(name="Members", value="%s"%len(channel.members))
if channel.topic:
data.add_field(name="Topic", value=channel.topic, inline=False)
_hidden = []; _allowed = []
for role in channel.changed_roles:
if role.permissions.read_messages: _allowed.append(role.mention)
else: _hidden.append(role.mention)
if len(_allowed) > 0: data.add_field(name='Allowed Roles (%s)'%len(_allowed), value=', '.join(_allowed), inline=False)
if len(_hidden) > 0: data.add_field(name='Restricted Roles (%s)'%len(_hidden), value=', '.join(_hidden), inline=False)
if channel.created_at:
data.set_footer(text=("Created on {} ({} days ago)".format(channel.created_at.strftime("%d %b %Y %H:%M"), (ctx.message.created_at - channel.created_at).days)))
# try:
await ctx.send(content if content else None, embed=data)
# except:
# await ctx.send(self.bot.bot_prefix+"I need the `Embed links` permission to send this")
def setup(bot):
bot.add_cog(Channelinfo(bot))
| mit | Python |
|
333453fe6a74d7bada941ee7aeed3660452efcaf | add tests | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | tests/install_tests/test_cupy_builder/test_command.py | tests/install_tests/test_cupy_builder/test_command.py | from cupy_builder._command import filter_files_by_extension
def test_filter_files_by_extension():
sources_cpp = ['a.cpp', 'b.cpp']
sources_pyx = ['c.pyx']
sources = sources_cpp + sources_pyx
assert filter_files_by_extension(
sources, '.cpp') == (sources_cpp, sources_pyx)
assert filter_files_by_extension(
sources, '.pyx') == (sources_pyx, sources_cpp)
assert filter_files_by_extension(
sources, '.cu') == ([], sources)
assert filter_files_by_extension(
sources_cpp, '.cpp') == (sources_cpp, [])
assert filter_files_by_extension(
sources_cpp, '.pyx') == ([], sources_cpp)
| mit | Python |
|
8a7963644ff470fc7da8bc22a7f3fd6ef1be3ed2 | define password generator. | soasme/rio,soasme/rio,soasme/rio | rio/utils/token.py | rio/utils/token.py | # -*- coding: utf-8 -*-
"""
rio.utils.token
~~~~~~~~~~~~~~~
"""
import random
import string
def password_generator(length):
"""Generate a random password.
:param length: integer.
"""
return ''.join(random.choice(string.ascii_lowercase + string.digits)
for _ in range(length))
| mit | Python |
|
1d4397860ffd297eb02b5f4b96b0e18a3c7e12cd | Add test recipes. | ulule/django-badgify,ulule/django-badgify | badgify/tests/recipes.py | badgify/tests/recipes.py | # -*- coding: utf-8 -*-
from ..recipe import BaseRecipe
from ..compat import get_user_model
class BadRecipe(object):
pass
class NotImplementedRecipe(BaseRecipe):
pass
class Recipe1(BaseRecipe):
name = 'Recipe 1'
slug = 'recipe1'
description = 'Recipe 1 description'
@property
def image(self):
return 'image'
@property
def user_ids(self):
return (get_user_model().objects.filter(love_python=True)
.values_list('id', flat=True))
class Recipe2(BaseRecipe):
name = 'Recipe 2'
slug = 'recipe2'
description = 'Recipe 2 description'
@property
def image(self):
return 'image'
@property
def user_ids(self):
return []
| mit | Python |
|
f6c07ad620f7e3ef73fdad5f2d97d1dd911e390f | Create signal.py | mh-g/check-mk_signal-cli,mh-g/check-mk_signal-cli | signal.py | signal.py | #!/usr/bin/python3
# Send via Signal
import os
from pydbus import SystemBus
from dateutil import parser
from datetime import datetime
eventhostname=os.environ.get ("NOTIFY_HOSTNAME", "<no host>")
eventdatetimeString=os.environ.get ("NOTIFY_LONGDATETIME", "<no time>")
if eventdatetimeString == "<no time>":
eventdatetime = datetime(2017,1,1)
else:
eventdatetime = parser.parse (eventdatetimeString)
eventduration=os.environ.get ("NOTIFY_LASTHOSTSTATECHANGE_REL", "<no duration>")
eventhoststate=os.environ.get ("NOTIFY_HOSTSTATE", "<no state>")
eventlasthoststate=os.environ.get ("NOTIFY_LASTHOSTSTATE", "<no last state>")
bus = SystemBus()
signal = bus.get ("org.asamk.Signal")
message = eventhostname + ": " + eventhoststate + " after " + eventlasthoststate + " (" + eventdurati$
signal.sendMessage(message, [], ['+XXXXXXXXXXXXXXXXXXX'])
| mit | Python |
|
269e9821a52f1d68e2a48beb76b20c227dc84b55 | add 0007(count one file omitting multiple-line comment) | lz199144/python,ZSeaPeng/python,haiyangd/python-show-me-the-code-,Yrthgze/prueba-sourcetree2,DIYgod/python,hooting/show-me-the-code-python,ZSeaPeng/python,yangzilong1986/python,dominjune/python,xiaoixa/python,Jaccorot/python,luoxufeiyan/python,JiYouMCC/python,merfii/PythonExercises,zhenglaizhang/python,karnikamit/python,Show-Me-the-Code/python,sravaniaitha/python,xiaoixa/python,YGIronMan/python,DIYgod/python,agogear/python-1,fairyzoro/python,snailwalker/python,YGIronMan/python,Ph0enixxx/python,keysona/python,lz199144/python,dominjune/python,karnikamit/python,starlightme/python,renzongxian/Show-Me-the-Code,ZSeaPeng/python,dominjune/python,Yrthgze/prueba-sourcetree2,starlightme/python,zhenglaizhang/python,Show-Me-the-Code/python,Supersuuu/python,luoxufeiyan/python,fairyzoro/python,YGIronMan/python,wangjun/python,xchaoinfo/python,yangzilong1986/python,xiaoixa/python,xchaoinfo/python,Mark24Code/python,whix/python,12wang3/python,snailwalker/python,Yrthgze/prueba-sourcetree2,tzq668766/python,keysona/python,Supersuuu/python,luoxufeiyan/python,Pritesh242/python,keysona/python,whix/python,ZuoGuocai/python,llluiop/python-1,Ph0enixxx/python,starlightme/python,YGIronMan/python,hooting/show-me-the-code-python,JiYouMCC/python,JiYouMCC/python,Pritesh242/python,llluiop/python-1,12wang3/python,ZuoGuocai/python,fairyzoro/python,zhakui/python,Pritesh242/python,merfii/PythonExercises,yangzilong1986/python,zhakui/python,Mark24Code/python,merfii/PythonExercises,hooting/show-me-the-code-python,renzongxian/Show-Me-the-Code,zhenglaizhang/python,Yrthgze/prueba-sourcetree2,DanielShangHai/python,EricSekyere/python,karnikamit/python,llluiop/python-1,Jaccorot/python,haiyangd/python-show-me-the-code-,xchaoinfo/python,12wang3/python,JiYouMCC/python,tzq668766/python,fairyzoro/python,dominjune/python,Friday21/python_show_me_the_code,DanielShangHai/python,EricSekyere/python,12wang3/python,Yrthgze/prue
ba-sourcetree2,EricSekyere/python,sravaniaitha/python,Show-Me-the-Code/python,Jaccorot/python,sravaniaitha/python,Mark24Code/python,Friday21/python_show_me_the_code,lz199144/python,karnikamit/python,lz199144/python,ZuoGuocai/python,zhakui/python,Friday21/python_show_me_the_code,ionutcipriananescu/python,whix/python,Friday21/python_show_me_the_code,snailwalker/python,agogear/python-1,haiyangd/python-show-me-the-code-,xiaoixa/python,hooting/show-me-the-code-python,karnikamit/python,xchaoinfo/python,fairyzoro/python,wangjun/python,starlightme/python,DIYgod/python,sravaniaitha/python,wangjun/python,Pritesh242/python,sravaniaitha/python,wangjun/python,DIYgod/python,ZSeaPeng/python,Friday21/python_show_me_the_code,whix/python,zhakui/python,llluiop/python-1,ZSeaPeng/python,snailwalker/python,yangzilong1986/python,llluiop/python-1,Mark24Code/python,ionutcipriananescu/python,DanielShangHai/python,Pritesh242/python,merfii/PythonExercises,DanielShangHai/python,keysona/python,Ph0enixxx/python,haiyangd/python-show-me-the-code-,merfii/PythonExercises,ionutcipriananescu/python,whix/python,agogear/python-1,EricSekyere/python,zhenglaizhang/python,Jaccorot/python,Ph0enixxx/python,EricSekyere/python,Show-Me-the-Code/python,xchaoinfo/python,luoxufeiyan/python,keysona/python,haiyangd/python-show-me-the-code-,Yrthgze/prueba-sourcetree2,ionutcipriananescu/python,ZuoGuocai/python,renzongxian/Show-Me-the-Code,zhakui/python,Supersuuu/python,lz199144/python,dominjune/python,hooting/show-me-the-code-python,Supersuuu/python,Jaccorot/python,tzq668766/python,Mark24Code/python,renzongxian/Show-Me-the-Code,wangjun/python,xiaoixa/python,YGIronMan/python,starlightme/python,12wang3/python,tzq668766/python,agogear/python-1,yangzilong1986/python,JiYouMCC/python,agogear/python-1,ZuoGuocai/python,ionutcipriananescu/python,snailwalker/python,Show-Me-the-Code/python,Ph0enixxx/python,luoxufeiyan/python,DanielShangHai/python,zhenglaizhang/python,renzongxian/Show-Me-the-Code,Show-Me-the-Code/python,tzq668766/p
ython | robot527/0007/count_codes.py | robot527/0007/count_codes.py | #! usr/bin/python3
"""
第 0007 题:有个目录,里面是你自己写过的程序,统计一下你写过多少行代码。
包括空行和注释,但是要分别列出来。
"""
class PyfileInfo:
def __init__(self, file):
self.file_name = file
self.total_line_num = 0
self.blank_line_num = 0
self.comment_line_num = 0
def count_lines(self):
if self.file_name[-3:] != '.py':
print(self.file_name + ' is not a .py file!')
return
try:
with open(self.file_name) as code:
for each_line in code:
self.total_line_num += 1
temp = each_line.strip()
if temp == '':
self.blank_line_num += 1
elif temp[0] == '#':
self.comment_line_num += 1
except IOError as err:
print('File error: ' + str(err))
import os
target_path = '.'
file_list = [f for f in os.listdir(target_path)
if os.path.isfile(os.path.join(target_path, f))]
#print(file_list, len(file_list))
pyfile_list = [os.path.join(target_path, f) for f in file_list
if f[-3:] == '.py']
print(pyfile_list[0])
pyf1 = PyfileInfo(pyfile_list[0])
pyf1.count_lines()
#pyf2 = PyfileInfo('test.c')
#pyf2.count_lines()
print('==' * 18)
print('Total line number is:', pyf1.total_line_num)
print('Blank line number is:', pyf1.blank_line_num)
print('Comment line number is:', pyf1.comment_line_num)
| mit | Python |
|
74d8390dce4dd5a8fe8a6f7f4304b80afadfec1d | Add basic calendar framework | chrisfrederickson/firepi,chrisfrederickson/firepi,chrisfrederickson/firepi,chrisfrederickson/firepi | app/calender.py | app/calender.py | class CalenderManager:
def __init__(self):
pass
def add_event(self, user, starttime, endtime):
pass
def get_current_event(self):
#Return NONE if no event
pass
def remove_event(self, id):
pass
def get_upcoming_events(self, num):
pass | mit | Python |
|
93a03023cf0c73b78680d431c828f4b9ebc49802 | add disk_cache.py | EscapeLife/web_crawler | 3.下载缓存/3.disk_cache.py | 3.下载缓存/3.disk_cache.py | #!/usr/bin/env python
# coding:utf-8
import os
import re
import zlib
import shutil
import urlparse
import cPickle as pickle
from datetime import datetime, timedelta
from link_crawler import link_crawler
class DiskCache:
"""通过使用磁盘空间保存文件的方式对资源文件进行缓存
"""
def __init__(self, cache_dir='cache', expires=timedelta(days=30), compress=True):
"""设置代码保存的磁盘位置、设置文件过期时长、设置是否对文件进行压缩
"""
self.cache_dir = cache_dir
self.expires = expires
self.compress = compress
def __getitem__(self, url):
"""从磁盘加载数据的URL
"""
path = self.url_to_path(url)
if os.path.exists(path):
with open(path, 'rb') as fp:
data = fp.read()
if self.compress:
data = zlib.decompress(data)
result, timestamp = pickle.loads(data)
if self.has_expired(timestamp):
raise KeyError(url + ' has expired')
return result
else:
raise KeyError(url + ' does not exist')
def __setitem__(self, url, result):
"""这个URL保存数据到磁盘
"""
path = self.url_to_path(url)
folder = os.path.dirname(path)
if not os.path.exists(folder):
os.makedirs(folder)
data = pickle.dumps((result, datetime.utcnow()))
if self.compress:
data = zlib.compress(data)
with open(path, 'wb') as fp:
fp.write(data)
def __delitem__(self, url):
"""删除这个关键的价值和任何空的子目录
"""
path = self.url_to_path(url)
try:
os.remove(path)
os.removedirs(os.path.dirname(path))
except OSError:
pass
def url_to_path(self, url):
"""为这个URL创建文件系统路径
"""
components = urlparse.urlsplit(url)
path = components.path
if not path:
path = '/index.html'
elif path.endswith('/'):
path += 'index.html'
filename = components.netloc + path + components.query
filename = re.sub('[^/0-9a-zA-Z\-.,;_ ]', '_', filename)
filename = '/'.join(segment[:255] for segment in filename.split('/'))
return os.path.join(self.cache_dir, filename)
def has_expired(self, timestamp):
"""返回这个时间戳是否已经过期
"""
return datetime.utcnow() > timestamp + self.expires
def clear(self):
"""清除所有缓存的值
"""
if os.path.exists(self.cache_dir):
shutil.rmtree(self.cache_dir)
if __name__ == '__main__':
link_crawler('http://example.webscraping.com/', '/places/default/(index|view)', cache=DiskCache())
| mit | Python |
|
9c33c1e7fcaf111e7a590b035974aa288bbe5895 | Create weechat script | fwilson42/bot-regime,fwilson42/bot-regime | czar.py | czar.py | SCRIPT_NAME = 'czar'
SCRIPT_AUTHOR = 'Samuel Damashek <[email protected]>'
SCRIPT_VERSION = '1.0'
SCRIPT_LICENSE = 'GPL3'
SCRIPT_DESC = 'Run czar commands natively in WeeChat'
SCRIPT_COMMAND = 'czar'
try:
import weechat
except ImportError:
print('This script must be run under WeeChat.')
print('Get WeeChat now at: http://www.weechat.org/')
exit(1)
import hashlib, time
czar_settings_default = {
'key' : ('', 'key for signing messages'),
}
czar_settings = {}
def commandtoken(nick, command):
timestr = str(int(time.time()) // 300)
return hashlib.sha1("{}{}{}{}".format(timestr, nick, command, czar_settings['key']).encode()).hexdigest()
def optoken(challenge, nick):
return hashlib.sha1("{}{}{}".format(challenge, czar_settings['key'], nick).encode()).hexdigest()
def czar_config_cb(data, option, value):
global czar_settings
pos = option.rfind('.')
czar_settings[option[pos+1:]] = value
return weechat.WEECHAT_RC_OK
def czar_cmd_cb(data, buffer, args):
args = args.split(' ')
if args[0] == 'op':
servername = (weechat.buffer_get_string(buffer, 'name').split('.'))[0]
if len(args) > 2:
plugin = weechat.buffer_get_string(buffer, 'plugin')
name = weechat.buffer_get_string(buffer, 'name')
name = '.'.join(name.split('.')[:-1]+[args[2]])
buf_context = weechat.buffer_search(plugin, name)
else:
buf_context = buffer
weechat.command(buf_context, '%s: opme %s' % (args[1], commandtoken(weechat.info_get('irc_nick',servername), 'opme:')))
elif args[0] == 'cmd':
servername = (weechat.buffer_get_string(buffer, 'name').split('.'))[0]
cmdargs = ','.join(args[2:])
token = commandtoken(weechat.info_get('irc_nick',servername), ':'.join([args[1],cmdargs]))
weechat.command(buffer, ';;%s %s %s' % (args[1], token, ' '.join(args[2:])))
else:
weechat.prnt('', 'Invalid command in czar.')
return weechat.WEECHAT_RC_OK
if __name__ == '__main__':
if weechat.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, SCRIPT_LICENSE, SCRIPT_DESC, '', ''):
version = weechat.info_get('version_number', '') or 0
for option, value in czar_settings_default.items():
if weechat.config_is_set_plugin(option):
czar_settings[option] = weechat.config_get_plugin(option)
else:
weechat.config_set_plugin(option, value[0])
czar_settings[option] = value[0]
if int(version) >= 0x00030500:
weechat.config_set_desc_plugin(option, '%s (default: "%s")' % (value[1], value[0]))
weechat.hook_config('plugins.var.python.%s.*' % SCRIPT_NAME, 'czar_config_cb', '')
#weechat.hook_print('', '', 'CHALLENGE', 1, 'czar_msg_cb', '')
weechat.hook_command(SCRIPT_COMMAND, SCRIPT_DESC, 'op|cmd',
' op <nick> [channel]: request to be opped in the current channel or [channel] if specified, by <nick>\n'
'cmd <cmd>: issue <cmd> to all bots (do not specify the cmdchar)', 'op|cmd', 'czar_cmd_cb', '')
| bsd-3-clause | Python |
|
af19c62cfa27f27cd37bf558ac77a7845dff7754 | Create generate_chapters.py | satishgoda/leo-editor-tutorial | sandbox/generate_chapters.py | sandbox/generate_chapters.py | @language python
VOLUME = 'II'
TOTAL_CHAPTERS = 42
URL = "http://www.feynmanlectures.caltech.edu/{0}_{1:02}.html"
copied_position = p.copy()
for index in range(1, TOTAL_CHAPTERS+1):
new_node = copied_position.insertAsLastChild()
new_node.h = "@chapter {0} Chapter {1:02} - ".format(VOLUME, index)
new_node.b = URL.format(VOLUME, index)
c.redraw_now()
| mit | Python |
|
48115d48da43f7f4517d8f55edee95d6c9e7cc45 | Create saveGraphToFile.py | mobile2015/neoPyth,mobile2015/neoPyth,mobile2015/neoPyth | save-load/saveGraphToFile.py | save-load/saveGraphToFile.py | from py2neo import Graph, Node, Relationship
graph = Graph()
# Get username
usr_name = input("Enter username: ")
print ("Username: " + usr_name)
# Get user_id
usr_id = input("Enter user id: ")
print ("User id: " + usr_id)
print("Nodes")
resultsAllNodes = graph.cypher.execute("MATCH (n) RETURN n")
print(resultsAllNodes)
print("Nodes plus relationships")
resultsAll = graph.cypher.execute("START n=node(*) MATCH (n)-[r]->(m) RETURN n,r,m")
print(resultsAll)
query1 = "MATCH (user { username:'"
query1 = query1 + usr_name
query1 = query1 + "' })-[:"
query1 = query1 + usr_id
query1 = query1 + "]->(n) RETURN n"
results = graph.cypher.execute(query1);
print(results)
query2 = "MATCH (user { username:'"
query2 = query2 + usr_name
query2 = query2 + "' })-[:"
query2 = query2 + usr_id
query2 = query2 + "]->(n)-[r]->(m) RETURN r"
results2 = graph.cypher.execute(query2);
print(results2)
f = open('graph.txt', 'w')
f.write(usr_name+"\n")
f.write(usr_id+"\n")
s = str(results)
s2 = str(results2)
f.write(s)
f.write(s2)
f.close()
| bsd-2-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.