commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
9179907357c6e8aad33a8a5e5cd39b164b2f9cc0 | Update BUILD_OSS to 4680. | fcitx/mozc,google/mozc,fcitx/mozc,fcitx/mozc,fcitx/mozc,google/mozc,google/mozc,google/mozc,fcitx/mozc,google/mozc | src/data/version/mozc_version_template.bzl | src/data/version/mozc_version_template.bzl | # Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4680
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that loose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
| # Copyright 2010-2021, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
MAJOR = 2
MINOR = 26
# BUILD number used for the OSS version.
BUILD_OSS = 4666
# Number to be increased. This value may be replaced by other tools.
BUILD = BUILD_OSS
# Represent the platform and release channel.
REVISION = 100
REVISION_MACOS = REVISION + 1
# This version represents the version of Mozc IME engine (converter, predictor,
# etc.). This version info is included both in the Mozc server and in the Mozc
# data set file so that the Mozc server can accept only the compatible version
# of data set file. The engine version must be incremented when:
# * POS matcher definition and/or conversion models were changed,
# * New data are added to the data set file, and/or
# * Any changes that loose data compatibility are made.
ENGINE_VERSION = 24
# This version is used to manage the data version and is included only in the
# data set file. DATA_VERSION can be incremented without updating
# ENGINE_VERSION as long as it's compatible with the engine.
# This version should be reset to 0 when ENGINE_VERSION is incremented.
DATA_VERSION = 10
| bsd-3-clause | Python |
db2bb0356cfdf486a9e628726cd4e5879311fe8b | update version | overfly83/bjrobot | src/BJRobot/version.py | src/BJRobot/version.py | VERSION = '0.5.0'
| VERSION = '0.4.0'
| mit | Python |
137271045313a12bbe9388ab1ac6c8cb786b32b7 | Reset mock befor running test. | benkonrath/django-guardian,benkonrath/django-guardian,rmgorman/django-guardian,lukaszb/django-guardian,lukaszb/django-guardian,rmgorman/django-guardian,lukaszb/django-guardian,rmgorman/django-guardian,benkonrath/django-guardian | guardian/testapp/tests/test_management.py | guardian/testapp/tests/test_management.py | from __future__ import absolute_import
from __future__ import unicode_literals
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.compat import unittest
from guardian.management import create_anonymous_user
import django
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(unittest.TestCase):
@unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
mocked_get_init_anon.reset_mock()
path = 'guardian.testapp.tests.test_management.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
| from __future__ import absolute_import
from __future__ import unicode_literals
from guardian.compat import get_user_model
from guardian.compat import mock
from guardian.compat import unittest
from guardian.management import create_anonymous_user
import django
mocked_get_init_anon = mock.Mock()
class TestGetAnonymousUser(unittest.TestCase):
@unittest.skipUnless(django.VERSION >= (1, 5), "Django >= 1.5 only")
@mock.patch('guardian.management.guardian_settings')
def test_uses_custom_function(self, guardian_settings):
path = 'guardian.testapp.tests.test_management.mocked_get_init_anon'
guardian_settings.GET_INIT_ANONYMOUS_USER = path
guardian_settings.ANONYMOUS_USER_ID = 219
User = get_user_model()
anon = mocked_get_init_anon.return_value = mock.Mock()
create_anonymous_user('sender')
mocked_get_init_anon.assert_called_once_with(User)
self.assertEqual(anon.pk, 219)
anon.save.assert_called_once_with()
| bsd-2-clause | Python |
30020d3826a2460288b6a57963753787020a945a | Implement support for the 'D' type in packb() | wbolster/temporenc-python | temporenc/temporenc.py | temporenc/temporenc.py |
import struct
SUPPORTED_TYPES = set([
'D',
'T',
'DT',
'DTZ',
'DTS',
'DTSZ',
])
STRUCT_32 = struct.Struct('>L')
def packb(type=None, year=None, month=None, day=None):
"""
Pack date and time information into a byte string.
:return: encoded temporenc value
:rtype: bytes
"""
# Input validation
if type not in SUPPORTED_TYPES:
raise ValueError("invalid temporenc type: {0!r}".format(type))
if year is None:
year = 4095
elif not 0 <= year <= 4094:
raise ValueError("'year' not in supported range")
if month is None:
month = 15
elif not 1 <= month <= 12:
raise ValueError("'month' not in supported range")
if day is None:
day = 31
elif not 1 <= day <= 31:
raise ValueError("'day' not in supported range")
# Component packing
if 'D' in type:
d = (year << 9) | (month - 1 << 5) | (day - 1)
# Byte packing
if type == 'D':
# Format: 100DDDDD DDDDDDDD DDDDDDDD
return STRUCT_32.pack(0b100 << 21 | d)[1:]
raise NotImplementedError()
|
def packb(type=None, year=None, month=None, day=None):
raise NotImplementedError()
| bsd-3-clause | Python |
7c63030bd70b32ec4c13ff4273d103ddbb0ffa0f | include tumblrprofile in djangoadmin | castaway2000/OpenStay,castaway2000/OpenStay,castaway2000/OpenStay | hackathon_starter/hackathon/admin.py | hackathon_starter/hackathon/admin.py | from django.contrib import admin
from hackathon.models import UserProfile, Profile, InstagramProfile, TwitterProfile, MeetupToken, GithubProfile, LinkedinProfile, TumblrProfile
# Register your models here.
class TwitterProfileAdmin(admin.ModelAdmin):
list_display = ('user','twitter_user')
admin.site.register(UserProfile)
admin.site.register(Profile)
admin.site.register(InstagramProfile)
admin.site.register(TwitterProfile, TwitterProfileAdmin)
admin.site.register(GithubProfile)
admin.site.register(MeetupToken)
admin.site.register(LinkedinProfile)
admin.site.register(TumblrProfile)
| from django.contrib import admin
from hackathon.models import UserProfile, Profile, InstagramProfile, TwitterProfile, MeetupToken, GithubProfile, LinkedinProfile
# Register your models here.
class TwitterProfileAdmin(admin.ModelAdmin):
list_display = ('user','twitter_user')
admin.site.register(UserProfile)
admin.site.register(Profile)
admin.site.register(InstagramProfile)
admin.site.register(TwitterProfile, TwitterProfileAdmin)
admin.site.register(GithubProfile)
admin.site.register(MeetupToken)
admin.site.register(LinkedinProfile)
| mpl-2.0 | Python |
c3df7d5adf551213c94f2d0e0598552ce6ee9aaf | move collection list filtering logic to db query | hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare | hs_collection_resource/page_processors.py | hs_collection_resource/page_processors.py | from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.db.models import Q
from mezzanine.pages.page_processors import processor_for
from hs_core import page_processors
from hs_core.models import BaseResource
from hs_core.views import add_generic_context
from hs_core.views.utils import get_my_resources_list
from .models import CollectionResource
@processor_for(CollectionResource)
def landing_page(request, page):
content_model = page.get_content_model()
edit_resource = page_processors.check_resource_mode(request)
# current contained resources list
collection_items_list = list(content_model.resources.all())
# get the context from hs_core
context = page_processors.get_page_context(page, request.user,
resource_edit=edit_resource,
extended_metadata_layout=None,
request=request)
if edit_resource:
user = request.user
if not user.is_authenticated():
return HttpResponseForbidden();
user_all_accessible_resource_list = get_my_resources_list(user)
# resource is collectable if
# 1) Shareable=True
# 2) OR, current user is a owner of it
# 3) exclude this resource as well as resources already in the collection
user_all_accessible_resource_list.exclude(short_id=content_model.short_id)\
.exclude(id__in=content_model.resources.all())\
.exclude(Q(raccess__shareable=False) | Q(raccess__owners__contains=user.pk))
context['collection_candidate'] = user_all_accessible_resource_list.all()
context['collection_res_id'] = content_model.short_id
elif isinstance(context, HttpResponseRedirect):
# resource view mode
# sending user to login page
return context
context['deleted_resources'] = content_model.deleted_resources.all()
context['collection'] = collection_items_list
context['edit_mode'] = edit_resource
hs_core_dublin_context = add_generic_context(request, page)
context.update(hs_core_dublin_context)
return context
| from django.http import HttpResponseRedirect
from mezzanine.pages.page_processors import processor_for
from hs_core import page_processors
from hs_core.models import BaseResource
from hs_core.views import add_generic_context
from hs_core.views.utils import get_my_resources_list
from .models import CollectionResource
@processor_for(CollectionResource)
def landing_page(request, page):
content_model = page.get_content_model()
edit_resource = page_processors.check_resource_mode(request)
user = request.user
if user.is_authenticated():
user_all_accessible_resource_list = get_my_resources_list(user)
else: # anonymous user
user_all_accessible_resource_list = list(BaseResource.discoverable_resources.all())
# resource is collectable if
# 1) Shareable=True
# 2) OR, current user is a owner of it
user_all_collectable_resource_list = []
for res in user_all_accessible_resource_list:
if res.raccess.shareable or res.raccess.owners.filter(pk=user.pk).exists():
user_all_collectable_resource_list.append(res)
# current contained resources list
collection_items_list = list(content_model.resources.all())
# get the context from hs_core
context = page_processors.get_page_context(page, request.user,
resource_edit=edit_resource,
extended_metadata_layout=None,
request=request)
if edit_resource:
candidate_resources_list = []
for res in user_all_collectable_resource_list:
if content_model.short_id == res.short_id:
continue # skip current collection resource object
elif res in content_model.resources.all():
continue # skip resources that are already in current collection
candidate_resources_list.append(res)
context['collection_candidate'] = candidate_resources_list
context['collection_res_id'] = content_model.short_id
elif isinstance(context, HttpResponseRedirect):
# resource view mode
# sending user to login page
return context
context['deleted_resources'] = content_model.deleted_resources.all()
context['collection'] = collection_items_list
context['edit_mode'] = edit_resource
hs_core_dublin_context = add_generic_context(request, page)
context.update(hs_core_dublin_context)
return context
| bsd-3-clause | Python |
75a0dec32210432374b45dbed2845dfe171b9b36 | Set version number to 0.4.1 | brian-rose/climlab,cjcardinale/climlab,cjcardinale/climlab,cjcardinale/climlab,brian-rose/climlab | climlab/__init__.py | climlab/__init__.py | __version__ = '0.4.1'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
| __version__ = '0.4.1dev'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shorcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.domain.initial import column_state, surface_state
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
| mit | Python |
85af2e031479c78aaef433e2294648125916251a | Improve color palette for cycling Curves | jvivian/rnaseq-lib,jvivian/rnaseq-lib | src/rnaseq_lib/plot/opts.py | src/rnaseq_lib/plot/opts.py | import holoviews as hv
color_sequence = ['#1f77b4', '#aec7e8', '#ff7f0e', '#ffbb78', '#2ca02c',
'#98df8a', '#d62728', '#ff9896', '#9467bd', '#c5b0d5',
'#8c564b', '#c49c94', '#e377c2', '#f7b6d2', '#7f7f7f',
'#c7c7c7', '#bcbd22', '#dbdb8d', '#17becf', '#9edae5']
gene_curves_opts = {
'Curve': {'plot': dict(height=120, width=600, tools=['hover'], invert_xaxis=True, yrotation=45, yaxis='left'),
'style': dict(line_width=1.5)},
'Curve.Percentage_of_Normal_Samples': {'plot': dict(xaxis=None, invert_yaxis=True),
'style': dict(color='Blue')},
'Curve.Gene_Expression': {'plot': dict(xaxis=None),
'style': dict(color='Green')},
'Curve.Log2_Fold_Change': {'plot': dict(height=150),
'style': dict(color='Purple')},
'Scatter': {'style': dict(color='red', size=3)}}
gene_kde_opts = {'Overlay': {'plot': dict(width=500, legend_position='left')}}
gene_distribution_opts = {'BoxWhisker': {'plot': dict(width=875, xrotation=70)}}
gene_de_opts = {
'Scatter': {'plot': dict(color_index='Tissue', legend_position='left', width=700, height=500, tools=['hover']),
'style': dict(cmap='tab20', size=10, alpha=0.5)}}
sample_count_opts = {
'Bars': {'plot': dict(width=875, xrotation=70, tools=['hover'], show_legend=False)}
}
l2fc_by_perc_samples_opts = {
'Curve': {'plot': dict(tools=['hover']),
'style': dict(color=hv.Cycle(values=color_sequence))},
'Overlay': {'plot': dict(legend_position='left', width=500)},
'Spikes': {'plot': dict(spike_length=100),
'style': dict(line_alpha=0.4, line_width=5)}
}
| gene_curves_opts = {
'Curve': {'plot': dict(height=120, width=600, tools=['hover'], invert_xaxis=True, yrotation=45, yaxis='left'),
'style': dict(line_width=1.5)},
'Curve.Percentage_of_Normal_Samples': {'plot': dict(xaxis=None, invert_yaxis=True),
'style': dict(color='Blue')},
'Curve.Gene_Expression': {'plot': dict(xaxis=None),
'style': dict(color='Green')},
'Curve.Log2_Fold_Change': {'plot': dict(height=150),
'style': dict(color='Purple')},
'Scatter': {'style': dict(color='red', size=3)}}
gene_kde_opts = {'Overlay': {'plot': dict(width=500, legend_position='left')}}
gene_distribution_opts = {'BoxWhisker': {'plot': dict(width=875, xrotation=70)}}
gene_de_opts = {
'Scatter': {'plot': dict(color_index='Tissue', legend_position='left', width=700, height=500, tools=['hover']),
'style': dict(cmap='tab20', size=10, alpha=0.5)}}
sample_count_opts = {
'Bars': {'plot': dict(width=875, xrotation=70, tools=['hover'], show_legend=False)}
}
l2fc_by_perc_samples_opts = {
'Curve': {'plot': dict(tools=['hover'])},
'Overlay': {'plot': dict(legend_position='left', width=500)},
'Spikes': {'plot': dict(spike_length=100),
'style': dict(line_alpha=0.4, line_width=5)}
}
| mit | Python |
72c5168ff71223db32ef37a12fd8781f28bfc433 | change CTCP VERSION reply | sammdot/circa | circa.py | circa.py | #!/usr/bin/env python3
import sdirc
import yaml
import threading
import importlib
import modules
VERSION = "1.0"
class Circa(sdirc.Client):
def __init__(self, **conf):
conf["autoconn"] = False
conf["prefix"] = conf["prefix"] if "prefix" in conf else "!"
sdirc.Client.__init__(self, **conf)
self.modules = {}
self.add_listener("registered",
lambda m: (self.send("UMODE2", "+B"), self.say("groupserv", "join !bots")))
for module in "cmd module leave".split() + self.conf["modules"]:
self.load_module(module)
self.add_listener("invite", lambda to, by, m: self.join(to))
self.add_listener("ctcp-version", self.version)
self.connect()
def version(self, fr, to, msg):
self.notice(fr, "\x01VERSION circa {0}\x01".format(VERSION))
@staticmethod
def wrap(line):
words = []
width = 80
for word in line.split():
if len(word) + 1 > width:
words.append("\xFF")
width = 80 - len(word)
else:
width = width - len(word) - 1
words.append(word)
line2 = " ".join(words)
sublines = line2.split(" \xFF ")
return sublines
def say(self, to, msg):
msg = [line.rstrip() for line in msg.split("\n")]
for line in msg:
for subline in Circa.wrap(line):
sdirc.Client.say(self, to, subline)
def load_module(self, name):
if name in self.modules:
return 2
try:
m = importlib.import_module("modules." + name).module
if hasattr(m, "require"):
for mod in m.require.split():
self.load_module(mod)
self.modules[name] = module = m(self)
for event, listeners in module.listeners.items():
for listener in listeners:
self.add_listener(event, listener)
return 0
except ImportError:
return 1
except AttributeError:
return 1
except TypeError:
return 1
def unload_module(self, name):
if name not in self.modules:
return 1
module = self.modules[name]
for event, listeners in module.listeners.items():
for listener in listeners:
self.remove_listener(event, listener)
del self.modules[name]
return 0
if __name__ == "__main__":
try:
file = open("config.yaml")
config = yaml.load(file)
file.close()
for c in config:
threading.Thread(target=lambda: Circa(**c)).start()
except KeyboardInterrupt:
print("Bye")
| #!/usr/bin/env python3
import sdirc
import yaml
import threading
import importlib
import modules
VERSION = "1.0"
class Circa(sdirc.Client):
def __init__(self, **conf):
conf["autoconn"] = False
conf["prefix"] = conf["prefix"] if "prefix" in conf else "!"
sdirc.Client.__init__(self, **conf)
self.modules = {}
self.add_listener("registered",
lambda m: (self.send("UMODE2", "+B"), self.say("groupserv", "join !bots")))
for module in "cmd module leave".split() + self.conf["modules"]:
self.load_module(module)
self.add_listener("invite", lambda to, by, m: self.join(to))
self.add_listener("ctcp-version", self.version)
self.connect()
def version(self, fr, to, msg):
self.say(fr, "\x01VERSION circa {0}\x01".format(VERSION))
@staticmethod
def wrap(line):
words = []
width = 80
for word in line.split():
if len(word) + 1 > width:
words.append("\xFF")
width = 80 - len(word)
else:
width = width - len(word) - 1
words.append(word)
line2 = " ".join(words)
sublines = line2.split(" \xFF ")
return sublines
def say(self, to, msg):
msg = [line.rstrip() for line in msg.split("\n")]
for line in msg:
for subline in Circa.wrap(line):
sdirc.Client.say(self, to, subline)
def load_module(self, name):
if name in self.modules:
return 2
try:
m = importlib.import_module("modules." + name).module
if hasattr(m, "require"):
for mod in m.require.split():
self.load_module(mod)
self.modules[name] = module = m(self)
for event, listeners in module.listeners.items():
for listener in listeners:
self.add_listener(event, listener)
return 0
except ImportError:
return 1
except AttributeError:
return 1
except TypeError:
return 1
def unload_module(self, name):
if name not in self.modules:
return 1
module = self.modules[name]
for event, listeners in module.listeners.items():
for listener in listeners:
self.remove_listener(event, listener)
del self.modules[name]
return 0
if __name__ == "__main__":
try:
file = open("config.yaml")
config = yaml.load(file)
file.close()
for c in config:
threading.Thread(target=lambda: Circa(**c)).start()
except KeyboardInterrupt:
print("Bye")
| bsd-3-clause | Python |
dc6100fea3097d97e7065bd653093798eac84909 | Allow passing in of timezone | pinax/pinax-calendars,eldarion/kairios,eldarion/kairios | kairios/templatetags/kairios_tags.py | kairios/templatetags/kairios_tags.py | import calendar as cal
import datetime
from django import template
from django.util import timezone
import pytz
register = template.Library()
def delta(year, month, d):
mm = month + d
yy = year
if mm > 12:
mm, yy = mm % 12, year + mm / 12
elif mm < 1:
mm, yy = 12 + mm, year - 1
return yy, mm
@register.inclusion_tag("kairios/calendar.html", takes_context=True)
def calendar(context, events, date=None, tz=None, **kwargs):
cal.setfirstweekday(cal.SUNDAY)
if tz:
today = timezone.localtime(timezone.now(), pytz.timezone(tz)).date()
else:
today = datetime.date.today()
if date is None:
date = today
plus_year, plus_month = delta(date.year, date.month, 1)
minus_year, minus_month = delta(date.year, date.month, -1)
next = events.month_url(plus_year, plus_month)
prev = events.month_url(minus_year, minus_month)
events_by_day = events.events_by_day(date.year, date.month)
title = "%s %s" % (cal.month_name[date.month], date.year)
matrix = cal.monthcalendar(date.year, date.month)
grid = []
for week in matrix:
row = []
for day in week:
is_today = date.year == today.year and date.month == today.month and today.day == day
if day:
day_events = events_by_day.get(day, [])
link = events.day_url(date.year, date.month, day, bool(day_events))
row.append((day, day_events, link, is_today))
else:
row.append(None)
grid.append(row)
context.update({
"title": title,
"calendar_date": date,
"prev": prev,
"next": next,
"grid": grid,
})
return context
| import calendar as cal
import datetime
from django import template
register = template.Library()
def delta(year, month, d):
mm = month + d
yy = year
if mm > 12:
mm, yy = mm % 12, year + mm / 12
elif mm < 1:
mm, yy = 12 + mm, year - 1
return yy, mm
@register.inclusion_tag("kairios/calendar.html", takes_context=True)
def calendar(context, events, date=None, **kwargs):
cal.setfirstweekday(cal.SUNDAY)
today = datetime.date.today()
if date is None:
date = today
plus_year, plus_month = delta(date.year, date.month, 1)
minus_year, minus_month = delta(date.year, date.month, -1)
next = events.month_url(plus_year, plus_month)
prev = events.month_url(minus_year, minus_month)
events_by_day = events.events_by_day(date.year, date.month)
title = "%s %s" % (cal.month_name[date.month], date.year)
matrix = cal.monthcalendar(date.year, date.month)
grid = []
for week in matrix:
row = []
for day in week:
is_today = date.year == today.year and date.month == today.month and today.day == day
if day:
day_events = events_by_day.get(day, [])
link = events.day_url(date.year, date.month, day, bool(day_events))
row.append((day, day_events, link, is_today))
else:
row.append(None)
grid.append(row)
context.update({
"title": title,
"calendar_date": date,
"prev": prev,
"next": next,
"grid": grid,
})
return context
| unknown | Python |
fd7577d34ef206869517f3717070880d098d4d8b | change URL dispach rules | indexofire/django-cms-content,indexofire/django-cms-content,indexofire/django-cms-content | cms_content/urls.py | cms_content/urls.py | # -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from cms_content.views import *
from cms_content.models import *
from cms_content.utils.queryset import queryset_iterator
urlpatterns = patterns ('',
url(r'^$', section_list, {'sections': CMSSection.objects.all()}, name='section'),
url(r'^(?P<slug>\w*)/$', category_list, name='category_list'),
url(r'^(?P<slug>\w*)/(?P<path>\w*)/$', article_list),
url(r'^(?P<slug>[-\w]+)/(?P<path>[-\w]+)/(?P<name>[-\w]+)/$', article_view),
)
| # -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from cms_content.views import *
from cms_content.models import *
from cms_content.utils.queryset import queryset_iterator
urlpatterns = patterns ('',
url(r'^$', section_list, {'sections': CMSSection.objects.all()}, name='section'),
url(r'^(?P<slug>\w*)/$', category_list, name='category_list'),
url(r'^(?P<slug>\w*)/(?P<path>\w*)/$', article_list),
url(r'^(?P<slug>\w*)/(?P<path>\w*)/(?P<name>[\w-]*)/$', article_view),
)
| bsd-3-clause | Python |
ce83a4fb2f650380b7683ea688791e078b6fe7ec | Fix wrong redirect on logout | YouNeedToSleep/sleepy,YouNeedToSleep/sleepy,YouNeedToSleep/sleepy | src/sleepy/web/views.py | src/sleepy/web/views.py | from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, logout
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView, TemplateView
from django.utils.http import is_safe_url
from django.utils.translation import ugettext
class IndexView(TemplateView):
"""View for the index page"""
template_name = 'sleepy/web/index.html'
class LogoutView(RedirectView):
url = reverse_lazy('sleepy-home')
permanent = False
def dispatch(self, request, *args, **kwargs):
if request.user.is_authenticated():
logout(self.request)
messages.success(request, ugettext('You have successfully logged out.'))
return super(LogoutView, self).get(request, *args, **kwargs)
def get_redirect_url(self, *args, **kwargs):
url = super(LogoutView, self).get_redirect_url(*args, **kwargs)
next_url = self.request.REQUEST.get(REDIRECT_FIELD_NAME, None)
if next_url and is_safe_url(url=next_url, host=self.request.get_host()):
url = next_url
return url
| from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME, logout
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView, TemplateView
from django.utils.http import is_safe_url
from django.utils.translation import ugettext
class IndexView(TemplateView):
"""View for the index page"""
template_name = 'sleepy/web/index.html'
class LogoutView(RedirectView):
url = reverse_lazy('home')
permanent = False
def dispatch(self, request, *args, **kwargs):
if request.user.is_authenticated():
logout(self.request)
messages.success(request, ugettext('You have successfully logged out.'))
return super(LogoutView, self).get(request, *args, **kwargs)
def get_redirect_url(self, *args, **kwargs):
url = super(LogoutView, self).get_redirect_url(*args, **kwargs)
next_url = self.request.REQUEST.get(REDIRECT_FIELD_NAME, None)
if next_url and is_safe_url(url=next_url, host=self.request.get_host()):
url = next_url
return url
| bsd-3-clause | Python |
e8389c211ef56869cd9c6c1177aa6a610a915aa2 | Fix manifest and add format to properties | redmatter/combine | combine/manifest.py | combine/manifest.py | # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {"manifest-format": MANIFEST_FORMAT}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| # Copyright (c) 2010 John Reese
# Licensed under the MIT license
import yaml
from combine import Change, CombineError
MANIFEST_FORMAT = 1
class Manifest:
def __init__(self):
self.properties = {}
self.actions = []
def add_property(self, name, value):
self.properties[name] = value
def add_action(self, action):
self.actions.append(action)
def to_dict(self):
"""
Generate a dictionary representation of the Manifest object.
"""
return dict(self.properties, actions=self.actions)
@classmethod
def from_dict(cls, data):
"""
Given a dictionary object, generate a new Manifest object.
"""
format = data["manifest-format"]
if (format > MANIFEST_FORMAT or format < 0):
raise CombineError("Unsupported manifest format")
mft = Manifest()
for key, value in data.items():
if key == "actions":
for action in value:
mft.add_action(action)
else:
mft.add_property(key, value)
for action in data["actions"]:
mft.add_action(action)
return mft
def to_yaml(self):
"""
Generate a YAML data string representing the Manifest object.
"""
str = yaml.safe_dump(self.to_dict(), default_flow_style=False)
return str
@classmethod
def from_yaml(cls, str):
"""
Given a string of YAML data, generate a new Manifest object.
"""
data = yaml.safe_load(str)
return cls.from_dict(data)
| mit | Python |
291f11c6325a1ae082845be81692bc64521eab7e | refactor create-kdtree script | legacysurvey/legacypipe,legacysurvey/legacypipe | py/legacypipe/create-kdtrees.py | py/legacypipe/create-kdtrees.py | import os
from astrometry.libkd.spherematch import *
from astrometry.util.fits import fits_table
import numpy as np
# This script creates the survey-ccd-*.kd.fits kd-trees from
# survey-ccds-*.fits.gz (zeropoints) files
#
def create_kdtree(infn, outfn):
readfn = infn
# gunzip
if infn.endswith('.gz'):
tfn = '/tmp/ccds.fits'
cmd = 'gunzip -cd %s > %s' % (infn, tfn)
print(cmd)
rtn = os.system(cmd)
assert(rtn == 0)
readfn = tfn
# startree
sfn = '/tmp/startree.fits'
cmd = 'startree -i %s -o %s -P -T -k -n ccds' % (readfn, sfn)
print(cmd)
rtn = os.system(cmd)
assert(rtn == 0)
# add expnum-tree
T = fits_table(sfn, columns=['expnum'])
ekd = tree_build(np.atleast_2d(T.expnum.copy()).T.astype(float),
nleaf=60, bbox=False, split=True)
ekd.set_name('expnum')
efn = '/tmp/ekd.fits'
ekd.write(efn)
# merge
cmd = 'fitsgetext -i %s -o /tmp/ekd-%%02i -a -M' % (efn)
print(cmd)
rtn = os.system(cmd)
assert(rtn == 0)
cmd = 'cat %s /tmp/ekd-0[123456] > %s' % (sfn, outfn)
rtn = os.system(cmd)
assert(rtn == 0)
def pre_depthcut():
indir = '/global/projecta/projectdirs/cosmo/work/legacysurvey/dr8/DECaLS/'
outdir = '/global/cscratch1/sd/dstn/dr8new'
bands = 'grizY'
for band in bands:
infn = indir + 'survey-ccds-decam-%s.fits.gz' % band
print('Input:', infn)
outfn = outdir + '/survey-ccds-decam-%s.kd.fits' % band
create_kdtree(infn, outfn)
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('infn', help='Input filename (CCDs file)')
parser.add_argument('outfn', help='Output filename (survey-ccds-X.kd.fits file')
opt = parser.parse_args()
create_kdtree(opt.infn, opt.outfn)
| import os
from astrometry.libkd.spherematch import *
from astrometry.util.fits import fits_table
import numpy as np
# This script creates the survey-ccd-*.kd.fits kd-trees from
# survey-ccds-*.fits.gz (zeropoints) files
#
indir = '/global/projecta/projectdirs/cosmo/work/legacysurvey/dr8/DECaLS/'
outdir = '/global/cscratch1/sd/dstn/dr8new'
bands = 'grizY'
for band in bands:
infn = indir + 'survey-ccds-decam-%s.fits.gz' % band
print('Input:', infn)
# gunzip
tfn = '/tmp/survey-ccd-%s.fits' % band
cmd = 'gunzip -cd %s > %s' % (infn, tfn)
print(cmd)
os.system(cmd)
# startree
sfn = '/tmp/startree-%s.fits' % band
cmd = 'startree -i %s -o %s -P -T -k -n ccds' % (tfn, sfn)
print(cmd)
os.system(cmd)
# add expnum-tree
T = fits_table(sfn, columns=['expnum'])
ekd = tree_build(np.atleast_2d(T.expnum.copy()).T.astype(float),
nleaf=60, bbox=False, split=True)
ekd.set_name('expnum')
efn = '/tmp/ekd-%s.fits' % band
ekd.write(efn)
# merge
cmd = 'fitsgetext -i %s -o /tmp/ekd-%s-%%02i -a -M' % (efn, band)
print(cmd)
os.system(cmd)
outfn = outdir + '/survey-ccds-decam-%s.kd.fits' % band
cmd = 'cat %s /tmp/ekd-%s-0[123456] > %s' % (sfn, band, outfn)
os.system(cmd)
| bsd-3-clause | Python |
7b746d2d4ae732ee1eae326254f3a6df676a7973 | Add __str__ function for SgTable | lnishan/SQLGitHub | components/table.py | components/table.py | """A class to store tables."""
class SgTable:
"""A class to store tables."""
def __init__(self):
self._fields = []
self._table = []
def __len__(self):
return len(self._table)
def __iter__(self):
for row in self._table:
yield row
def __getitem__(self, key):
if not ((type(key) == int or type(key) == long) and key >= 0 and key < len(self._table)):
raise ValueError("Index illegal")
else:
return self._table[key]
def __setitem__(self, key, value):
if not ((type(key) == int or type(key) == long) and key >= 0 and key < len(self._table)):
raise ValueError("Index illegal")
else:
self._table[key] = value
def __str__(self):
ret = str(self._fields)
for row in self._table:
ret += "\n" + str(row)
return ret
def Append(self, row):
self._table.append(row)
def GetTable(self):
return self._table
def SetTable(self, table):
self._table = table
def GetFields(self):
return self._fields
def SetFields(self, fields):
self._fields = fields
| """A class to store tables."""
class SgTable:
"""A class to store tables."""
def __init__(self):
self._fields = []
self._table = []
def __len__(self):
return len(self._table)
def __iter__(self):
for row in self._table:
yield row
def __getitem__(self, key):
if not ((type(key) == int or type(key) == long) and key >= 0 and key < len(self._table)):
raise ValueError("Index illegal")
else:
return self._table[key]
def __setitem__(self, key, value):
if not ((type(key) == int or type(key) == long) and key >= 0 and key < len(self._table)):
raise ValueError("Index illegal")
else:
self._table[key] = value
def Append(self, row):
self._table.append(row)
def GetTable(self):
return self._table
def SetTable(self, table):
self._table = table
def GetFields(self):
return self._fields
def SetFields(self, fields):
self._fields = fields
| mit | Python |
8a45ca4dff9957a6fce07dfa067633fcd842bc51 | Update cpp.py | StatisKit/StatisKit,StatisKit/StatisKit | conda/libdev/cpp.py | conda/libdev/cpp.py | import os
from SCons.Defaults import Delete
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'cpp' in env['TOOLS'][:-1]:
env.Tool('system')
env.Tool('prefix')
def BuildCpp(env, target, sources):
# Code to build "target" from "source"
SYSTEM = env['SYSTEM']
targets = env.Install(os.path.join(env['PREFIX'], "include", *target.split('_')),
[source for source in sources if source.suffix in ['.h', '.hpp', '.hxx', '.h++']])
if SYSTEM == 'osx':
kwargs = dict(FRAMEWORKSFLAGS = '-flat_namespace -undefined suppress')
else:
kwargs = dict()
targets += env.SharedLibrary(os.path.join(env['PREFIX'], "lib", target),
[source for source in sources if source.suffix in ['.c', '.cpp', '.cxx', '.c++']],
**kwargs)
if SYSTEM == 'win':
dll = [target for target in targets if target.suffix == '.dll'].pop()
exp = [target for target in targets if target.suffix == '.exp'].pop()
lib = [target for target in targets if target.suffix == '.lib'].pop()
targets = [target for target in targets if not target.suffix in ['.dll', '.exp', '.lib']]
targets += env.Install(os.path.join(env['PREFIX'], "bin"), dll)
targets += env.Command(lib, [exp, dll], [Delete("$SOURCE")])
return targets
env.AddMethod(BuildCpp)
def exists(env):
return 1
| import os
from SCons.Defaults import Move
def generate(env):
"""Add Builders and construction variables to the Environment."""
if not 'cpp' in env['TOOLS'][:-1]:
env.Tool('system')
env.Tool('prefix')
def BuildCpp(env, target, sources):
# Code to build "target" from "source"
SYSTEM = env['SYSTEM']
targets = env.Install(os.path.join(env['PREFIX'], "include", *target.split('_')),
[source for source in sources if source.suffix in ['.h', '.hpp', '.hxx', '.h++']])
if SYSTEM == 'osx':
kwargs = dict(FRAMEWORKSFLAGS = '-flat_namespace -undefined suppress')
else:
kwargs = dict()
targets += env.SharedLibrary(os.path.join(env['PREFIX'], "lib", target),
[source for source in sources if source.suffix in ['.c', '.cpp', '.cxx', '.c++']],
**kwargs)
if SYSTEM == 'win':
dll = [target for target in targets if target.suffix == '.dll'].pop()
exp = [target for target in targets if target.suffix == '.exp'].pop()
lib = [target for target in targets if target.suffix == '.lib'].pop()
targets = [target for target in targets if not target.suffix in ['.dll', '.exp', '.lib']]
targets += env.Install(os.path.join(env['PREFIX'], "bin"), dll)
targets += env.Command(lib, [exp, dll], [Delete("$SOURCE")])
return targets
env.AddMethod(BuildCpp)
def exists(env):
return 1
| apache-2.0 | Python |
e58b94f29888ac1c48bec77cb08fc90919c7720b | add filename attribute | accraze/python-twelve-tone | src/twelve_tone/midi.py | src/twelve_tone/midi.py | from miditime.miditime import MIDITime
class MIDIFile(object):
def __init__(self, BPM=120, filename='example.mid'):
self.pattern = MIDITime(BPM, filename)
self.step_counter = 0
self.filename = filename
def create(self, notes):
midinotes = []
offset = 60
attack = 200
beats = 1
for note in notes:
pitch = (note - 1) + offset
midinote = [self.step_counter, pitch, attack, beats]
midinotes.append(midinote)
self.step_counter = self.step_counter + 1
# Add a track with those notes
self.pattern.add_track(midinotes)
# Output the .mid file
self.pattern.save_midi()
| from miditime.miditime import MIDITime
class MIDIFile(object):
def __init__(self, BPM=120, filename='example.mid'):
self.pattern = MIDITime(BPM, filename)
self.step_counter = 0
def create(self, notes):
midinotes = []
offset = 60
attack = 200
beats = 1
for note in notes:
pitch = (note - 1) + offset
midinote = [self.step_counter, pitch, attack, beats]
midinotes.append(midinote)
self.step_counter = self.step_counter + 1
# Add a track with those notes
self.pattern.add_track(midinotes)
# Output the .mid file
self.pattern.save_midi()
| bsd-2-clause | Python |
aa46499c43bd7e4162dc657fa898b1df5e2dcee9 | Exclude windows from extended ascii mode because travis is unhappy | compas-dev/compas | src/compas/__main__.py | src/compas/__main__.py | # -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import pkg_resources
import compas
if __name__ == '__main__':
c = 'DCDHDCACDHDCAEDEACDHDCAEDEACDHDCAEDCDEACDHDCADCACDEADHDCAEDADEACDHDADADADHDCACDCAEDEACDCACDHDCAEDEACDCAEDEACDCAEDBACDHDAEDEACDADADCAEDBADHDAGDEACDADEADCAEDEADHDBADEDCAEDEACDEDAGDHDADCAEDACDCADADADHDAGDADEACAEDADBADHDAGDCADEAEDEACDBADHDAGDCAEDADEACDBADHDBADADADADAGDHDAGDCADEDADBADHDBADADAGDHDEADEAEDEAEDADHDEADEDADEDADHDEACDADCAEDHDACDADCADHDEACDADCAEDHDEACDADCAEDHDEACDADCAEDHDEAFCDADCAEDHDEAEDHDEDH' # noqa: E501
r = 'fGfB]DSD]BYBHEIEHCXBUCFBYBFCUBSBEBOEOBEBSBQBEPBGBPBEQBOBDBRIRBDBOBNEUGUENBLBECRBCBCBCBRCEBLBKBDBBBDBNBCBEBCBNBDBBBDBKBKDBFCDBIDIDIBDCFBDKBJDBKCCCDDKBCDCCCKBDJBIBDPCBBCBMBCBBCPDBIBIERBCBBBCGCBCDREIBIDBQDEBDCDBEDQBDIBIDBOBDIBCBIBCBOBDIBIDBNBCBKCKBCBNBDIBIBDMDMCMDMDBIBJDBHBFNCNGHBDJBJBDGkGDBJBKBDFBGB[BGBFEKBLBDHCPCPCHELBMBDBWCWBDBMBOEBUCUBEOBPBEBSCSBEBPBRBEBQCQBEBRBUBECMCMCECTBXBFBDGCGDGCWB[DXC[BbObB' # noqa: E501
maps = ' !-X_`|\n' if compas.IPY or compas.WINDOWS else ' ▌▀█▄`▐\n'
for n, o in zip(r, c):
print((ord(n) - 65) * maps[ord(o) - 65], end='')
print()
print('Yay! COMPAS is installed correctly!')
print()
print('COMPAS: {}'.format(compas.__version__))
print('Python: {}'.format(str(sys.version)))
working_set = pkg_resources.working_set
packages = set([p.project_name for p in working_set]) - set(['COMPAS'])
compas_pkgs = [p for p in packages if p.lower().startswith('compas')]
if compas_pkgs:
print('Installed COMPAS extensions: {}'.format([p for p in compas_pkgs]))
| # -*- coding: utf-8 -*-
from __future__ import print_function
import sys
import pkg_resources
import compas
if __name__ == '__main__':
c = 'DCDHDCACDHDCAEDEACDHDCAEDEACDHDCAEDCDEACDHDCADCACDEADHDCAEDADEACDHDADADADHDCACDCAEDEACDCACDHDCAEDEACDCAEDEACDCAEDBACDHDAEDEACDADADCAEDBADHDAGDEACDADEADCAEDEADHDBADEDCAEDEACDEDAGDHDADCAEDACDCADADADHDAGDADEACAEDADBADHDAGDCADEAEDEACDBADHDAGDCAEDADEACDBADHDBADADADADAGDHDAGDCADEDADBADHDBADADAGDHDEADEAEDEAEDADHDEADEDADEDADHDEACDADCAEDHDACDADCADHDEACDADCAEDHDEACDADCAEDHDEACDADCAEDHDEAFCDADCAEDHDEAEDHDEDH' # noqa: E501
r = 'fGfB]DSD]BYBHEIEHCXBUCFBYBFCUBSBEBOEOBEBSBQBEPBGBPBEQBOBDBRIRBDBOBNEUGUENBLBECRBCBCBCBRCEBLBKBDBBBDBNBCBEBCBNBDBBBDBKBKDBFCDBIDIDIBDCFBDKBJDBKCCCDDKBCDCCCKBDJBIBDPCBBCBMBCBBCPDBIBIERBCBBBCGCBCDREIBIDBQDEBDCDBEDQBDIBIDBOBDIBCBIBCBOBDIBIDBNBCBKCKBCBNBDIBIBDMDMCMDMDBIBJDBHBFNCNGHBDJBJBDGkGDBJBKBDFBGB[BGBFEKBLBDHCPCPCHELBMBDBWCWBDBMBOEBUCUBEOBPBEBSCSBEBPBRBEBQCQBEBRBUBECMCMCECTBXBFBDGCGDGCWB[DXC[BbObB' # noqa: E501
maps = ' !-X_`|\n' if compas.IPY else ' ▌▀█▄`▐\n'
for n, o in zip(r, c):
print((ord(n) - 65) * maps[ord(o) - 65], end='')
print()
print('Yay! COMPAS is installed correctly!')
print()
print('COMPAS: {}'.format(compas.__version__))
print('Python: {}'.format(str(sys.version)))
working_set = pkg_resources.working_set
packages = set([p.project_name for p in working_set]) - set(['COMPAS'])
compas_pkgs = [p for p in packages if p.lower().startswith('compas')]
if compas_pkgs:
print('Installed COMPAS extensions: {}'.format([p for p in compas_pkgs]))
| mit | Python |
ee9646c5e71dcbaf776d9f9f929dead5e5c1fa82 | Revert "cookie.value() didn't really need to be a string, since QSettings will take a QVariant anyways." | Lkhagvadelger/phantomjs,nicksay/phantomjs,nin042/phantomjs,jjyycchh/phantomjs,chauhanmohit/phantomjs,mattvick/phantomjs,ChrisAntaki/phantomjs,liorvh/phantomjs,joomel1/phantomjs,ChrisAntaki/phantomjs,sporttech/phantomjs,tianzhihen/phantomjs,Observer-Wu/phantomjs,hexid/phantomjs,AladdinSonni/phantomjs,likaiwalkman/phantomjs,OCForks/phantomjs,jkenn99/phantomjs,zhulin2609/phantomjs,linjeffrey/phantomjs,chirilo/phantomjs,Andrey-Pavlov/phantomjs,NickelMedia/phantomjs,joomel1/phantomjs,mapbased/phantomjs,jkenn99/phantomjs,JingZhou0404/phantomjs,tinfoil/phantomjs,angelman/phantomjs,VinceZK/phantomjs,jkburges/phantomjs,NickelMedia/phantomjs,ye11ow/phantomjs,avinashkunuje/phantomjs,bukalov/phantomjs,chylli/phantomjs,mapbased/phantomjs,christoph-buente/phantomjs,eugene1g/phantomjs,smasala/phantomjs,Klaudit/phantomjs,Andrey-Pavlov/phantomjs,lseyesl/phantomjs,shinate/phantomjs,viewdy/phantomjs2,VinceZK/phantomjs,Tomtomgo/phantomjs,bettiolo/phantomjs,RobertoMalatesta/phantomjs,NickelMedia/phantomjs,fxtentacle/phantomjs,zhengyongbo/phantomjs,thomasrogers03/phantomjs,nicksay/phantomjs,tianzhihen/phantomjs,bmotlaghFLT/FLT_PhantomJS,avinashkunuje/phantomjs,gskachkov/phantomjs,martonw/phantomjs,Medium/phantomjs-1,liorvh/phantomjs,ramanajee/phantomjs,astefanutti/phantomjs,nin042/phantomjs,JamesMGreene/phantomjs,VinceZK/phantomjs,linjeffrey/phantomjs,paulfitz/phantomjs,pcarrier-packaging/deb-phantomjs,neraliu/tpjs,MaDKaTZe/phantomjs,Tomtomgo/phantomjs,avinashkunuje/phantomjs,markhu/phantomjs,christoph-buente/phantomjs,r3b/phantomjs,yoki/phantomjs,sporttech/phantomjs,jguyomard/phantomjs,sxhao/phantomjs,danigonza/phantomjs,tmuelle2/phantomjs,webmull/phantomjs,farhi-naz/phantomjs,lattwood/phantomjs,joomel1/phantomjs,chauhanmohit/phantomjs,admetricks/phantomjs,Lochlan/phantomjs,bukalov/phantomjs,bkrukowski/phantomjs,iver333/phantomjs,houzhengg
ang/phantomjs,farhi-naz/phantomjs,delighted/phantomjs,pcarrier-packaging/deb-phantomjs,chauhanmohit/phantomjs,wxkdesky/phantomjs,aljscott/phantomjs,dongritengfei/phantomjs,likaiwalkman/phantomjs,tinfoil/phantomjs,hexid/phantomjs,avinashkunuje/phantomjs,eugene1g/phantomjs,xsyntrex/phantomjs,sxhao/phantomjs,joomel1/phantomjs,you21979/phantomjs,klickagent/phantomjs,wxkdesky/phantomjs,tinfoil/phantomjs,astefanutti/phantomjs,cesarmarinhorj/phantomjs,Andrey-Pavlov/phantomjs,brandingbrand/phantomjs,cesarmarinhorj/phantomjs,Andrey-Pavlov/phantomjs,Lkhagvadelger/phantomjs,djmaze/phantomjs,bukalov/phantomjs,ariya/phantomjs,astefanutti/phantomjs,bjko/phantomjs,peakji/phantomjs,joomel1/phantomjs,christoph-buente/phantomjs,admetricks/phantomjs,iradul/phantomjs-clone,lseyesl/phantomjs,pataquets/phantomjs,klickagent/phantomjs,pcarrier-packaging/deb-phantomjs,Dinamize/phantomjs,raff/phantomjs,Vitallium/phantomjs,sxhao/phantomjs,PeterWangPo/phantomjs,ramanajee/phantomjs,houzhenggang/phantomjs,vegetableman/phantomjs,rishilification/phantomjs,jguyomard/phantomjs,dparshin/phantomjs,pataquets/phantomjs,martonw/phantomjs,fxtentacle/phantomjs,brandingbrand/phantomjs,bjko/phantomjs,youprofit/phantomjs,houzhenggang/phantomjs,Deepakpatle/phantomjs,forzi/phantomjs_stradivari_fork,smasala/phantomjs,eceglov/phantomjs,grevutiu-gabriel/phantomjs,woodpecker1/phantomjs,sharma1nitish/phantomjs,Dinamize/phantomjs,brandingbrand/phantomjs,DocuSignDev/phantomjs,iradul/phantomjs-clone,JamesMGreene/phantomjs,jkburges/phantomjs,neraliu/tpjs,kyroskoh/phantomjs,astefanutti/phantomjs,viewdy/phantomjs2,saisai/phantomjs,eceglov/phantomjs,Tomtomgo/phantomjs,etiennekruger/phantomjs-qt5,bkrukowski/phantomjs,dparshin/phantomjs,wxkdesky/phantomjs,StevenBlack/phantomjs,OCForks/phantomjs,S11001001/phantomjs,mark-ignacio/phantomjs,sharma1nitish/phantomjs,RobertoMalatesta/phantomjs,farhi-naz/phantomjs,chylli/phantomjs,avinashkunuje/phantomjs,Klaudit/phantomjs,djmaze/phantomjs,vietch2612/phantomjs,StevenBlack/phantomjs,t
oanalien/phantomjs,zhengyongbo/phantomjs,zackw/phantomjs,unb-libraries/phantomjs,iver333/phantomjs,gitromand/phantomjs,jdar/phantomjs-modified,DocuSignDev/phantomjs,iver333/phantomjs,asrie/phantomjs,toanalien/phantomjs,mattvick/phantomjs,bukalov/phantomjs,viewdy/phantomjs2,zhengyongbo/phantomjs,mapbased/phantomjs,fentas/phantomjs,Dinamize/phantomjs,unb-libraries/phantomjs,eceglov/phantomjs,RobertoMalatesta/phantomjs,jefleponot/phantomjs,JamesMGreene/phantomjs,ezoic/phantomjs,kyroskoh/phantomjs,ramanajee/phantomjs,paulfitz/phantomjs,jorik041/phantomjs,tinfoil/phantomjs,iradul/phantomjs-clone,Vitallium/phantomjs,MaDKaTZe/phantomjs,bmotlaghFLT/FLT_PhantomJS,jdar/phantomjs-modified,thomasrogers03/phantomjs,xsyntrex/phantomjs,bettiolo/phantomjs,gitromand/phantomjs,admetricks/phantomjs,chauhanmohit/phantomjs,jguyomard/phantomjs,pigshell/nhnick,bprodoehl/phantomjs,S11001001/phantomjs,pigshell/nhnick,viewdy/phantomjs2,youprofit/phantomjs,smasala/phantomjs,saisai/phantomjs,MaDKaTZe/phantomjs,webmull/phantomjs,liorvh/phantomjs,likaiwalkman/phantomjs,markhu/phantomjs,farhi-naz/phantomjs,raff/phantomjs,JingZhou0404/phantomjs,hexid/phantomjs,Deepakpatle/phantomjs,youprofit/phantomjs,attilahorvath/phantomjs,webmull/phantomjs,klim-iv/phantomjs-qt5,eceglov/phantomjs,PeterWangPo/phantomjs,wuxianghou/phantomjs,pbrazdil/phantomjs,r3b/phantomjs,neraliu/tainted-phantomjs,bkrukowski/phantomjs,likaiwalkman/phantomjs,paulfitz/phantomjs,paulfitz/phantomjs,petermat/phantomjs,JingZhou0404/phantomjs,woodpecker1/phantomjs,delighted/phantomjs,chirilo/phantomjs,sporttech/phantomjs,wuxianghou/phantomjs,attilahorvath/phantomjs,bmotlaghFLT/FLT_PhantomJS,Lochlan/phantomjs,apanda/phantomjs-intercept,dparshin/phantomjs,apanda/phantomjs-intercept,martonw/phantomjs,RobertoMalatesta/phantomjs,ramanajee/phantomjs,ChrisAntaki/phantomjs,Deepakpatle/phantomjs,jguyomard/phantomjs,pbrazdil/phantomjs,dhendo/phantomjs,r3b/phantomjs,JamesMGreene/phantomjs,zhengyongbo/phantomjs,jguyomard/phantomjs,mapbased/phantomj
s,klickagent/phantomjs,toanalien/phantomjs,apanda/phantomjs-intercept,you21979/phantomjs,AladdinSonni/phantomjs,klickagent/phantomjs,forzi/phantomjs_stradivari_fork,rishilification/phantomjs,rishilification/phantomjs,eugene1g/phantomjs,zhengyongbo/phantomjs,dongritengfei/phantomjs,lattwood/phantomjs,tianzhihen/phantomjs,gskachkov/phantomjs,vegetableman/phantomjs,smasala/phantomjs,iradul/phantomjs,zhengyongbo/phantomjs,r3b/phantomjs,gitromand/phantomjs,JingZhou0404/phantomjs,vietch2612/phantomjs,peakji/phantomjs,joomel1/phantomjs,you21979/phantomjs,avinashkunuje/phantomjs,likaiwalkman/phantomjs,angelman/phantomjs,angelman/phantomjs,apanda/phantomjs-intercept,ezoic/phantomjs,dparshin/phantomjs,ramanajee/phantomjs,asrie/phantomjs,MeteorAdminz/phantomjs,vietch2612/phantomjs,iradul/phantomjs-clone,Dinamize/phantomjs,apanda/phantomjs-intercept,danigonza/phantomjs,Vitallium/phantomjs,grevutiu-gabriel/phantomjs,bukalov/phantomjs,Lkhagvadelger/phantomjs,webmull/phantomjs,neraliu/tainted-phantomjs,jjyycchh/phantomjs,wuxianghou/phantomjs,you21979/phantomjs,pigshell/nhnick,klim-iv/phantomjs-qt5,danigonza/phantomjs,revolutionaryG/phantomjs,woodpecker1/phantomjs,bettiolo/phantomjs,eceglov/phantomjs,tianzhihen/phantomjs,JamesMGreene/phantomjs,jillesme/phantomjs,OCForks/phantomjs,Andrey-Pavlov/phantomjs,wxkdesky/phantomjs,jkburges/phantomjs,lattwood/phantomjs,nin042/phantomjs,Medium/phantomjs-1,sharma1nitish/phantomjs,mark-ignacio/phantomjs,wuxianghou/phantomjs,OCForks/phantomjs,fentas/phantomjs,neraliu/tpjs,ye11ow/phantomjs,Observer-Wu/phantomjs,mark-ignacio/phantomjs,jkenn99/phantomjs,Dinamize/phantomjs,kinwahlai/phantomjs-ghostdriver,djmaze/phantomjs,Vitallium/phantomjs,bettiolo/phantomjs,bprodoehl/phantomjs,klickagent/phantomjs,ixiom/phantomjs,JingZhou0404/phantomjs,admetricks/phantomjs,bkrukowski/phantomjs,NickelMedia/phantomjs,ariya/phantomjs,pigshell/nhnick,raff/phantomjs,apanda/phantomjs-intercept,S11001001/phantomjs,JamesMGreene/phantomjs,liorvh/phantomjs,christoph-buente/
phantomjs,attilahorvath/phantomjs,NickelMedia/phantomjs,iradul/phantomjs-clone,yoki/phantomjs,pigshell/nhnick,VinceZK/phantomjs,bmotlaghFLT/FLT_PhantomJS,tmuelle2/phantomjs,pbrazdil/phantomjs,dhendo/phantomjs,saisai/phantomjs,kyroskoh/phantomjs,klim-iv/phantomjs-qt5,cloudflare/phantomjs,jillesme/phantomjs,you21979/phantomjs,NickelMedia/phantomjs,VinceZK/phantomjs,tmuelle2/phantomjs,iradul/phantomjs-clone,Medium/phantomjs-1,paulfitz/phantomjs,Medium/phantomjs-1,bkrukowski/phantomjs,RobertoMalatesta/phantomjs,etiennekruger/phantomjs-qt5,youprofit/phantomjs,chauhanmohit/phantomjs,liorvh/phantomjs,ChrisAntaki/phantomjs,linjeffrey/phantomjs,chylli/phantomjs,saisai/phantomjs,petermat/phantomjs,pataquets/phantomjs,sxhao/phantomjs,shinate/phantomjs,woodpecker1/phantomjs,dhendo/phantomjs,Observer-Wu/phantomjs,iver333/phantomjs,Andrey-Pavlov/phantomjs,Deepakpatle/phantomjs,likaiwalkman/phantomjs,ariya/phantomjs,revolutionaryG/phantomjs,MeteorAdminz/phantomjs,chylli/phantomjs,ChrisAntaki/phantomjs,kyroskoh/phantomjs,StevenBlack/phantomjs,jorik041/phantomjs,thomasrogers03/phantomjs,toanalien/phantomjs,tianzhihen/phantomjs,AladdinSonni/phantomjs,attilahorvath/phantomjs,VinceZK/phantomjs,Lochlan/phantomjs,lattwood/phantomjs,ezoic/phantomjs,woodpecker1/phantomjs,VinceZK/phantomjs,cesarmarinhorj/phantomjs,iradul/phantomjs,martonw/phantomjs,yoki/phantomjs,Klaudit/phantomjs,peakji/phantomjs,peakji/phantomjs,woodpecker1/phantomjs,revolutionaryG/phantomjs,angelman/phantomjs,Klaudit/phantomjs,klim-iv/phantomjs-qt5,jjyycchh/phantomjs,etiennekruger/phantomjs-qt5,zhulin2609/phantomjs,matepeter90/phantomjs,PeterWangPo/phantomjs,gskachkov/phantomjs,klim-iv/phantomjs-qt5,zackw/phantomjs,fxtentacle/phantomjs,jkburges/phantomjs,linjeffrey/phantomjs,jkburges/phantomjs,S11001001/phantomjs,gskachkov/phantomjs,paulfitz/phantomjs,DocuSignDev/phantomjs,tmuelle2/phantomjs,dhendo/phantomjs,revolutionaryG/phantomjs,Vitallium/phantomjs,jkenn99/phantomjs,asrie/phantomjs,woodpecker1/phantomjs,gskachkov/pha
ntomjs,nin042/phantomjs,Medium/phantomjs-1,fxtentacle/phantomjs,iradul/phantomjs,delighted/phantomjs,danigonza/phantomjs,PeterWangPo/phantomjs,etiennekruger/phantomjs-qt5,tmuelle2/phantomjs,VinceZK/phantomjs,cirrusone/phantom2,fxtentacle/phantomjs,kinwahlai/phantomjs-ghostdriver,matepeter90/phantomjs,Tomtomgo/phantomjs,linjeffrey/phantomjs,chirilo/phantomjs,webmull/phantomjs,vietch2612/phantomjs,pbrazdil/phantomjs,sharma1nitish/phantomjs,skyeckstrom/phantomjs,zhengyongbo/phantomjs,jkenn99/phantomjs,revolutionaryG/phantomjs,kyroskoh/phantomjs,xsyntrex/phantomjs,zackw/phantomjs,chirilo/phantomjs,angelman/phantomjs,dhendo/phantomjs,unb-libraries/phantomjs,eugene1g/phantomjs,grevutiu-gabriel/phantomjs,jefleponot/phantomjs,mattvick/phantomjs,pigshell/nhnick,jdar/phantomjs-modified,iver333/phantomjs,Deepakpatle/phantomjs,klim-iv/phantomjs-qt5,skyeckstrom/phantomjs,MaDKaTZe/phantomjs,skyeckstrom/phantomjs,jorik041/phantomjs,cirrusone/phantom2,delighted/phantomjs,neraliu/tpjs,wxkdesky/phantomjs,mattvick/phantomjs,Tomtomgo/phantomjs,unb-libraries/phantomjs,chylli/phantomjs,yoki/phantomjs,OCForks/phantomjs,vietch2612/phantomjs,jjyycchh/phantomjs,bprodoehl/phantomjs,joomel1/phantomjs,fentas/phantomjs,unb-libraries/phantomjs,vegetableman/phantomjs,shinate/phantomjs,ariya/phantomjs,jdar/phantomjs-modified,youprofit/phantomjs,klim-iv/phantomjs-qt5,dongritengfei/phantomjs,pbrazdil/phantomjs,neraliu/tainted-phantomjs,iradul/phantomjs,chauhanmohit/phantomjs,zhengyongbo/phantomjs,smasala/phantomjs,S11001001/phantomjs,liorvh/phantomjs,Deepakpatle/phantomjs,xsyntrex/phantomjs,lseyesl/phantomjs,asrie/phantomjs,MaDKaTZe/phantomjs,delighted/phantomjs,chauhanmohit/phantomjs,Andrey-Pavlov/phantomjs,iradul/phantomjs,wuxianghou/phantomjs,dhendo/phantomjs,wxkdesky/phantomjs,martonw/phantomjs,mattvick/phantomjs,sxhao/phantomjs,lseyesl/phantomjs,Klaudit/phantomjs,bjko/phantomjs,sxhao/phantomjs,forzi/phantomjs_stradivari_fork,bmotlaghFLT/FLT_PhantomJS,Deepakpatle/phantomjs,gitromand/phantomjs,mat
epeter90/phantomjs,paulfitz/phantomjs,gskachkov/phantomjs,houzhenggang/phantomjs,webmull/phantomjs,klickagent/phantomjs,mapbased/phantomjs,lattwood/phantomjs,fentas/phantomjs,wuxianghou/phantomjs,Deepakpatle/phantomjs,nicksay/phantomjs,asrie/phantomjs,jefleponot/phantomjs,farhi-naz/phantomjs,xsyntrex/phantomjs,Klaudit/phantomjs,revolutionaryG/phantomjs,dongritengfei/phantomjs,sxhao/phantomjs,pcarrier-packaging/deb-phantomjs,MeteorAdminz/phantomjs,Klaudit/phantomjs,jguyomard/phantomjs,peakji/phantomjs,lseyesl/phantomjs,chirilo/phantomjs,mark-ignacio/phantomjs,yoki/phantomjs,sporttech/phantomjs,hexid/phantomjs,Tomtomgo/phantomjs,chylli/phantomjs,linjeffrey/phantomjs,ixiom/phantomjs,pataquets/phantomjs,jefleponot/phantomjs,eceglov/phantomjs,Lkhagvadelger/phantomjs,vietch2612/phantomjs,saisai/phantomjs,Vitallium/phantomjs,matepeter90/phantomjs,liorvh/phantomjs,apanda/phantomjs-intercept,dparshin/phantomjs,S11001001/phantomjs,avinashkunuje/phantomjs,paulfitz/phantomjs,likaiwalkman/phantomjs,jorik041/phantomjs,jdar/phantomjs-modified,dhendo/phantomjs,bettiolo/phantomjs,tinfoil/phantomjs,AladdinSonni/phantomjs,DocuSignDev/phantomjs,attilahorvath/phantomjs,iver333/phantomjs,matepeter90/phantomjs,jdar/phantomjs-modified,jillesme/phantomjs,farhi-naz/phantomjs,pigshell/nhnick,farhi-naz/phantomjs,kyroskoh/phantomjs,delighted/phantomjs,zhulin2609/phantomjs,grevutiu-gabriel/phantomjs,jdar/phantomjs-modified,apanda/phantomjs-intercept,woodpecker1/phantomjs,sharma1nitish/phantomjs,apanda/phantomjs-intercept,etiennekruger/phantomjs-qt5,aljscott/phantomjs,ixiom/phantomjs,iradul/phantomjs,eceglov/phantomjs,Tomtomgo/phantomjs,tinfoil/phantomjs,admetricks/phantomjs,brandingbrand/phantomjs,attilahorvath/phantomjs,grevutiu-gabriel/phantomjs,tinfoil/phantomjs,neraliu/tainted-phantomjs,toanalien/phantomjs,cesarmarinhorj/phantomjs,neraliu/tpjs,iradul/phantomjs-clone,tianzhihen/phantomjs,paulfitz/phantomjs,Vitallium/phantomjs,tianzhihen/phantomjs,aljscott/phantomjs,eugene1g/phantomjs,petermat
/phantomjs,delighted/phantomjs,wxkdesky/phantomjs,RobertoMalatesta/phantomjs,lattwood/phantomjs,nin042/phantomjs,kyroskoh/phantomjs,klickagent/phantomjs,Lkhagvadelger/phantomjs,jjyycchh/phantomjs,zhulin2609/phantomjs,aljscott/phantomjs,Lochlan/phantomjs,jillesme/phantomjs,gskachkov/phantomjs,mattvick/phantomjs,toanalien/phantomjs,klim-iv/phantomjs-qt5,mapbased/phantomjs,jillesme/phantomjs,forzi/phantomjs_stradivari_fork,mark-ignacio/phantomjs,ChrisAntaki/phantomjs,astefanutti/phantomjs,bukalov/phantomjs,rishilification/phantomjs,jdar/phantomjs-modified,asrie/phantomjs,revolutionaryG/phantomjs,fentas/phantomjs,jillesme/phantomjs,webmull/phantomjs,kyroskoh/phantomjs,ixiom/phantomjs,delighted/phantomjs,DocuSignDev/phantomjs,youprofit/phantomjs,cloudflare/phantomjs,astefanutti/phantomjs,skyeckstrom/phantomjs,lattwood/phantomjs,ixiom/phantomjs,klickagent/phantomjs,eceglov/phantomjs,revolutionaryG/phantomjs,Lochlan/phantomjs,ramanajee/phantomjs,martonw/phantomjs,linjeffrey/phantomjs,neraliu/tainted-phantomjs,ye11ow/phantomjs,aljscott/phantomjs,RobertoMalatesta/phantomjs,apanda/phantomjs-intercept,sharma1nitish/phantomjs,Andrey-Pavlov/phantomjs,thomasrogers03/phantomjs,petermat/phantomjs,cloudflare/phantomjs,tmuelle2/phantomjs,mattvick/phantomjs,iradul/phantomjs,paulfitz/phantomjs,smasala/phantomjs,zhengyongbo/phantomjs,DocuSignDev/phantomjs,bprodoehl/phantomjs,fxtentacle/phantomjs,bukalov/phantomjs,saisai/phantomjs,chauhanmohit/phantomjs,thomasrogers03/phantomjs,JingZhou0404/phantomjs,sporttech/phantomjs,pbrazdil/phantomjs,webmull/phantomjs,iver333/phantomjs,chylli/phantomjs,gitromand/phantomjs,Observer-Wu/phantomjs,nin042/phantomjs,forzi/phantomjs_stradivari_fork,mark-ignacio/phantomjs,sharma1nitish/phantomjs,jdar/phantomjs-modified,thomasrogers03/phantomjs,sporttech/phantomjs,StevenBlack/phantomjs,VinceZK/phantomjs,bjko/phantomjs,likaiwalkman/phantomjs,S11001001/phantomjs,aljscott/phantomjs,zackw/phantomjs,pataquets/phantomjs,jkenn99/phantomjs,zhulin2609/phantomjs,djmaz
e/phantomjs,brandingbrand/phantomjs,kinwahlai/phantomjs-ghostdriver,lattwood/phantomjs,raff/phantomjs,chauhanmohit/phantomjs,skyeckstrom/phantomjs,joomel1/phantomjs,StevenBlack/phantomjs,mattvick/phantomjs,OCForks/phantomjs,jjyycchh/phantomjs,lseyesl/phantomjs,aljscott/phantomjs,lattwood/phantomjs,grevutiu-gabriel/phantomjs,forzi/phantomjs_stradivari_fork,skyeckstrom/phantomjs,danigonza/phantomjs,RobertoMalatesta/phantomjs,hexid/phantomjs,bettiolo/phantomjs,raff/phantomjs,zhengyongbo/phantomjs,Klaudit/phantomjs,saisai/phantomjs,Lkhagvadelger/phantomjs,farhi-naz/phantomjs,ye11ow/phantomjs,cirrusone/phantom2,asrie/phantomjs,nicksay/phantomjs,etiennekruger/phantomjs-qt5,pataquets/phantomjs,bukalov/phantomjs,xsyntrex/phantomjs,you21979/phantomjs,MaDKaTZe/phantomjs,vietch2612/phantomjs,iver333/phantomjs,forzi/phantomjs_stradivari_fork,JamesMGreene/phantomjs,sharma1nitish/phantomjs,jkburges/phantomjs,Tomtomgo/phantomjs,Vitallium/phantomjs,martonw/phantomjs,mark-ignacio/phantomjs,ye11ow/phantomjs,cesarmarinhorj/phantomjs,viewdy/phantomjs2,tmuelle2/phantomjs,farhi-naz/phantomjs,Dinamize/phantomjs,ChrisAntaki/phantomjs,peakji/phantomjs,MaDKaTZe/phantomjs,Klaudit/phantomjs,viewdy/phantomjs2,pbrazdil/phantomjs,shinate/phantomjs,ChrisAntaki/phantomjs,cloudflare/phantomjs,martonw/phantomjs,MaDKaTZe/phantomjs,jkburges/phantomjs,zackw/phantomjs,eceglov/phantomjs,Klaudit/phantomjs,cloudflare/phantomjs,woodpecker1/phantomjs,jorik041/phantomjs,kyroskoh/phantomjs,yoki/phantomjs,StevenBlack/phantomjs,nin042/phantomjs,avinashkunuje/phantomjs,sporttech/phantomjs,wxkdesky/phantomjs,neraliu/tainted-phantomjs,thomasrogers03/phantomjs,jkenn99/phantomjs,sxhao/phantomjs,bprodoehl/phantomjs,MeteorAdminz/phantomjs,revolutionaryG/phantomjs,wuxianghou/phantomjs,vegetableman/phantomjs,VinceZK/phantomjs,bjko/phantomjs,markhu/phantomjs,cirrusone/phantom2,vegetableman/phantomjs,tinfoil/phantomjs,bkrukowski/phantomjs,neraliu/tpjs,delighted/phantomjs,djmaze/phantomjs,MeteorAdminz/phantomjs,chirilo/phant
omjs,rishilification/phantomjs,pcarrier-packaging/deb-phantomjs,chirilo/phantomjs,eceglov/phantomjs,dparshin/phantomjs,PeterWangPo/phantomjs,sharma1nitish/phantomjs,fxtentacle/phantomjs,bjko/phantomjs,OCForks/phantomjs,nicksay/phantomjs,Observer-Wu/phantomjs,r3b/phantomjs,christoph-buente/phantomjs,NickelMedia/phantomjs,rishilification/phantomjs,chirilo/phantomjs,rishilification/phantomjs,danigonza/phantomjs,linjeffrey/phantomjs,martonw/phantomjs,christoph-buente/phantomjs,Andrey-Pavlov/phantomjs,cesarmarinhorj/phantomjs,gitromand/phantomjs,you21979/phantomjs,NickelMedia/phantomjs,Observer-Wu/phantomjs,mark-ignacio/phantomjs,ChrisAntaki/phantomjs,ixiom/phantomjs,bmotlaghFLT/FLT_PhantomJS,Lochlan/phantomjs,lseyesl/phantomjs,jefleponot/phantomjs,Dinamize/phantomjs,shinate/phantomjs,ixiom/phantomjs,Klaudit/phantomjs,iradul/phantomjs,bettiolo/phantomjs,hexid/phantomjs,matepeter90/phantomjs,iradul/phantomjs-clone,webmull/phantomjs,asrie/phantomjs,bmotlaghFLT/FLT_PhantomJS,eugene1g/phantomjs,pataquets/phantomjs,liorvh/phantomjs,jguyomard/phantomjs,bkrukowski/phantomjs,pataquets/phantomjs,neraliu/tpjs,eugene1g/phantomjs,bprodoehl/phantomjs,Andrey-Pavlov/phantomjs,jkburges/phantomjs,fentas/phantomjs,youprofit/phantomjs,sxhao/phantomjs,fentas/phantomjs,tianzhihen/phantomjs,markhu/phantomjs,woodpecker1/phantomjs,zackw/phantomjs,angelman/phantomjs,cirrusone/phantom2,thomasrogers03/phantomjs,shinate/phantomjs,cesarmarinhorj/phantomjs,danigonza/phantomjs,yoki/phantomjs,djmaze/phantomjs,mapbased/phantomjs,StevenBlack/phantomjs,ChrisAntaki/phantomjs,djmaze/phantomjs,houzhenggang/phantomjs,petermat/phantomjs,chauhanmohit/phantomjs,jjyycchh/phantomjs,admetricks/phantomjs,saisai/phantomjs,shinate/phantomjs,r3b/phantomjs,iver333/phantomjs,nin042/phantomjs,Dinamize/phantomjs,zhulin2609/phantomjs,toanalien/phantomjs,attilahorvath/phantomjs,dhendo/phantomjs,eugene1g/phantomjs,AladdinSonni/phantomjs,viewdy/phantomjs2,houzhenggang/phantomjs,sporttech/phantomjs,iver333/phantomjs,etiennekrug
er/phantomjs-qt5,asrie/phantomjs,yoki/phantomjs,dparshin/phantomjs,sxhao/phantomjs,jorik041/phantomjs,dhendo/phantomjs,Medium/phantomjs-1,raff/phantomjs,cloudflare/phantomjs,markhu/phantomjs,viewdy/phantomjs2,matepeter90/phantomjs,Dinamize/phantomjs,pigshell/nhnick,houzhenggang/phantomjs,admetricks/phantomjs,neraliu/tainted-phantomjs,NickelMedia/phantomjs,bprodoehl/phantomjs,klickagent/phantomjs,jorik041/phantomjs,AladdinSonni/phantomjs,AladdinSonni/phantomjs,ezoic/phantomjs,bettiolo/phantomjs,aljscott/phantomjs,djmaze/phantomjs,Lochlan/phantomjs,hexid/phantomjs,JingZhou0404/phantomjs,StevenBlack/phantomjs,fxtentacle/phantomjs,rishilification/phantomjs,tinfoil/phantomjs,mark-ignacio/phantomjs,AladdinSonni/phantomjs,lseyesl/phantomjs,Andrey-Pavlov/phantomjs,pcarrier-packaging/deb-phantomjs,StevenBlack/phantomjs,mapbased/phantomjs,djmaze/phantomjs,r3b/phantomjs,houzhenggang/phantomjs,you21979/phantomjs,Lkhagvadelger/phantomjs,jjyycchh/phantomjs,Tomtomgo/phantomjs,jdar/phantomjs-modified,dongritengfei/phantomjs,iradul/phantomjs-clone,skyeckstrom/phantomjs,smasala/phantomjs,petermat/phantomjs,pbrazdil/phantomjs,angelman/phantomjs,bprodoehl/phantomjs,delighted/phantomjs,wuxianghou/phantomjs,saisai/phantomjs,mark-ignacio/phantomjs,markhu/phantomjs,bukalov/phantomjs,MeteorAdminz/phantomjs,ramanajee/phantomjs,ramanajee/phantomjs,JingZhou0404/phantomjs,bettiolo/phantomjs,cirrusone/phantom2,ezoic/phantomjs,lseyesl/phantomjs,mapbased/phantomjs,raff/phantomjs,ramanajee/phantomjs,bmotlaghFLT/FLT_PhantomJS,ezoic/phantomjs,kyroskoh/phantomjs,bprodoehl/phantomjs,OCForks/phantomjs,yoki/phantomjs,bmotlaghFLT/FLT_PhantomJS,tinfoil/phantomjs,Lkhagvadelger/phantomjs,JamesMGreene/phantomjs,lattwood/phantomjs,JingZhou0404/phantomjs,joomel1/phantomjs,tmuelle2/phantomjs,houzhenggang/phantomjs,bjko/phantomjs,bukalov/phantomjs,AladdinSonni/phantomjs,PeterWangPo/phantomjs,attilahorvath/phantomjs,christoph-buente/phantomjs,gskachkov/phantomjs,gitromand/phantomjs,admetricks/phantomjs,woodpecker1
/phantomjs,bjko/phantomjs,markhu/phantomjs,linjeffrey/phantomjs,petermat/phantomjs,zhulin2609/phantomjs,wuxianghou/phantomjs,liorvh/phantomjs,asrie/phantomjs,yoki/phantomjs,zackw/phantomjs,fxtentacle/phantomjs,yoki/phantomjs,dongritengfei/phantomjs,hexid/phantomjs,likaiwalkman/phantomjs,dongritengfei/phantomjs,klickagent/phantomjs,cesarmarinhorj/phantomjs,you21979/phantomjs,nicksay/phantomjs,StevenBlack/phantomjs,liorvh/phantomjs,neraliu/tainted-phantomjs,martonw/phantomjs,farhi-naz/phantomjs,markhu/phantomjs,shinate/phantomjs,raff/phantomjs,djmaze/phantomjs,jkenn99/phantomjs,wxkdesky/phantomjs,avinashkunuje/phantomjs,pataquets/phantomjs,cesarmarinhorj/phantomjs,you21979/phantomjs,mapbased/phantomjs,PeterWangPo/phantomjs,DocuSignDev/phantomjs,r3b/phantomjs,neraliu/tpjs,chylli/phantomjs,bettiolo/phantomjs,ChrisAntaki/phantomjs,Observer-Wu/phantomjs,raff/phantomjs,tmuelle2/phantomjs,vegetableman/phantomjs,zhulin2609/phantomjs,avinashkunuje/phantomjs,vegetableman/phantomjs,likaiwalkman/phantomjs,ramanajee/phantomjs,zackw/phantomjs,klim-iv/phantomjs-qt5,wxkdesky/phantomjs,cirrusone/phantom2,revolutionaryG/phantomjs,Deepakpatle/phantomjs,ramanajee/phantomjs,petermat/phantomjs,toanalien/phantomjs,likaiwalkman/phantomjs,cirrusone/phantom2,smasala/phantomjs,rishilification/phantomjs,PeterWangPo/phantomjs,fxtentacle/phantomjs,dparshin/phantomjs,Lkhagvadelger/phantomjs,jkenn99/phantomjs,toanalien/phantomjs,jguyomard/phantomjs,peakji/phantomjs,Lkhagvadelger/phantomjs,astefanutti/phantomjs,jorik041/phantomjs,vegetableman/phantomjs,PeterWangPo/phantomjs,gitromand/phantomjs,sxhao/phantomjs,petermat/phantomjs,jillesme/phantomjs,mapbased/phantomjs,lattwood/phantomjs,tmuelle2/phantomjs,cesarmarinhorj/phantomjs,unb-libraries/phantomjs,Tomtomgo/phantomjs,nicksay/phantomjs,nin042/phantomjs,delighted/phantomjs,S11001001/phantomjs,DocuSignDev/phantomjs,nicksay/phantomjs,paulfitz/phantomjs,bukalov/phantomjs,bmotlaghFLT/FLT_PhantomJS,iradul/phantomjs-clone,dongritengfei/phantomjs,pbrazdil/
phantomjs,dparshin/phantomjs,webmull/phantomjs,astefanutti/phantomjs,kinwahlai/phantomjs-ghostdriver,unb-libraries/phantomjs,neraliu/tpjs,martonw/phantomjs,bjko/phantomjs,grevutiu-gabriel/phantomjs,dongritengfei/phantomjs,angelman/phantomjs,mark-ignacio/phantomjs,kinwahlai/phantomjs-ghostdriver,skyeckstrom/phantomjs,Lochlan/phantomjs,OCForks/phantomjs,angelman/phantomjs,thomasrogers03/phantomjs,aljscott/phantomjs,lseyesl/phantomjs,cirrusone/phantom2,Deepakpatle/phantomjs,DocuSignDev/phantomjs,fentas/phantomjs,kyroskoh/phantomjs,toanalien/phantomjs,MaDKaTZe/phantomjs,jillesme/phantomjs,saisai/phantomjs,RobertoMalatesta/phantomjs,ye11ow/phantomjs,rishilification/phantomjs,OCForks/phantomjs,tinfoil/phantomjs,grevutiu-gabriel/phantomjs,ixiom/phantomjs,christoph-buente/phantomjs,pataquets/phantomjs,aljscott/phantomjs,nin042/phantomjs,xsyntrex/phantomjs,mattvick/phantomjs,chylli/phantomjs,youprofit/phantomjs,tianzhihen/phantomjs,smasala/phantomjs,pigshell/nhnick,you21979/phantomjs,JingZhou0404/phantomjs,jefleponot/phantomjs,VinceZK/phantomjs,NickelMedia/phantomjs,MeteorAdminz/phantomjs,neraliu/tainted-phantomjs,jguyomard/phantomjs,angelman/phantomjs,ariya/phantomjs,fentas/phantomjs,unb-libraries/phantomjs,nin042/phantomjs,cloudflare/phantomjs,jillesme/phantomjs,saisai/phantomjs,chylli/phantomjs,pbrazdil/phantomjs,ezoic/phantomjs,bkrukowski/phantomjs,neraliu/tpjs,sharma1nitish/phantomjs,djmaze/phantomjs,tianzhihen/phantomjs,youprofit/phantomjs,dongritengfei/phantomjs,JingZhou0404/phantomjs,brandingbrand/phantomjs,chirilo/phantomjs,rishilification/phantomjs,asrie/phantomjs,RobertoMalatesta/phantomjs,jkenn99/phantomjs,jorik041/phantomjs,jkburges/phantomjs,Deepakpatle/phantomjs,vietch2612/phantomjs,revolutionaryG/phantomjs,bprodoehl/phantomjs,jorik041/phantomjs,astefanutti/phantomjs,youprofit/phantomjs,MeteorAdminz/phantomjs,chauhanmohit/phantomjs,admetricks/phantomjs,brandingbrand/phantomjs,skyeckstrom/phantomjs,neraliu/tainted-phantomjs,youprofit/phantomjs,peakji/phantomjs,
fentas/phantomjs,dparshin/phantomjs,admetricks/phantomjs,StevenBlack/phantomjs,gitromand/phantomjs,bmotlaghFLT/FLT_PhantomJS,pigshell/nhnick,grevutiu-gabriel/phantomjs,bprodoehl/phantomjs,bkrukowski/phantomjs,OCForks/phantomjs,matepeter90/phantomjs,sharma1nitish/phantomjs,zhulin2609/phantomjs,jefleponot/phantomjs,jjyycchh/phantomjs,chirilo/phantomjs,joomel1/phantomjs,shinate/phantomjs,PeterWangPo/phantomjs,eugene1g/phantomjs,toanalien/phantomjs,attilahorvath/phantomjs,petermat/phantomjs,zackw/phantomjs,kinwahlai/phantomjs-ghostdriver,admetricks/phantomjs,webmull/phantomjs,pataquets/phantomjs,ezoic/phantomjs,JamesMGreene/phantomjs,nicksay/phantomjs,kinwahlai/phantomjs-ghostdriver,zhulin2609/phantomjs,Medium/phantomjs-1,farhi-naz/phantomjs,Lochlan/phantomjs,zhulin2609/phantomjs,unb-libraries/phantomjs,pcarrier-packaging/deb-phantomjs,klickagent/phantomjs,brandingbrand/phantomjs,xsyntrex/phantomjs,jdar/phantomjs-modified,viewdy/phantomjs2,bkrukowski/phantomjs,jjyycchh/phantomjs,petermat/phantomjs,Lkhagvadelger/phantomjs,PeterWangPo/phantomjs,fentas/phantomjs,attilahorvath/phantomjs,iradul/phantomjs,jorik041/phantomjs,joomel1/phantomjs,vietch2612/phantomjs,dparshin/phantomjs,jillesme/phantomjs,peakji/phantomjs,attilahorvath/phantomjs,AladdinSonni/phantomjs,jefleponot/phantomjs,aljscott/phantomjs,wuxianghou/phantomjs,jkburges/phantomjs,christoph-buente/phantomjs,cesarmarinhorj/phantomjs,liorvh/phantomjs,lseyesl/phantomjs,pigshell/nhnick,iradul/phantomjs-clone,JamesMGreene/phantomjs,Medium/phantomjs-1,vietch2612/phantomjs,bjko/phantomjs,matepeter90/phantomjs,bjko/phantomjs,Vitallium/phantomjs,shinate/phantomjs,jillesme/phantomjs,wuxianghou/phantomjs,AladdinSonni/phantomjs,smasala/phantomjs,linjeffrey/phantomjs,pcarrier-packaging/deb-phantomjs,christoph-buente/phantomjs,ariya/phantomjs,vegetableman/phantomjs,mattvick/phantomjs,NickelMedia/phantomjs,apanda/phantomjs-intercept,ye11ow/phantomjs,danigonza/phantomjs,smasala/phantomjs,Lochlan/phantomjs,viewdy/phantomjs2,ixiom/ph
antomjs,eugene1g/phantomjs,forzi/phantomjs_stradivari_fork,Observer-Wu/phantomjs,chylli/phantomjs,chirilo/phantomjs,cloudflare/phantomjs,Tomtomgo/phantomjs,Observer-Wu/phantomjs,gskachkov/phantomjs,mattvick/phantomjs,wxkdesky/phantomjs,ixiom/phantomjs,xsyntrex/phantomjs,MaDKaTZe/phantomjs,shinate/phantomjs,tmuelle2/phantomjs,ezoic/phantomjs,Medium/phantomjs-1,fxtentacle/phantomjs,r3b/phantomjs,vietch2612/phantomjs,bkrukowski/phantomjs,bettiolo/phantomjs,avinashkunuje/phantomjs,RobertoMalatesta/phantomjs,markhu/phantomjs,linjeffrey/phantomjs,angelman/phantomjs,JamesMGreene/phantomjs,grevutiu-gabriel/phantomjs,ixiom/phantomjs,thomasrogers03/phantomjs,ye11ow/phantomjs,iver333/phantomjs,jkenn99/phantomjs,gitromand/phantomjs,Observer-Wu/phantomjs,eceglov/phantomjs,jkburges/phantomjs,thomasrogers03/phantomjs,zhengyongbo/phantomjs,dongritengfei/phantomjs,gitromand/phantomjs,viewdy/phantomjs2,Observer-Wu/phantomjs,jefleponot/phantomjs,MeteorAdminz/phantomjs,jjyycchh/phantomjs,grevutiu-gabriel/phantomjs,Lochlan/phantomjs,brandingbrand/phantomjs,pbrazdil/phantomjs,christoph-buente/phantomjs,MaDKaTZe/phantomjs | python/pyphantomjs/cookiejar.py | python/pyphantomjs/cookiejar.py | '''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PyQt4.QtCore import QSettings
from PyQt4.QtNetwork import QNetworkCookie, QNetworkCookieJar
class CookieJar(QNetworkCookieJar):
    """Cookie jar that persists cookies to an INI file via QSettings.

    Cookies are grouped by host name; each cookie is stored as a
    ``name -> value`` entry inside its host's section.
    """

    def __init__(self, parent, cookiesFile):
        super(CookieJar, self).__init__(parent)
        # Path of the INI file used as the persistent cookie store.
        self.m_cookiesFile = cookiesFile

    def setCookiesFromUrl(self, cookieList, url):
        """Persist every cookie in *cookieList* under the section of *url*'s host."""
        store = QSettings(self.m_cookiesFile, QSettings.IniFormat)
        store.beginGroup(url.host())
        for c in cookieList:
            store.setValue(str(c.name()), str(c.value()))
        store.sync()
        return True

    def cookiesForUrl(self, url):
        """Return the cookies previously stored for *url*'s host."""
        store = QSettings(self.m_cookiesFile, QSettings.IniFormat)
        store.beginGroup(url.host())
        return [QNetworkCookie(key, store.value(key)) for key in store.childKeys()]
| '''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PyQt4.QtCore import QSettings
from PyQt4.QtNetwork import QNetworkCookie, QNetworkCookieJar
class CookieJar(QNetworkCookieJar):
    """Cookie jar that persists cookies to an INI file via QSettings.

    Cookies are grouped by host name; each cookie is stored as a
    ``name -> value`` entry inside its host's section.
    """

    def __init__(self, parent, cookiesFile):
        super(CookieJar, self).__init__(parent)
        # Path of the INI file backing the persistent cookie store.
        self.m_cookiesFile = cookiesFile

    def setCookiesFromUrl(self, cookieList, url):
        """Persist every cookie in *cookieList* under the section of *url*'s host."""
        settings = QSettings(self.m_cookiesFile, QSettings.IniFormat)
        settings.beginGroup(url.host())
        for cookie in cookieList:
            # Store the value as a plain string too (matching the name):
            # QNetworkCookie.value() returns a QByteArray, which QSettings
            # would otherwise serialize as an opaque QVariant instead of a
            # readable INI entry.
            settings.setValue(str(cookie.name()), str(cookie.value()))
        settings.sync()
        return True

    def cookiesForUrl(self, url):
        """Return the cookies previously stored for *url*'s host."""
        settings = QSettings(self.m_cookiesFile, QSettings.IniFormat)
        cookieList = []
        settings.beginGroup(url.host())
        for cname in settings.childKeys():
            cookieList.append(QNetworkCookie(cname, settings.value(cname)))
        return cookieList
| bsd-3-clause | Python |
1c8bd21fe895260254684d3e2b2f9f5b70fdb91f | Fix error msg | ExaScience/smurff,ExaScience/smurff,ExaScience/smurff,ExaScience/smurff,ExaScience/smurff,ExaScience/smurff | python/smurff/smurff/prepare.py | python/smurff/smurff/prepare.py | import numpy as np
import scipy as sp
import pandas as pd
import scipy.sparse
import numbers
from .helper import SparseTensor
def make_train_test(Y, ntest):
    """Split a sparse matrix Y into a train and a test matrix.

    Parameters
    ----------
    Y : scipy sparse matrix
        Matrix whose stored (non-zero) cells are partitioned.
    ntest : float or int
        If below 1.0, the ratio of stored cells to reserve for the test
        set; otherwise the absolute number of test cells.

    Returns
    -------
    (Ytrain, Ytest) : tuple of scipy.sparse.coo_matrix
        Disjoint matrices with the same shape as Y whose stored cells
        together are exactly the stored cells of Y.

    Raises
    ------
    TypeError
        If Y is not a scipy sparse matrix, or ntest is negative or not a
        number.
    """
    # Accept any scipy sparse format (coo/csr/csc/lil/...), not only the
    # three formats spelled out previously; tocoo() below normalizes it.
    if not sp.sparse.issparse(Y):
        raise TypeError("Unsupported Y type: " + str(type(Y)))
    if not isinstance(ntest, numbers.Real) or ntest < 0:
        raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
    Y = Y.tocoo(copy=False)
    if ntest < 1:
        ntest = Y.nnz * ntest
    ntest = int(round(ntest))
    # Random permutation of stored-cell indices; the first ntest go to test.
    rperm = np.random.permutation(Y.nnz)
    train = rperm[ntest:]
    test = rperm[0:ntest]
    Ytrain = sp.sparse.coo_matrix((Y.data[train], (Y.row[train], Y.col[train])), shape=Y.shape)
    Ytest = sp.sparse.coo_matrix((Y.data[test], (Y.row[test], Y.col[test])), shape=Y.shape)
    return Ytrain, Ytest
def make_train_test_df(Y, ntest, shape=None):
    """Split the rows of dataframe *Y* into train and test SparseTensors.

    *ntest* below 1.0 is interpreted as the ratio of rows to hold out,
    otherwise as the absolute number of held-out rows.  Returns the pair
    ``(Ytrain, Ytest)``; the test tensor inherits the train tensor's shape.
    """
    if type(Y) != pd.core.frame.DataFrame:
        raise TypeError("Y should be DataFrame.")
    if not isinstance(ntest, numbers.Real) or ntest < 0:
        raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
    nrows = Y.shape[0]
    if ntest < 1:
        ntest = nrows * ntest
    ntest = int(round(ntest))
    # Shuffle row positions once; the first ntest of them become the test set.
    order = np.random.permutation(nrows)
    test_idx, train_idx = order[:ntest], order[ntest:]
    Ytrain = SparseTensor(Y.iloc[train_idx], shape)
    Ytest = SparseTensor(Y.iloc[test_idx], Ytrain.shape)
    return Ytrain, Ytest
| import numpy as np
import scipy as sp
import pandas as pd
import scipy.sparse
import numbers
from .helper import SparseTensor
def make_train_test(Y, ntest):
    """Split a sparse matrix Y into a train and a test matrix.

    Parameters
    ----------
    Y : scipy.sparse coo_matrix, csr_matrix or csc_matrix
        Matrix whose stored (non-zero) cells are partitioned.
    ntest : float or int
        If below 1.0, the ratio of stored cells to reserve for the test
        set; otherwise the absolute number of test cells.

    Returns
    -------
    (Ytrain, Ytest) : tuple of scipy.sparse.coo_matrix
        Disjoint matrices with the same shape as Y whose stored cells
        together are exactly the stored cells of Y.

    Raises
    ------
    TypeError
        If Y has an unsupported type, or ntest is negative or not a number.
    """
    if type(Y) not in [sp.sparse.coo.coo_matrix, sp.sparse.csr.csr_matrix, sp.sparse.csc.csc_matrix]:
        # Fix: format the offending type into the message with %; the
        # previous `"...%s" + type(Y)` concatenation itself raised an
        # unrelated "can only concatenate str" TypeError.
        raise TypeError("Unsupported Y type: %s" % type(Y))
    if not isinstance(ntest, numbers.Real) or ntest < 0:
        raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
    Y = Y.tocoo(copy=False)
    if ntest < 1:
        ntest = Y.nnz * ntest
    ntest = int(round(ntest))
    # Random permutation of stored-cell indices; the first ntest go to test.
    rperm = np.random.permutation(Y.nnz)
    train = rperm[ntest:]
    test = rperm[0:ntest]
    Ytrain = sp.sparse.coo_matrix((Y.data[train], (Y.row[train], Y.col[train])), shape=Y.shape)
    Ytest = sp.sparse.coo_matrix((Y.data[test], (Y.row[test], Y.col[test])), shape=Y.shape)
    return Ytrain, Ytest
def make_train_test_df(Y, ntest, shape=None):
    """Split rows of dataframe *Y* into a train and a test SparseTensor.

    ntest < 1 is treated as the fraction of rows to hold out; otherwise it
    is the absolute number of test rows.  Returns ``(Ytrain, Ytest)``.
    """
    if type(Y) != pd.core.frame.DataFrame:
        raise TypeError("Y should be DataFrame.")
    if not isinstance(ntest, numbers.Real) or ntest < 0:
        raise TypeError("ntest has to be a non-negative number (number or ratio of test samples).")
    # Resolve a fractional ntest into an absolute row count.
    n_test_rows = ntest if ntest >= 1 else Y.shape[0] * ntest
    n_test_rows = int(round(n_test_rows))
    # One random permutation of row positions; head -> test, tail -> train.
    perm = np.random.permutation(Y.shape[0])
    Ytrain = SparseTensor(Y.iloc[perm[n_test_rows:]], shape)
    Ytest = SparseTensor(Y.iloc[perm[:n_test_rows]], Ytrain.shape)
    return Ytrain, Ytest
| mit | Python |
980b3eded1e06c8f152b873531273c1b0154a755 | Update Visualization-commandCenter.py | danjhenry/opencv-pythonbot | dataCenter/Visualization-commandCenter.py | dataCenter/Visualization-commandCenter.py | import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
import matplotlib.pyplot as plt
import pickle
# Read the bot/account name from the config file; the statistics pickle
# for that account is named after it.
with open('firefox-bot/config/iframe.txt', 'r') as loginInfo:
    newName = loginInfo.readline()
newName = newName.rstrip()  # strip the trailing newline left by readline()

def load_obj(name):
    """Load and return the pickled object stored at ``<name>.pkl``."""
    with open(name + '.pkl', 'rb') as f:
        return pickle.load(f)

# Per-account draw statistics; keys used below: 'draws', 'skill', 'super'.
stats = load_obj('firefox-bot/statistics/' + newName)
print(stats)
d = stats['draws']  # total number of draws performed
comItems = ('skill', 'super')  # item categories to plot
y_pos = np.arange(len(comItems))  # x tick positions, one per category
width=(1/5)  # bar width
# One bar per category; the legend label carries the drop percentage.
for index, item in enumerate(comItems):
    plt.bar(index, stats[item], width, label=item + ' ' + str(round((stats[item]/d)*100, 3)) + '%')
    #' frequency: 1 / ' + str(round(spins/stats[item])))
    if(stats[item]):
        # Average number of draws needed per collected item of this category.
        print(item, '1 out of ', round(d/stats[item]), ' draws')
plt.legend(loc='best')
plt.xticks(y_pos, comItems)
plt.ylabel('total collected')
plt.xlabel('items')
plt.title('totalDraws: ' + str(int(d)))
plt.show()
| import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy as np
import matplotlib.pyplot as plt
import pickle
# Read the bot/account name from the config file; the statistics pickle
# for that account is named after it.
with open('firefox-bot/config/iframe.txt', 'r') as loginInfo:
    newName = loginInfo.readline()
newName = newName.rstrip()  # strip the trailing newline left by readline()

def load_obj(name):
    """Load and return the pickled object stored at ``<name>.pkl``."""
    with open(name + '.pkl', 'rb') as f:
        return pickle.load(f)

# Fix: a stray quote after `newName` made this line a SyntaxError
# (was: load_obj('firefox-bot/statistics/' + newName')).
stats = load_obj('firefox-bot/statistics/' + newName)
print(stats)
d = stats['draws']  # total number of draws performed
comItems = ('skill', 'super')  # item categories to plot
y_pos = np.arange(len(comItems))  # x tick positions, one per category
width = (1/5)  # bar width
# One bar per category; the legend label carries the drop percentage.
for index, item in enumerate(comItems):
    plt.bar(index, stats[item], width, label=item + ' ' + str(round((stats[item]/d)*100, 3)) + '%')
    #' frequency: 1 / ' + str(round(spins/stats[item])))
    if(stats[item]):
        # Average number of draws needed per collected item of this category.
        print(item, '1 out of ', round(d/stats[item]), ' draws')
plt.legend(loc='best')
plt.xticks(y_pos, comItems)
plt.ylabel('total collected')
plt.xlabel('items')
plt.title('totalDraws: ' + str(int(d)))
plt.show()
| mit | Python |
18be6e0d3ee656f150e54bc0abe3959d92e2b35c | add message for script completion to dashboard | architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst,architecture-building-systems/CityEnergyAnalyst | cea/api.py | cea/api.py | """
Provide access to the scripts exported by the City Energy Analyst.
"""
from __future__ import print_function
import datetime
def register_scripts():
    """Create a module-level Python function for every known CEA script.

    Each script listed by ``cea.scripts.list_scripts()`` is wrapped in a
    runner function and published in this module's globals under the
    script's name with dashes replaced by underscores, so users can call
    e.g. ``cea.api.demand(config, scenario=...)``.
    """
    # Imported lazily so importing cea.api stays cheap until registration runs.
    import cea.config
    import cea.scripts
    import importlib
    # Shared default configuration, captured by every script_runner below.
    config = cea.config.Configuration()
    def script_wrapper(cea_script):
        # script_wrapper is invoked immediately per script, so each runner
        # captures its own cea_script (no late-binding closure pitfall).
        module_path = cea_script.module
        script_module = importlib.import_module(module_path)
        def script_runner(config=config, **kwargs):
            # Restrict the configuration to this script's parameters and
            # apply any keyword-argument overrides before running it.
            option_list = cea_script.parameters
            config.restrict_to(option_list)
            for section, parameter in config.matching_parameters(option_list):
                parameter_py_name = parameter.name.replace('-', '_')
                if parameter_py_name in kwargs:
                    parameter.set(kwargs[parameter_py_name])
            # run the script
            cea_script.print_script_configuration(config)
            t0 = datetime.datetime.now()
            script_module.main(config)
            # print success message
            msg = "Script completed. Execution time: %.2fs" % (datetime.datetime.now() - t0).total_seconds()
            print("")
            print("-" * len(msg))
            print(msg)
        # Reuse the script module's docstring for the generated function.
        if script_module.__doc__:
            script_runner.__doc__ = script_module.__doc__.strip()
        else:
            script_runner.__doc__ = 'FIXME: Add API documentation to {}'.format(module_path)
        return script_runner
    for cea_script in sorted(cea.scripts.list_scripts()):
        script_py_name = cea_script.name.replace('-', '_')
        globals()[script_py_name] = script_wrapper(cea_script)
register_scripts()
if __name__ == '__main__':
    # `demand` is one of the functions injected above by register_scripts().
    print(demand.__doc__)
Provide access to the scripts exported by the City Energy Analyst.
"""
from __future__ import print_function
def register_scripts():
    """Create a module-level Python function for every known CEA script.

    Each script listed by ``cea.scripts.list_scripts()`` is wrapped in a
    runner function and published in this module's globals under the
    script's name with dashes replaced by underscores.
    """
    # Imported lazily so importing cea.api stays cheap until registration runs.
    import cea.config
    import cea.scripts
    import importlib
    # Shared default configuration, captured by every script_runner below.
    config = cea.config.Configuration()
    def script_wrapper(cea_script):
        # Invoked immediately per script, so each runner captures its own
        # cea_script (no late-binding closure pitfall).
        module_path = cea_script.module
        script_module = importlib.import_module(module_path)
        def script_runner(config=config, **kwargs):
            # Restrict the configuration to this script's parameters and
            # apply any keyword-argument overrides before running it.
            option_list = cea_script.parameters
            config.restrict_to(option_list)
            for section, parameter in config.matching_parameters(option_list):
                parameter_py_name = parameter.name.replace('-', '_')
                if parameter_py_name in kwargs:
                    parameter.set(kwargs[parameter_py_name])
            # run the script
            cea_script.print_script_configuration(config)
            script_module.main(config)
        # Reuse the script module's docstring for the generated function.
        if script_module.__doc__:
            script_runner.__doc__ = script_module.__doc__.strip()
        else:
            script_runner.__doc__ = 'FIXME: Add API documentation to {}'.format(module_path)
        return script_runner
    for cea_script in sorted(cea.scripts.list_scripts()):
        script_py_name = cea_script.name.replace('-', '_')
        globals()[script_py_name] = script_wrapper(cea_script)
register_scripts()
if __name__ == '__main__':
    # `demand` is one of the functions injected above by register_scripts().
    print(demand.__doc__)
ef5c049a4c32e69c9ce88c958ae8272bdfddeba4 | Add area info in check price result | skooby2011/crawler_lianjia | check_price.py | check_price.py | # -*- coding:utf-8 -*-
import pymysql
import pymysql.cursors
from prettytable import PrettyTable
from colorama import init, Fore
import pdb
database_name = "house_price_04"
# Open the database connection.
# NOTE(review): credentials are hard-coded in source; move them to a config
# file or environment variables before sharing/deploying this script.
db = pymysql.connect("localhost", "root", "aB123456", database_name, charset='utf8mb4')
# Cursor shared by all queries below.
cursor = db.cursor()
# Accumulates matching rows across all area tables:
# each entry is [area, id, name, price, on_sale].
data = []

def main():
    """Prompt for an estate name and print matching rows from every area table."""
    global check_name
    check_name = input("请输入小区名称:")
    header = '地区 id 小区名称 价格 在售'.split()
    pt = PrettyTable()
    # Use the public attribute instead of the private _set_field_names helper.
    pt.field_names = header
    # Query every area table, collecting matches into the module-level `data`.
    for table in show_tables():
        select_info(table)
    for row in data:
        pt.add_row([
            Fore.GREEN + row[0] + Fore.RESET,
            row[1],
            Fore.GREEN + row[2] + Fore.RESET,
            Fore.RED + str(row[3]) + Fore.RESET,
            row[4],
        ])
    print(pt)
def show_tables():
    """Return all table names in the current database as a tuple of rows.

    On query failure an error is printed and an empty tuple is returned.
    (Previously `tables` stayed unbound on failure, turning the error into
    a NameError at the return statement.)
    """
    tables = ()
    try:
        cursor.execute("show tables;")
        tables = cursor.fetchall()
    except Exception as e:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit escape.
        print("Error: unable to fetch table data:", e)
    return tables
def select_info(table):
    """Collect rows of one area table whose estate name contains check_name.

    :param table: a 1-tuple row from show_tables(), e.g. ('chaoyang',).
    Matching rows are appended to the module-level `data` list as
    ``[area, id, name, price, on_sale]``.
    """
    # %-formatting a 1-tuple interpolates its single element (the table
    # name).  Identifiers cannot be bound as query parameters; the name
    # comes from SHOW TABLES, not from user input, so this is acceptable.
    sql = "SELECT * FROM %s;" % table
    try:
        # 执行SQL语句 and fetch all records
        cursor.execute(sql)
        for row in cursor.fetchall():
            name = row[1]
            if check_name in name:
                rowList = list(row)
                rowList.insert(0, table[0])  # prefix the area (table) name
                data.append(rowList)
    except Exception as e:
        # Narrowed from a bare except; also fixed the garbled message text.
        print("Error: unable to fetch 小区 data:", e)
if __name__ == '__main__':
main() | # -*- coding:utf-8 -*-
import pymysql
import pymysql.cursors
from prettytable import PrettyTable
from colorama import init, Fore
database_name = "house_price_04"
# Open the database connection.
# NOTE(review): credentials are hard-coded in source; move them to a config
# file or environment variables before sharing/deploying this script.
db = pymysql.connect("localhost", "root", "aB123456", database_name, charset='utf8mb4')
# Cursor shared by all queries below.
cursor = db.cursor()
# Accumulates matching rows across all tables: (id, name, price, on_sale).
data = []

def main():
    """Prompt for an estate name and print matching rows from every table."""
    # Prompt inside main() instead of at import time, so importing this
    # module no longer blocks on stdin; select_info() reads the global.
    global check_name
    check_name = input("请输入小区名称:")
    header = 'id 小区名称 价格 在售'.split()
    pt = PrettyTable()
    # Use the public attribute instead of the private _set_field_names helper.
    pt.field_names = header
    # Query every table, collecting matches into the module-level `data`.
    for table in show_tables():
        select_info(table)
    for row in data:
        # (the previous unused `row_list = list(row)` was removed)
        pt.add_row([
            row[0],
            Fore.GREEN + row[1] + Fore.RESET,
            Fore.RED + str(row[2]) + Fore.RESET,
            row[3],
        ])
    print(pt)
def show_tables():
    """Return all table names in the current database as a tuple of rows.

    On query failure an error is printed and an empty tuple is returned.
    (Previously `tables` stayed unbound on failure, turning the error into
    a NameError at the return statement.)
    """
    tables = ()
    try:
        cursor.execute("show tables;")
        tables = cursor.fetchall()
    except Exception as e:
        # Narrowed from a bare except so KeyboardInterrupt/SystemExit escape.
        print("Error: unable to fetch data:", e)
    return tables
def select_info(table):
    """Collect rows of one table whose estate name contains check_name.

    :param table: a 1-tuple row from show_tables(), e.g. ('chaoyang',).
    Matching rows (id, name, price, on_sale) are appended to the
    module-level `data` list.
    """
    # %-formatting a 1-tuple interpolates its single element (the table
    # name).  Identifiers cannot be bound as query parameters; the name
    # comes from SHOW TABLES, not from user input, so this is acceptable.
    sql = "SELECT * FROM %s;" % table
    try:
        # 执行SQL语句, then fetch all records
        cursor.execute(sql)
        results = cursor.fetchall()
        for row in results:
            name = row[1]
            if check_name in name:
                data.append(row)
    except Exception as e:
        # Narrowed from a bare except so real bugs are not silently hidden.
        print("Error: unable to fetch data:", e)
if __name__ == '__main__':
main() | apache-2.0 | Python |
94dfdbeae55d4c47c7b1161c68795429ebc0687a | fix pprintInterface for unit with array intf | Nic30/HWToolkit | hwt/simulator/utils.py | hwt/simulator/utils.py | from random import Random
import sys
from hwt.serializer.serializerClases.indent import getIndent
from hwt.synthesizer.interfaceLevel.interfaceUtils.proxy import InterfaceProxy
from hwt.synthesizer.interfaceLevel.mainBases import InterfaceBase
def valueHasChanged(valA, valB):
    """Return True if the two simulator values differ.

    Two values are considered equal only when their ``val`` payloads are
    the very same object (identity, not equality) and their validity
    masks compare equal.
    """
    same_val = valA.val is valB.val
    same_mask = valA.vldMask == valB.vldMask
    return not (same_val and same_mask)
def agent_randomize(agent, timeQuantum, seed):
    """Build a simulation process that randomly toggles *agent*.enable.

    The returned generator function first waits a quarter of *timeQuantum*
    (so agents can still be reconfigured while inactive), then forever
    picks a fresh 50/50 enable state and sleeps a random delay in
    ``[0, timeQuantum)``.  Determinism is controlled by *seed*.
    """
    rng = Random(seed)

    def randomEnProc(simulator):
        # Small settling period before the first toggle.
        yield simulator.wait(timeQuantum / 4)
        while True:
            agent.enable = rng.random() < 0.5
            yield simulator.wait(int(rng.random() * timeQuantum))

    return randomEnProc
def pprintInterface(intf, prefix="", indent=0, file=sys.stdout):
    """
    Pretty print interface

    Recursively writes one line per (sub)interface to *file*: indentation,
    an optional *prefix* label, the interface's full hierarchical name and,
    if present, the repr of its backing signal.

    :param intf: interface (or InterfaceProxy) to print
    :param prefix: label prepended to this interface's line (e.g. "p0:")
    :param indent: current nesting depth, translated to leading whitespace
    :param file: writable stream the output goes to
    """
    # Not every interface has a backing signal; fall back to an empty string.
    try:
        s = intf._sig
    except AttributeError:
        s = ""
    # NOTE(review): `is not ""` is an identity, not equality, comparison; it
    # presumably works because s is either the literal "" set just above or
    # a non-string signal object -- consider `!=` instead.
    if s is not "":
        s = " " + repr(s)
    file.write("".join([getIndent(indent), prefix, repr(intf._getFullName()), s]))
    file.write("\n")
    for i in intf._interfaces:
        # Proxies must contain only proxies; a mix indicates a construction bug.
        if isinstance(intf, InterfaceProxy):
            assert isinstance(i, InterfaceProxy), (intf, i)
        pprintInterface(i, indent=indent + 1, file=file)
    # Array interfaces: additionally print each array element ("p<i>:").
    if intf._arrayElemCache:
        assert len(intf) == len(intf._arrayElemCache)
        for i, p in enumerate(intf):
            pprintInterface(p, prefix="p%d:" % i, indent=indent + 1, file=file)
def pprintAgents(unitOrIntf, indent=0, prefix="", file=sys.stdout):
    """
    Pretty print the simulation agents attached to a unit or interface.

    Recursively walks sub-interfaces (and array-interface elements) and
    writes one line per agent found; an array interface without its own
    agent is printed as a "<name>:" header line instead.

    :param unitOrIntf: Unit or Interface instance to inspect
    :param indent: current nesting depth, translated to leading whitespace
    :param prefix: label prepended to this node's line (e.g. "p0:")
    :param file: writable stream the output goes to
    """
    # Units have no agent/array-element cache of their own; only interfaces do.
    if isinstance(unitOrIntf, InterfaceBase):
        ag = unitOrIntf._ag
        arrayElemCache = unitOrIntf._arrayElemCache
    else:
        ag = None
        arrayElemCache = None
    if ag is not None:
        file.write("%s%s%r\n" % (getIndent(indent), prefix, ag))
    elif arrayElemCache:
        # Array interface without a direct agent: print a header line only.
        file.write("%s%s\n" % (getIndent(indent), prefix + unitOrIntf._name + ":"))
    for i in unitOrIntf._interfaces:
        pprintAgents(i, indent + 1, file=file)
    # Array interfaces: additionally recurse into each element proxy.
    if arrayElemCache:
        assert len(unitOrIntf) == len(arrayElemCache)
        for i, p in enumerate(unitOrIntf):
            pprintAgents(p, indent + 1, prefix="p%d:" % i, file=file)
| from random import Random
import sys
from hwt.serializer.serializerClases.indent import getIndent
from hwt.synthesizer.interfaceLevel.interfaceUtils.proxy import InterfaceProxy
def valueHasChanged(valA, valB):
    """True unless both values share the very same ``val`` object (identity)
    and have equal validity masks."""
    if valA.val is valB.val:
        return valA.vldMask != valB.vldMask
    return True
def agent_randomize(agent, timeQuantum, seed):
    """Return a simulator process that randomly enables/disables *agent*.

    After an initial quiet period of ``timeQuantum / 4`` the process loops
    forever: it draws a 50/50 enable state for the agent and then waits a
    random delay in ``[0, timeQuantum)``.  *seed* makes the sequence
    reproducible.
    """
    r = Random(seed)

    def randomEnProc(simulator):
        # small space at start to modify agents when they are inactive
        yield simulator.wait(timeQuantum / 4)
        while True:
            enable = r.random() < 0.5
            agent.enable = enable
            wait_time = int(r.random() * timeQuantum)
            yield simulator.wait(wait_time)

    return randomEnProc
def pprintInterface(intf, prefix="", indent=0, file=sys.stdout):
    """
    Pretty print interface

    Recursively writes one line per (sub)interface to *file*: indentation,
    an optional *prefix* label, the interface's full hierarchical name and
    the repr of its backing signal (empty when there is none).

    :param intf: interface (or InterfaceProxy) to print
    :param prefix: label prepended to this interface's line (e.g. "p0:")
    :param indent: current nesting depth, translated to leading whitespace
    :param file: writable stream the output goes to
    """
    # Not every interface has a backing signal; fall back to an empty string.
    try:
        s = intf._sig
    except AttributeError:
        s = ""
    # NOTE(review): `is not ""` is an identity, not equality, comparison;
    # also the unconditional " " separator below leaves a trailing space
    # when s stays empty.
    if s is not "":
        s = repr(s)
    file.write("".join([getIndent(indent), prefix, repr(intf._getFullName()), " ", s]))
    file.write("\n")
    for i in intf._interfaces:
        # Proxies must contain only proxies; a mix indicates a construction bug.
        if isinstance(intf, InterfaceProxy):
            assert isinstance(i, InterfaceProxy), (intf, i)
        pprintInterface(i, indent=indent + 1, file=file)
    # Array interfaces: additionally print each array element ("p<i>:").
    if intf._arrayElemCache:
        assert len(intf) == len(intf._arrayElemCache)
        for i, p in enumerate(intf):
            pprintInterface(p, prefix="p%d:" % i, indent=indent + 1, file=file)
def _pprintAgents(intf, indent, prefix="", file=sys.stdout):
    """Recursive helper for pprintAgents: print agents of one interface tree.

    :param intf: interface to inspect
    :param indent: current nesting depth, translated to leading whitespace
    :param prefix: label prepended to this interface's agent line
    :param file: writable stream the output goes to
    """
    if intf._ag is not None:
        file.write("%s%s%r\n" % (getIndent(indent), prefix, intf._ag))
    for i in intf._interfaces:
        # NOTE(review): the prefix is not propagated to children here.
        _pprintAgents(i, indent + 1, file=file)
    # Array interfaces: additionally recurse into each element proxy.
    if intf._arrayElemCache:
        assert len(intf) == len(intf._arrayElemCache)
        for i, p in enumerate(intf):
            _pprintAgents(p, indent + 1, prefix="p%d:" % i, file=file)
def pprintAgents(unitOrIntf, indent=0, file=sys.stdout):
    """
    Pretty print agents

    Writes one line per agent found under *unitOrIntf*, via _pprintAgents.

    :param unitOrIntf: Unit or Interface instance whose sub-interfaces
        are inspected for agents
    :param indent: starting nesting depth
    :param file: writable stream the output goes to
    """
    # The same "<name>:" label is passed for every top-level sub-interface.
    prefix = unitOrIntf._name + ":"
    for intf in unitOrIntf._interfaces:
        _pprintAgents(intf, indent, prefix, file=file)
| mit | Python |
222e2bf4728440fdff2675756b4aa08aba4585fb | Update __init__.py | delitamakanda/socialite,delitamakanda/socialite,delitamakanda/socialite | app/__init__.py | app/__init__.py | from flask import Flask, render_template
from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.pagedown import PageDown
from flask.ext.flatpages import FlatPages
from config import config
from .util import assets
# Flask extension singletons; bound to a concrete app in create_app().
mail = Mail()
moment = Moment()
pagedown = PageDown()
pages = FlatPages()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'

def create_app(config_name):
    """Application factory: build and configure a Flask app.

    :param config_name: key into the ``config`` mapping selecting the
        configuration class (e.g. 'development', 'production').
    :return: the fully configured Flask application.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # Force HTTPS outside debug/testing unless explicitly disabled.
    if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
        from flask.ext.sslify import SSLify
        SSLify(app)  # instantiation alone installs the redirect hooks
    # Bind the shared extension instances to this app.
    mail.init_app(app)
    moment.init_app(app)
    pagedown.init_app(app)
    pages.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    # Blueprints are imported here to avoid circular imports at module load.
    # Fix: use an explicit relative import for `main`, consistent with
    # .auth/.api_1_0 (the bare `from main import ...` relied on Python 2
    # implicit-relative imports and breaks under Python 3).
    from .main import main as main_blueprint
    from .auth import auth as auth_blueprint
    from .api_1_0 import api as api_1_0_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')
    app.register_blueprint(main_blueprint)
    app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
    return app
| from flask import Flask, render_template
from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.pagedown import PageDown
from flask.ext.flatpages import FlatPages
from config import config
# Flask extension singletons; bound to a concrete app in create_app().
mail = Mail()
moment = Moment()
pagedown = PageDown()
pages = FlatPages()
db = SQLAlchemy()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'

def create_app(config_name):
    """Application factory: build and configure a Flask app.

    :param config_name: key into the ``config`` mapping selecting the
        configuration class (e.g. 'development', 'production').
    :return: the fully configured Flask application.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)
    # Force HTTPS outside debug/testing unless explicitly disabled.
    if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
        from flask.ext.sslify import SSLify
        SSLify(app)  # instantiation alone installs the redirect hooks
    # Bind the shared extension instances to this app.
    mail.init_app(app)
    moment.init_app(app)
    pagedown.init_app(app)
    pages.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    # Blueprints are imported here to avoid circular imports at module load.
    # Fix: use an explicit relative import for `main`, consistent with
    # .auth/.api_1_0 (the bare `from main import ...` relied on Python 2
    # implicit-relative imports and breaks under Python 3).
    from .main import main as main_blueprint
    from .auth import auth as auth_blueprint
    from .api_1_0 import api as api_1_0_blueprint
    app.register_blueprint(auth_blueprint, url_prefix='/auth')
    app.register_blueprint(main_blueprint)
    app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
    return app
| mit | Python |
690696493f110899282ad22f9b02d3d0fd91fe31 | Rewrite wirecloud.catalogue.admin module | rockneurotiko/wirecloud,jpajuelo/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud,rockneurotiko/wirecloud,jpajuelo/wirecloud | src/wirecloud/catalogue/admin.py | src/wirecloud/catalogue/admin.py | # -*- coding: utf-8 -*-
# Copyright (c) 2013 CoNWeT Lab., Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from django.contrib import admin
from wirecloud.catalogue.models import CatalogueResource
class CatalogueResourceAdmin(admin.ModelAdmin):
search_fields = ('vendor', 'short_name', 'version', 'author')
list_display = ('vendor', 'short_name', 'version', 'resource_type')
verbose_name_plural = 'Resources'
admin.site.register(CatalogueResource, CatalogueResourceAdmin)
| # -*- coding: utf-8 -*-
#...............................licence...........................................
#
# (C) Copyright 2008 Telefonica Investigacion y Desarrollo
# S.A.Unipersonal (Telefonica I+D)
#
# This file is part of Morfeo EzWeb Platform.
#
# Morfeo EzWeb Platform is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Morfeo EzWeb Platform is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Morfeo EzWeb Platform. If not, see <http://www.gnu.org/licenses/>.
#
# Info about members and contributors of the MORFEO project
# is available at
#
# http://morfeo-project.org
#
#...............................licence...........................................#
#
from django.contrib import admin
from wirecloud.catalogue.models import CatalogueResource, WidgetWiring
from wirecloud.catalogue.models import UserTag, UserVote, Tag, Category
class CategoyAdminView(admin.ModelAdmin):
filter_horizontal = ('tags',)
verbose_name_plural = 'Categories'
class CatalogueResourceAdmin(admin.ModelAdmin):
search_fields = ['short_name', 'vendor', 'author']
list_display = ['short_name', 'vendor', 'author', 'resource_type']
verbose_name_plural = 'Resources'
admin.site.register(CatalogueResource, CatalogueResourceAdmin)
admin.site.register(WidgetWiring)
admin.site.register(UserTag)
admin.site.register(UserVote)
admin.site.register(Tag)
admin.site.register(Category, CategoyAdminView)
| agpl-3.0 | Python |
bc467365ebd287d96109ea0771403a10d3f56580 | set upload limit | mainulhossain/phenoproc,mainulhossain/phenoproc,mainulhossain/phenoproc,mainulhossain/phenoproc,mainulhossain/phenoproc | app/__init__.py | app/__init__.py | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_pagedown import PageDown
from config import config
import os
import flask_sijax
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config['MAX_CONTENT_LENGTH'] = 16 * 1024 * 1024
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask_sslify import SSLify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
app.config['SIJAX_STATIC_PATH'] = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/')
app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js'
flask_sijax.Sijax(app)
return app
| from flask import Flask
from flask_bootstrap import Bootstrap
from flask_mail import Mail
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_pagedown import PageDown
from config import config
import os
import flask_sijax
bootstrap = Bootstrap()
mail = Mail()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
mail.init_app(app)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask_sslify import SSLify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
app.config['SIJAX_STATIC_PATH'] = os.path.join('.', os.path.dirname(__file__), 'static/js/sijax/')
app.config['SIJAX_JSON_URI'] = '/static/js/sijax/json2.js'
flask_sijax.Sijax(app)
return app
| mit | Python |
a2e5e2d5b75acafe5b1de0b92a9206a6a2ec4d25 | Fix py36 unit tests | openstack/blazar,openstack/blazar,ChameleonCloud/blazar,ChameleonCloud/blazar,stackforge/blazar,stackforge/blazar | blazar/tests/api/test_root.py | blazar/tests/api/test_root.py | # Copyright (c) 2014 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from blazar.tests import api
class TestRoot(api.APITest):
def setUp(self):
super(TestRoot, self).setUp()
self.versions = {
"versions":
[{"status": "CURRENT",
"id": "v2.0",
"links": [{"href": "http://localhost/v2", "rel": "self"}]}]}
def test_version_discovery_root(self):
response = self.get_json('/',
expect_errors=True,
path_prefix='')
self.assertEqual(300, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertEqual(self.versions, response.json)
def test_version_discovery_versions(self):
response = self.get_json('/versions',
expect_errors=True,
path_prefix='')
self.assertEqual(300, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertEqual(self.versions, response.json)
def test_bad_uri(self):
response = self.get_json('/bad/path',
expect_errors=True,
path_prefix='')
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "text/plain")
| # Copyright (c) 2014 Bull.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from blazar.tests import api
class TestRoot(api.APITest):
def setUp(self):
super(TestRoot, self).setUp()
self.versions = jsonutils.dump_as_bytes(
{"versions":
[{"status": "CURRENT",
"id": "v2.0",
"links": [{"href": "http://localhost/v2", "rel": "self"}]}]})
def test_version_discovery_root(self):
response = self.get_json('/',
expect_errors=True,
path_prefix='')
self.assertEqual(300, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertEqual(self.versions, response.body)
def test_version_discovery_versions(self):
response = self.get_json('/versions',
expect_errors=True,
path_prefix='')
self.assertEqual(300, response.status_int)
self.assertEqual("application/json", response.content_type)
self.assertEqual(self.versions, response.body)
def test_bad_uri(self):
response = self.get_json('/bad/path',
expect_errors=True,
path_prefix='')
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "text/plain")
| apache-2.0 | Python |
1bde8a92f47d49c6bea286a66fe89a3ccaca80a0 | Fix for .env being loaded for manage.py commands | ukgovdatascience/classifyintentsapp,ukgovdatascience/classifyintentsapp | app/__init__.py | app/__init__.py | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_pagedown import PageDown
bootstrap = Bootstrap()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
# import config here rather than at module level to ensure that .env values
# are loaded into the environment first when running manage.py
from config import config
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
# Set jquery version
from flask_bootstrap import WebCDN
app.extensions['bootstrap']['cdns']['jquery'] = WebCDN(
'//cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/'
)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask_sslify import SSLify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
# Tell browser not to cache any HTML responses, as most pages have
# sensitive information in them. (But CSS should be cached as normal.)
@app.after_request
def apply_caching(response):
if response.headers.get('Content-Type', '').startswith('text/html'):
response.headers['Cache-control'] = 'no-store'
response.headers['Pragma'] = 'no-cache'
return response
return app
| from flask import Flask
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from flask_sqlalchemy import SQLAlchemy
from flask_login import LoginManager
from flask_pagedown import PageDown
from config import config
bootstrap = Bootstrap()
moment = Moment()
db = SQLAlchemy()
pagedown = PageDown()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
# Set jquery version
from flask_bootstrap import WebCDN
app.extensions['bootstrap']['cdns']['jquery'] = WebCDN(
'//cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/'
)
moment.init_app(app)
db.init_app(app)
login_manager.init_app(app)
pagedown.init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask_sslify import SSLify
sslify = SSLify(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
# Tell browser not to cache any HTML responses, as most pages have
# sensitive information in them. (But CSS should be cached as normal.)
@app.after_request
def apply_caching(response):
if response.headers.get('Content-Type', '').startswith('text/html'):
response.headers['Cache-control'] = 'no-store'
response.headers['Pragma'] = 'no-cache'
return response
return app
| mit | Python |
fb236951e1658beb32bd6dc45cf8d49a4636162a | Add tests for repr on tables | dwillmer/blaze,jdmcbr/blaze,LiaoPan/blaze,scls19fr/blaze,mrocklin/blaze,LiaoPan/blaze,jcrist/blaze,ChinaQuants/blaze,maxalbert/blaze,alexmojaki/blaze,nkhuyu/blaze,nkhuyu/blaze,cpcloud/blaze,caseyclements/blaze,ContinuumIO/blaze,cowlicks/blaze,cpcloud/blaze,xlhtc007/blaze,xlhtc007/blaze,alexmojaki/blaze,mrocklin/blaze,caseyclements/blaze,maxalbert/blaze,cowlicks/blaze,scls19fr/blaze,dwillmer/blaze,jcrist/blaze,jdmcbr/blaze,ContinuumIO/blaze,ChinaQuants/blaze | blaze/api/tests/test_table.py | blaze/api/tests/test_table.py | from blaze.api.table import Table, compute, table_repr
from blaze.data.python import Python
from blaze.compute.core import compute
from blaze.compute.python import compute
from datashape import dshape
import pandas as pd
data = (('Alice', 100),
('Bob', 200))
t = Table(data, columns=['name', 'amount'])
def test_resources():
assert t.resources() == {t: t.data}
def test_compute():
assert compute(t) == data
def test_compute():
assert list(compute(t['amount'] + 1)) == [101, 201]
def test_create_with_schema():
t = Table(data, schema='{name: string, amount: float32}')
assert t.schema == dshape('{name: string, amount: float32}')
def test_create_with_raw_data():
t = Table(data, columns=['name', 'amount'])
assert t.schema == dshape('{name: string, amount: int64}')
assert t.name
assert t.data == data
def test_create_with_data_descriptor():
schema='{name: string, amount: int64}'
ddesc = Python(data, schema=schema)
t = Table(ddesc)
assert t.schema == dshape(schema)
assert t.name
assert t.data == ddesc
def test_repr():
result = table_repr(t['name'])
print(result)
assert isinstance(result, str)
assert 'Alice' in result
assert 'Bob' in result
assert '...' not in result
result = table_repr(t['amount'] + 1)
print(result)
assert '101' in result
t2 = Table(tuple((i, i**2) for i in range(100)), columns=['x', 'y'])
result = table_repr(t2)
print(result)
assert len(result.split('\n')) < 20
assert '...' in result
def test_mutable_backed_repr():
mutable_data = [range(2)]
mutable_backed_table = Table(mutable_data, columns=["mutable"])
repr(mutable_backed_table)
def test_dataframe_backed_repr():
mutable_data = range(2)
df = pd.DataFrame(data=mutable_data, columns=["mutable"])
dataframe_backed_table = Table(df)
repr(dataframe_backed_table)
| from blaze.api.table import Table, compute, table_repr
from blaze.data.python import Python
from blaze.compute.core import compute
from blaze.compute.python import compute
from datashape import dshape
data = (('Alice', 100),
('Bob', 200))
t = Table(data, columns=['name', 'amount'])
def test_resources():
assert t.resources() == {t: t.data}
def test_compute():
assert compute(t) == data
def test_compute():
assert list(compute(t['amount'] + 1)) == [101, 201]
def test_create_with_schema():
t = Table(data, schema='{name: string, amount: float32}')
assert t.schema == dshape('{name: string, amount: float32}')
def test_create_with_raw_data():
t = Table(data, columns=['name', 'amount'])
assert t.schema == dshape('{name: string, amount: int64}')
assert t.name
assert t.data == data
def test_create_with_data_descriptor():
schema='{name: string, amount: int64}'
ddesc = Python(data, schema=schema)
t = Table(ddesc)
assert t.schema == dshape(schema)
assert t.name
assert t.data == ddesc
def test_repr():
result = table_repr(t['name'])
print(result)
assert isinstance(result, str)
assert 'Alice' in result
assert 'Bob' in result
assert '...' not in result
result = table_repr(t['amount'] + 1)
print(result)
assert '101' in result
t2 = Table(tuple((i, i**2) for i in range(100)), columns=['x', 'y'])
result = table_repr(t2)
print(result)
assert len(result.split('\n')) < 20
assert '...' in result
| bsd-3-clause | Python |
a085573261c0ed69b6bcabc40c4914a1623dc757 | Add link to FB | ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server | bot/app/buffer.py | bot/app/buffer.py | from buffpy import API
from buffpy.managers.profiles import Profiles
from spacelaunchnow import config
hashtags = '''\n
.
.
.⠀⠀
.⠀⠀
.⠀⠀
#SpaceLaunchNow #space #spacex #nasa #rocket #mars #aerospace #earth #solarsystem #iss #elonmusk
#moonlanding #spaceshuttle #spacewalk #esa #science #picoftheday #blueorigin #Florida #Falcon9
#falconheavy #starship #ULA'''
class BufferAPI:
def __init__(self, debug=None):
if debug is None:
self.DEBUG = config.DEBUG
else:
self.DEBUG = debug
self.api = API(client_id=config.BUFFER_CLIENT_ID,
client_secret=config.BUFFER_SECRET_ID,
access_token=config.BUFFER_ACCESS_TOKEN)
def send_to_all(self, message: str = None, image: str = None, link: str = None, now: bool = False):
profiles = Profiles(api=self.api).all()
for profile in profiles:
_message = message
if profile['service'] == 'instagram' and image is None:
continue
if profile['service'] == 'twitter':
if len(_message) > 280:
_message = (_message[:277] + '...')
profile.updates.new(text=_message, photo=image, link=link, now=now)
def send_to_instagram(self, message: str = None, image: str = None, now: bool = False):
profile = Profiles(api=self.api).filter(service='instagram')[0]
return profile.updates.new(text=message, photo=image, now=now)
def send_to_facebook(self, message: str = None, image: str = None, link: str = None, now: bool = False):
profile = Profiles(api=self.api).filter(service='facebook')[0]
if link:
message = message + "\n" + link
return profile.updates.new(text=message, photo=image, now=now)
def send_to_twitter(self, message: str = None, image: str = None, link: str = None, now: bool = False):
if len(message) > 280:
message = (message[:277] + '...')
profile = Profiles(api=self.api).filter(service='twitter')[0]
return profile.updates.new(text=message, photo=image, link=link, now=now)
| from buffpy import API
from buffpy.managers.profiles import Profiles
from spacelaunchnow import config
hashtags = '''\n
.
.
.⠀⠀
.⠀⠀
.⠀⠀
#SpaceLaunchNow #space #spacex #nasa #rocket #mars #aerospace #earth #solarsystem #iss #elonmusk
#moonlanding #spaceshuttle #spacewalk #esa #science #picoftheday #blueorigin #Florida #Falcon9
#falconheavy #starship #ULA'''
class BufferAPI:
def __init__(self, debug=None):
if debug is None:
self.DEBUG = config.DEBUG
else:
self.DEBUG = debug
self.api = API(client_id=config.BUFFER_CLIENT_ID,
client_secret=config.BUFFER_SECRET_ID,
access_token=config.BUFFER_ACCESS_TOKEN)
def send_to_all(self, message: str = None, image: str = None, link: str = None, now: bool = False):
profiles = Profiles(api=self.api).all()
for profile in profiles:
_message = message
if profile['service'] == 'instagram' and image is None:
continue
if profile['service'] == 'twitter':
if len(_message) > 280:
_message = (_message[:277] + '...')
profile.updates.new(text=_message, photo=image, link=link, now=now)
def send_to_instagram(self, message: str = None, image: str = None, now: bool = False):
profile = Profiles(api=self.api).filter(service='instagram')[0]
return profile.updates.new(text=message, photo=image, now=now)
def send_to_facebook(self, message: str = None, image: str = None, link: str = None, now: bool = False):
profile = Profiles(api=self.api).filter(service='facebook')[0]
return profile.updates.new(text=message, photo=image, now=now)
def send_to_twitter(self, message: str = None, image: str = None, link: str = None, now: bool = False):
if len(message) > 280:
message = (message[:277] + '...')
profile = Profiles(api=self.api).filter(service='twitter')[0]
return profile.updates.new(text=message, photo=image, link=link, now=now)
| apache-2.0 | Python |
d89252a2bbbe0677d2ad184f4c519e2b4d6ee9bd | Add JSON to data. | ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server,ItsCalebJones/SpaceLaunchNow-Server | bot/serializer.py | bot/serializer.py | from bot.models import Launch, Notification, DailyDigestRecord
from rest_framework import serializers
class NotificationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Notification
fields = (
'launch', 'url', 'wasNotifiedTwentyFourHour', 'wasNotifiedOneHour', 'wasNotifiedTenMinutes',
'wasNotifiedDailyDigest', 'last_twitter_post', 'last_net_stamp',
'last_net_stamp_timestamp'
)
extra_kwargs = {
'id': {'read_only': False},
'slug': {'validators': []},
}
class DailyDigestRecordSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = DailyDigestRecord
fields = '__all__'
class LaunchSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Launch
fields = (
'id', 'name', 'url', 'status', 'netstamp', 'wsstamp', 'westamp', 'location_name', 'rocket_name',
'mission_name'
)
def create(self, validated_data):
launch = Launch.objects.get_or_create(**validated_data)
try:
if Notification.objects.get(launch=launch[0]) is None:
Notification.objects.get_or_create(launch=launch[0])
except:
Notification.objects.get_or_create(launch=launch[0])
return launch
def update(self, instance, validated_data):
instance.id = validated_data.get('id', instance.id)
instance.name = validated_data.get('name', instance.name)
instance.status = validated_data.get('status', instance.status)
instance.netstamp = validated_data.get('netstamp', instance.netstamp)
instance.wsstamp = validated_data.get('wsstamp', instance.wsstamp)
instance.westamp = validated_data.get('westamp', instance.westamp)
instance.location_name = validated_data.get('location_name', instance.location_name)
instance.rocket_name = validated_data.get('rocket_name', instance.rocket_name)
instance.mission_name = validated_data.get('mission_name', instance.mission_name)
instance.save()
return instance
def get_object(self):
return self.model(self.validated_data)
| from bot.models import Launch, Notification, DailyDigestRecord
from rest_framework import serializers
class NotificationSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Notification
fields = (
'launch', 'url', 'wasNotifiedTwentyFourHour', 'wasNotifiedOneHour', 'wasNotifiedTenMinutes',
'wasNotifiedDailyDigest', 'last_twitter_post', 'last_net_stamp',
'last_net_stamp_timestamp'
)
extra_kwargs = {
'id': {'read_only': False},
'slug': {'validators': []},
}
class DailyDigestRecordSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = DailyDigestRecord
fields = (
'url', 'timestamp', 'messages', 'count', 'data'
)
class LaunchSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Launch
fields = (
'id', 'name', 'url', 'status', 'netstamp', 'wsstamp', 'westamp', 'location_name', 'rocket_name',
'mission_name'
)
def create(self, validated_data):
launch = Launch.objects.get_or_create(**validated_data)
try:
if Notification.objects.get(launch=launch[0]) is None:
Notification.objects.get_or_create(launch=launch[0])
except:
Notification.objects.get_or_create(launch=launch[0])
return launch
def update(self, instance, validated_data):
instance.id = validated_data.get('id', instance.id)
instance.name = validated_data.get('name', instance.name)
instance.status = validated_data.get('status', instance.status)
instance.netstamp = validated_data.get('netstamp', instance.netstamp)
instance.wsstamp = validated_data.get('wsstamp', instance.wsstamp)
instance.westamp = validated_data.get('westamp', instance.westamp)
instance.location_name = validated_data.get('location_name', instance.location_name)
instance.rocket_name = validated_data.get('rocket_name', instance.rocket_name)
instance.mission_name = validated_data.get('mission_name', instance.mission_name)
instance.save()
return instance
def get_object(self):
return self.model(self.validated_data)
| apache-2.0 | Python |
43a53981c3da2db8a4d06c883cd72442b72eb4be | Update spec_driven_model/tests/fake_mixin.py | OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil | spec_driven_model/tests/fake_mixin.py | spec_driven_model/tests/fake_mixin.py | # Copyright 2021 Akretion - Raphael Valyi <[email protected]>
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl-3.0.en.html).
from odoo import fields, models
class PoXsdMixin(models.AbstractModel):
_description = "Abstract Model for PO XSD"
_name = "spec.mixin.poxsd"
_field_prefix = "poxsd10_"
_schema_name = "poxsd"
_schema_version = "1.0"
_odoo_module = "poxsd"
_spec_module = "odoo.addons.spec_driven_model.tests.spec_poxsd"
_binding_module = "odoo.addons.spec_driven_model.tests.purchase_order_lib"
# TODO rename
brl_currency_id = fields.Many2one(
comodel_name="res.currency",
string="Moeda",
compute="_compute_brl_currency_id",
default=lambda self: self.env.ref("base.EUR").id,
)
def _compute_brl_currency_id(self):
for item in self:
item.brl_currency_id = self.env.ref("base.EUR").id
def _valid_field_parameter(self, field, name):
if name in ("xsd_type", "xsd_required", "choice"):
return True
else:
return super()._valid_field_parameter(field, name)
| # Copyright 2021 Akretion - Raphael Valyi <[email protected]>
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl-3.0.en.html).
from odoo import fields, models
class PoXsdMixin(models.AbstractModel):
_description = "Abstract Model for PO XSD"
_name = "spec.mixin.poxsd"
_field_prefix = "poxsd10_"
_schema_name = "poxsd"
_schema_version = "1.0"
_odoo_module = "poxsd"
_spec_module = "odoo.addons.spec_driven_model.tests.spec_poxsd"
_binding_module = "odoo.addons.spec_driven_model.tests.purchase_order_lib"
# TODO rename
brl_currency_id = fields.Many2one(
comodel_name="res.currency",
string="Moeda",
compute="_compute_brl_currency_id",
default=lambda self: self.env.ref("base.EUR").id,
)
def _compute_brl_currency_id(self):
for item in self:
item.brl_currency_id = self.env.ref("base.EUR").id
| agpl-3.0 | Python |
f07a05f6a6edd0ef481dd9a24c1556b345fe7686 | Remove attempt to import module that no longer exists | IATI/iati.core,IATI/iati.core | iati/tests/conftest.py | iati/tests/conftest.py | """Configuration to exist in the global scope for pytest."""
import collections
import pytest
import iati.default
import iati.resources
import iati.tests.utilities
import iati
pytest_plugins = [ # name required by pytest # pylint: disable=invalid-name
'iati.tests.fixtures.comparison',
'iati.tests.fixtures.versions'
]
def _check_latest_version_mark(item):
"""Check that functions marked as supporting the latest version of the IATI Standard have been updated."""
latest_version_marker = item.get_marker('latest_version')
if latest_version_marker is not None:
latest_version = iati.Version(latest_version_marker.args[0])
assert latest_version == iati.version.STANDARD_VERSION_LATEST
def pytest_runtest_call(item):
"""Run operations that are called when tests are run."""
_check_latest_version_mark(item)
@pytest.fixture(params=[
('2.02', 62), # There are 38 embedded codelists at v2.02, plus 24 non-embedded codelists (which are valid for any version)
('2.01', 61), # There are 37 embedded codelists at v2.01, plus 24 non-embedded codelists (which are valid for any version)
('1.05', 59), # There are 35 embedded codelists at v1.05, plus 24 non-embedded codelists (which are valid for any version)
('1.04', 59) # There are 35 embedded codelists at v1.04, plus 24 non-embedded codelists (which are valid for any version)
])
def codelist_lengths_by_version(request): # latest_version fixture used to perform checks when adding new versions # pylint: disable=unused-argument
"""Return a tuple containing versions of the Standard, and the number of Codelists for that version.
Format: `(version=[standardVersion], expected_length=[numCodelists])`
"""
request.applymarker(pytest.mark.latest_version('2.02'))
output = collections.namedtuple('output', 'version expected_length')
return output(version=request.param[0], expected_length=request.param[1])
@pytest.fixture
def schema_ruleset():
"""Return a schema with the Standard Ruleset added.
Returns:
A valid Activity Schema with the Standard Ruleset added.
Todo:
Stop this being fixed to 2.02.
"""
schema = iati.default.activity_schema('2.02', False)
ruleset = iati.default.ruleset('2.02')
schema.rulesets.add(ruleset)
return schema
| """Configuration to exist in the global scope for pytest."""
import collections
import pytest
import iati.default
import iati.resources
import iati.tests.utilities
import iati
pytest_plugins = [ # name required by pytest # pylint: disable=invalid-name
'iati.tests.fixtures.comparison',
'iati.tests.fixtures.utility',
'iati.tests.fixtures.versions'
]
def _check_latest_version_mark(item):
"""Check that functions marked as supporting the latest version of the IATI Standard have been updated."""
latest_version_marker = item.get_marker('latest_version')
if latest_version_marker is not None:
latest_version = iati.Version(latest_version_marker.args[0])
assert latest_version == iati.version.STANDARD_VERSION_LATEST
def pytest_runtest_call(item):
"""Run operations that are called when tests are run."""
_check_latest_version_mark(item)
@pytest.fixture(params=[
('2.02', 62), # There are 38 embedded codelists at v2.02, plus 24 non-embedded codelists (which are valid for any version)
('2.01', 61), # There are 37 embedded codelists at v2.01, plus 24 non-embedded codelists (which are valid for any version)
('1.05', 59), # There are 35 embedded codelists at v1.05, plus 24 non-embedded codelists (which are valid for any version)
('1.04', 59) # There are 35 embedded codelists at v1.04, plus 24 non-embedded codelists (which are valid for any version)
])
def codelist_lengths_by_version(request): # latest_version fixture used to perform checks when adding new versions # pylint: disable=unused-argument
"""Return a tuple containing versions of the Standard, and the number of Codelists for that version.
Format: `(version=[standardVersion], expected_length=[numCodelists])`
"""
request.applymarker(pytest.mark.latest_version('2.02'))
output = collections.namedtuple('output', 'version expected_length')
return output(version=request.param[0], expected_length=request.param[1])
@pytest.fixture
def schema_ruleset():
"""Return a schema with the Standard Ruleset added.
Returns:
A valid Activity Schema with the Standard Ruleset added.
Todo:
Stop this being fixed to 2.02.
"""
schema = iati.default.activity_schema('2.02', False)
ruleset = iati.default.ruleset('2.02')
schema.rulesets.add(ruleset)
return schema
| mit | Python |
0c160c8e787a9019571f358b70633efa13cad466 | Support for inbox.util.eas in the /inbox-eas repo; this is where EAS-specific util code would live. | gale320/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,wakermahmud/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,closeio/nylas,Eagles2F/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,rmasters/inbox,gale320/sync-engine,closeio/nylas,Eagles2F/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,wakermahmud/sync-engine,gale320/sync-engine,closeio/nylas,ErinCall/sync-engine,closeio/nylas,jobscore/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,EthanBlackburn/sync-engine,nylas/sync-engine,nylas/sync-engine,nylas/sync-engine,EthanBlackburn/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,gale320/sync-engine,ErinCall/sync-engine,rmasters/inbox,jobscore/sync-engine,rmasters/inbox,gale320/sync-engine,EthanBlackburn/sync-engine,rmasters/inbox,wakermahmud/sync-engine,PriviPK/privipk-sync-engine | inbox/util/__init__.py | inbox/util/__init__.py | """ Non-server-specific utility modules. These shouldn't depend on any code
from the inbox module tree!
Don't add new code here! Find the relevant submodule, or use misc.py if
there's really no other place.
"""
# Allow out-of-tree submodules.
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
| """ Non-server-specific utility modules. These shouldn't depend on any code
from the inbox module tree!
Don't add new code here! Find the relevant submodule, or use misc.py if
there's really no other place.
"""
| agpl-3.0 | Python |
933a082a76c6c9b72aaf275f45f0d155f66eeacf | Fix Python 3.3 calling another virtualenv as a subprocess. | waylonflinn/asv,airspeed-velocity/asv,edisongustavo/asv,giltis/asv,giltis/asv,qwhelan/asv,edisongustavo/asv,pv/asv,edisongustavo/asv,waylonflinn/asv,mdboom/asv,qwhelan/asv,cpcloud/asv,spacetelescope/asv,qwhelan/asv,pv/asv,giltis/asv,ericdill/asv,cpcloud/asv,mdboom/asv,mdboom/asv,ericdill/asv,cpcloud/asv,pv/asv,airspeed-velocity/asv,spacetelescope/asv,airspeed-velocity/asv,airspeed-velocity/asv,spacetelescope/asv,mdboom/asv,qwhelan/asv,spacetelescope/asv,ericdill/asv,waylonflinn/asv,pv/asv,ericdill/asv | asv/__init__.py | asv/__init__.py | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
if sys.version_info >= (3, 3):
# OS X framework builds of Python 3.3 can not call other 3.3
# virtualenvs as a subprocess because `__PYENV_LAUNCHER__` is
# inherited.
if os.environ.get('__PYVENV_LAUNCHER__'):
os.unsetenv('__PYVENV_LAUNCHER__')
| # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
unicode_literals)
| bsd-3-clause | Python |
91ef2866d14348971326df39d7868ad5c424b64c | remove the 10 article limit that was used for testing | osma/annif,osma/annif,osma/annif | autoindex_sk.py | autoindex_sk.py | #!/usr/bin/env python3
import sys
import csv
from bs4 import BeautifulSoup
import autoindex
from rdflib import Graph, URIRef, Literal
from rdflib.namespace import DC, DCTERMS, SKOS, XSD
def autoindex_doc(text, url, title, date, author, place):
g = Graph()
uri = URIRef(url)
g.add((uri, DCTERMS.title, Literal(title, 'fi')))
g.add((uri, DCTERMS.issued, Literal(date, datatype=XSD.date)))
if author:
g.add((uri, DCTERMS.creator, Literal(author, 'fi')))
if place:
g.add((uri, DCTERMS.spatial, Literal(place, 'fi')))
results = autoindex.autoindex(text, 'yso-finna-fi', threshold=0.85, maxhits=3)
for result in results:
g.add((uri, DCTERMS.subject, URIRef(result['uri'])))
g.add((URIRef(result['uri']), SKOS.prefLabel, Literal(result['label'], 'fi')))
return g
def html_to_text(html):
soup = BeautifulSoup(html, 'lxml')
return soup.get_text()
reader = csv.reader(open(sys.argv[1], 'r'), delimiter='|')
for row in reader:
id = row[0]
title = html_to_text(row[1])
date = row[2].strip()
author = row[3].strip()
place = row[4].strip()
text = title + " " + html_to_text(row[6])
url = "http://sk.example.com/%s" % id
g = autoindex_doc(text, url, title, date, author, place)
g.serialize(destination=sys.stdout.buffer, format='nt')
| #!/usr/bin/env python3
import sys
import csv
from bs4 import BeautifulSoup
import autoindex
from rdflib import Graph, URIRef, Literal
from rdflib.namespace import DC, DCTERMS, SKOS, XSD
def autoindex_doc(text, url, title, date, author, place):
g = Graph()
uri = URIRef(url)
g.add((uri, DCTERMS.title, Literal(title, 'fi')))
g.add((uri, DCTERMS.issued, Literal(date, datatype=XSD.date)))
if author:
g.add((uri, DCTERMS.creator, Literal(author, 'fi')))
if place:
g.add((uri, DCTERMS.spatial, Literal(place, 'fi')))
results = autoindex.autoindex(text, 'yso-finna-fi', threshold=0.85, maxhits=3)
for result in results:
g.add((uri, DCTERMS.subject, URIRef(result['uri'])))
g.add((URIRef(result['uri']), SKOS.prefLabel, Literal(result['label'], 'fi')))
return g
def html_to_text(html):
soup = BeautifulSoup(html, 'lxml')
return soup.get_text()
reader = csv.reader(open(sys.argv[1], 'r'), delimiter='|')
n = 0
for row in reader:
id = row[0]
title = html_to_text(row[1])
date = row[2].strip()
author = row[3].strip()
place = row[4].strip()
text = title + " " + html_to_text(row[6])
url = "http://sk.example.com/%s" % id
g = autoindex_doc(text, url, title, date, author, place)
g.serialize(destination=sys.stdout.buffer, format='nt')
n += 1
if n == 10:
break
| cc0-1.0 | Python |
5d21942823ea21a3c2eb38e43b4b8b4fa2ec2ac1 | Allow mayday.us for CORS | Rio517/pledgeservice,MayOneUS/pledgeservice,MayOneUS/pledgeservice,Rio517/pledgeservice,Rio517/pledgeservice | backend/util.py | backend/util.py | """General utilities."""
import urlparse
import logging
def ConstantTimeIsEqual(a, b):
"""Securely compare two strings without leaking timing information."""
if len(a) != len(b):
return False
acc = 0
for x, y in zip(a, b):
acc |= ord(x) ^ ord(y)
return acc == 0
# TODO(hjfreyer): Pull into some kind of middleware?
def EnableCors(handler):
"""Inside a request, set the headers to allow being called cross-domain."""
if 'Origin' in handler.request.headers:
origin = handler.request.headers['Origin']
_, netloc, _, _, _, _ = urlparse.urlparse(origin)
if not (netloc == 'mayone.us' or netloc.endswith('.mayone.us') or
netloc == 'mayday.us' or netloc.endswith('.mayday.us')):
logging.warning('Invalid origin: ' + origin)
handler.error(403)
return
handler.response.headers.add_header('Access-Control-Allow-Origin', origin)
handler.response.headers.add_header('Access-Control-Allow-Methods',
'GET, POST')
handler.response.headers.add_header('Access-Control-Allow-Headers',
'content-type, origin')
| """General utilities."""
import urlparse
import logging
def ConstantTimeIsEqual(a, b):
"""Securely compare two strings without leaking timing information."""
if len(a) != len(b):
return False
acc = 0
for x, y in zip(a, b):
acc |= ord(x) ^ ord(y)
return acc == 0
# TODO(hjfreyer): Pull into some kind of middleware?
def EnableCors(handler):
"""Inside a request, set the headers to allow being called cross-domain."""
if 'Origin' in handler.request.headers:
origin = handler.request.headers['Origin']
_, netloc, _, _, _, _ = urlparse.urlparse(origin)
if not (netloc == 'mayone.us' or netloc.endswith('.mayone.us')):
logging.warning('Invalid origin: ' + origin)
handler.error(403)
return
handler.response.headers.add_header('Access-Control-Allow-Origin', origin)
handler.response.headers.add_header('Access-Control-Allow-Methods',
'GET, POST')
handler.response.headers.add_header('Access-Control-Allow-Headers',
'content-type, origin')
| apache-2.0 | Python |
52873e4238a54cb93f403d509d2bebef8971ec9b | Work around deprecation warning with new cssutils versions. | 0x1997/webassets,rs/webassets,scorphus/webassets,wijerasa/webassets,heynemann/webassets,aconrad/webassets,scorphus/webassets,florianjacob/webassets,aconrad/webassets,0x1997/webassets,glorpen/webassets,aconrad/webassets,heynemann/webassets,wijerasa/webassets,glorpen/webassets,JDeuce/webassets,heynemann/webassets,florianjacob/webassets,JDeuce/webassets,glorpen/webassets,john2x/webassets,john2x/webassets | django_assets/filter/cssutils/__init__.py | django_assets/filter/cssutils/__init__.py | import logging
import logging.handlers
from django.conf import settings
from django_assets.filter import BaseFilter
__all__ = ('CSSUtilsFilter',)
class CSSUtilsFilter(BaseFilter):
"""Minifies CSS by removing whitespace, comments etc., using the Python
`cssutils <http://cthedot.de/cssutils/>`_ library.
Note that since this works as a parser on the syntax level, so invalid
CSS input could potentially result in data loss.
"""
name = 'cssutils'
def setup(self):
import cssutils
self.cssutils = cssutils
try:
# cssutils logs to stdout by default, hide that in production
if not settings.DEBUG:
log = logging.getLogger('assets.cssutils')
log.addHandler(logging.handlers.MemoryHandler(10))
# Newer versions of cssutils print a deprecation warning
# for 'setlog'.
if hasattr(cssutils.log, 'setLog'):
func = cssutils.log.setLog
else:
func = cssutils.log.setlog
func(log)
except ImportError:
# During doc generation, Django is not going to be setup and will
# fail when the settings object is accessed. That's ok though.
pass
def apply(self, _in, out):
sheet = self.cssutils.parseString(_in.read())
self.cssutils.ser.prefs.useMinified()
out.write(sheet.cssText) | import logging
import logging.handlers
from django.conf import settings
from django_assets.filter import BaseFilter
__all__ = ('CSSUtilsFilter',)
class CSSUtilsFilter(BaseFilter):
"""Minifies CSS by removing whitespace, comments etc., using the Python
`cssutils <http://cthedot.de/cssutils/>`_ library.
Note that since this works as a parser on the syntax level, so invalid
CSS input could potentially result in data loss.
"""
name = 'cssutils'
def setup(self):
import cssutils
self.cssutils = cssutils
try:
# cssutils logs to stdout by default, hide that in production
if not settings.DEBUG:
log = logging.getLogger('assets.cssutils')
log.addHandler(logging.handlers.MemoryHandler(10))
cssutils.log.setlog(log)
except ImportError:
# During doc generation, Django is not going to be setup and will
# fail when the settings object is accessed. That's ok though.
pass
def apply(self, _in, out):
sheet = self.cssutils.parseString(_in.read())
self.cssutils.ser.prefs.useMinified()
out.write(sheet.cssText) | bsd-2-clause | Python |
52d804aac69bceb9dee9c1b21044551b80bcdfdc | Fix handling default for `--output` option in `people_search` cmd. | nihn/linkedin-scraper,nihn/linkedin-scraper | linkedin_scraper/commands/people_search.py | linkedin_scraper/commands/people_search.py | from getpass import getpass
from scrapy.commands.crawl import Command as BaseCommand
def sanitize_query(query):
return query.replace(' ', '+')
class Command(BaseCommand):
def short_desc(self):
return "Scrap people from LinkedIn"
def syntax(self):
return "[options] <query>"
def add_options(self, parser):
super().add_options(parser)
parser.add_option('-u', '--username', help='Name of LinkedIn account')
parser.add_option('-p', '--password',
help='Password for LinkedIn account')
def process_options(self, args, opts):
opts.output = opts.output or 'results.csv'
super().process_options(args, opts)
people_search_options = {
'query': sanitize_query(args[0]),
'username': opts.username or input(
'Please provide your LinkedIn username: '),
'password': opts.password or getpass(
'Please provide password for your LinkedIn account: ')
}
opts.spargs.update(people_search_options)
def run(self, args, opts):
# Run people_search spider
args = ['people_search']
super().run(args, opts)
| from getpass import getpass
from scrapy.commands.crawl import Command as BaseCommand
def sanitize_query(query):
return query.replace(' ', '+')
class Command(BaseCommand):
def short_desc(self):
return "Scrap people from LinkedIn"
def syntax(self):
return "[options] <query>"
def add_options(self, parser):
super().add_options(parser)
parser.add_option('-u', '--username', help='Name of LinkedIn account')
parser.add_option('-p', '--password',
help='Password for LinkedIn account')
def process_options(self, args, opts):
super().process_options(args, opts)
opts.output = opts.output or 'results.csv'
people_search_options = {
'query': sanitize_query(args[0]),
'username': opts.username or input(
'Please provide your LinkedIn username: '),
'password': opts.password or getpass(
'Please provide password for your LinkedIn account: ')
}
opts.spargs.update(people_search_options)
def run(self, args, opts):
# Run people_search spider
args = ['people_search']
super().run(args, opts)
| mit | Python |
637b3c36e9a5952fc29ceaa705703e94f9f172d3 | Update app_settings.py | kartoza/django-wms-client,kartoza/django-wms-client,kartoza/django-wms-client,kartoza/django-wms-client | django_project/wms_client/app_settings.py | django_project/wms_client/app_settings.py | # coding=utf-8
"""Settings file for WMS Client.
"""
from django.conf import settings
# Allow base django project to override settings
default_leaflet_tiles = (
'OpenStreetMap',
'http://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png',
('© <a href="http://www.openstreetmap.org" target="_parent">OpenStreetMap'
'</a> and contributors, under an <a '
'href="http://www.openstreetmap.org/copyright" target="_parent">open '
'license</a>')
)
LEAFLET_TILES = getattr(settings, 'LEAFLET_TILES', default_leaflet_tiles)
settings.TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.media',
)
| # coding=utf-8
"""Settings file for WMS Client.
"""
from django.conf import settings
# Allow base django project to override settings
default_leaflet_tiles = (
'OpenStreetMap',
'http://{s}.tile.openstreetmap.fr/hot/{z}/{x}/{y}.png',
('© <a hr ef="http://www.openstreetmap.org" target="_parent">OpenStreetMap'
'</a> and contributors, under an <a '
'href="http://www.openstreetmap.org/copyright" target="_parent">open '
'license</a>')
)
LEAFLET_TILES = getattr(settings, 'LEAFLET_TILES', default_leaflet_tiles)
settings.TEMPLATE_CONTEXT_PROCESSORS += (
'django.core.context_processors.media',
)
| bsd-2-clause | Python |
4071c77a6e598c27f7a8b2195ff5e68332120615 | Fix formatting. | nzlosh/st2,StackStorm/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,peak6/st2,peak6/st2,nzlosh/st2,lakshmi-kannan/st2,StackStorm/st2,Plexxi/st2,tonybaloney/st2,Plexxi/st2,tonybaloney/st2,StackStorm/st2,peak6/st2,lakshmi-kannan/st2,nzlosh/st2,tonybaloney/st2,lakshmi-kannan/st2,Plexxi/st2 | st2common/st2common/cmd/validate_config.py | st2common/st2common/cmd/validate_config.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Script for validating a config file against a a particular config schema.
"""
import yaml
from oslo_config import cfg
from st2common.constants.system import VERSION_STRING
from st2common.constants.exit_codes import SUCCESS_EXIT_CODE
from st2common.constants.exit_codes import FAILURE_EXIT_CODE
from st2common.util.pack import validate_config_against_schema
__all__ = [
'main'
]
def _do_register_cli_opts(opts, ignore_errors=False):
for opt in opts:
try:
cfg.CONF.register_cli_opt(opt)
except:
if not ignore_errors:
raise
def _register_cli_opts():
cli_opts = [
cfg.StrOpt('schema-path', default=None, required=True,
help='Path to the config schema to use for validation.'),
cfg.StrOpt('config-path', default=None, required=True,
help='Path to the config file to validate.'),
]
for opt in cli_opts:
cfg.CONF.register_cli_opt(opt)
def main():
_register_cli_opts()
cfg.CONF(args=None, version=VERSION_STRING)
schema_path = cfg.CONF.schema_path
config_path = cfg.CONF.config_path
print('Validating config "%s" against schema in "%s"' % (config_path, schema_path))
with open(schema_path, 'r') as fp:
config_schema = yaml.safe_load(fp.read())
with open(config_path, 'r') as fp:
config_object = yaml.safe_load(fp.read())
try:
validate_config_against_schema(config_schema=config_schema, config_object=config_object)
except Exception as e:
print('Failed to validate pack config: %s' % str(e))
return FAILURE_EXIT_CODE
print('Config "%s" successfuly validated against schema in %s.' % (config_path, schema_path))
return SUCCESS_EXIT_CODE
| # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Script for validating a config file against a a particular config schema.
"""
import yaml
import traceback
from oslo_config import cfg
from st2common.constants.system import VERSION_STRING
from st2common.constants.exit_codes import SUCCESS_EXIT_CODE
from st2common.constants.exit_codes import FAILURE_EXIT_CODE
from st2common.util.pack import validate_config_against_schema
__all__ = [
'main'
]
def _do_register_cli_opts(opts, ignore_errors=False):
for opt in opts:
try:
cfg.CONF.register_cli_opt(opt)
except:
if not ignore_errors:
raise
def _register_cli_opts():
cli_opts = [
cfg.StrOpt('schema-path', default=None, required=True,
help='Path to the config schema to use for validation.'),
cfg.StrOpt('config-path', default=None, required=True,
help='Path to the config file to validate.'),
]
for opt in cli_opts:
cfg.CONF.register_cli_opt(opt)
def main():
_register_cli_opts()
cfg.CONF(args=None, version=VERSION_STRING)
schema_path = cfg.CONF.schema_path
config_path = cfg.CONF.config_path
print('Validating config "%s" against schema in "%s"' % (config_path, schema_path))
with open(schema_path, 'r') as fp:
config_schema = yaml.safe_load(fp.read())
with open(config_path, 'r') as fp:
config_object = yaml.safe_load(fp.read())
try:
validate_config_against_schema(config_schema=config_schema, config_object=config_object)
except Exception as e:
print('Failed to validate pack config: %s', str(e))
traceback.print_exc()
return FAILURE_EXIT_CODE
print('Config "%s" successfuly validated against schema in %s.' % (config_path, schema_path))
return SUCCESS_EXIT_CODE
| apache-2.0 | Python |
d35aed562b3c9eba6f7de7ac4aa7d6ad7723ec0a | Add listnener decos | BlackSynder/synbot | cogs/cancer.py | cogs/cancer.py | from discord.ext.commands import Cog
class Cancer(Cog):
def __init__(self, bot):
self.bot = bot
self.ok_list = [198101180180594688, 246291440106340352]
@Cog.listener
async def on_member_join(self, member):
if member.guild.id not in self.ok_list:
return
await member.guild.system_channel.send("yes " + member.mention)
@Cog.listener
async def on_member_remove(self, member):
if member.guild.id not in self.ok_list:
return
await member.guild.system_channel.send("no " + member.mention)
@Cog.listener
async def on_guild_emojis_update(self, guild, before, after):
if guild.id not in self.ok_list:
return
await guild.system_channel.send("the emojis were updated")
def setup(bot):
bot.add_cog(Cancer(bot))
| from discord.ext import commands
class Cancer(commands.Cog):
def __init__(self, bot):
self.bot = bot
self.ok_list = [198101180180594688, 246291440106340352]
async def on_member_join(self, member):
if member.guild.id not in self.ok_list:
return
await member.guild.system_channel.send("yes " + member.mention)
async def on_member_remove(self, member):
if member.guild.id not in self.ok_list:
return
await member.guild.system_channel.send("no " + member.mention)
async def on_guild_emojis_update(self, guild, before, after):
if guild.id not in self.ok_list:
return
await guild.system_channel.send("the emojis were updated")
def setup(bot):
bot.add_cog(Cancer(bot))
| mit | Python |
1b3c9e5f46f48865882f1087ced0ade168233711 | fix formatting and caching | Naught0/qtbot | cogs/stonks.py | cogs/stonks.py | import discord
import json
from datetime import datetime
from discord.ext import commands
from utils.aiohttp_wrap import aio_get_json
class Stonks(commands.Cog):
URL = "https://finnhub.io/api/v1/quote"
TTL = 60 * 15
def __init__(self, bot):
self.bot = bot
self.session = bot.aio_session
self.redis_client = bot.redis_client
# self.headers = {'X-Finnhub-Token': bot.api_keys["stonks"]}
with open('data/apikeys.json') as f:
self.api_key = json.load(f)["stonks"]
self.headers = {'X-Finnhub-Token': self.api_key}
@commands.command(name="stonk", aliases=["stonks", "stock", "stocks"])
async def stonks(self, ctx: commands.Context, *, symbol: str):
symbol = symbol.upper()
params = {"symbol": symbol}
redis_key = f"stonks:{symbol}"
if await self.redis_client.exists(redis_key):
resp = json.loads(await self.redis_client.get(redis_key))
else:
resp = await aio_get_json(self.session, self.URL, headers=self.headers, params=params)
if resp is None:
return await ctx.error("API Error", description="There was an issue with the stocks API, try again later")
if resp['t'] == 0:
return await ctx.error("Stock error", description=f"Couldn't find any stock information for `{symbol}`")
await self.redis_client.set(redis_key, json.dumps(resp), ex=self.TTL)
em = discord.Embed(color=discord.Color.blurple())
em.set_author(name=symbol, icon_url="https://emojipedia-us.s3.dualstack.us-west-1.amazonaws.com/thumbs/240/twitter/259/chart-increasing_1f4c8.png")
em.add_field(name="Current Price", value=f"${resp['c']:.2f}")
em.add_field(name="Previous Close", value=f"${resp['pc']:.2f}")
em.add_field(name="% Change Today", value=f"{(resp['c'] - resp['pc'])/resp['pc']:.2%}")
em.set_footer()
em.timestamp = datetime.fromtimestamp(resp['t'])
await ctx.send(embed=em)
def setup(bot):
bot.add_cog(Stonks(bot))
| import discord
import json
from datetime import datetime
from discord.ext import commands
from utils.aiohttp_wrap import aio_get_json
class Stonks(commands.Cog):
URL = "https://finnhub.io/api/v1/quote"
def __init__(self, bot):
self.bot = bot
self.session = bot.aio_session
self.redis_client = bot.redis_client
# self.headers = {'X-Finnhub-Token': bot.api_keys["stonks"]}
with open('data/apikeys.json') as f:
self.api_key = json.load(f)["stonks"]
self.headers = {'X-Finnhub-Token': self.api_key}
@commands.command(name="stonk", aliases=["stonks", "stock", "stocks"])
async def stonks(self, ctx: commands.Context, *, symbol: str):
symbol = symbol.upper()
params = {"symbol": symbol}
redis_key = f"stonks:{symbol}"
if await self.redis_client.exists(redis_key):
resp = json.loads(await self.redis_client.get(redis_key))
else:
resp = await aio_get_json(self.session, self.URL, headers=self.headers, params=params)
if resp is None:
return await ctx.error("API Error", description="There was an issue with the stocks API, try again later")
if resp['t'] == 0:
return await ctx.error("Stock error", description=f"Couldn't find any stock information for `{symbol}`")
await self.redis_client.set(redis_key, json.dumps(resp))
em = discord.Embed(color=discord.Color.blurple())
em.set_author(name=symbol, icon_url="https://emojipedia-us.s3.dualstack.us-west-1.amazonaws.com/thumbs/240/twitter/259/chart-increasing_1f4c8.png")
em.add_field(name="Current Price", value=f"${resp['c']}")
em.add_field(name="Previous Close", value=f"${resp['pc']}")
em.add_field(name="% Change today", value=f"{(resp['c'] - resp['pc'])/resp['pc']:.2%}")
em.timestamp = datetime.fromtimestamp(resp['t'])
await ctx.send(embed=em)
def setup(bot):
bot.add_cog(Stonks(bot))
| mit | Python |
50e69a0d53dffbc961b865f583ca071dfb49648c | Reformat class | berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud | mediacloud/mediawords/util/sql.py | mediacloud/mediawords/util/sql.py | import time
import datetime
# noinspection PyPackageRequirements
import dateutil.parser
from mediawords.util.perl import decode_string_from_bytes_if_needed
def get_sql_date_from_epoch(epoch: int) -> str:
# Returns local date by default, no need to set timezone
try:
return datetime.datetime.fromtimestamp(int(epoch)).strftime('%Y-%m-%d %H:%M:%S')
except ValueError:
# Mimic perl's behavior of sending the 0 epoch date on an error
return '1970-01-01 00:00:00'
def sql_now() -> str:
return get_sql_date_from_epoch(int(time.time()))
def get_epoch_from_sql_date(date: str) -> int:
"""Given a date in the sql format 'YYYY-MM-DD', return the epoch time."""
date = decode_string_from_bytes_if_needed(date)
parsed_date = dateutil.parser.parse(date)
return int(parsed_date.timestamp())
def increment_day(date: str, days: int = 1) -> str:
"""Given a date in the sql format 'YYYY-MM-DD', increment it by $days days."""
date = decode_string_from_bytes_if_needed(date)
if days == 0:
return date
epoch_date = get_epoch_from_sql_date(date) + (((days * 24) + 12) * 60 * 60)
return datetime.datetime.fromtimestamp(int(epoch_date)).strftime('%Y-%m-%d')
| import time
import datetime
# noinspection PyPackageRequirements
import dateutil.parser
from mediawords.util.perl import decode_string_from_bytes_if_needed
def get_sql_date_from_epoch(epoch: int) -> str:
# Returns local date by default, no need to set timezone
try:
return datetime.datetime.fromtimestamp(int(epoch)).strftime('%Y-%m-%d %H:%M:%S')
except( ValueError ):
# mimic perl's behavior of sending the 0 epoch date on an error
return '1970-01-01 00:00:00'
def sql_now() -> str:
return get_sql_date_from_epoch(int(time.time()))
def get_epoch_from_sql_date(date: str) -> int:
"""Given a date in the sql format 'YYYY-MM-DD', return the epoch time."""
date = decode_string_from_bytes_if_needed(date)
parsed_date = dateutil.parser.parse(date)
return int(parsed_date.timestamp())
def increment_day(date: str, days: int = 1) -> str:
"""Given a date in the sql format 'YYYY-MM-DD', increment it by $days days."""
date = decode_string_from_bytes_if_needed(date)
if days == 0:
return date
epoch_date = get_epoch_from_sql_date(date) + (((days * 24) + 12) * 60 * 60)
return datetime.datetime.fromtimestamp(int(epoch_date)).strftime('%Y-%m-%d')
| agpl-3.0 | Python |
b87711d62a1f2c4974f945625312d8a33ba91fb6 | convert grp_members into a lambda and add usr_search lambda | alces/essentia-et-accidentia,alces/essentia-et-accidentia,alces/essentia-et-accidentia | code-samples/membersOfDomainGroup.py | code-samples/membersOfDomainGroup.py | #!/usr/bin/env python
# print a list of members of a domain group
param = {
'-f': 'mail', # field name
'-s': '\n', # separator
}
import getopt
import ldap
import re
import sys
try:
param.update(dict(getopt.getopt(sys.argv[1:], 'g:f:s:')[0]))
if '-g' not in param:
sys.stderr.write("-g parameter is required\n")
sys.exit(1)
except getopt.GetoptError:
sys.stderr.write("Usage: %s -g groupName [ -f LDAP field ] [ -s output separator ]\n" % sys.argv[0])
sys.exit(1)
ldapSrv = ldap.initialize('ldap://dc.example.com')
ldapSrv.bind_s('[email protected]', 'bindPasSw0rd')
# get output filed from ldap results
ldap_output = lambda r: r[1][param['-f']][0]
# make a flat list from a list of lists
flat = lambda lst: reduce(lambda l, e: l + flat(e) if isinstance(e, list) else l + [e], lst, [])
# search for a group by filter
grp_search = lambda fltr: ldapSrv.search_s('ou=Resources,dc=example,dc=com', ldap.SCOPE_SUBTREE, '(&(objectclass=group)(%s))' % fltr, ['dn'])
# search for users inside a given group
usr_search = lambda grpDN: ldapSrv.search_s('ou=Users,dc=example,dc=com', ldap.SCOPE_SUBTREE, '(&(objectclass=person)(memberOf=%s))' % grpDN, [param['-f']])
# get a nested list of the members of a group with a given DN
grp_members = lambda grpDN: [grp_members(grp[0]) for grp in grp_search('memberOf=%s' % grpDN)] + usr_search(grpDN)
grp = grp_search('name=%s' % param['-g'])
if not grp:
sys.stderr.write("Group '%s' isn't found in LDAP\n" % param['-g'])
sys.exit(2)
print param['-s'].join(sorted(set(ldap_output(res) for res in flat(grp_members(grp[0][0])) if res)))
| #!/usr/bin/env python
# print a list of members of a domain group
param = {
'-f': 'mail', # field name
'-s': '\n', # separator
}
import getopt
import ldap
import re
import sys
try:
param.update(dict(getopt.getopt(sys.argv[1:], 'g:f:s:')[0]))
if '-g' not in param:
sys.stderr.write("-g parameter is required\n")
sys.exit(1)
except getopt.GetoptError:
sys.stderr.write("Usage: %s -g groupName [ -f LDAP field ] [ -s output separator ]\n" % sys.argv[0])
sys.exit(1)
ldapSrv = ldap.initialize('ldap://dc.example.com')
ldapSrv.bind_s('[email protected]', 'bindPasSw0rd')
# get output filed from ldap results
ldap_output = lambda r: r[1][param['-f']][0]
# make a flat list from a list of lists
flat = lambda lst: reduce(lambda l, e: l + flat(e) if isinstance(e, list) else l + [e], lst, [])
# search for a group by filter
grp_search = lambda fltr: ldapSrv.search_s('ou=Resources,dc=example,dc=com', ldap.SCOPE_SUBTREE, '(&(objectclass=group)(%s))' % fltr, ['dn'])
# search for members in LDAP groups and return a nested list of them
def grp_members(gdn):
return [grp_members(grp[0]) for grp in grp_search('memberOf=%s' % gdn)
] + ldapSrv.search_s('ou=Users,dc=example,dc=com', ldap.SCOPE_SUBTREE, '(&(objectclass=person)(memberOf=%s))' % gdn, [param['-f']])
grp = grp_search('name=%s' % param['-g'])
if not grp:
sys.stderr.write("Group '%s' isn't found in LDAP\n" % param['-g'])
sys.exit(2)
print param['-s'].join(sorted(set(ldap_output(res) for res in flat(grp_members(grp[0][0])) if res)))
| bsd-2-clause | Python |
f47ebbe4dcacdd0ef96799a5d11925e0a8b6d5d5 | fix import path | jplusplus/statscraper | test/test_resultset.py | test/test_resultset.py | from unittest import TestCase
from statscraper import ResultSet
from pandas.api import types as ptypes
class TestResultSet(TestCase):
def test_pandas_export(self):
result = ResultSet()
result.append({'city': "Voi", 'value': 45483})
df = result.pandas
self.assertTrue(ptypes.is_numeric_dtype(df.value))
| from unittest import TestCase
from statscraper.base_scraper import ResultSet
from pandas.api import types as ptypes
class TestResultSet(TestCase):
def test_pandas_export(self):
result = ResultSet()
result.append({'city': "Voi", 'value': 45483})
df = result.pandas
self.assertTrue(ptypes.is_numeric_dtype(df.value))
| mit | Python |
cf5ad85a35824646a30d90de79d72f4068dade50 | Fix failing QML test with Qt 5.9 due to assert | qtproject/pyside-pyside,qtproject/pyside-pyside,qtproject/pyside-pyside,qtproject/pyside-pyside,qtproject/pyside-pyside | tests/QtQml/bug_557.py | tests/QtQml/bug_557.py | #############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
import sys
from helper import adjust_filename
from PySide2.QtCore import QUrl
from PySide2.QtGui import QGuiApplication
from PySide2.QtQml import QQmlEngine, QQmlComponent
app = QGuiApplication(sys.argv)
engine = QQmlEngine()
component = QQmlComponent(engine)
# This should segfault if the QDeclarativeComponent has not QQmlEngine
component.loadUrl(QUrl.fromLocalFile(adjust_filename('foo.qml', __file__)))
| #############################################################################
##
## Copyright (C) 2016 The Qt Company Ltd.
## Contact: https://www.qt.io/licensing/
##
## This file is part of the test suite of PySide2.
##
## $QT_BEGIN_LICENSE:GPL-EXCEPT$
## Commercial License Usage
## Licensees holding valid commercial Qt licenses may use this file in
## accordance with the commercial license agreement provided with the
## Software or, alternatively, in accordance with the terms contained in
## a written agreement between you and The Qt Company. For licensing terms
## and conditions see https://www.qt.io/terms-conditions. For further
## information use the contact form at https://www.qt.io/contact-us.
##
## GNU General Public License Usage
## Alternatively, this file may be used under the terms of the GNU
## General Public License version 3 as published by the Free Software
## Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
## included in the packaging of this file. Please review the following
## information to ensure the GNU General Public License requirements will
## be met: https://www.gnu.org/licenses/gpl-3.0.html.
##
## $QT_END_LICENSE$
##
#############################################################################
import sys
from PySide2.QtCore import QUrl
from PySide2.QtGui import QGuiApplication
from PySide2.QtQml import QQmlEngine, QQmlComponent
app = QGuiApplication(sys.argv)
engine = QQmlEngine()
component = QQmlComponent(engine)
# This should segfault if the QDeclarativeComponent has not QQmlEngine
component.loadUrl(QUrl.fromLocalFile('foo.qml'))
| lgpl-2.1 | Python |
2ddfb4f0f4f2de060399a6e5b519a7f4b788ace5 | make it possible to show languages for selected values on a map | clld/autotyp,clld/autotyp | autotyp/adapters.py | autotyp/adapters.py | from sqlalchemy.orm import joinedload
from clld.interfaces import IParameter, IValue, IIndex
from clld.db.meta import DBSession
from clld.db.models.common import ValueSet
from clld.web.adapters.base import Index
from clld.web.adapters.geojson import GeoJsonParameter
from clld.web.maps import SelectedLanguagesMap
class GeoJsonFeature(GeoJsonParameter):
def feature_iterator(self, ctx, req):
return DBSession.query(ValueSet).filter(ValueSet.parameter_pk == ctx.pk)\
.options(joinedload(ValueSet.language))
def feature_properties(self, ctx, req, valueset):
return {}
class MapView(Index):
extension = str('map.html')
mimetype = str('text/vnd.clld.map+html')
send_mimetype = str('text/html')
template = 'language/map_html.mako'
def template_context(self, ctx, req):
languages = list(v.valueset.language for v in ctx.get_query(limit=8000))
return {
'map': SelectedLanguagesMap(ctx, req, languages),
'languages': languages}
def includeme(config):
config.register_adapter(GeoJsonFeature, IParameter)
config.register_adapter(MapView, IValue, IIndex)
| from sqlalchemy.orm import joinedload
from clld.interfaces import IParameter, ILanguage, IIndex
from clld.db.meta import DBSession
from clld.db.models.common import ValueSet
from clld.web.adapters.base import Index
from clld.web.adapters.geojson import GeoJsonParameter
from clld.web.maps import SelectedLanguagesMap
class GeoJsonFeature(GeoJsonParameter):
def feature_iterator(self, ctx, req):
return DBSession.query(ValueSet).filter(ValueSet.parameter_pk == ctx.pk)\
.options(joinedload(ValueSet.language))
def feature_properties(self, ctx, req, valueset):
return {}
class MapView(Index):
extension = str('map.html')
mimetype = str('text/vnd.clld.map+html')
send_mimetype = str('text/html')
template = 'language/map_html.mako'
def template_context(self, ctx, req):
languages = list(ctx.get_query(limit=8000))
return {
'map': SelectedLanguagesMap(ctx, req, languages),
'languages': languages}
def includeme(config):
config.register_adapter(GeoJsonFeature, IParameter)
config.register_adapter(MapView, ILanguage, IIndex)
| apache-2.0 | Python |
e3d082588db63690a846007beb8ddd42ebd4144e | Include pages urls into the main url patterns | FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management | config/urls.py | config/urls.py | from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
urlpatterns = [
# Django Admin, use {% url 'admin:index' %}
url(settings.ADMIN_URL, admin.site.urls),
# Custom urls
url(r'^', include('pages.urls', namespace='pages')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
| from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views.generic import TemplateView
from django.views import defaults as default_views
urlpatterns = [
# Django Admin, use {% url 'admin:index' %}
url(settings.ADMIN_URL, admin.site.urls),
# To be removed
url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name='home'),
url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name='about'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
if settings.DEBUG:
# This allows the error pages to be debugged during development, just visit
# these url in browser to see how these error pages look like.
urlpatterns += [
url(r'^400/$', default_views.bad_request, kwargs={'exception': Exception('Bad Request!')}),
url(r'^403/$', default_views.permission_denied, kwargs={'exception': Exception('Permission Denied')}),
url(r'^404/$', default_views.page_not_found, kwargs={'exception': Exception('Page not Found')}),
url(r'^500/$', default_views.server_error),
]
if 'debug_toolbar' in settings.INSTALLED_APPS:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
| mit | Python |
2f412d6d98e6b03c1e3997d6acb0d15cace12e28 | remove trailing spaces | felipecruz/coopy,felipecruz/coopy | coopy/utils.py | coopy/utils.py | def method_or_none(instance, name):
method = getattr(instance, name)
if (name[0:2] == '__' and name[-2,:] == '__') or \
not callable(method) :
return None
return method
def action_check(obj):
return (hasattr(obj, '__readonly'),
hasattr(obj, '__unlocked'),
hasattr(obj, '__abort_exception'))
def inject(obj, name, dependency):
obj.__dict__[name] = dependency
| def method_or_none(instance, name):
method = getattr(instance, name)
if (name[0:2] == '__' and name[-2,:] == '__') or \
not callable(method) :
return None
return method
def action_check(obj):
return (hasattr(obj, '__readonly'),
hasattr(obj, '__unlocked'),
hasattr(obj, '__abort_exception'))
def inject(obj, name, dependency):
obj.__dict__[name] = dependency
| bsd-3-clause | Python |
919a4f183e9a09ded7cf6272f9be300f22408c08 | fix method or none method name comparison | felipecruz/coopy,felipecruz/coopy | coopy/utils.py | coopy/utils.py | def method_or_none(instance, name):
method = getattr(instance, name)
if (name[0:2] == '__' and name[-2:] == '__') or \
not callable(method) :
return None
return method
def action_check(obj):
return (hasattr(obj, '__readonly'),
hasattr(obj, '__unlocked'),
hasattr(obj, '__abort_exception'))
def inject(obj, name, dependency):
obj.__dict__[name] = dependency
| def method_or_none(instance, name):
method = getattr(instance, name)
if (name[0:2] == '__' and name[-2,:] == '__') or \
not callable(method) :
return None
return method
def action_check(obj):
return (hasattr(obj, '__readonly'),
hasattr(obj, '__unlocked'),
hasattr(obj, '__abort_exception'))
def inject(obj, name, dependency):
obj.__dict__[name] = dependency
| bsd-3-clause | Python |
b59f21ee28cc8eaf56cbc49fd7926e243e92276f | Fix bug for users with Space inside their usernames. | luanfonceca/speakerfight,luanfonceca/speakerfight,mauricioabreu/speakerfight,mauricioabreu/speakerfight,luanfonceca/speakerfight,mauricioabreu/speakerfight | core/models.py | core/models.py | from django.core.exceptions import AppRegistryNotReady
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
from django.db import models
from django.db.models.signals import post_save, pre_save
from django.utils.translation import ugettext as _
class Profile(models.Model):
about_me = models.TextField(
_('About me'), max_length=500, null=True, blank=True)
github = models.CharField(
_('Github username'), max_length=50, null=True, blank=True)
facebook = models.CharField(
_('Facebook username'), max_length=50, null=True, blank=True)
site = models.URLField(
_('Site url'), max_length=200, null=True, blank=True)
# relations
user = models.OneToOneField(to=settings.AUTH_USER_MODEL)
class Meta:
verbose_name = _('Profile')
def __unicode__(self):
return self.user.get_full_name()
def get_absolute_url(self):
return reverse_lazy(
'user_profile', kwargs={'user__username': self.user.username})
def get_github_url(self):
if self.github:
return 'http://github.com/{}'.format(self.github)
def get_facebook_url(self):
if self.facebook:
return 'http://facebook.com/{}'.format(self.facebook)
def get_site_url(self):
return self.site
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
def slugify_user_username(sender, instance, **kwargs):
instance.username = instance.username.replace(' ', '_')
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except AppRegistryNotReady:
from django.contrib.auth.models import User
post_save.connect(create_user_profile, sender=User)
pre_save.connect(slugify_user_username, sender=User)
| from django.core.exceptions import AppRegistryNotReady
from django.core.urlresolvers import reverse_lazy
from django.conf import settings
from django.db import models
from django.db.models.signals import post_save
from django.utils.translation import ugettext as _
class Profile(models.Model):
about_me = models.TextField(
_('About me'), max_length=500, null=True, blank=True)
github = models.CharField(
_('Github username'), max_length=50, null=True, blank=True)
facebook = models.CharField(
_('Facebook username'), max_length=50, null=True, blank=True)
site = models.URLField(
_('Site url'), max_length=200, null=True, blank=True)
# relations
user = models.OneToOneField(to=settings.AUTH_USER_MODEL)
class Meta:
verbose_name = _('Profile')
def __unicode__(self):
return self.user.get_full_name()
def get_absolute_url(self):
return reverse_lazy(
'user_profile', kwargs={'user__username': self.user.username})
def get_github_url(self):
if self.github:
return 'http://github.com/{}'.format(self.github)
def get_facebook_url(self):
if self.facebook:
return 'http://facebook.com/{}'.format(self.facebook)
def get_site_url(self):
return self.site
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except AppRegistryNotReady:
from django.contrib.auth.models import User
post_save.connect(create_user_profile, sender=User)
| mit | Python |
b1eb69620bbe875d117498ed95e009a019e54fab | Fix vote app URL patterns | kuboschek/jay,OpenJUB/jay,kuboschek/jay,OpenJUB/jay,OpenJUB/jay,kuboschek/jay | votes/urls.py | votes/urls.py | from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView, results, system_home
urlpatterns = [
url(r'^$', system_home, name="system"),
url(r'^(?P<vote_name>[\w-]+)/$', VoteView.as_view(), name="vote"),
url(r'^(?P<vote_name>[\w-]+)/results/$', results, name="results"),
]
| from django.conf.urls import include, url
from django.views.generic import TemplateView
from votes.views import VoteView, results, system_home
urlpatterns = [
url(r'^$', system_home, name="system"),
url(r'^(?P<vote_name>[\w-]+)$', VoteView.as_view(), name="vote"),
url(r'^(?P<vote_name>[\w-]+)/results$', results, name="results"),
]
| mit | Python |
1d0b114c7e918c87e14d9ea7a7c49cb9120db68b | Bump version (#128) | VirusTotal/vt-py | vt/version.py | vt/version.py | """Defines VT release version."""
__version__ = '0.17.3'
| """Defines VT release version."""
__version__ = '0.17.2'
| apache-2.0 | Python |
1034699a21dc0cf4862624d076d487deae7df9e2 | add NullHandler to avoid "no handlers could be found" error. | fonttools/fonttools,googlefonts/fonttools | Lib/fontTools/__init__.py | Lib/fontTools/__init__.py | from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import logging
# add a do-nothing handler to the libary's top-level logger, to avoid
# "no handlers could be found" error if client doesn't configure logging
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
version = "3.0"
| from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
version = "3.0"
| mit | Python |
44620b2fa69500e1cada5622fa96eedd9c931006 | Add test for MessageBeep() | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | Lib/test/test_winsound.py | Lib/test/test_winsound.py | # Ridiculously simple test of the winsound module for Windows.
import winsound, time
for i in range(100, 2000, 100):
winsound.Beep(i, 75)
print "Hopefully you heard some sounds increasing in frequency!"
winsound.MessageBeep()
time.sleep(0.5)
winsound.MessageBeep(winsound.MB_OK)
time.sleep(0.5)
winsound.MessageBeep(winsound.MB_ICONASTERISK)
time.sleep(0.5)
winsound.MessageBeep(winsound.MB_ICONEXCLAMATION)
time.sleep(0.5)
winsound.MessageBeep(winsound.MB_ICONHAND)
time.sleep(0.5)
winsound.MessageBeep(winsound.MB_ICONQUESTION)
time.sleep(0.5)
| # Ridiculously simple test of the winsound module for Windows.
import winsound
for i in range(100, 2000, 100):
winsound.Beep(i, 75)
print "Hopefully you heard some sounds increasing in frequency!"
| mit | Python |
53a86e2318256e6edcca3d1e4ce2981a29bd8208 | Add flask-email configs | Kashomon/online-ratings,usgo/online-ratings,duckpunch/online-ratings,usgo/online-ratings,Kashomon/online-ratings,duckpunch/online-ratings,Kashomon/online-ratings,usgo/online-ratings | web/config.py | web/config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class BaseConfiguration(object):
DEBUG = False
TESTING = False
ADMINS = frozenset(['[email protected]'])
SECRET_KEY = 'SecretKeyForSessionSigning'
THREADS_PER_PAGE = 8
DATABASE = 'app.db'
DATABASE_PATH = os.path.join(basedir, DATABASE)
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + DATABASE_PATH
SECURITY_PASSWORD_HASH = 'sha512_crypt'
SECURITY_PASSWORD_SALT = 'SuPeRsEcReTsAlT'
SECURITY_POST_LOGIN_VIEW = '/ViewProfile'
SECURITY_CHANGEABLE = True
SECURITY_REGISTERABLE = True
SECURITY_TRACKABLE = True
SECURITY_SEND_REGISTER_EMAIL = False
SECURITY_SEND_PASSWORD_CHANGE_EMAIL = False
SECURITY_SEND_PASSWORD_RESET_NOTICE_EMAIL = False
MAIL_SUPPRESS_SEND = True
class DockerConfig(BaseConfiguration):
SECRET_KEY = os.environ.get('SECRET_KEY')
DB_NAME = os.environ.get('DB_NAME')
DB_USER = os.environ.get('DB_USER')
DB_PASS = os.environ.get('DB_PASS')
DB_SERVICE = os.environ.get('DB_SERVICE')
DB_PORT = os.environ.get('DB_PORT')
SQLALCHEMY_DATABASE_URI = 'postgresql://{0}:{1}@{2}:{3}/{4}'.format(
DB_USER, DB_PASS, DB_SERVICE, DB_PORT, DB_NAME
)
RQ_DEFAULT_HOST="redis_1"
RQ_DEFAULT_PORT=6379
MAIL_SERVER = "smtp_server.usgo.org"
MAIL_PORT = 587
MAIL_USE_TLS = True
MAIL_USERNAME = "[email protected]"
MAIL_PASSWORD = "password"
MAIL_DEFAULT_SENDER = "[email protected]"
class DebugConfiguration(DockerConfig):
DEBUG = True
class TestConfiguration(BaseConfiguration):
TESTING = True
DATABASE = 'tests.db'
DATABASE_PATH = os.path.join(basedir, DATABASE)
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + DATABASE_PATH
| import os
basedir = os.path.abspath(os.path.dirname(__file__))
class BaseConfiguration(object):
DEBUG = False
TESTING = False
ADMINS = frozenset(['[email protected]'])
SECRET_KEY = 'SecretKeyForSessionSigning'
THREADS_PER_PAGE = 8
DATABASE = 'app.db'
DATABASE_PATH = os.path.join(basedir, DATABASE)
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + DATABASE_PATH
SECURITY_PASSWORD_HASH = 'sha512_crypt'
SECURITY_PASSWORD_SALT = 'SuPeRsEcReTsAlT'
SECURITY_POST_LOGIN_VIEW = '/ViewProfile'
SECURITY_CHANGEABLE = True
SECURITY_REGISTERABLE = True
SECURITY_TRACKABLE = True
SECURITY_SEND_REGISTER_EMAIL = False
SECURITY_SEND_PASSWORD_CHANGE_EMAIL = False
SECURITY_SEND_PASSWORD_RESET_NOTICE_EMAIL = False
MAIL_SUPPRESS_SEND = True
class DockerConfig(BaseConfiguration):
SECRET_KEY = os.environ.get('SECRET_KEY')
DB_NAME = os.environ.get('DB_NAME')
DB_USER = os.environ.get('DB_USER')
DB_PASS = os.environ.get('DB_PASS')
DB_SERVICE = os.environ.get('DB_SERVICE')
DB_PORT = os.environ.get('DB_PORT')
SQLALCHEMY_DATABASE_URI = 'postgresql://{0}:{1}@{2}:{3}/{4}'.format(
DB_USER, DB_PASS, DB_SERVICE, DB_PORT, DB_NAME
)
RQ_DEFAULT_HOST="redis_1"
RQ_DEFAULT_PORT=6379
class DebugConfiguration(DockerConfig):
DEBUG = True
class TestConfiguration(BaseConfiguration):
TESTING = True
DATABASE = 'tests.db'
DATABASE_PATH = os.path.join(basedir, DATABASE)
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + DATABASE_PATH
| mit | Python |
d08012b044e7340ce7f8c41ce5634d72f40de35d | Update test_server.py | hushfile/hushfile-test | server-functional/test_server.py | server-functional/test_server.py | import requests
import unittest
import json
import logging
import zlib
import sys
from colorama import init, Fore, Back, Style
logger = logging.getLogger('test_server')
ENDPOINT = "you_forgot_to_provide_the_endpoint_as_the_first_command_line_argument"
class TestErrorHandling(unittest.TestCase):
def check_parsable_but_not_ok(self):
try:
self.assertNotEqual(self.resp.json()["status"], "OK")
except Exception as e:
logger.warning(self.resp.text)
raise e
def test_missing_post_params(self):
self.resp = requests.post(ENDPOINT + "/upload")
def test_invalid_uploadpassword(self):
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":"a","metadata":"","chunknumber":1,"finishupload":False,"deletepassword":"loldonkey"})
self.unfinished = True
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":"b","chunknumber":2,"finishupload":True,"fileid":self.resp.json()["fileid"], "uploadpassword": self.resp.json()["uploadpassword"] + "BOB"})
self.unfinished = False
def test_chunk_too_big(self):
if 100*10**6 < self.__class__.serverinfo["max_filesize_bytes"]+1:
print(Fore.RED + "skipping test, max_filesize_bytes very big: {}".format(self.__class__.serverinfo["max_filesize_bytes"]) + Fore.RESET)
return
self.resp = requests.post(ENDPOINT + "/upload", data=zlib.compress(json.dumps({"cryptofile":"\x00"*(self.__class__.serverinfo["max_filesize_bytes"]+1),"metadata":"","chunknumber":0,"finishupload":True,"deletepassword":"loldonkey"}).encode("utf-8")), headers={'Content-Encoding': 'gzip'})
def test_chunk_zero_but_not_finishing(self):
""" it should not be possible to download chunks before the whole hushfile is finished """
self.reference = "a"
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":self.reference,"metadata":"","chunknumber":0,"finishupload":False,"deletepassword":"loldonkey"})
self.resp = requests.get(ENDPOINT + "/file", params={"chunknumber":0,"fileid":self.resp.json()["fileid"]})
def tearDown(self):
self.check_parsable_but_not_ok()
self.assertFalse(self.unfinished, self.resp.text)
def setUp(self):
self.unfinished = False
@classmethod
def setUpClass(cls):
cls.serverinfo = requests.get(ENDPOINT + "/serverinfo").json()
logger.info(cls.serverinfo)
class TestFileEquality(unittest.TestCase):
def test_basic_one_chunk_equality(self):
self.reference = "a"
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":self.reference,"metadata":"","chunknumber":0,"finishupload":True,"deletepassword":"loldonkey"})
self.resp = requests.get(ENDPOINT + "/file", params={"chunknumber":0,"fileid":self.resp.json()["fileid"]})
def test_compressed_one_chunk_equality(self):
self.reference = "a"
self.resp = requests.post(ENDPOINT + "/upload", data=zlib.compress(json.dumps({"cryptofile":self.reference,"metadata":"","chunknumber":0,"finishupload":True,"deletepassword":"loldonkey"}).encode("utf-8")), headers={'Content-Encoding': 'gzip'})
self.assertEqual(self.resp.json()["status"], "OK")
self.resp = requests.get(ENDPOINT + "/file", params={"chunknumber":0,"fileid":self.resp.json()["fileid"]})
def tearDown(self):
self.assertEqual(self.reference, self.resp.text)
if __name__ == "__main__":
global ENDPOINT
init()
logging.basicConfig(level="DEBUG")
if sys.hexversion < 0x03000000:
sys.exit("Python 3 is required to run this program.")
if len(sys.argv) > 1: ENDPOINT = sys.argv.pop()
unittest.main()
| import requests
import unittest
import json
import logging
logger = logging.getLogger('test_server')
ENDPOINT = "http://localhost:8801/api"
class TestErrorHandling(unittest.TestCase):
def check_parsable_but_not_ok(self):
try:
self.assertNotEqual(self.resp.json()["status"], "OK")
except Exception as e:
logger.warning(self.resp.text)
raise e
def test_missing_post_params(self):
self.resp = requests.post(ENDPOINT + "/upload")
def test_invalid_uploadpassword(self):
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":"a","metadata":"","chunknumber":1,"finishupload":False,"deletepassword":"loldonkey"})
self.unfinished = True
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":"b","chunknumber":2,"finishupload":True,"fileid":self.resp.json()["fileid"], "uploadpassword": self.resp.json()["uploadpassword"] + "BOB"})
self.unfinished = False
def test_chunk_too_big(self):
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":b"\x00"*(self.__class__.serverinfo["max_filesize_bytes"]+1),"metadata":"","chunknumber":0,"finishupload":True,"deletepassword":"loldonkey"})
def test_chunk_zero_but_not_finishing(self):
self.resp = requests.post(ENDPOINT + "/upload", data={"cryptofile":"a","metadata":"","chunknumber":0,"finishupload":False,"deletepassword":"loldonkey"})
def tearDown(self):
self.check_parsable_but_not_ok()
self.assertFalse(self.unfinished, self.resp.text)
def setUp(self):
self.unfinished = False
@classmethod
def setUpClass(cls):
cls.serverinfo = requests.get(ENDPOINT + "/serverinfo").json()
logger.info(cls.serverinfo)
if __name__ == "__main__":
logging.basicConfig(level="INFO")
unittest.main()
| bsd-2-clause | Python |
5000eea27c511ad036f03b64e2be7dc69bac0845 | Add `JSONField` | prophile/jacquard,prophile/jacquard | jacquard/odm/fields.py | jacquard/odm/fields.py | import abc
import copy
class BaseField(object, metaclass=abc.ABCMeta):
def __init__(self, null=False, default=None):
self.null = null
self.default = default
@abc.abstractmethod
def transform_to_storage(self, value):
raise NotImplementedError()
@abc.abstractmethod
def transform_from_storage(self, value):
raise NotImplementedError()
def _learn_from_owner(self, owner):
if owner is None:
return
if hasattr(self, 'owner'):
return
self.owner = owner
for field_name, value in vars(owner):
if value is self:
self.name = field_name
def validate(self, raw_value):
if not self.null and raw_value is None:
raise ValueError("%s is not nullable" % self.name)
def __get__(self, obj, owner):
if obj is None:
self._learn_from_owner(owner)
return self
try:
raw_value = obj._fields[self.name]
except KeyError:
return self.default
return self.transform_from_storage(raw_value)
def __set__(self, obj, value):
self._learn_from_owner(type(obj))
if value is None:
obj._fields[self.name] = None
else:
obj._fields[self.name] = self.transform_to_storage(value)
if obj.session:
obj.session.mark_instance_dirty(obj)
def __set_name__(self, owner, name):
self.owner = owner
self.name = name
class TextField(BaseField):
def transform_to_storage(self, value):
return str(value)
def transform_from_storage(self, value):
return value
class JSONField(BaseField):
def transform_to_storage(self, value):
return copy.deepcopy(value)
def transform_from_storage(self, value):
return copy.deepcopy(value)
| import abc
class BaseField(object, metaclass=abc.ABCMeta):
def __init__(self, null=False, default=None):
self.null = null
self.default = default
@abc.abstractmethod
def transform_to_storage(self, value):
raise NotImplementedError()
@abc.abstractmethod
def transform_from_storage(self, value):
raise NotImplementedError()
def _learn_from_owner(self, owner):
if owner is None:
return
if hasattr(self, 'owner'):
return
self.owner = owner
for field_name, value in vars(owner):
if value is self:
self.name = field_name
def validate(self, raw_value):
if not self.null and raw_value is None:
raise ValueError("%s is not nullable" % self.name)
def __get__(self, obj, owner):
if obj is None:
self._learn_from_owner(owner)
return self
try:
raw_value = obj._fields[self.name]
except KeyError:
return self.default
return self.transform_from_storage(raw_value)
def __set__(self, obj, value):
self._learn_from_owner(type(obj))
if value is None:
obj._fields[self.name] = None
else:
obj._fields[self.name] = self.transform_to_storage(value)
if obj.session:
obj.session.mark_instance_dirty(obj)
def __set_name__(self, owner, name):
self.owner = owner
self.name = name
class TextField(BaseField):
def transform_to_storage(self, value):
return value
def transform_from_storage(self, value):
return value
| mit | Python |
aa3a8ee76f85ef1c3c4c0beb7b6c46a0c69961f1 | allow absent of tornado | mSOHU/http2 | http2/__init__.py | http2/__init__.py | # -*- coding: utf-8 -*-
try:
from tornado import version_info
except ImportError:
pass
else:
if version_info[0] >= 4:
from http2.torando4 import *
| # -*- coding: utf-8 -*-
from tornado import version_info
if version_info[0] >= 4:
from http2.torando4 import *
else:
raise NotImplementedError()
| apache-2.0 | Python |
50b7345c1dcb3c2fcc05fa61108fa1649ae17a0c | Add admin filters | Iceberg-Marketplace/django-iceberg,izberg-marketplace/django-izberg,izberg-marketplace/django-izberg,Iceberg-Marketplace/django-iceberg | django_iceberg/admin.py | django_iceberg/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from django_iceberg.models import UserIcebergModel
class UserIcebergModelAdmin(admin.ModelAdmin):
list_display = ('user', 'environment', 'last_updated', 'application_namespace')
list_filter = ('environment', 'last_updated')
search_fields = ('user_username', 'user_first_name')
raw_id_fields = ("user",)
admin.site.register(UserIcebergModel, UserIcebergModelAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from django_iceberg.models import UserIcebergModel
class UserIcebergModelAdmin(admin.ModelAdmin):
list_display = ('user', 'environment', 'last_updated', 'application_namespace')
raw_id_fields = ("user",)
admin.site.register(UserIcebergModel, UserIcebergModelAdmin)
| mit | Python |
019a1ab10b71d4bb768e96957e9d485efeb588fc | add admin class for Attachment model --- djangobb_forum/admin.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) | saifrahmed/DjangoBB,agepoly/DjangoBB,hsoft/slimbb,slav0nic/DjangoBB,hsoft/slimbb,saifrahmed/DjangoBB,rashoodkhan/DjangoBB,slav0nic/DjangoBB,slav0nic/DjangoBB,hsoft/slimbb,hsoft/DjangoBB,hsoft/DjangoBB,rashoodkhan/DjangoBB,agepoly/DjangoBB,agepoly/DjangoBB,hsoft/DjangoBB,saifrahmed/DjangoBB | djangobb_forum/admin.py | djangobb_forum/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth import admin as auth_admin
from django.contrib.auth.models import User
from djangobb_forum.models import Category, Forum, Topic, Post, Profile, Reputation, \
Report, Ban, Attachment
class CategoryAdmin(admin.ModelAdmin):
list_display = ['name', 'position', 'forum_count']
class ForumAdmin(admin.ModelAdmin):
list_display = ['name', 'category', 'position', 'topic_count']
raw_id_fields = ['moderators', 'last_post']
class TopicAdmin(admin.ModelAdmin):
list_display = ['name', 'forum', 'created', 'head', 'post_count']
search_fields = ['name']
raw_id_fields = ['user', 'subscribers', 'last_post']
class PostAdmin(admin.ModelAdmin):
list_display = ['topic', 'user', 'created', 'updated', 'summary']
search_fields = ['body']
raw_id_fields = ['topic', 'user', 'updated_by']
class ProfileAdmin(admin.ModelAdmin):
list_display = ['user', 'status', 'time_zone', 'location', 'language']
raw_id_fields = ['user']
class ReputationAdmin(admin.ModelAdmin):
list_display = ['from_user', 'to_user', 'post', 'sign', 'time', 'reason']
raw_id_fields = ['from_user', 'to_user', 'post']
class ReportAdmin(admin.ModelAdmin):
list_display = ['reported_by', 'post', 'zapped', 'zapped_by', 'created', 'reason']
raw_id_fields = ['reported_by', 'post']
class BanAdmin(admin.ModelAdmin):
list_display = ['user', 'ban_start', 'ban_end', 'reason']
raw_id_fields = ['user']
class UserAdmin(auth_admin.UserAdmin):
list_display = ['username', 'email', 'first_name', 'last_name', 'is_staff', 'is_active']
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
url(r'^(\d+)/password/$', self.admin_site.admin_view(self.user_change_password), name='user_change_password'),
) + super(auth_admin.UserAdmin, self).get_urls()
class AttachmentAdmin(admin.ModelAdmin):
list_display = ['id', 'name', 'size', 'path', 'hash', ]
search_fields = ['name']
list_display_links = ('name',)
list_filter = ("content_type",)
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Forum, ForumAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(Post, PostAdmin)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Reputation, ReputationAdmin)
admin.site.register(Report, ReportAdmin)
admin.site.register(Ban, BanAdmin)
admin.site.register(Attachment, AttachmentAdmin)
admin.site.disable_action('delete_selected') #disabled, because delete_selected ignoring delete model method
| # -*- coding: utf-8 -*-
from django.contrib import admin
from django.contrib.auth import admin as auth_admin
from django.contrib.auth.models import User
from djangobb_forum.models import Category, Forum, Topic, Post, Profile, Reputation,\
Report, Ban
class CategoryAdmin(admin.ModelAdmin):
list_display = ['name', 'position', 'forum_count']
class ForumAdmin(admin.ModelAdmin):
list_display = ['name', 'category', 'position', 'topic_count']
raw_id_fields = ['moderators', 'last_post']
class TopicAdmin(admin.ModelAdmin):
list_display = ['name', 'forum', 'created', 'head', 'post_count']
search_fields = ['name']
raw_id_fields = ['user', 'subscribers', 'last_post']
class PostAdmin(admin.ModelAdmin):
list_display = ['topic', 'user', 'created', 'updated', 'summary']
search_fields = ['body']
raw_id_fields = ['topic', 'user', 'updated_by']
class ProfileAdmin(admin.ModelAdmin):
list_display = ['user', 'status', 'time_zone', 'location', 'language']
raw_id_fields = ['user']
class ReputationAdmin(admin.ModelAdmin):
list_display = ['from_user', 'to_user', 'post', 'sign', 'time', 'reason']
raw_id_fields = ['from_user', 'to_user', 'post']
class ReportAdmin(admin.ModelAdmin):
list_display = ['reported_by', 'post', 'zapped', 'zapped_by', 'created', 'reason']
raw_id_fields = ['reported_by', 'post']
class BanAdmin(admin.ModelAdmin):
list_display = ['user', 'ban_start', 'ban_end', 'reason']
raw_id_fields = ['user']
class UserAdmin(auth_admin.UserAdmin):
list_display = ['username', 'email', 'first_name', 'last_name', 'is_staff', 'is_active']
def get_urls(self):
from django.conf.urls.defaults import patterns, url
return patterns('',
url(r'^(\d+)/password/$', self.admin_site.admin_view(self.user_change_password), name='user_change_password'),
) + super(auth_admin.UserAdmin, self).get_urls()
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(Category, CategoryAdmin)
admin.site.register(Forum, ForumAdmin)
admin.site.register(Topic, TopicAdmin)
admin.site.register(Post, PostAdmin)
admin.site.register(Profile, ProfileAdmin)
admin.site.register(Reputation, ReputationAdmin)
admin.site.register(Report, ReportAdmin)
admin.site.register(Ban, BanAdmin)
admin.site.disable_action('delete_selected') #disabled, because delete_selected ignoring delete model method | bsd-3-clause | Python |
8a6b88c38b2844fba03b6664fe828ebbd5a08a68 | use pkdlog so it passes test for pkdp | radiasoft/pykern,radiasoft/pykern | tests/pkdebug2_test.py | tests/pkdebug2_test.py | # -*- coding: utf-8 -*-
u"""pytest for `pykern.pkdebug`
:copyright: Copyright (c) 2015 Bivio Software, Inc. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
def test_format(capsys):
from pykern import pkconfig
pkconfig.reset_state_for_testing({
'PYKERN_PKDEBUG_MAX_DEPTH': '2',
'PYKERN_PKDEBUG_MAX_ELEMENTS': '5',
'PYKERN_PKDEBUG_MAX_STRING': '5',
})
from pykern.pkdebug import pkdlog
def _e(expected, value):
pkdlog('{}', value)
out, err = capsys.readouterr()
err = ' '.join(err.split(' ')[1:])
assert expected + '\n' == err, 'expected={} actual={}'.format(expected, err)
_e(
"{'a': 'b', 'c': {'d': {<SNIP>}}, 'h': 'i'}",
{'a': 'b', 'c': {'d': {'e': {'f': 'g'}}}, 'h': 'i'},
)
_e(
'[1, 2, 3, 4, 5, 6, <SNIP>]',
[1, 2, 3, 4, 5, 6, 7, 8],
)
_e(
'(1, 2, 3, 4)',
(1, 2, 3, 4),
)
_e(
'(1, {2, 3}, {4: 5}, [6, 7])',
(1, {2, 3}, {4: 5}, [6, 7])
)
_e(
"{'Passw<SNIP>': '<REDA<SNIP>', 'c': {'botp': '<REDA<SNIP>'}, 'totp': '<REDA<SNIP>', 'q': ['pAssw<SNIP>', 1], 'x': 'y', 's': 'r', <SNIP>}",
{'Passwd': 'b', 'c': {'botp': 'a'}, 'totp': 'iiii', 'q': ['pAssword', 1], 'x': 'y', 's': 'r', 't': 'u'},
)
_e('a' * 5 + '<SNIP>', 'a' * 80)
_e('<SNIP>' + 'a' * 5, '\n File "' + 'a' * 80)
| # -*- coding: utf-8 -*-
u"""pytest for `pykern.pkdebug`
:copyright: Copyright (c) 2015 Bivio Software, Inc. All Rights Reserved.
:license: http://www.apache.org/licenses/LICENSE-2.0.html
"""
from __future__ import absolute_import, division, print_function
def test_format(capsys):
from pykern import pkconfig
pkconfig.reset_state_for_testing({
'PYKERN_PKDEBUG_MAX_DEPTH': '2',
'PYKERN_PKDEBUG_MAX_ELEMENTS': '5',
'PYKERN_PKDEBUG_MAX_STRING': '5',
})
from pykern.pkdebug import pkdp
def _e(expected, value):
pkdp('{}', value)
out, err = capsys.readouterr()
err = ' '.join(err.split(' ')[1:])
assert expected + '\n' == err, 'expected={} actual={}'.format(expected, err)
_e(
"{'a': 'b', 'c': {'d': {<SNIP>}}, 'h': 'i'}",
{'a': 'b', 'c': {'d': {'e': {'f': 'g'}}}, 'h': 'i'},
)
_e(
'[1, 2, 3, 4, 5, 6, <SNIP>]',
[1, 2, 3, 4, 5, 6, 7, 8],
)
_e(
'(1, 2, 3, 4)',
(1, 2, 3, 4),
)
_e(
'(1, {2, 3}, {4: 5}, [6, 7])',
(1, {2, 3}, {4: 5}, [6, 7])
)
_e(
"{'Passw<SNIP>': '<REDA<SNIP>', 'c': {'botp': '<REDA<SNIP>'}, 'totp': '<REDA<SNIP>', 'q': ['pAssw<SNIP>', 1], 'x': 'y', 's': 'r', <SNIP>}",
{'Passwd': 'b', 'c': {'botp': 'a'}, 'totp': 'iiii', 'q': ['pAssword', 1], 'x': 'y', 's': 'r', 't': 'u'},
)
_e('a' * 5 + '<SNIP>', 'a' * 80)
_e('<SNIP>' + 'a' * 5, '\n File "' + 'a' * 80)
| apache-2.0 | Python |
db4b63ee097116c5be711d1b6a69100065f1a885 | update format unicode | alswl/weby | weby/utils.py | weby/utils.py | # coding=utf8
from datetime import datetime, date
import json
def format_dic(dic):
"""将 dic 格式化为 JSON,处理日期等特殊格式"""
for key, value in dic.iteritems():
dic[key] = format_value(value)
return dic
def format_value(value, include_fields=[], is_compact=True):
if isinstance(value, dict):
return format_dic(value)
elif isinstance(value, list):
return format_list(value)
elif isinstance(value, datetime):
return value.isoformat()
#elif isinstance(value, bool):
#return 1 if value else 0
elif hasattr(value, 'to_api_dic'):
return value.to_api_dic(include_fields, is_compact)
else:
try:
json.dumps(value)
return value
except:
return unicode(value)
def format_list(l):
return [format_value(x) for x in l]
| # coding=utf8
from datetime import datetime, date
def format_dic(dic):
"""将 dic 格式化为 JSON,处理日期等特殊格式"""
for key, value in dic.iteritems():
dic[key] = format_value(value)
return dic
def format_value(value):
if isinstance(value, dict):
return format_dic(value)
elif isinstance(value, list):
return format_list(value)
elif isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, date):
return value.isoformat()
#elif isinstance(value, API_V1_Mixture):
#return value.to_api_dic(is_compact=True)
else:
return value
def format_list(l):
return [format_value(x) for x in l]
| mit | Python |
72df56880ffaf0aba3b6f919d5a7f2add32623dc | Update binary_clock.py | mcgyver5/python_binary_clock | binary_clock.py | binary_clock.py | __author__ = 'tim mcguire'
import datetime
import math
import Tkinter
import sys,os
def to_binary(dec, width):
x = width - 1
answer = ""
while x >= 0:
current_power = math.pow(2, x)
# how many powers of two fit into dec?
how_many = int(dec / current_power)
answer += str(how_many)
dec -= how_many * current_power
x -= 1
return answer
def draw_vertical_line(x):
main_canvas.create_line(x+17,start_y+20,x+17,start_y - 60)
def fill_dots(times_to_use, x,length):
tup = tens_and_ones(times_to_use)
for num in tup:
binary_string = to_binary(num, length)
length =4
x += right_step
y = start_y
for bit in reversed(binary_string):
coord = x, y, x + dot_size, y + dot_size
if bit == '1':
main_canvas.create_oval(coord, fill="red")
else:
main_canvas.create_oval(coord, fill="blue")
y -= 15
return x
def tens_and_ones(num):
tens = int(num / 10)
ones = num % 10
return tens, ones
def run(master):
t = datetime.datetime.now()
time_collection = t.hour, t.minute, t.second
x = 15
length =2
for val in time_collection:
# val is the numeric value, x is horizontal offset, length is how many dots tall the stack will be
x = fill_dots(val, x,length)
draw_vertical_line(x)
length =3
main_canvas.pack()
main_canvas.after(200, run, master)
time_format = 24
if len(sys.argv) >= 2:
time_format = sys.argv[1]
start_y = 150
right_step = 20
dot_size = 15
root = Tkinter.Tk()
root.geometry('300x200')
main_canvas = Tkinter.Canvas(root, bg="blue", height=300, width=200)
run(main_canvas)
root.mainloop()
| __author__ = 'tim mcguire'
import datetime
import math
import Tkinter
import sys,os
def to_binary(dec, width):
x = width - 1
answer = ""
while x >= 0:
current_power = math.pow(2, x)
# how many powers of two fit into dec?
how_many = int(dec / current_power)
answer += str(how_many)
dec -= how_many * current_power
x -= 1
return answer
def draw_vertical_line(x):
main_canvas.create_line(x+17,start_y+20,x+17,start_y - 60)
def fill_dots(times_to_use, x,length):
tup = tens_and_ones(times_to_use)
for num in tup:
binary_string = to_binary(num, length)
length =4
x += right_step
y = start_y
for bit in reversed(binary_string):
coord = x, y, x + dot_size, y + dot_size
if bit == '1':
main_canvas.create_oval(coord, fill="red")
else:
main_canvas.create_oval(coord, fill="blue")
y -= 15
return x
def tens_and_ones(num):
tens = int(num / 10)
ones = num % 10
return tens, ones
def run(master):
t = datetime.datetime.now()
time_collection = t.hour, t.minute, t.second
x = 15
length =2
for val in time_collection:
# val is the numeric value, x is horizontal offset, length is how many dots tall the stack will be
x = fill_dots(val, x,length)
draw_vertical_line(x)
length =3
main_canvas.pack()
main_canvas.after(200, run, master)
time_format = sys.argv[1]
start_y = 150
right_step = 20
dot_size = 15
root = Tkinter.Tk()
root.geometry('300x200')
main_canvas = Tkinter.Canvas(root, bg="blue", height=300, width=200)
run(main_canvas)
root.mainloop() | apache-2.0 | Python |
36ca52e816a2938c6723e3ec2ed4a350958c78d8 | remove comments | mcgyver5/python_binary_clock | binary_clock.py | binary_clock.py | __author__ = 'tim mcguire'
import datetime
import math
import Tkinter
def to_binary(dec, width):
x = width - 1
answer = ""
while x >= 0:
current_power = math.pow(2, x)
# how many powers of two fit into dec?
how_many = int(dec / current_power)
answer += str(how_many)
dec -= how_many * current_power
x -= 1
return answer
def fill_dots(times_to_use, x,length):
tup = tens_and_ones(times_to_use)
for num in tup:
binary_string = to_binary(num, length)
length =4
x += right_step
y = start_y
for bit in reversed(binary_string):
coord = x, y, x + dot_size, y + dot_size
if bit == '1':
main_canvas.create_oval(coord, fill="red")
else:
main_canvas.create_oval(coord, fill="blue")
y -= 15
return x
def tens_and_ones(num):
tens = int(num / 10)
ones = num % 10
return tens, ones
def run(master):
t = datetime.datetime.now()
time_collection = t.hour, t.minute, t.second
x = 15
length =2
for val in time_collection:
# val is the numeric value, x is horizontal offset, length is how many dots tall the stack will be
x = fill_dots(val, x,length)
length =3
main_canvas.pack()
main_canvas.after(200, run, master)
start_y = 150
right_step = 20
dot_size = 15
root = Tkinter.Tk()
root.geometry('300x200')
main_canvas = Tkinter.Canvas(root, bg="blue", height=300, width=200)
run(main_canvas)
root.mainloop() | __author__ = 'tim mcguire'
import datetime
import math
import Tkinter
def to_binary(dec, width):
x = width - 1
answer = ""
while x >= 0:
current_power = math.pow(2, x)
# how many powers of two fit into dec?
how_many = int(dec / current_power)
answer += str(how_many)
dec -= how_many * current_power
x -= 1
return answer
def fill_dots(times_to_use, x,length):
tup = tens_and_ones(times_to_use)
for num in tup:
#2,4,3,4,3,4
binary_string = to_binary(num, length)
length =4
x += right_step
y = start_y
for bit in reversed(binary_string):
coord = x, y, x + dot_size, y + dot_size
if bit == '1':
main_canvas.create_oval(coord, fill="red")
else:
main_canvas.create_oval(coord, fill="blue")
y -= 15
return x
def tens_and_ones(num):
tens = int(num / 10)
ones = num % 10
return tens, ones
def run(master):
t = datetime.datetime.now()
time_collection = t.hour, t.minute, t.second
x = 15
length =2
for val in time_collection:
# val is the numeric value, x is horizontal offset, length is how many dots tall the stack will be
x = fill_dots(val, x,length)
length =3
main_canvas.pack()
main_canvas.after(200, run, master)
start_y = 150
right_step = 20
dot_size = 15
root = Tkinter.Tk()
root.geometry('300x200')
main_canvas = Tkinter.Canvas(root, bg="blue", height=300, width=200)
run(main_canvas)
root.mainloop() | apache-2.0 | Python |
e59d6be5a31dbe775f6481d079f0f4e81a27a9ce | Add import of the re module to the utils module | SizzlingVortex/classyfd | classyfd/utils.py | classyfd/utils.py | """
Contains utility functions used within this library that are also useful
outside of it.
"""
import os
import pwd
import string
import random
import re
# Operating System Functions
def determine_if_os_is_posix_compliant():
"""
Determine if the operating system is POSIX compliant or not
Return Value:
(bool)
"""
return bool(os.name == "posix")
def determine_if_running_as_root_user():
"""
Determine if the user running Python is "root" or not
Supported Operating Systems:
Unix-like
Return Value:
(bool)
"""
# 0 is the UID used for most Unix-like systems for the root user. In the
# event that it's not, another check is done to see if the username is
# "root".
#
# For an explanation on why os.geteuid was used, instead of os.getuid,
# see: http://stackoverflow.com/a/14951764
is_running_as_root = bool(
os.geteuid() == 0 or
pwd.getpwuid(os.geteuid()).pw_name.lower() == "root"
)
return is_running_as_root
# File Functions
def get_random_file_name(directory):
"""
Generate a random, unique file name of 32 characters
The generated file name may include lowercase letters and numbers.
Parameters:
directory -- (str) the directory the file will be in. This will determine
the unique name given to it.
Return Value:
random_file_name -- (str) this is just a randomly generated file name, so
the full/absolute path is not included.
"""
CHARACTER_LENGTH = 32
NUMBERS = string.digits
LETTERS = string.ascii_lowercase
VALID_CHARACTERS = tuple(LETTERS + NUMBERS)
while True:
random_file_name = ""
for i in range(CHARACTER_LENGTH):
random_file_name += random.choice(VALID_CHARACTERS)
file_path_already_exists = os.path.exists(
os.path.join(directory, random_file_name)
)
if file_path_already_exists:
# Try again
continue
else:
# Sweet, use the generated file name
break
return random_file_name
| """
Contains utility functions used within this library that are also useful
outside of it.
"""
import os
import pwd
import string
import random
# Operating System Functions
def determine_if_os_is_posix_compliant():
"""
Determine if the operating system is POSIX compliant or not
Return Value:
(bool)
"""
return bool(os.name == "posix")
def determine_if_running_as_root_user():
"""
Determine if the user running Python is "root" or not
Supported Operating Systems:
Unix-like
Return Value:
(bool)
"""
# 0 is the UID used for most Unix-like systems for the root user. In the
# event that it's not, another check is done to see if the username is
# "root".
#
# For an explanation on why os.geteuid was used, instead of os.getuid,
# see: http://stackoverflow.com/a/14951764
is_running_as_root = bool(
os.geteuid() == 0 or
pwd.getpwuid(os.geteuid()).pw_name.lower() == "root"
)
return is_running_as_root
# File Functions
def get_random_file_name(directory):
"""
Generate a random, unique file name of 32 characters
The generated file name may include lowercase letters and numbers.
Parameters:
directory -- (str) the directory the file will be in. This will determine
the unique name given to it.
Return Value:
random_file_name -- (str) this is just a randomly generated file name, so
the full/absolute path is not included.
"""
CHARACTER_LENGTH = 32
NUMBERS = string.digits
LETTERS = string.ascii_lowercase
VALID_CHARACTERS = tuple(LETTERS + NUMBERS)
while True:
random_file_name = ""
for i in range(CHARACTER_LENGTH):
random_file_name += random.choice(VALID_CHARACTERS)
file_path_already_exists = os.path.exists(
os.path.join(directory, random_file_name)
)
if file_path_already_exists:
# Try again
continue
else:
# Sweet, use the generated file name
break
return random_file_name | mit | Python |
00e865178f8e1762e7cd1ec8d44713d73cc58c47 | tidy up of DynTypedNode in python | AndrewWalker/clast,AndrewWalker/clast,AndrewWalker/clast | clast/__init__.py | clast/__init__.py | import _clast
from _clast import *
def __get(self, kind):
return getattr(self, '_get_' + kind.__name__)()
# Monkey patch an extra method on that we can't do in C++
_clast.DynTypedNode.get = __get
| import _clast
from _clast import *
## REPRESENTATIVE CLASSES ONLY
def cxxRecordDecl(*args):
return _clast._cxxRecordDecl(list(args))
def decl(*args):
return _clast._decl(list(args))
def stmt(*args):
return _clast._stmt(list(args))
def forStmt(*args):
return _clast._forStmt(list(args))
def hasLoopInit(arg):
return _clast._hasLoopInit(arg)
def ifStmt(*args):
return _clast._ifStmt(list(args))
def hasCondition(expr):
return _clast._hasCondition(expr)
| mit | Python |
a4db65ff4c5b3edd4739b0864f4e1641b37b3b87 | Remove wrong comment | pypa/setuptools,pypa/setuptools,pypa/setuptools | setuptools/tests/test_logging.py | setuptools/tests/test_logging.py | import inspect
import logging
import os
import pytest
setup_py = """\
from setuptools import setup
setup(
name="test_logging",
version="0.0"
)
"""
@pytest.mark.parametrize(
"flag, expected_level", [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
)
def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
"""Make sure the correct verbosity level is set (issue #3038)"""
import setuptools # noqa: Import setuptools to monkeypatch distutils
import distutils # <- load distutils after all the patches take place
logger = logging.Logger(__name__)
monkeypatch.setattr(logging, "root", logger)
unset_log_level = logger.getEffectiveLevel()
assert logging.getLevelName(unset_log_level) == "NOTSET"
setup_script = tmp_path / "setup.py"
setup_script.write_text(setup_py)
dist = distutils.core.run_setup(setup_script, stop_after="init")
dist.script_args = [flag, "sdist"]
dist.parse_command_line() # <- where the log level is set
log_level = logger.getEffectiveLevel()
log_level_name = logging.getLevelName(log_level)
assert log_level_name == expected_level
def test_patching_does_not_cause_problems():
# Ensure `dist.log` is only patched if necessary
import setuptools.logging
from distutils import dist
setuptools.logging.configure()
if os.getenv("SETUPTOOLS_USE_DISTUTILS", "local").lower() == "local":
# Modern logging infra, no problematic patching.
assert isinstance(dist.log, logging.Logger)
else:
assert inspect.ismodule(dist.log)
| import inspect
import logging
import os
import pytest
setup_py = """\
from setuptools import setup
setup(
name="test_logging",
version="0.0"
)
"""
@pytest.mark.parametrize(
"flag, expected_level", [("--dry-run", "INFO"), ("--verbose", "DEBUG")]
)
def test_verbosity_level(tmp_path, monkeypatch, flag, expected_level):
"""Make sure the correct verbosity level is set (issue #3038)"""
import setuptools # noqa: Import setuptools to monkeypatch distutils
import distutils # <- load distutils after all the patches take place
logger = logging.Logger(__name__)
monkeypatch.setattr(logging, "root", logger)
unset_log_level = logger.getEffectiveLevel()
assert logging.getLevelName(unset_log_level) == "NOTSET"
setup_script = tmp_path / "setup.py"
setup_script.write_text(setup_py)
dist = distutils.core.run_setup(setup_script, stop_after="init")
dist.script_args = [flag, "sdist"]
dist.parse_command_line() # <- where the log level is set
log_level = logger.getEffectiveLevel()
log_level_name = logging.getLevelName(log_level)
assert log_level_name == expected_level
def test_patching_does_not_cause_problems():
# Ensure `dist.log` is only patched if necessary
import setuptools.logging
from distutils import dist # <- load distutils after all the patches take place
setuptools.logging.configure()
if os.getenv("SETUPTOOLS_USE_DISTUTILS", "local").lower() == "local":
# Modern logging infra, no problematic patching.
assert isinstance(dist.log, logging.Logger)
else:
assert inspect.ismodule(dist.log)
| mit | Python |
7bccd20523f96728db7a6b5fd23cb339787ecd3a | Bump to 1.1.4 | kivy/pyjnius,kivy/pyjnius,kivy/pyjnius | jnius/__init__.py | jnius/__init__.py | '''
Pyjnius
=======
Accessing Java classes from Python.
All the documentation is available at: http://pyjnius.readthedocs.org
'''
__version__ = '1.1.4'
from .jnius import * # noqa
from .reflect import * # noqa
from six import with_metaclass
# XXX monkey patch methods that cannot be in cython.
# Cython doesn't allow to set new attribute on methods it compiled
HASHCODE_MAX = 2 ** 31 - 1
class PythonJavaClass_(with_metaclass(MetaJavaBase, PythonJavaClass)):
@java_method('()I', name='hashCode')
def hashCode(self):
return id(self) % HASHCODE_MAX
@java_method('()Ljava/lang/String;', name='hashCode')
def hashCode_(self):
return '{}'.format(self.hashCode())
@java_method('()Ljava/lang/String;', name='toString')
def toString(self):
return repr(self)
@java_method('(Ljava/lang/Object;)Z', name='equals')
def equals(self, other):
return self.hashCode() == other.hashCode()
PythonJavaClass = PythonJavaClass_
# from https://gist.github.com/tito/09c42fb4767721dc323d
import os
if "ANDROID_ARGUMENT" in os.environ:
# on android, catch all exception to ensure about a jnius.detach
import threading
import jnius
orig_thread_run = threading.Thread.run
def jnius_thread_hook(*args, **kwargs):
try:
return orig_thread_run(*args, **kwargs)
finally:
jnius.detach()
threading.Thread.run = jnius_thread_hook
| '''
Pyjnius
=======
Accessing Java classes from Python.
All the documentation is available at: http://pyjnius.readthedocs.org
'''
__version__ = '1.1.4.dev0'
from .jnius import * # noqa
from .reflect import * # noqa
from six import with_metaclass
# XXX monkey patch methods that cannot be in cython.
# Cython doesn't allow to set new attribute on methods it compiled
HASHCODE_MAX = 2 ** 31 - 1
class PythonJavaClass_(with_metaclass(MetaJavaBase, PythonJavaClass)):
@java_method('()I', name='hashCode')
def hashCode(self):
return id(self) % HASHCODE_MAX
@java_method('()Ljava/lang/String;', name='hashCode')
def hashCode_(self):
return '{}'.format(self.hashCode())
@java_method('()Ljava/lang/String;', name='toString')
def toString(self):
return repr(self)
@java_method('(Ljava/lang/Object;)Z', name='equals')
def equals(self, other):
return self.hashCode() == other.hashCode()
PythonJavaClass = PythonJavaClass_
# from https://gist.github.com/tito/09c42fb4767721dc323d
import os
if "ANDROID_ARGUMENT" in os.environ:
# on android, catch all exception to ensure about a jnius.detach
import threading
import jnius
orig_thread_run = threading.Thread.run
def jnius_thread_hook(*args, **kwargs):
try:
return orig_thread_run(*args, **kwargs)
finally:
jnius.detach()
threading.Thread.run = jnius_thread_hook
| mit | Python |
6a27bd99352e4dc7f38c6f819a8a45b37c1a094c | Remove TODO to add requirements.txt | jbrudvik/yahoo-fantasy-basketball | start-active-players.py | start-active-players.py | """
Start active players for the week
Ideas:
- Include the names of players who cannot be started
- And maybe the full roster on those dates
"""
import requests
from bs4 import BeautifulSoup
# TODO: Configure this somewhere better (as a direct argument to the script, probably
TEAM_URL = 'http://basketball.fantasysports.yahoo.com/nba/178276/6/'
headers = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36'
}
response = requests.get(TEAM_URL, headers=headers)
soup = BeautifulSoup(response.text)
inputs = soup.find(id='hiddens').findAll('input')
fields = {input['name']: input['value'] for input in inputs}
print(fields)
| """
Start active players for the week
Ideas:
- Include the names of players who cannot be started
- And maybe the full roster on those dates
TODO:
- Add required packages in requirements.txt
"""
import requests
from bs4 import BeautifulSoup
# TODO: Configure this somewhere better (as a direct argument to the script, probably
TEAM_URL = 'http://basketball.fantasysports.yahoo.com/nba/178276/6/'
headers = {
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/39.0.2171.99 Safari/537.36'
}
response = requests.get(TEAM_URL, headers=headers)
soup = BeautifulSoup(response.text)
inputs = soup.find(id='hiddens').findAll('input')
fields = {input['name']: input['value'] for input in inputs}
print(fields)
| mit | Python |
a6390df0f4fb9c9402b1c795d4bf65765b793412 | Bump dallinger version | Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger,Dallinger/Dallinger | dallinger/version.py | dallinger/version.py | """Dallinger version number."""
__version__ = "5.0.6"
| """Dallinger version number."""
__version__ = "5.0.5"
| mit | Python |
7ed8de3d15941c683ae70c15a6ce50bbe29a6580 | remove unused field from books | openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms | books/models.py | books/models.py | from django.db import models
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import (FieldPanel,
InlinePanel)
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
# Create your models here.
class Book(Page):
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
revision = models.CharField(max_length=255, blank=True, null=True)
description = RichTextField(blank=True)
cover_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
publish_date = models.DateField(blank=True, null=True)
isbn_10 = models.IntegerField(blank=True, null=True)
isbn_13 = models.CharField(max_length=255, blank=True, null=True)
content_panels = Page.content_panels + [
FieldPanel('revision'),
FieldPanel('description', classname="full"),
ImageChooserPanel('cover_image'),
FieldPanel('publish_date'),
FieldPanel('isbn_10'),
FieldPanel('isbn_13'),
]
api_fields = ('created',
'updated',
'revision',
'description',
'cover_image',
'publish_date',
'isbn_10',
'isbn_13') | from django.db import models
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailcore.fields import RichTextField
from wagtail.wagtailadmin.edit_handlers import (FieldPanel,
InlinePanel)
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
# Create your models here.
class Book(Page):
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
revision = models.CharField(max_length=255, blank=True, null=True)
description = RichTextField(blank=True)
cover_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=True,
on_delete=models.SET_NULL,
related_name='+'
)
publish_date = models.DateField(blank=True, null=True)
isbn_10 = models.IntegerField(blank=True, null=True)
isbn_13 = models.CharField(max_length=255, blank=True, null=True)
content_panels = Page.content_panels + [
FieldPanel('name'),
FieldPanel('revision'),
FieldPanel('description', classname="full"),
ImageChooserPanel('cover_image'),
FieldPanel('publish_date'),
FieldPanel('isbn_10'),
FieldPanel('isbn_13'),
]
api_fields = ('created',
'updated',
'revision',
'description',
'cover_image',
'publish_date',
'isbn_10',
'isbn_13') | agpl-3.0 | Python |
e0b3e23d4890a10f8bca4c699e5a9cd6294fee29 | add xpub | chaeplin/dashmnb,chaeplin/dashmnb | keepkey-for-mn.py | keepkey-for-mn.py | #!/usr/bin/env python3
import sys, os
sys.path.append( os.path.join( os.path.dirname(__file__), '.' ) )
sys.path.append( os.path.join( os.path.dirname(__file__), '.', 'dashlib' ) )
from config import *
from keepkeylib.client import KeepKeyClient
from keepkeylib.transport_hid import HidTransport
import keepkeylib.ckd_public as bip32
def main():
# List all connected KeepKeys on USB
devices = HidTransport.enumerate()
# Check whether we found any
if len(devices) == 0:
print('No KeepKey found')
return
# Use first connected device
transport = HidTransport(devices[0])
# Creates object for manipulating KeepKey
client = KeepKeyClient(transport)
# Print out KeepKey's features and settings
# print(client.features)
keypath = mpath
bip32_path = client.expand_path(keypath)
# xpub to use
#print('xpub/tpub --> ' + bip32.serialize(client.get_public_node(bip32_path).node, 0x043587CF))
print('xpub/tpub --> ' + bip32.serialize(client.get_public_node(bip32_path).node, ( 0x0488B21E if MAINNET else 0x043587CF )))
for i in range(max_gab):
child_path = '%s%s' % (keypath + '/', str(i))
address = client.get_address(coin_name, client.expand_path(child_path))
print (coin_name +' address:', child_path, address)
client.close()
if __name__ == '__main__':
main()
# end | #!/usr/bin/env python3
import sys, os
sys.path.append( os.path.join( os.path.dirname(__file__), '.' ) )
sys.path.append( os.path.join( os.path.dirname(__file__), '.', 'dashlib' ) )
from config import *
from keepkeylib.client import KeepKeyClient
from keepkeylib.transport_hid import HidTransport
import keepkeylib.ckd_public as bip32
def main():
# List all connected KeepKeys on USB
devices = HidTransport.enumerate()
# Check whether we found any
if len(devices) == 0:
print('No KeepKey found')
return
# Use first connected device
transport = HidTransport(devices[0])
# Creates object for manipulating KeepKey
client = KeepKeyClient(transport)
# Print out KeepKey's features and settings
# print(client.features)
keypath = mpath
bip32_path = client.expand_path(keypath)
# xpub to use
print('xpub --> ' + bip32.serialize(client.get_public_node(bip32_path).node, 0x043587CF))
for i in range(max_gab):
child_path = '%s%s' % (keypath + '/', str(i))
address = client.get_address(coin_name, client.expand_path(child_path))
print (coin_name +' address:', child_path, address)
client.close()
if __name__ == '__main__':
main()
# end | mit | Python |
6fc68abdb48134f4e647f0a1d69becd374d1147f | add missing Python file encoding | brasilcomvc/brasilcomvc,brasilcomvc/brasilcomvc,brasilcomvc/brasilcomvc | brasilcomvc/accounts/admin.py | brasilcomvc/accounts/admin.py | # coding: utf8
from __future__ import unicode_literals
from django.contrib import admin
from .models import User, UserAddress
class UserAdmin(admin.ModelAdmin):
class UserAddressInline(admin.StackedInline):
model = UserAddress
list_display = ('email', 'full_name', 'username',)
fieldsets = (
('Informações Pessoais', {
'fields': ('full_name', 'username', 'email',),
}),
('Informações Profissionais', {
'fields': ('job_title', 'bio',),
}),
('Notificações', {
'fields': ('email_newsletter',),
}),
)
inlines = (UserAddressInline,)
admin.site.register(User, UserAdmin)
| from django.contrib import admin
from .models import User, UserAddress
class UserAdmin(admin.ModelAdmin):
class UserAddressInline(admin.StackedInline):
model = UserAddress
list_display = ('email', 'full_name', 'username',)
fieldsets = (
('Informações Pessoais', {
'fields': ('full_name', 'username', 'email',),
}),
('Informações Profissionais', {
'fields': ('job_title', 'bio',),
}),
('Notificações', {
'fields': ('email_newsletter',),
}),
)
inlines = (UserAddressInline,)
admin.site.register(User, UserAdmin)
| apache-2.0 | Python |
56cbbef7b8bbfa31445dad1561c4014804250fd5 | fix test | DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core | kyototycoon/test/test_kyototycoon.py | kyototycoon/test/test_kyototycoon.py | # (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from nose.plugins.attrib import attr
# 3p
import requests
# project
from checks import AgentCheck
from tests.checks.common import AgentCheckTest
config = {
'instances': [{
'report_url': 'http://localhost:1978/rpc/report',
'tags': ['optional:tag1']
}]
}
METRICS = [
'kyototycoon.threads',
'kyototycoon.connections_per_s',
'kyototycoon.ops.get.hits_per_s',
'kyototycoon.ops.get.misses_per_s',
'kyototycoon.ops.set.hits_per_s',
'kyototycoon.ops.set.misses_per_s',
'kyototycoon.ops.del.hits_per_s',
'kyototycoon.ops.del.misses_per_s',
'kyototycoon.records',
'kyototycoon.size',
'kyototycoon.ops.get.total_per_s',
'kyototycoon.ops.get.total_per_s',
'kyototycoon.ops.set.total_per_s',
'kyototycoon.ops.set.total_per_s',
'kyototycoon.ops.del.total_per_s',
'kyototycoon.ops.del.total_per_s',
# 'kyototycoon.replication.delay', # Since I am not spinning up multiple servers, this should be 0
]
@attr(requires='kyototycoon')
class TestKyototycoon(AgentCheckTest):
"""Basic Test for kyototycoon integration."""
CHECK_NAME = 'kyototycoon'
def setUp(self):
dat = {
'dddd': 'dddd'
}
headers = {
'X-Kt-Mode': 'set'
}
for x in range(0, 100):
requests.put('http://localhost:1978', data=dat, headers=headers)
requests.get('http://localhost:1978')
def test_check(self):
"""
Testing Kyototycoon check.
"""
self.run_check_twice(config)
for mname in METRICS:
if mname == 'kyototycoon.records' or 'kyototycoon.size':
self.assertMetric(mname, count=1, at_least=0, tags=['optional:tag1', 'db:0'])
else:
self.assertMetric(mname, count=1, at_least=0, tags=['optional:tag1'])
self.assertServiceCheck('kyototycoon.can_connect', status=AgentCheck.OK, tags=['optional:tag1'], at_least=1)
self.coverage_report()
| # (C) Datadog, Inc. 2010-2017
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
# stdlib
from nose.plugins.attrib import attr
# 3p
import requests
# project
from checks import AgentCheck
from tests.checks.common import AgentCheckTest
config = {
'instances': [{
'report_url': 'http://localhost:1978/rpc/report',
'tags': ['optional:tag1']
}]
}
METRICS = [
'kyototycoon.threads',
'kyototycoon.connections_per_s',
'kyototycoon.ops.get.hits_per_s',
'kyototycoon.ops.get.misses_per_s',
'kyototycoon.ops.set.hits_per_s',
'kyototycoon.ops.set.misses_per_s',
'kyototycoon.ops.del.hits_per_s',
'kyototycoon.ops.del.misses_per_s',
'kyototycoon.records',
'kyototycoon.size',
'kyototycoon.ops.get.total_per_s',
'kyototycoon.ops.get.total_per_s',
'kyototycoon.ops.set.total_per_s',
'kyototycoon.ops.set.total_per_s',
'kyototycoon.ops.del.total_per_s',
'kyototycoon.ops.del.total_per_s',
# 'kyototycoon.replication.delay', # Since I am not spinning up multiple servers, this should be 0
]
@attr(requires='kyototycoon')
class TestKyototycoon(AgentCheckTest):
"""Basic Test for kyototycoon integration."""
CHECK_NAME = 'kyototycoon'
def setUp(self):
dat = {
'dddd': 'dddd'
}
headers = {
'X-Kt-Mode': 'set'
}
for x in range(0, 100):
requests.put('http://localhost:1978', data=dat, headers=headers)
requests.get('http://localhost:1978')
def test_check(self):
"""
Testing Kyototycoon check.
"""
self.run_check_twice(config)
for mname in METRICS:
self.assertMetric(mname, count=1, at_least=0, tags=['optional:tag1'])
self.assertServiceCheck('kyototycoon.can_connect', status=AgentCheck.OK, tags=['optional:tag1'], at_least=1)
self.coverage_report()
| bsd-3-clause | Python |
251a91c1bf245b3674c2612149382a0f1e18dc98 | Add tests for getrpcinfo | droark/bitcoin,achow101/bitcoin,midnightmagic/bitcoin,AkioNak/bitcoin,yenliangl/bitcoin,cdecker/bitcoin,pataquets/namecoin-core,bitcoinsSG/bitcoin,practicalswift/bitcoin,bespike/litecoin,rnicoll/bitcoin,namecoin/namecore,litecoin-project/litecoin,dscotese/bitcoin,fujicoin/fujicoin,CryptArc/bitcoin,bespike/litecoin,mm-s/bitcoin,monacoinproject/monacoin,vertcoin/vertcoin,mruddy/bitcoin,bitcoinknots/bitcoin,fanquake/bitcoin,lateminer/bitcoin,dscotese/bitcoin,jtimon/bitcoin,particl/particl-core,myriadcoin/myriadcoin,n1bor/bitcoin,anditto/bitcoin,afk11/bitcoin,nikkitan/bitcoin,bitcoinsSG/bitcoin,OmniLayer/omnicore,namecoin/namecore,apoelstra/bitcoin,jtimon/bitcoin,peercoin/peercoin,jnewbery/bitcoin,bespike/litecoin,MarcoFalke/bitcoin,bitcoinsSG/bitcoin,fanquake/bitcoin,rnicoll/dogecoin,mruddy/bitcoin,myriadcoin/myriadcoin,r8921039/bitcoin,tecnovert/particl-core,MeshCollider/bitcoin,practicalswift/bitcoin,instagibbs/bitcoin,ElementsProject/elements,mitchellcash/bitcoin,namecoin/namecore,rnicoll/dogecoin,yenliangl/bitcoin,jambolo/bitcoin,bitcoinknots/bitcoin,achow101/bitcoin,domob1812/bitcoin,bitcoin/bitcoin,GroestlCoin/GroestlCoin,bitcoin/bitcoin,qtumproject/qtum,untrustbank/litecoin,Sjors/bitcoin,MeshCollider/bitcoin,EthanHeilman/bitcoin,MeshCollider/bitcoin,bitcoinsSG/bitcoin,alecalve/bitcoin,GroestlCoin/bitcoin,rnicoll/bitcoin,jamesob/bitcoin,droark/bitcoin,GroestlCoin/bitcoin,domob1812/namecore,jambolo/bitcoin,pataquets/namecoin-core,namecoin/namecoin-core,DigitalPandacoin/pandacoin,rnicoll/dogecoin,qtumproject/qtum,vertcoin/vertcoin,tecnovert/particl-core,mruddy/bitcoin,sipsorcery/bitcoin,jtimon/bitcoin,kallewoof/bitcoin,gjhiggins/vcoincore,practicalswift/bitcoin,andreaskern/bitcoin,jlopp/statoshi,n1bor/bitcoin,nikkitan/bitcoin,sstone/bitcoin,OmniLayer/omnicore,untrustbank/litecoin,jamesob/bitcoin,afk11/bitcoin,jambolo/bitcoin,EthanHeilman/bitcoin,prusnak/bitcoin,achow101/bitcoin,JeremyRubin/bitcoi
n,OmniLayer/omnicore,CryptArc/bitcoin,apoelstra/bitcoin,MarcoFalke/bitcoin,cdecker/bitcoin,pstratem/bitcoin,EthanHeilman/bitcoin,GroestlCoin/bitcoin,droark/bitcoin,n1bor/bitcoin,tjps/bitcoin,jtimon/bitcoin,nikkitan/bitcoin,nikkitan/bitcoin,prusnak/bitcoin,AkioNak/bitcoin,jlopp/statoshi,cdecker/bitcoin,pstratem/bitcoin,ajtowns/bitcoin,untrustbank/litecoin,ajtowns/bitcoin,kallewoof/bitcoin,namecoin/namecoin-core,mruddy/bitcoin,yenliangl/bitcoin,instagibbs/bitcoin,fanquake/bitcoin,sipsorcery/bitcoin,afk11/bitcoin,fujicoin/fujicoin,midnightmagic/bitcoin,tjps/bitcoin,bitcoinsSG/bitcoin,fanquake/bitcoin,Xekyo/bitcoin,dscotese/bitcoin,nikkitan/bitcoin,instagibbs/bitcoin,FeatherCoin/Feathercoin,mitchellcash/bitcoin,myriadteam/myriadcoin,domob1812/namecore,AkioNak/bitcoin,jambolo/bitcoin,wellenreiter01/Feathercoin,monacoinproject/monacoin,jtimon/bitcoin,mitchellcash/bitcoin,lateminer/bitcoin,AkioNak/bitcoin,mitchellcash/bitcoin,midnightmagic/bitcoin,namecoin/namecoin-core,sipsorcery/bitcoin,achow101/bitcoin,domob1812/bitcoin,bitcoin/bitcoin,ElementsProject/elements,pstratem/bitcoin,pataquets/namecoin-core,MarcoFalke/bitcoin,apoelstra/bitcoin,gjhiggins/vcoincore,midnightmagic/bitcoin,sipsorcery/bitcoin,ElementsProject/elements,wellenreiter01/Feathercoin,namecoin/namecore,bespike/litecoin,jonasschnelli/bitcoin,bespike/litecoin,peercoin/peercoin,GroestlCoin/GroestlCoin,sstone/bitcoin,particl/particl-core,jambolo/bitcoin,EthanHeilman/bitcoin,namecoin/namecoin-core,ahmedbodi/vertcoin,jonasschnelli/bitcoin,yenliangl/bitcoin,sipsorcery/bitcoin,FeatherCoin/Feathercoin,JeremyRubin/bitcoin,CryptArc/bitcoin,rnicoll/dogecoin,mm-s/bitcoin,andreaskern/bitcoin,instagibbs/bitcoin,prusnak/bitcoin,sipsorcery/bitcoin,kallewoof/bitcoin,jamesob/bitcoin,ahmedbodi/vertcoin,mitchellcash/bitcoin,kallewoof/bitcoin,dscotese/bitcoin,prusnak/bitcoin,MarcoFalke/bitcoin,GroestlCoin/GroestlCoin,ahmedbodi/vertcoin,nikkitan/bitcoin,qtumproject/qtum,vertcoin/vertcoin,jonasschnelli/bitcoin,AkioNak/bitcoin,jame
sob/bitcoin,GroestlCoin/GroestlCoin,particl/particl-core,mitchellcash/bitcoin,fujicoin/fujicoin,vertcoin/vertcoin,bitcoinknots/bitcoin,Sjors/bitcoin,rnicoll/bitcoin,peercoin/peercoin,fujicoin/fujicoin,Sjors/bitcoin,ajtowns/bitcoin,CryptArc/bitcoin,ahmedbodi/vertcoin,peercoin/peercoin,DigitalPandacoin/pandacoin,alecalve/bitcoin,fujicoin/fujicoin,jlopp/statoshi,FeatherCoin/Feathercoin,myriadcoin/myriadcoin,MeshCollider/bitcoin,r8921039/bitcoin,namecoin/namecore,andreaskern/bitcoin,ahmedbodi/vertcoin,DigitalPandacoin/pandacoin,bitcoinsSG/bitcoin,afk11/bitcoin,GroestlCoin/GroestlCoin,tecnovert/particl-core,lateminer/bitcoin,jonasschnelli/bitcoin,tecnovert/particl-core,MeshCollider/bitcoin,peercoin/peercoin,cdecker/bitcoin,n1bor/bitcoin,untrustbank/litecoin,domob1812/namecore,ajtowns/bitcoin,wellenreiter01/Feathercoin,jonasschnelli/bitcoin,monacoinproject/monacoin,alecalve/bitcoin,domob1812/namecore,wellenreiter01/Feathercoin,fanquake/bitcoin,monacoinproject/monacoin,bitcoinknots/bitcoin,bitcoin/bitcoin,OmniLayer/omnicore,FeatherCoin/Feathercoin,dscotese/bitcoin,wellenreiter01/Feathercoin,droark/bitcoin,bespike/litecoin,gjhiggins/vcoincore,domob1812/namecore,alecalve/bitcoin,CryptArc/bitcoin,Xekyo/bitcoin,tjps/bitcoin,GroestlCoin/bitcoin,midnightmagic/bitcoin,myriadteam/myriadcoin,domob1812/bitcoin,myriadteam/myriadcoin,pataquets/namecoin-core,gjhiggins/vcoincore,litecoin-project/litecoin,pataquets/namecoin-core,domob1812/bitcoin,monacoinproject/monacoin,ajtowns/bitcoin,mm-s/bitcoin,apoelstra/bitcoin,sstone/bitcoin,peercoin/peercoin,yenliangl/bitcoin,gjhiggins/vcoincore,FeatherCoin/Feathercoin,practicalswift/bitcoin,lateminer/bitcoin,MeshCollider/bitcoin,ElementsProject/elements,anditto/bitcoin,JeremyRubin/bitcoin,gjhiggins/vcoincore,prusnak/bitcoin,pstratem/bitcoin,achow101/bitcoin,namecoin/namecore,vertcoin/vertcoin,particl/particl-core,mruddy/bitcoin,FeatherCoin/Feathercoin,mm-s/bitcoin,mm-s/bitcoin,tecnovert/particl-core,domob1812/bitcoin,monacoinproject/monacoin,rni
coll/bitcoin,n1bor/bitcoin,litecoin-project/litecoin,anditto/bitcoin,ajtowns/bitcoin,sstone/bitcoin,ElementsProject/elements,droark/bitcoin,untrustbank/litecoin,r8921039/bitcoin,JeremyRubin/bitcoin,sstone/bitcoin,practicalswift/bitcoin,fanquake/bitcoin,practicalswift/bitcoin,EthanHeilman/bitcoin,instagibbs/bitcoin,jnewbery/bitcoin,domob1812/namecore,myriadteam/myriadcoin,Sjors/bitcoin,ahmedbodi/vertcoin,anditto/bitcoin,particl/particl-core,GroestlCoin/GroestlCoin,jnewbery/bitcoin,rnicoll/bitcoin,tjps/bitcoin,DigitalPandacoin/pandacoin,bitcoinknots/bitcoin,domob1812/bitcoin,sstone/bitcoin,andreaskern/bitcoin,tjps/bitcoin,myriadcoin/myriadcoin,andreaskern/bitcoin,myriadcoin/myriadcoin,particl/particl-core,cdecker/bitcoin,r8921039/bitcoin,lateminer/bitcoin,myriadteam/myriadcoin,cdecker/bitcoin,untrustbank/litecoin,mruddy/bitcoin,MarcoFalke/bitcoin,alecalve/bitcoin,prusnak/bitcoin,kallewoof/bitcoin,DigitalPandacoin/pandacoin,GroestlCoin/bitcoin,apoelstra/bitcoin,andreaskern/bitcoin,midnightmagic/bitcoin,pstratem/bitcoin,namecoin/namecoin-core,alecalve/bitcoin,OmniLayer/omnicore,qtumproject/qtum,mm-s/bitcoin,apoelstra/bitcoin,litecoin-project/litecoin,OmniLayer/omnicore,vertcoin/vertcoin,DigitalPandacoin/pandacoin,yenliangl/bitcoin,namecoin/namecoin-core,anditto/bitcoin,instagibbs/bitcoin,myriadteam/myriadcoin,r8921039/bitcoin,qtumproject/qtum,rnicoll/bitcoin,jambolo/bitcoin,lateminer/bitcoin,tecnovert/particl-core,jamesob/bitcoin,JeremyRubin/bitcoin,Xekyo/bitcoin,jamesob/bitcoin,dscotese/bitcoin,JeremyRubin/bitcoin,anditto/bitcoin,CryptArc/bitcoin,Xekyo/bitcoin,jlopp/statoshi,wellenreiter01/Feathercoin,pataquets/namecoin-core,MarcoFalke/bitcoin,EthanHeilman/bitcoin,afk11/bitcoin,jlopp/statoshi,achow101/bitcoin,n1bor/bitcoin,jlopp/statoshi,Sjors/bitcoin,Xekyo/bitcoin,GroestlCoin/bitcoin,jnewbery/bitcoin,rnicoll/dogecoin,pstratem/bitcoin,fujicoin/fujicoin,kallewoof/bitcoin,qtumproject/qtum,litecoin-project/litecoin,myriadcoin/myriadcoin,droark/bitcoin,ElementsProject/elem
ents,tjps/bitcoin,Xekyo/bitcoin,jtimon/bitcoin,bitcoin/bitcoin,r8921039/bitcoin,jnewbery/bitcoin,AkioNak/bitcoin,qtumproject/qtum,afk11/bitcoin,litecoin-project/litecoin,bitcoin/bitcoin | test/functional/interface_rpc.py | test/functional/interface_rpc.py | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than_or_equal
class RPCInterfaceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def test_getrpcinfo(self):
self.log.info("Testing getrpcinfo...")
info = self.nodes[0].getrpcinfo()
assert_equal(len(info['active_commands']), 1)
command = info['active_commands'][0]
assert_equal(command['method'], 'getrpcinfo')
assert_greater_than_or_equal(command['duration'], 0)
def test_batch_request(self):
self.log.info("Testing basic JSON-RPC batch request...")
results = self.nodes[0].batch([
# A basic request that will work fine.
{"method": "getblockcount", "id": 1},
# Request that will fail. The whole batch request should still
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
{"method": "getbestblockhash", "id": 3},
])
result_by_id = {}
for res in results:
result_by_id[res["id"]] = res
assert_equal(result_by_id[1]['error'], None)
assert_equal(result_by_id[1]['result'], 0)
assert_equal(result_by_id[2]['error']['code'], -32601)
assert_equal(result_by_id[2]['result'], None)
assert_equal(result_by_id[3]['error'], None)
assert result_by_id[3]['result'] is not None
def run_test(self):
self.test_getrpcinfo()
self.test_batch_request()
if __name__ == '__main__':
RPCInterfaceTest().main()
| #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class RPCInterfaceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def test_batch_request(self):
self.log.info("Testing basic JSON-RPC batch request...")
results = self.nodes[0].batch([
# A basic request that will work fine.
{"method": "getblockcount", "id": 1},
# Request that will fail. The whole batch request should still
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
{"method": "getbestblockhash", "id": 3},
])
result_by_id = {}
for res in results:
result_by_id[res["id"]] = res
assert_equal(result_by_id[1]['error'], None)
assert_equal(result_by_id[1]['result'], 0)
assert_equal(result_by_id[2]['error']['code'], -32601)
assert_equal(result_by_id[2]['result'], None)
assert_equal(result_by_id[3]['error'], None)
assert result_by_id[3]['result'] is not None
def run_test(self):
self.test_batch_request()
if __name__ == '__main__':
RPCInterfaceTest().main()
| mit | Python |
46c09fd75c6f45d68cd722cd3a12b88d04257083 | Add tests for getrpcinfo | chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin,chaincoin/chaincoin | test/functional/interface_rpc.py | test/functional/interface_rpc.py | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_greater_than_or_equal
class RPCInterfaceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def test_getrpcinfo(self):
self.log.info("Testing getrpcinfo...")
info = self.nodes[0].getrpcinfo()
assert_equal(len(info['active_commands']), 1)
command = info['active_commands'][0]
assert_equal(command['method'], 'getrpcinfo')
assert_greater_than_or_equal(command['duration'], 0)
def test_batch_request(self):
self.log.info("Testing basic JSON-RPC batch request...")
results = self.nodes[0].batch([
# A basic request that will work fine.
{"method": "getblockcount", "id": 1},
# Request that will fail. The whole batch request should still
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
{"method": "getbestblockhash", "id": 3},
])
result_by_id = {}
for res in results:
result_by_id[res["id"]] = res
assert_equal(result_by_id[1]['error'], None)
assert_equal(result_by_id[1]['result'], 0)
assert_equal(result_by_id[2]['error']['code'], -32601)
assert_equal(result_by_id[2]['result'], None)
assert_equal(result_by_id[3]['error'], None)
assert result_by_id[3]['result'] is not None
def run_test(self):
self.test_getrpcinfo()
self.test_batch_request()
if __name__ == '__main__':
RPCInterfaceTest().main()
| #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests some generic aspects of the RPC interface."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
class RPCInterfaceTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def test_batch_request(self):
self.log.info("Testing basic JSON-RPC batch request...")
results = self.nodes[0].batch([
# A basic request that will work fine.
{"method": "getblockcount", "id": 1},
# Request that will fail. The whole batch request should still
# work fine.
{"method": "invalidmethod", "id": 2},
# Another call that should succeed.
{"method": "getbestblockhash", "id": 3},
])
result_by_id = {}
for res in results:
result_by_id[res["id"]] = res
assert_equal(result_by_id[1]['error'], None)
assert_equal(result_by_id[1]['result'], 0)
assert_equal(result_by_id[2]['error']['code'], -32601)
assert_equal(result_by_id[2]['result'], None)
assert_equal(result_by_id[3]['error'], None)
assert result_by_id[3]['result'] is not None
def run_test(self):
self.test_batch_request()
if __name__ == '__main__':
RPCInterfaceTest().main()
| mit | Python |
35cf00b5f05f4b1df8b40b7edc3aac76534c8903 | enable some reduction tests | bh107/bohrium,bh107/bohrium,madsbk/bohrium,madsbk/bohrium,madsbk/bohrium,bh107/bohrium,madsbk/bohrium,bh107/bohrium | test/python/tests/test_reduce.py | test/python/tests/test_reduce.py | import util
class test_reduce_views:
""" Test reduction of all kind of views"""
def init(self):
for cmd, ndim in util.gen_random_arrays("R", 4, dtype="np.float32"):
cmd = "R = bh.random.RandomState(42); a = %s; " % cmd
for i in range(ndim):
yield (cmd, i)
for i in range(ndim):
yield (cmd, -i)
def test_reduce(self, (cmd, axis)):
cmd += "res = M.add.reduce(a, axis=%d)" % axis
return cmd
class test_reduce_sum:
""" Test reduction of sum() and prod()"""
def init(self):
for cmd, ndim in util.gen_random_arrays("R", 3, dtype="np.float32"):
cmd = "R = bh.random.RandomState(42); a = %s; " % cmd
for op in ["sum", "prod"]:
yield (cmd, op)
def test_func(self, (cmd, op)):
cmd += "res = M.%s(a)" % op
return cmd
def test_method(self, (cmd, op)):
cmd += "res = a.%s()" % op
return cmd
class test_reduce_primitives:
def init(self):
for op in ["add", "multiply", "minimum", "maximum"]:
yield (op, "np.float64")
for op in ["bitwise_or", "bitwise_xor"]:
yield (op, "np.uint64")
for op in ["add", "logical_or", "logical_and", "logical_xor"]:
yield (op, "np.bool")
def test_vector(self, (op, dtype)):
cmd = "R = bh.random.RandomState(42); a = R.random(10, dtype=%s, bohrium=BH); " % dtype
cmd += "res = M.%s.reduce(a)" % op
return cmd
| import util
class tes1t_reduce_views:
""" Test reduction of all kind of views"""
def init(self):
for cmd, ndim in util.gen_random_arrays("R", 4, dtype="np.float32"):
cmd = "R = bh.random.RandomState(42); a = %s; " % cmd
for i in range(ndim):
yield (cmd, i)
for i in range(ndim):
yield (cmd, -i)
def test_reduce(self, (cmd, axis)):
cmd += "res = M.add.reduce(a, axis=%d)" % axis
return cmd
class tes1t_reduce_sum:
""" Test reduction of sum() and prod()"""
def init(self):
for cmd, ndim in util.gen_random_arrays("R", 3, dtype="np.float32"):
cmd = "R = bh.random.RandomState(42); a = %s; " % cmd
for op in ["sum", "prod"]:
yield (cmd, op)
def test_func(self, (cmd, op)):
cmd += "res = M.%s(a)" % op
return cmd
def test_method(self, (cmd, op)):
cmd += "res = a.%s()" % op
return cmd
class test_reduce_primitives:
def init(self):
for op in ["add", "multiply", "minimum", "maximum"]:
yield (op, "np.float64")
for op in ["bitwise_or", "bitwise_xor"]:
yield (op, "np.uint64")
for op in ["add", "logical_or", "logical_and", "logical_xor"]:
yield (op, "np.bool")
def test_vector(self, (op, dtype)):
cmd = "R = bh.random.RandomState(42); a = R.random(10, dtype=%s, bohrium=BH); " % dtype
cmd += "res = M.%s.reduce(a)" % op
return cmd
| apache-2.0 | Python |
b2266a2640d542fa6f9734fa9565a7521d06f1b0 | Bump again | theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs | bulbs/__init__.py | bulbs/__init__.py | __version__ = "0.11.4"
| __version__ = "0.11.3"
| mit | Python |
aee41bac296eece9c30565c5824db9a019833ee0 | Add decorator docs | n9code/calm,n9code/calm | calm/decorator.py | calm/decorator.py | """
This module defines general decorators to define the Calm Application.
"""
from calm.resource import Resource
from calm.ex import DefinitionError
def produces(resource_type):
"""Decorator to specify what kind of Resource the handler produces."""
if not isinstance(resource_type, Resource):
raise DefinitionError('@produces value should be of type Resource.')
def decor(func):
"""
The function wrapper.
It checks whether the function is already defined as a Calm handler or
not and sets the appropriate attribute based on that. This is done in
order to not enforce a particular order for the decorators.
"""
if getattr(func, 'handler_def', None):
func.handler_def.produces = resource_type
else:
func.produces = resource_type
return func
return decor
def consumes(resource_type):
"""Decorator to specify what kind of Resource the handler consumes."""
if not isinstance(resource_type, Resource):
raise DefinitionError('@consumes value should be of type Resource.')
def decor(func):
"""
The function wrapper.
It checks whether the function is already defined as a Calm handler or
not and sets the appropriate attribute based on that. This is done in
order to not enforce a particular order for the decorators.
"""
if getattr(func, 'handler_def', None):
func.handler_def.consumes = resource_type
else:
func.consumes = resource_type
return func
return decor
|
def produces(resource_type):
def decor(func):
if getattr(func, 'handler_def', None):
func.handler_def.produces = resource_type
else:
func.produces = resource_type
return func
return decor
def consumes(resource_type):
def decor(func):
if getattr(func, 'handler_def', None):
func.handler_def.consumes = resource_type
else:
func.consumes = resource_type
return func
return decor
| mit | Python |
6f0b75f0561563926afc37dca8451f886e2e2d4f | Handle unicode data like cdbdump | pombredanne/python-pure-cdb,dw/python-pure-cdb,dw/python-pure-cdb,pombredanne/python-pure-cdb | cdblib/cdbdump.py | cdblib/cdbdump.py | from __future__ import print_function
import argparse
import sys
import six
import cdblib
def cdbdump(parsed_args, **kwargs):
# Read binary data from stdin by default
stdin = kwargs.get('stdin')
if stdin is None:
stdin = sys.stdin if six.PY2 else sys.stdin.buffer
# Print text data to stdout by default
stdout = kwargs.get('stdout')
if stdout is None:
stdout = sys.stdout if six.PY2 else sys.stdout.buffer
# Consume stdin and parse the cdb file
reader_cls = cdblib.Reader64 if vars(parsed_args)['64'] else cdblib.Reader
data = stdin.read()
reader = reader_cls(data)
# Dump the file's contents to the ouput stream
for key, value in reader.iteritems():
item = (
b'+',
str(len(key)).encode('ascii'),
b',',
str(len(value)).encode('ascii'),
b':',
key,
b'->',
value,
b'\n',
)
stdout.write(b''.join(item))
# Print final newline
stdout.write(b'\n')
def main(args=None):
args = sys.argv[1:] if (args is None) else args
parser = argparse.ArgumentParser(
description=(
"Python version of djb's cdbdump. "
"Supports standard 32-bit cdb files as well as 64-bit variants."
)
)
parser.add_argument(
'-64', action='store_true', help='Use non-standard 64-bit file offsets'
)
parsed_args = parser.parse_args(args)
cdbdump(parsed_args)
if __name__ == '__main__':
main()
| from __future__ import print_function
import argparse
import sys
import six
import cdblib
def cdbdump(parsed_args, **kwargs):
# Read binary data from stdin by default
stdin = kwargs.get('stdin')
if stdin is None:
stdin = sys.stdin if six.PY2 else sys.stdin.buffer
# Print text data to stdout by default
stdout = kwargs.get('stdout', sys.stdout)
encoding = kwargs.get('encoding', sys.getdefaultencoding())
# Consume stdin and parse the cdb file
reader_cls = cdblib.Reader64 if vars(parsed_args)['64'] else cdblib.Reader
data = stdin.read()
reader = reader_cls(data)
# Dump the file's contents to the ouput stream
for key, value in reader.iteritems():
item = '+{:d},{:d}:{:s}->{:s}'.format(
len(key),
len(value),
key.decode(encoding),
value.decode(encoding)
)
print(item, file=stdout)
# Print final newline
print()
def main(args=None):
args = sys.argv[1:] if (args is None) else args
parser = argparse.ArgumentParser(
description=(
"Python version of djb's cdbdump. "
"Supports standard 32-bit cdb files as well as 64-bit variants."
)
)
parser.add_argument(
'-64', action='store_true', help='Use non-standard 64-bit file offsets'
)
parsed_args = parser.parse_args(args)
cdbdump(parsed_args)
if __name__ == '__main__':
main()
| mit | Python |
9e22b82b9f5848ae3bfc8def66fe7b3d23c8f5b8 | Change Alfred date of posting to be iso8601 compatible. | multiplechoice/workplace | jobs/spiders/alfred.py | jobs/spiders/alfred.py | import json
import urlparse
import dateutil.parser
import scrapy
from jobs.items import JobsItem
class AlfredSpider(scrapy.Spider):
name = "alfred"
start_urls = ['https://api.alfred.is/api/v3/web/open/jobs?cat=0&limit=100&page=0']
def parse(self, response):
# we're using an api rather than scraping a website so we need to grok the json response
content = json.loads(response.text)
# each job under the 'data' key refers to companies listed in the `included` key, so to make
# it easy to get at the data we make a dict keyed to the id of the company
included_data = {entry['id']: entry for entry in content['included']}
for job in content['data']:
job_id = job['id']
company_id = job['relationships']['brand']['data']['id']
item = JobsItem()
item['spider'] = self.name
item['company'] = included_data[company_id]['attributes']['name']
item['url'] = urlparse.urljoin('https://alfred.is/starf/', job_id)
api_url = urlparse.urljoin('https://api.alfred.is/api/v3/web/open/jobs/', job_id)
request = scrapy.Request(api_url, callback=self.parse_specific_job)
request.meta['item'] = item
yield request
def parse_specific_job(self, response):
content = json.loads(response.text)
job = content['data']['attributes']
item = response.meta['item']
item['title'] = job['title']
item['posted'] = job['start']
item['deadline'] = dateutil.parser.parse(job['deadline']).isoformat()
yield item
| import json
import urlparse
import scrapy
from jobs.items import JobsItem
from jobs.spiders.visir import decode_date_string
class AlfredSpider(scrapy.Spider):
name = "alfred"
start_urls = ['https://api.alfred.is/api/v3/web/open/jobs?cat=0&limit=100&page=0']
def parse(self, response):
# we're using an api rather than scraping a website so we need to grok the json response
content = json.loads(response.text)
# each job under the 'data' key refers to companies listed in the `included` key, so to make
# it easy to get at the data we make a dict keyed to the id of the company
included_data = {entry['id']: entry for entry in content['included']}
for job in content['data']:
job_id = job['id']
company_id = job['relationships']['brand']['data']['id']
item = JobsItem()
item['spider'] = self.name
item['company'] = included_data[company_id]['attributes']['name']
item['url'] = urlparse.urljoin('https://alfred.is/starf/', job_id)
api_url = urlparse.urljoin('https://api.alfred.is/api/v3/web/open/jobs/', job_id)
request = scrapy.Request(api_url, callback=self.parse_specific_job)
request.meta['item'] = item
yield request
def parse_specific_job(self, response):
content = json.loads(response.text)
job = content['data']['attributes']
item = response.meta['item']
item['title'] = job['title']
item['posted'] = job['start']
item['deadline'] = decode_date_string(job['deadline'])
yield item
| apache-2.0 | Python |
210c2cf58c246c3733542b8fee7c3eb9fe5d860d | bump version | project-callisto/callisto-core,SexualHealthInnovations/callisto-core,project-callisto/callisto-core,SexualHealthInnovations/callisto-core | callisto/delivery/__init__.py | callisto/delivery/__init__.py | __version__ = '0.4.1'
| __version__ = '0.4.0'
| agpl-3.0 | Python |
623ce2d8624a1a04156a35ae762d29a19fbc7b52 | fix broken docstring | rtulke/ceph-deploy,trhoden/ceph-deploy,branto1/ceph-deploy,ktdreyer/ceph-deploy,ktdreyer/ceph-deploy,codenrhoden/ceph-deploy,Vicente-Cheng/ceph-deploy,isyippee/ceph-deploy,SUSE/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,jumpstarter-io/ceph-deploy,isyippee/ceph-deploy,codenrhoden/ceph-deploy,zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,ddiss/ceph-deploy,branto1/ceph-deploy,ddiss/ceph-deploy,shenhequnying/ceph-deploy,ghxandsky/ceph-deploy,alfredodeza/ceph-deploy,SUSE/ceph-deploy,ceph/ceph-deploy,SUSE/ceph-deploy-to-be-deleted,trhoden/ceph-deploy,jumpstarter-io/ceph-deploy,osynge/ceph-deploy,alfredodeza/ceph-deploy,Vicente-Cheng/ceph-deploy,imzhulei/ceph-deploy,rtulke/ceph-deploy,ceph/ceph-deploy,osynge/ceph-deploy,zhouyuan/ceph-deploy,imzhulei/ceph-deploy,ghxandsky/ceph-deploy | ceph_deploy/util/templates.py | ceph_deploy/util/templates.py |
ceph_repo = """
[ceph]
name=Ceph packages for $basearch
baseurl={repo_url}/$basearch
enabled=1
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
[ceph-noarch]
name=Ceph noarch packages
baseurl={repo_url}/noarch
enabled=1
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
[ceph-source]
name=Ceph source packages
baseurl={repo_url}/SRPMS
enabled=0
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
"""
def custom_repo(**kw):
"""
Repo files need special care in that a whole line should not be present
if there is no value for it. Because we were using `format()` we could
not conditionally add a line for a repo file. So the end result would
contain a key with a missing value (say if we were passing `None`).
For example, it could look like::
[ceph repo]
name= ceph repo
proxy=
gpgcheck=
Which breaks. This function allows us to conditionally add lines,
preserving an order and be more careful.
Previously, and for historical purposes, this is how the template used
to look::
custom_repo =
[{repo_name}]
name={name}
baseurl={baseurl}
enabled={enabled}
gpgcheck={gpgcheck}
type={_type}
gpgkey={gpgkey}
proxy={proxy}
"""
lines = []
# by using tuples (vs a dict) we preserve the order of what we want to
# return, like starting with a [repo name]
tmpl = (
('reponame', '[%s]'),
('baseurl', 'baseurl=%s'),
('enabled', 'enabled=%s'),
('gpgcheck', 'gpgcheck=%s'),
('_type', 'type=%s'),
('gpgkey', 'gpgkey=%s'),
('proxy', 'proxy=%s'),
)
for line in tmpl:
tmpl_key, tmpl_value = line # key values from tmpl
# ensure that there is an actual value (not None nor empty string)
if tmpl_key in kw and kw.get(tmpl_key) not in (None, ''):
lines.append(tmpl_value % kw.get(tmpl_key))
return '\n'.join(lines)
|
ceph_repo = """
[ceph]
name=Ceph packages for $basearch
baseurl={repo_url}/$basearch
enabled=1
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
[ceph-noarch]
name=Ceph noarch packages
baseurl={repo_url}/noarch
enabled=1
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
[ceph-source]
name=Ceph source packages
baseurl={repo_url}/SRPMS
enabled=0
gpgcheck=1
type=rpm-md
gpgkey={gpg_url}
"""
def custom_repo(**kw):
"""
Repo files need special care in that a whole line should not be present
if there is no value for it. Because we were using `format()` we could
not conditionally add a line for a repo file. So the end result would
contain a key with a missing value (say if we were passing `None`).
For example, it could look like::
[ceph repo]
name= ceph repo
proxy=
gpgcheck=
Which breaks. This function allows us to conditionally add lines,
preserving an order and be more careful.
Previously, and for historical purposes, this is how the template used
to look::
custom_repo = """
[{repo_name}]
name={name}
baseurl={baseurl}
enabled={enabled}
gpgcheck={gpgcheck}
type={_type}
gpgkey={gpgkey}
proxy={proxy}
"""
"""
lines = []
# by using tuples (vs a dict) we preserve the order of what we want to
# return, like starting with a [repo name]
tmpl = (
('reponame', '[%s]'),
('baseurl', 'baseurl=%s'),
('enabled', 'enabled=%s'),
('gpgcheck', 'gpgcheck=%s'),
('_type', 'type=%s'),
('gpgkey', 'gpgkey=%s'),
('proxy', 'proxy=%s'),
)
for line in tmpl:
tmpl_key, tmpl_value = line # key values from tmpl
# ensure that there is an actual value (not None nor empty string)
if tmpl_key in kw and kw.get(tmpl_key) not in (None, ''):
lines.append(tmpl_value % kw.get(tmpl_key))
return '\n'.join(lines)
| mit | Python |
f4b8246aead0657e0f997773efed5fbc2147cce7 | add '# noqa' to imports to make flake8 happy | alfredodeza/ceph-doctor | ceph_medic/remote/__init__.py | ceph_medic/remote/__init__.py | import mon # noqa
import osd # noqa
import common # noqa
import functions # noqa
import commands # noqa
| import mon
import osd
import common
import functions
import commands
| mit | Python |
20e8ef6bd68100a70b9d50013630ff71d8b7ec94 | Support wildcard matches on coverage/junit results | dropbox/changes,bowlofstew/changes,wfxiang08/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,bowlofstew/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes | changes/artifacts/__init__.py | changes/artifacts/__init__.py | from __future__ import absolute_import, print_function
from .manager import Manager
from .coverage import CoverageHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CoverageHandler, ['coverage.xml', '*.coverage.xml'])
manager.register(XunitHandler, ['xunit.xml', 'junit.xml', '*.xunit.xml', '*.junit.xml'])
| from __future__ import absolute_import, print_function
from .manager import Manager
from .coverage import CoverageHandler
from .xunit import XunitHandler
manager = Manager()
manager.register(CoverageHandler, ['coverage.xml'])
manager.register(XunitHandler, ['xunit.xml', 'junit.xml'])
| apache-2.0 | Python |
0bf6441863433575aebcbd0b238d27d95830c015 | Fix .iob converter (closes #3620) | honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy,explosion/spaCy | spacy/cli/converters/iob2json.py | spacy/cli/converters/iob2json.py | # coding: utf8
from __future__ import unicode_literals
import re
from cytoolz import partition_all
from ...gold import iob_to_biluo
def iob2json(input_data, n_sents=10, *args, **kwargs):
"""
Convert IOB files into JSON format for use with train cli.
"""
sentences = read_iob(input_data.split("\n"))
docs = merge_sentences(sentences, n_sents)
return docs
def read_iob(raw_sents):
sentences = []
for line in raw_sents:
if not line.strip():
continue
tokens = [re.split("[^\w\-]", line.strip())]
if len(tokens[0]) == 3:
words, pos, iob = zip(*tokens)
elif len(tokens[0]) == 2:
words, iob = zip(*tokens)
pos = ["-"] * len(words)
else:
raise ValueError(
"The iob/iob2 file is not formatted correctly. Try checking whitespace and delimiters."
)
biluo = iob_to_biluo(iob)
sentences.append(
[
{"orth": w, "tag": p, "ner": ent}
for (w, p, ent) in zip(words, pos, biluo)
]
)
sentences = [{"tokens": sent} for sent in sentences]
paragraphs = [{"sentences": [sent]} for sent in sentences]
docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs]
return docs
def merge_sentences(docs, n_sents):
merged = []
for group in partition_all(n_sents, docs):
group = list(group)
first = group.pop(0)
to_extend = first["paragraphs"][0]["sentences"]
for sent in group[1:]:
to_extend.extend(sent["paragraphs"][0]["sentences"])
merged.append(first)
return merged
| # coding: utf8
from __future__ import unicode_literals
import re
from ...gold import iob_to_biluo
from ...util import minibatch
def iob2json(input_data, n_sents=10, *args, **kwargs):
"""
Convert IOB files into JSON format for use with train cli.
"""
docs = []
for group in minibatch(docs, n_sents):
group = list(group)
first = group.pop(0)
to_extend = first["paragraphs"][0]["sentences"]
for sent in group[1:]:
to_extend.extend(sent["paragraphs"][0]["sentences"])
docs.append(first)
return docs
def read_iob(raw_sents):
sentences = []
for line in raw_sents:
if not line.strip():
continue
# tokens = [t.split("|") for t in line.split()]
tokens = [re.split("[^\w\-]", line.strip())]
if len(tokens[0]) == 3:
words, pos, iob = zip(*tokens)
elif len(tokens[0]) == 2:
words, iob = zip(*tokens)
pos = ["-"] * len(words)
else:
raise ValueError(
"The iob/iob2 file is not formatted correctly. Try checking whitespace and delimiters."
)
biluo = iob_to_biluo(iob)
sentences.append(
[
{"orth": w, "tag": p, "ner": ent}
for (w, p, ent) in zip(words, pos, biluo)
]
)
sentences = [{"tokens": sent} for sent in sentences]
paragraphs = [{"sentences": [sent]} for sent in sentences]
docs = [{"id": 0, "paragraphs": [para]} for para in paragraphs]
return docs
| mit | Python |
f190916a828ab4b8ecf16cc6a82ebf3cf8f821e1 | Add a test for executing specs with tags | nestorsalceda/mamba | spec/execution_with_tags_spec.py | spec/execution_with_tags_spec.py | from mamba import description, before, context, it
from doublex import Spy
from expects import expect, be_true, be_false
from mamba import reporter, runnable
from mamba.example import Example
from mamba.example_group import ExampleGroup
from spec.object_mother import an_example_group
TAGS = ['any_tag']
with description('Example execution using tags') as self:
with before.each:
self.reporter = Spy(reporter.Reporter)
self.example_group = an_example_group()
self.example_with_tags = Example(lambda x: x,
parent=self.example_group,
tags=TAGS)
self.other_example = Example(lambda x: x, parent=self.example_group)
with context('when tag is included in example tags'):
with it('executes example'):
self.example_with_tags.execute(self.reporter,
runnable.ExecutionContext(),
tags=TAGS)
expect(self.example_with_tags.was_run).to(be_true)
with context('when tag is not included in example tags'):
with it('does not execute example'):
self.other_example.execute(self.reporter,
runnable.ExecutionContext(),
tags=TAGS)
expect(self.other_example.was_run).to(be_false)
with context('when tag is included in example_group tags'):
with it('executes children'):
self.example_group = ExampleGroup('any example_group', tags=TAGS)
self.example = Example(lambda x: x)
self.other_example = Example(lambda x: x)
self.example_group.append(self.example)
self.example_group.append(self.other_example)
self.example_group.execute(self.reporter,
runnable.ExecutionContext(),
tags=TAGS)
expect(self.example.was_run).to(be_true)
expect(self.other_example.was_run).to(be_true)
| from mamba import description, before, context, it
from doublex import Spy
from expects import expect, be_true, be_false
from mamba import reporter, runnable
from mamba.example import Example
from mamba.example_group import ExampleGroup
from spec.object_mother import an_example_group
TAGS = ['any_tag']
with description('Example execution using tags') as self:
with before.each:
self.reporter = Spy(reporter.Reporter)
self.example_group = an_example_group()
self.example = Example(lambda x: x, parent=self.example_group,
tags=TAGS)
self.other_example = Example(lambda x: x, parent=self.example_group)
with context('when tag is included in example tags'):
with it('executes example'):
self.example.execute(self.reporter,
runnable.ExecutionContext(),
tags=TAGS)
expect(self.example.was_run).to(be_true)
with context('when tag is not included in example tags'):
with it('does not execute example'):
self.other_example.execute(self.reporter,
runnable.ExecutionContext(),
tags=TAGS)
expect(self.other_example.was_run).to(be_false)
with context('when tag is included in example_group tags'):
with it('executes children'):
self.example_group = ExampleGroup('any example_group', tags=TAGS)
self.example = Example(lambda x: x)
self.other_example = Example(lambda x: x)
self.example_group.append(self.example)
self.example_group.append(self.other_example)
self.example_group.execute(self.reporter,
runnable.ExecutionContext(),
tags=TAGS)
expect(self.example.was_run).to(be_true)
expect(self.other_example.was_run).to(be_true)
| mit | Python |
d57fb3ca8c1f4329c8ac90cb785b27123d98aee5 | Bump the version to 0.3.1 | dmtucker/backlog | backlog/__init__.py | backlog/__init__.py | """A Simple Note Manager"""
from __future__ import absolute_import
from backlog.backlog import Backlog
__version__ = '0.3.1'
| """A Simple Note Manager"""
from __future__ import absolute_import
from backlog.backlog import Backlog
__version__ = '0.3.0'
| lgpl-2.1 | Python |
0f782215e58eba53b72667bffde667f4d03a0d4a | Update version. | alphagov/notifications-python-client,alphagov/notifications-python-client | client/version.py | client/version.py | __version__ = '0.2.0'
| __version__ = '0.1.9'
| mit | Python |
6749060a7546b7dee3c6e643c7dfad4db7934061 | package for release | 9nix00/cliez | cliez/__init__.py | cliez/__init__.py | # -*- coding: utf-8 -*-
version = "1.6.10"
version_info = (1, 6, 10)
| # -*- coding: utf-8 -*-
version = "1.6.9"
version_info = (1, 6, 9)
| mit | Python |
05f45992e871dc0d98613fb31269c43e21869414 | Add envy help command | cloudenvy/cloudenvy | cloudenvy/main.py | cloudenvy/main.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
import argparse
import logging
from cloudenvy.config import EnvyConfig
from cloudenvy.commands.envy_up import EnvyUp
from cloudenvy.commands.envy_list import EnvyList
from cloudenvy.commands.envy_provision import EnvyProvision
from cloudenvy.commands.envy_snapshot import EnvySnapshot
from cloudenvy.commands.envy_ip import EnvyIP
from cloudenvy.commands.envy_scp import EnvySCP
from cloudenvy.commands.envy_dotfiles import EnvyDotfiles
from cloudenvy.commands.envy_ssh import EnvySSH
from cloudenvy.commands.envy_destroy import EnvyDestroy
from cloudenvy.commands.envy_run import EnvyRun
def _build_parser():
parser = argparse.ArgumentParser(
description='Launch a virtual machine in an openstack environment.')
parser.add_argument('-v', '--verbosity', action='count',
help='increase output verbosity')
parser.add_argument('-c', '--cloud', action='store',
help='specify which cloud to use')
subparsers = parser.add_subparsers(title='Available commands:')
# Load up all of the subparser classes
EnvyUp(subparsers)
EnvyList(subparsers)
EnvyProvision(subparsers)
EnvySnapshot(subparsers)
EnvyIP(subparsers)
EnvySCP(subparsers)
EnvyDotfiles(subparsers)
EnvySSH(subparsers)
EnvyDestroy(subparsers)
EnvyRun(subparsers)
def find_command_help(config, args):
if args.command:
subparsers.choices[args.command].print_help()
else:
parser.print_help()
help_subparser = subparsers.add_parser('help',
help='Display help information for a specfiic command')
help_subparser.add_argument('command', action='store', nargs='?')
help_subparser.set_defaults(func=find_command_help)
return parser
def main():
parser = _build_parser()
args = parser.parse_args()
config = EnvyConfig(args).get_config()
if args.verbosity == 3:
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger('novaclient').setLevel(logging.DEBUG)
elif args.verbosity == 2:
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger('novaclient').setLevel(logging.INFO)
elif args.verbosity == 1:
logging.getLogger().setLevel(logging.INFO)
args.func(config, args)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
import argparse
import logging
from cloudenvy.config import EnvyConfig
from cloudenvy.commands.envy_up import EnvyUp
from cloudenvy.commands.envy_list import EnvyList
from cloudenvy.commands.envy_provision import EnvyProvision
from cloudenvy.commands.envy_snapshot import EnvySnapshot
from cloudenvy.commands.envy_ip import EnvyIP
from cloudenvy.commands.envy_scp import EnvySCP
from cloudenvy.commands.envy_dotfiles import EnvyDotfiles
from cloudenvy.commands.envy_ssh import EnvySSH
from cloudenvy.commands.envy_destroy import EnvyDestroy
from cloudenvy.commands.envy_run import EnvyRun
def _build_parser():
parser = argparse.ArgumentParser(
description='Launch a virtual machine in an openstack environment.')
parser.add_argument('-v', '--verbosity', action='count',
help='increase output verbosity')
parser.add_argument('-c', '--cloud', action='store',
help='specify which cloud to use')
subparsers = parser.add_subparsers(title='Available commands:')
# Load up all of the subparser classes
EnvyUp(subparsers)
EnvyList(subparsers)
EnvyProvision(subparsers)
EnvySnapshot(subparsers)
EnvyIP(subparsers)
EnvySCP(subparsers)
EnvyDotfiles(subparsers)
EnvySSH(subparsers)
EnvyDestroy(subparsers)
EnvyRun(subparsers)
return parser
def main():
parser = _build_parser()
args = parser.parse_args()
config = EnvyConfig(args).get_config()
if args.verbosity == 3:
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger('novaclient').setLevel(logging.DEBUG)
elif args.verbosity == 2:
logging.getLogger().setLevel(logging.DEBUG)
logging.getLogger('novaclient').setLevel(logging.INFO)
elif args.verbosity == 1:
logging.getLogger().setLevel(logging.INFO)
args.func(config, args)
| apache-2.0 | Python |
41bb51f7f0aa1fda927af51498ec1acbf9eeddcc | fix history links | LeagueOfAutomatedNations/LeagueBot,LeagueOfAutomatedNations/LeagueBot | leaguebot/services/alerters/slack.py | leaguebot/services/alerters/slack.py | from leaguebot import app
import leaguebot.models.map as screepmap
import leaguebot.services.screeps as screeps
import leaguebot.services.slack as slack
import re
def sendBattleMessage(battleinfo):
message = getBattleMessageText(battleinfo)
sendToSlack(message)
def getBattleMessageText(battleinfo):
room_name = battleinfo['_id']
room_owner = screepmap.getRoomOwner(room_name)
pvp_time = str(battleinfo['lastPvpTime'])
history_link = '<https://screeps.com/a/#!/history/' + room_name + '?t=' + pvp_time + '|' + pvp_time + '>'
message = history_link + ' - Battle: ' + '<https://screeps.com/a/#!/room/' + room_name + '|' + room_name + '>'
if not room_owner:
return message
room_level = screepmap.getRoomLevel(room_name)
if room_level and room_level > 0:
message += ' RCL ' + str(room_level)
message += ', defender ' + '<https://screeps.com/a/#!/profile/' + room_owner + '|' + room_owner + '>'
room_alliance = screepmap.getUserAlliance(room_owner)
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendNukeMessage(nukeinfo):
message = getNukeMessageText(nukeinfo)
sendToSlack(message)
def getNukeMessageText(nukeinfo):
tick = screeps.get_time()
eta = str(nukeinfo['landTime']-tick)
room_name = nukeinfo['room']
room_owner = screepmap.getRoomOwner(room_name)
message = str(tick) + ' - Nuke: ' + room_name + ' in ' + str(eta) + ' ticks'
if not room_owner:
message += ', abandoned'
else:
room_alliance = screepmap.getUserAlliance(room_owner)
message += ', defender ' + room_owner
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendToSlack(message):
if 'SEND_TO_SLACK' not in app.config or not app.config['SEND_TO_SLACK']:
return False
try:
channel = app.config['SLACK_CHANNEL']
slack.send_slack_message(channel, message)
print (message)
return True
except:
return False
| from leaguebot import app
import leaguebot.models.map as screepmap
import leaguebot.services.screeps as screeps
import leaguebot.services.slack as slack
import re
def sendBattleMessage(battleinfo):
message = getBattleMessageText(battleinfo)
sendToSlack(message)
def getBattleMessageText(battleinfo):
room_name = battleinfo['_id']
room_owner = screepmap.getRoomOwner(room_name)
pvp_time = str(battleinfo['lastPvpTime'])
history_link = '<https://screeps.com/a/#!/history/E53N64?t=' + pvp_time + '|' + pvp_time + '>'
message = history_link + ' - Battle: ' + '<https://screeps.com/a/#!/room/' + room_name + '|' + room_name + '>'
if not room_owner:
return message
room_level = screepmap.getRoomLevel(room_name)
if room_level and room_level > 0:
message += ' RCL ' + str(room_level)
message += ', defender ' + '<https://screeps.com/a/#!/profile/' + room_owner + '|' + room_owner + '>'
room_alliance = screepmap.getUserAlliance(room_owner)
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendNukeMessage(nukeinfo):
message = getNukeMessageText(nukeinfo)
sendToSlack(message)
def getNukeMessageText(nukeinfo):
tick = screeps.get_time()
eta = str(nukeinfo['landTime']-tick)
room_name = nukeinfo['room']
room_owner = screepmap.getRoomOwner(room_name)
message = str(tick) + ' - Nuke: ' + room_name + ' in ' + str(eta) + ' ticks'
if not room_owner:
message += ', abandoned'
else:
room_alliance = screepmap.getUserAlliance(room_owner)
message += ', defender ' + room_owner
if room_alliance:
message += ' (' + room_alliance + ')'
return message
def sendToSlack(message):
if 'SEND_TO_SLACK' not in app.config or not app.config['SEND_TO_SLACK']:
return False
try:
channel = app.config['SLACK_CHANNEL']
slack.send_slack_message(channel, message)
print (message)
return True
except:
return False
| mit | Python |
2c9a0f9783c72af122d7c728a7760c8a2027d45f | Fix remove debug print | genropy/genropy-core,genropy/genropy-core | tests/resolver_test.py | tests/resolver_test.py | from gnr.core.gnrbag import Bag, BagCbResolver
def hello(x=''):
return 'i say : %s ' % x
b = Bag()
b.setCallBackItem('say_hello', hello, x='hello')
b.setCallBackItem('say_muu', hello, x='muu')
b.setCallBackItem('say_buzbuz', hello, x='buzbuz')
resolver = BagCbResolver(hello, x='fatto da resolver e non da setCallBackItem')
b.setItem('say_resolver', resolver) | from gnr.core.gnrbag import Bag, BagCbResolver
def hello(x=''):
return 'i say : %s ' % x
b = Bag()
b.setCallBackItem('say_hello', hello, x='hello')
b.setCallBackItem('say_muu', hello, x='muu')
b.setCallBackItem('say_buzbuz', hello, x='buzbuz')
resolver = BagCbResolver(hello, x='fatto da resolver e non da setCallBackItem')
b.setItem('say_resolver', resolver)
print b['say_hello']
print b['say_muu']
print b['say_buzbuz']
print b['say_resolver'] | lgpl-2.1 | Python |
539c11706d91db92e36f49694603f2ed668d8cbb | Add a __unicode__ method to the Book model. Will show book title in admin instead of "Book object". | zbyte64/django-dockit,zbyte64/django-dockit | test_environment/books/models.py | test_environment/books/models.py | from dockit.schema import Document, Schema, ModelReferenceField, \
TextField, DictField, SchemaField, FileField, IntegerField, \
ReferenceField, ListField, GenericSchemaField, CharField, DateField
from django.contrib.auth.models import User
class Author(Document):
user = ModelReferenceField(User)
internal_id = TextField()
class Meta:
collection = 'author'
class Address(Schema):
street_1 = TextField()
street_2 = TextField(blank=True)
city = TextField()
postal_code = TextField()
region = TextField()
country = TextField()
extra_data = DictField(blank=True)
class Publisher(Document):
name = TextField()
address = SchemaField(Address)
def __unicode__(self):
return self.name
class Meta:
collection = 'publisher'
class Book(Document):
title = TextField()
cover_image = FileField(upload_to='book-images')
year = IntegerField()
publisher = ReferenceField(Publisher)
authors = ListField(ReferenceField(Author), db_index=True)
tags = ListField(TextField(), db_index=True)
def __unicode__(self):
return self.title
class Meta:
collection = 'book'
Book.objects.index('tags').commit()
class SubComplexTwo(Schema):
field2 = TextField()
class SubComplexOne(Schema):
field1 = TextField()
nested = SchemaField(SubComplexTwo)
class ComplexObject(Document):
field1 = TextField()
image = FileField(upload_to='complex-images', blank=True)
addresses = ListField(SchemaField(Address), blank=True)
main_address = SchemaField(Address, blank=True)
generic_objects = ListField(GenericSchemaField(), blank=True)
nested = SchemaField(SubComplexOne, blank=True)
def __unicode__(self):
return unicode(self.field1)
class Meta:
collection = 'complex_object'
class Publication(Document):
name = CharField()
date = DateField()
class Meta:
typed_field = '_type'
class Newspaper(Publication):
city = CharField()
class Meta:
typed_key = 'newspaper'
class Magazine(Publication):
issue_number = CharField()
class Meta:
typed_key = 'magazine'
class BaseProduct(Document):
name = CharField()
class Meta:
typed_field = '_type'
class Brand(Document):
name = CharField()
products = ListField(SchemaField(BaseProduct))
class Shoes(BaseProduct):
class Meta:
typed_key = 'shoes'
class Shirt(BaseProduct):
class Meta:
typed_key = 'shirt'
| from dockit.schema import Document, Schema, ModelReferenceField, \
TextField, DictField, SchemaField, FileField, IntegerField, \
ReferenceField, ListField, GenericSchemaField, CharField, DateField
from django.contrib.auth.models import User
class Author(Document):
user = ModelReferenceField(User)
internal_id = TextField()
class Meta:
collection = 'author'
class Address(Schema):
street_1 = TextField()
street_2 = TextField(blank=True)
city = TextField()
postal_code = TextField()
region = TextField()
country = TextField()
extra_data = DictField(blank=True)
class Publisher(Document):
name = TextField()
address = SchemaField(Address)
def __unicode__(self):
return self.name
class Meta:
collection = 'publisher'
class Book(Document):
title = TextField()
cover_image = FileField(upload_to='book-images')
year = IntegerField()
publisher = ReferenceField(Publisher)
authors = ListField(ReferenceField(Author), db_index=True)
tags = ListField(TextField(), db_index=True)
class Meta:
collection = 'book'
Book.objects.index('tags').commit()
class SubComplexTwo(Schema):
field2 = TextField()
class SubComplexOne(Schema):
field1 = TextField()
nested = SchemaField(SubComplexTwo)
class ComplexObject(Document):
field1 = TextField()
image = FileField(upload_to='complex-images', blank=True)
addresses = ListField(SchemaField(Address), blank=True)
main_address = SchemaField(Address, blank=True)
generic_objects = ListField(GenericSchemaField(), blank=True)
nested = SchemaField(SubComplexOne, blank=True)
def __unicode__(self):
return unicode(self.field1)
class Meta:
collection = 'complex_object'
class Publication(Document):
name = CharField()
date = DateField()
class Meta:
typed_field = '_type'
class Newspaper(Publication):
city = CharField()
class Meta:
typed_key = 'newspaper'
class Magazine(Publication):
issue_number = CharField()
class Meta:
typed_key = 'magazine'
class BaseProduct(Document):
name = CharField()
class Meta:
typed_field = '_type'
class Brand(Document):
name = CharField()
products = ListField(SchemaField(BaseProduct))
class Shoes(BaseProduct):
class Meta:
typed_key = 'shoes'
class Shirt(BaseProduct):
class Meta:
typed_key = 'shirt'
| bsd-3-clause | Python |
bfb048d9a1ac34cd07e0fc8d94c0e97d901ee096 | fix simple_parser | mgilson/html5lib-python,alex/html5lib-python,dstufft/html5lib-python,alex/html5lib-python,mgilson/html5lib-python,mgilson/html5lib-python,html5lib/html5lib-python,gsnedders/html5lib-python,mindw/html5lib-python,html5lib/html5lib-python,dstufft/html5lib-python,dstufft/html5lib-python,ordbogen/html5lib-python,mindw/html5lib-python,alex/html5lib-python,ordbogen/html5lib-python,mindw/html5lib-python,gsnedders/html5lib-python,html5lib/html5lib-python,ordbogen/html5lib-python | tests/simple_parser.py | tests/simple_parser.py |
from test_parser import *
import sys, os
os.chdir(os.path.split(os.path.abspath(__file__))[0])
sys.path.insert(0, os.path.abspath(os.pardir))
import parser
if __name__ == "__main__":
x = ""
if len(sys.argv) > 1:
x = sys.argv[1]
else:
x = "x"
p = parser.HTMLParser()
document = p.parse(StringIO.StringIO(x))
print convertTreeDump(document.printTree())
| from test_parser import *
import sys
os.chdir("..")
import parser
if __name__ == "__main__":
x = ""
if len(sys.argv) > 1:
x = sys.argv[1]
else:
x = "x"
p = parser.HTMLParser()
document = p.parse(StringIO.StringIO(x))
print convertTreeDump(document.printTree())
| mit | Python |
ae8f9c39cd75d837a4cb5a4cea4d3d11fd1cabed | Add additional test case for comments | PyCQA/isort,PyCQA/isort | tests/test_comments.py | tests/test_comments.py | from hypothesis_auto import auto_pytest_magic
from isort import comments
auto_pytest_magic(comments.parse)
auto_pytest_magic(comments.add_to_line)
def test_add_to_line():
assert comments.add_to_line([], "import os # comment", removed=True).strip() == "import os"
| from hypothesis_auto import auto_pytest_magic
from isort import comments
auto_pytest_magic(comments.parse)
auto_pytest_magic(comments.add_to_line)
| mit | Python |
aa1008691e3433f8350d3f3a5e5d03d9c629a45c | Test for getting parameters back from ideal observer | achabotl/pambox | tests/test_idealobs.py | tests/test_idealobs.py | import pytest
import scipy.io as sio
from pambox import idealobs
import numpy as np
@pytest.fixture
def data():
return np.array([0.28032187, 1.07108181, 3.35513227, 8.66774961,
18.61914334, 33.63172026, 51.87228063, 69.72236134,
83.79127082, 92.72205919, 97.28779782, 99.16754416])
@pytest.fixture
def idealobs_parameters():
return (3.74647303e+00, 5.15928999e-02, -9.09197905e-07, 8000.)
@pytest.fixture
def snr():
return np.arange(-9, 3, 1)
@pytest.fixture
def snrenv(snr):
return 10. ** np.linspace(-2, 2, len(snr))
def test_fit_obs(data, snrenv, idealobs_parameters):
c = idealobs.IdealObs()
c.fit_obs(snrenv, data)
params = c.get_params()
res = [params['k'], params['q'], params['sigma_s']]
np.testing.assert_allclose(res, idealobs_parameters[0:3], atol=1e-5)
def test_psy_fn():
mat = sio.loadmat('./test_files/test_psychometric_function.mat')
x = mat['x'][0]
mu = 0.
sigma = 1.0
target = mat['p'][0]
y = idealobs.psy_fn(x, mu, sigma)
np.testing.assert_allclose(y, target)
def test_snr_env_to_pc(snrenv, idealobs_parameters, data):
c = idealobs.IdealObs(k=1., q=0.5, sigma_s=0.6, m=8000.)
pc = c.snrenv_to_pc(np.arange(0, 21))
target = np.array([0.0000, 0.0025, 0.0267, 0.1327, 0.4403, 1.1314, 2.4278,
4.5518, 7.6788, 11.8990, 17.1955, 23.4442, 30.4320,
37.8885, 45.5214, 53.0503, 60.2323, 66.8786, 72.8613,
78.1116, 82.6125])
np.testing.assert_allclose(pc, target, atol=1e-4)
def test_get_params():
p = {'k':1, 'q':2, 'sigma_s':0.5, 'm':800}
c = idealobs.IdealObs(**p)
assert p == c.get_params()
| import pytest
import scipy.io as sio
from pambox import idealobs
import numpy as np
@pytest.fixture
def data():
return np.array([0.28032187, 1.07108181, 3.35513227, 8.66774961,
18.61914334, 33.63172026, 51.87228063, 69.72236134,
83.79127082, 92.72205919, 97.28779782, 99.16754416])
@pytest.fixture
def idealobs_parameters():
return (3.74647303e+00, 5.15928999e-02, -9.09197905e-07, 8000.)
@pytest.fixture
def snr():
return np.arange(-9, 3, 1)
@pytest.fixture
def snrenv(snr):
return 10. ** np.linspace(-2, 2, len(snr))
def test_fit_obs(data, snrenv, idealobs_parameters):
c = idealobs.IdealObs()
c.fit_obs(snrenv, data)
params = c.get_params()
res = [params['k'], params['q'], params['sigma_s']]
np.testing.assert_allclose(res, idealobs_parameters[0:3], atol=1e-5)
def test_psy_fn():
mat = sio.loadmat('./test_files/test_psychometric_function.mat')
x = mat['x'][0]
mu = 0.
sigma = 1.0
target = mat['p'][0]
y = idealobs.psy_fn(x, mu, sigma)
np.testing.assert_allclose(y, target)
def test_snr_env_to_pc(snrenv, idealobs_parameters, data):
c = idealobs.IdealObs(k=1., q=0.5, sigma_s=0.6, m=8000.)
pc = c.snrenv_to_pc(np.arange(0, 21))
target = np.array([0.0000, 0.0025, 0.0267, 0.1327, 0.4403, 1.1314, 2.4278,
4.5518, 7.6788, 11.8990, 17.1955, 23.4442, 30.4320,
37.8885, 45.5214, 53.0503, 60.2323, 66.8786, 72.8613,
78.1116, 82.6125])
np.testing.assert_allclose(pc, target, atol=1e-4)
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.