commit
stringlengths 40
40
| old_file
stringlengths 4
264
| new_file
stringlengths 4
264
| old_contents
stringlengths 0
3.26k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
624
| message
stringlengths 15
4.7k
| lang
stringclasses 3
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
---|---|---|---|---|---|---|---|---|---|
b44d34f8bc5264d495dc4c2176654b0bd53bfb8a
|
mistral/api/wsgi.py
|
mistral/api/wsgi.py
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral.api import app
from mistral import config
from mistral.engine import rpc
config.parse_args()
transport = rpc.get_transport()
application = app.setup_app(transport=transport)
|
# Copyright 2015 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mistral.api import app
from mistral import config
config.parse_args()
application = app.setup_app()
|
Remove transport from WSGI script
|
Remove transport from WSGI script
The setup_app method no longer requires transport as input.
Change-Id: I4caf397a48e30822d423c8cf7d40f2773f9aa951
Closes-Bug: 1443654
|
Python
|
apache-2.0
|
dennybaa/mistral,openstack/mistral,StackStorm/mistral,dennybaa/mistral,openstack/mistral,StackStorm/mistral
|
a16b51bb26761f8c4a30c06da4c711dac24ac3e0
|
mr/preprocessing.py
|
mr/preprocessing.py
|
import numpy as np
from scipy.ndimage.filters import uniform_filter
from scipy.ndimage.fourier import fourier_gaussian
def bandpass(image, lshort, llong, threshold=1):
"""Convolve with a Gaussian to remove short-wavelength noise,
and subtract out long-wavelength variations,
retaining features of intermediate scale."""
if not 2*lshort < llong:
raise ValueError("The smoothing length scale must be more" +
"than twice the noise length scale.")
settings = dict(mode='nearest', cval=0)
boxcar = uniform_filter(image, 2*llong+1, **settings)
gaussian = np.fft.ifftn(fourier_gaussian(np.fft.fftn(image), lshort))
result = gaussian - boxcar
result -= threshold # Features must be this level above the background.
return result.real.clip(min=0.)
def scale_to_gamut(image, original_dtype):
max_value = np.iinfo(original_dtype).max
scaled = (max_value/image.max()*image.clip(min=0.))
return scaled.astype(original_dtype)
|
import numpy as np
from scipy.ndimage.filters import uniform_filter
from scipy.ndimage.fourier import fourier_gaussian
import warnings
first_run = True
try:
import pyfftw
except ImportError:
fftn = np.fft.fftn
ifftn = np.fft.ifftn
else:
def _maybe_align(a):
global planned
if first_run:
warnings.warn("FFTW is configuring itself. This will take " +
"several sections, but subsequent calls will run " +
"*much* faster.", UserWarning)
planned = False
return pyfftw.n_byte_align(a, a.dtype.alignment)
fftn = lambda a: pyfftw.interfaces.numpy_fft.fftn(_maybe_align(a))
ifftn = lambda a: pyfftw.interfaces.numpy_fft.ifftn(_maybe_align(a))
def bandpass(image, lshort, llong, threshold=1):
"""Convolve with a Gaussian to remove short-wavelength noise,
and subtract out long-wavelength variations,
retaining features of intermediate scale."""
if not 2*lshort < llong:
raise ValueError("The smoothing length scale must be more" +
"than twice the noise length scale.")
settings = dict(mode='nearest', cval=0)
boxcar = uniform_filter(image, 2*llong+1, **settings)
gaussian = ifftn(fourier_gaussian(fftn(image), lshort))
result = gaussian - boxcar
result -= threshold # Features must be this level above the background.
return result.real.clip(min=0.)
def scale_to_gamut(image, original_dtype):
max_value = np.iinfo(original_dtype).max
scaled = (max_value/image.max()*image.clip(min=0.))
return scaled.astype(original_dtype)
|
Add optional dependence on FFTW for faster bandpass
|
ENH: Add optional dependence on FFTW for faster bandpass
|
Python
|
bsd-3-clause
|
daniorerio/trackpy,daniorerio/trackpy
|
af05872aaf08a32ea7855b80153c0596835755bd
|
tests/integration/test_webui.py
|
tests/integration/test_webui.py
|
import requests
import pytest
class TestWebUI(object):
def get_page(self, page):
return requests.get('http://127.0.0.1' + page)
pages = [
{
'page': '/',
'matching_text': 'Diamond',
},
{
'page': '/scoreboard',
},
{
'page': '/login',
'matching_text': 'Please sign in',
},
{
'page': '/about',
'matching_text': 'Use the following credentials to login',
},
{
'page': '/overview',
},
{
'page': '/api/overview/data'
}
]
@pytest.mark.parametrize("page_data", pages)
def test_page(self, page_data):
resp = self.get_page(page_data['page'])
assert resp.status_code == 200
if 'matching_text' in page_data:
assert page_data['matching_text'] in resp.text
|
import requests
import pytest
class TestWebUI(object):
def get_page(self, page):
return requests.get('http://nginx' + page)
pages = [
{
'page': '/',
'matching_text': 'Diamond',
},
{
'page': '/scoreboard',
},
{
'page': '/login',
'matching_text': 'Please sign in',
},
{
'page': '/about',
'matching_text': 'Use the following credentials to login',
},
{
'page': '/overview',
},
{
'page': '/api/overview/data'
}
]
@pytest.mark.parametrize("page_data", pages)
def test_page(self, page_data):
resp = self.get_page(page_data['page'])
assert resp.status_code == 200
if 'matching_text' in page_data:
assert page_data['matching_text'] in resp.text
|
Modify hostname for web integration tests
|
Modify hostname for web integration tests
|
Python
|
mit
|
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
|
47b031db83f5cb90f786029a6ffbdb7a599145db
|
timepiece/context_processors.py
|
timepiece/context_processors.py
|
from django.conf import settings
from timepiece import models as timepiece
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
default_famfamfam_url = settings.STATIC_URL + 'images/icons/'
famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', default_famfamfam_url)
context = {
'FAMFAMFAM_URL': famfamfam_url,
}
return context
def quick_search(request):
return {
'quick_search_form': QuickSearchForm(),
}
def active_entries(request):
active_entries = timepiece.Entry.objects.filter(
end_time__isnull=True,
).exclude(
user=request.user,
).select_related('user', 'project', 'activity')
return {
'active_entries': active_entries,
}
def extra_nav(request):
context = {
'extra_nav': getattr(settings, 'EXTRA_NAV', {})
}
return context
|
from django.conf import settings
from timepiece import models as timepiece
from timepiece.forms import QuickSearchForm
def timepiece_settings(request):
default_famfamfam_url = settings.STATIC_URL + 'images/icons/'
famfamfam_url = getattr(settings, 'FAMFAMFAM_URL', default_famfamfam_url)
context = {
'FAMFAMFAM_URL': famfamfam_url,
}
return context
def quick_search(request):
return {
'quick_search_form': QuickSearchForm(),
}
def active_entries(request):
active_entries = None
if request.user.is_authenticated():
active_entries = timepiece.Entry.objects.filter(
end_time__isnull=True,
).exclude(
user=request.user,
).select_related('user', 'project', 'activity')
return {
'active_entries': active_entries,
}
def extra_nav(request):
context = {
'extra_nav': getattr(settings, 'EXTRA_NAV', {})
}
return context
|
Apply active_entries fix from payroll-reports branch
|
Apply active_entries fix from payroll-reports branch
|
Python
|
mit
|
gaga3966/django-timepiece,josesanch/django-timepiece,BocuStudio/django-timepiece,dannybrowne86/django-timepiece,josesanch/django-timepiece,BocuStudio/django-timepiece,gaga3966/django-timepiece,BocuStudio/django-timepiece,arbitrahj/django-timepiece,caktus/django-timepiece,arbitrahj/django-timepiece,gaga3966/django-timepiece,dannybrowne86/django-timepiece,caktus/django-timepiece,dannybrowne86/django-timepiece,caktus/django-timepiece,josesanch/django-timepiece,arbitrahj/django-timepiece
|
943e920603d5507a37c1b0c835c598972f0f2cff
|
github/models.py
|
github/models.py
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
cache.add('github',
requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20').json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
return self.events()[:3]
|
import json, requests
from wagtail.wagtailadmin.edit_handlers import FieldPanel
from wagtail.wagtailcore.models import Page, Orderable
import django.utils.dateparse as dateparse
from django.db import models
from django.core.cache import cache
class GithubOrgIndexPage(Page):
github_org_name = models.CharField(default='City-of-Helsinki', max_length=200)
content_panels = Page.content_panels + [
FieldPanel('github_org_name'),
]
def events(self):
events = cache.get('github')
if not events:
response = requests.get('https://api.github.com/orgs/' + self.github_org_name + '/events?per_page=20')
if response.status_code == 200:
cache.add('github', response.json(), 60)
events = cache.get('github')
for index, event in enumerate(events):
event['created_at'] = dateparse.parse_datetime(event['created_at'])
# get html repo url
event['repo']['url'] = event['repo']['url'].replace('https://api.github.com/repos/', 'https://github.com/')
return events
def top_events(self):
return self.events()[:3]
|
Check github response before parsing
|
Check github response before parsing
|
Python
|
agpl-3.0
|
City-of-Helsinki/devheldev,City-of-Helsinki/devheldev,terotic/devheldev,terotic/devheldev,City-of-Helsinki/devheldev,terotic/devheldev
|
6dc019f3495a940340668fd8039b7911db6273b7
|
resdk/analysis/_register.py
|
resdk/analysis/_register.py
|
"""Patch ReSDK resources with analysis methods."""
from __future__ import absolute_import, division, print_function, unicode_literals
from resdk.analysis.alignment import bowtie2, hisat2
from resdk.analysis.chip_seq import macs, rose2
from resdk.analysis.expressions import cuffnorm, cuffquant
from resdk.analysis.plots import bamliquidator, bamplot
from resdk.resources import Collection, Relation, Sample
Collection.run_bamliquidator = bamliquidator
Collection.run_bamplot = bamplot
Collection.run_bowtie2 = bowtie2
Collection.run_cuffnorm = cuffnorm
Collection.run_cuffquant = cuffquant
Collection.run_hisat2 = hisat2
Collection.run_macs = macs
Collection.run_rose2 = rose2
Relation.run_bamliquidator = bamliquidator
Relation.run_bamplot = bamplot
Relation.run_bowtie2 = bowtie2
Relation.run_cuffnorm = cuffnorm
Relation.run_cuffquant = cuffquant
Relation.run_hisat2 = hisat2
Relation.run_macs = macs
Relation.run_rose2 = rose2
Sample.run_bowtie2 = bowtie2
Sample.run_cuffquant = cuffquant
Sample.run_hisat2 = hisat2
Sample.run_macs = macs
Sample.run_rose2 = rose2
|
"""Patch ReSDK resources with analysis methods."""
from __future__ import absolute_import, division, print_function, unicode_literals
from resdk.analysis.alignment import bowtie2, hisat2
from resdk.analysis.chip_seq import macs, rose2
from resdk.analysis.expressions import cuffnorm, cuffquant
from resdk.analysis.plots import bamliquidator, bamplot
from resdk.resources import Collection, Relation, Sample
Collection.run_bamliquidator = bamliquidator
Collection.run_bamplot = bamplot
Collection.run_bowtie2 = bowtie2
Collection.run_cuffnorm = cuffnorm
Collection.run_cuffquant = cuffquant
Collection.run_hisat2 = hisat2
Collection.run_macs = macs
Collection.run_rose2 = rose2
Relation.run_bamliquidator = bamliquidator
Relation.run_bamplot = bamplot
Relation.run_bowtie2 = bowtie2
Relation.run_cuffnorm = cuffnorm
Relation.run_cuffquant = cuffquant
Relation.run_hisat2 = hisat2
Relation.run_macs = macs
Relation.run_rose2 = rose2
Sample.run_bowtie2 = bowtie2
Sample.run_cuffquant = cuffquant
Sample.run_hisat2 = hisat2
Sample.run_macs = macs
Sample.run_rose2 = rose2
|
Fix import error caught by new version of isort (4.2.8)
|
Fix import error caught by new version of isort (4.2.8)
|
Python
|
apache-2.0
|
genialis/resolwe-bio-py
|
30da968a43434088a7839941118e30d26683679e
|
storm/tests/conftest.py
|
storm/tests/conftest.py
|
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import os
import pytest
from .common import INSTANCE, HOST
from datadog_checks.dev import docker_run, get_here, run_command
from datadog_checks.dev.conditions import CheckCommandOutput
@pytest.fixture(scope='session')
def dd_environment():
compose_file = os.path.join(get_here(), 'compose', 'docker-compose.yaml')
# Build the topology jar to use in the environment
with docker_run(compose_file, build=True, service_name='topology-maker', sleep=15):
run_command(
['docker', 'cp', 'topology-build:/topology.jar', os.path.join(get_here(), 'compose')]
)
nimbus_condition = CheckCommandOutput(['nc', '-z', HOST, '6627'], 'succeeded')
with docker_run(compose_file, service_name='storm-nimbus', conditions=[nimbus_condition]):
with docker_run(compose_file, service_name='storm-ui',
log_patterns=[r'org.apache.storm.ui.core']):
with docker_run(
compose_file, service_name='topology',
log_patterns=['Finished submitting topology: topology']
):
yield INSTANCE
|
# (C) Datadog, Inc. 2010-2016
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import os
import pytest
from .common import INSTANCE, HOST
from datadog_checks.dev import docker_run, get_here, run_command
from datadog_checks.dev.conditions import CheckCommandOutput
@pytest.fixture(scope='session')
def dd_environment():
compose_file = os.path.join(get_here(), 'compose', 'docker-compose.yaml')
# Build the topology jar to use in the environment
with docker_run(compose_file, build=True, service_name='topology-maker', sleep=15):
run_command(
['docker', 'cp', 'topology-build:/topology.jar', os.path.join(get_here(), 'compose')]
)
nimbus_condition = CheckCommandOutput(['nc', '-zv', HOST, '6627'], 'succeeded')
with docker_run(compose_file, service_name='storm-nimbus', conditions=[nimbus_condition]):
with docker_run(compose_file, service_name='storm-ui',
log_patterns=[r'org.apache.storm.ui.core']):
with docker_run(
compose_file, service_name='topology',
log_patterns=['Finished submitting topology: topology']
):
yield INSTANCE
|
Add the -v flag to work with linux nc
|
Add the -v flag to work with linux nc
|
Python
|
bsd-3-clause
|
DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras,DataDog/integrations-extras
|
fba4fdf426b0a29ca06deb67587c2bd804adb017
|
tbgxmlutils/xmlutils.py
|
tbgxmlutils/xmlutils.py
|
#!/usr/bin/env python
from xml.dom import minidom
import xml.etree.ElementTree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
if parent is None:
handle = ET.Element(k)
else:
handle = ET.SubElement(parent, k)
if txt: handle.text = unicode(txt)
try:
for k, v in attrs.iteritems(): handle.attrib[k] = v
except AttributeError:
pass
return handle
def etree2xml(e, encoding='UTF-8'):
return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)
def pretty(xml=None, fn=None):
if fn is not None:
xml = minidom.parse(fn)
elif not isinstance(xml, minidom.Document):
xml = minidom.parseString(xml)
return xml.toprettyxml(indent=' ')
def xml_fn_to_json(fn):
fh = open(fn, 'r')
json = xmltodict.parse(fh.read())
return json
|
#!/usr/bin/env python
from xml.dom import minidom
import lxml.etree as ET
import xmltodict
def add(k, parent=None, txt=None, attrs=None):
if parent is None:
handle = ET.Element(k)
else:
handle = ET.SubElement(parent, k)
if txt: handle.text = unicode(txt)
try:
for k, v in attrs.iteritems(): handle.attrib[k] = v
except AttributeError:
pass
return handle
def etree2xml(e, encoding='UTF-8'):
return ET.tostring(e, encoding=encoding) if encoding else ET.tostring(e)
def pretty(xml=None, fn=None):
if fn is not None:
xml = minidom.parse(fn)
elif not isinstance(xml, minidom.Document):
xml = minidom.parseString(xml)
return xml.toprettyxml(indent=' ')
def xml_fn_to_json(fn):
fh = open(fn, 'r')
json = xmltodict.parse(fh.read())
return json
|
Use lxml instead of elementtree.
|
Use lxml instead of elementtree.
|
Python
|
mit
|
Schwarzschild/TBGXMLUtils
|
ac3f56f4ed0826600b9adbbf8dfe3b99ce508ac6
|
migrations/versions/0334_broadcast_message_number.py
|
migrations/versions/0334_broadcast_message_number.py
|
"""
Revision ID: 0334_broadcast_message_number
Revises: 0333_service_broadcast_provider
Create Date: 2020-12-04 15:06:22.544803
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0334_broadcast_message_number'
down_revision = '0333_service_broadcast_provider'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("create sequence broadcast_provider_message_number_seq")
op.create_table(
'broadcast_provider_message_number',
sa.Column(
'broadcast_provider_message_number',
sa.Integer(),
server_default=sa.text("nextval('broadcast_provider_message_number_seq')"),
nullable=False
),
sa.Column('broadcast_provider_message_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['broadcast_provider_message_id'], ['broadcast_provider_message.id'], ),
sa.PrimaryKeyConstraint('broadcast_provider_message_number')
)
op.execute(
"""
INSERT INTO
broadcast_provider_message_number (broadcast_provider_message_id)
SELECT
id
FROM
broadcast_provider_message
"""
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('broadcast_provider_message_number')
op.execute("drop sequence broadcast_provider_message_number_seq")
# ### end Alembic commands ###
|
"""
Revision ID: 0334_broadcast_message_number
Revises: 0333_service_broadcast_provider
Create Date: 2020-12-04 15:06:22.544803
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
revision = '0334_broadcast_message_number'
down_revision = '0333_service_broadcast_provider'
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.execute("create sequence broadcast_provider_message_number_seq")
op.create_table(
'broadcast_provider_message_number',
sa.Column(
'broadcast_provider_message_number',
sa.Integer(),
server_default=sa.text("nextval('broadcast_provider_message_number_seq')"),
nullable=False
),
sa.Column('broadcast_provider_message_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.ForeignKeyConstraint(['broadcast_provider_message_id'], ['broadcast_provider_message.id'], ),
sa.PrimaryKeyConstraint('broadcast_provider_message_number')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('broadcast_provider_message_number')
op.execute("drop sequence broadcast_provider_message_number_seq")
# ### end Alembic commands ###
|
Delete unneeded code form migration
|
Delete unneeded code form migration
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
6f3b0c997f7207279bf836edc94db1ac19d2ce1d
|
src/rabird/core/logging.py
|
src/rabird/core/logging.py
|
'''
@date 2013-5-9
@author Hong-She Liang <[email protected]>
'''
import sys
import os
# Import the global logging unit, not our logging .
global_logging = __import__('logging')
def load_default_config():
arguments = {
'level': None,
'filename': None,
'filemode': None,
'format': None,
'datefmt': None,
'style': None,
}
for k in list(arguments.keys()):
try:
envionment_text = 'PYTHON_LOGGING_{}'.format(k.upper())
arguments[k] = os.environ[envionment_text]
except ValueError:
pass
except KeyError:
pass
# Remove all arguments that is None value.
keys = list(arguments.keys())
for k in keys:
if arguments[k] is None:
del arguments[k]
# Set default level to logging.INFO .
if 'level' not in list(arguments.keys()):
arguments['level'] = global_logging.INFO
global_logging.basicConfig(**arguments)
# Added console handler only there have filename argument.
if 'filename' in list(arguments.keys()):
global_logging.getLogger().addHandler(global_logging.StreamHandler(sys.stdout))
|
'''
@date 2013-5-9
@author Hong-She Liang <[email protected]>
'''
import sys
import os
# Import the global logging unit, not our logging .
global_logging = __import__('logging')
def load_default_config():
arguments = {
'level': None,
'filename': None,
'filemode': None,
'format': None,
'datefmt': None,
'style': None,
}
for k in list(arguments.keys()):
try:
envionment_text = 'PYTHON_LOGGING_%s' % k.upper()
arguments[k] = os.environ[envionment_text]
except ValueError:
pass
except KeyError:
pass
# Remove all arguments that is None value.
keys = list(arguments.keys())
for k in keys:
if arguments[k] is None:
del arguments[k]
# Set default level to logging.INFO .
if 'level' not in list(arguments.keys()):
arguments['level'] = global_logging.INFO
global_logging.basicConfig(**arguments)
# Added console handler only there have filename argument.
if 'filename' in list(arguments.keys()):
global_logging.getLogger().addHandler(global_logging.StreamHandler(sys.stdout))
|
Use old style string format method to avoid formatting warning
|
Use old style string format method to avoid formatting warning
|
Python
|
apache-2.0
|
starofrainnight/rabird.core
|
26c96aaa57c745840944c2aea5613ff861bb717f
|
invocations/testing.py
|
invocations/testing.py
|
from invoke import ctask as task
@task(help={
'module': "Just runs tests/STRING.py.",
'runner': "Use STRING to run tests instead of 'spec'.",
'opts': "Extra flags for the test runner",
'pty': "Whether to run tests under a pseudo-tty",
})
def test(c, module=None, runner=None, opts=None, pty=True):
"""
Run a Spec or Nose-powered internal test suite.
"""
runner = runner or 'spec'
# Allow selecting specific submodule
specific_module = " --tests=tests/%s.py" % module
args = (specific_module if module else "")
if opts:
args += " " + opts
# Always enable timing info by default. OPINIONATED
args += " --with-timing"
# Use pty by default so the spec/nose/Python process buffers "correctly"
c.run(runner + args, pty=pty)
@task
def coverage(c, package=None):
"""
Run tests w/ coverage enabled, generating HTML, & opening it.
"""
opts = ""
if package is not None:
# TODO: make omission list more configurable
opts = "--include='{0}/*' --omit='{0}/vendor/*'".format(package)
test(c, opts="--with-coverage --cover-branches")
c.run("coverage html {0}".format(opts))
c.run("open htmlcov/index.html")
|
import sys
from invoke import ctask as task
@task(help={
'module': "Just runs tests/STRING.py.",
'runner': "Use STRING to run tests instead of 'spec'.",
'opts': "Extra flags for the test runner",
'pty': "Whether to run tests under a pseudo-tty",
})
def test(c, module=None, runner=None, opts=None, pty=True):
"""
Run a Spec or Nose-powered internal test suite.
"""
runner = runner or 'spec'
# Allow selecting specific submodule
specific_module = " --tests=tests/%s.py" % module
args = (specific_module if module else "")
if opts:
args += " " + opts
# Always enable timing info by default. OPINIONATED
args += " --with-timing"
# Use pty by default so the spec/nose/Python process buffers "correctly"
c.run(runner + args, pty=pty)
@task
def coverage(c, package=None):
"""
Run tests w/ coverage enabled, generating HTML, & opening it.
"""
if not c.run("which coverage", hide=True, warn=True).ok:
sys.exit("You need to 'pip install coverage' to use this task!")
opts = ""
if package is not None:
# TODO: make omission list more configurable
opts = "--include='{0}/*' --omit='{0}/vendor/*'".format(package)
test(c, opts="--with-coverage --cover-branches")
c.run("coverage html {0}".format(opts))
c.run("open htmlcov/index.html")
|
Quit coverage task early if no coverage installed
|
Quit coverage task early if no coverage installed
|
Python
|
bsd-2-clause
|
singingwolfboy/invocations,pyinvoke/invocations,mrjmad/invocations
|
2de7427d06ff33bf8bdfe0424e07b3fb34621b07
|
shop/user/views.py
|
shop/user/views.py
|
# -*- coding: utf-8 -*-
"""User views."""
from flask import Blueprint, render_template
from flask_login import login_required
blueprint = Blueprint('user', __name__, url_prefix='/users', static_folder='../static')
@blueprint.route('/')
@login_required
def members():
"""List members."""
return render_template('users/members.html')
|
# -*- coding: utf-8 -*-
"""User views."""
from flask import Blueprint, render_template
from flask_login import login_required
blueprint = Blueprint(
'user', __name__,
url_prefix='/users', static_folder='../static'
)
@blueprint.route('/')
@login_required
def members():
"""List members."""
return render_template('users/members.html')
|
Clean up code a bit
|
Clean up code a bit
|
Python
|
bsd-3-clause
|
joeirimpan/shop,joeirimpan/shop,joeirimpan/shop
|
0d3b274d220a9bc229d3ed7c14b231b21d5c8299
|
dthm4kaiako/poet/settings.py
|
dthm4kaiako/poet/settings.py
|
"""Settings for POET application."""
NUM_RESOURCES_PER_FORM = 3
MINIMUM_SUBMISSIONS_PER_RESOURCE = 10
|
"""Settings for POET application."""
NUM_RESOURCES_PER_FORM = 3
MINIMUM_SUBMISSIONS_PER_RESOURCE = 25
|
Increase threshold for showing resource submissions
|
Increase threshold for showing resource submissions
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
ff59a35d5ea90169e34d65bd9ec3a6177e1faebd
|
thinglang/execution/stack.py
|
thinglang/execution/stack.py
|
class StackFrame(object):
def __init__(self, instance):
self.instance = instance
self.data = {}
self.idx = 0
self.return_value = None
def __setitem__(self, key, value):
print('\tSET<{}> {}: {}'.format(self.idx, key, value))
self.data[key] = (self.idx, value)
def __getitem__(self, item):
print('\tGET<{}> {}: {}'.format(self.idx, item, self.data[item][1]))
return self.data[item][1]
def __contains__(self, item):
return item in self.data
def __iter__(self):
for key, value in self.data.items():
yield key, value
def enter(self):
print('\tINCR<{}> -> <{}>'.format(self.idx, self.idx + 1))
self.idx += 1
def exit(self):
print('\tDECR<{}> -> <{}>'.format(self.idx, self.idx - 1))
self.data = {
key: value for key, value in self.data.items() if value[1] != self.idx
}
self.idx -= 1
class StackFrameTerminator(object):
def __init__(self, target_arg=None):
self.target_arg = target_arg
class StackScopeTerminator(object):
pass
|
class StackFrame(object):
def __init__(self, instance):
self.instance = instance
self.data = {}
self.idx = 0
self.return_value = None
def __setitem__(self, key, value):
print('\tSET<{}> {}: {}'.format(self.idx, key, value))
self.data[key] = (self.idx, value)
def __getitem__(self, item):
print('\tGET<{}> {}: {}'.format(self.idx, item, self.data[item][1]))
return self.data[item][1]
def __contains__(self, item):
return item in self.data
def __iter__(self):
for key, value in self.data.items():
yield key, value
def enter(self):
print('\tINCR<{}> -> <{}>'.format(self.idx, self.idx + 1))
self.idx += 1
def exit(self):
assert self.idx > 0, 'Cannot exit lowest stack segment'
print('\tDECR<{}> -> <{}>'.format(self.idx, self.idx - 1))
self.data = {
key: value for key, value in self.data.items() if value[1] != self.idx
key: value for key, value in self.data.items() if value[0] != self.idx
}
self.idx -= 1
class StackFrameTerminator(object):
def __init__(self, target_arg=None):
self.target_arg = target_arg
class StackScopeTerminator(object):
pass
|
Add index assertion during segment exit and fix segment cleanup logic
|
Add index assertion during segment exit and fix segment cleanup logic
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
f024e340a6a443bb765b67bbdb811fa44fd3d19b
|
tests/test_resources.py
|
tests/test_resources.py
|
from flask import json
from helper import TestCase
from models import db, Major
class StudentsTestCase(TestCase):
def setUp(self):
super(StudentsTestCase, self).setUp()
with self.appx.app_context():
db.session.add(Major(id=1, university_id=1, name='Major1'))
db.session.add(Major(id=2, university_id=1, name='Major2'))
db.session.commit()
def test_students_patch(self):
headers = {
'Authorization': 'Bearer ' + self.jwt,
'Content-Type': 'application/json'
}
data = {
'graduation_year': 2018,
'gender': 'm',
'majors': [1, 2]
}
rv = self.app.patch('/students/0', headers=headers, data=json.dumps(data))
self.assertEqual(rv.status_code, 200)
|
from flask import json
from helper import TestCase
from models import db, Major, Student
class StudentsTestCase(TestCase):
def setUp(self):
super(StudentsTestCase, self).setUp()
with self.appx.app_context():
db.session.add(Major(id=1, university_id=1, name='Major1'))
db.session.add(Major(id=2, university_id=1, name='Major2'))
db.session.commit()
def test_students_patch(self):
headers = {
'Authorization': 'Bearer ' + self.jwt,
'Content-Type': 'application/json'
}
data = {
'graduation_year': 2018,
'gender': 'm',
'majors': [1, 2]
}
rv = self.app.patch('/students/0', headers=headers, data=json.dumps(data))
self.assertEqual(rv.status_code, 200)
with self.appx.app_context():
student = Student.query.get(0)
self.assertEqual(student.graduation_year, data['graduation_year'])
self.assertEqual(student.gender, data['gender'])
self.assertEqual(student.majors_list, data['majors'])
|
Improve testing of student patching
|
Improve testing of student patching
|
Python
|
agpl-3.0
|
SCUEvals/scuevals-api,SCUEvals/scuevals-api
|
938043259eefdec21994489d68b1cf737618ba34
|
test/test_conversion.py
|
test/test_conversion.py
|
import unittest
from src import conversion
class TestNotationConverter(unittest.TestCase):
"""Tests for NotationConverter class"""
def test_alg_search_good_input_a5(self):
"""Input with 'a5'"""
actual_result = main.TileLine('w').line
expected_result = ' '
self.assertEqual(actual_result, expected_result)
|
"""Tests for conversion module"""
import unittest
from src import conversion
class TestNotationConverter(unittest.TestCase):
"""Tests for NotationConverter class"""
def test_alg_search_good_input_a5(self):
"""Input with 'a5'"""
n_con = conversion.NotationConverter()
actual_result = n_con.alg_search('a5')
expected_result = ('a5', 'qr5', 'qr4')
self.assertEqual(actual_result, expected_result)
def test_alg_search_good_input_f7(self):
"""Input with 'f7'"""
n_con = conversion.NotationConverter()
actual_result = n_con.alg_search('f7')
expected_result = ('f7', 'kb7', 'kb2')
self.assertEqual(actual_result, expected_result)
def test_alg_search_nonexistant(self):
"""Input which does not exist"""
n_con = conversion.NotationConverter()
self.assertRaises(LookupError, n_con.alg_search, 'f99')
def test_desc_search_good_white(self):
"""Input with good value"""
n_con = conversion.NotationConverter()
actual_result = n_con.desc_search('qn3', 'white')
expected_result = ('b3', 'qn3', 'qn6')
self.assertEqual(actual_result, expected_result)
def test_desc_search_good_black(self):
"""Input with good value"""
n_con = conversion.NotationConverter()
actual_result = n_con.desc_search('qn6', 'black')
expected_result = ('b3', 'qn3', 'qn6')
self.assertEqual(actual_result, expected_result)
def test_desc_search_nonexistant(self):
"""Input with good value"""
n_con = conversion.NotationConverter()
self.assertRaises(LookupError, n_con.desc_search, 'qn333', 'white')
|
Add tests for NotationConverter methods
|
Add tests for NotationConverter methods
|
Python
|
mit
|
blairck/chess_notation
|
b6cfa50e127d3f74247ab148219ef6336e445cca
|
InvenTree/InvenTree/ready.py
|
InvenTree/InvenTree/ready.py
|
import sys
def canAppAccessDatabase():
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'flush',
'loaddata',
'dumpdata',
'makemirations',
'migrate',
'check',
'dbbackup',
'mediabackup',
'dbrestore',
'mediarestore',
'shell',
'createsuperuser',
'wait_for_db',
'prerender',
'collectstatic',
'makemessages',
'compilemessages',
'test',
]
for cmd in excluded_commands:
if cmd in sys.argv:
return False
return True
|
import sys
def canAppAccessDatabase():
"""
Returns True if the apps.py file can access database records.
There are some circumstances where we don't want the ready function in apps.py
to touch the database
"""
# If any of the following management commands are being executed,
# prevent custom "on load" code from running!
excluded_commands = [
'flush',
'loaddata',
'dumpdata',
'makemirations',
'migrate',
'check',
'dbbackup',
'mediabackup',
'dbrestore',
'mediarestore',
'shell',
'createsuperuser',
'wait_for_db',
'prerender',
'collectstatic',
'makemessages',
'compilemessages',
]
for cmd in excluded_commands:
if cmd in sys.argv:
return False
return True
|
Allow data operations to run for 'test'
|
Allow data operations to run for 'test'
|
Python
|
mit
|
inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree
|
fef28556bc4d105feb44345782c632b8d3befa3f
|
server/acre/settings/dev.py
|
server/acre/settings/dev.py
|
from .base import *
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['RDS_DB_NAME'],
'HOST': os.environ['RDS_HOSTNAME'],
'PORT': os.environ['RDS_PORT'],
'USER': os.environ['RDS_USERNAME'],
'PASSWORD': os.environ['RDS_PASSWORD'],
}
}
ALLOWED_HOSTS = [".us-east-2.elasticbeanstalk.com", "localhost"]
|
from .base import *
import os
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': os.environ['RDS_DB_NAME'],
'HOST': os.environ['RDS_HOSTNAME'],
'PORT': os.environ['RDS_PORT'],
'USER': os.environ['RDS_USERNAME'],
'PASSWORD': os.environ['RDS_PASSWORD'],
}
}
ALLOWED_HOSTS = [".acre.one", ".us-east-2.elasticbeanstalk.com", "localhost"]
|
Add acre.one to allowed host
|
Add acre.one to allowed host
|
Python
|
mit
|
yizhang7210/Acre,yizhang7210/Acre,yizhang7210/Acre,yizhang7210/Acre
|
1636fe834830ebb6644d17f908f893a3c2a41e33
|
tests/test_sentences.py
|
tests/test_sentences.py
|
#
import pytest
from sdsc import sentencesegmenter
@pytest.mark.parametrize("sentence,expected",
(
# 1
("This is a simple ##@command-2## sentence. This one too.",
["This is a simple ##@command-2## sentence", "This one too"]),
# 2
("This is not a test in one go. openSUSE is not written with a capital letter.",
["This is not a test in one go",
"openSUSE is not written with a capital letter"]),
# 3
("This is a sentence, e.g. for me.",
["This is a sentence, e.g. for me"]),
# 4
("E. g. this is a sentence.",
["E. g. this is a sentence"]),
# 5
("An above average chance stands e.g. Michael. Marta is also on the list.",
["An above average chance stands e.g. Michael",
"Marta is also on the list"]),
# Add more entries here:
))
def test_sentencesegmenter(sentence, expected):
"""checks whether sentencesegmenter behaves sane"""
sentences = sentencesegmenter(sentence)
assert sentences == expected
|
#
import pytest
from sdsc import sentencesegmenter
@pytest.mark.parametrize("sentence,expected",
(
# 0 - a single simple sentence
("This is a simple sentence.",
["This is a simple sentence"]),
# 1 - two simple sentences
("This is a simple ##@command-2## sentence. This one is too.",
["This is a simple ##@command-2## sentence", "This one is too"]),
# 2 - lowercase letter starts second sentence
("This is not a test in one go. openSUSE is not written with a capital letter.",
["This is not a test in one go",
"openSUSE is not written with a capital letter"]),
# 3 - abbreviation in the middle of the sentence
("This is a sentence, e.g. for me.",
["This is a sentence, e.g. for me"]),
# 4 - abbreviation at the start of the sentence
("E. g. this is a sentence.",
["E. g. this is a sentence"]),
# 5 - abbreviation in the middle of sentence before a capital letter
("An above average chance stands e.g. Michael. Marta is also on the list.",
["An above average chance stands e.g. Michael",
"Marta is also on the list"]),
# 6 - sentences with parentheses around them
("(We speak in circles. We dance in code.)",
["We speak in circles",
"We dance in code"]),
# 6 - sentences with parentheses around them
("We speak in circles. (We dance in code.)",
["We speak in circles",
"We dance in code"]),
))
def test_sentencesegmenter(sentence, expected):
"""checks whether sentencesegmenter behaves sanely"""
sentences = sentencesegmenter(sentence)
assert sentences == expected
|
Expand the sentence segmentation tests a little()
|
Expand the sentence segmentation tests a little()
|
Python
|
lgpl-2.1
|
sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker
|
dd7513f4146679d11aff6d528f11927131dc692f
|
feder/monitorings/factories.py
|
feder/monitorings/factories.py
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
from .models import Monitoring
from feder.users.factories import UserFactory
import factory
class MonitoringFactory(factory.django.DjangoModelFactory):
name = factory.Sequence(lambda n: 'monitoring-%04d' % n)
user = factory.SubFactory(UserFactory)
description = factory.Sequence(lambda n: 'description no.%04d' % n)
template = factory.Sequence(lambda n:
'template no.%04d. reply to {{EMAIL}}' % n)
class Meta:
model = Monitoring
django_get_or_create = ('name', )
|
Add description and template to MonitoringFactory
|
Add description and template to MonitoringFactory
|
Python
|
mit
|
watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder
|
b17e39436bde57558c1a9d6e70330a51dd1d0d19
|
website/addons/osffiles/utils.py
|
website/addons/osffiles/utils.py
|
from website.addons.osffiles.exceptions import FileNotFoundError
def get_versions(filename, node):
"""Return file versions for a :class:`NodeFile`.
:raises: FileNotFoundError if file does not exists for the node.
"""
try:
return node.files_versions[filename.replace('.', '_')]
except KeyError:
raise FileNotFoundError('{0!r} not found for node {1!r}'.format(
filename, node._id
))
def get_latest_version_number(filename, node):
"""Return the current version number (0-indexed) for a NodeFile.
:raises: FileNotFoundError if file does not exists for the node.
"""
versions = get_versions(filename, node)
return len(versions) - 1
|
from website.addons.osffiles.exceptions import FileNotFoundError
def get_versions(filename, node):
"""Return IDs for a file's version records.
:param str filename: The name of the file.
:param Node node: The node which has the requested file.
:return: List of ids (strings) for :class:`NodeFile` records.
:raises: FileNotFoundError if file does not exists for the node.
"""
try:
return node.files_versions[filename.replace('.', '_')]
except KeyError:
raise FileNotFoundError('{0!r} not found for node {1!r}'.format(
filename, node._id
))
def get_latest_version_number(filename, node):
"""Return the current version number (0-indexed) for a file.
:param str filename: The name of the file.
:param Node node: The node which has the requested file.
:raises: FileNotFoundError if file does not exists for the node.
"""
versions = get_versions(filename, node)
return len(versions) - 1
|
Clarify documentation for get_versions and get_latest_version_number.
|
Clarify documentation for get_versions and get_latest_version_number.
|
Python
|
apache-2.0
|
bdyetton/prettychart,Johnetordoff/osf.io,caneruguz/osf.io,ZobairAlijan/osf.io,brandonPurvis/osf.io,arpitar/osf.io,GageGaskins/osf.io,DanielSBrown/osf.io,brianjgeiger/osf.io,fabianvf/osf.io,caseyrygt/osf.io,dplorimer/osf,MerlinZhang/osf.io,zkraime/osf.io,zkraime/osf.io,hmoco/osf.io,lamdnhan/osf.io,cosenal/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,caseyrollins/osf.io,zamattiac/osf.io,ZobairAlijan/osf.io,RomanZWang/osf.io,cldershem/osf.io,cldershem/osf.io,asanfilippo7/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,mluo613/osf.io,rdhyee/osf.io,cosenal/osf.io,ticklemepierce/osf.io,Ghalko/osf.io,saradbowman/osf.io,emetsger/osf.io,cwisecarver/osf.io,mattclark/osf.io,petermalcolm/osf.io,CenterForOpenScience/osf.io,TomHeatwole/osf.io,reinaH/osf.io,pattisdr/osf.io,doublebits/osf.io,brianjgeiger/osf.io,Nesiehr/osf.io,kushG/osf.io,jmcarp/osf.io,zachjanicki/osf.io,abought/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,pattisdr/osf.io,kushG/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,jolene-esposito/osf.io,TomBaxter/osf.io,doublebits/osf.io,Ghalko/osf.io,binoculars/osf.io,amyshi188/osf.io,wearpants/osf.io,Ghalko/osf.io,TomHeatwole/osf.io,kushG/osf.io,zkraime/osf.io,kwierman/osf.io,danielneis/osf.io,HalcyonChimera/osf.io,asanfilippo7/osf.io,jeffreyliu3230/osf.io,jeffreyliu3230/osf.io,ticklemepierce/osf.io,barbour-em/osf.io,TomBaxter/osf.io,lamdnhan/osf.io,jnayak1/osf.io,billyhunt/osf.io,acshi/osf.io,adlius/osf.io,AndrewSallans/osf.io,mfraezz/osf.io,leb2dg/osf.io,mluo613/osf.io,icereval/osf.io,Nesiehr/osf.io,AndrewSallans/osf.io,njantrania/osf.io,chrisseto/osf.io,doublebits/osf.io,chennan47/osf.io,HarryRybacki/osf.io,caseyrygt/osf.io,samanehsan/osf.io,DanielSBrown/osf.io,cslzchen/osf.io,dplorimer/osf,sloria/osf.io,RomanZWang/osf.io,bdyetton/prettychart,DanielSBrown/osf.io,haoyuchen1992/osf.io,caseyrollins/osf.io,HarryRybacki/osf.io,zamattiac/osf.io,jmcarp/osf.io,laurenrevere/osf.io,haoyuchen1992/osf.io,amyshi188/osf.io,we
arpants/osf.io,ckc6cz/osf.io,doublebits/osf.io,fabianvf/osf.io,jnayak1/osf.io,barbour-em/osf.io,caseyrollins/osf.io,SSJohns/osf.io,lyndsysimon/osf.io,reinaH/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,abought/osf.io,arpitar/osf.io,felliott/osf.io,zachjanicki/osf.io,revanthkolli/osf.io,alexschiller/osf.io,rdhyee/osf.io,CenterForOpenScience/osf.io,mattclark/osf.io,kch8qx/osf.io,binoculars/osf.io,crcresearch/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,abought/osf.io,rdhyee/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,sbt9uc/osf.io,GaryKriebel/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,njantrania/osf.io,samchrisinger/osf.io,MerlinZhang/osf.io,sbt9uc/osf.io,jinluyuan/osf.io,caseyrygt/osf.io,adlius/osf.io,barbour-em/osf.io,kwierman/osf.io,jnayak1/osf.io,aaxelb/osf.io,erinspace/osf.io,jeffreyliu3230/osf.io,adlius/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,jolene-esposito/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,billyhunt/osf.io,GaryKriebel/osf.io,jnayak1/osf.io,Ghalko/osf.io,revanthkolli/osf.io,acshi/osf.io,fabianvf/osf.io,HalcyonChimera/osf.io,amyshi188/osf.io,RomanZWang/osf.io,ckc6cz/osf.io,hmoco/osf.io,jmcarp/osf.io,lamdnhan/osf.io,samanehsan/osf.io,mluke93/osf.io,bdyetton/prettychart,erinspace/osf.io,pattisdr/osf.io,TomHeatwole/osf.io,dplorimer/osf,GageGaskins/osf.io,SSJohns/osf.io,brandonPurvis/osf.io,danielneis/osf.io,njantrania/osf.io,petermalcolm/osf.io,ZobairAlijan/osf.io,leb2dg/osf.io,samanehsan/osf.io,SSJohns/osf.io,zachjanicki/osf.io,kch8qx/osf.io,cldershem/osf.io,caneruguz/osf.io,hmoco/osf.io,kwierman/osf.io,baylee-d/osf.io,haoyuchen1992/osf.io,barbour-em/osf.io,himanshuo/osf.io,samanehsan/osf.io,MerlinZhang/osf.io,felliott/osf.io,mluke93/osf.io,ckc6cz/osf.io,doublebits/osf.io,samchrisinger/osf.io,aaxelb/osf.io,RomanZWang/osf.io,arpitar/osf.io,monikagrabowska/osf.io,jinluyuan/osf.io,revanthkolli/osf.io,acshi/osf.io,icereval/osf.io,jinluyuan/osf.io,himanshuo/osf.io,adlius/o
sf.io,chrisseto/osf.io,cosenal/osf.io,caneruguz/osf.io,billyhunt/osf.io,petermalcolm/osf.io,alexschiller/osf.io,cwisecarver/osf.io,leb2dg/osf.io,sbt9uc/osf.io,cosenal/osf.io,lamdnhan/osf.io,mluke93/osf.io,aaxelb/osf.io,ckc6cz/osf.io,mluo613/osf.io,zamattiac/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,felliott/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,acshi/osf.io,arpitar/osf.io,abought/osf.io,icereval/osf.io,crcresearch/osf.io,zachjanicki/osf.io,njantrania/osf.io,crcresearch/osf.io,jolene-esposito/osf.io,GageGaskins/osf.io,zkraime/osf.io,binoculars/osf.io,chennan47/osf.io,billyhunt/osf.io,ZobairAlijan/osf.io,acshi/osf.io,jolene-esposito/osf.io,TomHeatwole/osf.io,kushG/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,KAsante95/osf.io,sloria/osf.io,revanthkolli/osf.io,kwierman/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,cldershem/osf.io,mluke93/osf.io,wearpants/osf.io,sloria/osf.io,jinluyuan/osf.io,emetsger/osf.io,emetsger/osf.io,chrisseto/osf.io,kch8qx/osf.io,mfraezz/osf.io,zamattiac/osf.io,GageGaskins/osf.io,aaxelb/osf.io,laurenrevere/osf.io,GaryKriebel/osf.io,mattclark/osf.io,KAsante95/osf.io,caneruguz/osf.io,Johnetordoff/osf.io,dplorimer/osf,mfraezz/osf.io,mluo613/osf.io,lyndsysimon/osf.io,reinaH/osf.io,reinaH/osf.io,fabianvf/osf.io,himanshuo/osf.io,KAsante95/osf.io,danielneis/osf.io,alexschiller/osf.io,ticklemepierce/osf.io,MerlinZhang/osf.io,jmcarp/osf.io,leb2dg/osf.io,samchrisinger/osf.io,alexschiller/osf.io,himanshuo/osf.io,Nesiehr/osf.io,emetsger/osf.io,GageGaskins/osf.io,alexschiller/osf.io,chennan47/osf.io,brandonPurvis/osf.io,cslzchen/osf.io,KAsante95/osf.io,GaryKriebel/osf.io,bdyetton/prettychart,amyshi188/osf.io,HarryRybacki/osf.io,sbt9uc/osf.io,samchrisinger/osf.io,felliott/osf.io,caseyrygt/osf.io,hmoco/osf.io,chrisseto/osf.io,baylee-d/osf.io,cslzchen/osf.io,petermalcolm/osf.io,erinspace/osf.io,cwisecarver/osf.io,danielneis/osf.io,lyndsysimon/osf.io,baylee-d/osf.io,KAsante95/osf.io,wearpants/osf.io
|
bcb8084cc5e84a6417d4e8580005b5f7cf614005
|
giturlparse/platforms/bitbucket.py
|
giturlparse/platforms/bitbucket.py
|
# Imports
from .base import BasePlatform
class BitbucketPlatform(BasePlatform):
PATTERNS = {
'https': r'https://(?P<_user>.+)@(?P<domain>.+)/(?P<owner>.+)/(?P<repo>.+).git',
'ssh': r'git@(?P<domain>.+)/(?P<owner>.+)/(?P<repo>.+).git'
}
FORMATS = {
'https': r'https://%(owner)s@%(domain)s/%(owner)s/%(repo)s.git',
'ssh': r'git@%(domain)s/%(owner)s/%(repo)s.git'
}
DOMAINS = ('bitbucket.org',)
|
# Imports
from .base import BasePlatform
class BitbucketPlatform(BasePlatform):
PATTERNS = {
'https': r'https://(?P<_user>.+)@(?P<domain>.+)/(?P<owner>.+)/(?P<repo>.+).git',
'ssh': r'git@(?P<domain>.+):(?P<owner>.+)/(?P<repo>.+).git'
}
FORMATS = {
'https': r'https://%(owner)s@%(domain)s/%(owner)s/%(repo)s.git',
'ssh': r'git@%(domain)s:%(owner)s/%(repo)s.git'
}
DOMAINS = ('bitbucket.org',)
|
Fix bug in BitBucket's SSH url
|
Fix bug in BitBucket's SSH url
|
Python
|
apache-2.0
|
FriendCode/giturlparse.py,yakky/giturlparse.py,yakky/giturlparse
|
78665865038cf676290fb1058bd2194e4c506869
|
__init__.py
|
__init__.py
|
_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
try:
import pypar
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except:
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
_VERSION = 'CVS'
_TEMP_DIR = '.SloppyCell'
import logging
logging.basicConfig()
logger = logging.getLogger('__init__')
# Check for debugging option. I tried using optparse for this, but ran into
# issues with ipython and mpirun, both of which pollute sys.argv.
import sys
for arg in sys.argv:
if arg.startswith('--debugSC'):
words = arg.split('=')
import Utility
if len(words) == 2:
Utility.enable_debugging_msgs(words[1])
else:
Utility.enable_debugging_msgs(None)
import os
currdir = os.getcwd()
try:
import pypar
os.chdir(currdir)
HAVE_PYPAR = True
num_procs = pypar.size()
my_rank = pypar.rank()
my_host = pypar.get_processor_name()
import atexit
atexit.register(pypar.finalize)
except ImportError:
os.chdir(currdir)
HAVE_PYPAR = False
num_procs = 1
my_rank = 0
import socket
my_host = socket.gethostname()
logger.debug('Node %i is on host %s.' % (my_rank, my_host))
import os
if my_rank == 0 and not os.path.isdir(_TEMP_DIR):
os.mkdir(_TEMP_DIR)
import OldScipySupport
|
Fix for annoying pypar directory change
|
Fix for annoying pypar directory change
|
Python
|
bsd-3-clause
|
GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell
|
0f9418eed089938e0094f40cc15682ef59e041a1
|
__init__.py
|
__init__.py
|
# -*- coding: utf8 -*-
import default_settings
from flask.ext.plugins import Plugin
from flask import current_app as app
from pybossa_gravatar.gravatar import Gravatar
from pybossa.model.user import User
from sqlalchemy import event
__plugin__ = "PyBossaGravatar"
__version__ = "0.1.0"
gravatar = Gravatar()
class PyBossaGravatar(Plugin):
"""A PyBossa plugin for Gravatar integration."""
def setup(self):
"""Setup the plugin."""
self.load_config()
gravatar.init_app(app)
self.setup_event_listener()
def load_config(self):
"""Configure the plugin."""
settings = [key for key in dir(default_settings) if key.isupper()]
for s in settings:
if not app.config.get(s):
app.config[s] = getattr(default_settings, s)
def setup_event_listener(self):
"""Setup event listener."""
@event.listens_for(User, 'before_insert')
def add_user_event(mapper, conn, target):
"""Set gravatar by default for new users."""
gravatar.set(target, update_repo=False)
|
# -*- coding: utf8 -*-
import default_settings
from flask.ext.plugins import Plugin
from flask import current_app as app
from flask import redirect
from pybossa_gravatar.gravatar import Gravatar
from pybossa.model.user import User
from sqlalchemy import event
from flask.ext.login import current_user
__plugin__ = "PyBossaGravatar"
__version__ = "0.1.0"
gravatar = Gravatar()
class PyBossaGravatar(Plugin):
"""A PyBossa plugin for Gravatar integration."""
def setup(self):
"""Setup the plugin."""
self.load_config()
gravatar.init_app(app)
self.setup_event_listener()
self.setup_url_rule()
def load_config(self):
"""Configure the plugin."""
settings = [key for key in dir(default_settings) if key.isupper()]
for s in settings:
if not app.config.get(s):
app.config[s] = getattr(default_settings, s)
def setup_event_listener(self):
"""Setup event listener."""
@event.listens_for(User, 'before_insert')
def add_user_event(mapper, conn, target):
"""Set gravatar by default for new users."""
gravatar.set(target, update_repo=False)
def setup_url_rule(self):
"""Setup URL rule."""
@app.route('/account/set-gravatar')
def set_gravatar(self):
"""Set gravatar for the current user."""
if current_user.is_anonymous():
return redirect(url_for('account.signin'))
gravatar.set(current_user)
|
Add URL rule to set Gravatar for current user
|
Add URL rule to set Gravatar for current user
|
Python
|
bsd-3-clause
|
alexandermendes/pybossa-gravatar
|
8d8863fe178b085c6ce7500996f9c2d2c8f159f6
|
umibukela/csv_export.py
|
umibukela/csv_export.py
|
from collections import OrderedDict
def form_questions(form):
d = OrderedDict()
children = form['children']
for child in children:
if 'pathstr' in child and 'control' not in child:
d.update({child['pathstr']: ''})
elif 'children' in child:
for minor in child['children']:
if 'pathstr' in minor:
d.update({minor['pathstr']: ''})
if 'Contact_number' in d:
del d['Contact_number']
if 'Full_name' in d:
del d['Full_name']
if 'Monitor_name' in d:
del d['Monitor_name']
if 'phonenumber' in d:
del d['phonenumber']
if 'capturer' in d:
del d['capturer']
if 'surveyor' in d:
del d['surveyor']
if 'Monitor_Name' in d:
del d['Monitor_Name']
if 'phone_number' in d:
del d['phone_number']
return d
def export_row(answer, fields):
obj = answer.answers
for k in fields.keys():
try:
fields[k] = obj[k]
except KeyError:
del fields[k]
return fields
|
from collections import OrderedDict
def form_questions(form):
d = OrderedDict()
children = form['children']
for child in children:
if 'pathstr' in child and 'control' not in child and child['type'] != 'group':
d.update({child['pathstr']: ''})
elif 'children' in child:
for minor in child['children']:
if 'pathstr' in minor:
d.update({minor['pathstr']: ''})
if 'Contact_number' in d:
del d['Contact_number']
if 'Full_name' in d:
del d['Full_name']
if 'Monitor_name' in d:
del d['Monitor_name']
if 'phonenumber' in d:
del d['phonenumber']
if 'capturer' in d:
del d['capturer']
if 'surveyor' in d:
del d['surveyor']
if 'Monitor_Name' in d:
del d['Monitor_Name']
if 'phone_number' in d:
del d['phone_number']
return d
def export_row(answer, fields):
obj = answer.answers
for k in fields.keys():
try:
fields[k] = obj[k]
except KeyError:
del fields[k]
return fields
|
Make sure correct type is excluded
|
Make sure correct type is excluded
|
Python
|
mit
|
Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela
|
6086b970e6c37ca4f343291a35bbb9e533109c1c
|
flask_wiki/backend/routes.py
|
flask_wiki/backend/routes.py
|
from flask_wiki.backend.backend import api
from flask_wiki.backend.views import PageView
api.add_resource(PageView, '/pages-list', endpoint='pages-list')
|
from flask_wiki.backend.backend import api
from flask_wiki.backend.views import PageView, PageDetail
api.add_resource(PageView, '/pages-list', endpoint='pages-list')
api.add_resource(PageDetail, '/pages/<slug>', endpoint='page-detail')
|
Support for page-detail url added.
|
Support for page-detail url added.
|
Python
|
bsd-2-clause
|
gcavalcante8808/flask-wiki,gcavalcante8808/flask-wiki,gcavalcante8808/flask-wiki
|
76a2248ffe8c64b15a6f7d307b6d7c726e97165c
|
alerts/cloudtrail_logging_disabled.py
|
alerts/cloudtrail_logging_disabled.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
#
# Contributors:
# Brandon Myers [email protected]
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
#
# Contributors:
# Brandon Myers [email protected]
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
Send Cloudtrail logging disabled alert to MOC
|
Send Cloudtrail logging disabled alert to MOC
|
Python
|
mpl-2.0
|
mozilla/MozDef,Phrozyn/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,ameihm0912/MozDef,mpurzynski/MozDef,mozilla/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,ameihm0912/MozDef,mozilla/MozDef,ameihm0912/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,mozilla/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,Phrozyn/MozDef
|
680f9e27ddef3be13b025cffd2041e7fece35f64
|
pygraphc/similarity/pysmJaroWinkler.py
|
pygraphc/similarity/pysmJaroWinkler.py
|
import py_stringmatching
from itertools import combinations
from time import time
jw = py_stringmatching.JaroWinkler()
start = time()
log_file = '/home/hs32832011/Git/labeled-authlog/dataset/Hofstede2014/dataset1_perday/Dec 1.log'
with open(log_file, 'r') as f:
lines = f.readlines()
log_length = len(lines)
for line1, line2 in combinations(xrange(log_length), 2):
# string1 = unicode(lines[line1], 'utf-8')
# string2 = unicode(lines[line2], 'utf-8')
string1 = lines[line1]
string2 = lines[line2]
distance = jw.get_sim_score(string1, string2)
print distance
# print runtime
duration = time() - start
minute, second = divmod(duration, 60)
hour, minute = divmod(minute, 60)
print "Runtime: %d:%02d:%02d" % (hour, minute, second)
|
Add Jaro-Winkler distance based on py_stringmatching
|
Add Jaro-Winkler distance based on py_stringmatching
|
Python
|
mit
|
studiawan/pygraphc
|
|
6099451fe088fe74945bbeedeeee66896bd7ff3d
|
voctocore/lib/sources/__init__.py
|
voctocore/lib/sources/__init__.py
|
import logging
from lib.config import Config
from lib.sources.decklinkavsource import DeckLinkAVSource
from lib.sources.imgvsource import ImgVSource
from lib.sources.tcpavsource import TCPAVSource
from lib.sources.testsource import TestSource
from lib.sources.videoloopsource import VideoLoopSource
log = logging.getLogger('AVSourceManager')
sources = {}
def spawn_source(name, port, has_audio=True, has_video=True,
force_num_streams=None):
kind = Config.getSourceKind(name)
if kind == 'img':
sources[name] = ImgVSource(name)
elif kind == 'decklink':
sources[name] = DeckLinkAVSource(name, has_audio, has_video)
elif kind == 'test':
sources[name] = TestSource(name, has_audio, has_video)
elif kind == 'videoloop':
sources[name] = VideoLoopSource(name)
elif kind == 'tcp':
sources[name] = TCPAVSource(name, port, has_audio, has_video,
force_num_streams)
else:
log.warning('Unknown source kind "%s", defaulting to "tcp"', kind)
return sources[name]
def restart_source(name):
assert False, "restart_source() not implemented"
|
import logging
from lib.config import Config
from lib.sources.decklinkavsource import DeckLinkAVSource
from lib.sources.imgvsource import ImgVSource
from lib.sources.tcpavsource import TCPAVSource
from lib.sources.testsource import TestSource
from lib.sources.videoloopsource import VideoLoopSource
log = logging.getLogger('AVSourceManager')
sources = {}
def spawn_source(name, port, has_audio=True, has_video=True,
force_num_streams=None):
kind = Config.getSourceKind(name)
if kind == 'img':
sources[name] = ImgVSource(name)
elif kind == 'decklink':
sources[name] = DeckLinkAVSource(name, has_audio, has_video)
elif kind == 'videoloop':
sources[name] = VideoLoopSource(name)
elif kind == 'tcp':
sources[name] = TCPAVSource(name, port, has_audio, has_video,
force_num_streams)
else:
if kind != 'test':
log.warning('Unknown value "%s" in attribute "kind" in definition of source %s (see section [source.%s] in configuration). Falling back to kind "test".', kind, name, name)
sources[name] = TestSource(name, has_audio, has_video)
return sources[name]
def restart_source(name):
assert False, "restart_source() not implemented"
|
Use test sources as the default in configuration (and improve warning message, when falling back to)
|
Use test sources as the default in configuration (and improve warning message, when falling back to)
|
Python
|
mit
|
voc/voctomix,voc/voctomix
|
3d9d1b10149655030d172de38f9caeb5906d093c
|
source/lucidity/__init__.py
|
source/lucidity/__init__.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .template import Template
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import os
import uuid
import imp
from .template import Template
def discover_templates(paths=None, recursive=True):
'''Search *paths* for mount points and load templates from them.
*paths* should be a list of filesystem paths to search for mount points.
If not specified will try to use value from environment variable
:envvar:`LUCIDITY_TEMPLATE_PATH`.
A mount point is a Python file that defines a 'register' function. The
function should return a list of instantiated
:py:class:`~lucidity.template.Template` objects.
If *recursive* is True (the default) then all directories under a path
will also be searched.
'''
templates = []
if paths is None:
paths = os.environ.get('LUCIDITY_TEMPLATE_PATH', '').split(os.pathsep)
for path in paths:
for base, directories, filenames in os.walk(path):
for filename in filenames:
_, extension = os.path.splitext(filename)
if extension != '.py':
continue
module_path = os.path.join(base, filename)
module_name = uuid.uuid4().hex
module = imp.load_source(module_name, module_path)
try:
registered = module.register()
except AttributeError:
pass
else:
if registered:
templates.extend(registered)
if not recursive:
del directories[:]
return templates
|
Add helper method to load templates from disk.
|
Add helper method to load templates from disk.
|
Python
|
apache-2.0
|
4degrees/lucidity,nebukadhezer/lucidity,BigRoy/lucidity
|
74eb0a324acd75f43aa4efa731c7fc289c5987dd
|
medium/combination-sum-iii/python/combination-sum-iii.py
|
medium/combination-sum-iii/python/combination-sum-iii.py
|
class Solution(object):
def combinationSum3(self, k, n):
"""
:type k: int
:type n: int
:rtype: List[List[int]]
"""
# Use recursion to resolve the problem
# The algorithm complexity is high due to it has to iterate from one
# for each call given k, n. Another optimization is passing another
# parameter in mycombinationSum3(self, k, n, start) for tracking.
if k < 0 or k > 0 and ((10 - k) + 9)*k/2 < n:
return []
elif k == 1 and n < 10:
return [[n]]
# Check the worst recursion sitiation and try to avoid it.
elif (1 + k)*k/2 == n:
return [range(1, k + 1)]
# Check the worst recursion sitiation and try to avoid it.
elif ((10 - k) + 9)*k/2 == n:
return [range(9, 9 - k, -1)]
else:
l = []
for i in range(n):
if i > 0 and i <= n/2 and i < 10:
for j in self.combinationSum3(k - 1, n - i):
# If the number is not unique, then skip it.
# If the return list is empty, then skip it.
if i not in j and len(j) != 0:
j.append(i)
l.append(sorted(j))
# If the length of final list is less than 2, then return it.
if len(l) < 2:
return l
else:
# Drop any duplicated element.
c = []
for i in l:
if i not in c:
c.append(i);
return c
|
Resolve Combination Sum III with recursion
|
Resolve Combination Sum III with recursion
|
Python
|
apache-2.0
|
shuquan/leetcode
|
|
b4d3bae4223671ddda05e864fcd34bd71e188f05
|
tangled/web/exc.py
|
tangled/web/exc.py
|
import datetime
import html
from webob.exc import HTTPInternalServerError
class ConfigurationError(Exception):
"""Exception used to indicate a configuration error."""
class DebugHTTPInternalServerError(HTTPInternalServerError):
"""For use in debug mode, mainly for showing tracebacks."""
body_template = '<pre>{timestamp}\n\n{content}</pre>'
def __init__(self, content, *args, **kwargs):
now = datetime.datetime.now()
content = html.escape(content)
body_template = self.body_template.format(
timestamp=now, content=content)
super().__init__(body_template=body_template, *args, **kwargs)
|
import datetime
import html
from webob.exc import HTTPInternalServerError
class ConfigurationError(Exception):
"""Exception used to indicate a configuration error."""
class DebugHTTPInternalServerError(HTTPInternalServerError):
"""For use in debug mode, mainly for showing tracebacks."""
body_template = '<pre>{timestamp}\n\n{content}</pre>'
def __init__(self, content, *args, **kwargs):
now = datetime.datetime.now()
content = html.escape(content)
body_template = self.body_template.format(
timestamp=now, content=content)
super().__init__(body_template=body_template, *args, **kwargs)
# HACK
safe_substitue = self.body_template_obj.safe_substitute
self.body_template_obj.substitute = safe_substitue
|
Add a hack so weird chars don't cause issues
|
Add a hack so weird chars don't cause issues
In DebugHTTPInternalServerError.__init__.
Should probably figure out the underlying cause.
Or don't inherit from HTTPInternalServerError.
|
Python
|
mit
|
TangledWeb/tangled.web
|
0e2e30382def1f911987ca22fce5adc6c6b73fb6
|
airship/__init__.py
|
airship/__init__.py
|
import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return
return app
|
import os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
jsonbody = json.dumps(obj)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
return jsonate(channels, escaped)
def grefs_json(station, channel, escaped=False):
grefs = [{"name": gref} for gref in station.grefs(channel)]
return jsonate(grefs, escaped)
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return grefs_json(station, channel)
return app
|
Fix the grefs route in the airship server
|
Fix the grefs route in the airship server
|
Python
|
mit
|
richo/airship,richo/airship,richo/airship
|
6f83b42ae9aaf9cd23bc8d15b66157a75bbc3aed
|
util/createCollector.py
|
util/createCollector.py
|
import os
import sys
import subprocesses
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
sigCacheDir = os.path.join(subprocesses.normExpUserPath("~"), "fuzzsigcache")
if not os.path.exists(sigCacheDir):
os.mkdir(sigCacheDir)
collector = Collector(tool=tool, sigCacheDir=sigCacheDir)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
import os
import sys
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
collector = Collector(tool=tool)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
Use the signature (cache) directory specified in .fuzzmanagerconf
|
Use the signature (cache) directory specified in .fuzzmanagerconf
|
Python
|
mpl-2.0
|
nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,nth10sd/funfuzz
|
28add39cbd964d9a26ff8f12c1ee3668b765c7a7
|
perforce/p4login.py
|
perforce/p4login.py
|
#!/usr/bin/env python3
"""Script to automate logging into Perforce.
Use P4API to log in to the server.
"""
import P4
def main():
"""Log in to the Perforce server."""
# Yep, pretty much that easy.
p4 = P4.P4()
p4.connect()
p4.run_login()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""Script to automate logging into Perforce."""
import subprocess
import sys
def main():
"""Log in to the Perforce server."""
# Yep, pretty much that easy.
result = subprocess.check_output(['p4', 'set', '-q', 'P4PASSWD'])
passwd = result.strip().split('=')[1]
proc = subprocess.Popen(['p4', 'login'], stdin=subprocess.PIPE)
proc.communicate(passwd)
sys.exit(proc.returncode)
if __name__ == "__main__":
main()
|
Use p4 cli instead of p4 api
|
Use p4 cli instead of p4 api
|
Python
|
bsd-3-clause
|
nlfiedler/devscripts,nlfiedler/devscripts
|
ad42da9cb3c944f5bd5e953f947a0be96a4b8e17
|
astropy/samp/tests/test_hub_proxy.py
|
astropy/samp/tests/test_hub_proxy.py
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmpdir):
lockfile = tmpdir.join('.samptest').realpath().strpath
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmp_path):
lockfile = str(tmp_path / '.samptest')
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
Python
|
bsd-3-clause
|
pllim/astropy,mhvk/astropy,lpsinger/astropy,lpsinger/astropy,mhvk/astropy,larrybradley/astropy,pllim/astropy,lpsinger/astropy,lpsinger/astropy,lpsinger/astropy,astropy/astropy,pllim/astropy,astropy/astropy,larrybradley/astropy,pllim/astropy,astropy/astropy,mhvk/astropy,larrybradley/astropy,larrybradley/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,pllim/astropy,mhvk/astropy,larrybradley/astropy
|
b4a92b80d2cfe316d89dbecdf1026486d5288fe0
|
simulator-perfect.py
|
simulator-perfect.py
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
# The size of the all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
# The size of the all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
print("%i,%i" % (data_in, total_in))
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
Make perfect simulator print data after each upload
|
Make perfect simulator print data after each upload
|
Python
|
apache-2.0
|
sjakthol/dedup-simulator,sjakthol/dedup-simulator
|
1a871cf3bf1fd40342e490599361d57017cdcc65
|
backend/breach/tests/test_strategy.py
|
backend/breach/tests/test_strategy.py
|
from mock import patch
from breach.tests.base import RuptureTestCase
from breach.strategy import Strategy
class StrategyTestCase(RuptureTestCase):
@patch('breach.strategy.Sniffer')
def test_first_round(self, Sniffer):
strategy0 = Strategy(self.victim)
work0 = strategy0.get_work()
self.assertEqual(
work0['url'],
'https://di.uoa.gr/?breach=^testsecret0^1^3^2^5^4^7^6^9^8^'
)
self.assertTrue('amount' in work0)
self.assertTrue('timeout' in work0)
strategy1 = Strategy(self.victim)
work1 = strategy1.get_work()
self.assertEqual(
work1['url'],
'https://di.uoa.gr/?breach=^testsecret1^0^3^2^5^4^7^6^9^8^'
)
def test_same_round_same_batch(self):
pass
def test_same_round_different_batch(self):
pass
def test_advance_round(self):
pass
|
from mock import patch
from breach.tests.base import RuptureTestCase
from breach.strategy import Strategy
class StrategyTestCase(RuptureTestCase):
@patch('breach.strategy.Sniffer')
def test_first_round(self, Sniffer):
strategy0 = Strategy(self.victim)
work0 = strategy0.get_work()
self.assertEqual(
work0['url'],
'https://di.uoa.gr/?breach=^testsecret0^1^'
)
self.assertTrue('amount' in work0)
self.assertTrue('timeout' in work0)
strategy1 = Strategy(self.victim)
work1 = strategy1.get_work()
self.assertEqual(
work1['url'],
'https://di.uoa.gr/?breach=^testsecret1^0^'
)
def test_same_round_same_batch(self):
pass
def test_same_round_different_batch(self):
pass
def test_advance_round(self):
pass
|
Update first round test, create huffman based on knownalphabet
|
Update first round test, create huffman based on knownalphabet
|
Python
|
mit
|
dionyziz/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dionyziz/rupture
|
91b01e37897ea20f6486118e4dd595439f81006b
|
ktane/Model/Modules/WiresModule.py
|
ktane/Model/Modules/WiresModule.py
|
from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
NotImplementedError
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
|
from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
counter = 0
for idx, value in enumerate(sequence):
if value != WireColors.MISSING.value:
counter += 1
if counter == n:
return idx
return None
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
|
Implement Wires helper method get_nth_wire_position
|
Implement Wires helper method get_nth_wire_position
|
Python
|
mit
|
hanzikl/ktane-controller
|
d8d77d4dd98d9287be8a98f0024e5f458bef2b66
|
tests/test_time.py
|
tests/test_time.py
|
from immobilus import immobilus
from immobilus.logic import _datetime_to_utc_timestamp
from datetime import datetime
from time import time
def test_time_function():
dt = datetime(1970, 1, 1)
assert _datetime_to_utc_timestamp(dt) == 0.0
assert type(_datetime_to_utc_timestamp(dt)) is float
assert time() != _datetime_to_utc_timestamp(dt)
with immobilus(dt):
assert time() == _datetime_to_utc_timestamp(dt)
assert time() != _datetime_to_utc_timestamp(dt)
|
from immobilus import immobilus
from immobilus.logic import _datetime_to_utc_timestamp
from datetime import datetime
from time import time
def test_time_function():
dt = datetime(1970, 1, 1)
timestamp = _datetime_to_utc_timestamp(dt)
assert timestamp == 0.0
assert type(timestamp) is float
assert time() != timestamp
with immobilus(dt):
assert time() == timestamp
assert time() != timestamp
|
Tidy test - reuse timestamp
|
Tidy test - reuse timestamp
|
Python
|
apache-2.0
|
pokidovea/immobilus
|
a9f55a57559a6647c451d38893624be4109be23b
|
Spiders.py
|
Spiders.py
|
'''
Created on 2 сент. 2016 г.
@author: garet
'''
class BaseSpider():
def __init__(self):
pass
def AddUrls(self, urls):
pass
def Routing(self, url):
pass
def SaveCache(self, url, data=None):
pass
def GetCache(self, url):
pass
def Run(self):
pass
|
'''
Created on 2 сент. 2016 г.
@author: garet
'''
import queue
import sqlite3
class BaseSpider():
def __init__(self):
pass
def AddUrls(self, urls):
pass
def Routing(self, url):
pass
def SaveCache(self, url, data=None):
pass
def GetCache(self, url):
pass
def Run(self):
pass
class QueueUrls():
def __init__(self):
self._urls_queue = queue.Queue()
self._urls_set = set()
def AddUrls(self, urls):
for url in urls:
if url not in self._urls_set:
self._urls_queue.put(url)
self._urls_set.add(url)
pass
def ExistUrl(self, url):
if url in self._urls_set:
return True
return False
def GetUrl(self):
return self._urls_queue.get()
class SqliteCache():
def __init__(self, db_name):
self.db_name = db_name
def InitDB(self):
file = self.db_name + '.sqlite'
self._db = sqlite3.connect(file)
self._cursor = self._db.cursor()
# Create table
sql = """
CREATE TABLE IF NOT EXISTS tbl_urls
(
url text primary key not null,
html text,
time timestamp DEFAULT CURRENT_TIMESTAMP
);"""
self._cursor.execute(sql)
def Get(self, url):
if self._cursor == None:
self.InitDB()
sql = """SELECT * FROM tbl_urls WHERE url=?;"""
self._cursor.execute(sql, (url,))
return self._cursor.fetchone()
def Set(self, url, data):
if self._cursor == None:
self.InitDB()
sql = """INSERT OR REPLACE INTO tbl_urls(url, html)
VALUES (?,?);"""
self._cursor.execute(sql, (url, data) )
self._db.commit()
|
Add SqliteCache for html raw data. Add QueueUrls for list urls.
|
Add SqliteCache for html raw data. Add QueueUrls for list urls.
|
Python
|
bsd-3-clause
|
SaltusVita/ReoGrab
|
20eb711953a8981e7b73b59613018514157e352a
|
spyder_terminal/__init__.py
|
spyder_terminal/__init__.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
|
Set development version number to v0.3.0.dev0
|
Set development version number to v0.3.0.dev0
|
Python
|
mit
|
spyder-ide/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal
|
d28bbd597ddbcbf516f490b5bc0511adb63a4be7
|
utils/autogen/config.py
|
utils/autogen/config.py
|
INPUT_DECL_PATHS = [
"../../target/device/libio/export"
# "../../../pia-sdk-repo/iolib/arduino/arduiPIA.h"
]
AUTOGEN_TEST = 1
if AUTOGEN_TEST == 1:
INPUT_DECL_PATHS = [
"./testSuite/"
]
VERSION = '0.0.1'
TARGET = 'galileo'
OUTPUT_COMP_PATH = '../../target/companion/lib/board/'
OUTPUT_DEV_PATH = '../../target/device/src'
OUTPUT_SERVER_PATH = '../../target/server/lib/board'
OUTPUT_CMD_MAP_PATH = '../../target/doc/'
EXPORT_DEF = 'main.h'
EXPORT_CPP = 'main.cpp'
GLOBAL_DEF = '_globalvar.h'
GLOBAL_CPP = '_globalvar.cpp'
EXPORT_MODULE = 'galileo'
GYP_PATH = OUTPUT_DEV_PATH + '/../'
GYP_SRC_PATH = 'src/' #the relative path of GYP_SRC_PATH to OUTPUT_DEV_PATH from
GYP_FILE = 'binding.gyp'
Global_CLASS_VAR_FILE = 'globalClassVarSetterGetter.js'
INSTANCE_V8CLASS_ARG = '__instanceV8' # used to generate a V8 class Object without
# corresponding C class generated.
INC_MAKEFILE = 'autogen.make'
DEBUG = 1
|
INPUT_DECL_PATHS = [
"../../target/device/libio/export"
# "../../../pia-sdk-repo/iolib/arduino/arduiPIA.h"
]
AUTOGEN_TEST = 0
if AUTOGEN_TEST == 1:
INPUT_DECL_PATHS = [
"./testSuite/"
]
VERSION = '0.0.1'
TARGET = 'galileo'
OUTPUT_COMP_PATH = '../../target/companion/lib/board/'
OUTPUT_DEV_PATH = '../../target/device/src'
OUTPUT_SERVER_PATH = '../../target/server/lib/board'
OUTPUT_CMD_MAP_PATH = '../../target/doc/'
EXPORT_DEF = 'main.h'
EXPORT_CPP = 'main.cpp'
GLOBAL_DEF = '_globalvar.h'
GLOBAL_CPP = '_globalvar.cpp'
EXPORT_MODULE = 'galileo'
GYP_PATH = OUTPUT_DEV_PATH + '/../'
GYP_SRC_PATH = 'src/' #the relative path of GYP_SRC_PATH to OUTPUT_DEV_PATH from
GYP_FILE = 'binding.gyp'
Global_CLASS_VAR_FILE = 'globalClassVarSetterGetter.js'
INSTANCE_V8CLASS_ARG = '__instanceV8' # used to generate a V8 class Object without
# corresponding C class generated.
INC_MAKEFILE = 'autogen.make'
DEBUG = 1
|
Set default AUTOGEN_TEST to 0
|
Set default AUTOGEN_TEST to 0
|
Python
|
bsd-3-clause
|
ilc-opensource/io-js,ilc-opensource/io-js,ilc-opensource/io-js,ilc-opensource/io-js,ilc-opensource/io-js
|
caf18b1cd8923e6d070d2652f9969dabba50e81b
|
lotteryResult.py
|
lotteryResult.py
|
#!/usr/bin/env python
import sys
import json
import requests
import hashlib
def hashToNumber(txhash,total):
result = long(txhash, 16) % total
return result
def getBlocktxs(blockhash, number, total, startnum):
url = "https://blockexplorer.com/api/block/" + blockhash
params = dict()
resp = requests.get(url=url, params=params)
data = json.loads(resp.text)
if "tx" in data:
if len(data["tx"]) >= number :
print ("%d Transactions for %d results." % (len(data["tx"]), number) )
for i in range(number):
txhash=data["tx"][i];
r = hashToNumber (txhash, total) + startnum
print ( "result %d is %d" % (i, r) )
else:
print ("only %d Transactions for %d results." % (len(data["tx"]), number) )
else:
print "invalid block data"
def main():
if len(sys.argv) == 5:
blockhash = sys.argv[1]
number = sys.argv[2]
total= sys.argv[3]
startnum = sys.argv[4]
getBlocktxs(blockhash, int(number), int(total), int(startnum))
else:
print "usage: ./lotteryResult.py blockhash number total startnum"
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import sys
import json
import requests
def hashToNumber(txhash, total):
result = long(txhash, 16) % total
return result
def getBlocktxs(blockhash, number, total, startnum):
url = "https://blockexplorer.com/api/block/" + blockhash
params = dict()
resp = requests.get(url=url, params=params, timeout=5)
data = json.loads(resp.text)
if "tx" in data:
if len(data["tx"]) >= number:
print ("%d Transactions for %d results." % (len(data["tx"]), number))
for i in range(number):
txhash=data["tx"][i]
r = hashToNumber(txhash, total) + startnum
print ("result %d is %d" % (i, r))
else:
print ("only %d Transactions for %d results." % (len(data["tx"]), number))
else:
print "invalid block data"
def main():
if len(sys.argv) == 5:
blockhash = sys.argv[1]
number = sys.argv[2]
total= sys.argv[3]
startnum = sys.argv[4]
getBlocktxs(blockhash, int(number), int(total), int(startnum))
else:
print "usage: ./lotteryResult.py blockhash number total startnum"
if __name__ == '__main__':
main()
|
Format code with pep8 and add timeout to requests
|
Format code with pep8 and add timeout to requests
|
Python
|
mit
|
planetcoder/readerLottery
|
5ec99974a6611cc5993bf56f3f0f4e299a89e29d
|
txircd/modules/cmd_pass.py
|
txircd/modules/cmd_pass.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command, Module
class PassCommand(Command, Module):
def onUse(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if not params:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "PASS", ":Not enough parameters")
return
user.password = params[0]
def onRegister(self, user):
if self.ircd.server_password and self.ircd.server_password != user.password:
user.sendMessage("ERROR", ":Closing link: ({}@{}) [Access denied]".format(user.username, user.hostname), to=None, prefix=None)
return False
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.passcmd = PassCommand()
def spawn():
return {
"actions": {
"register": [self.passcmd]
},
"commands": {
"PASS": self.passcmd
}
}
def cleanup():
self.ircd.actions.remove(self.passcmd)
del self.ircd.commands["PASS"]
del self.passcmd
|
from twisted.words.protocols import irc
from txircd.modbase import Command, Module
class PassCommand(Command, Module):
def onUse(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if not params:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "PASS", ":Not enough parameters")
return
user.password = params[0]
def onRegister(self, user):
if self.ircd.server_password and self.ircd.server_password != user.password:
user.sendMessage("ERROR", ":Closing link: ({}@{}) [Access denied]".format(user.username, user.hostname), to=None, prefix=None)
return False
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.passcmd = PassCommand()
def spawn():
return {
"actions": {
"register": [self.passcmd.onRegister]
},
"commands": {
"PASS": self.passcmd
}
}
def cleanup():
self.ircd.actions.remove(self.passcmd)
del self.ircd.commands["PASS"]
del self.passcmd
|
Add the function (not class) to actions as is now required
|
Add the function (not class) to actions as is now required
|
Python
|
bsd-3-clause
|
DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd
|
1dd681517fd1831f3990caa043ea8220f5d1bb90
|
app/app.py
|
app/app.py
|
#!/usr/bin/env python3.5
import os,time,asyncio,json
from datetime import datetime
from aiohttp import web
import logging;logging.basicConfig(level=logging.INFO)
from tools.log import Log
from tools.httptools import Middleware,Route
from tools.template import Template
from models import *
from tools.config import Config
@Route.get('/')
def index():
user=yield from User.findall()
print(user)
return Template.render('index.html')
@Route.get('/user/{id}/comment/{comment}')
def user(id,comment):
return '<h1>%s,%s</h1>'%(id,comment)
@asyncio.coroutine
def init(loop):
print(Middleware.allmiddlewares())
app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares())
Template(app)
Route.register_route(app)
pool=yield from create_pool(loop)
srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000)
logging.info('server started at http://127.0.0.1:8000')
Log.info("server startd at http://127.0.0.1:8000")
return srv
if __name__=="__main__":
loop=asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
|
#!/usr/bin/env python3.5
import os,time,asyncio,json
from datetime import datetime
from aiohttp import web
import logging;logging.basicConfig(level=logging.INFO)
from tools.log import Log
from tools.httptools import Middleware,Route
from tools.template import Template
from models import *
from tools.config import Config
@Route.get('/')
def index():
user=yield from User.findall()
print(user)
return Template('index.html').render()
@Route.get('/user/{id}/comment/{comment}')
def user(id,comment):
return '<h1>%s,%s</h1>'%(id,comment)
@asyncio.coroutine
def init(loop):
print(Middleware.allmiddlewares())
app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares())
Template.init(app)
Route.register_route(app)
pool=yield from create_pool(loop)
srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000)
logging.info('server started at http://127.0.0.1:8000')
Log.info("server startd at http://127.0.0.1:8000")
return srv
if __name__=="__main__":
loop=asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
|
Change Template() to Template.init() in init function
|
Change Template() to Template.init() in init function
|
Python
|
mit
|
free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog
|
178474ceb7227313d039666db3c235c2ee18251e
|
astropy/tests/image_tests.py
|
astropy/tests/image_tests.py
|
import matplotlib
from matplotlib import pyplot as plt
from astropy.utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-10-24T12:38:34.134556/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
import matplotlib
from matplotlib import pyplot as plt
from astropy.utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
# The developer versions of the form 3.1.x+... contain changes that will only
# be included in the 3.2.x release, so we update this here.
if MPL_VERSION[:3] == '3.1' and '+' in MPL_VERSION:
MPL_VERSION = '3.2'
ROOT = "http://{server}/testing/astropy/2018-10-24T12:38:34.134556/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
Use 3.2.x reference images for developer version of Matplotlib
|
Use 3.2.x reference images for developer version of Matplotlib
|
Python
|
bsd-3-clause
|
pllim/astropy,StuartLittlefair/astropy,mhvk/astropy,stargaser/astropy,stargaser/astropy,mhvk/astropy,mhvk/astropy,saimn/astropy,aleksandr-bakanov/astropy,astropy/astropy,lpsinger/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,StuartLittlefair/astropy,saimn/astropy,bsipocz/astropy,dhomeier/astropy,mhvk/astropy,pllim/astropy,larrybradley/astropy,dhomeier/astropy,MSeifert04/astropy,astropy/astropy,astropy/astropy,astropy/astropy,saimn/astropy,pllim/astropy,saimn/astropy,dhomeier/astropy,StuartLittlefair/astropy,stargaser/astropy,larrybradley/astropy,stargaser/astropy,StuartLittlefair/astropy,saimn/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,bsipocz/astropy,bsipocz/astropy,larrybradley/astropy,lpsinger/astropy,lpsinger/astropy,larrybradley/astropy,dhomeier/astropy,MSeifert04/astropy,lpsinger/astropy,astropy/astropy,pllim/astropy,lpsinger/astropy,mhvk/astropy,pllim/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,MSeifert04/astropy,larrybradley/astropy
|
ee43ade86df9eb30455e6026671776b1e5be01e5
|
pyservice/common.py
|
pyservice/common.py
|
DEFAULT_CONFIG = {
"protocol": "json",
"timeout": 2,
"strict": True
}
def scrub_output(context, whitelist, strict=True):
    """Restrict ``context["response"]`` to the whitelisted keys.

    A missing/None response is normalized to an empty dict.  When *strict*
    is False the response is left untouched.

    Raises KeyError if a whitelisted key is absent from the response.
    """
    response = context.get("response", None)
    if response is None:
        context["response"] = {}
        return
    if not strict:
        return
    # BUG FIX: the original used a set comprehension ({r[k] for k in whitelist})
    # which collapsed the response into a *set of values*; the scrubbed
    # response must remain a key -> value mapping.
    context["response"] = {k: response[k] for k in whitelist}
|
DEFAULT_CONFIG = {
"protocol": "json",
"timeout": 2,
"strict": True
}
def scrub_output(context, whitelist, strict=True):
    """Filter the response in *context* down to the whitelisted keys.

    A ``None``/missing response becomes ``{}``; non-strict mode leaves the
    response alone.  Raises KeyError for whitelisted keys not present.
    """
    if context.get("response", None) is None:
        context["response"] = {}
    elif strict:
        full_response = context["response"]
        context["response"] = {key: full_response[key] for key in whitelist}
|
Fix bug in scrub_output where response was creating set, not dict
|
Fix bug in scrub_output where response was creating set, not dict
|
Python
|
mit
|
numberoverzero/pyservice
|
41a83c6742f0e688dad5a98761c0f0415c77bac9
|
outgoing_mail.py
|
outgoing_mail.py
|
#!/usr/bin/env python
#
# Copyright 2010 Eric Entzel <[email protected]>
#
from google.appengine.api import mail
from google.appengine.ext.webapp import template
import os
from_address = '"EventBot" <[email protected]>'
def send(to, template_name, values):
    """Render the named email template pair and send the result to *to*."""
    template_dir = os.path.dirname(__file__)
    base = os.path.join(template_dir, 'email_templates', template_name)
    # Subject and body live in sibling files sharing one template stem.
    subject = template.render(base + '.subject', values)
    body = template.render(base + '.body', values)
    message = mail.EmailMessage(sender=from_address, to=to)
    message.subject = subject
    message.body = body
    message.send()
|
#!/usr/bin/env python
#
# Copyright 2010 Eric Entzel <[email protected]>
#
from google.appengine.api import mail
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
from datetime import datetime
import os
from_address = '"EventBot" <[email protected]>'
email_interval = 10
def send(to, template_name, values):
    """Send a templated email to *to*, rate-limited per recipient.

    At most one email per ``email_interval`` seconds is sent to any given
    address; the throttle state is kept in the 'last_action' memcache
    namespace.
    """
    # A live entry means we already mailed this address recently -- drop it.
    if memcache.get(to, namespace='last_action') is not None:
        return
    base = os.path.join(
        os.path.dirname(__file__), 'email_templates', template_name)
    message = mail.EmailMessage(sender=from_address, to=to)
    message.subject = template.render(base + '.subject', values)
    message.body = template.render(base + '.body', values)
    message.send()
    # Record the send; the cache entry expires on its own after email_interval.
    memcache.set(to, datetime.now(), time=email_interval, namespace='last_action')
|
Use memcache to rate-limit outgoing emails.
|
Use memcache to rate-limit outgoing emails.
|
Python
|
mit
|
eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot
|
0fdda1366b3657614ee76707e617af255634d50b
|
moa/device/__init__.py
|
moa/device/__init__.py
|
from moa.base import MoaBase
class Device(MoaBase):
    '''Reference-counts activations of a device by identifier.

    By default, the device does not support multi-threading.
    '''

    # Set of identifiers that currently hold an activation on this device.
    _activated_set = None

    def __init__(self, **kwargs):
        super(Device, self).__init__(**kwargs)
        self._activated_set = set()

    def activate(self, identifier, **kwargs):
        '''Register *identifier* as active.

        Returns True only when this call took the device from fully
        inactive to active.
        '''
        active = self._activated_set
        result = len(active) == 0
        active.add(identifier)
        return result

    def recover(self, **kwargs):
        '''Recovery hook; the base device has nothing to recover.'''
        pass

    def deactivate(self, identifier, clear=False, **kwargs):
        '''Remove *identifier* (or every activation when *clear* is True).

        Returns True only when this call took the device from active to
        fully inactive.
        '''
        active = self._activated_set
        old_len = len(active)
        if clear:
            active.clear()
        else:
            try:
                active.remove(identifier)
            except KeyError:
                # BUG FIX: set.remove() raises KeyError, not ValueError; the
                # original `except ValueError` let unknown identifiers crash
                # here instead of being ignored as intended.
                pass
        return bool(old_len and not len(active))
|
from moa.base import MoaBase
class Device(MoaBase):
    '''A device whose activations are tracked per identifier.

    By default, the device does not support multi-threading.
    '''

    _activated_set = None  # identifiers currently holding an activation

    def __init__(self, **kwargs):
        super(Device, self).__init__(**kwargs)
        self._activated_set = set()

    def activate(self, identifier, **kwargs):
        '''Mark *identifier* active; True iff the device was idle before.'''
        was_idle = not self._activated_set
        self._activated_set.add(identifier)
        return was_idle

    def recover(self, **kwargs):
        '''No-op recovery hook for subclasses to override.'''
        pass

    def deactivate(self, identifier, clear=False, **kwargs):
        '''Drop *identifier* (or everything when *clear* is True).

        Returns True iff the device just transitioned to fully idle.
        '''
        active = self._activated_set
        was_active = bool(active)
        if clear:
            active.clear()
        else:
            # discard() is the "remove if present" form of remove().
            active.discard(identifier)
        return was_active and not active
|
Fix device activation remove exception.
|
Fix device activation remove exception.
|
Python
|
mit
|
matham/moa
|
cf7b2bb0569431e97cc316dc41924c78806af5a9
|
drivers/vnfm/gvnfm/gvnfmadapter/driver/pub/config/config.py
|
drivers/vnfm/gvnfm/gvnfmadapter/driver/pub/config/config.py
|
# Copyright 2017 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [MSB]
# Address/port of the MicroService Bus this driver talks to.
MSB_SERVICE_IP = '127.0.0.1'
MSB_SERVICE_PORT = '10080'
# [register]
# Whether to register this service with the MSB at start-up.
REG_TO_MSB_WHEN_START = True
# MSB endpoint accepting service registrations.
REG_TO_MSB_REG_URL = "/openoapi/microservices/v1/services"
# Registration payload: service identity plus the node(s) serving it.
# NOTE(review): node ip/port presumably must match where this driver
# actually listens -- confirm against deployment config.
REG_TO_MSB_REG_PARAM = {
"serviceName": "ztevmanagerdriver",
"version": "v1",
"url": "/openoapi/ztevmanagerdriver/v1",
"protocol": "REST",
"visualRange": "1",
"nodes": [{
"ip": "127.0.0.1",
"port": "8410",
"ttl": 0
}]
}
|
# Copyright 2017 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [MSB]
MSB_SERVICE_IP = '127.0.0.1'
MSB_SERVICE_PORT = '10080'
# [register]
REG_TO_MSB_WHEN_START = True
REG_TO_MSB_REG_URL = "/openoapi/microservices/v1/services"
REG_TO_MSB_REG_PARAM = {
"serviceName": "gvnfmdriver",
"version": "v1",
"url": "/openoapi/gvnfmdriver/v1",
"protocol": "REST",
"visualRange": "1",
"nodes": [{
"ip": "127.0.0.1",
"port": "8484",
"ttl": 0
}]
}
|
Add code framework of gvnfm-driver
|
Add code framework of gvnfm-driver
Change-Id: Ibb0dd98a73860f538599328b718040df5f3f7007
Issue-Id: NFVO-132
Signed-off-by: fujinhua <[email protected]>
|
Python
|
apache-2.0
|
open-o/nfvo,open-o/nfvo,open-o/nfvo,open-o/nfvo,open-o/nfvo
|
c4c71dd65675f904c34a0d86a80d5abe7bafdbb1
|
txircd/modules/cmd_user.py
|
txircd/modules/cmd_user.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, params):
if user.registered == 0:
self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
if not user.username:
user.registered -= 1
user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not user.username:
user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return
user.realname = params[3]
if user.registered == 0:
user.register()
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, data):
if not user.username:
user.registered -= 1
user.username = data["ident"]
user.realname = data["gecos"]
if user.registered == 0:
user.register()
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
return {}
ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not ident:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return {}
return {
"user": user,
"ident": ident,
"gecos": params[3]
}
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
Update the USER command to take advantage of core capabilities as well
|
Update the USER command to take advantage of core capabilities as well
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd
|
2a980eee73fb79b191126c9ec1c41963dcaf1d9c
|
aim/db/migration/alembic_migrations/versions/f0c056954eee_sg_rule_remote_group_id.py
|
aim/db/migration/alembic_migrations/versions/f0c056954eee_sg_rule_remote_group_id.py
|
# Copyright 2017 Cisco, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Add remote_group_id column
Revision ID: f0c056954eee
Revises: 3880e0a62e1f
Create Date: 2020-05-05 12:23:39.608507
"""
# revision identifiers, used by Alembic.
revision = 'f0c056954eee'
down_revision = '3880e0a62e1f'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'aim_security_group_rules',
sa.Column('remote_group_id', sa.String(64),
server_default='', nullable=False)
)
def downgrade():
pass
|
# Copyright 2020 Cisco, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""Add remote_group_id column
Revision ID: f0c056954eee
Revises: 3880e0a62e1f
Create Date: 2020-05-05 12:23:39.608507
"""
# revision identifiers, used by Alembic.
revision = 'f0c056954eee'
down_revision = '3880e0a62e1f'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column(
'aim_security_group_rules',
sa.Column('remote_group_id', sa.String(64),
server_default='', nullable=False)
)
def downgrade():
pass
|
Fix date in DB migration
|
Fix date in DB migration
|
Python
|
apache-2.0
|
noironetworks/aci-integration-module,noironetworks/aci-integration-module
|
42062493738a166ddc029d111024b17ffa5cda5f
|
dataviva/apps/scholar/upload_file.py
|
dataviva/apps/scholar/upload_file.py
|
class uploadfile():
    """View-model for one uploaded file, serialized as a JSON-ready dict.

    NOTE(review): the dict layout (url/deleteUrl/deleteType) looks like a
    jQuery-File-Upload response contract -- keep it stable for the client.
    """

    def __init__(self, name, type=None, size=None, not_allowed_msg=''):
        self.name = name
        self.type = type
        self.size = size
        self.not_allowed_msg = not_allowed_msg
        self.url = "data/%s" % name
        self.delete_url = "delete/%s" % name
        self.delete_type = "DELETE"

    def get_file(self):
        """Return the JSON-ready description of this file."""
        if self.type is None:
            # GET: a plain file read back from disk (no content type known).
            return {"name": self.name,
                    "size": self.size,
                    "url": self.url,
                    "deleteUrl": self.delete_url,
                    "deleteType": self.delete_type, }
        if self.not_allowed_msg == '':
            # POST: a freshly stored file (e.g. an image).
            return {"name": self.name,
                    "type": self.type,
                    "size": self.size,
                    "url": self.url,
                    "deleteUrl": self.delete_url,
                    "deleteType": self.delete_type, }
        # The upload was rejected; report the reason instead of URLs.
        return {"error": self.not_allowed_msg,
                "name": self.name,
                "type": self.type,
                "size": self.size, }
|
class UploadFile():
    """View-model for one uploaded file, serialized as a JSON-ready dict.

    NOTE(review): the dict layout (url/deleteUrl/deleteType) looks like a
    jQuery-File-Upload response contract -- keep it stable for the client.
    """

    def __init__(self, name, type=None, size=None, not_allowed_msg=''):
        self.name = name
        self.type = type
        self.size = size
        self.not_allowed_msg = not_allowed_msg
        self.url = "data/%s" % name
        self.delete_url = "delete/%s" % name
        self.delete_type = "DELETE"

    def get_file(self):
        """Return the JSON-ready description of this file."""
        if self.type is None:
            # GET: a plain file read back from disk (no content type known).
            return {"name": self.name,
                    "size": self.size,
                    "url": self.url,
                    "deleteUrl": self.delete_url,
                    "deleteType": self.delete_type, }
        if self.not_allowed_msg == '':
            # POST: a freshly stored file (e.g. an image).
            return {"name": self.name,
                    "type": self.type,
                    "size": self.size,
                    "url": self.url,
                    "deleteUrl": self.delete_url,
                    "deleteType": self.delete_type, }
        # The upload was rejected; report the reason instead of URLs.
        return {"error": self.not_allowed_msg,
                "name": self.name,
                "type": self.type,
                "size": self.size, }
|
Update class name to camelcase pattern.
|
Update class name to camelcase pattern.
|
Python
|
mit
|
DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site,DataViva/dataviva-site
|
2c082afb4024cafb530ffab6a62cc6602e75e092
|
stock_request_picking_type/models/stock_request_order.py
|
stock_request_picking_type/models/stock_request_order.py
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
_inherit = 'stock.request.order'
@api.model
def _get_default_picking_type(self):
return self.env['stock.picking.type'].search([
('code', '=', 'stock_request_order'),
('warehouse_id.company_id', 'in',
[self.env.context.get('company_id', self.env.user.company_id.id),
False])],
limit=1).id
picking_type_id = fields.Many2one(
'stock.picking.type', 'Operation Type',
default=_get_default_picking_type, required=True)
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
_inherit = 'stock.request.order'
@api.model
def _get_default_picking_type(self):
return self.env['stock.picking.type'].search([
('code', '=', 'stock_request_order'),
('warehouse_id.company_id', 'in',
[self.env.context.get('company_id', self.env.user.company_id.id),
False])],
limit=1).id
picking_type_id = fields.Many2one(
'stock.picking.type', 'Operation Type',
default=_get_default_picking_type, required=True)
@api.onchange('warehouse_id')
def onchange_warehouse_picking_id(self):
if self.warehouse_id:
picking_type_id = self.env['stock.picking.type'].\
search([('code', '=', 'stock_request_order'),
('warehouse_id', '=', self.warehouse_id.id)], limit=1)
if picking_type_id:
self._origin.write({'picking_type_id': picking_type_id.id})
|
Synchronize Picking Type and Warehouse
|
[IMP] Synchronize Picking Type and Warehouse
[IMP] User write()
|
Python
|
agpl-3.0
|
Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse
|
be0a078aa004470a450dddfa5a8e770b2e0ad97c
|
disk/datadog_checks/disk/__init__.py
|
disk/datadog_checks/disk/__init__.py
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__
from .disk import Disk
# BUG FIX: the export list was assigned to `all`, which shadows the builtin
# and carries no special meaning to `import *`; `__all__` is the name the
# import machinery actually reads.
__all__ = [
    '__version__', 'Disk'
]
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__ # NOQA F401
from .disk import Disk # NOQA F401
all = [
'__version__', 'Disk'
]
|
Fix flake8 issues and ignore unused
|
[Disk] Fix flake8 issues and ignore unused
|
Python
|
bsd-3-clause
|
DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core
|
a4f41648cd0318694d551b067309539df475c2d7
|
tests/test_function_calls.py
|
tests/test_function_calls.py
|
from thinglang.runner import run
def test_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
|
from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip()
|
Test for method argument calls
|
Test for method argument calls
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
ebbc68da19755097b2131d60bc9757ecb4dc6d4c
|
bundles/auth/models/token.py
|
bundles/auth/models/token.py
|
import hashlib
import random
import string
from ext.aboard.model import *
def set_value(token):
    """Randomly create and return a value.

    Builds a private string from the token's user and timestamp plus 20-40
    random characters, and returns only its SHA-512 hex digest.
    """
    alphabet = string.digits + string.ascii_letters + "_-+^$"
    length = random.randint(20, 40)
    private = "%s_%s" % (token.user, token.timestamp) + "".join(
            random.choice(alphabet) for _ in range(length))
    print("Private value", private)
    # Only the hashed (public) form ever leaves this function.
    public = hashlib.sha512(private.encode()).hexdigest()
    print("Public value", public)
    return public
class Token(Model):
    """A token model.

    The hashed value produced by :func:`set_value` doubles as the
    primary key.
    """
    id = None  # no surrogate integer key; `value` is the primary key
    user = Integer()       # owning user's id
    timestamp = Integer()  # creation timestamp of the token
    value = String(pkey=True, default=set_value)
|
import hashlib
import random
import string
from ext.aboard.model import *
class Token(Model):
    """A token model keyed by its hashed value."""

    id = None
    user = Integer()
    timestamp = Integer()
    value = String(pkey=True)

    def __init__(self, user=None, timestamp=None):
        # Generate a value only when both identifying pieces are present;
        # otherwise the row is created with value=None.
        if user and timestamp:
            token_value = Token.get_token_value(user, timestamp)
        else:
            token_value = None
        Model.__init__(self, user=user, timestamp=timestamp, value=token_value)

    @staticmethod
    def get_token_value(user, timestamp):
        """Randomly create and return a token value.

        The private form (user_timestamp plus 20-40 random characters) is
        never stored; only its SHA-512 hex digest is returned.
        """
        alphabet = string.digits + string.ascii_letters + "_-+^$"
        count = random.randint(20, 40)
        private = "%s_%s" % (user, timestamp)
        for _ in range(count):
            private += random.choice(alphabet)
        print("Private value", private)
        public = hashlib.sha512(private.encode()).hexdigest()
        print("Public value", public)
        return public
|
Use the Model constructor to generate a default value
|
[user] Use the Model constructor to generate a default value
|
Python
|
bsd-3-clause
|
v-legoff/pa-poc2,v-legoff/pa-poc2
|
1b40a51e371d10cc37f4d8f8c7557dbc741d690f
|
butterfly/ImageLayer/HDF5.py
|
butterfly/ImageLayer/HDF5.py
|
from Datasource import Datasource
import numpy as np
import h5py
class HDF5(Datasource):
    """Datasource that reads tiles out of a single-dataset HDF5 file."""

    @classmethod
    def load_tile(ds, query):
        """Load the subvolume requested by *query*.

        Returns the voxels between the query's source bounds, taking every
        Sk/Sj/Si-th sample along the three axes.
        """
        Sk, Sj, Si = query.all_scales
        path = query.OUTPUT.INFO.PATH.VALUE
        (K0, J0, I0), (K1, J1, I1) = query.source_bounds
        with h5py.File(path) as fd:
            # The file is assumed to contain a single dataset.
            vol = fd[fd.keys()[0]]
            # BUG FIX: the bounds above were unpacked but never used, so
            # every request subsampled the ENTIRE volume into memory.
            # Slice down to the requested tile instead.
            return vol[K0:K1:Sk, J0:J1:Sj, I0:I1:Si]
|
from Datasource import Datasource
import numpy as np
import h5py
class HDF5(Datasource):
pass
@classmethod
def load_tile(ds, query):
Sk,Sj,Si = query.all_scales
path = query.OUTPUT.INFO.PATH.VALUE
z0,y0,x0 = query.index_zyx*query.blocksize
z1,y1,x1 = query.index_zyx*query.blocksize + query.blocksize
with h5py.File(path) as fd:
vol = fd[fd.keys()[0]]
return vol[z0:z1:Sk,y0:y1:Sj,x0:x1:Si]
|
Fix loading a whole tile into memory.
|
Fix loading a whole tile into memory.
|
Python
|
mit
|
Rhoana/butterfly,Rhoana/butterfly,Rhoana/butterfly2,Rhoana/butterfly,Rhoana/butterfly
|
78c5580d349d6bec0715a36c13437177a726f7ad
|
tests/test_isim.py
|
tests/test_isim.py
|
import pytest
def test_isim():
import os
import shutil
import tempfile
import yaml
from fusesoc.edatools import get_edatool
from edalize_common import compare_files, files, param_gen, tests_dir, vpi
(parameters, args) = param_gen(['plusarg', 'vlogdefine', 'vlogparam'])
work_root = tempfile.mkdtemp(prefix='isim_')
eda_api_file = os.path.join(work_root, 'test_isim_0.eda.yml')
with open(eda_api_file,'w') as f:
f.write(yaml.dump({'name' : 'test_isim_0',
'files' : files,
'parameters' : parameters,
'tool_options' : {'isim' : {
'fuse_options' : ['some', 'fuse_options'],
'isim_options' : ['a', 'few', 'isim_options']}},
'toplevel' : 'top_module',
'vpi' : vpi}))
backend = get_edatool('isim')(eda_api_file=eda_api_file)
backend.configure(args)
ref_dir = os.path.join(tests_dir, __name__)
compare_files(ref_dir, work_root,
['config.mk',
'Makefile',
'run_test_isim_0.tcl',
'test_isim_0.prj'])
dummy_exe = 'test_isim_0'
shutil.copy(os.path.join(ref_dir, dummy_exe),
os.path.join(work_root, dummy_exe))
backend.run([])
compare_files(ref_dir, work_root, ['run.cmd'])
|
import pytest
def test_isim():
import os
import shutil
from edalize_common import compare_files, setup_backend, tests_dir
ref_dir = os.path.join(tests_dir, __name__)
paramtypes = ['plusarg', 'vlogdefine', 'vlogparam']
name = 'test_isim_0'
tool = 'isim'
tool_options = {
'fuse_options' : ['some', 'fuse_options'],
'isim_options' : ['a', 'few', 'isim_options'],
}
(backend, args, work_root) = setup_backend(paramtypes, name, tool, tool_options)
backend.configure(args)
compare_files(ref_dir, work_root,
['config.mk',
'Makefile',
'run_test_isim_0.tcl',
'test_isim_0.prj'])
dummy_exe = 'test_isim_0'
shutil.copy(os.path.join(ref_dir, dummy_exe),
os.path.join(work_root, dummy_exe))
backend.run([])
compare_files(ref_dir, work_root, ['run.cmd'])
|
Reduce code duplication in isim test
|
Reduce code duplication in isim test
|
Python
|
bsd-2-clause
|
olofk/fusesoc,olofk/fusesoc,lowRISC/fusesoc,lowRISC/fusesoc
|
1e60c603321729c71895ac5dc19adc669cce4a72
|
tests/udev_test.py
|
tests/udev_test.py
|
#!/usr/bin/python
import unittest
import mock
class UdevTest(unittest.TestCase):
def setUp(self):
import blivet.udev
blivet.udev.os = mock.Mock()
blivet.udev.log = mock.Mock()
def test_udev_get_device(self):
import blivet.udev
devices = blivet.udev.global_udev.list_devices(subsystem="block")
for device in devices:
self.assertNotEqual(blivet.udev.get_device(device.sys_path), None)
def udev_settle_test(self):
import blivet.udev
blivet.udev.util = mock.Mock()
blivet.udev.settle()
self.assertTrue(blivet.udev.util.run_program.called)
def udev_trigger_test(self):
import blivet.udev
blivet.udev.util = mock.Mock()
blivet.udev.trigger()
self.assertTrue(blivet.udev.util.run_program.called)
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/python
import unittest
import mock
class UdevTest(unittest.TestCase):
def setUp(self):
import blivet.udev
self._blivet_os = blivet.udev.os
self._blivet_log = blivet.udev.log
self._blivet_util = blivet.udev.util
blivet.udev.os = mock.Mock()
blivet.udev.log = mock.Mock()
blivet.udev.util = mock.Mock()
def tearDown(self):
import blivet.udev
blivet.udev.log = self._blivet_log
blivet.udev.os = self._blivet_os
blivet.udev.util = self._blivet_util
def test_udev_get_device(self):
import blivet.udev
devices = blivet.udev.global_udev.list_devices(subsystem="block")
for device in devices:
self.assertNotEqual(blivet.udev.get_device(device.sys_path), None)
def udev_settle_test(self):
import blivet.udev
blivet.udev.settle()
self.assertTrue(blivet.udev.util.run_program.called)
def udev_trigger_test(self):
import blivet.udev
blivet.udev.trigger()
self.assertTrue(blivet.udev.util.run_program.called)
if __name__ == "__main__":
unittest.main()
|
Clean up mocking done by udev tests when finished.
|
Clean up mocking done by udev tests when finished.
|
Python
|
lgpl-2.1
|
dwlehman/blivet,rvykydal/blivet,AdamWill/blivet,rhinstaller/blivet,vpodzime/blivet,AdamWill/blivet,vojtechtrefny/blivet,vojtechtrefny/blivet,vpodzime/blivet,rvykydal/blivet,rhinstaller/blivet,dwlehman/blivet,jkonecny12/blivet,jkonecny12/blivet
|
c3029a3796437add90cdd6c0033be70fe5766a3a
|
mapit/middleware/__init__.py
|
mapit/middleware/__init__.py
|
import re
from .view_error import *
class JSONPMiddleware(object):
    """Wraps the response body in a JSONP callback when ?callback= is given."""

    def process_response(self, request, response):
        # If the response is a redirect, the callback will be dealt
        # with on the next request:
        if response.status_code == 302:
            return response
        # Hoist the repeated request.GET.get('callback') lookups.
        callback = request.GET.get('callback')
        if callback and re.match('[a-zA-Z0-9_$.]+$', callback):
            cb = callback.encode('utf-8')
            # SECURITY: prefix a `typeof` guard so the body is only executed
            # when the callback really is a function -- a robust mitigation
            # for Rosetta-Flash-style abuse of JSONP endpoints.
            response.content = (b'typeof ' + cb + b" === 'function' && " +
                                cb + b'(' + response.content + b')')
            response.status_code = 200  # Must return OK for JSONP to be processed
        return response
|
import re
from .view_error import *
class JSONPMiddleware(object):
    """Rewrites responses into guarded JSONP when a valid callback is given."""

    def process_response(self, request, response):
        # Redirects pass through untouched; the callback will be dealt
        # with on the follow-up request.
        if response.status_code == 302:
            return response
        callback = request.GET.get('callback')
        if not callback or not re.match('[a-zA-Z0-9_$.]+$', callback):
            return response
        name = callback.encode('utf-8')
        # The `typeof` guard keeps the body inert unless the callback
        # really resolves to a function on the client.
        guarded = b'typeof ' + name + b" === 'function' && " + name
        response.content = guarded + b'(' + response.content + b')'
        response.status_code = 200  # Must return OK for JSONP to be processed
        return response
|
Include typeof check in JSONP callback response.
|
Include typeof check in JSONP callback response.
This is more robust, and helps against attacks such as Rosetta Flash:
https://miki.it/blog/2014/7/8/abusing-jsonp-with-rosetta-flash/
|
Python
|
agpl-3.0
|
opencorato/mapit,chris48s/mapit,opencorato/mapit,Code4SA/mapit,Code4SA/mapit,opencorato/mapit,Code4SA/mapit,chris48s/mapit,chris48s/mapit
|
9c2075f13e2aa8ff7a5c4644208e8de17ebefbab
|
finding-geodesic-basins-with-scipy.py
|
finding-geodesic-basins-with-scipy.py
|
# IPython log file
import numpy as np
from scipy import sparse
from skimage import graph
from skimage.graph import _mcp
image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]])
mcp = graph.MCP_Geometric(image)
destinations = [[0, 0], [3, 3]]
costs, traceback = mcp.find_costs(destinations)
offsets = _mcp.make_offsets(2, True)
indices = np.indices(traceback.shape)
offsets.append([0, 0])
offsets_arr = np.array(offsets)
offset_to_neighbor = offsets_arr[traceback]
neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1))
ids = np.arange(traceback.size).reshape(image.shape)
neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape)
g = sparse.coo_matrix((
np.ones(traceback.size),
(ids.flat, neighbor_ids.flat),
)).tocsr()
basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4))
print(basins)
|
# IPython log file
# See https://stackoverflow.com/questions/62135639/mcp-geometrics-for-calculating-marketsheds/62144556
import numpy as np
from scipy import sparse
from skimage import graph
from skimage.graph import _mcp
image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]])
mcp = graph.MCP_Geometric(image)
destinations = [[0, 0], [3, 3]]
costs, traceback = mcp.find_costs(destinations)
offsets = _mcp.make_offsets(2, True)
indices = np.indices(traceback.shape)
offsets.append([0, 0])
offsets_arr = np.array(offsets)
offset_to_neighbor = offsets_arr[traceback]
neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1))
ids = np.arange(traceback.size).reshape(image.shape)
neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape)
g = sparse.coo_matrix((
np.ones(traceback.size),
(ids.flat, neighbor_ids.flat),
)).tocsr()
basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4))
print(basins)
|
Add link to SO question
|
Add link to SO question
|
Python
|
bsd-3-clause
|
jni/useful-histories
|
897b637ca9de93b7107cd6d6ab76ed0cb485aba9
|
classifiers/ppmc.py
|
classifiers/ppmc.py
|
__author__ = 'sharvey'
from classifiers import Classifier
from corpus.mysql.reddit import RedditMySQLCorpus
from ppm import Trie
class RedditPPM(Classifier):
corpus = None
trie = None
user = None
reddit = None
order = 5
def __init__(self, corpus):
self.corpus = corpus
def train(self, corpus_type, user, reddit, char_count, order=5):
if (self.trie is not None):
del self.trie
self.trie = Trie(order)
self.reddit = reddit
self.user = user
document = self.corpus.get_train_documents(corpus_type, user, reddit, char_count).encode('utf-8')
for c in document:
self.trie.add(c)
def test(self, corpus_type, reddit, char_count):
documents = self.corpus.get_test_documents(corpus_type, reddit)
results = []
for row in documents:
test_bits = 0
newtrie = self.trie.duplicate()
document = row['text'].encode('utf-8')
for c in document:
newtrie.add(c)
test_bits += newtrie.bit_encoding
del newtrie
results.append({'id': row['id'],
'label': (self.user == row['username']),
'score': test_bits/(len(document)*8)})
return results
def run_reddit_experiment(corpus_type, char_count, reddits, mysql_opts):
corpus = RedditMySQLCorpus()
corpus.setup(**mysql_opts)
|
__author__ = 'sharvey'
from classifiers import Classifier
from corpus.mysql.reddit import RedditMySQLCorpus
from ppm import Trie
class RedditPPM(Classifier):
corpus = None
trie = None
user = None
reddit = None
order = 5
def __init__(self, corpus):
self.corpus = corpus
def train(self, corpus_type, user, reddit, char_count, order=5):
if (self.trie is not None):
del self.trie
self.trie = Trie(order)
self.reddit = reddit
self.user = user
document = self.corpus.get_train_documents(corpus_type, user, reddit, char_count).encode('utf-8')
for c in document:
self.trie.add(c)
def test(self, corpus_type, reddit, char_count):
documents = self.corpus.get_test_documents(corpus_type, reddit)
results = []
for row in documents:
test_bits = 0
newtrie = self.trie.duplicate()
document = row['text'].encode('utf-8')
for c in document:
newtrie.add(c)
test_bits += newtrie.bit_encoding
del newtrie
results.append({'id': row['id'],
'username': row['username'],
'label': (self.user == row['username']),
'score': test_bits/(len(document)*8)})
return results
|
Add field for test result return
|
Add field for test result return
|
Python
|
mit
|
worldwise001/stylometry
|
10be9375fb201d7a271babb81ac25c22c70f219b
|
template.py
|
template.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2010, Luis Pedro Coelho <[email protected]>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import division
import numpy as np
|
# -*- coding: utf-8 -*-
# Copyright (C) 2009-2010, Luis Pedro Coelho <[email protected]>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import division
import numpy as np
|
Remove spaces at the end of lines.
|
Remove spaces at the end of lines.
|
Python
|
mit
|
luispedro/waldo,luispedro/waldo
|
19634d62f5a9b2c1aa9f867c247f46ed7f19ac07
|
openstack_dashboard/views.py
|
openstack_dashboard/views.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import views
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(get_user_home(request.user))
form = views.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django import shortcuts
from django.views.decorators import vary
import horizon
from openstack_auth import forms
def get_user_home(user):
if user.is_superuser:
return horizon.get_dashboard('admin').get_absolute_url()
return horizon.get_dashboard('project').get_absolute_url()
@vary.vary_on_cookie
def splash(request):
if request.user.is_authenticated():
return shortcuts.redirect(get_user_home(request.user))
form = forms.Login(request)
request.session.clear()
request.session.set_test_cookie()
return shortcuts.render(request, 'splash.html', {'form': form})
|
Fix issues with importing the Login form
|
Fix issues with importing the Login form
The Login form lives in openstack_auth.forms and should be directly
imported from that file.
Change-Id: I42808530024bebb01604adbf4828769812856bf3
Closes-Bug: #1332149
(cherry picked from commit 345ccc9d503e6e55fe46d7813958c0081cc1cffe)
|
Python
|
apache-2.0
|
rickerc/horizon_audit,rickerc/horizon_audit,rickerc/horizon_audit
|
552caa1d1fefcc48107eae02091aaca4a39123b4
|
src/zeit/content/cp/field.py
|
src/zeit/content/cp/field.py
|
import zc.form.field
import zope.schema.interfaces
class DynamicCombination(zc.form.field.Combination):
def __init__(self, type_field, type_interface, **kw):
self.type_field = type_field
self.type_field.__name__ = "combination_00"
self.fields = (type_field,)
self.type_interface = type_interface
super(zc.form.field.Combination, self).__init__(**kw)
def generate_fields(self, selector):
fields = []
field = self.type_interface[selector]
if zope.schema.interfaces.ICollection.providedBy(field):
fields.extend(field.value_type.fields)
else:
fields.append(field)
fields = [x.bind(self.context) for x in fields]
for ix, field in enumerate(fields):
field.__name__ = "combination_%02d" % (ix + 1)
return fields
def _validate(self, value):
# XXX I hope we can get away with no validation here, since all input
# happens through widgets and so should be valid anyway. Otherwise we
# have to synthesize fields here too, like DynamicCombinationWidget.
pass
|
import zc.form.field
import zc.form.interfaces
import zope.schema.interfaces
class DynamicCombination(zc.form.field.Combination):
def __init__(self, type_field, type_interface, **kw):
self.type_field = type_field
self.type_field.__name__ = "combination_00"
self.fields = (type_field,)
self.type_interface = type_interface
super(zc.form.field.Combination, self).__init__(**kw)
def generate_fields(self, selector):
result = []
field = self.type_interface[selector]
if zope.schema.interfaces.ICollection.providedBy(field):
field = field.value_type
if zc.form.interfaces.ICombinationField.providedBy(field):
result.extend(field.fields)
else:
result.append(field)
result = [x.bind(self.context) for x in result]
for ix, field in enumerate(result):
field.__name__ = "combination_%02d" % (ix + 1)
return result
def _validate(self, value):
# XXX I hope we can get away with no validation here, since all input
# happens through widgets and so should be valid anyway. Otherwise we
# have to synthesize fields here too, like DynamicCombinationWidget.
pass
|
Support sequences with value_type other than combination
|
TMS-227: Support sequences with value_type other than combination
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp
|
0b4097394fd05da204624d1c6093176feb158bb1
|
ajaxuploader/backends/thumbnail.py
|
ajaxuploader/backends/thumbnail.py
|
import os
from sorl.thumbnail import get_thumbnail
from ajaxuploader.backends.local import LocalUploadBackend
class ThumbnailUploadBackend(LocalUploadBackend):
def __init__(self, dimension):
self._dimension = dimension
def upload_complete(self, request, filename):
thumbnail = get_thumbnail(self._filename, self._dimension)
os.unlink(self._filename)
return {"path": thumbnail.name}
|
import os
from django.conf import settings
from sorl.thumbnail import get_thumbnail
from ajaxuploader.backends.local import LocalUploadBackend
class ThumbnailUploadBackend(LocalUploadBackend):
DIMENSION = "100x100"
def upload_complete(self, request, filename):
thumbnail = get_thumbnail(self._path, self.DIMENSION)
os.unlink(self._path)
return {"path": settings.MEDIA_URL + thumbnail.name}
|
Use dimension as a constant, so we keep same interface for all backends; also returns full path to the place where image was saved
|
Use dimension as a constant, so we keep same interface for all backends; also returns full path to the place where image was saved
|
Python
|
bsd-3-clause
|
OnlyInAmerica/django-ajax-uploader,derek-adair/django-ajax-uploader,derek-adair/django-ajax-uploader,skoczen/django-ajax-uploader,brilliant-org/django-ajax-uploader,derek-adair/django-ajax-uploader,brilliant-org/django-ajax-uploader,skoczen/django-ajax-uploader,OnlyInAmerica/django-ajax-uploader,brilliant-org/django-ajax-uploader
|
a3c4f151a9a44aae3528492d4a00a1815c52cda6
|
website_membership_contact_visibility/models/res_partner.py
|
website_membership_contact_visibility/models/res_partner.py
|
# -*- coding: utf-8 -*-
# © 2016 Michael Viriyananda
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agp
from openerp import fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
website_membership_published = fields.Boolean(
string='Visible In The Website',
copy=False,
default=True)
|
# -*- coding: utf-8 -*-
# © 2016 Michael Viriyananda
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agp
from openerp import fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
website_membership_published = fields.Boolean(
string='Visible Contact Info On The Website',
copy=False,
default=True)
|
Change the label of "website_membership_published" into "Visible Contact Info On The Website"
|
Change the label of "website_membership_published" into "Visible Contact Info On The Website"
|
Python
|
agpl-3.0
|
open-synergy/vertical-association
|
477faabee7fc674f8ce0c04663b9eff3943e83fa
|
trac/versioncontrol/web_ui/__init__.py
|
trac/versioncontrol/web_ui/__init__.py
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
from trac.versioncontrol.web_ui.browser import *
from trac.versioncontrol.web_ui.changeset import *
from trac.versioncontrol.web_ui.log import *
|
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
|
Add missing `svn:eol-style : native` prop, which prevented making clean patches against the early 0.9b1 archives (now both the .zip and the .tar.gz have CRLFs for this file)
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@2214 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
moreati/trac-gitsvn,exocad/exotrac,dokipen/trac,dafrito/trac-mirror,exocad/exotrac,dokipen/trac,exocad/exotrac,dafrito/trac-mirror,moreati/trac-gitsvn,dafrito/trac-mirror,dokipen/trac,moreati/trac-gitsvn,dafrito/trac-mirror,moreati/trac-gitsvn,exocad/exotrac
|
b56c2063dbb8ea6145048eb8a74bfd2693b2b6f4
|
app.py
|
app.py
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route("/ping")
def hello():
return "pong"
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route("/ping")
def hello():
return "pong"
# Returns larger sample JSON from http://json.org/example.html to exercise performance with larger payloads
@app.route("/bigger")
def big_response():
return '''{
"glossary": {
"title": "example glossary",
"GlossDiv": {
"title": "S",
"GlossList": {
"GlossEntry": {
"ID": "SGML",
"SortAs": "SGML",
"GlossTerm": "Standard Generalized Markup Language",
"Acronym": "SGML",
"Abbrev": "ISO 8879:1986",
"GlossDef": {
"para": "A meta-markup language, used to create markup languages such as DocBook.",
"GlossSeeAlso": ["GML", "XML"]
},
"GlossSee": "markup"
}
}
}
}
}'''
if __name__ == "__main__":
app.run()
|
Add bigger response payload option of 512B
|
Add bigger response payload option of 512B
|
Python
|
apache-2.0
|
svanoort/python-client-benchmarks,svanoort/python-client-benchmarks
|
d7cb9bdd63b381b81bf89c5e3c1cc3031c5928d9
|
run.py
|
run.py
|
"""
Entry point for running the sqmpy application standalone
"""
import os
from gevent import monkey
monkey.patch_all()
from sqmpy.factory import create_app
# This line added to support heroku deployment
port = int(os.environ.get("PORT", 3000))
app = create_app('../config.py')
app.run(host='0.0.0.0', port=port,
ssl_context=('server.crt', 'server.key'),
debug=True,
threaded=True)
|
"""
Entry point for running the sqmpy application standalone
"""
import os
from gevent import monkey
monkey.patch_all()
from sqmpy.factory import create_app
# This line added to support heroku deployment
port = int(os.environ.get("PORT", 3000))
# Workaround for passing ssh options to underlying library. Since we want
# to avoid any question upon ssh initialization, therefore we have tp add
# this StrictHostKeyChecking=no to ~/.ssh/config, otherwise we will get
# an error when connecting to new host, since there is no way currently to
# pass this option programmatically.
# Pass the correct config file and create the app instance
app = create_app('../config.py')
# If pyOpenSSL is installed it is possible to use adhoc certificates:
# app.run(host='0.0.0.0', port=port, ssl_context='adhoc')
app.run(host='0.0.0.0', port=port, ssl_context=('server.crt', 'server.key'))
|
Add comments and more gitignore
|
Add comments and more gitignore
|
Python
|
bsd-3-clause
|
mehdisadeghi/sqmpy,mehdisadeghi/sqmpy,mehdisadeghi/sqmpy,simphony/sqmpy,simphony/sqmpy,simphony/sqmpy
|
1f16d194ba78ec8ef50959dc37833ed8d5348c38
|
tests/ssh_parameters_test.py
|
tests/ssh_parameters_test.py
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from app.protocols import ssh
def ssh_parameters_test():
ss = ssh(hostname = '127.0.0.1', port = 22, username='user', password='password')
assert(ss.hostname and ss.port, ss.username and ss.password)
|
#!/usr/bin/env python
# -*- coding: utf8 -*-
from app.protocols import ssh
def ssh_parameters_test():
ss = ssh(hostname = '127.0.0.1', port = 22, username='user', password='password')
assert(ss.hostname and ss.port and ss.username and ss.password)
|
Fix typo in ssh test
|
Fix typo in ssh test
|
Python
|
mit
|
rbagrov/xana
|
ca758b2813ae77b795c4318d7d5566cd47ab0ec7
|
postgres/operations.py
|
postgres/operations.py
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
from .fields.composite import composite_type_created
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
composite_type_created.send(sender=self.__class__, db_type=self.name)
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
Send a signal after creation of composite field.
|
Send a signal after creation of composite field.
|
Python
|
bsd-3-clause
|
wlanslovenija/django-postgres
|
7f51b7a74df8e2c8d6756b8c3e95f7fbf47b291b
|
hashbrown/utils.py
|
hashbrown/utils.py
|
from django.conf import settings
from .models import Switch
def is_active(label, user=None):
defaults = getattr(settings, 'HASHBROWN_SWITCH_DEFAULTS', {})
globally_active = defaults[label].get(
'globally_active',
False) if label in defaults else False
description = defaults[label].get(
'description',
'') if label in defaults else ''
switch, created = Switch.objects.get_or_create(
label=label, defaults={
'globally_active': globally_active,
'description': description,
})
if created:
return switch.globally_active
if switch.globally_active or (
user and user.available_switches.filter(pk=switch.pk).exists()
):
return True
return False
|
from django.conf import settings
from .models import Switch
SETTINGS_KEY = 'HASHBROWN_SWITCH_DEFAULTS'
def is_active(label, user=None):
defaults = getattr(settings, SETTINGS_KEY, {})
globally_active = defaults[label].get(
'globally_active',
False) if label in defaults else False
description = defaults[label].get(
'description',
'') if label in defaults else ''
switch, created = Switch.objects.get_or_create(
label=label, defaults={
'globally_active': globally_active,
'description': description,
})
if created:
return switch.globally_active
if switch.globally_active or (
user and user.available_switches.filter(pk=switch.pk).exists()
):
return True
return False
|
Use a constant for the 'HASHBROWN_SWITCH_DEFAULTS' settings key so it is easier to re-use.
|
Use a constant for the 'HASHBROWN_SWITCH_DEFAULTS' settings key so it is easier to re-use.
|
Python
|
bsd-2-clause
|
potatolondon/django-hashbrown
|
df57b55c8ffa2a1948d7442d041415a3f19bbca0
|
python/Cloudbot/bbm.py
|
python/Cloudbot/bbm.py
|
from cloudbot import hook
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if chan in ("#bbm-bots", "#bbm-dev", "#bbm-packs", "#builtbrokenmodding", "#builtbroken"):
message("Owners: Dmodoomsirius, DarkGuardsman");
message("textureArtist: Morton0000");
message("Developers: Snow, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if chan in ("#BBM-bots", "#BBM-Dev", "#BBM-Packs", "#BuiltBrokenModding", "#BuiltBroken"):
message("BuiltBroken servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
|
from cloudbot import hook
bbmChannels = ["#bbm-bots","#bbm-dev","#builtbroken","#builtbrokenmodding","#bbm-packs","#icbm","#artillects "]
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if any(x in chan for x in bbmChannels):
message("Owners: Dmodoomsirius, DarkGuardsman");
#message("Texture Artist: Morton0000");
message("Senior Developers: Kolatra")
message("Junior Developers: Kolatra, shobu9, TheCowGod, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if any(x in chan for x in bbmChannels):
message("BuildBrokenModding servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("bbmchan")
def chans(text, message):
message("The official BuiltBroken Channels are: " + " , ".join(bbmChannels))
@hook.command("bbmhelp")
def bbmhelp(text, message):
message("If you are looking for who is the staff type .bbmstaff")
message ("if you are looking for our sponsors please type .sponsor")
message("If you are looking for our official channels please do .bbmchan")
|
Update and add more commands.
|
Update and add more commands.
|
Python
|
unknown
|
dmodoomsirius/DmodCode,dmodoomsirius/DmodCode,dsirius/DmodCode,dmodoomsirius/DmodCode,dsirius/DmodCode,dsirius/DmodCode
|
a30be93bf4aeef78158898c07252fd29e0303a57
|
frigg/authentication/models.py
|
frigg/authentication/models.py
|
# -*- coding: utf8 -*-
from django.contrib.auth.models import AbstractUser
from django.utils.functional import cached_property
from social.apps.django_app.default.models import UserSocialAuth
from frigg.helpers import github
class User(AbstractUser):
@cached_property
def github_token(self):
try:
return self.social_auth.get(provider='github').extra_data['access_token']
except UserSocialAuth.DoesNotExist:
return
def save(self, *args, **kwargs):
create = hasattr(self, 'id')
super(User, self).save(*args, **kwargs)
if create:
self.update_repo_permissions()
def update_repo_permissions(self):
if self.github_token:
github.update_repo_permissions(self)
|
# -*- coding: utf8 -*-
from django.contrib.auth.models import AbstractUser
from django.utils.functional import cached_property
from social.apps.django_app.default.models import UserSocialAuth
from frigg.helpers import github
class User(AbstractUser):
@cached_property
def github_token(self):
try:
return self.social_auth.get(provider='github').extra_data['access_token']
except UserSocialAuth.DoesNotExist:
return
def save(self, *args, **kwargs):
create = not hasattr(self, 'id')
super(User, self).save(*args, **kwargs)
if create:
self.update_repo_permissions()
def update_repo_permissions(self):
if self.github_token:
github.update_repo_permissions(self)
|
Fix update permission on user
|
Fix update permission on user
Only run it when user is created, not when it they are saved
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
43f647f691f6c279d8e126c8e62e05af81baff38
|
cal_pipe/update_pipeline_paths.py
|
cal_pipe/update_pipeline_paths.py
|
'''
Update EVLA pipeline variables to the current system.
'''
def update_paths(pipe_dict, ms_path, pipepath):
pipe_dict['ms_active'] = ms_path
pipe_dict['SDM_name'] = ms_path+".ms"
pipe_dict['pipepath'] = pipepath
return pipe_dict
if __name__ == '__main__':
import sys
pipe_var_file = str(sys.argv[1])
ms_path = str(sys.argv[2])
pipepath = str(sys.argv[3])
import shelve
pipe_dict = shelve.open(pipe_var_file, writeback=True)
pipe_dict = update_paths(pipe_dict, ms_path, pipepath)
pipe_dict.flush()
pipe_dict.close()
|
'''
Update EVLA pipeline variables to the current system.
'''
def update_paths(pipe_dict, ms_path, pipepath):
pipe_dict['ms_active'] = ms_path
pipe_dict['SDM_name'] = ms_path+".ms"
pipe_dict['pipepath'] = pipepath
return pipe_dict
if __name__ == '__main__':
import sys
pipe_var_file = str(sys.argv[5])
ms_path = str(sys.argv[6])
pipepath = str(sys.argv[7])
import shelve
pipe_dict = shelve.open(pipe_var_file, writeback=True)
pipe_dict = update_paths(pipe_dict, ms_path, pipepath)
pipe_dict.flush()
pipe_dict.close()
|
Change cmd line args to reflect change to CASA
|
Change cmd line args to reflect change to CASA
|
Python
|
mit
|
e-koch/canfar_scripts,e-koch/canfar_scripts
|
411175d40b449a793528920c3745ca831f6f55e0
|
debug_toolbar/panels/version.py
|
debug_toolbar/panels/version.py
|
import sys
import django
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import DebugPanel
class VersionDebugPanel(DebugPanel):
"""
Panel that displays the Django version.
"""
name = 'Version'
template = 'debug_toolbar/panels/versions.html'
has_content = True
def nav_title(self):
return _('Versions')
def nav_subtitle(self):
return 'Django %s' % django.get_version()
def url(self):
return ''
def title(self):
return _('Versions')
def process_response(self, request, response):
versions = {}
versions['Python'] = '%d.%d.%d' % sys.version_info[:3]
for app in settings.INSTALLED_APPS + ['django']:
name = app.split('.')[-1].replace('_', ' ').capitalize()
__import__(app)
app = sys.modules[app]
if hasattr(app, 'get_version'):
get_version = app.get_version
if callable(get_version):
version = get_version()
else:
version = get_version
elif hasattr(app, 'VERSION'):
version = app.VERSION
elif hasattr(app, '__version__'):
version = app.__version__
else:
continue
if isinstance(version, (list, tuple)):
version = '.'.join(str(o) for o in version)
versions[name] = version
self.record_stats({
'versions': versions,
'paths': sys.path,
})
|
import sys
import django
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import DebugPanel
class VersionDebugPanel(DebugPanel):
"""
Panel that displays the Django version.
"""
name = 'Version'
template = 'debug_toolbar/panels/versions.html'
has_content = True
def nav_title(self):
return _('Versions')
def nav_subtitle(self):
return 'Django %s' % django.get_version()
def url(self):
return ''
def title(self):
return _('Versions')
def process_response(self, request, response):
versions = {}
versions['Python'] = '%d.%d.%d' % sys.version_info[:3]
for app in list(settings.INSTALLED_APPS) + ['django']:
name = app.split('.')[-1].replace('_', ' ').capitalize()
__import__(app)
app = sys.modules[app]
if hasattr(app, 'get_version'):
get_version = app.get_version
if callable(get_version):
version = get_version()
else:
version = get_version
elif hasattr(app, 'VERSION'):
version = app.VERSION
elif hasattr(app, '__version__'):
version = app.__version__
else:
continue
if isinstance(version, (list, tuple)):
version = '.'.join(str(o) for o in version)
versions[name] = version
self.record_stats({
'versions': versions,
'paths': sys.path,
})
|
Convert settings.INSTALLED_APPS to list before concatenating django.
|
Convert settings.INSTALLED_APPS to list before concatenating django.
According to the Django documentation settings.INSTALLED_APPS is a
tuple. To go for sure that only list + list are concatenated,
settings.INSTALLED_APPS is converted to list type before adding
['django'].
|
Python
|
bsd-3-clause
|
stored/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,megcunningham/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,spookylukey/django-debug-toolbar,Endika/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,sidja/django-debug-toolbar,peap/django-debug-toolbar,ivelum/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,sidja/django-debug-toolbar,peap/django-debug-toolbar,guilhermetavares/django-debug-toolbar,stored/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,peap/django-debug-toolbar,Endika/django-debug-toolbar,barseghyanartur/django-debug-toolbar,ivelum/django-debug-toolbar,barseghyanartur/django-debug-toolbar,calvinpy/django-debug-toolbar,tim-schilling/django-debug-toolbar,jazzband/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,guilhermetavares/django-debug-toolbar,tim-schilling/django-debug-toolbar,barseghyanartur/django-debug-toolbar,tim-schilling/django-debug-toolbar,guilhermetavares/django-debug-toolbar,sidja/django-debug-toolbar,megcunningham/django-debug-toolbar,calvinpy/django-debug-toolbar,jazzband/django-debug-toolbar,seperman/django-debug-toolbar,jazzband/django-debug-toolbar,seperman/django-debug-toolbar,calvinpy/django-debug-toolbar,pevzi/django-debug-toolbar,stored/django-debug-toolbar,pevzi/django-debug-toolbar,pevzi/django-debug-toolbar,ivelum/django-debug-toolbar,spookylukey/django-debug-toolbar,seperman/django-debug-toolbar,spookylukey/django-debug-toolbar,Endika/django-debug-toolbar,megcunningham/django-debug-toolbar
|
373fd6e9332ca225c1939b5bba675161bdec3596
|
bika/lims/upgrade/__init__.py
|
bika/lims/upgrade/__init__.py
|
# see https://gist.github.com/malthe/704910
import imp
import sys
def create_modules(module_path):
path = ""
module = None
for element in module_path.split('.'):
path += element
try:
module = __import__(path)
except ImportError:
new = imp.new_module(path)
if module is not None:
setattr(module, element, new)
module = new
sys.modules[path] = module
__import__(path)
path += "."
return module
def stub(module_path, class_name, base_class, meta_class=type):
module = create_modules(module_path)
cls = meta_class(class_name, (base_class, ), {})
setattr(module, class_name, cls)
def skip_pre315(portal):
# Hack prevent out-of-date upgrading
# Related: PR #1484
# https://github.com/bikalabs/Bika-LIMS/pull/1484
qi = portal.portal_quickinstaller
info = qi.upgradeInfo('bika.lims')
if info['installedVersion'] > '315':
return True
|
# see https://gist.github.com/malthe/704910
import imp
import sys
def create_modules(module_path):
path = ""
module = None
for element in module_path.split('.'):
path += element
try:
module = __import__(path)
except ImportError:
new = imp.new_module(path)
if module is not None:
setattr(module, element, new)
module = new
sys.modules[path] = module
__import__(path)
path += "."
return module
def stub(module_path, class_name, base_class, meta_class=type):
module = create_modules(module_path)
cls = meta_class(class_name, (base_class, ), {})
setattr(module, class_name, cls)
def skip_pre315(portal):
# Hack prevent out-of-date upgrading
# Related: PR #1484
# https://github.com/bikalabs/Bika-LIMS/pull/1484
qi = portal.portal_quickinstaller
info = qi.upgradeInfo('bika.lims')
if info['installedVersion'] > '315':
return True
return False
|
Add return False to be sure all works as expected
|
Add return False to be sure all works as expected
|
Python
|
agpl-3.0
|
labsanmartin/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,rockfruit/bika.lims,rockfruit/bika.lims
|
6168ce884a1234910bace1a026402a21501b499c
|
buildbot_travis/steps/base.py
|
buildbot_travis/steps/base.py
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
defer.returnValue(config)
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
config = TravisYml()
struct = self.build.getProperty(".travis.yml", None)
if struct:
config.parse(struct)
defer.returnValue(config)
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
self.build.setProperty(".travis.yml", config.config, ".VCS")
defer.returnValue(config)
|
Save .travis.yml into build properties
|
Save .travis.yml into build properties
|
Python
|
unknown
|
tardyp/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,buildbot/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis
|
7debde34bd1c5fd903edf4428aa89060da6de037
|
promgen/celery.py
|
promgen/celery.py
|
from __future__ import absolute_import, unicode_literals
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'promgen.settings')
app = Celery('promgen')
# Using a string here means the worker don't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
# Because of the way that celery is de-coupled, it can be quite difficult to
# get an instance of 'self' when converting a class method into a celery task.
# as a way to get around this, we can mimick a @task(bind=True) call so that we
# get a self instance to the Celery task, and then we can set a __klass__
# attribute that we can use to get to our other class functions
def wrap_send(cls):
if hasattr(cls, '_send'):
print('Wrapping ', cls)
cls._send = app.task(cls._send, bind=True, lazy=False)
cls._send.__klass__ = cls()
return cls
|
from __future__ import absolute_import, unicode_literals
import logging
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'promgen.settings')
logger = logging.getLogger(__name__)
app = Celery('promgen')
# Using a string here means the worker don't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
# should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
# Because of the way that celery is de-coupled, it can be quite difficult to
# get an instance of 'self' when converting a class method into a celery task.
# as a way to get around this, we can mimick a @task(bind=True) call so that we
# get a self instance to the Celery task, and then we can set a __klass__
# attribute that we can use to get to our other class functions
def wrap_send(cls):
if hasattr(cls, '_send'):
logger.debug('Wrapping %s', cls)
cls._send = app.task(cls._send, bind=True, lazy=False)
cls._send.__klass__ = cls()
return cls
|
Swap print for logging statement
|
Swap print for logging statement
|
Python
|
mit
|
kfdm/promgen,kfdm/promgen,kfdm/promgen,kfdm/promgen
|
dbfa6398ae84d6920181a750f1447fd1b9a9c521
|
tests/test_packet.py
|
tests/test_packet.py
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Tests for Packet.
"""
import os
import json
import pytest
from laniakea.core.providers.packet import PacketManager
@pytest.fixture
def packet():
with open(os.path.join(os.getcwd(), 'laniakea/examples/packet.json')) as fo:
conf = json.loads(fo.read())
return PacketManager(conf)
def test_list_projects(packet):
for plan in packet.list_projects():
assert hasattr(plan, 'name')
assert hasattr(plan, 'id')
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Tests for Packet.
"""
import os
import json
import pytest
from laniakea.core.providers.packet import PacketManager
#@pytest.fixture
#def packet():
# with open(os.path.join(os.getcwd(), 'laniakea/examples/packet.json')) as fo:
# conf = json.loads(fo.read())
# return PacketManager(conf)
#def test_list_projects(packet):
# for plan in packet.list_projects():
# assert hasattr(plan, 'name')
# assert hasattr(plan, 'id')
|
Disable dummy Packet test temporarily
|
Disable dummy Packet test temporarily
|
Python
|
mpl-2.0
|
nth10sd/laniakea,MozillaSecurity/laniakea,MozillaSecurity/laniakea,nth10sd/laniakea
|
1239623e7e23d7c51e864f715c0908ef2c0d2765
|
tests/test_reduce.py
|
tests/test_reduce.py
|
import mr_streams as ms
import unittest
# :::: auxilary functions ::::
def sum_reduction(x,y):
return x + y
class TestMisc(unittest.TestCase):
def test_sum_reduce(self):
_ = ms.stream([1,2,3,4,5]).reduce(sum_reduction)
assert _ is 15
def test_reduce_with_one_element(self):
_ = ms.stream([1]).reduce(sum_reduction)
assert _ is 1
def test_empty_reduce(self):
try:
_ = ms.stream([]).reduce(sum_reduction)
except ms.IllegalStreamOperationException:
pass
|
import mr_streams as ms
import unittest
# :::: auxilary functions ::::
def sum_reduction(x,y):
return x + y
class TestMisc(unittest.TestCase):
def test_sum_reduce(self):
_ = ms.stream([1,2,3,4,5]).reduce(sum_reduction)
assert _ is 15
def test_initializer(self):
_ = ms.stream([1]).reduce(sum_reduction, initializer= 1)
assert _ is 2
def test_reduce_with_one_element(self):
_ = ms.stream([1]).reduce(sum_reduction)
assert _ is 1
def test_empty_reduce(self):
try:
_ = ms.stream([]).reduce(sum_reduction)
except ms.IllegalStreamOperationException:
pass
|
Refactor reduce to handle edge-case streams of length 0 and 1.
|
Refactor reduce to handle edge-case streams of length 0 and 1.
|
Python
|
mit
|
caffeine-potent/Streamer-Datastructure
|
9a97b9df87f06268ab1075726835da95f4852052
|
romanesco/format/tree/nested_to_vtktree.py
|
romanesco/format/tree/nested_to_vtktree.py
|
from romanesco.format import dict_to_vtkarrays, dict_to_vtkrow
import vtk
vtk_builder = vtk.vtkMutableDirectedGraph()
node_fields = input["node_fields"]
edge_fields = input["edge_fields"]
dict_to_vtkarrays(input["node_data"], node_fields, vtk_builder.GetVertexData())
if "children" in input and len(input["children"]) > 0:
dict_to_vtkarrays(input["children"][0]["edge_data"], edge_fields,
vtk_builder.GetEdgeData())
def process_node(vtknode, node):
if "children" in node:
for n in node["children"]:
vtkchild = vtk_builder.AddVertex()
vtkparentedge = vtk_builder.AddGraphEdge(vtknode, vtkchild).GetId()
dict_to_vtkrow(n["node_data"], vtk_builder.GetVertexData())
if "edge_data" in n:
dict_to_vtkrow(n["edge_data"], vtk_builder.GetEdgeData())
process_node(vtkchild, n)
vtk_builder.AddVertex()
dict_to_vtkrow(input["node_data"], vtk_builder.GetVertexData())
process_node(0, input)
output = vtk.vtkTree()
output.ShallowCopy(vtk_builder)
|
from romanesco.format import dict_to_vtkarrays, dict_to_vtkrow
import vtk
vtk_builder = vtk.vtkMutableDirectedGraph()
node_fields = input["node_fields"]
edge_fields = input["edge_fields"]
dict_to_vtkarrays(input["node_data"], node_fields, vtk_builder.GetVertexData())
if "children" in input and len(input["children"]) > 0:
dict_to_vtkarrays(input["children"][0]["edge_data"], edge_fields,
vtk_builder.GetEdgeData())
def process_node(vtknode, node):
if "children" in node:
for n in node["children"]:
vtkchild = vtk_builder.AddVertex()
vtkparentedge = vtk_builder.AddGraphEdge(vtknode, vtkchild).GetId()
dict_to_vtkrow(n["node_data"], vtk_builder.GetVertexData())
dict_to_vtkrow(n["edge_data"], vtk_builder.GetEdgeData())
process_node(vtkchild, n)
vtk_builder.AddVertex()
dict_to_vtkrow(input["node_data"], vtk_builder.GetVertexData())
process_node(0, input)
output = vtk.vtkTree()
output.ShallowCopy(vtk_builder)
|
Revert "tolerate missing edge data"
|
Revert "tolerate missing edge data"
This reverts commit 93f1f6b24b7e8e61dbbfebe500048db752bc9fed.
|
Python
|
apache-2.0
|
Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,girder/girder_worker,girder/girder_worker,girder/girder_worker
|
5e6e784a5b54f4ac6d1e7841a46772e5aaac9c2d
|
getpaid/backends/paymill/__init__.py
|
getpaid/backends/paymill/__init__.py
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from getpaid.backends import PaymentProcessorBase
class PaymentProcessor(PaymentProcessorBase):
BACKEND = 'getpaid.backends.paymill'
BACKEND_NAME = _('Paymill')
BACKEND_ACCEPTED_CURRENCY = ('EUR', 'CZK', 'DKK', 'HUF', 'ISK', 'ILS', 'LVL',
'CHF', 'NOK', 'PLN', 'SEK', 'TRY', 'GBP', )
def get_gateway_url(self, request):
return reverse('getpaid-paymill-authorization', kwargs={'pk' : self.payment.pk}), "GET", {}
|
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from getpaid.backends import PaymentProcessorBase
class PaymentProcessor(PaymentProcessorBase):
BACKEND = 'getpaid.backends.paymill'
BACKEND_NAME = _('Paymill')
BACKEND_ACCEPTED_CURRENCY = ('EUR', 'CZK', 'DKK', 'HUF', 'ISK', 'ILS', 'LVL',
'CHF', 'NOK', 'PLN', 'SEK', 'TRY', 'GBP', 'USD', )
def get_gateway_url(self, request):
return reverse('getpaid-paymill-authorization', kwargs={'pk' : self.payment.pk}), "GET", {}
|
Add USD to supported currencies in Paymill backend
|
Add USD to supported currencies in Paymill backend
USD was not listed as a supported currency in the init file. This was causing 403 Forbidden errors which were hard to debug, because the Paymill backend simply didn't show up in the payment form and the only error was about unsupported backend.
|
Python
|
mit
|
anih/django-getpaid,glowka/django-getpaid,pawciobiel/django-getpaid,mionch/django-getpaid,dekoza/django-getpaid,dekoza/django-getpaid,mionch/django-getpaid,glowka/django-getpaid,nielsonsantana/django-getpaid,anih/django-getpaid,cypreess/django-getpaid,kamilglod/django-getpaid,pawciobiel/django-getpaid,cypreess/django-getpaid,nielsonsantana/django-getpaid,kamilglod/django-getpaid
|
b005d0b5eae4328e1482d0571f4dbc7164fef21f
|
app/eve_api/__init__.py
|
app/eve_api/__init__.py
|
VERSION = (0, 1)
# Dynamically calculate the version based on VERSION tuple
if len(VERSION)>2 and VERSION[2] is not None:
str_version = "%d.%d_%s" % VERSION[:3]
else:
str_version = "%d.%d" % VERSION[:2]
__version__ = str_version
|
Add versioning information to eve_api
|
Add versioning information to eve_api
|
Python
|
bsd-3-clause
|
nikdoof/test-auth
|
|
47831156874d31dcf9b8b61118399cb5ac77632c
|
PyFVCOM/__init__.py
|
PyFVCOM/__init__.py
|
"""
The FVCOM Python toolbox (PyFvcom)
"""
__version__ = '1.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'
import inspect
from warnings import warn
# Import numpy so we have it across the board.
import numpy as np
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import img2xyz
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
# External TAPPY now instead of my bundled version. Requires my forked version
# of TAPPY from https://github.com/pwcazenave/tappy or
# http://gitlab.em.pml.ac.uk/pica/tappy.
from tappy import tappy
# For backwards-compatibility.
process_FVCOM_results = process_results
read_FVCOM_results = read_results
|
"""
The FVCOM Python toolbox (PyFvcom)
"""
__version__ = '1.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'
import inspect
from warnings import warn
# Import numpy so we have it across the board.
import numpy as np
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
# External TAPPY now instead of my bundled version. Requires my forked version
# of TAPPY from https://github.com/pwcazenave/tappy or
# http://gitlab.em.pml.ac.uk/pica/tappy.
from tappy import tappy
# For backwards-compatibility.
process_FVCOM_results = process_results
read_FVCOM_results = read_results
|
Remove the (dodgy) function to convert from an image to data.
|
Remove the (dodgy) function to convert from an image to data.
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
9c40a87c5f7d261550c860a69e2679feda53d884
|
demo_app/demo_app/settings_test.py
|
demo_app/demo_app/settings_test.py
|
# -*- coding: utf-8 -*-
import warnings
from .settings import * # noqa: F403,F401
# Handle system warning as log messages
warnings.simplefilter("once")
for handler in LOGGING.get("handlers", []):
LOGGING["handlers"][handler]["level"] = "CRITICAL"
for logger in LOGGING.get("loggers", []):
LOGGING["loggers"][logger]["level"] = "CRITICAL"
mysql_db = DATABASES["default"]
DEFAULT_DB = {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"TEST": {"MIGRATE": False},
}
if os.environ.get("DB_TYPE") == "mysql":
print("Using MySQL Backend!")
DEFAULT_DB = mysql_db
DATABASES = {
"default": DEFAULT_DB,
}
|
# -*- coding: utf-8 -*-
import warnings
from .settings import * # noqa: F403,F401
# Handle system warning as log messages
warnings.simplefilter("once")
for handler in LOGGING.get("handlers", []):
LOGGING["handlers"][handler]["level"] = "CRITICAL"
for logger in LOGGING.get("loggers", []):
LOGGING["loggers"][logger]["level"] = "CRITICAL"
mysql_db = DATABASES["default"]
DEFAULT_DB = {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"TEST": {"MIGRATE": False},
}
if os.environ.get("DB_TYPE") == "mysql":
print("Using MySQL Backend!")
DEFAULT_DB = mysql_db
DATABASES = {
"default": DEFAULT_DB,
}
|
Unify and simplify for tests.
|
Unify and simplify for tests.
|
Python
|
apache-2.0
|
pivotal-energy-solutions/django-datatable-view,pivotal-energy-solutions/django-datatable-view,pivotal-energy-solutions/django-datatable-view
|
751f40ef23250cf9fad1374359393588edee477a
|
back/blog/models/base.py
|
back/blog/models/base.py
|
from sqlalchemy.ext.declarative import declared_attr
from blog.lib.database import db
class ModelMixin(object):
"""A base mixin for all models."""
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
def __str__(self):
return '<{} (id={})>'.format(self.__class__.__name__, self.id_)
def __repr__(self):
return str(self)
id_ = db.Column('id', db.Integer, primary_key=True)
def get_dictionary(self):
d = {}
for column in self.__table__.columns:
key = 'id_' if column.key == 'id' else column.key
d[key] = getattr(self, key)
return d
def update(self, d):
for column in self.__table__.columns:
if column.key == 'id_':
continue
setattr(
self, column.key, d.get(
column.key, getattr(self, column.key)
)
)
|
from sqlalchemy.ext.declarative import declared_attr
from blog.lib.database import db
class ModelMixin(object):
"""A base mixin for all models."""
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
def __str__(self):
return '<{} (id={})>'.format(self.__class__.__name__, self.id_)
def __repr__(self):
return str(self)
id_ = db.Column('id', db.Integer, primary_key=True)
def get_dictionary(self):
d = {}
for column in self.__table__.columns:
if column.key == 'id':
d['id'] = getattr(self, 'id_')
else:
d[column.key] = getattr(self, column.key)
return d
def update(self, d):
for column in self.__table__.columns:
if column.key == 'id_':
continue
setattr(
self, column.key, d.get(
column.key, getattr(self, column.key)
)
)
|
Return "id" key to front instead of "id_".
|
Return "id" key to front instead of "id_".
|
Python
|
mit
|
astex/living-with-django,astex/living-with-django,astex/living-with-django
|
564d54c377bf6a8c16cae3681934cc7ba5007c76
|
bundledApps/wailEndpoint.py
|
bundledApps/wailEndpoint.py
|
import tornado.ioloop
import tornado.web
import requests
host = 'localhost'
waybackPort = '8080'
archiveConfigFile = '/Applications/WAIL.app/config/archive.json'
class MainHandler(tornado.web.RequestHandler):
def get(self):
iwa = isWaybackAccessible()
print iwa
self.write(iwa)
def make_app():
return tornado.web.Application([
(r"/", MainHandler),
])
def isWaybackAccessible():
try:
r = requests.get('http://' + host + ':' + waybackPort)
with open(archiveConfigFile, 'r') as myfile:
data=myfile.read()
return data
except requests.exceptions.ConnectionError as e:
return ''
if __name__ == "__main__":
app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
|
import tornado.ioloop
import tornado.web
import requests
host = 'localhost'
waybackPort = '8080'
# Use a separate JSON file that only queries the local WAIL instance for MemGator
archiveConfigFile = '/Applications/WAIL.app/config/archive.json'
class MainHandler(tornado.web.RequestHandler):
def get(self):
iwa = isWaybackAccessible()
print iwa
self.write(iwa)
def make_app():
return tornado.web.Application([
(r"/", MainHandler),
])
def isWaybackAccessible():
try:
r = requests.get('http://' + host + ':' + waybackPort)
with open(archiveConfigFile, 'r') as myfile:
data=myfile.read()
return data
except requests.exceptions.ConnectionError as e:
return ''
if __name__ == "__main__":
app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
|
Add comment to justify separate JSON file existence
|
Add comment to justify separate JSON file existence
|
Python
|
mit
|
machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail
|
b32f4955665b8618a9623f6898a15d4da40dc58e
|
dxtbx/command_line/print_header.py
|
dxtbx/command_line/print_header.py
|
def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
|
def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
print 'Using header reader: %s' % format.__name__
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
|
Print the Format class used
|
Print the Format class used
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
c790055fa7e6810703599bc0124507133b8a55fc
|
crispy_forms/compatibility.py
|
crispy_forms/compatibility.py
|
import sys
try:
basestring
except:
basestring = str # Python3
PY2 = sys.version_info[0] == 2
if not PY2:
text_type = str
binary_type = bytes
string_types = (str,)
integer_types = (int,)
else:
text_type = unicode
binary_type = str
string_types = basestring
integer_types = (int, long)
try:
# avoid RemovedInDjango19Warning by using lru_cache where available
from django.utils.lru_cache import lru_cache as memoize
except:
from django.utils.functional import memoize
|
import sys
try:
basestring
except:
basestring = str # Python3
PY2 = sys.version_info[0] == 2
if not PY2:
text_type = str
binary_type = bytes
string_types = (str,)
integer_types = (int,)
else:
text_type = unicode
binary_type = str
string_types = basestring
integer_types = (int, long)
try:
# avoid RemovedInDjango19Warning by using lru_cache where available
from django.utils.lru_cache import lru_cache
def memoize(function, *args):
return lru_cache()(function)
except:
from django.utils.functional import memoize
|
Fix lru_cache import as memoize
|
Fix lru_cache import as memoize
Thanks to @jcomeauictx for the heads up
|
Python
|
mit
|
scuml/django-crispy-forms,VishvajitP/django-crispy-forms,saydulk/django-crispy-forms,alanwj/django-crispy-forms,schrd/django-crispy-forms,bouttier/django-crispy-forms,smirolo/django-crispy-forms,saydulk/django-crispy-forms,IanLee1521/django-crispy-forms,zixan/django-crispy-forms,Stranger6667/django-crispy-forms,RamezIssac/django-crispy-forms,maraujop/django-crispy-forms,alanwj/django-crispy-forms,iris-edu/django-crispy-forms,dzhuang/django-crispy-forms,ngenovictor/django-crispy-forms,damienjones/django-crispy-forms,VishvajitP/django-crispy-forms,iris-edu-int/django-crispy-forms,RamezIssac/django-crispy-forms,django-crispy-forms/django-crispy-forms,damienjones/django-crispy-forms,schrd/django-crispy-forms,davidszotten/django-crispy-forms,spectras/django-crispy-forms,IanLee1521/django-crispy-forms,avsd/django-crispy-forms,carltongibson/django-crispy-forms,agepoly/django-crispy-forms,zixan/django-crispy-forms,dzhuang/django-crispy-forms,scuml/django-crispy-forms,avsd/django-crispy-forms,jtyoung/django-crispy-forms,iris-edu/django-crispy-forms,Stranger6667/django-crispy-forms,ngenovictor/django-crispy-forms,tarunlnmiit/django-crispy-forms,impulse-cloud/django-crispy-forms,django-crispy-forms/django-crispy-forms,tarunlnmiit/django-crispy-forms,dessibelle/django-crispy-forms,carltongibson/django-crispy-forms,spectras/django-crispy-forms,maraujop/django-crispy-forms,treyhunner/django-crispy-forms,davidszotten/django-crispy-forms,impulse-cloud/django-crispy-forms,jtyoung/django-crispy-forms,iris-edu-int/django-crispy-forms,treyhunner/django-crispy-forms,dessibelle/django-crispy-forms,bouttier/django-crispy-forms,agepoly/django-crispy-forms,smirolo/django-crispy-forms
|
3a18c25ef019a9a54475419bfabc4b6e2776df9c
|
lib/unsubscribe.py
|
lib/unsubscribe.py
|
from lxml.html import fromstring as lxml_from_string
from unidecode import unidecode
UNSUBSCRIBE_MARKERS = [
# English
"unsub", "blacklist", "opt-out", "opt out",
# French
"desinscription", "desinscrire", "desabonner", "desabonnement",
"ne souhaitez plus", "ne plus recevoir", "cesser de recevoir"
]
def FindUnsubscribeLink(message):
unsubscribe_link = None
unsubscribe_links = []
for part in message.walk():
if part.get_content_type() == 'text/html':
html = part.get_payload(decode=True)
doc = lxml_from_string(html)
for element, attribute, link, pos in doc.iterlinks():
link_content = unidecode(element.text_content()).lower()
link = link.lower()
unsubscribe_links.append((repr(link_content)[0:100], link[0:100]))
for pattern in UNSUBSCRIBE_MARKERS:
if (pattern in link_content) or (pattern in link):
unsubscribe_link = link
return unsubscribe_link, unsubscribe_links
|
from lxml.html import fromstring as lxml_from_string
from unidecode import unidecode
UNSUBSCRIBE_MARKERS = [
# English
"unsub", "blacklist", "opt-out", "opt out", "removealert", "removeme",
# French
"desinscription", "desinscrire", "desabonner", "desabonnement",
"ne souhaitez plus", "ne plus recevoir", "cesser de recevoir"
]
def FindUnsubscribeLink(message):
unsubscribe_link = None
unsubscribe_links = []
for part in message.walk():
if part.get_content_type() == 'text/html':
html = part.get_payload(decode=True)
doc = lxml_from_string(html)
for element, attribute, link, pos in doc.iterlinks():
link_content = unidecode(element.text_content()).lower()
unsubscribe_links.append((repr(link_content)[0:100], link[0:100]))
for pattern in UNSUBSCRIBE_MARKERS:
if (pattern in link_content) or (pattern in link.lower()):
unsubscribe_link = link
return unsubscribe_link, unsubscribe_links
|
Fix a bug with uppercase links
|
Fix a bug with uppercase links
|
Python
|
mit
|
sylvinus/reclaim-my-gmail-inbox
|
a9405eaf838842688262689d665f30ae3cebfdea
|
django_migration_linter/cache.py
|
django_migration_linter/cache.py
|
import os
import pickle
class Cache(dict):
def __init__(self, django_folder, database, cache_path):
self.filename = os.path.join(
cache_path,
"{}_{}.pickle".format(django_folder.replace(os.sep, "_"), database),
)
if not os.path.exists(os.path.dirname(self.filename)):
os.makedirs(os.path.dirname(self.filename))
super().__init__()
def load(self):
try:
with open(self.filename, "rb") as f:
tmp_dict = pickle.load(f)
self.update(tmp_dict)
except OSError:
pass
def save(self):
with open(self.filename, "wb") as f:
pickle.dump(self, f, protocol=2)
|
import os
import pickle
class Cache(dict):
def __init__(self, django_folder, database, cache_path):
self.filename = os.path.join(
cache_path,
"{}_{}.pickle".format(str(django_folder).replace(os.sep, "_"), database),
)
if not os.path.exists(os.path.dirname(self.filename)):
os.makedirs(os.path.dirname(self.filename))
super().__init__()
def load(self):
try:
with open(self.filename, "rb") as f:
tmp_dict = pickle.load(f)
self.update(tmp_dict)
except OSError:
pass
def save(self):
with open(self.filename, "wb") as f:
pickle.dump(self, f, protocol=2)
|
Support `Path` type for `project_root_path`
|
feat: Support `Path` type for `project_root_path`
Django has switched to using pathlib.Path in startproject. This adds support for these in the project root path config option.
|
Python
|
apache-2.0
|
3YOURMIND/django-migration-linter
|
593e826b24d83997a5be450be1401e16ec17c07c
|
application.py
|
application.py
|
#!/usr/bin/env python
from __future__ import print_function
import os
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
@manager.command
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
from __future__ import print_function
import os
from dmutils import init_manager
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = init_manager(application, 5000, ['./json_schemas'])
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Use new dmutils init_manager to set up reload on schema changes
|
Use new dmutils init_manager to set up reload on schema changes
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
39603bde90ebad7e0d70e41403a9a971867dcbac
|
backend/breach/views.py
|
backend/breach/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
def get_work(request):
return HttpResponse('Not implemented')
def work_completed(request):
return HttpResponse('Not implemented')
|
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
def get_work(request):
return HttpResponse('Not implemented')
@csrf_exempt
def work_completed(request):
return HttpResponse('Not implemented')
|
Allow POST request to work_completed view
|
Allow POST request to work_completed view
|
Python
|
mit
|
esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture
|
a633fd37a4d795e7b565254ef10aaa0f2ad77f31
|
vcontrol/rest/machines/shutdown.py
|
vcontrol/rest/machines/shutdown.py
|
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
web.header('Access-Control-Allow-Origin', self.allow_origin)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
try:
web.header('Access-Control-Allow-Origin', self.allow_origin)
except Exception as e: # no pragma
print(e.message)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
Put the web.header function in a try/except block
|
Put the web.header function in a try/except block
|
Python
|
apache-2.0
|
cglewis/vcontrol,CyberReboot/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol
|
633c3a356a0ed88c00fbb1a5c972171de2255890
|
dinosaurs/transaction/database.py
|
dinosaurs/transaction/database.py
|
from peewee import *
db = SqliteDatabase('emails.db')
class Transaction(Model):
cost = FloatField()
address = CharField()
tempPass = CharField()
domain = CharField(index=True)
email = CharField(primary_key=True, unique=True)
is_complete = BooleanField(default=False, index=True)
class Meta:
database = db
|
from datetime import datetime
from peewee import *
from dinosaurs import settings
from dinosaurs.transaction.coin import generate_address
db = SqliteDatabase(settings.database)
class Transaction(Model):
cost = FloatField()
address = CharField()
started = DateField()
tempPass = CharField()
domain = CharField(index=True)
email = CharField(primary_key=True, unique=True)
is_complete = BooleanField(default=False, index=True)
class Meta:
database = db
def __init__(self, *args, **kwargs):
kwargs['started'] = datetime.now()
kwargs['address'] = generate_address()
super(Transaction, self).__init__(*args, **kwargs)
@property
def expired(self):
return (datetime.now() - self.started).minutes > 4
@property
def seconds_left(self):
return (datetime.now() - self.started).total_seconds
|
Update what a transaction is
|
Update what a transaction is
|
Python
|
mit
|
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
|
820ddf412d09f10977b4bec525d478cc55fe443b
|
math/prime_test.py
|
math/prime_test.py
|
'''
prime_test(n) returns a True if n is a prime number else it returns False
'''
def prime_test(n):
if n <= 1:
return False
if n==2 or n==3:
return True
if n%2==0 or n%3==0:
return False
j = 5
while(j*j <= n):
if n%(j)==0 or n%(j+2)==0:
return False
j += 6
return True
def prime_test(n):
# prime numbers are greater than 1
if num > 1:
# check for factors
for i in range(2,num):
if (num % i) == 0:
print(num,"is not a prime number")
print(i,"times",num//i,"is",num)
break
else:
print(num,"is a prime number")
# if input number is less than
# or equal to 1, it is not prime
else:
print(num,"is not a prime number")
|
'''
prime_test(n) returns a True if n is a prime number else it returns False
'''
def prime_test(n):
if n <= 1:
return False
if n==2 or n==3:
return True
if n%2==0 or n%3==0:
return False
j = 5
while(j*j <= n):
if n%(j)==0 or n%(j+2)==0:
return False
j += 6
return True
def prime_test(n):
# prime numbers are greater than 1
if num > 1:
# check for factors
for i in range(2,num):
if (num % i) == 0:
#print(num,"is not a prime number")
#print(i,"times",num//i,"is",num)
return False
break
else:
#print(num,"is a prime number")
return True
# if input number is less than
# or equal to 1, it is not prime
else:
#print(num,"is not a prime number")
return False
|
Change the return type to boolean
|
Change the return type to boolean
|
Python
|
mit
|
amaozhao/algorithms,keon/algorithms
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.