commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---
d0e8d8d9e6e029f0af5dd50e507565fb67bf90b1
|
update example script
|
daler/ucscsession
|
ucscsession/scripts/ucscsession_example.py
|
ucscsession/scripts/ucscsession_example.py
|
import webbrowser
from ucscsession import UCSCSession
import pybedtools
# -----------------------------------------------------------------------------
# Note: most methods return a requests.Response object
# -----------------------------------------------------------------------------
# -----------------------------------------------------------------------------
# Begin a session. A browser window will not pop up until we call the u.show()
# method (this will happen later...)
u = UCSCSession()
# -----------------------------------------------------------------------------
# Upload custom tracks using example data from pybedtools.
for fn in ['a.bed', 'b.bed']:
    x = pybedtools.example_bedtool(fn)\
        .saveas(trackline='track name=%s' % fn)
    response = u.upload_track(x)
# -----------------------------------------------------------------------------
# Set the position of the browser by providing coordinates -- either as
# a string or with an object that has chrom, start, stop attributes.
response = u.set_position('chr1:1-2000')
# -----------------------------------------------------------------------------
# zoom_in and zoom_out take different levels (1, 2, 3 for 1.5x, 3x, 10x
# respectively), so here we're zooming out 10x
response = u.zoom_out(3)
# -----------------------------------------------------------------------------
# The show() method will open a new tab in your web browser and show a view of
# the current coordinates
response = u.show()
# =============================================================================
# Track configuration
# =============================================================================
# -----------------------------------------------------------------------------
# Tracks are stored as a dictionary of {tracklabel: Track object}.
# So let's inspect our currently-loaded tracks.
from pprint import pprint
pprint(u.tracks)
# -----------------------------------------------------------------------------
# Track visibility can be set in multiple ways. Say we want to set the custom
# tracks to "pack"; in this case u.set_track_visibilities() is the best choice.
#
# We detect the new tracks by looking for the string "bed" in the track label.
items = []
for k, v in u.tracks.items():
    if 'bed' in v.label:
        items.append((k, 'pack'))
u.set_track_visibilities(items)
# -----------------------------------------------------------------------------
# Let's configure the RefSeq track; for convenience save the Track object as
# `t`
t = u.tracks['refGene']
# -----------------------------------------------------------------------------
# Track.config represents a configuration page for a track. There can be one
# or more forms on this page, and each form on the configuration page is
# represented as a mechanize.HTMLForm.
#
# It so happens that the refGene track only has a single form.
form = t.config.forms[0]
# -----------------------------------------------------------------------------
# Printing the form tells us the kinds of things it can do.
#
# It's probably good to look at this along with the page itself
print form
webbrowser.open(t.url)
# -----------------------------------------------------------------------------
# Let's enable all possible labels for the refGene track. The names were
# discovered by inspecting the printed form along with the page in the browser
for control in form.controls:
    if control.type == 'checkbox':
        if 'refGene.label' in control.name:
            form[control.name] = ['on']
form['refGene'] = ['pack']
# -----------------------------------------------------------------------------
# After making the configuration changes, submit the changes using the
# ConfigPage object's `submit()` method
response = t.config.submit()
# -----------------------------------------------------------------------------
# Clean up the view a little bit by hiding some tracks
u.set_track_visibilities([
    ('mrna', 'hide'),
    ('intronEst', 'hide'),
    ('knownGene', 'hide'),
])
# -----------------------------------------------------------------------------
# Save a PDF of the new view
u.pdf(filename="example.pdf")
print "pdf saved"
# -----------------------------------------------------------------------------
# And show the final result in the web browser
u.show()
|
import webbrowser
from ucscsession import UCSCSession
import pybedtools
# Begin a session.
u = UCSCSession()
# Demonstration of uploading custom tracks; this uses example data from
# pybedtools.
for fn in ['a.bed', 'b.bed']:
    x = pybedtools.example_bedtool(fn)\
        .saveas(trackline='track name=%s' % fn)
    r = u.upload_track(x)
# Set the position of the browser by providing coordinates, either as a string
# or with an object that has chrom, start, stop attributes.
u.position('chr1:1-2000')
# zoom_in and zoom_out take different levels (1, 2, 3 for 1.5x, 3x, 10x
# respectively)
r = u.zoom_out(3)
# the show() method will open a new tab in your web browser and show a view of
# the current coordinates
u.show()
# Demo of track configuration.
#
# Track visibility can be set in multiple ways. If all you need is to set
# visibility, then the best way is to do so in bulk, using
# set_track_visibilities().
items = []
for k, v in u.tracks.items():
    if 'bed' in v.label:
        items.append((k, 'pack'))
u.set_track_visibilities(items)
# Alternatively, if you need to set multiple configuration items (as well as
# visibility), then use the track's config page.
#
# Inspect our options for loaded tracks
print u.tracks
# Let's configure the RefSeq track.
t = u.tracks['refGene']
#
# Each form on the configuration page (there happens to be just one form for
# this particular track) is represented as a mechanize.HTMLForm
form = t.config.forms[0]
# Print the form for a good overview of options and current settings. It's
# probably good to look at this along with the page itself
print form
webbrowser.open(t.url)
# Let's enable all possible labels for the refGene track. The names were
# discovered by inspecting the printed form along with the page in the browser
for control in form.controls:
    if control.type == 'checkbox':
        if 'refGene.label' in control.name:
            form[control.name] = ['on']
form['refGene'] = ['pack']
response = t.config.submit()
u.set_track_visibilities([
    ('mrna', 'hide'),
    ('intronEst', 'hide')
])
u.show()
|
bsd-2-clause
|
Python
|
c8ecbda2b8c4d1a03285527dd11a27db74f746e7
|
change IdP configuration order, enabled first
|
appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform,appsembler/edx-platform
|
openedx/core/djangoapps/appsembler/tpa_admin/api.py
|
openedx/core/djangoapps/appsembler/tpa_admin/api.py
|
from rest_framework import generics, viewsets
from rest_framework.permissions import IsAuthenticated
from openedx.core.djangoapps.appsembler.sites.permissions import AMCAdminPermission
from openedx.core.lib.api.authentication import (
    OAuth2AuthenticationAllowInactiveUser,
)
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig
from .serializers import SAMLConfigurationSerializer, SAMLProviderConfigSerializer
class SAMLConfigurationViewSet(viewsets.ModelViewSet):
    queryset = SAMLConfiguration.objects.current_set()
    serializer_class = SAMLConfigurationSerializer
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
    permission_classes = (IsAuthenticated, AMCAdminPermission)
class SAMLConfigurationSiteDetail(generics.RetrieveAPIView):
    serializer_class = SAMLConfigurationSerializer
    lookup_field = 'site_id'
    def get_queryset(self):
        site_id = self.kwargs['site_id']
        return SAMLConfiguration.objects.current_set().filter(site__id=site_id)
class SAMLProviderConfigViewSet(viewsets.ModelViewSet):
    queryset = SAMLProviderConfig.objects.current_set()
    serializer_class = SAMLProviderConfigSerializer
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
    permission_classes = (IsAuthenticated, AMCAdminPermission)
class SAMLProviderSiteDetail(generics.ListAPIView):
    serializer_class = SAMLProviderConfigSerializer
    lookup_field = 'site_id'
    def get_queryset(self):
        site_id = self.kwargs['site_id']
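        # Descending sort on the boolean `enabled` field lists enabled IdPs
        # first (True sorts above False), which is the commit's stated intent.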
        return SAMLProviderConfig.objects.current_set().filter(site__id=site_id).order_by('-enabled')
|
from rest_framework import generics, viewsets
from rest_framework.permissions import IsAuthenticated
from openedx.core.djangoapps.appsembler.sites.permissions import AMCAdminPermission
from openedx.core.lib.api.authentication import (
    OAuth2AuthenticationAllowInactiveUser,
)
from third_party_auth.models import SAMLConfiguration, SAMLProviderConfig
from .serializers import SAMLConfigurationSerializer, SAMLProviderConfigSerializer
class SAMLConfigurationViewSet(viewsets.ModelViewSet):
    queryset = SAMLConfiguration.objects.current_set()
    serializer_class = SAMLConfigurationSerializer
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
    permission_classes = (IsAuthenticated, AMCAdminPermission)
class SAMLConfigurationSiteDetail(generics.RetrieveAPIView):
    serializer_class = SAMLConfigurationSerializer
    lookup_field = 'site_id'
    def get_queryset(self):
        site_id = self.kwargs['site_id']
        return SAMLConfiguration.objects.current_set().filter(site__id=site_id)
class SAMLProviderConfigViewSet(viewsets.ModelViewSet):
    queryset = SAMLProviderConfig.objects.current_set()
    serializer_class = SAMLProviderConfigSerializer
    authentication_classes = (OAuth2AuthenticationAllowInactiveUser,)
    permission_classes = (IsAuthenticated, AMCAdminPermission)
class SAMLProviderSiteDetail(generics.ListAPIView):
    serializer_class = SAMLProviderConfigSerializer
    lookup_field = 'site_id'
    def get_queryset(self):
        site_id = self.kwargs['site_id']
        return SAMLProviderConfig.objects.current_set().filter(site__id=site_id)
|
agpl-3.0
|
Python
|
67a0bab4da1d31aba150ce5cb7831daaea1523de
|
Increase BQ_DEFAULT_TABLE_EXPIRATION_MS in e2etest settings
|
ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing,ebmdatalab/openprescribing
|
openprescribing/openprescribing/settings/e2etest.py
|
openprescribing/openprescribing/settings/e2etest.py
|
from __future__ import absolute_import
from .test import *
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': utils.get_env_setting('E2E_DB_NAME'),
        'USER': utils.get_env_setting('DB_USER'),
        'PASSWORD': utils.get_env_setting('DB_PASS'),
        'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1')
    }
}
PIPELINE_METADATA_DIR = os.path.join(SITE_ROOT, 'pipeline', 'metadata')
PIPELINE_DATA_BASEDIR = os.path.join(
    SITE_ROOT, 'pipeline', 'e2e-test-data', 'data', '')
PIPELINE_IMPORT_LOG_PATH = os.path.join(
    SITE_ROOT, 'pipeline', 'e2e-test-data', 'log.json')
SLACK_SENDING_ACTIVE = True
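# One day in milliseconds (24 * 60 * 60 * 1000 = 86,400,000), presumably so
# BigQuery tables created during end-to-end tests expire on their own.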
BQ_DEFAULT_TABLE_EXPIRATION_MS = 24 * 60 * 60 * 1000 # 24 hours
|
from __future__ import absolute_import
from .test import *
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'NAME': utils.get_env_setting('E2E_DB_NAME'),
        'USER': utils.get_env_setting('DB_USER'),
        'PASSWORD': utils.get_env_setting('DB_PASS'),
        'HOST': utils.get_env_setting('DB_HOST', '127.0.0.1')
    }
}
PIPELINE_METADATA_DIR = os.path.join(SITE_ROOT, 'pipeline', 'metadata')
PIPELINE_DATA_BASEDIR = os.path.join(
    SITE_ROOT, 'pipeline', 'e2e-test-data', 'data', '')
PIPELINE_IMPORT_LOG_PATH = os.path.join(
    SITE_ROOT, 'pipeline', 'e2e-test-data', 'log.json')
SLACK_SENDING_ACTIVE = True
|
mit
|
Python
|
2660096db01f88cd0e71860935862fe969204666
|
Fix a script missed in refactor
|
somic/paasta,Yelp/paasta,somic/paasta,Yelp/paasta
|
paasta_tools/contrib/check_registered_slaves_aws.py
|
paasta_tools/contrib/check_registered_slaves_aws.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_scaler
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
    mesos_state = get_mesos_master().state
    autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
    for resource in autoscaling_resources.values():
        print("Checking %s" % resource['id'])
        try:
            scaler = get_scaler(resource['type'])(resource=resource,
                                                  pool_settings=None,
                                                  config_folder=None,
                                                  dry_run=True)
        except KeyError:
            print("Couldn't find a metric provider for resource of type: {0}".format(resource['type']))
            continue
        if len(scaler.instances) == 0:
            print("No instances for this resource")
            continue
        else:
            slaves = scaler.get_aws_slaves(mesos_state)
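            # The float() casts guard against Python 2 integer division (this
            # module doesn't import division from __future__).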
            percent_registered = float(float(len(slaves)) / float(len(scaler.instances))) * 100
            if percent_registered < float(threshold_percentage):
                print("CRIT: Only found {0}% of instances in {1} registered in mesos. "
                      "Please check for puppet or AMI baking problems!".format(percent_registered,
                                                                               resource['id']))
                return False
    print("OK: Found more than {0}% of instances registered for all paasta resources in this "
          "superregion".format(threshold_percentage))
    return True
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
                        default="75")
    threshold = parser.parse_args().threshold
    if check_registration(threshold):
        sys.exit(0)
    sys.exit(2)
if __name__ == "__main__":
    main()
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import unicode_literals
import argparse
import sys
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_sfr_slaves
from paasta_tools.autoscaling.autoscaling_cluster_lib import get_spot_fleet_instances
from paasta_tools.mesos_tools import get_mesos_master
from paasta_tools.utils import load_system_paasta_config
def check_registration(threshold_percentage):
    mesos_state = get_mesos_master().state
    autoscaling_resources = load_system_paasta_config().get_cluster_autoscaling_resources()
    for resource in autoscaling_resources.values():
        if resource['type'] == 'aws_spot_fleet_request':
            resource['sfr'] = get_sfr(resource['id'], region=resource['region'])
            instances = get_spot_fleet_instances(resource['id'], region=resource['region'])
            resource['sfr']['ActiveInstances'] = instances
            slaves = get_sfr_slaves(resource, mesos_state)
            if len(instances) == 0:
                continue
            else:
                percent_registered = float(float(len(slaves)) / float(len(instances))) * 100
                if percent_registered < float(threshold_percentage):
                    print "CRIT: Only found {0}% of instances in {1} registered in mesos. "\
                          "Please check for puppet or AMI baking problems!".format(percent_registered,
                                                                                   resource['id'])
                    return False
    print "OK: Found more than {0}% of instances registered for all paasta resources in this "\
          "superregion".format(threshold_percentage)
    return True
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("-t", "--threshold", help="percentage threshold for registered instances",
                        default="75")
    threshold = parser.parse_args().threshold
    if check_registration(threshold):
        sys.exit(0)
    sys.exit(2)
if __name__ == "__main__":
    main()
|
apache-2.0
|
Python
|
d406cf8f4812fce7314f10d3a4b1303d54230099
|
Remove unused import.
|
pyhmsa/pyhmsa
|
src/pyhmsa/util/signal.py
|
src/pyhmsa/util/signal.py
|
#!/usr/bin/env python
"""
================================================================================
:mod:`signal` -- Signal pattern
================================================================================
.. module:: signal
   :synopsis: signal pattern
.. inheritance-diagram:: pyhmsa.util.signal
"""
# Script information for the file.
__author__ = "Philippe T. Pinard"
__email__ = "[email protected]"
__version__ = "0.1"
__copyright__ = "Copyright (c) 2014 Philippe T. Pinard"
__license__ = "GPL v3"
# Standard library modules.
# Third party modules.
# Local modules.
# Globals and constants variables.
class Signal(object):
    def __init__(self):
        self._handlers = set()
    def connect(self, handler):
        self._handlers.add(handler)
    def fire(self, *args):
        for handler in self._handlers:
            handler(*args)
|
#!/usr/bin/env python
"""
================================================================================
:mod:`signal` -- Signal pattern
================================================================================
.. module:: signal
   :synopsis: signal pattern
.. inheritance-diagram:: pyhmsa.util.signal
"""
# Script information for the file.
__author__ = "Philippe T. Pinard"
__email__ = "[email protected]"
__version__ = "0.1"
__copyright__ = "Copyright (c) 2014 Philippe T. Pinard"
__license__ = "GPL v3"
# Standard library modules.
from weakref import WeakSet
# Third party modules.
# Local modules.
# Globals and constants variables.
class Signal(object):
    def __init__(self):
        self._handlers = set()
    def connect(self, handler):
        self._handlers.add(handler)
    def fire(self, *args):
        for handler in self._handlers:
            handler(*args)
|
mit
|
Python
|
9a5135f9cd27cf24d27b2393fd071073b4485ac7
|
add test gat_plot_slice
|
dgwakeman/mne-python,pravsripad/mne-python,larsoner/mne-python,wmvanvliet/mne-python,jniediek/mne-python,adykstra/mne-python,trachelr/mne-python,rkmaddox/mne-python,leggitta/mne-python,Teekuningas/mne-python,Teekuningas/mne-python,cjayb/mne-python,larsoner/mne-python,jmontoyam/mne-python,cmoutard/mne-python,Eric89GXL/mne-python,olafhauk/mne-python,drammock/mne-python,matthew-tucker/mne-python,leggitta/mne-python,wronk/mne-python,drammock/mne-python,Odingod/mne-python,cmoutard/mne-python,yousrabk/mne-python,ARudiuk/mne-python,mne-tools/mne-python,kambysese/mne-python,jaeilepp/mne-python,aestrivex/mne-python,larsoner/mne-python,bloyl/mne-python,andyh616/mne-python,jniediek/mne-python,pravsripad/mne-python,andyh616/mne-python,kingjr/mne-python,mne-tools/mne-python,Eric89GXL/mne-python,olafhauk/mne-python,dimkal/mne-python,teonlamont/mne-python,yousrabk/mne-python,wmvanvliet/mne-python,nicproulx/mne-python,ARudiuk/mne-python,wmvanvliet/mne-python,dimkal/mne-python,adykstra/mne-python,drammock/mne-python,rkmaddox/mne-python,mne-tools/mne-python,dgwakeman/mne-python,olafhauk/mne-python,teonlamont/mne-python,antiface/mne-python,wronk/mne-python,alexandrebarachant/mne-python,Odingod/mne-python,antiface/mne-python,pravsripad/mne-python,nicproulx/mne-python,kingjr/mne-python,kambysese/mne-python,jaeilepp/mne-python,lorenzo-desantis/mne-python,jmontoyam/mne-python,bloyl/mne-python,aestrivex/mne-python,Teekuningas/mne-python,trachelr/mne-python,matthew-tucker/mne-python,kingjr/mne-python,lorenzo-desantis/mne-python,alexandrebarachant/mne-python,cjayb/mne-python
|
mne/viz/tests/test_decoding.py
|
mne/viz/tests/test_decoding.py
|
# Authors: Denis Engemann <[email protected]>
#
# License: Simplified BSD
import os.path as op
import warnings
from nose.tools import assert_raises
from mne.decoding import GeneralizationAcrossTime
from mne import io, Epochs, read_events, pick_types
from mne.utils import requires_sklearn, run_tests_if_main
import matplotlib
matplotlib.use('Agg') # for testing don't use X server
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
warnings.simplefilter('always') # enable b/c these tests throw warnings
# Set our plotters to test mode
tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
event_id_gen = dict(aud_l=2, vis_l=4)
@requires_sklearn
def _get_data():
"""Aux function for testing GAT viz"""
gat = GeneralizationAcrossTime()
raw = io.Raw(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg='mag', stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
decim = 30
# Test on time generalization within one condition
with warnings.catch_warnings(record=True):
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True, decim=decim)
# Test default running
gat = GeneralizationAcrossTime()
gat.fit(epochs)
gat.score(epochs)
return gat
def test_gat_plot_matrix():
"""Test GAT matrix plot"""
gat = _get_data()
gat.plot()
del gat.scores_
assert_raises(RuntimeError, gat.plot)
def test_gat_plot_diagonal():
"""Test GAT diagonal plot"""
gat = _get_data()
gat.plot_diagonal()
del gat.scores_
assert_raises(RuntimeError, gat.plot)
def test_gat_plot_slice():
"""Test GAT slice plot"""
gat = _get_data()
gat.plot_slice(gat.train_time['times_'][0])
# test invalid time point
assert_raises(ValueError, gat.plot_slice, -1.)
# test float type
assert_raises(ValueError, gat.plot_slice, 1)
del gat.scores_
assert_raises(RuntimeError, gat.plot)
run_tests_if_main()
|
# Authors: Denis Engemann <[email protected]>
#
# License: Simplified BSD
import os.path as op
import warnings
from nose.tools import assert_raises
from mne.decoding import GeneralizationAcrossTime
from mne import io, Epochs, read_events, pick_types
from mne.utils import requires_sklearn, run_tests_if_main
import matplotlib
matplotlib.use('Agg') # for testing don't use X server
data_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(data_dir, 'test_raw.fif')
event_name = op.join(data_dir, 'test-eve.fif')
warnings.simplefilter('always') # enable b/c these tests throw warnings
# Set our plotters to test mode
tmin, tmax = -0.2, 0.5
event_id = dict(aud_l=1, vis_l=3)
event_id_gen = dict(aud_l=2, vis_l=4)
@requires_sklearn
def _get_data():
"""Aux function for testing GAT viz"""
gat = GeneralizationAcrossTime()
raw = io.Raw(raw_fname, preload=False)
events = read_events(event_name)
picks = pick_types(raw.info, meg='mag', stim=False, ecg=False,
eog=False, exclude='bads')
picks = picks[1:13:3]
decim = 30
# Test on time generalization within one condition
with warnings.catch_warnings(record=True):
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
baseline=(None, 0), preload=True, decim=decim)
# Test default running
gat = GeneralizationAcrossTime()
gat.fit(epochs)
gat.score(epochs)
return gat
def test_gat_plot_matrix():
"""Test GAT matrix plot"""
gat = _get_data()
gat.plot()
del gat.scores_
assert_raises(RuntimeError, gat.plot)
def test_gat_plot_diagonal():
"""Test GAT diagonal plot"""
gat = _get_data()
gat.plot_diagonal()
del gat.scores_
assert_raises(RuntimeError, gat.plot)
run_tests_if_main()
|
bsd-3-clause
|
Python
|
476d7da17c7d22415cbd16b625ba8e443a750f0f
|
update change_upstream_proxy example
|
syjzwjj/mitmproxy,meizhoubao/mitmproxy,mosajjal/mitmproxy,dufferzafar/mitmproxy,gzzhanghao/mitmproxy,tekii/mitmproxy,Kriechi/mitmproxy,ikoz/mitmproxy,syjzwjj/mitmproxy,byt3bl33d3r/mitmproxy,onlywade/mitmproxy,noikiy/mitmproxy,ZeYt/mitmproxy,syjzwjj/mitmproxy,ParthGanatra/mitmproxy,ikoz/mitmproxy,cortesi/mitmproxy,fimad/mitmproxy,StevenVanAcker/mitmproxy,claimsmall/mitmproxy,devasia1000/mitmproxy,zlorb/mitmproxy,owers19856/mitmproxy,zbuc/mitmproxy,ryoqun/mitmproxy,dwfreed/mitmproxy,ZeYt/mitmproxy,zlorb/mitmproxy,noikiy/mitmproxy,inscriptionweb/mitmproxy,rauburtin/mitmproxy,elitest/mitmproxy,xaxa89/mitmproxy,jpic/mitmproxy,dufferzafar/mitmproxy,Fuzion24/mitmproxy,xbzbing/mitmproxy,noikiy/mitmproxy,ZeYt/mitmproxy,sethp-jive/mitmproxy,mhils/mitmproxy,legendtang/mitmproxy,xtso520ok/mitmproxy,bltb/mitmproxy,xaxa89/mitmproxy,onlywade/mitmproxy,azureplus/mitmproxy,ryoqun/mitmproxy,MatthewShao/mitmproxy,laurmurclar/mitmproxy,0x0mar/mitmproxy,tdickers/mitmproxy,ADemonisis/mitmproxy,cortesi/mitmproxy,Endika/mitmproxy,liorvh/mitmproxy,fimad/mitmproxy,inscriptionweb/mitmproxy,byt3bl33d3r/mitmproxy,scriptmediala/mitmproxy,zbuc/mitmproxy,mosajjal/mitmproxy,macmantrl/mitmproxy,Fuzion24/mitmproxy,dweinstein/mitmproxy,mitmproxy/mitmproxy,MatthewShao/mitmproxy,ccccccccccc/mitmproxy,tfeagle/mitmproxy,liorvh/mitmproxy,dweinstein/mitmproxy,fimad/mitmproxy,macmantrl/mitmproxy,guiquanz/mitmproxy,mhils/mitmproxy,devasia1000/mitmproxy,mitmproxy/mitmproxy,devasia1000/mitmproxy,onlywade/mitmproxy,ddworken/mitmproxy,sethp-jive/mitmproxy,noikiy/mitmproxy,dxq-git/mitmproxy,byt3bl33d3r/mitmproxy,laurmurclar/mitmproxy,ddworken/mitmproxy,Fuzion24/mitmproxy,claimsmall/mitmproxy,jvillacorta/mitmproxy,pombredanne/mitmproxy,vhaupert/mitmproxy,ccccccccccc/mitmproxy,rauburtin/mitmproxy,jvillacorta/mitmproxy,ujjwal96/mitmproxy,dufferzafar/mitmproxy,jpic/mitmproxy,Endika/mitmproxy,0x0mar/mitmproxy,jpic/mitmproxy,ccccccccccc/mitmproxy,laurmurclar/mitmproxy,claimsmall/mitmproxy,bltb/mitmproxy,gzzhanghao/mitmproxy,elitest/mitmproxy,inscriptionweb/mitmproxy,claimsmall/mitmproxy,mitmproxy/mitmproxy,pombredanne/mitmproxy,devasia1000/mitmproxy,bazzinotti/mitmproxy,devasia1000/anti_adblock,jpic/mitmproxy,dwfreed/mitmproxy,tdickers/mitmproxy,gzzhanghao/mitmproxy,dweinstein/mitmproxy,legendtang/mitmproxy,liorvh/mitmproxy,0xwindows/InfoLeak,zbuc/mitmproxy,guiquanz/mitmproxy,mhils/mitmproxy,elitest/mitmproxy,tfeagle/mitmproxy,ParthGanatra/mitmproxy,Endika/mitmproxy,ryoqun/mitmproxy,ddworken/mitmproxy,ADemonisis/mitmproxy,tekii/mitmproxy,Fuzion24/mitmproxy,mitmproxy/mitmproxy,StevenVanAcker/mitmproxy,Endika/mitmproxy,ZeYt/mitmproxy,devasia1000/anti_adblock,ujjwal96/mitmproxy,zlorb/mitmproxy,tekii/mitmproxy,dufferzafar/mitmproxy,Kriechi/mitmproxy,0x0mar/mitmproxy,laurmurclar/mitmproxy,legendtang/mitmproxy,mitmproxy/mitmproxy,vhaupert/mitmproxy,dweinstein/mitmproxy,Kriechi/mitmproxy,0xwindows/InfoLeak,dwfreed/mitmproxy,pombredanne/mitmproxy,gzzhanghao/mitmproxy,mhils/mitmproxy,bazzinotti/mitmproxy,tfeagle/mitmproxy,dxq-git/mitmproxy,azureplus/mitmproxy,sethp-jive/mitmproxy,xbzbing/mitmproxy,xtso520ok/mitmproxy,onlywade/mitmproxy,owers19856/mitmproxy,scriptmediala/mitmproxy,scriptmediala/mitmproxy,owers19856/mitmproxy,rauburtin/mitmproxy,ikoz/mitmproxy,jvillacorta/mitmproxy,ccccccccccc/mitmproxy,meizhoubao/mitmproxy,vhaupert/mitmproxy,tdickers/mitmproxy,meizhoubao/mitmproxy,zbuc/mitmproxy,xbzbing/mitmproxy,ujjwal96/mitmproxy,macmantrl/mitmproxy,inscriptionweb/mitmproxy,azureplus/mitmproxy,meizhoubao/mitmproxy,mosajjal/mitmproxy,guiquanz/mitmproxy,
0xwindows/InfoLeak,0xwindows/InfoLeak,ParthGanatra/mitmproxy,legendtang/mitmproxy,vhaupert/mitmproxy,liorvh/mitmproxy,mhils/mitmproxy,dwfreed/mitmproxy,dxq-git/mitmproxy,ryoqun/mitmproxy,StevenVanAcker/mitmproxy,ADemonisis/mitmproxy,elitest/mitmproxy,ddworken/mitmproxy,ADemonisis/mitmproxy,scriptmediala/mitmproxy,bltb/mitmproxy,zlorb/mitmproxy,cortesi/mitmproxy,sethp-jive/mitmproxy,ParthGanatra/mitmproxy,syjzwjj/mitmproxy,byt3bl33d3r/mitmproxy,ikoz/mitmproxy,tdickers/mitmproxy,ujjwal96/mitmproxy,xaxa89/mitmproxy,jvillacorta/mitmproxy,bltb/mitmproxy,xaxa89/mitmproxy,Kriechi/mitmproxy,fimad/mitmproxy,rauburtin/mitmproxy,StevenVanAcker/mitmproxy,tfeagle/mitmproxy,MatthewShao/mitmproxy,cortesi/mitmproxy,macmantrl/mitmproxy,azureplus/mitmproxy,guiquanz/mitmproxy,dxq-git/mitmproxy,owers19856/mitmproxy,mosajjal/mitmproxy,pombredanne/mitmproxy,bazzinotti/mitmproxy,tekii/mitmproxy,xtso520ok/mitmproxy,MatthewShao/mitmproxy,bazzinotti/mitmproxy,xbzbing/mitmproxy,devasia1000/anti_adblock
|
examples/change_upstream_proxy.py
|
examples/change_upstream_proxy.py
|
# This script demonstrates how mitmproxy can switch to a second/different upstream proxy
# in upstream proxy mode.
#
# Usage: mitmdump -U http://default-upstream-proxy.local:8080/ -s "change_upstream_proxy.py host"
from libmproxy.protocol.http import send_connect_request
alternative_upstream_proxy = ("localhost", 8082)
def should_redirect(flow):
    return flow.request.host == "example.com"
def request(context, flow):
    if flow.live and should_redirect(flow):
        # If you want to change the target server, you should modify flow.request.host and flow.request.port
        # flow.live.change_server should only be used by inline scripts to change the upstream proxy,
        # unless you are sure that you know what you are doing.
        server_changed = flow.live.change_server(alternative_upstream_proxy, persistent_change=True)
        if flow.request.scheme == "https" and server_changed:
            send_connect_request(flow.live.c.server_conn, flow.request.host, flow.request.port)
            flow.live.c.establish_ssl(server=True)
|
# This script demonstrates how mitmproxy can switch to a different upstream proxy
# in upstream proxy mode.
#
# Usage: mitmdump -s "change_upstream_proxy.py host"
from libmproxy.protocol.http import send_connect_request
alternative_upstream_proxy = ("localhost", 8082)
def should_redirect(flow):
    return flow.request.host == "example.com"
def request(context, flow):
    if flow.live and should_redirect(flow):
        # If you want to change the target server, you should modify flow.request.host and flow.request.port
        # flow.live.change_server should only be used by inline scripts to change the upstream proxy,
        # unless you are sure that you know what you are doing.
        server_changed = flow.live.change_server(alternative_upstream_proxy, persistent_change=True)
        if flow.request.scheme == "https" and server_changed:
            send_connect_request(flow.live.c.server_conn, flow.request.host, flow.request.port)
            flow.live.c.establish_ssl(server=True)
|
mit
|
Python
|
5af79b94c6f1b0117e229db23811d3e1c58ff3fa
|
Add password encryption and try to fix mailgun again
|
joehand/DataNews,joehand/DataNews
|
config.py
|
config.py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    DEBUG = False
    TESTING = False
    SECRET_KEY = 'this_is_so_secret' #used for development, reset in prod
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
    SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
    #Flask-Security Config
    SECURITY_TRACKABLE = True
    SECURITY_CHANGEABLE = True
    SECURITY_REGISTERABLE = True
    SECURITY_RECOVERABLE = True
    SECURITY_DEFAULT_REMEMBER_ME = True
    SECURITY_SEND_REGISTER_EMAIL = False
    SECURITY_EMAIL_SUBJECT_PASSWORD_RESET = 'DataNews: password reset instructions'
    SECURITY_EMAIL_SUBJECT_PASSWORD_NOTICE = 'DataNews: your password has been reset'
    SECURITY_EMAIL_SUBJECT_PASSWORD_CHANGE_NOTICE = 'DataNews: your password changed'
    WHOOSH_BASE = os.path.join(basedir, 'search.db')
    DEBUG_TB_PANELS = (
        'flask.ext.debugtoolbar.panels.versions.VersionDebugPanel',
        'flask.ext.debugtoolbar.panels.timer.TimerDebugPanel',
        'flask.ext.debugtoolbar.panels.headers.HeaderDebugPanel',
        'flask.ext.debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
        'flask.ext.debugtoolbar.panels.template.TemplateDebugPanel',
        'flask.ext.debugtoolbar.panels.logger.LoggingPanel'
    )
class ProductionConfig(Config):
    DEBUG = True
    SECRET_KEY = os.environ.get('SECRET_KEY')
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
    SECURITY_PASSWORD_HASH = 'bcrypt'
    SECURITY_PASSWORD_SALT = '$2a$16$PnnIgfMwkOjGX4SkHqSOPO'
    #Mail config
    MAIL_SERVER = os.environ.get('MAILGUN_SMTP_SERVER')
    MAIL_PORT = int(os.environ.get('MAILGUN_SMTP_PORT', 587))
    MAIL_USERNAME = os.environ.get('MAILGUN_SMTP_LOGIN')
    MAIL_PASSWORD = os.environ.get('MAILGUN_SMTP_PASSWORD')
    SECURITY_EMAIL_SENDER = '[email protected]'
class DevelopmentConfig(Config):
    DEBUG = True
    DEBUG_TB_INTERCEPT_REDIRECTS = False
class TestingConfig(Config):
    TESTING = True
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    DEBUG = False
    TESTING = False
    SECRET_KEY = 'this_is_so_secret' #used for development, reset in prod
    SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
    SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
    #Flask-Security Config
    SECURITY_TRACKABLE = True
    SECURITY_CHANGEABLE = True
    SECURITY_REGISTERABLE = True
    SECURITY_RECOVERABLE = True
    SECURITY_DEFAULT_REMEMBER_ME = True
    SECURITY_SEND_REGISTER_EMAIL = False
    SECURITY_EMAIL_SUBJECT_PASSWORD_RESET = 'DataNews: password reset instructions'
    SECURITY_EMAIL_SUBJECT_PASSWORD_NOTICE = 'DataNews: your password has been reset'
    SECURITY_EMAIL_SUBJECT_PASSWORD_CHANGE_NOTICE = 'DataNews: your password changed'
    WHOOSH_BASE = os.path.join(basedir, 'search.db')
    DEBUG_TB_PANELS = (
        'flask.ext.debugtoolbar.panels.versions.VersionDebugPanel',
        'flask.ext.debugtoolbar.panels.timer.TimerDebugPanel',
        'flask.ext.debugtoolbar.panels.headers.HeaderDebugPanel',
        'flask.ext.debugtoolbar.panels.request_vars.RequestVarsDebugPanel',
        'flask.ext.debugtoolbar.panels.template.TemplateDebugPanel',
        'flask.ext.debugtoolbar.panels.logger.LoggingPanel'
    )
class ProductionConfig(Config):
    DEBUG = True
    SECRET_KEY = os.environ.get('SECRET_KEY')
    SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
    #Mail config
    MAIL_SERVER = os.environ.get('MAILGUN_SMTP_SERVER')
    MAIL_PORT = int(os.environ.get('MAILGUN_SMTP_PORT', 25))
    MAIL_USE_SSL = True
    MAIL_USERNAME = os.environ.get('MAILGUN_SMTP_LOGIN')
    MAIL_PASSWORD = os.environ.get('MAILGUN_SMTP_PASSWORD')
class DevelopmentConfig(Config):
    DEBUG = True
    DEBUG_TB_INTERCEPT_REDIRECTS = False
class TestingConfig(Config):
    TESTING = True
|
bsd-3-clause
|
Python
|
cdbf0f2c82360c866d8c26f2d8a9539fa943df6b
|
Bump version 1.0.0.
|
junaruga/rpm-py-installer,junaruga/rpm-py-installer
|
rpm_py_installer/version.py
|
rpm_py_installer/version.py
|
"""Version string."""
# main = X.Y.Z
# sub = .devN for pre-alpha releases
VERSION = '1.0.0'
|
"""Version string."""
# main = X.Y.Z
# sub = .devN for pre-alpha releases
VERSION = '0.9.2'
|
mit
|
Python
|
9b5c1892dd4731df564d627ae9dafe95bd82b6a9
|
Bump version 0.7.1.
|
junaruga/rpm-py-installer,junaruga/rpm-py-installer
|
rpm_py_installer/version.py
|
rpm_py_installer/version.py
|
"""Version string."""
# main = X.Y.Z
# sub = .devN for pre-alpha releases
VERSION = '0.7.1'
|
"""Version string."""
# main = X.Y.Z
# sub = .devN for pre-alpha releases
VERSION = '0.7.0'
|
mit
|
Python
|
2d2513ce860503a7ab69e56f47998ca075efaa3b
|
Add new production hosts
|
okfn/rtei,okfn/rtei,okfn/rtei,okfn/rtei
|
rtei/settings/production.py
|
rtei/settings/production.py
|
import sys
from .base import *
DEBUG = False
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
SECRET_KEY = os.environ.get('SECRET_KEY')
# AWS S3 settings
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
# Necessary to overcome broken pipe error if not default US location
# (https://github.com/boto/boto/issues/621).
if os.environ.get('AWS_S3_HOST', False):
    AWS_S3_HOST = os.environ.get('AWS_S3_HOST')
MEDIA_URL = "https://%s/" % (AWS_S3_CUSTOM_DOMAIN)
ALLOWED_HOSTS = [
    'localhost',
    'rtei.herokuapp.com',
    'rtei-production.herokuapp.com',
    'www.rtei.org',
]
# Email to receive contact requests from the form on /about/contact-us/
RTEI_CONTACT_FORM_EMAIL = os.environ.get('RTEI_CONTACT_FORM_EMAIL')
EMAIL_HOST = os.environ.get('EMAIL_HOST')
EMAIL_PORT = os.environ.get('EMAIL_PORT', 587)
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD')
EMAIL_USE_TLS = True
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'stream': sys.stdout
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
        'rtei': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
    },
}
try:
    from .local import *
except ImportError:
    pass
|
import sys
from .base import *
DEBUG = False
# Update database configuration with $DATABASE_URL.
import dj_database_url
db_from_env = dj_database_url.config()
DATABASES['default'].update(db_from_env)
SECRET_KEY = os.environ.get('SECRET_KEY')
# AWS S3 settings
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = os.environ.get('AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = os.environ.get('AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = os.environ.get('AWS_STORAGE_BUCKET_NAME')
AWS_S3_CUSTOM_DOMAIN = '%s.s3.amazonaws.com' % AWS_STORAGE_BUCKET_NAME
# Necessary to overcome broken pipe error if not default US location
# (https://github.com/boto/boto/issues/621).
if os.environ.get('AWS_S3_HOST', False):
    AWS_S3_HOST = os.environ.get('AWS_S3_HOST')
MEDIA_URL = "https://%s/" % (AWS_S3_CUSTOM_DOMAIN)
ALLOWED_HOSTS = [
    'localhost',
    'rtei.herokuapp.com'
]
# Email to receive contact requests from the form on /about/contact-us/
RTEI_CONTACT_FORM_EMAIL = os.environ.get('RTEI_CONTACT_FORM_EMAIL')
EMAIL_HOST = os.environ.get('EMAIL_HOST')
EMAIL_PORT = os.environ.get('EMAIL_PORT', 587)
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD')
EMAIL_USE_TLS = True
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'console': {
            'level': 'INFO',
            'class': 'logging.StreamHandler',
            'stream': sys.stdout
        },
    },
    'loggers': {
        'django': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
        'rtei': {
            'handlers': ['console'],
            'level': os.getenv('DJANGO_LOG_LEVEL', 'INFO'),
        },
    },
}
try:
    from .local import *
except ImportError:
    pass
|
agpl-3.0
|
Python
|
1df00cb6adf8b9cac677f5f6a272331ab5388c90
|
Update main.py
|
Densvin/RSSVK,Densvin/RSSVK
|
vkfeed/pages/main.py
|
vkfeed/pages/main.py
|
# -*- coding: utf-8 -*-
'''Generates the main page.'''
from __future__ import unicode_literals
import re
import urllib
import webapp2
import vkfeed.utils
class MainPage(webapp2.RequestHandler):
    '''Generates the main page.'''
    def get(self):
        '''Processes a GET request.'''
        self.response.headers[b'Content-Type'] = b'text/html; charset=utf-8'
        self.response.out.write(vkfeed.utils.render_template('main.html'))
    def post(self):
        '''Processes a POST request.'''
        profile_url = self.request.get('profile_url', '')
        match = re.match(r'''^
            \s*
            (?:https?://(?:www\.)?(?:vk\.com|vkontakte\.ru)/)?
            (?P<profile_id>[a-zA-Z0-9._-]+)/?
            \s*
            $''', profile_url, re.IGNORECASE | re.VERBOSE)
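        # The pattern accepts either a full profile URL such as
        # http://vk.com/some_profile or a bare profile id like "some_profile".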
        if match:
            params = {}
            if self.request.get('foreign_posts') == '1':
                params['foreign_posts'] = '1'
            if self.request.get('big_photos') == '1':
                params['big_photos'] = '1'
            if self.request.get('show_photo') != '1':
                params['show_photo'] = '0'
            params = '?' + urllib.urlencode(params) if params else ''
            self.redirect('/feed/' + match.group('profile_id') + '/wall' + params)
        else:
            self.response.headers[b'Content-Type'] = b'text/html; charset=utf-8'
            self.response.out.write(vkfeed.utils.render_template('main.html', {
                'post_error': '''
                    Неверно указан URL профиля.
                    Адрес должен быть вида http://vk.com/имя_профиля.
                    Имя профиля должно удовлетворять требованиям, предъявляемым администрацией ВКонтакте.
                '''
            }))
|
# -*- coding: utf-8 -*-
'''Generates the main page.'''
from __future__ import unicode_literals
import re
import urllib
import webapp2
import RSSvk.utils
class MainPage(webapp2.RequestHandler):
    '''Generates the main page.'''
    def get(self):
        '''Processes a GET request.'''
        self.response.headers[b'Content-Type'] = b'text/html; charset=utf-8'
        self.response.out.write(RSSvk.utils.render_template('main.html'))
    def post(self):
        '''Processes a POST request.'''
        profile_url = self.request.get('profile_url', '')
        match = re.match(r'''^
            \s*
            (?:https?://(?:www\.)?(?:vk\.com|vkontakte\.ru)/)?
            (?P<profile_id>[a-zA-Z0-9._-]+)/?
            \s*
            $''', profile_url, re.IGNORECASE | re.VERBOSE)
        if match:
            params = {}
            if self.request.get('foreign_posts') == '1':
                params['foreign_posts'] = '1'
            if self.request.get('big_photos') == '1':
                params['big_photos'] = '1'
            if self.request.get('show_photo') != '1':
                params['show_photo'] = '0'
            params = '?' + urllib.urlencode(params) if params else ''
            self.redirect('/feed/' + match.group('profile_id') + '/wall' + params)
        else:
            self.response.headers[b'Content-Type'] = b'text/html; charset=utf-8'
            self.response.out.write(RSSvk.utils.render_template('main.html', {
                'post_error': '''
                    Неверно указан URL профиля.
                    Адрес должен быть вида http://vk.com/имя_профиля.
                    Имя профиля должно удовлетворять требованиям, предъявляемым администрацией ВКонтакте.
                '''
            }))
|
bsd-2-clause
|
Python
|
15129d5cc4c3a4981a41bebbfc6ace855004cd20
|
Add organizational structure.
|
weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,weidnem/IntroPython2016
|
students/psbriant/session08/circle.py
|
students/psbriant/session08/circle.py
|
"""
Name: Paul Briant
Date: 11/29/16
Class: Introduction to Python
Session: 08
Assignment: Circle Lab
Description:
Classes for Circle Lab
"""
class Circle:
    def __init__(self, radius):
        """
        """
        self.radius = radius
        self.diameter = radius * 2
    @classmethod
    def from_diameter(cls, diameter):
        self = cls(diameter / 2)
        return self
    def __str__(self):
        return "A circle object with radius: {}".format(self.radius)
    def __repr__(self):
        return "Circle({})".format(self.radius)
    @property
    def diameter(self):
        return self.radius * 2
    @diameter.setter
    def diameter(self, value):
        self.radius = value / 2
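# Illustrative usage (not part of the original file):
#   c = Circle.from_diameter(8)   # c.radius == 4.0
#   c.diameter = 6                # the property setter updates c.radius to 3.0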
|
class Circle:
    def __init__(self, radius):
        """
        """
        self.radius = radius
        self.diameter = radius * 2
    @classmethod
    def from_diameter(cls, diameter):
        self = cls(diameter / 2)
        return self
    def __str__(self):
        return "A circle object with radius: {}".format(self.radius)
    def __repr__(self):
        return "Circle({})".format(self.radius)
    @property
    def diameter(self):
        return self.radius * 2
    @diameter.setter
    def diameter(self, value):
        self.radius = value / 2
|
unlicense
|
Python
|
c79723b179b0bfeda9b324139d8478bf4f24c1e5
|
add unicode char to test print
|
LettError/glyphNameFormatter,LettError/glyphNameFormatter
|
Lib/glyphNameFormatter/test.py
|
Lib/glyphNameFormatter/test.py
|
def printRange(rangeName):
    from glyphNameFormatter import GlyphName
    from glyphNameFormatter.unicodeRangeNames import getRangeByName
    from glyphNameFormatter.data import unicode2name_AGD
    for u in range(*getRangeByName(rangeName)):
        g = GlyphName(uniNumber=u)
        name = g.getName()
        if name is None:
            continue
        AGDName = unicode2name_AGD.get(g.uniNumber, "")
        if AGDName is None or AGDName == name:
            AGDName = ""
        print name.ljust(50), AGDName.ljust(30), "%04X" % g.uniNumber, "\t", g.uniLetter.encode("utf-8"), "\t", g.uniName
|
def printRange(rangeName):
    from glyphNameFormatter import GlyphName
    from glyphNameFormatter.unicodeRangeNames import getRangeByName
    from glyphNameFormatter.data import unicode2name_AGD
    for u in range(*getRangeByName(rangeName)):
        g = GlyphName(uniNumber=u)
        name = g.getName()
        if name is None:
            continue
        AGDName = unicode2name_AGD.get(g.uniNumber, "")
        if AGDName is None or AGDName == name:
            AGDName = ""
        print name.ljust(50), AGDName.ljust(20), "%04X" % g.uniNumber, "\t", g.uniName
|
bsd-3-clause
|
Python
|
c09274936df73668afd14ccac6d7f7c322d5e8b8
|
Add naive logging in Main.py
|
shunghsiyu/pdf-processor
|
Main.py
|
Main.py
|
"""Main Module of PDF Splitter"""
import argparse
import logging
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on, concat_pdf_pages, is_landscape, write_pdf_file
parser = \
    argparse.ArgumentParser(
        description='Split all the pages of multiple PDF files in a directory by document number'
    )
parser.add_argument(
    'directory',
    metavar='PATH',
    type=str,
    help='path to a directory'
)
# Get default logger
log = logging.getLogger(__name__)
log.addHandler(logging.StreamHandler())
log.setLevel(logging.INFO)
def main():
    # Get to directory with PDF files to work on
    args = parser.parse_args()
    directory = args.directory
    log.info('Working on PDF files in %s', directory)
    # Open the PDF files
    all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)]
    log.info('Found the following PDF files\n %s', '\n '.join(all_pdf_files))
    opened_files = [open(path, 'rb') for path in all_pdf_files]
    # Take all the pages in all the PDF files into a generator
    all_pages = concat_pdf_pages(opened_files)
    def make_pagenum_even(pdf_writer):
        """Helper function that appends a blank page if the number of pages
        is odd, in order to make the page count even."""
        if pdf_writer.getNumPages() % 2 == 1:
            log.debug(' Adding a blank page')
            pdf_writer.addBlankPage()
    # For all pages that belong to the same document ID
    for idx, pages_to_write in enumerate(split_on(all_pages, predicate=is_landscape), start=1):
        # Create a PDF writer instance
        pdf_writer = PdfFileWriter()
        # Put those pages into a writer
        log.info('Adding %d pages to PDFWriter', len(pages_to_write))
        for page in pages_to_write:
            log.debug(' Adding page %s', repr(page))
            pdf_writer.addPage(page)
        # Make sure the output PDF will have an even number of pages
        # which makes printing the PDF file easier
        make_pagenum_even(pdf_writer)
        output_filename = '{0:05}.pdf'.format(idx)
        # And write those pages to a single PDF file
        log.info('Writing PDF pages to %s', output_filename)
        write_pdf_file(output_filename, pdf_writer)
    # Make sure to close all the files that were opened
    log.debug('Closing all opened files')
    for file in opened_files:
        file.close()
if __name__ == '__main__':
    main()
|
"""Main Module of PDF Splitter"""
import argparse
import os
from PyPDF2 import PdfFileWriter
from Util import all_pdf_files_in_directory, split_on, concat_pdf_pages, is_landscape, write_pdf_file
parser = \
    argparse.ArgumentParser(
        description='Split all the pages of multiple PDF files in a directory by document number'
    )
parser.add_argument(
    'directory',
    metavar='PATH',
    type=str,
    help='path to a directory'
)
def main():
    # Get to directory with PDF files to work on
    args = parser.parse_args()
    directory = args.directory
    # Open the PDF files
    all_pdf_files = [os.path.join(directory, filename) for filename in all_pdf_files_in_directory(directory)]
    opened_files = [open(path, 'rb') for path in all_pdf_files]
    # Take all the pages in all the PDF files into a generator
    all_pages = concat_pdf_pages(opened_files)
    def make_pagenum_even(pdf_writer):
        """Helper function that appends a blank page if the number of pages
        is odd, in order to make the page count even."""
        if pdf_writer.getNumPages() % 2 == 1:
            pdf_writer.addBlankPage()
    # For all pages that belong to the same document ID
    for idx, pages_to_write in enumerate(split_on(all_pages, predicate=is_landscape), start=1):
        # Create a PDF writer instance
        pdf_writer = PdfFileWriter()
        # Put those pages into a writer
        for page in pages_to_write:
            pdf_writer.addPage(page)
        # Make sure the output PDF will have an even number of pages
        # which makes printing the PDF file easier
        make_pagenum_even(pdf_writer)
        output_filename = '{0:05}.pdf'.format(idx)
        # And write those pages to a single PDF file
        write_pdf_file(output_filename, pdf_writer)
    # Make sure to close all the files that were opened
    for file in opened_files:
        file.close()
if __name__ == '__main__':
    main()
|
mit
|
Python
|
6d4038653bf237a285f99e68288454ce9ebdfc92
|
Add allowed hosts
|
jasisz/cinderella,jasisz/cinderella
|
cinderella/cinderella/settings/production.py
|
cinderella/cinderella/settings/production.py
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['188.226.249.33', 'cinderella.li']
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ['DB_NAME'],
        'USER': os.environ['DB_USER'],
        'PASSWORD': os.environ['DB_PASSWORD'],
        'HOST': '127.0.0.1',
        'PORT': '5432',
    }
}
|
from .base import *
DEBUG = False
ALLOWED_HOSTS = ['cinderella.io']
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ['DB_NAME'],
        'USER': os.environ['DB_USER'],
        'PASSWORD': os.environ['DB_PASSWORD'],
        'HOST': '127.0.0.1',
        'PORT': '5432',
    }
}
|
mit
|
Python
|
c0787c468e1b71d7e9db93b5f5990ae9bb506d82
|
FIX other two sample data load for Windows
|
massmutual/pystruct,pystruct/pystruct,amueller/pystruct,d-mittal/pystruct,wattlebird/pystruct,pystruct/pystruct,d-mittal/pystruct,wattlebird/pystruct,massmutual/pystruct,amueller/pystruct
|
pystruct/datasets/dataset_loaders.py
|
pystruct/datasets/dataset_loaders.py
|
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
    module_path = dirname(__file__)
    data_file = open(join(module_path, 'letters.pickle'),'rb')
    data = cPickle.load(data_file)
    # we add an easy to use image representation:
    data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
                      for word in data['data']]
    return data
def load_scene():
    module_path = dirname(__file__)
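    # Binary mode ('rb') is the Windows fix this commit makes: text mode
    # corrupts pickle streams there via newline translation.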
    data_file = open(join(module_path, 'scene.pickle'),'rb')
    return cPickle.load(data_file)
def load_snakes():
    module_path = dirname(__file__)
    data_file = open(join(module_path, 'snakes.pickle'),'rb')
    return cPickle.load(data_file)
|
import cPickle
from os.path import dirname
from os.path import join
import numpy as np
def load_letters():
"""Load the OCR letters dataset.
This is a chain classification task.
Each example consists of a word, segmented into letters.
The first letter of each word is ommited from the data,
as it was a capital letter (in contrast to all other letters).
"""
    module_path = dirname(__file__)
    data_file = open(join(module_path, 'letters.pickle'),'rb')
    data = cPickle.load(data_file)
    # we add an easy to use image representation:
    data['images'] = [np.hstack([l.reshape(16, 8) for l in word])
                      for word in data['data']]
    return data
def load_scene():
    module_path = dirname(__file__)
    data_file = open(join(module_path, 'scene.pickle'))
    return cPickle.load(data_file)
def load_snakes():
    module_path = dirname(__file__)
    data_file = open(join(module_path, 'snakes.pickle'))
    return cPickle.load(data_file)
|
bsd-2-clause
|
Python
|
19a9465424400ca1a194e5516c44ca77a0f5591a
|
add Alpha Vantage API key to config file
|
jamesottinger/MoneyWatch,jamesottinger/MoneyWatch,jamesottinger/MoneyWatch
|
moneywatch/moneywatchconfig.py
|
moneywatch/moneywatchconfig.py
|
#!/usr/bin/python
#===============================================================================
# Copyright (c) 2016, James Ottinger. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# MoneyWatch - https://github.com/jamesottinger/moneywatch
#=======================================================+=======================
db_creds = { 'host':'xxx.xxx.xxx.xxx', 'user':'youruser', 'passwd':'yourpassword', 'db':'thedbname' }
dirlogs = '/log/'
direrrors = '/log/errors/'
weather = { 'latitude':'39.7344443', 'longitude':'-75.0072787', 'title':'Chipotle - Sicklerville, NJ' }
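# Alpha Vantage API key ("----" is a placeholder; presumably used to fetch
# security prices for the investment accounts above).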
alphavantage_apikey = "----"
uilinks = [
    [
        ('Google Drive', 'https://drive.google.com/#my-drive')
    ], [
        ('Capital One 360', 'https://home.capitalone360.com'),
        ('Discover Bank', 'https://www.discover.com/online-banking/')
    ], [
        ('Fidelity', 'https://www.fidelity.com'),
        ('Vanguard', 'https://www.vanguard.com/')
    ]
]
|
#!/usr/bin/python
#===============================================================================
# Copyright (c) 2016, James Ottinger. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# MoneyWatch - https://github.com/jamesottinger/moneywatch
#=======================================================+=======================
db_creds = { 'host':'xxx.xxx.xxx.xxx', 'user':'youruser', 'passwd':'yourpassword', 'db':'thedbname' }
dirlogs = '/log/'
direrrors = '/log/errors/'
weather = { 'latitude':'39.7344443', 'longitude':'-75.0072787', 'title':'Chipotle - Sicklerville, NJ' }
uilinks = [
    [
        ('Google Drive', 'https://drive.google.com/#my-drive')
    ], [
        ('Capital One 360', 'https://home.capitalone360.com'),
        ('Discover Bank', 'https://www.discover.com/online-banking/')
    ], [
        ('Fidelity', 'https://www.fidelity.com'),
        ('Vanguard', 'https://www.vanguard.com/')
    ]
]
|
bsd-3-clause
|
Python
|
04c67e99af363cd8eea4414f59a9294a84faaa6d
|
Fix test layout
|
muffins-on-dope/bakery,muffins-on-dope/bakery,muffins-on-dope/bakery
|
tests/api/test_views.py
|
tests/api/test_views.py
|
# -*- coding: utf-8 -*-
import httpretty
import json
from django.test import TestCase
from django.utils.encoding import smart_str
from bakery.auth.models import BakeryUser
from bakery.cookies.models import Cookie
from bakery.utils.test import read
class TestApi(TestCase):
    def test_cookies_list_empty(self):
        resp = self.client.get('/api/v1/cookies/list/')
        self.assertEqual(resp.content, b'[]')
    def test_cookies_list(self):
        BakeryUser.objects.create_user('user')
        user = BakeryUser.objects.get(username='user')
        Cookie.objects.create(
            name='test',
            owner_name='test',
            url='http://example.com/unique',
            owner=user,
            backend='github'
        )
        resp = self.client.get('/api/v1/cookies/list/')
        data = json.loads(smart_str(resp.content))
        self.assertEqual(
            data,
            [{
                "url": "http://example.com/unique",
                "description": "",
                "name": "test",
                "last_change": None
            }]
        )
    @httpretty.activate
    def test_cookie_register(self):
        httpretty.register_uri(httpretty.GET,
                               'https://api.github.com/repos/muffins-on-dope/bakery',
                               body=read(__file__, '..', '_replay_data', 'bakery-repository'),
                               content_type='application/json; charset=utf-8'
                               )
        self.client.post('/api/v1/cookies/new/',
                         json.dumps({'url': '[email protected]:muffins-on-dope/bakery.git'}),
                         content_type='application/json',
                         )
        self.assertEqual(Cookie.objects.count(), 1)
|
# -*- coding: utf-8 -*-
import json
from django.test import TestCase
from django.utils.encoding import smart_str
from bakery.auth.models import BakeryUser
from bakery.cookies.models import Cookie
from bakery.utils.test import read
import httpretty
class TestApi(TestCase):
    def test_cookies_list_empty(self):
        resp = self.client.get('/api/v1/cookies/list/')
        self.assertEqual(resp.content, b'[]')
    def test_cookies_list(self):
        BakeryUser.objects.create_user('user')
        user = BakeryUser.objects.get(username='user')
        cookie = Cookie.objects.create(
            name='test',
            owner_name='test',
            url='http://example.com/unique',
            owner=user,
            backend='github'
        )
        resp = self.client.get('/api/v1/cookies/list/')
        data = json.loads(smart_str(resp.content))
        self.assertEqual(
            data,
            [{
                "url": "http://example.com/unique",
                "description": "",
                "name": "test",
                "last_change": None
            }]
        )
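    # Note: the method below reuses the name test_cookies_list, shadowing the
    # test above so it never runs; the fix renames it to test_cookie_register.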
    @httpretty.activate
    def test_cookies_list(self):
        httpretty.register_uri(httpretty.GET,
                               'https://api.github.com/repos/muffins-on-dope/bakery',
                               body=read(__file__, '..', '_replay_data', 'bakery-repository'),
                               content_type='application/json; charset=utf-8'
                               )
        self.client.post('/api/v1/cookies/new/',
                         json.dumps({'url': '[email protected]:muffins-on-dope/bakery.git'}),
                         content_type='application/json',
                         )
        self.assertEqual(Cookie.objects.count(), 1)
|
bsd-3-clause
|
Python
|
6940035d7827a6a2aa719e537f122c07a91bd7c1
|
support werkzeug==1.0.0
|
claytonbrown/dokku,dokku/dokku,progrium/dokku,claytonbrown/dokku,progrium/dokku,claytonbrown/dokku,dokku/dokku,progrium/dokku,dokku/dokku,claytonbrown/dokku,dokku/dokku,claytonbrown/dokku,progrium/dokku,progrium/dokku,dokku/dokku,progrium/dokku,claytonbrown/dokku,dokku/dokku,dokku/dokku,dokku/dokku,progrium/dokku,claytonbrown/dokku,claytonbrown/dokku,progrium/dokku
|
tests/apps/multi/app.py
|
tests/apps/multi/app.py
|
import os
from flask import Flask, render_template
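# Werkzeug 1.0 removed SharedDataMiddleware from werkzeug.wsgi; try the old
# import path first and fall back to the newer werkzeug.middleware.shared_data
# location for recent releases.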
try:
    from werkzeug.wsgi import SharedDataMiddleware
except ImportError:
    from werkzeug.middleware.shared_data import SharedDataMiddleware
app = Flask(__name__)
@app.route('/')
def hello_world():
    return render_template('index.html')
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, { '/': os.path.join(os.path.dirname(__file__), 'static') })
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, { '/': os.path.join(os.path.dirname(__file__), 'static/.tmp') })
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
|
import os
from flask import Flask, render_template
from werkzeug.wsgi import SharedDataMiddleware
app = Flask(__name__)
@app.route('/')
def hello_world():
    return render_template('index.html')
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, { '/': os.path.join(os.path.dirname(__file__), 'static') })
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, { '/': os.path.join(os.path.dirname(__file__), 'static/.tmp') })
if __name__ == '__main__':
    app.run(host='0.0.0.0', debug=True)
|
mit
|
Python
|
d45bbe102efec23656d1329b5c3e6a785c69acee
|
switch base test cases of pickle backend from tests.backend.ini to tests.backend.common
|
ssato/python-anyconfig,ssato/python-anyconfig
|
tests/backend/pickle.py
|
tests/backend/pickle.py
|
#
# Copyright (C) 2017 Satoru SATOH <ssato @ redhat.com>
# Copyright (C) 2017 Red Hat, Inc.
# License: MIT
#
# pylint: disable=missing-docstring,invalid-name,too-few-public-methods
from __future__ import absolute_import
import anyconfig.backend.pickle as TT
import tests.backend.common as TBC
class HasParserTrait(TBC.HasParserTrait):
psr = TT.Parser()
cnf = TBC.CNF_1
cnf_s = TT.pickle.dumps(cnf)
class Test_10(TBC.Test_10_dumps_and_loads, HasParserTrait):
load_options = dump_options = dict(protocol=TT.pickle.HIGHEST_PROTOCOL)
class Test_20(TBC.Test_20_dump_and_load, HasParserTrait):
pass
# vim:sw=4:ts=4:et:
|
#
# Copyright (C) 2017 Satoru SATOH <ssato @ redhat.com>
# License: MIT
#
# pylint: disable=missing-docstring
from __future__ import absolute_import
try:
import anyconfig.backend.pickle as TT
except ImportError:
TT = None
import tests.backend.ini
from tests.common import dicts_equal
CNF_0 = dict(a=0, b="bbb", c=5, sect0=dict(d=["x", "y", "z"]))
class Test10(tests.backend.ini.Test10):
cnf = CNF_0
cnf_s = TT.pickle.dumps(cnf)
load_options = dump_options = dict(protocol=TT.pickle.HIGHEST_PROTOCOL)
is_order_kept = False
def setUp(self):
self.psr = TT.Parser()
class Test20(tests.backend.ini.Test20):
psr_cls = TT.Parser
cnf = CNF_0
cnf_s = TT.pickle.dumps(cnf, protocol=TT.pickle.HIGHEST_PROTOCOL)
cnf_fn = "conf0.pkl"
def test_22_dump__w_special_option(self):
self.psr.dump(self.cnf, self.cpath,
protocol=TT.pickle.HIGHEST_PROTOCOL)
cnf = self.psr.load(self.cpath)
self.assertTrue(dicts_equal(cnf, self.cnf), str(cnf))
# vim:sw=4:ts=4:et:
|
mit
|
Python
|
f0ef4f5e269d7f2d7fd347e8f458c1c9ce1ffb34
|
Fix bug in redis hook
|
synw/django-mqueue,synw/django-mqueue,synw/django-mqueue
|
mqueue/hooks/redis/__init__.py
|
mqueue/hooks/redis/__init__.py
|
import redis
import time
from mqueue.conf import DOMAIN
from mqueue.hooks.redis import serializer
from mqueue.conf import HOOKS
conf = HOOKS["redis"]
R = redis.StrictRedis(host=conf["host"], port=conf["port"], db=conf["db"])
event_num = int(time.time())
def save(event, conf):
global event_num
global R
name = DOMAIN + "_event" + str(event_num)
event.request = event.request.replace("\n", "//")
data = serializer.Pack(event)
R.set(name, data)
event_num += 1
|
import redis
import time
from mqueue.conf import DOMAIN
from mqueue.hooks.redis import serializer
from mqueue.conf import HOOKS
conf = HOOKS["redis"]
R = redis.StrictRedis(host=conf["host"], port=conf["port"], db=conf["db"])
event_num = int(time.time())
def save(event, conf):
name = DOMAIN+"_event"+str(event_num)
event.request = event.request.replace("\n", "//")
data = serializer.Pack(event)
R.set(name, data)
|
mit
|
Python
|
54035774d3b9aece86e68f047e9ff4a270d344cb
|
add mountain climbing emoji #2001
|
c2corg/v6_ui,c2corg/v6_ui,c2corg/v6_ui,c2corg/v6_ui
|
c2corg_ui/format/emoji_databases/c2c_activities.py
|
c2corg_ui/format/emoji_databases/c2c_activities.py
|
SVG_CDN = "/static/img/documents/activities/"
emoji = {
":rock_climbing:": {
"category": "activitiy",
"name": "rock climbing",
"svg_name": "rock_climbing",
"unicode": "1f9d7",
},
":skitouring:": {
"category": "activitiy",
"name": "ski touring",
"svg_name": "skitouring",
"unicode": "26f7"
},
":hiking:": {
"category": "activitiy",
"name": "hiking",
"svg_name": "hiking",
},
":ice_climbing:": {
"category": "activitiy",
"name": "ice climbing",
"svg_name": "ice_climbing",
},
":mountain_biking:": {
"category": "activitiy",
"name": "mountain biking",
"svg_name": "mountain_biking",
},
":paragliding:": {
"category": "activitiy",
"name": "paragliding",
"svg_name": "paragliding",
},
":slacklining:": {
"category": "activitiy",
"name": "slacklining",
"svg_name": "slacklining",
},
":snow_ice_mixed:": {
"category": "activitiy",
"name": "snow ice mixed",
"svg_name": "snow_ice_mixed",
},
":snowshoeing:": {
"category": "activitiy",
"name": "snowshoeing",
"svg_name": "snowshoeing",
},
":via_ferrata:": {
"category": "activitiy",
"name": "via ferrata",
"svg_name": "via_ferrata",
},
":mountain_climbing:": {
"category": "activitiy",
"name": "mountain climbing",
"svg_name": "mountain_climbing",
}
}
aliases = {}
|
SVG_CDN = "/static/img/documents/activities/"
emoji = {
":rock_climbing:": {
"category": "activitiy",
"name": "rock climbing",
"svg_name": "rock_climbing",
"unicode": "1f9d7",
},
":skitouring:": {
"category": "activitiy",
"name": "ski touring",
"svg_name": "skitouring",
"unicode": "26f7"
},
":hiking:": {
"category": "activitiy",
"name": "hiking",
"svg_name": "hiking",
},
":ice_climbing:": {
"category": "activitiy",
"name": "ice climbing",
"svg_name": "ice_climbing",
},
":mountain_biking:": {
"category": "activitiy",
"name": "mountain biking",
"svg_name": "mountain_biking",
},
":paragliding:": {
"category": "activitiy",
"name": "paragliding",
"svg_name": "paragliding",
},
":slacklining:": {
"category": "activitiy",
"name": "slacklining",
"svg_name": "slacklining",
},
":snow_ice_mixed:": {
"category": "activitiy",
"name": "snow ice mixed",
"svg_name": "snow_ice_mixed",
},
":snowshoeing:": {
"category": "activitiy",
"name": "snowshoeing",
"svg_name": "snowshoeing",
},
":via_ferrata:": {
"category": "activitiy",
"name": "via ferrata",
"svg_name": "via_ferrata",
},
}
aliases = {}
|
agpl-3.0
|
Python
|
d5167d8ba1b3107e5ce121eca76b5496bf8d6448
|
Truncate a long log message.
|
ohsu-qin/qipipe
|
qipipe/registration/ants/template.py
|
qipipe/registration/ants/template.py
|
import os
import logging
import envoy
from .ants_error import ANTSError
def create_template(metric, files):
"""
Builds a template from the given image files.
:param metric: the similarity metric
:param files: the image files
:return: the template file name
"""
CMD = "buildtemplateparallel.sh -d 2 -c 2 -j 4 -d 2 -s {metric} -o {output} {files}"
PREFIX = 'reg_'
SUFFIX = 'template.nii.gz'
tmpl = PREFIX + SUFFIX
if os.path.exists(tmpl):
logging.info("Registration template already exists: %s" % tmpl)
return tmpl
cmd = CMD.format(metric=metric.name, output=PREFIX, files=' '.join(files))
logging.info("Building the %s registration template with the following command:" % tmpl)
logging.info(cmd[:80])
r = envoy.run(cmd)
if r.status_code:
logging.error("Build registration template failed with error code %d" % r.status_code)
logging.error(r.std_err)
raise ANTSError("Build registration template unsuccessful; see the log for details")
if not os.path.exists(tmpl):
logging.error("Build registration template was not created.")
raise ANTSError("Build registration template unsuccessful; see the log for details")
logging.info("Built the registration template %s." % tmpl)
return tmpl
|
import os
import logging
import envoy
from .ants_error import ANTSError
def create_template(metric, files):
"""
Builds a template from the given image files.
:param metric: the similarity metric
:param files: the image files
:return: the template file name
"""
CMD = "buildtemplateparallel.sh -d 2 -c 2 -j 4 -d 2 -s {metric} -o {output} {files}"
PREFIX = 'reg_'
SUFFIX = 'template.nii.gz'
tmpl = PREFIX + SUFFIX
if os.path.exists(tmpl):
logging.info("Registration template already exists: %s" % tmpl)
return tmpl
cmd = CMD.format(metric=metric.name, output=PREFIX, files=' '.join(files))
logging.info("Building the %s registration template with the following command:" % tmpl)
logging.info(cmd)
r = envoy.run(cmd)
if r.status_code:
logging.error("Build registration template failed with error code %d" % r.status_code)
logging.error(r.std_err)
raise ANTSError("Build registration template unsuccessful; see the log for details")
if not os.path.exists(tmpl):
logging.error("Build registration template was not created.")
raise ANTSError("Build registration template unsuccessful; see the log for details")
logging.info("Built the registration template %s." % tmpl)
return tmpl
|
bsd-2-clause
|
Python
|
52b98755a8b26fb50d90b7988ee8ee16053e5c11
|
Update lint.py to automatically find .py files
|
wbond/oscrypto
|
lint.py
|
lint.py
|
# coding: utf-8
from __future__ import unicode_literals
import os
from pylint.lint import Run
cur_dir = os.path.dirname(__file__)
rc_path = os.path.join(cur_dir, './.pylintrc')
print('Running pylint...')
files = []
for root, dirnames, filenames in os.walk('oscrypto/'):
for filename in filenames:
if not filename.endswith('.py'):
continue
files.append(os.path.join(root, filename))
args = ['--rcfile=%s' % rc_path] + files
Run(args)
|
# coding: utf-8
from __future__ import unicode_literals
import os
from pylint.lint import Run
cur_dir = os.path.dirname(__file__)
rc_path = os.path.join(cur_dir, './.pylintrc')
print('Running pylint...')
files = [
'__init__.py',
'_osx_ctypes.py',
'_osx_public_key.py',
'_osx_symmetric.py',
'_osx_util.py',
'_win_util.py',
'errors.py',
'kdf.py',
'keys.py',
'public_key.py',
'symmetric.py',
'util.py',
]
args = ['--rcfile=%s' % rc_path]
args += ['oscrypto/' + f for f in files]
Run(args)
|
mit
|
Python
|
671ff419731084681edaf3fdc826c1139383c325
|
add Raises to orthogonal_procrustes docstring
|
mgaitan/scipy,pbrod/scipy,sonnyhu/scipy,ortylp/scipy,petebachant/scipy,raoulbq/scipy,gef756/scipy,aeklant/scipy,jonycgn/scipy,ortylp/scipy,e-q/scipy,richardotis/scipy,giorgiop/scipy,gef756/scipy,zaxliu/scipy,efiring/scipy,mhogg/scipy,piyush0609/scipy,woodscn/scipy,felipebetancur/scipy,pyramania/scipy,gdooper/scipy,rmcgibbo/scipy,ndchorley/scipy,aman-iitj/scipy,Stefan-Endres/scipy,jamestwebber/scipy,hainm/scipy,matthewalbani/scipy,WillieMaddox/scipy,mortonjt/scipy,hainm/scipy,zxsted/scipy,mortada/scipy,zerothi/scipy,gertingold/scipy,ilayn/scipy,vberaudi/scipy,jseabold/scipy,fernand/scipy,rmcgibbo/scipy,chatcannon/scipy,pizzathief/scipy,petebachant/scipy,futurulus/scipy,mdhaber/scipy,behzadnouri/scipy,mtrbean/scipy,andim/scipy,fernand/scipy,perimosocordiae/scipy,witcxc/scipy,gertingold/scipy,gertingold/scipy,lukauskas/scipy,richardotis/scipy,anielsen001/scipy,maniteja123/scipy,dch312/scipy,mdhaber/scipy,befelix/scipy,chatcannon/scipy,maniteja123/scipy,surhudm/scipy,anielsen001/scipy,sauliusl/scipy,aarchiba/scipy,matthew-brett/scipy,woodscn/scipy,jjhelmus/scipy,pyramania/scipy,trankmichael/scipy,gertingold/scipy,zerothi/scipy,jseabold/scipy,mortonjt/scipy,zaxliu/scipy,jseabold/scipy,zaxliu/scipy,fernand/scipy,lukauskas/scipy,e-q/scipy,FRidh/scipy,bkendzior/scipy,anntzer/scipy,richardotis/scipy,apbard/scipy,maniteja123/scipy,maciejkula/scipy,andim/scipy,lhilt/scipy,zerothi/scipy,Dapid/scipy,pyramania/scipy,efiring/scipy,futurulus/scipy,ales-erjavec/scipy,kleskjr/scipy,andyfaff/scipy,gfyoung/scipy,jjhelmus/scipy,jsilter/scipy,WarrenWeckesser/scipy,newemailjdm/scipy,pnedunuri/scipy,lukauskas/scipy,pschella/scipy,haudren/scipy,Srisai85/scipy,gfyoung/scipy,kleskjr/scipy,dch312/scipy,raoulbq/scipy,cpaulik/scipy,pnedunuri/scipy,surhudm/scipy,tylerjereddy/scipy,matthewalbani/scipy,ales-erjavec/scipy,fredrikw/scipy,njwilson23/scipy,vberaudi/scipy,vigna/scipy,chatcannon/scipy,aarchiba/scipy,njwilson23/scipy,kalvdans/scipy,josephcslater/scipy,matthew-brett/scipy,cpaulik/scipy,josephcslater/scipy,mingwpy/scipy,mingwpy/scipy,pschella/scipy,behzadnouri/scipy,raoulbq/scipy,petebachant/scipy,matthew-brett/scipy,FRidh/scipy,sonnyhu/scipy,tylerjereddy/scipy,WillieMaddox/scipy,minhlongdo/scipy,witcxc/scipy,niknow/scipy,WillieMaddox/scipy,niknow/scipy,fredrikw/scipy,pschella/scipy,mortada/scipy,Shaswat27/scipy,Kamp9/scipy,nvoron23/scipy,jsilter/scipy,zxsted/scipy,vanpact/scipy,trankmichael/scipy,zerothi/scipy,richardotis/scipy,sauliusl/scipy,witcxc/scipy,FRidh/scipy,endolith/scipy,jamestwebber/scipy,Stefan-Endres/scipy,pbrod/scipy,efiring/scipy,nmayorov/scipy,pschella/scipy,giorgiop/scipy,Eric89GXL/scipy,ndchorley/scipy,mikebenfield/scipy,sriki18/scipy,Kamp9/scipy,rgommers/scipy,aman-iitj/scipy,arokem/scipy,argriffing/scipy,aarchiba/scipy,Newman101/scipy,jakevdp/scipy,ortylp/scipy,minhlongdo/scipy,jakevdp/scipy,haudren/scipy,fredrikw/scipy,Dapid/scipy,zxsted/scipy,perimosocordiae/scipy,futurulus/scipy,chatcannon/scipy,dominicelse/scipy,vigna/scipy,nvoron23/scipy,Newman101/scipy,jonycgn/scipy,Eric89GXL/scipy,sriki18/scipy,woodscn/scipy,nonhermitian/scipy,kalvdans/scipy,njwilson23/scipy,Stefan-Endres/scipy,Dapid/scipy,sriki18/scipy,nvoron23/scipy,jsilter/scipy,e-q/scipy,nmayorov/scipy,Gillu13/scipy,niknow/scipy,vigna/scipy,aeklant/scipy,hainm/scipy,person142/scipy,andyfaff/scipy,aman-iitj/scipy,piyush0609/scipy,mdhaber/scipy,jsilter/scipy,WarrenWeckesser/scipy,rmcgibbo/scipy,Shaswat27/scipy,perimosocordiae/scipy,apbard/scipy,bkendzior/scipy,sonnyhu/scipy,WillieMaddox/scipy,andim/scipy,Srisai85/scipy,larsmans/scipy,Srisai85/scipy,ilayn/scipy,andim/scipy,jseabold/scipy,maniteja123/scipy,rgommers/scipy,njwilson23/scipy,efiring/scipy,endolith/scipy,person142/scipy,giorgiop/scipy,gef756/scipy,mingwpy/scipy,mhogg/scipy,endolith/scipy,zerothi/scipy,Srisai85/scipy,minhlongdo/scipy,futurulus/scipy,WarrenWeckesser/scipy,jamestwebber/scipy,dch312/scipy,mortada/scipy,vhaasteren/scipy,giorgiop/scipy,woodscn/scipy,trankmichael/scipy,Stefan-Endres/scipy,aman-iitj/scipy,njwilson23/scipy,rgommers/scipy,mgaitan/scipy,maciejkula/scipy,anntzer/scipy,lhilt/scipy,jonycgn/scipy,vanpact/scipy,jseabold/scipy,anielsen001/scipy,surhudm/scipy,felipebetancur/scipy,njwilson23/scipy,gef756/scipy,mtrbean/scipy,ndchorley/scipy,scipy/scipy,bkendzior/scipy,WillieMaddox/scipy,mingwpy/scipy,argriffing/scipy,cpaulik/scipy,Shaswat27/scipy,Shaswat27/scipy,Gillu13/scipy,surhudm/scipy,Eric89GXL/scipy,petebachant/scipy,newemailjdm/scipy,aeklant/scipy,gfyoung/scipy,vanpact/scipy,jonycgn/scipy,mtrbean/scipy,bkendzior/scipy,pnedunuri/scipy,scipy/scipy,Kamp9/scipy,nmayorov/scipy,andyfaff/scipy,mtrbean/scipy,WarrenWeckesser/scipy,pbrod/scipy,dominicelse/scipy,niknow/scipy,sauliusl/scipy,gdooper/scipy,haudren/scipy,gdooper/scipy,Kamp9/scipy,jor-/scipy,endolith/scipy,richardotis/scipy,pnedunuri/scipy,jor-/scipy,Eric89GXL/scipy,nonhermitian/scipy,befelix/scipy,newemailjdm/scipy,argriffing/scipy,befelix/scipy,hainm/scipy,ales-erjavec/scipy,behzadnouri/scipy,maciejkula/scipy,hainm/scipy,perimosocordiae/scipy,vberaudi/scipy,mortada/scipy,e-q/scipy,cpaulik/scipy,mingwpy/scipy,ndchorley/scipy,andim/scipy,Gillu13/scipy,person142/scipy,jseabold/scipy,newemailjdm/scipy,matthewalbani/scipy,grlee77/scipy,sriki18/scipy,anntzer/scipy,aeklant/scipy,befelix/scipy,hainm/scipy,tylerjereddy/scipy,pschella/scipy,jsilter/scipy,kleskjr/scipy,anielsen001/scipy,efiring/scipy,mgaitan/scipy,fernand/scipy,vhaasteren/scipy,mgaitan/scipy,sauliusl/scipy,mortada/scipy,futurulus/scipy,Gillu13/scipy,anntzer/scipy,pbrod/scipy,maniteja123/scipy,jakevdp/scipy,piyush0609/scipy,pbrod/scipy,Srisai85/scipy,chatcannon/scipy,Dapid/scipy,ilayn/scipy,felipebetancur/scipy,mhogg/scipy,arokem/scipy,Stefan-Endres/scipy,jonycgn/scipy,giorgiop/scipy,Shaswat27/scipy,jor-/scipy,vberaudi/scipy,andyfaff/scipy,larsmans/scipy,felipebetancur/scipy,larsmans/scipy,newemailjdm/scipy,kalvdans/scipy,matthewalbani/scipy,mikebenfield/scipy,mortonjt/scipy,matthew-brett/scipy,person142/scipy,WarrenWeckesser/scipy,cpaulik/scipy,arokem/scipy,josephcslater/scipy,vanpact/scipy,Newman101/scipy,behzadnouri/scipy,aarchiba/scipy,Newman101/scipy,Gillu13/scipy,rmcgibbo/scipy,ChanderG/scipy,Dapid/scipy,haudren/scipy,mdhaber/scipy,trankmichael/scipy,Kamp9/scipy,mdhaber/scipy,cpaulik/scipy,raoulbq/scipy,zerothi/scipy,ChanderG/scipy,lukauskas/scipy,mikebenfield/scipy,scipy/scipy,grlee77/scipy,dch312/scipy,niknow/scipy,ilayn/scipy,witcxc/scipy,vhaasteren/scipy,mikebenfield/scipy,vberaudi/scipy,giorgiop/scipy,matthewalbani/scipy,surhudm/scipy,nvoron23/scipy,zxsted/scipy,e-q/scipy,jakevdp/scipy,sriki18/scipy,grlee77/scipy,kalvdans/scipy,mingwpy/scipy,pizzathief/scipy,ortylp/scipy,sriki18/scipy,nonhermitian/scipy,minhlongdo/scipy,jjhelmus/scipy,Eric89GXL/scipy,mortada/scipy,surhudm/scipy,ilayn/scipy,mdhaber/scipy,ndchorley/scipy,tylerjereddy/scipy,haudren/scipy,kleskjr/scipy,vhaasteren/scipy,ales-erjavec/scipy,argriffing/scipy,maciejkula/scipy,Gillu13/scipy,anntzer/scipy,perimosocordiae/scipy,anielsen001/scipy,ales-erjavec/scipy,lhilt/scipy,piyush0609/scipy,zxsted/scipy,sonnyhu/scipy,zxsted/scipy,futurulus/scipy,jjhelmus/scipy,mhogg/scipy,person142/scipy,kleskjr/scipy,ChanderG/scipy,trankmichael/scipy,nonhermitian/scipy,mikebenfield/scipy,pizzathief/scipy,behzadnouri/scipy,lhilt/scipy,ChanderG/scipy,Newman101/scipy,efiring/scipy,zaxliu/scipy,ortylp/scipy,fredrikw/scipy,minhlongdo/scipy,lukauskas/scipy,kleskjr/scipy,pnedunuri/scipy,felipebetancur/scipy,FRidh/scipy,sonnyhu/scipy,rgommers/scipy,felipebetancur/scipy,gertingold/scipy,mhogg/scipy,piyush0609/scipy,sauliusl/scipy,scipy/scipy,mortonjt/scipy,piyush0609/scipy,mhogg/scipy,vhaasteren/scipy,raoulbq/scipy,maciejkula/scipy,niknow/scipy,argriffing/scipy,petebachant/scipy,pizzathief/scipy,larsmans/scipy,nonhermitian/scipy,vanpact/scipy,woodscn/scipy,vanpact/scipy,mortonjt/scipy,ortylp/scipy,scipy/scipy,witcxc/scipy,dch312/scipy,lhilt/scipy,dominicelse/scipy,mtrbean/scipy,fernand/scipy,vigna/scipy,gfyoung/scipy,ChanderG/scipy,FRidh/scipy,chatcannon/scipy,aman-iitj/scipy,jor-/scipy,trankmichael/scipy,arokem/scipy,gdooper/scipy,gdooper/scipy,gef756/scipy,vhaasteren/scipy,tylerjereddy/scipy,anntzer/scipy,richardotis/scipy,nvoron23/scipy,nmayorov/scipy,sauliusl/scipy,fredrikw/scipy,ilayn/scipy,minhlongdo/scipy,gef756/scipy,newemailjdm/scipy,jor-/scipy,Dapid/scipy,scipy/scipy,lukauskas/scipy,mgaitan/scipy,endolith/scipy,zaxliu/scipy,nvoron23/scipy,jjhelmus/scipy,josephcslater/scipy,behzadnouri/scipy,ChanderG/scipy,Kamp9/scipy,ndchorley/scipy,aeklant/scipy,rgommers/scipy,woodscn/scipy,aarchiba/scipy,Stefan-Endres/scipy,maniteja123/scipy,haudren/scipy,mtrbean/scipy,anielsen001/scipy,WarrenWeckesser/scipy,Eric89GXL/scipy,aman-iitj/scipy,raoulbq/scipy,Shaswat27/scipy,pbrod/scipy,FRidh/scipy,argriffing/scipy,perimosocordiae/scipy,Srisai85/scipy,grlee77/scipy,larsmans/scipy,arokem/scipy,petebachant/scipy,larsmans/scipy,jamestwebber/scipy,gfyoung/scipy,bkendzior/scipy,rmcgibbo/scipy,matthew-brett/scipy,jonycgn/scipy,andim/scipy,nmayorov/scipy,apbard/scipy,WillieMaddox/scipy,dominicelse/scipy,sonnyhu/scipy,Newman101/scipy,rmcgibbo/scipy,andyfaff/scipy,apbard/scipy,mgaitan/scipy,zaxliu/scipy,grlee77/scipy,jamestwebber/scipy,mortonjt/scipy,endolith/scipy,pnedunuri/scipy,vberaudi/scipy,pyramania/scipy,fernand/scipy,pizzathief/scipy,befelix/scipy
|
scipy/linalg/_procrustes.py
|
scipy/linalg/_procrustes.py
|
"""
Solve the orthogonal Procrustes problem.
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from .decomp_svd import svd
__all__ = ['orthogonal_procrustes']
def orthogonal_procrustes(A, B, compute_scale=False, check_finite=True):
"""
Compute the matrix solution of the orthogonal Procrustes problem.
Given matrices A and B of equal shape, find an orthogonal matrix R
that most closely maps A to B [1]_.
Note that unlike higher level Procrustes analyses of spatial data,
this function only uses orthogonal transformations like rotations
and reflections, and it does not use scaling or translation.
Parameters
----------
A : (M, N) array_like
Matrix to be mapped.
B : (M, N) array_like
Target matrix.
compute_scale : bool, optional
True to return the sum of singular values of an intermediate matrix.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
R : (N, N) ndarray
The matrix solution of the orthogonal Procrustes problem.
Minimizes the Frobenius norm of dot(A, R) - B, subject to
dot(R.T, R) == I.
scale : float, optional
The sum of singular values of an intermediate matrix.
This value is not returned unless specifically requested.
Raises
------
ValueError
If the input arrays are incompatibly shaped.
This may also be raised if matrix A or B contains an inf or nan
and check_finite is True, or if the matrix product AB contains
an inf or nan.
References
----------
.. [1] Peter H. Schonemann, "A generalized solution of the orthogonal
Procrustes problem", Psychometrica -- Vol. 31, No. 1, March, 1996.
"""
if check_finite:
A = np.asarray_chkfinite(A)
B = np.asarray_chkfinite(B)
else:
A = np.asanyarray(A)
B = np.asanyarray(B)
if A.ndim != 2:
raise ValueError('expected ndim to be 2, but observed %s' % A.ndim)
if A.shape != B.shape:
raise ValueError('the shapes of A and B differ (%s vs %s)' % (
A.shape, B.shape))
# Be clever with transposes, with the intention to save memory.
u, w, vt = svd(B.T.dot(A).T)
R = u.dot(vt)
# Always return R, and maybe return a scaling factor.
if compute_scale:
scale = w.sum()
return R, scale
else:
return R
|
"""
Solve the orthogonal Procrustes problem.
"""
from __future__ import division, print_function, absolute_import
import numpy as np
from .decomp_svd import svd
__all__ = ['orthogonal_procrustes']
def orthogonal_procrustes(A, B, compute_scale=False, check_finite=True):
"""
Compute the matrix solution of the orthogonal Procrustes problem.
Given matrices A and B of equal shape, find an orthogonal matrix R
that most closely maps A to B [1]_.
Note that unlike higher level Procrustes analyses of spatial data,
this function only uses orthogonal transformations like rotations
and reflections, and it does not use scaling or translation.
Parameters
----------
A : (M, N) array_like
Matrix to be mapped.
B : (M, N) array_like
Target matrix.
compute_scale : bool, optional
True to return the sum of singular values of an intermediate matrix.
check_finite : bool, optional
Whether to check that the input matrices contain only finite numbers.
Disabling may give a performance gain, but may result in problems
(crashes, non-termination) if the inputs do contain infinities or NaNs.
Returns
-------
R : (N, N) ndarray
The matrix solution of the orthogonal Procrustes problem.
Minimizes the Frobenius norm of dot(A, R) - B, subject to
dot(R.T, R) == I.
scale : float, optional
The sum of singular values of an intermediate matrix.
This value is not returned unless specifically requested.
References
----------
.. [1] Peter H. Schonemann, "A generalized solution of the orthogonal
Procrustes problem", Psychometrica -- Vol. 31, No. 1, March, 1996.
"""
if check_finite:
A = np.asarray_chkfinite(A)
B = np.asarray_chkfinite(B)
else:
A = np.asanyarray(A)
B = np.asanyarray(B)
if A.ndim != 2:
raise ValueError('expected ndim to be 2, but observed %s' % A.ndim)
if A.shape != B.shape:
raise ValueError('the shapes of A and B differ (%s vs %s)' % (
A.shape, B.shape))
# Be clever with transposes, with the intention to save memory.
u, w, vt = svd(B.T.dot(A).T)
R = u.dot(vt)
# Always return R, and maybe return a scaling factor.
if compute_scale:
scale = w.sum()
return R, scale
else:
return R
|
bsd-3-clause
|
Python
|
d8b4dbfed17be90846ea4bc47b5f7b39ad944c24
|
Remove raw SQL from oscar_calculate_scores
|
lijoantony/django-oscar,bnprk/django-oscar,bnprk/django-oscar,binarydud/django-oscar,saadatqadri/django-oscar,amirrpp/django-oscar,Jannes123/django-oscar,amirrpp/django-oscar,MatthewWilkes/django-oscar,MatthewWilkes/django-oscar,django-oscar/django-oscar,jmt4/django-oscar,faratro/django-oscar,Jannes123/django-oscar,sonofatailor/django-oscar,sonofatailor/django-oscar,adamend/django-oscar,manevant/django-oscar,ademuk/django-oscar,bnprk/django-oscar,ahmetdaglarbas/e-commerce,nfletton/django-oscar,jmt4/django-oscar,thechampanurag/django-oscar,michaelkuty/django-oscar,ahmetdaglarbas/e-commerce,rocopartners/django-oscar,ka7eh/django-oscar,anentropic/django-oscar,dongguangming/django-oscar,lijoantony/django-oscar,django-oscar/django-oscar,django-oscar/django-oscar,pdonadeo/django-oscar,pasqualguerrero/django-oscar,sonofatailor/django-oscar,thechampanurag/django-oscar,jinnykoo/christmas,taedori81/django-oscar,pasqualguerrero/django-oscar,jinnykoo/wuyisj.com,thechampanurag/django-oscar,jinnykoo/wuyisj,jlmadurga/django-oscar,pasqualguerrero/django-oscar,Jannes123/django-oscar,kapari/django-oscar,QLGu/django-oscar,nfletton/django-oscar,okfish/django-oscar,sasha0/django-oscar,manevant/django-oscar,kapt/django-oscar,binarydud/django-oscar,jinnykoo/wuyisj.com,bnprk/django-oscar,machtfit/django-oscar,jmt4/django-oscar,bschuon/django-oscar,okfish/django-oscar,eddiep1101/django-oscar,michaelkuty/django-oscar,sasha0/django-oscar,manevant/django-oscar,kapt/django-oscar,michaelkuty/django-oscar,nickpack/django-oscar,jinnykoo/wuyisj.com,adamend/django-oscar,amirrpp/django-oscar,lijoantony/django-oscar,rocopartners/django-oscar,ahmetdaglarbas/e-commerce,pasqualguerrero/django-oscar,eddiep1101/django-oscar,jmt4/django-oscar,nfletton/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,bschuon/django-oscar,okfish/django-oscar,saadatqadri/django-oscar,Bogh/django-oscar,pdonadeo/django-oscar,lijoantony/django-oscar,marcoantoniooliveira/labweb,vovanbo/django-oscar,amirrpp/django-oscar,WadeYuChen/django-oscar,Jannes123/django-oscar,QLGu/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar,monikasulik/django-oscar,kapt/django-oscar,rocopartners/django-oscar,mexeniz/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj,vovanbo/django-oscar,nickpack/django-oscar,marcoantoniooliveira/labweb,sasha0/django-oscar,anentropic/django-oscar,kapari/django-oscar,mexeniz/django-oscar,MatthewWilkes/django-oscar,nickpack/django-oscar,michaelkuty/django-oscar,ademuk/django-oscar,josesanch/django-oscar,dongguangming/django-oscar,taedori81/django-oscar,jinnykoo/wuyisj,nickpack/django-oscar,mexeniz/django-oscar,machtfit/django-oscar,saadatqadri/django-oscar,Bogh/django-oscar,anentropic/django-oscar,john-parton/django-oscar,manevant/django-oscar,WillisXChen/django-oscar,jinnykoo/christmas,jinnykoo/wuyisj,spartonia/django-oscar,jinnykoo/christmas,ka7eh/django-oscar,spartonia/django-oscar,dongguangming/django-oscar,vovanbo/django-oscar,faratro/django-oscar,rocopartners/django-oscar,jlmadurga/django-oscar,okfish/django-oscar,marcoantoniooliveira/labweb,WillisXChen/django-oscar,vovanbo/django-oscar,monikasulik/django-oscar,WadeYuChen/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,eddiep1101/django-oscar,machtfit/django-oscar,binarydud/django-oscar,WillisXChen/django-oscar,WadeYuChen/django-oscar,ademuk/django-oscar,itbabu/django-oscar,marcoantoniooliveira/labweb,solarissmoke/django-oscar,anentropic/django-oscar,faratro/django-oscar,WadeYuChen/django-oscar,eddiep1101/django-oscar,ahmetdaglarbas/e-commerce,WillisXChen/django-oscar,john-parton/django-oscar
|
oscar/apps/analytics/scores.py
|
oscar/apps/analytics/scores.py
|
from django.db.models import F
from oscar.core.loading import get_model
ProductRecord = get_model('analytics', 'ProductRecord')
Product = get_model('catalogue', 'Product')
class Calculator(object):
# Map of field name to weight
weights = {
'num_views': 1,
'num_basket_additions': 3,
'num_purchases': 5
}
def __init__(self, logger):
self.logger = logger
def run(self):
self.calculate_scores()
self.update_product_models()
def calculate_scores(self):
self.logger.info("Calculating product scores")
total_weight = float(sum(self.weights.values()))
weighted_fields = [
self.weights[name] * F(name) for name in self.weights.keys()]
ProductRecord.objects.update(
score=sum(weighted_fields)/total_weight)
def update_product_models(self):
self.logger.info("Updating product records")
records = ProductRecord.objects.select_related('product')
for record in records:
record.product.score = record.score
record.product.save()
self.logger.info("Updated scores for %d products" % len(records))
|
from django.db import connection, transaction
from oscar.core.loading import get_model
ProductRecord = get_model('analytics', 'ProductRecord')
Product = get_model('catalogue', 'Product')
class Calculator(object):
# Map of field name to weight
weights = {'num_views': 1,
'num_basket_additions': 3,
'num_purchases': 5}
def __init__(self, logger):
self.logger = logger
self.cursor = connection.cursor()
def run(self):
self.calculate_scores()
self.update_product_models()
def calculate_scores(self):
self.logger.info("Calculating product scores")
# Build the "SET ..." part of the SQL statement
weighted_sum = " + ".join(
['%s*"%s"' % (weight, field) for field, weight
in self.weights.items()])
ctx = {'table': ProductRecord._meta.db_table,
'weighted_total': weighted_sum,
'total_weight': sum(self.weights.values())}
sql = '''UPDATE "%(table)s"
SET score = %(weighted_total)s / %(total_weight)s''' % ctx
self.logger.debug(sql)
self.cursor.execute(sql)
transaction.commit_unless_managed()
def update_product_models(self):
self.logger.info("Updating product records")
qs = ProductRecord.objects.all()
for record in qs:
record.product.score = record.score
record.product.save()
self.logger.info("Updated scores for %d products" % qs.count())
|
bsd-3-clause
|
Python
|
52b1448c6eb91ca9d59c7f2be5fa60f93262d52b
|
Bump version to 2.2.0
|
r1chardj0n3s/pip-check-reqs
|
pip_check_reqs/__init__.py
|
pip_check_reqs/__init__.py
|
__version__ = '2.2.0'
|
__version__ = '2.1.1'
|
mit
|
Python
|
695ea0a9b9768c713e03f9d3ec3ca4a702de0347
|
Update repeaterBotScript.py
|
Busterz/Telegram-Echo-Bot
|
script/repeaterBotScript.py
|
script/repeaterBotScript.py
|
# For the Repeater Bot
import telepot
# your bot key
bot = telepot.Bot("[YOUR_TOKEN]")
def handle(msg):
chat_id = msg['chat']['id']
chat_msg = msg['text']
bot.sendMessage(chat_id, chat_msg)
bot.message_loop(handle)
|
# For the Repeater Bot
import telepot
# your bot key
bot = telepot.Bot("228412441:AAH0hKP-WOlcFGsZRaSCETVKIFBZf7C4gXc")
def handle(msg):
chat_id = msg['chat']['id']
chat_msg = msg['text']
bot.sendMessage(chat_id, chat_msg)
bot.message_loop(handle)
|
mit
|
Python
|
2033166b57ec566c763a9ff70df9a4123243160d
|
Bump version to 1.17.4
|
platformio/platformio-api
|
platformio_api/__init__.py
|
platformio_api/__init__.py
|
# Copyright 2014-present Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging.config
import os
from time import tzset
VERSION = (1, 17, 4)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"
__author__ = "Ivan Kravets"
__email__ = "[email protected]"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"
config = dict(
SQLALCHEMY_DATABASE_URI=None,
GITHUB_LOGIN=None,
GITHUB_PASSWORD=None,
DL_PIO_DIR=None,
DL_PIO_URL=None,
MAX_DLFILE_SIZE=1024 * 1024 * 150, # 150 Mb
# Fuzzy search will not be applied to words shorter than the value below
SOLR_FUZZY_MIN_WORD_LENGTH=3,
LOGGING=dict(version=1)
)
assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
config.update(json.load(f))
# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])
# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
tzset()
|
# Copyright 2014-present Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging.config
import os
from time import tzset
VERSION = (1, 17, 3)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"
__author__ = "Ivan Kravets"
__email__ = "[email protected]"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2015 Ivan Kravets"
config = dict(
SQLALCHEMY_DATABASE_URI=None,
GITHUB_LOGIN=None,
GITHUB_PASSWORD=None,
DL_PIO_DIR=None,
DL_PIO_URL=None,
MAX_DLFILE_SIZE=1024 * 1024 * 150, # 150 Mb
# Fuzzy search will not be applied to words shorter than the value below
SOLR_FUZZY_MIN_WORD_LENGTH=3,
LOGGING=dict(version=1)
)
assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
config.update(json.load(f))
# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])
# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
tzset()
|
apache-2.0
|
Python
|
1ce9101f4ac12e400e0ba09a2221c16f3b02f0ab
|
Add iter_plugins_dirs() test
|
rossant/podoc,podoc/podoc,podoc/podoc,rossant/podoc
|
podoc/tests/test_plugin.py
|
podoc/tests/test_plugin.py
|
# -*- coding: utf-8 -*-
"""Test plugin system."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os.path as op
from ..core import save_text
from ..plugin import (IPluginRegistry, IPlugin, discover_plugins,
iter_plugins_dirs)
#------------------------------------------------------------------------------
# Fixtures
#------------------------------------------------------------------------------
def setup():
IPluginRegistry.plugins = []
def teardown():
IPluginRegistry.plugins = []
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_plugin_registration():
class MyPlugin(IPlugin):
pass
assert IPluginRegistry.plugins == [MyPlugin]
def test_discover_plugins(tempdir):
path = op.join(tempdir, 'my_plugin.py')
contents = '''from podoc import IPlugin\nclass MyPlugin(IPlugin): pass'''
save_text(path, contents)
plugins = discover_plugins([tempdir])
assert plugins
assert plugins[0].__name__ == 'MyPlugin'
def test_iter_plugins_dirs():
assert 'json' in [op.basename(plugin_dir)
for plugin_dir in iter_plugins_dirs()]
|
# -*- coding: utf-8 -*-
"""Test plugin system."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os.path as op
from ..core import save_text
from ..plugin import IPluginRegistry, IPlugin, discover_plugins
#------------------------------------------------------------------------------
# Fixtures
#------------------------------------------------------------------------------
def setup():
IPluginRegistry.plugins = []
def teardown():
IPluginRegistry.plugins = []
#------------------------------------------------------------------------------
# Tests
#------------------------------------------------------------------------------
def test_plugin_registration():
class MyPlugin(IPlugin):
pass
assert IPluginRegistry.plugins == [MyPlugin]
def test_discover_plugins(tempdir):
path = op.join(tempdir, 'my_plugin.py')
contents = '''from podoc import IPlugin\nclass MyPlugin(IPlugin): pass'''
save_text(path, contents)
plugins = discover_plugins([tempdir])
assert plugins
assert plugins[0].__name__ == 'MyPlugin'
|
bsd-3-clause
|
Python
|
7fa19f13df92a3ddea6f69519539ead6cdeab3af
|
implement list --version option
|
openstack/reno
|
reno/lister.py
|
reno/lister.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from reno import scanner
from reno import utils
def list_cmd(args):
"List notes files based on query arguments"
reporoot = args.reporoot.rstrip('/') + '/'
notesdir = utils.get_notes_dir(args)
notes = scanner.get_notes_by_version(reporoot, notesdir)
if args.version:
versions = args.version
else:
versions = notes.keys()
for version in versions:
notefiles = notes[version]
print(version)
for n in notefiles:
if n.startswith(reporoot):
n = n[len(reporoot):]
print('\t%s' % n)
return
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from reno import scanner
from reno import utils
def list_cmd(args):
"List notes files based on query arguments"
reporoot = args.reporoot.rstrip('/') + '/'
notesdir = utils.get_notes_dir(args)
notes = scanner.get_notes_by_version(reporoot, notesdir)
for version, notefiles in notes.items():
print(version)
for n in notefiles:
if n.startswith(reporoot):
n = n[len(reporoot):]
print('\t%s' % n)
return
|
apache-2.0
|
Python
|
98edde06a481cc18727a8c030bc670f7ab43b73f
|
increase timeout
|
efiop/dvc,dmpetrov/dataversioncontrol,dmpetrov/dataversioncontrol,efiop/dvc
|
scripts/pyinstaller/sign.py
|
scripts/pyinstaller/sign.py
|
import argparse
import os
import pathlib
import sys
from subprocess import STDOUT, check_call
if sys.platform != "darwin":
raise NotImplementedError
parser = argparse.ArgumentParser()
parser.add_argument(
"--application-id",
required=True,
help="Certificate ID (should be added to the keychain).",
)
args = parser.parse_args()
path = pathlib.Path(__file__).parent.absolute()
dvc = path / "dist" / "dvc"
for root, _, fnames in os.walk(dvc):
for fname in fnames:
fpath = os.path.join(root, fname)
print(f"signing {fpath}")
check_call(
[
"codesign",
"--force",
"--verbose",
"-s",
args.application_id,
"-o",
"runtime",
"--entitlements",
"entitlements.plist",
fpath,
],
stderr=STDOUT,
timeout=10,
)
|
import argparse
import os
import pathlib
import sys
from subprocess import STDOUT, check_call
if sys.platform != "darwin":
raise NotImplementedError
parser = argparse.ArgumentParser()
parser.add_argument(
"--application-id",
required=True,
help="Certificate ID (should be added to the keychain).",
)
args = parser.parse_args()
path = pathlib.Path(__file__).parent.absolute()
dvc = path / "dist" / "dvc"
for root, _, fnames in os.walk(dvc):
for fname in fnames:
fpath = os.path.join(root, fname)
print(f"signing {fpath}")
check_call(
[
"codesign",
"--force",
"--verbose",
"-s",
args.application_id,
"-o",
"runtime",
"--entitlements",
"entitlements.plist",
fpath,
],
stderr=STDOUT,
timeout=5,
)
|
apache-2.0
|
Python
|
8284279a5e92679976637bc9f966f4b776636e82
|
Use copies of net to avoid cross-talk between tests.
|
GutenkunstLab/SloppyCell,GutenkunstLab/SloppyCell
|
Testing/test_InitialConditions.py
|
Testing/test_InitialConditions.py
|
import copy
import unittest
import scipy
from TestNetwork import net
net = copy.deepcopy(net)
net.compile()
net.setInitialVariableValue('A', 1.0)
net.setInitialVariableValue('B', 2.0)
class test_ics(unittest.TestCase):
def test_default_initial_conditions(self):
"""Test that default ICs are handled correctly"""
test_net = net.copy()
traj = test_net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
ICy = traj.getVariableTrajectory('y')[0]
self.assertAlmostEqual(ICx, 1.0, 6, 'Failed on default IC')
self.assertAlmostEqual(ICy, 2.0, 6, 'Failed on default IC')
def test_resetting_initial_conditions(self):
"""Test resetting of ICs"""
test_net = net.copy()
test_net.set_initial_var_value('x', 0.5)
traj = test_net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 0.5, 6, 'Failed on resetting IC')
def test_parameter_ics(self):
"""Test parameters as ICs"""
test_net = net.copy()
test_net.set_initial_var_value('x', 'A')
traj = test_net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 1.0, 6, 'Failed on parameter IC')
def test_resetting_parameter(self):
"""Test changing parameters as ICs"""
test_net = net.copy()
test_net.set_initial_var_value('x', 'A')
test_net.set_initial_var_value('A', 0.9)
traj = test_net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 0.9, 6, 'Failed on changing parameter IC')
def test_expression_ICs(self):
"""Test math expression as IC"""
test_net = net.copy()
test_net.set_initial_var_value('x', 'A + 1.5*B')
traj = test_net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 4.0, 6, 'Failed on changing parameter IC')
suite = unittest.makeSuite(test_ics)
if __name__ == '__main__':
unittest.main()
|
import copy
import unittest
import scipy
from TestNetwork import net
net = copy.deepcopy(net)
net.compile()
net.setInitialVariableValue('A', 1.0)
net.setInitialVariableValue('B', 2.0)
class test_ics(unittest.TestCase):
def test_default_initial_conditions(self):
"""Test that default ICs are handled correctly"""
traj = net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
ICy = traj.getVariableTrajectory('y')[0]
self.assertAlmostEqual(ICx, 1.0, 6, 'Failed on default IC')
self.assertAlmostEqual(ICy, 2.0, 6, 'Failed on default IC')
def test_resetting_initial_conditions(self):
"""Test resetting of ICs"""
net.set_initial_var_value('x', 0.5)
traj = net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 0.5, 6, 'Failed on resetting IC')
def test_parameter_ics(self):
"""Test parameters as ICs"""
net.set_initial_var_value('x', 'A')
traj = net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 1.0, 6, 'Failed on parameter IC')
def test_resetting_parameter(self):
"""Test changing parameters as ICs"""
net.set_initial_var_value('x', 'A')
net.set_initial_var_value('A', 0.9)
traj = net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 0.9, 6, 'Failed on changing parameter IC')
def test_expression_ICs(self):
"""Test math expression as IC"""
net.set_initial_var_value('x', 'A + 1.5*B')
traj = net.integrate(scipy.linspace(0, 5, 5))
ICx = traj.getVariableTrajectory('x')[0]
self.assertAlmostEqual(ICx, 4.0, 6, 'Failed on changing parameter IC')
suite = unittest.makeSuite(test_ics)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
Python
|
35b9de04035e8e2d19c7d891ce758e95a5bc4679
|
Split bord.py into logical functions.
|
rpisharody/bord,rpisharody/bord
|
bord.py
|
bord.py
|
#!/usr/bin/python3
"""
bord - The Board for your website
bord is a static site generator written in Python 3.
There are many static site generators like bord.
This one is mine.
Written by Rahul Pisharody.
MIT Licence
"""
import argparse
import os
import re
import markdown
from jinja2 import FileSystemLoader, Environment
import configreader
md = markdown.Markdown()
def get_cmdline_arguments():
"""
Function wrapper to parse the input
arguments provided while invoking bord
"""
parser = argparse.ArgumentParser(
description='The Python3 Static Site Generator'
)
parser.add_argument(
"-c", "--config",
type=str,
default="~/.bord.rc",
help="Specify the config file to use"
)
return parser.parse_args()
def markdown_to_html(directory):
"""
Converts all markdown files in 'directory'
into plain HTML format.
:param directory Input Content Directory
Outputs a dictionary, key = filename, value = html content
"""
html_dict = {}
for inputFile in os.listdir(directory):
post = os.path.join(directory, inputFile)
try:
f = open(post, 'r', encoding='utf-8')
html = md.convert(f.read())
html = render_template(html)
html_dict[inputFile] = html
md.reset()
except IOError as err:
print('Error while opening', post)
print('[', err.errno, ']', err.filename, ':', err.strerror)
return html_dict
def render_template(html):
env = Environment(loader=FileSystemLoader("templates"))
template = env.get_template("template.html")
return template.render(content=html)
def generate_output(html, output_dir):
count = 0
try:
os.makedirs(output_dir)
except OSError as err:
print ('Error while creating directory', output_dir)
print ('[', err.errno, ']', err.filename, ':', err.strerror)
for inputFile in html:
outputFile = re.sub('\.md$', '.html', inputFile)
outputFile = os.path.join(output_dir, outputFile)
try:
f = open(outputFile, 'w', encoding='utf-8')
f.write(html[inputFile])
f.close()
count = count + 1
except IOError as err:
print (err.strerror)
return count
def main():
"""
The main() function of bord.
Reads/Sets up parameters and calls the
generate/render methods
"""
args = get_cmdline_arguments()
bord_config = configreader.ConfigReader(args.config)
html = markdown_to_html(bord_config['content_dir'])
count = generate_output(html, bord_config['output_dir'])
print ('Created', count, 'HTML files')
if __name__ == '__main__':
main()
|
#!/usr/bin/python3
"""
bord - The Board for your website
bord is a static site generator written in Python 3.
There are many static site generators like bord.
This one is mine.
Written by Rahul Pisharody.
MIT Licence
"""
import os
import re
import markdown
from jinja2 import FileSystemLoader, Environment
CWD = os.getcwd()
OUTPUT_DIR = 'output'
CONTENT_DIR = 'content'
md = markdown.Markdown()
CONTENT_DIR = os.path.join(CWD, CONTENT_DIR)
# site = [1, 2, 3, 4, 5]
try:
os.makedirs(os.path.join(CWD, OUTPUT_DIR))
except OSError as err:
print ('Error while creating directory', OUTPUT_DIR)
print ('[', err.errno, ']', err.filename, ':', err.strerror)
for inputFile in os.listdir(CONTENT_DIR):
inputFile = os.path.join(CONTENT_DIR, inputFile)
outputFile = re.sub('\.md$', '.html', os.path.basename(inputFile))
outputFile = os.path.join(CWD, OUTPUT_DIR, outputFile)
try:
f = open(inputFile, 'r', encoding='utf-8')
except IOError as e:
print ('Error while opening', inputFile)
print ('[', err.errno, ']', err.filename, ':', err.strerror)
html = md.convert(f.read())
env = Environment(loader=FileSystemLoader("templates"))
template = env.get_template("template.html")
html = template.render(
content=html)
f.close()
try:
f = open(outputFile, 'w', encoding='utf-8')
except IOError as r:
print (e.strerror)
f.write(html)
f.close()
md.reset()
|
mit
|
Python
|
aac855cb0339f87e1046811b9097858201cfe841
|
isolate helper functions
|
rigetticomputing/grove,rigetticomputing/grove
|
grove/alpha/jordan_gradient/gradient_helper.py
|
grove/alpha/jordan_gradient/gradient_helper.py
|
import numpy as np
def real_to_binary(number, precision=16):
""" Convert real decimal to precision-bit binary fraction
:param float number: Real decimal over [0, 1).
:param int precision: Number of bits of binary precision.
:return float bf: Binary fraction representation of real decimal.
"""
n_sign = np.sign(number)
number = abs(number)
bf = ''
for val in range(precision):
number = 2 * (number % 1)
bf += str(int(number))
bf = n_sign * float('.' + bf)
return bf
def binary_to_real(number):
""" Convert binary fraction to real decimal
:param float number: Floating point representation of binary fraction.
:return float deci: Real decimal representation of binary fraction.
"""
if isinstance(number, str):
if number[0] == '-':
n_sign = -1
else:
n_sign = 1
elif isinstance(number, float):
n_sign = np.sign(number)
deci = 0
for ndx, val in enumerate(str(number).split('.')[-1]):
deci += float(val) / 2**(ndx+1)
deci *= n_sign
return deci
def stats_to_bf(stats):
""" Convert measurement into gradient binary fraction
:param np.array stats: Output measurement statistics of gradient program.
:return float bf: Binary fraction representation of gradient estimate.
"""
stats_str = [str(int(i)) for i in np.ceil(stats[::-1][1:])]
bf_str = '0.' + ''.join(stats_str)
bf = float(bf_str)
return bf
|
import numpy as np
from jordan_gradient import gradient_estimator
def real_to_binary(number, precision=16):
""" Convert real decimal to precision-bit binary fraction
:param float number: Real decimal over [0, 1).
:param int precision: Number of bits of binary precision.
:return float bf: Binary fraction representation of real decimal.
"""
n_sign = np.sign(number)
number = abs(number)
bf = ''
for val in range(precision):
number = 2 * (number % 1)
bf += str(int(number))
bf = n_sign * float('.' + bf)
return bf
def binary_to_real(number):
""" Convert binary fraction to real decimal
:param float number: Floating point representation of binary fraction.
:return float deci: Real decimal representation of binary fraction.
"""
if isinstance(number, str):
if number[0] == '-':
n_sign = -1
else:
n_sign = 1
elif isinstance(number, float):
n_sign = np.sign(number)
deci = 0
for ndx, val in enumerate(str(number).split('.')[-1]):
deci += float(val) / 2**(ndx+1)
deci *= n_sign
return deci
def stats_to_bf(stats):
""" Convert measurement into gradient binary fraction
:param np.array stats: Output measurement statistics of gradient program.
:return float bf: Binary fraction representation of gradient estimate.
"""
stats_str = [str(int(i)) for i in np.ceil(stats[::-1][1:])]
bf_str = '0.' + ''.join(stats_str)
bf = float(bf_str)
return bf
def gradient_error(f_h, precision=5, n_measurements=100):
""" Computes error of gradient estimates for an input perturbation value
:param np.array/float f_h: Value of f at perturbation h.
:param int n_measurements: Number of times to run the gradient program.
:return float error: Error of gradient estimate.
"""
if isinstance(f_h, float):
d = 1 # f_h = np.array(f_h)
else:
d = f_h.ndim
# enumerate qubit register
N_qi = d * precision
input_qubits = list(range(N_qi))
ancilla_qubits = [N_qi]
# build program and run n_measurements times
p_g = gradient_estimator(f_h, input_qubits, ancilla_qubits, precision)
from pyquil.api import SyncConnection
qvm = SyncConnection()
measurements = np.array(qvm.run(p_g, input_qubits, n_measurements))
# summarize measurements and compute error
stats = measurements.sum(axis=0) / len(measurements)
bf_estimate = stats_to_bf(stats)
deci_estimate = binary_to_real(bf_estimate)
error = f_h - deci_estimate
return error
|
apache-2.0
|
Python
|
4aa1b4d79ef7777f060a05c70ead7a27dc6ed6e9
|
Add plot_coef
|
termoshtt/cujak,termoshtt/cujak
|
fft2d/plot.py
|
fft2d/plot.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import fft2d
import numpy as np
import matplotlib.pyplot as plt
import argparse
def _load_size(prop):
Lx = prop.Lx if prop.Lx > 0 else 1.0
Ly = prop.Ly if prop.Ly > 0 else 1.0
return Lx, Ly
def plot_field(filename, figure_ext):
arr, prop = fft2d.load(filename)
Nx, Ny = arr.shape
Lx, Ly = _load_size(prop)
X, Y = np.meshgrid(np.linspace(0, Ly, Ny), np.linspace(0, Lx, Nx))
plt.pcolormesh(X, Y, arr)
plt.colorbar()
plt.axis("image")
plt.savefig(filename + figure_ext)
def plot_coef(filename, figure_ext):
arr, prop = fft2d.load(filename)
Nx, Ny = arr.shape
Lx, Ly = _load_size(prop)
X, Y = np.meshgrid(range(Ny), range(Nx))
plt.subplot(211)
plt.pcolormesh(X, Y, np.real(arr))
plt.axis("tight")
plt.colorbar()
plt.title("Real")
plt.subplot(212)
plt.pcolormesh(X, Y, np.imag(arr))
plt.axis("tight")
plt.colorbar()
plt.title("Imaginary")
plt.savefig(filename + figure_ext)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("filename")
parser.add_argument("-e", "--extenion", default=".png")
args = parser.parse_args()
ext = args.filename[-1]
figure_ext = args.extenion if args.extenion[0] == "." else "." + args.extenion
{
"f": plot_field,
"c": plot_coef,
}[ext](args.filename, figure_ext)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import fft2d
import numpy as np
import matplotlib.pyplot as plt
import argparse
def main(args):
arr, prop = fft2d.load(args.filename)
Nx, Ny = arr.shape
Lx = prop.Lx if prop.Lx > 0 else 1.0
Ly = prop.Ly if prop.Ly > 0 else 1.0
X, Y = np.meshgrid(np.linspace(0, Ly, Ny), np.linspace(0, Lx, Nx))
plt.pcolormesh(X, Y, arr)
plt.colorbar()
plt.axis("image")
ext = args.extenion if args.extenion[0] == "." else "." + args.extenion
plt.savefig(args.filename + ext)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("filename")
parser.add_argument("-e", "--extenion", default=".png")
args = parser.parse_args()
main(args)
|
bsd-2-clause
|
Python
|
7ee29cfee740d6096fca8379253073077890a54c
|
Add more info to the redis example.
|
seabirdzh/disco,discoproject/disco,oldmantaiter/disco,pooya/disco,oldmantaiter/disco,simudream/disco,pombredanne/disco,seabirdzh/disco,discoproject/disco,simudream/disco,discoproject/disco,ErikDubbelboer/disco,mozilla/disco,mozilla/disco,pombredanne/disco,ErikDubbelboer/disco,mwilliams3/disco,pooya/disco,mwilliams3/disco,pombredanne/disco,beni55/disco,pombredanne/disco,pooya/disco,mwilliams3/disco,pombredanne/disco,beni55/disco,seabirdzh/disco,ktkt2009/disco,ktkt2009/disco,discoproject/disco,seabirdzh/disco,seabirdzh/disco,mwilliams3/disco,beni55/disco,beni55/disco,simudream/disco,ktkt2009/disco,ErikDubbelboer/disco,ktkt2009/disco,mwilliams3/disco,simudream/disco,oldmantaiter/disco,mozilla/disco,ErikDubbelboer/disco,simudream/disco,mozilla/disco,beni55/disco,ErikDubbelboer/disco,pooya/disco,ktkt2009/disco,discoproject/disco,oldmantaiter/disco,oldmantaiter/disco
|
examples/util/wordcount_redis.py
|
examples/util/wordcount_redis.py
|
"""
Usage:
python wordcount_redis.py redis://redis_server:6379:0 redis://redis_server:6379:1
The input is read from db 0 and the output is written to db 1. The inputs
should be of the form (key, list_of_values) (they are read from the server with the
lrange command. See the redis documentation for more info).
The output will also be of the form (key, list_of_values). The reason we use
this approach is to unify the mechanism for the intermediate input-outputs
(which must be (key, list_of_values) with the inputs and outputs).
"""
from disco.schemes.scheme_redis import redis_output_stream
from disco.worker.task_io import task_output_stream
from disco.core import Job, result_iterator
class WordCount(Job):
reduce_output_stream = (task_output_stream, redis_output_stream)
@staticmethod
def map(line, params):
k, v = line
yield v, 1
@staticmethod
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
if __name__ == '__main__':
import sys
if len(sys.argv) != 3:
print "Usage: python wordcount_redis.py <input redis> <output redis>"
sys.exit(1)
from wordcount_redis import WordCount
job = WordCount()
job.params = {}
job.params['url'] = sys.argv[2]
job.run(input=[sys.argv[1]])
job.wait(show=True)
|
from disco.schemes.scheme_redis import redis_output_stream
from disco.worker.task_io import task_output_stream
from disco.core import Job, result_iterator
class WordCount(Job):
reduce_output_stream = (task_output_stream, redis_output_stream)
@staticmethod
def map(line, params):
k, v = line
yield v, 1
@staticmethod
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
if __name__ == '__main__':
import sys
if len(sys.argv) != 3:
print "Usage: python wordcount_redis.py <input redis> <output redis>"
sys.exit(1)
from wordcount_redis import WordCount
job = WordCount()
job.params = {}
job.params['url'] = sys.argv[2]
job.run(input=[sys.argv[1]])
job.wait(show=True)
|
bsd-3-clause
|
Python
|
a08c04151e88eb95ae05484adb8b3f64ef91bd87
|
Update example
|
gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf,sunqm/pyscf,gkc1000/pyscf,sunqm/pyscf,gkc1000/pyscf,gkc1000/pyscf,sunqm/pyscf
|
examples/tools/print_mo_and_dm.py
|
examples/tools/print_mo_and_dm.py
|
#!/usr/bin/env python
#
# Author: Qiming Sun <[email protected]>
#
import sys
import numpy
from pyscf import gto, scf, tools
'''
Formatted output for 2D array
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
nf = mol.nao_nr()
orb = numpy.random.random((nf,4))
#
# Print orbital coefficients
#
tools.dump_mat.dump_mo(mol, orb)
#
# Print lower triangular part of an array
#
dm = numpy.eye(3)
tools.dump_mat.dump_tri(sys.stdout, dm)
#
# Print rectangular matrix
#
mol = gto.M(atom='C 0 0 0',basis='6-31g')
dm = numpy.eye(mol.nao_nr())
tools.dump_mat.dump_rec(sys.stdout, dm, label=mol.spheric_labels(True),
ncol=9, digits=2)
#
# Change the default output format of .analyze function.
#
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
mf = scf.RHF(mol).run()
mf.analyze(verbose=5, ncol=10, digits=9)
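To make the lower-triangular output concrete, here is a plain-numpy sketch of the same idea (pyscf's dump_tri adds row/column labels, so its exact layout differs):
import numpy
dm = numpy.arange(9.).reshape(3, 3)
for i in range(dm.shape[0]):
    # print row i up to and including the diagonal entry
    print(' '.join('%10.5f' % dm[i, j] for j in range(i + 1)))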
|
#!/usr/bin/env python
#
# Author: Qiming Sun <[email protected]>
#
import sys
import numpy
from pyscf import gto, tools
'''
Formatted output for 2D array
'''
mol = gto.M(atom='H 0 0 0; F 0 0 1', basis='ccpvdz')
nf = mol.nao_nr()
orb = numpy.random.random((nf,4))
#
# Print orbital coefficients
#
tools.dump_mat.dump_mo(mol, orb)
#
# Print lower triangular part of an array
#
dm = numpy.eye(3)
tools.dump_mat.dump_tri(sys.stdout, dm)
#
# Print rectangular matrix
#
mol = gto.M(atom='C 0 0 0',basis='6-31g')
dm = numpy.eye(mol.nao_nr())
tools.dump_mat.dump_rec(sys.stdout, dm, label=mol.spheric_labels(True),
ncol=9, digits=2)
|
apache-2.0
|
Python
|
ad595aae697abbf7c7de79a4a2448f0c47133934
|
Make viewer runnable from outer dirs.
|
mihneadb/python-execution-trace,mihneadb/python-execution-trace,mihneadb/python-execution-trace
|
execution_trace/viewer/viewer.py
|
execution_trace/viewer/viewer.py
|
# Run with `python viewer.py PATH_TO_RECORD_JSON`.
import json
import os
import sys
from flask import Flask, jsonify
from flask.helpers import send_from_directory
app = Flask(__name__)
viewer_root = os.path.abspath(os.path.dirname(__file__))
# `main` inits these.
# File containing `record` output.
record_path = None
# 0 is source, 1:N is state
record_data = []
@app.route("/")
def hello():
return send_from_directory(viewer_root, 'index.html')
@app.route("/source.json")
def source():
return jsonify(record_data[0])
@app.route("/state.json")
def state():
return jsonify({'data': record_data[1:]})
def main():
record_path = sys.argv[1]
with open(record_path) as f:
record_data.append(json.loads(f.readline()))
for line in f:
record_data.append(json.loads(line))
app.run()
if __name__ == "__main__":
main()
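The key change is anchoring file lookups on __file__ instead of the current working directory; a minimal sketch of the pattern:
import os
# '.' resolves relative to os.getcwd(), which depends on where the
# script is launched; paths derived from __file__ do not.
here = os.path.abspath(os.path.dirname(__file__))
index_path = os.path.join(here, 'index.html')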
|
# Run with `python viewer.py PATH_TO_RECORD_JSON`.
import json
import sys
from flask import Flask, jsonify
from flask.helpers import send_from_directory
app = Flask(__name__)
# `main` inits these.
# File containing `record` output.
record_path = None
# 0 is source, 1:N is state
record_data = []
@app.route("/")
def hello():
return send_from_directory('.', 'index.html')
@app.route("/source.json")
def source():
return jsonify(record_data[0])
@app.route("/state.json")
def state():
return jsonify({'data': record_data[1:]})
def main():
record_path = sys.argv[1]
with open(record_path) as f:
record_data.append(json.loads(f.readline()))
for line in f:
record_data.append(json.loads(line))
app.run()
if __name__ == "__main__":
main()
|
mit
|
Python
|
385fbdc0401c979a71b0ff326852292bbb6a6ceb
|
Print coverages and deal with missing files
|
charanpald/APGL
|
exp/influence2/ProcessResults.py
|
exp/influence2/ProcessResults.py
|
import numpy
import matplotlib
matplotlib.use("GTK3Agg")
import matplotlib.pyplot as plt
from exp.influence2.ArnetMinerDataset import ArnetMinerDataset
from apgl.util.Latex import Latex
from apgl.util.Util import Util
from apgl.util.Evaluator import Evaluator
ranLSI = False
numpy.set_printoptions(suppress=True, precision=3, linewidth=100)
dataset = ArnetMinerDataset(runLSI=ranLSI)
ns = numpy.arange(5, 55, 5)
averagePrecisionN = 30
bestPrecisions = numpy.zeros((len(ns), len(dataset.fields)))
bestAveragePrecisions = numpy.zeros(len(dataset.fields))
coverages = numpy.load(dataset.coverageFilename)
print("==== Coverages ====")
print(coverages)
for s, field in enumerate(dataset.fields):
if ranLSI:
outputFilename = dataset.getOutputFieldDir(field) + "outputListsLSI.npz"
else:
outputFilename = dataset.getOutputFieldDir(field) + "outputListsLDA.npz"
try:
outputLists, expertMatchesInds = Util.loadPickle(outputFilename)
numMethods = len(outputLists)
precisions = numpy.zeros((len(ns), numMethods))
averagePrecisions = numpy.zeros(numMethods)
for i, n in enumerate(ns):
for j in range(len(outputLists)):
precisions[i, j] = Evaluator.precisionFromIndLists(expertMatchesInds, outputLists[j][0:n])
for j in range(len(outputLists)):
averagePrecisions[j] = Evaluator.averagePrecisionFromLists(expertMatchesInds, outputLists[j][0:averagePrecisionN], averagePrecisionN)
print(field)
print(precisions)
print(averagePrecisions)
bestInd = numpy.argmax(averagePrecisions)
plt.plot(ns, precisions[:, bestInd], label=field)
bestPrecisions[:, s] = precisions[:, bestInd]
bestAveragePrecisions[s] = averagePrecisions[bestInd]
except IOError as e:
print(e)
bestPrecisions2 = numpy.c_[numpy.array(ns), bestPrecisions]
print(Latex.array2DToRows(bestPrecisions2))
print(Latex.array1DToRow(bestAveragePrecisions))
print(dataset.fields)
plt.legend()
plt.show()
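The LaTeX table above is assembled with numpy.c_, which column-stacks arrays; a tiny self-contained example of the same construction:
import numpy
ns = numpy.arange(5, 20, 5)       # shape (3,)
best = numpy.zeros((3, 2))        # shape (3, 2)
table = numpy.c_[ns, best]        # shape (3, 3): first column is n
print(table)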
|
import numpy
import matplotlib
matplotlib.use("GTK3Agg")
import matplotlib.pyplot as plt
from exp.influence2.ArnetMinerDataset import ArnetMinerDataset
from apgl.util.Latex import Latex
from apgl.util.Util import Util
from apgl.util.Evaluator import Evaluator
numpy.set_printoptions(suppress=True, precision=3, linewidth=100)
dataset = ArnetMinerDataset()
ns = numpy.arange(5, 55, 5)
averagePrecisionN = 20
bestPrecisions = numpy.zeros((len(ns), len(dataset.fields)))
bestAveragePrecisions = numpy.zeros(len(dataset.fields))
for i, field in enumerate(dataset.fields):
outputFilename = dataset.getResultsDir(field) + "outputLists.npz"
outputLists, expertMatchesInds = Util.loadPickle(outputFilename)
numMethods = len(outputLists)
precisions = numpy.zeros((len(ns), numMethods))
averagePrecisions = numpy.zeros(numMethods)
for i, n in enumerate(ns):
for j in range(len(outputLists)):
precisions[i, j] = Evaluator.precisionFromIndLists(expertMatchesInds, outputLists[j][0:n])
for j in range(len(outputLists)):
averagePrecisions[j] = Evaluator.averagePrecisionFromLists(expertMatchesInds, outputLists[j][0:averagePrecisionN], averagePrecisionN)
print(field)
print(precisions)
print(averagePrecisions)
bestInd = numpy.argmax(averagePrecisions)
plt.plot(ns, precisions[:, bestInd], label=field)
bestPrecisions[:, i] = precisions[:, bestInd]
bestAveragePrecisions[i] = averagePrecisions[bestInd]
bestPrecisions2 = numpy.c_[numpy.array(ns), bestPrecisions]
print(Latex.array2DToRows(bestPrecisions2))
print(Latex.array1DToRow(bestAveragePrecisions))
print(dataset.fields)
plt.legend()
plt.show()
|
bsd-3-clause
|
Python
|
1c939a99e377ff1dfe037c47dd99f635d3cb0a1f
|
Remove Cotswold election id (update expected)
|
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
|
polling_stations/apps/data_collection/management/commands/import_cotswold.py
|
polling_stations/apps/data_collection/management/commands/import_cotswold.py
|
from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000079'
addresses_name = 'CotswoldPropertyPostCodePollingStationWebLookup-2017-03-27.TSV'
stations_name = 'CotswoldPropertyPostCodePollingStationWebLookup-2017-03-27.TSV'
elections = [
'local.gloucestershire.2017-05-04',
#'parl.2017-06-08'
]
csv_delimiter = '\t'
|
from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000079'
addresses_name = 'CotswoldPropertyPostCodePollingStationWebLookup-2017-03-27.TSV'
stations_name = 'CotswoldPropertyPostCodePollingStationWebLookup-2017-03-27.TSV'
elections = [
'local.gloucestershire.2017-05-04',
'parl.2017-06-08'
]
csv_delimiter = '\t'
|
bsd-3-clause
|
Python
|
55fd2ec6454df9c62e40c5115be94a00bf944bc4
|
Allow list_classes.py to be loaded without side-effects
|
amolenaar/gaphor,amolenaar/gaphor
|
examples/list_classes.py
|
examples/list_classes.py
|
#!/usr/bin/python
"""This script lists classes and optionally attributes from UML model created
with Gaphor.
"""
import optparse
import sys
from gaphor import UML
from gaphor.application import Session
# Setup command line options.
usage = "usage: %prog [options] file.gaphor"
def main():
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"-a",
"--attributes",
dest="attrs",
action="store_true",
help="Print class attributes",
)
(options, args) = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(1)
# The model file to load.
model = args[0]
# Create the Gaphor application object.
session = Session()
# Get services we need.
element_factory = session.get_service("element_factory")
file_manager = session.get_service("file_manager")
# Load model from file.
file_manager.load(model)
# Find all classes using factory select.
for cls in element_factory.select(UML.Class):
print(f"Found class {cls.name}")
if options.attrs:
for attr in cls.ownedAttribute:
print(f" Attribute: {attr.name}")
if __name__ == "__main__":
main()
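The commit applies the standard no-side-effects-on-import pattern; a minimal standalone sketch:
def main():
    # all side effects (argument parsing, file loading) live here
    print('running')
if __name__ == '__main__':
    main()  # runs when executed as a script, but not on plain import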
|
#!/usr/bin/python
"""This script lists classes and optionally attributes from UML model created
with Gaphor.
"""
import optparse
import sys
from gaphor import UML
from gaphor.application import Session
# Setup command line options.
usage = "usage: %prog [options] file.gaphor"
parser = optparse.OptionParser(usage=usage)
parser.add_option(
"-a",
"--attributes",
dest="attrs",
action="store_true",
help="Print class attributes",
)
(options, args) = parser.parse_args()
if len(args) != 1:
parser.print_help()
sys.exit(1)
# The model file to load.
model = args[0]
# Create the Gaphor application object.
session = Session()
# Get services we need.
element_factory = session.get_service("element_factory")
file_manager = session.get_service("file_manager")
# Load model from file.
file_manager.load(model)
# Find all classes using factory select.
for cls in element_factory.select(UML.Class):
print(f"Found class {cls.name}")
if options.attrs:
for attr in cls.ownedAttribute:
print(f" Attribute: {attr.name}")
|
lgpl-2.1
|
Python
|
8a348c30e2381e76f09355f3cf23a9e87e060d80
|
Update without_end.py
|
RCoon/CodingBat,RCoon/CodingBat
|
Python/String_1/without_end.py
|
Python/String_1/without_end.py
|
# Given a string, return a version without the first and last char, so "Hello"
# yields "ell". The string length will be at least 2.
# without_end('Hello') --> 'ell'
# without_end('java') --> 'av'
# without_end('coding') --> 'odin'
def without_end(str):
return str[1:len(str) - 1]
print(without_end('Hello'))
print(without_end('java'))
print(without_end('coding'))
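A note on idiom: since Python slices accept negative indices, str[1:len(str) - 1] can be written more simply (s is used instead of str to avoid shadowing the built-in); a sketch:
def without_end(s):
    return s[1:-1]  # drop the first and last characters in one slice
assert without_end('Hello') == 'ell'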
|
# Given a string, return a version without the first and last char, so "Hello"
# yields "ell". The string length will be at least 2.
# without_end('Hello') -> 'ell'
# without_end('java') -> 'av'
# without_end('coding') -> 'odin'
def without_end(str):
return str[1:len(str) - 1]
print(without_end('Hello'))
print(without_end('java'))
print(without_end('coding'))
|
mit
|
Python
|
9259bcd9d4dd745a5f34dff8dd4286eecf4a62f2
|
Update verifyAuthenticodePS.py
|
jgstew/tools,jgstew/tools,jgstew/tools,jgstew/tools
|
Python/verifyAuthenticodePS.py
|
Python/verifyAuthenticodePS.py
|
# NOTE: This is Windows Only - tested in Python2.7.1
# https://twitter.com/jgstew/status/1011657455275610112
# https://github.com/jgstew/tools/blob/master/CMD/PS_VerifyFileSig.bat
# https://github.com/jgstew/tools/blob/master/Python/verifyAuthenticode.py
# powershell -ExecutionPolicy Bypass -command "(Get-AuthenticodeSignature \"C:\Windows\explorer.exe\").Status -eq 'Valid'"
import subprocess
import sys
sFileName = r"C:\Windows\explorer.exe"
# TODO: use `-ExecutionPolicy Bypass` somehow
# TODO: capture the output in python and evaluate it rather than just passing through the PowerShell result with stdout=sys.stdout
# TODO: not sure if `Popen` is the best `subprocess` option. I'm just using the first thing that worked that I found:
# https://stackoverflow.com/questions/21944895/running-powershell-script-within-python-script-how-to-make-python-print-the-pow
psResult = subprocess.Popen( ["powershell", r'(Get-AuthenticodeSignature "' + sFileName + r'").Status -eq "Valid"'], stdout=sys.stdout )
psResult.communicate()
# This will output `True` if the signature is valid.
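One TODO above asks for capturing the output in Python rather than passing it through; a minimal sketch, continuing the script above and assuming the same PowerShell command line works unchanged:
proc = subprocess.Popen(
    ["powershell", r'(Get-AuthenticodeSignature "' + sFileName + r'").Status -eq "Valid"'],
    stdout=subprocess.PIPE)
out, _ = proc.communicate()
# PowerShell prints True/False; normalize and compare the captured text
print(out.strip().lower() == b"true")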
|
# NOTE: This is Windows Only - tested in Python2.7.1
# https://twitter.com/jgstew/status/1011657455275610112
# https://github.com/jgstew/tools/blob/master/CMD/PS_VerifyFileSig.bat
# https://github.com/jgstew/tools/blob/master/Python/verifyAuthenticode.py
# powershell -ExecutionPolicy Bypass -command "(Get-AuthenticodeSignature \"C:\Windows\explorer.exe\").Status -eq 'Valid'"
import subprocess
import sys
sFileName = r"C:\Windows\explorer.exe"
# TODO: use `-ExecutionPolicy Bypass` somehow
# TODO: not sure if `Popen` is the best `subprocess` option. I'm just using the first thing that worked that I found:
# https://stackoverflow.com/questions/21944895/running-powershell-script-within-python-script-how-to-make-python-print-the-pow
psResult = subprocess.Popen( ["powershell", r'(Get-AuthenticodeSignature "' + sFileName + r'").Status -eq "Valid"'], stdout=sys.stdout )
psResult.communicate()
# This will output `True` if the signature is valid.
|
mit
|
Python
|
f1ef248f046c91683df8e6837249cc407a5f1cf2
|
Update ngrokwebhook.py
|
jbogarin/ciscosparkapi
|
examples/ngrokwebhook.py
|
examples/ngrokwebhook.py
|
#sample script that reads ngrok info from localhost:4040 and creates a Cisco Spark webhook
#typically ngrok is launched as "ngrok http 8080" to redirect localhost:8080 to an
#Internet-accessible ngrok url
#
#To use the script, simply launch ngrok, then launch this script. After ngrok is killed,
#run this script a second time to remove the webhook from Cisco Spark
import requests
import json
import re
import sys
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
from ciscosparkapi import CiscoSparkAPI, Webhook
def findwebhookidbyname(api, webhookname):
webhooks = api.webhooks.list()
for wh in webhooks:
if wh.name == webhookname:
return wh.id
else:
return "not found"
#Webhook attributes
webhookname="testwebhook"
resource="messages"
event="created"
url_suffix="/sparkwebhook"
#grab the at from a local at.txt file instead of global variable
fat=open ("at.txt","r+")
at=fat.readline().rstrip()
fat.close()
api = CiscoSparkAPI(at)
#go to the localhost page for ngrok and grab the public url for http
try:
ngrokpage = requests.get("http://127.0.0.1:4040").text
except:
print ("no ngrok running - deleting webhook if it exists")
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
print ("no webhook found")
sys.exit()
else:
print (whid)
dict=api.webhooks.delete(whid)
print (dict)
print ("Webhook deleted")
sys.exit()
for line in ngrokpage.split("\n"):
if "window.common = " in line:
ngrokjson = re.search('JSON.parse\(\"(.+)\"\)\;',line).group(1)
ngrokjson = (ngrokjson.replace('\\',''))
print (ngrokjson)
Url = (json.loads(ngrokjson)["Session"]["Tunnels"]["command_line (http)"]["URL"])+url_suffix
print (Url)
#check if the webhook exists by name and then create it if not
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
#create
print ("not found")
dict=api.webhooks.create(webhookname, Url, resource, event)
print (dict)
else:
#update
print (whid)
dict=api.webhooks.update(whid, name=webhookname, targetUrl=Url)
print (dict)
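As an aside, newer ngrok builds also expose a JSON endpoint on the same port, which avoids scraping the HTML page; a hedged sketch, continuing the script above (the /api/tunnels endpoint name and its response shape are assumptions about ngrok's local API):
tunnels = requests.get("http://127.0.0.1:4040/api/tunnels").json()["tunnels"]
# pick the https tunnel, if one exists
public_url = next(t["public_url"] for t in tunnels if t["proto"] == "https")
print(public_url + url_suffix)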
|
#sample script that reads ngrok info from localhost:4040 and creates a Cisco Spark webhook
#typically ngrok is launched as "ngrok http 8080" to redirect localhost:8080 to an
#Internet-accessible ngrok url
#
#To use the script, simply launch ngrok, then launch this script. After ngrok is killed,
#run this script a second time to remove the webhook from Cisco Spark
import requests
import json
import re
import sys
import requests.packages.urllib3
requests.packages.urllib3.disable_warnings()
from ciscosparkapi import CiscoSparkAPI, Webhook
def findwebhookidbyname(api, webhookname):
webhooks = api.webhooks.list()
for wh in webhooks:
if wh.name == webhookname:
return wh.id
else:
return "not found"
#Webhook attributes
webhookname="testwebhook"
resource="messages"
event="created"
url_suffix="/sparkwebhook"
#grab the at from a local at.txt file instead of global variable
fat=open ("at.txt","r+")
at=fat.readline().rstrip()
fat.close()
api = CiscoSparkAPI(at)
#go to the localhost page for ngrok and grab the public url for http
try:
ngrokpage = requests.get("http://127.0.0.1:4040").text
except:
print ("no ngrok running - deleting webhook if it exists")
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
print ("no webhook found")
sys.exit()
else:
print (whid)
dict=api.webhooks.delete(whid)
print (dict)
print ("Webhook deleted")
sys.exit()
for line in ngrokpage.split("\n"):
if "window.common = " in line:
ngrokjson = re.search('JSON.parse\(\"(.+)\"\)\;',line).group(1)
ngrokjson = (ngrokjson.replace('\\',''))
print (ngrokjson)
Url = (json.loads(ngrokjson)["Session"]["Tunnels"]["command_line (http)"]["URL"])+url_suffix
print (Url)
#check if the webhook exists by name and then create it if not
whid=findwebhookidbyname(api, webhookname)
if "not found" in whid:
#create
print ("not found")
dict=api.webhooks.create(webhookname, targetUrl, resource, event)
print (dict)
else:
#update
print (whid)
dict=api.webhooks.update(whid, name=webhookname, targetUrl=Url)
print (dict)
|
mit
|
Python
|
fc8bfc1f2cda0844adbf1d831a1a5e1888f8949b
|
add regression test for behavior when fallbackfee is disabled
|
ElementsProject/elements,ElementsProject/elements,ElementsProject/elements,ElementsProject/elements,ElementsProject/elements,ElementsProject/elements
|
test/functional/wallet_fallbackfee.py
|
test/functional/wallet_fallbackfee.py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet replace-by-fee capabilities in conjunction with the fallbackfee."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
from test_framework.util import rpc_port ## ELEMENTS
class WalletRBFTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.nodes[0].generate(101)
# sending a transaction without fee estimations must be possible by default on regtest
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
# test sending a tx with disabled fallback fee (must fail)
self.restart_node(0, extra_args=["-fallbackfee=0"])
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].fundrawtransaction(self.nodes[0].createrawtransaction([], [{self.nodes[0].getnewaddress(): 1}])))
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendmany("", {self.nodes[0].getnewaddress(): 1}))
## ELEMENTS: test claimpegin with fallback fee set to zero
# getpeginaddress does not work with descriptor wallets yet
if not self.options.descriptors:
extra_args = [
'-fallbackfee=0',
'-mainchainrpchost=127.0.0.1',
'-mainchainrpcport=%s' % rpc_port(0),
'-parentgenesisblockhash=%s' % self.nodes[0].getblockhash(0),
'-con_parent_chain_signblockscript=51',
'-parentscriptprefix=75',
]
self.restart_node(0)
self.restart_node(1, extra_args)
addrs = self.nodes[1].getpeginaddress()
txid = self.nodes[0].sendtoaddress(addrs["mainchain_address"], 5)
raw = self.nodes[0].getrawtransaction(txid)
self.nodes[0].generate(12)
proof = self.nodes[0].gettxoutproof([txid])
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[1].claimpegin(raw, proof))
# Try again with fallbackfee below the min relay fee. It should just work
# (will let the relay fee override the fallbackfee)
extra_args[0] = '-fallbackfee=0.00000001'
self.restart_node(1, extra_args)
self.nodes[1].claimpegin(raw, proof)
if __name__ == '__main__':
WalletRBFTest().main()
|
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet replace-by-fee capabilities in conjunction with the fallbackfee."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class WalletRBFTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.nodes[0].generate(101)
# sending a transaction without fee estimations must be possible by default on regtest
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
# test sending a tx with disabled fallback fee (must fail)
self.restart_node(0, extra_args=["-fallbackfee=0"])
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].fundrawtransaction(self.nodes[0].createrawtransaction([], [{self.nodes[0].getnewaddress(): 1}])))
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendmany("", {self.nodes[0].getnewaddress(): 1}))
if __name__ == '__main__':
WalletRBFTest().main()
|
mit
|
Python
|
94c1b54ef1db5180c5f1d07e857bf91d6b6fbf25
|
fix for python2.7 on windows
|
mocobeta/janome,nakagami/janome,nakagami/janome,mocobeta/janome
|
examples/usage_stream.py
|
examples/usage_stream.py
|
# -*- coding: utf-8 -*-
from janome.tokenizer import Tokenizer
import sys
from io import open
PY3 = sys.version_info[0] == 3
print(u'Tokenize (stream mode)')
t = Tokenizer(mmap=True)
with open('text_lemon.txt', encoding='utf-8') as f:
text = f.read()
if not PY3:
text = unicode(text)
for token in t.tokenize(text, stream=True):
print(token)
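The fix most likely works because io.open(..., encoding='utf-8') already returns decoded text on Python 2, so the text must not be decoded a second time; a minimal illustration of the assumed failure mode:
import sys
if sys.version_info[0] == 2:
    u = unicode('abc')   # fine: constructing unicode from a str
    # unicode(u, 'utf-8') would raise "TypeError: decoding Unicode is
    # not supported" once u is already unicode, which is why the
    # explicit 'utf-8' argument had to go.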
|
# -*- coding: utf-8 -*-
from janome.tokenizer import Tokenizer
import sys
from io import open
PY3 = sys.version_info[0] == 3
print(u'Tokenize (stream mode)')
t = Tokenizer(mmap=True)
with open('text_lemon.txt', encoding='utf-8') as f:
text = f.read()
if not PY3:
text = unicode(text, 'utf-8')
for token in t.tokenize(text, stream=True):
print(token)
|
apache-2.0
|
Python
|
331cef286380f3369344dbc9a60e460619c17405
|
fix dnn
|
FederatedAI/FATE,FederatedAI/FATE,FederatedAI/FATE
|
federatedml/nn/homo_nn/zoo/dnn.py
|
federatedml/nn/homo_nn/zoo/dnn.py
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from federatedml.nn.homo_nn.backend.tf_keras.layers import has_builder, DENSE, DROPOUT
from federatedml.nn.homo_nn.backend.tf_keras.nn_model import KerasNNModel
from federatedml.nn.homo_nn.zoo import nn
def is_dnn_supported_layer(layer):
return has_builder(layer) and layer in {DENSE, DROPOUT}
def build_nn_model(input_shape, nn_define, loss, optimizer, metrics,
is_supported_layer=is_dnn_supported_layer) -> KerasNNModel:
return nn.build_nn_model(input_shape=input_shape,
nn_define=nn_define,
loss=loss,
optimizer=optimizer,
metrics=metrics,
is_supported_layer=is_supported_layer,
default_layer=DENSE)
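A small usage note, appended to the module above (assumes the FATE package is importable): only Dense and Dropout pass the whitelist, so any other layer in an nn_define is rejected by build_nn_model's is_supported_layer check.
assert is_dnn_supported_layer(DENSE) and is_dnn_supported_layer(DROPOUT)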
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import typing
from federatedml.nn.homo_nn.backend.tf_keras.layers import has_builder, DENSE, DROPOUT
from federatedml.nn.homo_nn.backend.tf_keras.nn_model import KerasNNModel, KerasSequenceDataConverter
from federatedml.nn.homo_nn.zoo.nn import build_nn
def is_supported_layer(layer):
return has_builder(layer) and layer in {DENSE, DROPOUT}
def build_dnn(nn_define, loss, optimizer, metrics) -> typing.Tuple[KerasNNModel, KerasSequenceDataConverter]:
return build_nn(nn_define=nn_define,
loss=loss,
optimizer=optimizer,
metrics=metrics,
is_supported_layer=is_supported_layer,
default_layer=DENSE)
|
apache-2.0
|
Python
|
cee0a7a3af8f17c69e4c9701a363f50904321bd1
|
fix daemonizer.kill
|
hansroh/aquests,hansroh/aquests
|
aquests/lib/daemonize.py
|
aquests/lib/daemonize.py
|
import os
import sys
import time
import signal
from . import killtree, processutil
class Daemonizer:
def __init__(self, chdir="/", procname = None, umask=0o22):
self.chdir = chdir
self.procname = procname
self.umask = umask
self.pidfile = os.path.join (chdir, '.pid')
def runAsDaemon(self):
if status (self.chdir, self.procname):
return 0
self.fork_and_die()
self.dettach_env ()
self.fork_and_die()
sys.stdout.flush()
sys.stderr.flush()
self.attach_stream('stdin', 'r')
self.attach_stream('stdout', 'a+')
self.attach_stream('stderr', 'a+')
return 1
def dettach_env (self):
os.setsid()
os.umask(self.umask)
os.chdir(self.chdir)
def attach_stream (self, name, mode, fd = '/dev/null'):
stream = open(fd, mode)
os.dup2(stream.fileno(), getattr(sys, name).fileno())
def fork_and_die(self):
r = os.fork()
if r == -1:
raise OSError("Couldn't fork().")
elif r > 0: # I'm the parent
if self.pidfile:
open (self.pidfile, 'w').write (str(r))
sys.exit(0)
elif r < 0:
raise OSError("Something bizarre happened while trying to fork().")
# now only r = 0 (the child) survives.
return r
def status (chdir, procname = None):
pidfile = os.path.join (chdir, '.pid')
if not os.path.isfile (pidfile):
return 0
with open (pidfile) as f:
pid = int (f.read ())
return processutil.is_running (pid, procname) and pid or 0
def kill (chdir, procname = None, include_children = True, signaling = True):
import psutil
for i in range (2):
pid = status (chdir, procname)
if not pid:
break
if signaling:
os.kill (pid, signal.SIGTERM)
time.sleep (2)
if include_children:
try:
killtree.kill (pid, True)
except psutil.NoSuchProcess:
pass
while processutil.is_running (pid, procname):
time.sleep (1)
try:
os.remove (os.path.join (chdir, ".pid"))
except FileNotFoundError:
pass
if __name__ == "__main__" :
import time
Daemonizer ().runAsDaemon ()
f = open ('/home/ubuntu/out', 'w')
while 1:
time.sleep (1)
f.write ('asdkljaldjalkdjalkdsa\n')
f.flush()
f.close ()
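A hedged usage sketch of the new signaling flag (module path assumed from the file location above): passing signaling=False skips the SIGTERM round and lets killtree terminate the process tree directly.
from aquests.lib import daemonize
daemonize.kill('/var/tmp/myapp', procname='myapp', signaling=False)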
|
import os
import sys
import time
import signal
from . import killtree, processutil
class Daemonizer:
def __init__(self, chdir="/", procname = None, umask=0o22):
self.chdir = chdir
self.procname = procname
self.umask = umask
self.pidfile = os.path.join (chdir, '.pid')
def runAsDaemon(self):
if status (self.chdir, self.procname):
return 0
self.fork_and_die()
self.dettach_env ()
self.fork_and_die()
sys.stdout.flush()
sys.stderr.flush()
self.attach_stream('stdin', 'r')
self.attach_stream('stdout', 'a+')
self.attach_stream('stderr', 'a+')
return 1
def dettach_env (self):
os.setsid()
os.umask(self.umask)
os.chdir(self.chdir)
def attach_stream (self, name, mode, fd = '/dev/null'):
stream = open(fd, mode)
os.dup2(stream.fileno(), getattr(sys, name).fileno())
def fork_and_die(self):
r = os.fork()
if r == -1:
raise OSError("Couldn't fork().")
elif r > 0: # I'm the parent
if self.pidfile:
open (self.pidfile, 'w').write (str(r))
sys.exit(0)
elif r < 0:
raise OSError("Something bizarre happened while trying to fork().")
# now only r = 0 (the child) survives.
return r
def status (chdir, procname = None):
pidfile = os.path.join (chdir, '.pid')
if not os.path.isfile (pidfile):
return 0
with open (pidfile) as f:
pid = int (f.read ())
return processutil.is_running (pid, procname) and pid or 0
def kill (chdir, procname = None, include_children = True):
import psutil
for i in range (2):
pid = status (chdir, procname)
if not pid:
break
os.kill (pid, signal.SIGTERM)
time.sleep (2)
if include_children:
try:
killtree.kill (pid, True)
except psutil.NoSuchProcess:
pass
while processutil.is_running (pid, procname):
time.sleep (1)
try:
os.remove (os.path.join (chdir, ".pid"))
except FileNotFoundError:
pass
if __name__ == "__main__" :
import time
Daemonizer ().runAsDaemon ()
f = open ('/home/ubuntu/out', 'w')
while 1:
time.sleep (1)
f.write ('asdkljaldjalkdjalkdsa\n')
f.flush()
f.close ()
|
mit
|
Python
|
f5c5fef9cfdc94ad2d1b7d95f990e288251f57fe
|
Add search analyzer to search
|
fin/froide,fin/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide
|
froide/helper/search/__init__.py
|
froide/helper/search/__init__.py
|
import importlib
from django.conf import settings
from elasticsearch_dsl import analyzer, tokenizer
from django_elasticsearch_dsl import Index
from .signal_processor import CelerySignalProcessor
from .queryset import SearchQuerySetWrapper
from .registry import search_registry
__all__ = [
'CelerySignalProcessor', 'search_registry', 'SearchQuerySetWrapper',
]
def get_index(name):
index_name = '%s_%s' % (
settings.ELASTICSEARCH_INDEX_PREFIX,
name
)
# if settings.ELASTICSEARCH_INDEX_PREFIX == 'froide_test':
# index_name += '_%s' % threading.get_ident()
index = Index(index_name)
# See Elasticsearch Indices API reference for available settings
index.settings(
number_of_shards=1,
number_of_replicas=0
)
return index
def get_default_text_analyzer():
return analyzer(
'froide_analyzer',
tokenizer='standard',
filter=[
'standard',
'lowercase',
'asciifolding',
]
)
def get_default_ngram_analyzer():
return analyzer(
'froide_ngram_analyzer',
tokenizer=tokenizer(
'froide_ngram_tokenzier',
type='edge_ngram',
min_gram=1,
max_gram=15,
token_chars=['letter', 'digit']
),
filter=[
'standard',
'lowercase',
'asciifolding',
]
)
def get_func(config_name, default_func):
def get_it():
from django.conf import settings
func_path = settings.FROIDE_CONFIG.get(config_name, None)
if not func_path:
return default_func()
module, func = func_path.rsplit('.', 1)
module = importlib.import_module(module)
analyzer_func = getattr(module, func)
return analyzer_func()
return get_it
get_text_analyzer = get_func('text_analyzer', get_default_text_analyzer)
get_search_analyzer = get_func('search_analyzer', get_default_text_analyzer)
get_ngram_analyzer = get_func('ngram_analyzer', get_default_ngram_analyzer)
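Given the get_func indirection above, a project can swap in its own analyzers from Django settings; a sketch using the config key names from this module (the dotted module path is hypothetical):
FROIDE_CONFIG = {
    'text_analyzer': 'myproject.search.get_text_analyzer',
    'search_analyzer': 'myproject.search.get_search_analyzer',
    'ngram_analyzer': 'myproject.search.get_ngram_analyzer',
}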
|
import importlib
from django.conf import settings
from elasticsearch_dsl import analyzer, tokenizer
from django_elasticsearch_dsl import Index
from .signal_processor import CelerySignalProcessor
from .queryset import SearchQuerySetWrapper
from .registry import search_registry
__all__ = [
'CelerySignalProcessor', 'search_registry', 'SearchQuerySetWrapper',
]
def get_index(name):
index_name = '%s_%s' % (
settings.ELASTICSEARCH_INDEX_PREFIX,
name
)
# if settings.ELASTICSEARCH_INDEX_PREFIX == 'froide_test':
# index_name += '_%s' % threading.get_ident()
index = Index(index_name)
# See Elasticsearch Indices API reference for available settings
index.settings(
number_of_shards=1,
number_of_replicas=0
)
return index
def get_default_text_analyzer():
return analyzer(
'froide_analyzer',
tokenizer='standard',
filter=[
'standard',
'lowercase',
'asciifolding',
]
)
def get_default_ngram_analyzer():
return analyzer(
'froide_ngram_analyzer',
tokenizer=tokenizer(
'froide_ngram_tokenzier',
type='edge_ngram',
min_gram=1,
max_gram=15,
token_chars=['letter', 'digit']
),
filter=[
'standard',
'lowercase',
'asciifolding',
]
)
def get_func(config_name, default_func):
def get_it():
from django.conf import settings
func_path = settings.FROIDE_CONFIG.get(config_name, None)
if not func_path:
return default_func()
module, func = func_path.rsplit('.', 1)
module = importlib.import_module(module)
analyzer_func = getattr(module, func)
return analyzer_func()
return get_it
get_text_analyzer = get_func('search_text_analyzer', get_default_text_analyzer)
get_ngram_analyzer = get_func('search_ngram_analyzer', get_default_ngram_analyzer)
|
mit
|
Python
|
56b38e64aeea12269b36d11849e0952377510c16
|
Change method of listening to state changes
|
robjohnson189/home-assistant,florianholzapfel/home-assistant,aequitas/home-assistant,aronsky/home-assistant,mKeRix/home-assistant,adrienbrault/home-assistant,keerts/home-assistant,DavidLP/home-assistant,stefan-jonasson/home-assistant,ewandor/home-assistant,Teagan42/home-assistant,robjohnson189/home-assistant,nkgilley/home-assistant,tchellomello/home-assistant,deisi/home-assistant,sffjunkie/home-assistant,mezz64/home-assistant,sffjunkie/home-assistant,betrisey/home-assistant,Cinntax/home-assistant,pschmitt/home-assistant,jaharkes/home-assistant,jnewland/home-assistant,robbiet480/home-assistant,molobrakos/home-assistant,Theb-1/home-assistant,turbokongen/home-assistant,auduny/home-assistant,PetePriority/home-assistant,betrisey/home-assistant,srcLurker/home-assistant,MungoRae/home-assistant,Julian/home-assistant,adrienbrault/home-assistant,fbradyirl/home-assistant,instantchow/home-assistant,ct-23/home-assistant,justyns/home-assistant,justyns/home-assistant,Julian/home-assistant,emilhetty/home-assistant,postlund/home-assistant,robjohnson189/home-assistant,caiuspb/home-assistant,joopert/home-assistant,Zac-HD/home-assistant,MartinHjelmare/home-assistant,jamespcole/home-assistant,hmronline/home-assistant,tboyce021/home-assistant,sffjunkie/home-assistant,luxus/home-assistant,betrisey/home-assistant,deisi/home-assistant,open-homeautomation/home-assistant,mikaelboman/home-assistant,kennedyshead/home-assistant,alexmogavero/home-assistant,leppa/home-assistant,HydrelioxGitHub/home-assistant,molobrakos/home-assistant,leppa/home-assistant,jaharkes/home-assistant,LinuxChristian/home-assistant,emilhetty/home-assistant,oandrew/home-assistant,bdfoster/blumate,postlund/home-assistant,hexxter/home-assistant,Cinntax/home-assistant,morphis/home-assistant,bdfoster/blumate,Duoxilian/home-assistant,keerts/home-assistant,shaftoe/home-assistant,philipbl/home-assistant,dmeulen/home-assistant,varunr047/homefile,jaharkes/home-assistant,persandstrom/home-assistant,florianholzapfel/home-assistant,tchellomello/home-assistant,oandrew/home-assistant,miniconfig/home-assistant,ct-23/home-assistant,mikaelboman/home-assistant,deisi/home-assistant,persandstrom/home-assistant,rohitranjan1991/home-assistant,hmronline/home-assistant,shaftoe/home-assistant,caiuspb/home-assistant,kennedyshead/home-assistant,Zac-HD/home-assistant,dmeulen/home-assistant,philipbl/home-assistant,Danielhiversen/home-assistant,LinuxChristian/home-assistant,xifle/home-assistant,PetePriority/home-assistant,mKeRix/home-assistant,molobrakos/home-assistant,Smart-Torvy/torvy-home-assistant,LinuxChristian/home-assistant,PetePriority/home-assistant,coteyr/home-assistant,Zyell/home-assistant,fbradyirl/home-assistant,soldag/home-assistant,hexxter/home-assistant,w1ll1am23/home-assistant,eagleamon/home-assistant,tinloaf/home-assistant,GenericStudent/home-assistant,jnewland/home-assistant,coteyr/home-assistant,Smart-Torvy/torvy-home-assistant,dmeulen/home-assistant,mikaelboman/home-assistant,tboyce1/home-assistant,leoc/home-assistant,leoc/home-assistant,kyvinh/home-assistant,mezz64/home-assistant,devdelay/home-assistant,stefan-jonasson/home-assistant,emilhetty/home-assistant,toddeye/home-assistant,jawilson/home-assistant,tboyce021/home-assistant,aoakeson/home-assistant,shaftoe/home-assistant,coteyr/home-assistant,srcLurker/home-assistant,happyleavesaoc/home-assistant,devdelay/home-assistant,tboyce1/home-assistant,Zyell/home-assistant,kyvinh/home-assistant,aequitas/home-assistant,jaharkes/home-assistant,varunr047/homefile,Zyell/home-assistant,morphis/home-assistant,partofthething/home-assistant,balloob/home-assistant,mikaelboman/home-assistant,MungoRae/home-assistant,DavidLP/home-assistant,deisi/home-assistant,balloob/home-assistant,tboyce1/home-assistant,jawilson/home-assistant,nevercast/home-assistant,nugget/home-assistant,ma314smith/home-assistant,open-homeautomation/home-assistant,keerts/home-assistant,tinloaf/home-assistant,hexxter/home-assistant,oandrew/home-assistant,JshWright/home-assistant,happyleavesaoc/home-assistant,jnewland/home-assistant,sander76/home-assistant,mKeRix/home-assistant,ewandor/home-assistant,kyvinh/home-assistant,robjohnson189/home-assistant,dmeulen/home-assistant,ct-23/home-assistant,HydrelioxGitHub/home-assistant,GenericStudent/home-assistant,lukas-hetzenecker/home-assistant,Smart-Torvy/torvy-home-assistant,rohitranjan1991/home-assistant,LinuxChristian/home-assistant,sander76/home-assistant,FreekingDean/home-assistant,nnic/home-assistant,pschmitt/home-assistant,DavidLP/home-assistant,MungoRae/home-assistant,sdague/home-assistant,ma314smith/home-assistant,balloob/home-assistant,jabesq/home-assistant,Julian/home-assistant,open-homeautomation/home-assistant,betrisey/home-assistant,devdelay/home-assistant,Theb-1/home-assistant,miniconfig/home-assistant,morphis/home-assistant,Duoxilian/home-assistant,happyleavesaoc/home-assistant,titilambert/home-assistant,oandrew/home-assistant,deisi/home-assistant,auduny/home-assistant,shaftoe/home-assistant,mikaelboman/home-assistant,home-assistant/home-assistant,qedi-r/home-assistant,instantchow/home-assistant,Zac-HD/home-assistant
|
homeassistant/components/automation/template.py
|
homeassistant/components/automation/template.py
|
"""
homeassistant.components.automation.template
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Offers template automation rules.
For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#template-trigger
"""
import logging
from homeassistant.const import CONF_VALUE_TEMPLATE, EVENT_STATE_CHANGED
from homeassistant.exceptions import TemplateError
from homeassistant.util import template
_LOGGER = logging.getLogger(__name__)
def trigger(hass, config, action):
""" Listen for state changes based on `config`. """
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is None:
_LOGGER.error("Missing configuration key %s", CONF_VALUE_TEMPLATE)
return False
# Local variable to keep track of if the action has already been triggered
already_triggered = False
def event_listener(event):
""" Listens for state changes and calls action. """
nonlocal already_triggered
template_result = _check_template(hass, value_template)
# Check to see if template returns true
if template_result and not already_triggered:
already_triggered = True
action()
elif not template_result:
already_triggered = False
hass.bus.listen(EVENT_STATE_CHANGED, event_listener)
return True
def if_action(hass, config):
""" Wraps action method with state based condition. """
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is None:
_LOGGER.error("Missing configuration key %s", CONF_VALUE_TEMPLATE)
return False
return lambda: _check_template(hass, value_template)
def _check_template(hass, value_template):
""" Checks if result of template is true """
try:
value = template.render(hass, value_template, {})
except TemplateError:
_LOGGER.exception('Error parsing template')
return False
return value.lower() == 'true'
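The already_triggered flag gives the trigger edge semantics: the action fires only on False-to-True transitions of the template, not on every state change while it stays true. A standalone sketch of the same pattern:
already = False
for value in [False, True, True, False, True]:
    if value and not already:
        print('action fired')   # fires twice, once per rising edge
    already = value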
|
"""
homeassistant.components.automation.template
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Offers template automation rules.
For more details about this automation rule, please refer to the documentation
at https://home-assistant.io/components/automation/#template-trigger
"""
import logging
from homeassistant.const import CONF_VALUE_TEMPLATE
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.event import track_state_change
from homeassistant.util import template
_LOGGER = logging.getLogger(__name__)
def trigger(hass, config, action):
""" Listen for state changes based on `config`. """
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is None:
_LOGGER.error("Missing configuration key %s", CONF_VALUE_TEMPLATE)
return False
# Get all entity ids
all_entity_ids = hass.states.entity_ids()
# Local variable to keep track of if the action has already been triggered
already_triggered = False
def state_automation_listener(entity, from_s, to_s):
""" Listens for state changes and calls action. """
nonlocal already_triggered
template_result = _check_template(hass, value_template)
# Check to see if template returns true
if template_result and not already_triggered:
already_triggered = True
action()
elif not template_result:
already_triggered = False
track_state_change(hass, all_entity_ids, state_automation_listener)
return True
def if_action(hass, config):
""" Wraps action method with state based condition. """
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is None:
_LOGGER.error("Missing configuration key %s", CONF_VALUE_TEMPLATE)
return False
return lambda: _check_template(hass, value_template)
def _check_template(hass, value_template):
""" Checks if result of template is true """
try:
value = template.render(hass, value_template, {})
except TemplateError:
_LOGGER.exception('Error parsing template')
return False
return value.lower() == 'true'
|
mit
|
Python
|
3f80950f1fa9c5bad018a8cdaa9f6ae70168e4e1
|
Update main.py
|
Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System
|
device/src/main.py
|
device/src/main.py
|
#This file executes when the STM32 MCU boots up; it calls the other
#functions to fulfill the project.
#Communication module: LoRa.
#It communicates with the gateway via LoRa.
#A UART port drives the LoRa module.
#JSON is parsed between the device and the gateway over the LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(X3)(OUT) #mode setting, must not be left floating
#M1(IN) <--> GPIO(X4)(OUT) #mode setting, must not be left floating
#RXD(IN) <--> X1(TX)(OUT) #UART4
#TXD(OUT) <--> X2(RX)(IN) #UART4
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
import pyb
from pyb import Pin
from pyb import Timer
from pyb import UART
import micropython
#Import light intensity needed module
import LightIntensity
import time
import json
micropython.alloc_emergency_exception_buf(100)
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#Set LoRa module with mode-0.
M0 = Pin('X3', Pin.OUT_PP)
M1 = Pin('X4', Pin.OUT_PP)
M0.low()
M1.low()
#Init uart4 for LoRa module.
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
cmd_online = '{"ID":"1", "CMD":"Online", "TYPE":"N", "VALUE":"N"}\n'
#Send an Online command to the gateway when it powers on, to obtain its status data from the gateway's database.
u4.write(cmd_online)
#LED blinks regularly (using a timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
#Read the light intensity value from sensor regularly.
'''
lightVlaue = 0
def getLightInten():
global lightVlaue
lightVlaue = LightIntensity.readLight()
tim1 = Timer(2, freq=1)
tim1.callback(getLightInten())
print(LightIntensity.readLight())
'''
if __name__=='__main__':
while True:
#Waiting for the message from UART4 to obtain LoRa data.
        n = u4.any()
        if n > 0:
recv = u4.read()
print(recv)
json_lora = json.loads(recv)
#Parse JSON from gateway.
if (json_lora.get("CMD") == 'Online' and json_lora.get("TYPE") == 'Light2' ):
if json_lora.get("VALUE") == 'On':
pyb.LED(2).on()
else:
pyb.LED(2).off()
print(LightIntensity.readLight())
'''
if lightVlaue > 0:
print(lightVlaue)
lightVlaue = 0
'''
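The command protocol is plain JSON over the LoRa serial link; a desktop-Python sketch of the message handling shown above (fields taken from cmd_online and the handler):
import json
msg = '{"ID":"1", "CMD":"Online", "TYPE":"Light2", "VALUE":"On"}'
cmd = json.loads(msg)
if cmd.get("CMD") == "Online" and cmd.get("TYPE") == "Light2":
    print("LED on" if cmd.get("VALUE") == "On" else "LED off")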
|
#This file executes when the STM32 MCU boots up; it calls the other
#functions to fulfill the project.
#Communication module: LoRa.
#It communicates with the gateway via LoRa.
#A UART port drives the LoRa module.
#JSON is parsed between the device and the gateway over the LoRa channel.
#LoRa module: E32-TTL-100
#Pin specification:
#Module MCU
#M0(IN) <--> GPIO(X3)(OUT) #mode setting, must not be left floating
#M1(IN) <--> GPIO(X4)(OUT) #mode setting, must not be left floating
#RXD(IN) <--> X1(TX)(OUT) #UART4
#TXD(OUT) <--> X2(RX)(IN) #UART4
#AUX(OUT) <--> GPIO/INT(IN) #module status detecting
#VCC
#GND
#Communication mode is 0, need to set M0 and M1 to 0.
import pyb
from pyb import Pin
from pyb import Timer
from pyb import UART
import micropython
#Import light intensity needed module
import LightIntensity
import time
import json
micropython.alloc_emergency_exception_buf(100)
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#Set LoRa module with mode-0.
M0 = Pin('X3', Pin.OUT_PP)
M1 = Pin('X4', Pin.OUT_PP)
M0.low()
M1.low()
#Init uart4 for LoRa module.
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
cmd_online = '{"ID":"1", "CMD":"Online", "TYPE":"N", "VALUE":"N"}\n'
#Send an Online command to the gateway when it powers on, to obtain its status data from the gateway's database.
u4.write(cmd_online)
#LED blinks regularly (using a timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
#Read the light intensity value from sensor regularly.
'''
lightVlaue = 0
def getLightInten():
global lightVlaue
lightVlaue = LightIntensity.readLight()
tim1 = Timer(2, freq=1)
tim1.callback(getLightInten())
print(LightIntensity.readLight())
'''
if __name__=='__main__'
while True:
#Waiting for the message from UART4 to obtain LoRa data.
len = u4.any()
if(len > 0):
recv = u4.read()
print(recv)
json_lora = json.loads(recv)
#Parse JSON from gateway.
if (json_lora.get("CMD") == 'Online' and json_lora.get("TYPE") == 'Light2' ):
if json_lora.get("VALUE") == 'On':
pyb.LED(2).on()
else:
pyb.LED(2).off()
print(LightIntensity.readLight())
'''
if lightVlaue > 0:
print(lightVlaue)
lightVlaue = 0
'''
|
mit
|
Python
|
608c55bb681667a6e1fe65e328676a3a99deb391
|
Update mupenGenerator.py
|
nadenislamarre/recalbox-configgen,digitalLumberjack/recalbox-configgen,recalbox/recalbox-configgen
|
configgen/generators/mupen/mupenGenerator.py
|
configgen/generators/mupen/mupenGenerator.py
|
#!/usr/bin/env python
import Command
import mupenControllers
import recalboxFiles
from generators.Generator import Generator
class MupenGenerator(Generator):
# Main entry of the module
# Configure mupen and return a command
def generate(self, system, rom, playersControllers):
# Settings recalbox default config file if no user defined one
if not system.config['configfile']:
# Using recalbox config file
system.config['configfile'] = recalboxFiles.mupenCustom
# Write controllers configuration files
mupenControllers.writeControllersConfig(playersControllers)
commandArray = ["mupen64plus", "--corelib", "/usr/lib/libmupen64plus.so.2.0.0", "--gfx", "/usr/lib/mupen64plus/mupen64plus-video-{}.so".format(system.config['core']),
"--configdir", recalboxFiles.mupenConf, "--datadir", recalboxFiles.mupenConf, rom]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"SDL_VIDEO_GL_DRIVER":"/usr/lib/libGLESv2.so"})
|
#!/usr/bin/env python
import Command
import mupenControllers
import recalboxFiles
from generators.Generator import Generator
class MupenGenerator(Generator):
# Main entry of the module
# Configure mupen and return a command
def generate(self, system, rom, playersControllers):
# Settings recalbox default config file if no user defined one
if not system.config['configfile']:
# Using recalbox config file
system.config['configfile'] = recalboxFiles.mupenCustom
# Write controllers configuration files
mupenControllers.writeControllersConfig(playersControllers)
commandArray = ["mupen64plus", "--corelib", "/usr/lib/libmupen64plus.so.2.0.0", "--gfx", "/usr/lib/mupen64plus/mupen64plus-video-{}.so".format(system.config['core']),
"--configdir", "/recalbox/share/system/configs/mupen64/", "--datadir", "/recalbox/share/system/configs/mupen64/", rom]
return Command.Command(videomode=system.config['videomode'], array=commandArray, env={"SDL_VIDEO_GL_DRIVER":"/usr/lib/libGLESv2.so"})
|
mit
|
Python
|
ad3554ae58f65a295ac94c131d8193e0b2e7e6f8
|
Add reminder to look at the number of terms returned
|
nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search
|
termsuggester/word2vec.py
|
termsuggester/word2vec.py
|
from gensim.models import Word2Vec
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class Word2VecSuggester():
def __init__(self, modelfile):
try:
self.model = Word2Vec.load(modelfile)
logger.info('Load Word2Vec model "{}"'.format(modelfile))
except IOError:
logger.warn('Unable to load Word2Vec model "{}"'.format(modelfile))
logger.warn('Was the train_word2vec script run?')
self.model = None
def suggest_terms(self, query_word):
# TODO: make the number of terms returned a parameter of the function
if self.model is not None:
results = self.model.most_similar(positive=[query_word],
negative=[], topn=10)
suggestions = {}
for word, weight in results:
suggestions[word] = weight
return suggestions
else:
return {}
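The TODO above can be addressed by threading a topn parameter through to gensim's most_similar; a sketch of the changed method only:
    def suggest_terms(self, query_word, topn=10):
        if self.model is None:
            return {}
        results = self.model.most_similar(positive=[query_word],
                                          negative=[], topn=topn)
        return {word: weight for word, weight in results}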
|
from gensim.models import Word2Vec
import logging
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
logger.addHandler(logging.StreamHandler())
class Word2VecSuggester():
def __init__(self, modelfile):
try:
self.model = Word2Vec.load(modelfile)
logger.info('Load Word2Vec model "{}"'.format(modelfile))
except IOError:
logger.warn('Unable to load Word2Vec model "{}"'.format(modelfile))
logger.warn('Was the train_word2vec script run?')
self.model = None
def suggest_terms(self, query_word):
if self.model is not None:
results = self.model.most_similar(positive=[query_word],
negative=[])
suggestions = {}
for word, weight in results:
suggestions[word] = weight
return suggestions
else:
return {}
|
apache-2.0
|
Python
|
73a375a3adb140c270444e886b3df842e0b28a86
|
Fix formatting tests: cfloat and cdouble as well as np.float and np.double are the same; make sure we test 4 bytes float.
|
chiffa/numpy,gfyoung/numpy,ekalosak/numpy,hainm/numpy,sigma-random/numpy,GrimDerp/numpy,sonnyhu/numpy,embray/numpy,GaZ3ll3/numpy,nbeaver/numpy,dimasad/numpy,gmcastil/numpy,musically-ut/numpy,kirillzhuravlev/numpy,simongibbons/numpy,ContinuumIO/numpy,simongibbons/numpy,mindw/numpy,ogrisel/numpy,rgommers/numpy,pelson/numpy,grlee77/numpy,mwiebe/numpy,mindw/numpy,SiccarPoint/numpy,pizzathief/numpy,AustereCuriosity/numpy,shoyer/numpy,jankoslavic/numpy,maniteja123/numpy,Yusa95/numpy,pdebuyl/numpy,dch312/numpy,jakirkham/numpy,mindw/numpy,cowlicks/numpy,behzadnouri/numpy,andsor/numpy,andsor/numpy,charris/numpy,joferkington/numpy,ewmoore/numpy,trankmichael/numpy,WarrenWeckesser/numpy,Srisai85/numpy,matthew-brett/numpy,CMartelLML/numpy,ewmoore/numpy,behzadnouri/numpy,brandon-rhodes/numpy,Eric89GXL/numpy,dwf/numpy,MSeifert04/numpy,rajathkumarmp/numpy,dato-code/numpy,dwillmer/numpy,Srisai85/numpy,bertrand-l/numpy,NextThought/pypy-numpy,Linkid/numpy,felipebetancur/numpy,jakirkham/numpy,hainm/numpy,sigma-random/numpy,rmcgibbo/numpy,KaelChen/numpy,charris/numpy,mathdd/numpy,yiakwy/numpy,empeeu/numpy,pelson/numpy,cjermain/numpy,ddasilva/numpy,mhvk/numpy,MaPePeR/numpy,pbrod/numpy,rajathkumarmp/numpy,tynn/numpy,dch312/numpy,stuarteberg/numpy,groutr/numpy,njase/numpy,WarrenWeckesser/numpy,astrofrog/numpy,mortada/numpy,charris/numpy,skwbc/numpy,BMJHayward/numpy,empeeu/numpy,ChanderG/numpy,MaPePeR/numpy,Dapid/numpy,ajdawson/numpy,joferkington/numpy,tdsmith/numpy,dwf/numpy,numpy/numpy,ChanderG/numpy,dwf/numpy,endolith/numpy,skwbc/numpy,yiakwy/numpy,ddasilva/numpy,BabeNovelty/numpy,Anwesh43/numpy,brandon-rhodes/numpy,jonathanunderwood/numpy,rmcgibbo/numpy,GrimDerp/numpy,kirillzhuravlev/numpy,rherault-insa/numpy,immerrr/numpy,CMartelLML/numpy,pbrod/numpy,astrofrog/numpy,dato-code/numpy,jschueller/numpy,yiakwy/numpy,pdebuyl/numpy,utke1/numpy,larsmans/numpy,kiwifb/numpy,jorisvandenbossche/numpy,githubmlai/numpy,felipebetancur/numpy,naritta/numpy,SiccarPoint/numpy,gmcastil/numpy,ssanderson/numpy,chiffa/numpy,gfyoung/numpy,ekalosak/numpy,hainm/numpy,sigma-random/numpy,GrimDerp/numpy,sonnyhu/numpy,embray/numpy,GaZ
3ll3/numpy,gfyoung/numpy,hainm/numpy,NextThought/pypy-numpy,rajathkumarmp/numpy,kiwifb/numpy,leifdenby/numpy,MaPePeR/numpy,NextThought/pypy-numpy,endolith/numpy,joferkington/numpy,ahaldane/numpy,Linkid/numpy,jankoslavic/numpy,mattip/numpy,anntzer/numpy,gmcastil/numpy,rudimeier/numpy,BabeNovelty/numpy,ContinuumIO/numpy,jankoslavic/numpy,stefanv/numpy,naritta/numpy,matthew-brett/numpy,mwiebe/numpy,cowlicks/numpy,moreati/numpy,dwillmer/numpy,tdsmith/numpy,GaZ3ll3/numpy,CMartelLML/numpy
|
numpy/core/tests/test_print.py
|
numpy/core/tests/test_print.py
|
import numpy as np
from numpy.testing import *
def check_float_type(tp):
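# str() of the numpy scalar should match str() of the equivalent Python float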
for x in [0, 1, -1, 1e10, 1e20]:
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
Python float precision.
"""
for t in [np.float32, np.double, np.longdouble]:
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1, -1, 1e10, 1e20]:
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
Python float precision.
"""
for t in [np.complex64, np.cdouble, np.clongdouble]:
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
import numpy as np
from numpy.testing import *
def check_float_type(tp):
for x in [0, 1, -1, 1e10, 1e20]:
assert_equal(str(tp(x)), str(float(x)))
def test_float_types():
""" Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
Python float precision.
"""
for t in [np.float, np.double, np.longdouble]:
yield check_float_type, t
def check_complex_type(tp):
for x in [0, 1, -1, 1e10, 1e20]:
assert_equal(str(tp(x)), str(complex(x)))
assert_equal(str(tp(x*1j)), str(complex(x*1j)))
assert_equal(str(tp(x + x*1j)), str(complex(x + x*1j)))
def test_complex_types():
"""Check formatting.
This is only for the str function, and only for simple types.
The precision of np.float and np.longdouble isn't the same as the
Python float precision.
"""
for t in [np.cfloat, np.cdouble, np.clongdouble]:
yield check_complex_type, t
if __name__ == "__main__":
run_module_suite()
|
bsd-3-clause
|
Python
|
27d920372b215da7cb2625ce6d7d3f3666bf16e9
|
Improve spelling
|
openfisca/country-template,openfisca/country-template
|
openfisca_country_template/variables/housing.py
|
openfisca_country_template/variables/housing.py
|
# -*- coding: utf-8 -*-
# This file defines the variables of our legislation.
# A variable is a property of a person or of an entity (e.g. a household).
# See http://openfisca.org/doc/variables.html
# Import from openfisca-core the common python objects used to code the legislation in OpenFisca
from openfisca_core.model_api import *
# Import the entities specifically defined for this tax and benefit system
from openfisca_country_template.entities import *
# This variable is a pure input: it doesn't have a formula
class accomodation_size(Variable):
value_type = float
entity = Household
definition_period = MONTH
label = u"Size of the accomodation, in square metres"
# This variable is a pure input: it doesn't have a formula
class rent(Variable):
value_type = float
entity = Household
definition_period = MONTH
label = u"Rent paid by the household"
# Possible values for the housing_occupancy_status variable, defined further down
class HousingOccupancyStatus(Enum):
__order__ = "owner tenant free_lodger homeless"
owner = u'Owner'
tenant = u'Tenant'
free_lodger = u'Free lodger'
homeless = u'Homeless'
class housing_occupancy_status(Variable):
value_type = Enum
possible_values = HousingOccupancyStatus
default_value = HousingOccupancyStatus.tenant
entity = Household
definition_period = MONTH
label = u"Legal housing situation of the household concerning their main residence"
|
# -*- coding: utf-8 -*-
# This file defines the variables of our legislation.
# A variable is a property of a person or of an entity (e.g. a household).
# See http://openfisca.org/doc/variables.html
# Import from openfisca-core the common python objects used to code the legislation in OpenFisca
from openfisca_core.model_api import *
# Import the entities specifically defined for this tax and benefit system
from openfisca_country_template.entities import *
# This variable is a pure input: it doesn't have a formula
class accomodation_size(Variable):
value_type = float
entity = Household
definition_period = MONTH
label = u"Size of the accomodation, in square metters"
# This variable is a pure input: it doesn't have a formula
class rent(Variable):
value_type = float
entity = Household
definition_period = MONTH
label = u"Rent paid by the household"
# Possible values for the housing_occupancy_status variable, defined further down
class HousingOccupancyStatus(Enum):
__order__ = "owner tenant free_lodger homeless"
owner = u'Owner'
tenant = u'Tenant'
free_lodger = u'Free logder'
homeless = u'Homeless'
class housing_occupancy_status(Variable):
value_type = Enum
possible_values = HousingOccupancyStatus
default_value = HousingOccupancyStatus.tenant
entity = Household
definition_period = MONTH
label = u"Legal housing situation of the household concerning their main residence"
|
agpl-3.0
|
Python
|
ce02f48ea85b64a5a4503b22d3fb324c521904d0
|
Enable graphiql only when DEBUG=True
|
maferelo/saleor,tfroehlich82/saleor,car3oon/saleor,tfroehlich82/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,UITools/saleor,itbabu/saleor,mociepka/saleor,jreigel/saleor,car3oon/saleor,KenMutemi/saleor,mociepka/saleor,KenMutemi/saleor,jreigel/saleor,maferelo/saleor,tfroehlich82/saleor,car3oon/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,maferelo/saleor,jreigel/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,UITools/saleor,mociepka/saleor,UITools/saleor,itbabu/saleor
|
saleor/urls.py
|
saleor/urls.py
|
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.contrib.staticfiles.views import serve
from graphene_django.views import GraphQLView
from .cart.urls import urlpatterns as cart_urls
from .checkout.urls import urlpatterns as checkout_urls
from .core.sitemaps import sitemaps
from .core.urls import urlpatterns as core_urls
from .order.urls import urlpatterns as order_urls
from .product.urls import urlpatterns as product_urls
from .search.urls import urlpatterns as search_urls
from .userprofile.urls import urlpatterns as userprofile_urls
from .data_feeds.urls import urlpatterns as feed_urls
from .dashboard.urls import urlpatterns as dashboard_urls
admin.autodiscover()
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^account/', include('allauth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^cart/', include(cart_urls, namespace='cart')),
url(r'^checkout/', include(checkout_urls, namespace='checkout')),
url(r'^dashboard/', include(dashboard_urls, namespace='dashboard')),
url(r'^graphql', GraphQLView.as_view(graphiql=settings.DEBUG)),
url(r'^order/', include(order_urls, namespace='order')),
url(r'^products/', include(product_urls, namespace='product')),
url(r'^profile/', include(userprofile_urls, namespace='profile')),
url(r'^search/', include(search_urls, namespace='search')),
url(r'^selectable/', include('selectable.urls')),
url(r'^feeds/', include(feed_urls, namespace='data_feeds')),
url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps},
name='django.contrib.sitemaps.views.sitemap'),
url(r'', include('payments.urls'))
]
if settings.DEBUG:
# static files (images, css, javascript, etc.)
urlpatterns += [
url(r'^static/(?P<path>.*)$', serve)
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
from django.conf import settings
from django.conf.urls import url, include
from django.conf.urls.static import static
from django.contrib import admin
from django.contrib.sitemaps.views import sitemap
from django.contrib.staticfiles.views import serve
from graphene_django.views import GraphQLView
from .cart.urls import urlpatterns as cart_urls
from .checkout.urls import urlpatterns as checkout_urls
from .core.sitemaps import sitemaps
from .core.urls import urlpatterns as core_urls
from .order.urls import urlpatterns as order_urls
from .product.urls import urlpatterns as product_urls
from .search.urls import urlpatterns as search_urls
from .userprofile.urls import urlpatterns as userprofile_urls
from .data_feeds.urls import urlpatterns as feed_urls
from .dashboard.urls import urlpatterns as dashboard_urls
admin.autodiscover()
urlpatterns = [
url(r'^', include(core_urls)),
url(r'^account/', include('allauth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^cart/', include(cart_urls, namespace='cart')),
url(r'^checkout/', include(checkout_urls, namespace='checkout')),
url(r'^dashboard/', include(dashboard_urls, namespace='dashboard')),
url(r'^graphql', GraphQLView.as_view(graphiql=True)),
url(r'^order/', include(order_urls, namespace='order')),
url(r'^products/', include(product_urls, namespace='product')),
url(r'^profile/', include(userprofile_urls, namespace='profile')),
url(r'^search/', include(search_urls, namespace='search')),
url(r'^selectable/', include('selectable.urls')),
url(r'^feeds/', include(feed_urls, namespace='data_feeds')),
url(r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps},
name='django.contrib.sitemaps.views.sitemap'),
url(r'', include('payments.urls'))
]
if settings.DEBUG:
# static files (images, css, javascript, etc.)
urlpatterns += [
url(r'^static/(?P<path>.*)$', serve)
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
bsd-3-clause
|
Python
|
0bb0720616bebd308abae5cbee8d60c4dfa98fe4
|
Add a metric ton of test cases (some of which currently fail)
|
feincms/feincms-cleanse,matthiask/html-sanitizer
|
feincms_cleanse/tests.py
|
feincms_cleanse/tests.py
|
from django.test import TestCase
from feincms_cleanse import cleanse_html
class CleanseTestCase(TestCase):
def run_tests(self, entries):
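# An expected value of None means the input should pass through the cleaner unchanged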
for before, after in entries:
after = before if after is None else after
result = cleanse_html(before)
self.assertEqual(result, after, u"Cleaning '%s', expected '%s' but got '%s'" % (before, after, result))
def test_01_cleanse(self):
entries = [
(u'<p> </p>', u''),
(u'<p> </p>', u''),
(u'<span style="font-weight: bold;">Something</span><p></p>',
u'<strong>Something</strong>'),
(u'<p>abc <span>def <em>ghi</em> jkl</span> mno</p>',
u'<p>abc def <em>ghi</em> jkl mno</p>'),
(u'<span style="font-style: italic;">Something</span><p></p>',
u'<em>Something</em>'),
(u'<p>abc<br />def</p>', u'<p>abc<br />def</p>'),
]
self.run_tests(entries)
def test_02_a_tag(self):
entries = (
('<a href="/foo">foo</a>', None),
('<a href="/foo" target="some" name="bar" title="baz" cookies="yesplease">foo</a>', '<a href="/foo" target="some" name="bar" title="baz">foo</a>'),
('<a href="http://somewhere.else">foo</a>', None),
('<a href="https://somewhere.else">foo</a>', None),
('<a href="javascript:alert()">foo</a>', '<a href="">foo</a>')
)
self.run_tests(entries)
def test_03_merge(self):
entries = (
('<h2>foo</h2><h2>bar</h2>', '<h2>foo bar</h2>'),
('<h2>foo </h2> <h2> bar</h2>', '<h2>foo bar</h2>'),
)
self.run_tests(entries)
def test_04_p_in_li(self):
entries = (
('<li><p>foo</p></li>', '<li>foo</li>'),
('<li> <p>foo</p>   </li>', '<li>foo</li>'),
('<li>foo<p>bar</p>baz</li>', '<li>foo bar baz</li>'),
)
self.run_tests(entries)
def test_05_p_in_p(self):
entries = (
(u'<p><p><p> </p> </p><p><br /></p></p>', u' '),
('<p>foo<p>bar</p>baz</p>', '<p>foo bar baz</p>'),
)
self.run_tests(entries)
def test_06_whitelist(self):
entries = (
(u'<script src="http://abc">foo</script>', u''),
(u'<script type="text/javascript">foo</script>', u''),
)
self.run_tests(entries)
|
from django.test import TestCase
from feincms_cleanse import cleanse_html
class CleanseTestCase(TestCase):
def test_01_cleanse(self):
entries = [
(u'<p> </p>', u''),
(u'<span style="font-weight: bold;">Something</span><p></p>',
u'<strong>Something</strong>'),
(u'<p>abc <span>def <em>ghi</em> jkl</span> mno</p>',
u'<p>abc def <em>ghi</em> jkl mno</p>'),
(u'<span style="font-style: italic;">Something</span><p></p>',
u'<em>Something</em>'),
(u'<p>abc<br />def</p>', u'<p>abc<br />def</p>'),
(u'<p><p><p> </p> </p><p><br /></p></p>', u' '),
]
for a, b in entries:
self.assertEqual(cleanse_html(a), b)
|
bsd-3-clause
|
Python
|
cc934035033eff97ac698d9fa9b54b38dd2fbe9d
|
Update main.py
|
RoboCupULaval/UI-Debug
|
main.py
|
main.py
|
# Under MIT License, see LICENSE.txt
import sys
import argparse
import warnings
from PyQt5.QtWidgets import QApplication
from qtpy import QtCore
from Controller.MainController import MainController
__author__ = 'RoboCupULaval'
def argumentParser(argument):
""" Argument parser """
parser = argparse.ArgumentParser(description='options for initialising UI-Debug')
parser.add_argument('use_type', metavar='use_type', type=str, default='sim',
help='use_type = sim: uses grsim data on port 10024 (grsim must be set to '
'port 10024 for this to work); use_type = real: uses the normal '
'vision port (10020)')
args_ = parser.parse_args(argument)
return args_
if __name__ == '__main__':
args = argumentParser(None)
app = QApplication(sys.argv)
if args.use_type == 'sim':
port = 10024
elif args.use_type == 'kalman':
port = 10022
elif args.use_type == 'real':
port = 10020
else: # force real-life
warnings.warn("Unrecognized use_type argument. force real-life.", SyntaxWarning, stacklevel=2)
port = 10020
f = MainController(port)
f.show()
sys.exit(app.exec_())
#except NameError:
# pass
|
# Under MIT License, see LICENSE.txt
import sys
import argparse
from PyQt5.QtWidgets import QApplication
from qtpy import QtCore
from Controller.MainController import MainController
__author__ = 'RoboCupULaval'
def argumentParser(argument):
""" Argument parser """
parser = argparse.ArgumentParser(description='options for initialising UI-Debug')
parser.add_argument('use_type', metavar='use_type', type=str, default='sim',
help='use_type = sim: uses grsim data on port 10024 (grsim must be set to '
'port 10024 for this to work); use_type = real: uses the normal '
'vision port (10020)')
args_ = parser.parse_args(argument)
return args_
if __name__ == '__main__':
args = argumentParser(None)
app = QApplication(sys.argv)
if args.use_type == 'sim':
port = 10024
elif args.use_type == 'kalman':
port = 10022
else: # real-life
port = 10020
f = MainController(port)
f.show()
sys.exit(app.exec_())
#except NameError:
# pass
|
mit
|
Python
|
473ff95fcd1ac784cc3492a4727d668a10270594
|
add today filter
|
anddev68/Notif-ClassChange-NIT-GC
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
import sys
import urllib
import subprocess
import argparse
import datetime
# Constants
URL = "http://www.gifu-nct.ac.jp/gakka/keijiyou/keijiyou.pdf"
LOCALPDF = "keijiyou.pdf"
LOCALTEXT = "keijiyou.txt"
def download():
urllib.urlretrieve(URL, LOCALPDF)
def pre_parse():
cmd = "pdftotext -raw " + LOCALPDF + " " + LOCALTEXT
#print("done:" + cmd)
let = subprocess.check_call( cmd.split(" ") )
return let
def get_date():
today = datetime.date.today()
return str(today.month) + "月" + str(today.day) + "日"  # date.month is already 1-based
def filter1(gakunen, gakka, map):
return filter(lambda item: item['gakka'] == gakka and item['gakunen'] == gakunen, map)
def filter2(gakunen, map):
return filter(lambda item: item['gakunen'] == gakunen, map)
def filter_today(map):
return filter(lambda item: item['date'] == get_date(), map)
def parse():
map = []
for line in open(LOCALTEXT, "r"):
terms = line.split(" ")
# discard headers
# change conditions if needed
if line.find("教員名") != -1 or len(terms) < 5 or line == "":
continue
# read body
map.append({
'date': terms[0],
'weeks': terms[1],
'jigen': terms[2],
'gakka': terms[3],
'gakunen': terms[4],
'old': terms[5],
'new': terms[7].replace("\n","") if 7 < len(terms) else "空きコマ"
})
return map
if __name__ == "__main__":
# arg check
parser = argparse.ArgumentParser()
parser.add_argument('--cache', help='use the cached PDF instead of downloading the original')
parser.add_argument('--filter',help='[1E|2E|2C etc...]')
parser.add_argument('--date', help='today')
args = parser.parse_args()
# execute
if args.cache is not None:
download()
pre_parse()
data = parse()
# use filter
if args.filter is not None:
data = filter1(args.filter[0], args.filter[1], data)
if args.date is "today":
date = filter_today(map)
# print
if len(data) == 0:
print "No data."
for item in data:
print item['date'] + item['weeks'] + item['jigen'] + " " + item['gakunen'] + item['gakka'] + " " + item['old'] + " -> " + item['new']
|
# -*- coding: utf-8 -*-
import sys
import urllib
import subprocess
import argparse
# Constants
URL = "http://www.gifu-nct.ac.jp/gakka/keijiyou/keijiyou.pdf"
LOCALPDF = "keijiyou.pdf"
LOCALTEXT = "keijiyou.txt"
def download():
urllib.urlretrieve(URL, LOCALPDF)
def pre_parse():
cmd = "pdftotext -raw " + LOCALPDF + " " + LOCALTEXT
#print("done:" + cmd)
let = subprocess.check_call( cmd.split(" ") )
return let
def filter1(gakunen, gakka, map):
return filter(lambda item: item['gakka'] == gakka and item['gakunen'] == gakunen, map)
def filter2(gakunen, map):
return filter(lambda item: item['gakunen'] == gakunen, map)
def parse():
map = []
for line in open(LOCALTEXT, "r"):
terms = line.split(" ")
# discard headers
# change conditions if needed
if line.find("教員名") != -1 or len(terms) < 5 or line == "":
continue
# read body
map.append({
'date': terms[0],
'weeks': terms[1],
'jigen': terms[2],
'gakka': terms[3],
'gakunen': terms[4],
'old': terms[5],
'new': terms[7].replace("\n","") if 7 < len(terms) else "空きコマ"
})
return map
if __name__ == "__main__":
# arg check
parser = argparse.ArgumentParser()
parser.add_argument('--cache', help='use the cached PDF instead of downloading the original')
parser.add_argument('--filter',help='[1E|2E|2C etc...]')
args = parser.parse_args()
# execute
if args.cache is not None:
download()
pre_parse()
data = parse()
# use filter
if args.filter is not None:
data = filter1(args.filter[0], args.filter[1], data)
if len(data) == 0:
print "No data."
for item in data:
print item['date'] + item['weeks'] + item['jigen'] + " " + item['gakunen'] + item['gakka'] + " " + item['old'] + " -> " + item['new']
|
mit
|
Python
|
34bd4dec6bc60ff676874c13cb993a8123bd0ddc
|
handle the exception of an invalid bitly url
|
swaroopsm/Console-Bitly
|
main.py
|
main.py
|
#!/usr/bin/python
import urllib
import urllib2
import json
import settings as s
class ConsoleBitly:
def shorten(self,req):
url="http://api.bitly.com/v3/shorten?login="+s.bitly_username+"&apiKey="+s.bitly_apikey+"&longUrl="+req
print "Please wait... \n"
response=urllib2.urlopen(url)
a = json.loads(response.read())
print "\nShortened URL is: \033[1;36m"+a['data']['url']+"\033[1;m\n"
def expand(self,req):
try:
url="http://api.bitly.com/v3/expand?login="+s.bitly_username+"&apiKey="+s.bitly_apikey+"&shortUrl="+req
print "Please wait... \n"
response=urllib2.urlopen(url)
a = json.loads(response.read())
print "\nExpanded URL is: \033[1;36m"+a['data']['expand'][0]['long_url']+"\033[1;m\n"
except:
print "\033[1;31mThe provided url might not be of a bit.ly domain OR shortened version does not exist!\033[1;m\n"
c=ConsoleBitly()
req=raw_input("Enter the URL to be shortened: ")
c.expand(req)
|
#!/usr/bin/python
import urllib
import urllib2
import json
import settings as s
class ConsoleBitly:
def shorten(self,req):
url="http://api.bitly.com/v3/shorten?login="+s.bitly_username+"&apiKey="+s.bitly_apikey+"&longUrl="+req
print "Please wait... \n"
response=urllib2.urlopen(url)
a = json.loads(response.read())
print "\nShortened URL is: \033[1;36m"+a['data']['url']+"\033[1;m\n"
def expand(self,req):
url="http://api.bitly.com/v3/expand?login="+s.bitly_username+"&apiKey="+s.bitly_apikey+"&shortUrl="+req
print "Please wait... \n"
response=urllib2.urlopen(url)
a = json.loads(response.read())
print "\nExpanded URL is: \033[1;36m"+a['data']['expand'][0]['long_url']+"\033[1;m\n"
c=ConsoleBitly()
req = raw_input("Enter the URL to be expanded: ")
c.expand(req)
|
mit
|
Python
|
b57e12c4e01559ce5bcb1588b14758429ad552df
|
Revert "fix: error instance to pytest"
|
jamesstidard/sanic-envconfig
|
tests/test_envconfig.py
|
tests/test_envconfig.py
|
import pytest
@pytest.mark.parametrize("attribute, value", [
('ATTRIBUTE_STR', 'default_str'),
('ATTRIBUTE_INT', 1),
('ATTRIBUTE_FLOAT', 1.5),
('ATTRIBUTE_BOOL', True),
])
def test_get_attributes(config, attribute, value):
assert getattr(config, attribute) == value
def test_set_existing_attribute(config, sentinel):
config.ATTRIBUTE_STR = sentinel
assert config.ATTRIBUTE_STR == sentinel
def test_set_existing_attribute_gets_overridden(config, mock_env, sentinel):
mock_env({'ATTRIBUTE_INT': '1'})
config.ATTRIBUTE_INT = sentinel
assert config.ATTRIBUTE_INT == 1
def test_set_new_attribute_gets_overridden(config, mock_env, sentinel):
mock_env({'ATTRIBUTE_NEW': 'hello attr'})
config.ATTRIBUTE_NEW = sentinel
assert config.ATTRIBUTE_NEW == 'hello attr'
def test_no_attribute(config):
assert not hasattr(config, 'MISSING_ATTRIBUTE')
def test_add_attribute(config, sentinel):
config.NEW_ATTRIBUTE = sentinel
assert config.NEW_ATTRIBUTE == sentinel
@pytest.mark.parametrize("attribute, value_type, new_value_in, new_value_out", [
("ATTRIBUTE_STR", str, 'new value', 'new value'),
("ATTRIBUTE_INT", int, '12345', 12345),
("ATTRIBUTE_FLOAT", float, '3.14', 3.14),
("ATTRIBUTE_BOOL", bool, 'yes', True),
])
def test_env_override(config, mock_env, attribute, value_type, new_value_in, new_value_out):
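# Environment variables arrive as strings; the config must cast them back to each attribute's declared type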
mock_env({attribute: new_value_in})
assert getattr(config, attribute) == new_value_out
assert type(getattr(config, attribute)) == value_type
def test_cant_parse(config, mock_env):
mock_env({'ATTRIBUTE_INT': 'string'})
with pytest.raises(AttributeError):
print(config.ATTRIBUTE_INT)
|
import pytest
@pytest.mark.parametrize("attribute, value", [
('ATTRIBUTE_STR', 'default_str'),
('ATTRIBUTE_INT', 1),
('ATTRIBUTE_FLOAT', 1.5),
('ATTRIBUTE_BOOL', True),
])
def test_get_attributes(config, attribute, value):
assert getattr(config, attribute) == value
def test_set_existing_attribute(config, sentinel):
config.ATTRIBUTE_STR = sentinel
assert config.ATTRIBUTE_STR == sentinel
def test_set_existing_attribute_gets_overridden(config, mock_env, sentinel):
mock_env({'ATTRIBUTE_INT': '1'})
config.ATTRIBUTE_INT = sentinel
assert config.ATTRIBUTE_INT == 1
def test_set_new_attribute_gets_overridden(config, mock_env, sentinel):
mock_env({'ATTRIBUTE_NEW': 'hello attr'})
config.ATTRIBUTE_NEW = sentinel
assert config.ATTRIBUTE_NEW == 'hello attr'
def test_no_attribute(config):
assert not hasattr(config, 'MISSING_ATTRIBUTE')
def test_add_attribute(config, sentinel):
config.NEW_ATTRIBUTE = sentinel
assert config.NEW_ATTRIBUTE == sentinel
@pytest.mark.parametrize("attribute, value_type, new_value_in, new_value_out", [
("ATTRIBUTE_STR", str, 'new value', 'new value'),
("ATTRIBUTE_INT", int, '12345', 12345),
("ATTRIBUTE_FLOAT", float, '3.14', 3.14),
("ATTRIBUTE_BOOL", bool, 'yes', True),
])
def test_env_override(config, mock_env, attribute, value_type, new_value_in, new_value_out):
mock_env({attribute: new_value_in})
assert getattr(config, attribute) == new_value_out
assert type(getattr(config, attribute)) == value_type
def test_cant_parse(config, mock_env):
mock_env({'ATTRIBUTE_INT': 'string'})
with pytest.raises(AttributeError()):
print(config.ATTRIBUTE_INT)
|
mit
|
Python
|
f5ffdefed84c7ddc6dbb4ea897e9f476b5b4d245
|
Add squares to table.
|
oskarijarvelin/4dttt
|
main.py
|
main.py
|
class Square:
def __init__(self, coords):
pass
def add_mark(self, player):
pass
class Line:
def __init__(self, params):
pass
def squares(self):
pass
def win(self):
pass
def not_winnable(self):
pass
class TTT_Table:
def __init__(self, base, power, players):
self.base = base
self.power = power
self.squares = {}
self.add_squares_recursive(self.power - 1, [])
self.players = players
self.turn = self.players[0]
def add_squares_recursive(self, powerleft, list):
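# Each recursion level fixes one coordinate digit; when powerleft reaches 0 the completed coordinate string keys a new Square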
for i in range(0, self.base):  # each digit ranges over the board size (base); recursion depth supplies one digit per dimension
if powerleft == 0:
self.squares[''.join(list + [str(i)])] = Square(list + [i])
else:
self.add_squares_recursive(powerleft - 1, list + [str(i)])
def list_lines(self):
pass
def win(self):
pass
def add_mark(self, player):
pass
class Player:
def __init__(self, name, mark):
self.name = name
self.mark = mark
table = TTT_Table(4, 4, [1])
print(table.squares)
|
class Square:
def __init__(self, coords):
pass
def add_mark(self, player):
pass
class Line:
def __init__(self, params):
pass
def squares(self):
pass
def win(self):
pass
def not_winnable(self):
pass
class TTT_Table:
def __init__(self, base, power, players):
self.base = base
self.power = power
self.players = players
self.turn = self.players[0]
def add_squares_recursive(self):
pass
def list_lines(self):
pass
def win(self):
pass
def add_mark(self, player):
pass
class Player:
def __init__(self, name, mark)
self.name = name
self.mark = mark
|
agpl-3.0
|
Python
|
a28eedba060934d498ac38e8704022dc38c43758
|
Add stringio from six
|
danielecook/python-cli-skeleton
|
tests/test_utilities.py
|
tests/test_utilities.py
|
from subprocess import Popen, PIPE
from six import StringIO
import sys
class Capturing(list):
def __enter__(self):
self._stdout = sys.stdout
sys.stdout = self._stringio = StringIO()
return self
def __exit__(self, *args):
self.extend(self._stringio.getvalue().splitlines())
sys.stdout = self._stdout
class Capturing_err(list):
def __enter__(self):
self._stderr = sys.stderr
sys.stderr = self._stringio = StringIO()
return self
def __exit__(self, *args):
self.extend(self._stringio.getvalue().splitlines())
sys.stderr = self._stderr
def terminal(command):
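# Run a command and return its (stdout, stderr) as a tuple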
return Popen(command, stdout=PIPE, stderr=PIPE).communicate()
|
from subprocess import Popen, PIPE
from StringIO import StringIO
import sys
class Capturing(list):
def __enter__(self):
self._stdout = sys.stdout
sys.stdout = self._stringio = StringIO()
return self
def __exit__(self, *args):
self.extend(self._stringio.getvalue().splitlines())
sys.stdout = self._stdout
class Capturing_err(list):
def __enter__(self):
self._stderr = sys.stderr
sys.stderr = self._stringio = StringIO()
return self
def __exit__(self, *args):
self.extend(self._stringio.getvalue().splitlines())
sys.stderr = self._stderr
def terminal(command):
return Popen(command, stdout=PIPE, stderr=PIPE).communicate()
|
mit
|
Python
|
603d0dbb3ce2911b62f3274e68b93812da9acf2d
|
Remove SuperOnline and UyduNet DNS addresses
|
isair/youtubekapatildimi,isair/youtubekapatildimi
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
"""
main.py
Jan 14, 2014
Copyright (C) 2014 Baris Sencan
"""
import os
import redis
from flask import Flask, render_template, url_for
from flask.ext.compress import Compress
app = Flask(__name__)
# Enable gzip compression.
Compress(app)
# Static file loading helper.
app.jinja_env.globals['static'] = (
lambda filename: url_for('static', filename=filename))
# Redis configuration.
redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')
redis = redis.from_url(redis_url)
# DNS list (plot twist: it's actually a dictionary).
dns_list = {
'Google': ('8.8.8.8', '8.8.4.4'),
'OpenDNS': ('208.67.222.222', '208.67.220.220'),
'TTNet': ('195.175.39.40', '195.175.39.39')
}
@app.route('/')
def home():
# Fetch information from database and render page.
status_for = dict()
for server in dns_list:
try:
status_for[server] = redis.get(server)
except:
status_for[server] = 'unknown'
return render_template('home.html', dns_list=dns_list, status_for=status_for)
|
# -*- coding: utf-8 -*-
"""
main.py
Jan 14, 2014
Copyright (C) 2014 Barış Şencan
"""
import os
import redis
from flask import Flask, render_template, url_for
from flask.ext.compress import Compress
app = Flask(__name__)
# Enable gzip compression.
Compress(app)
# Static file loading helper.
app.jinja_env.globals['static'] = (
lambda filename: url_for('static', filename=filename))
# Redis configuration.
redis_url = os.getenv('REDISTOGO_URL', 'redis://localhost:6379')
redis = redis.from_url(redis_url)
# DNS list (plot twist: it's actually a dictionary).
dns_list = {
'Google': ('8.8.8.8', '8.8.4.4'),
'OpenDNS': ('208.67.222.222', '208.67.220.220'),
'TTNet': ('195.175.39.40', '195.175.39.39'),
'SuperOnline': ('213.74.0.1', '213.74.1.1'),
'UyduNet': ('62.248.80.161', '62.248.80.162')
}
@app.route('/')
def home():
status_for = dict()
# Fetch information from database.
for server in dns_list:
try:
status_for[server] = redis.get(server)
except:
status_for[server] = 'unknown'
return render_template('home.html', dns_list=dns_list, status_for=status_for)
|
apache-2.0
|
Python
|
9c7660fd63bc1c48a0533e867c7d18faf9d90c03
|
Make use thru uwsgi easier
|
pictuga/morss,pictuga/morss,pictuga/morss
|
main.py
|
main.py
|
#!/usr/bin/env python
from morss import main, cgi_wrapper as application
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
from morss import main
if __name__ == '__main__':
main()
|
agpl-3.0
|
Python
|
b3e61d648be5c5d28cfe3a7ef69f918c2a2c33a1
|
optimize the logic of mail
|
goace/bitcoin-ticker
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
import mail
import simplejson
import urllib2
import time
from config import *  # supplies account, smtp_addr, password, send_to, high and low
def moniter(m, send_to, high, low):
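# Poll the BTCChina ticker every 3 seconds; mail a warning when the price leaves the [low, high] band, at most once every 5 minutes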
req = urllib2.Request("https://data.btcchina.com/data/ticker")
opener = urllib2.build_opener()
last_sent = 0
while True:
try:
f = opener.open(req)
data = simplejson.load(f)
except:
time.sleep(3)
continue
price = float(data['ticker']['last'])
if price > high or price < low:
if time.time() - last_sent > 5 * 60:
try:
m.send(send_to, "BTC Ticker Warning",
"the price now is " + str(price))
print "sent email"
last_sent = time.time()
except Exception, e:
print e
print "Price: ¥%s Buy: ¥%s Sell: ¥%s" % (data['ticker']['last'],
data['ticker']['buy'], data['ticker']['sell'])
time.sleep(3)
if __name__ == "__main__":
m = mail.Mail(account, smtp_addr, account, password)
moniter(m, send_to, high, low)
|
# -*- coding: utf-8 -*-
import mail
import simplejson
import urllib2
import time
from config import *
def moniter(m, send_to, high, low):
req = urllib2.Request("https://data.btcchina.com/data/ticker")
opener = urllib2.build_opener()
last_sent = 0
while True:
try:
f = opener.open(req)
data = simplejson.load(f)
except:
time.sleep(3)
continue
price = float(data['ticker']['last'])
if price > high or price < low:
for i in range(3):
try:
if time.time() - last_sent > 5 * 60:
m.send(send_to, "BTC Ticker Warning",
"the price now is " + str(price))
last_sent = time.time()
except:
continue
break
print "Price: ¥%s Buy: ¥%s Sell: ¥%s" % (data['ticker']['last'],
data['ticker']['buy'], data['ticker']['sell'])
time.sleep(3)
if __name__ == "__main__":
m = mail.Mail(account, smtp_addr, account, password)
moniter(m, send_to, high, low)
|
mit
|
Python
|
3cff942af436f16aab2078e6aeedd3073f4a5522
|
Add Flask-Mail, and null user loader
|
mikeboers/Flask-Roots,mikeboers/Flask-Roots
|
flask_roots/extension.py
|
flask_roots/extension.py
|
from __future__ import absolute_import
import os
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.images import Images
from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.acl import AuthManager
class Roots(object):
def __init__(self, app):
self.extensions = {}
self.init_app(app)
def init_app(self, app):
# Establish two-way links.
self.app = app
app.roots = self
app.extensions['roots'] = self
from .config import setup_config
setup_config(app)
from .logs import setup_logs
setup_logs(app)
from .session import setup_session
setup_session(app)
self.extensions['login_manager'] = login = LoginManager(app)
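# Null user loader: resolve every user id to None until the application installs a real callback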
login.user_callback = lambda uid: None
self.extensions['auth'] = AuthManager(app)
from .mako import MakoTemplates
self.extensions['mako'] = MakoTemplates(app)
self.extensions['images'] = Images(app)
self.extensions['db'] = db = SQLAlchemy(app)
db.metadata.bind = db.engine # WTF do I need to do this for?!
self.extensions['mail'] = Mail(app)
from .routing import setup_routing
setup_routing(app)
from .errors import setup_errors
setup_errors(app)
|
from __future__ import absolute_import
import os
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.images import Images
# from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.acl import AuthManager
class Roots(object):
def __init__(self, app):
self.extensions = {}
self.init_app(app)
def init_app(self, app):
# Establish two-way links.
self.app = app
app.roots = self
app.extensions['roots'] = self
from .config import setup_config
setup_config(app)
from .logs import setup_logs
setup_logs(app)
from .session import setup_session
setup_session(app)
self.extensions['login_manager'] = LoginManager(app)
self.extensions['auth'] = AuthManager(app)
from .mako import MakoTemplates
self.extensions['mako'] = MakoTemplates(app)
self.extensions['images'] = Images(app)
self.extensions['db'] = db = SQLAlchemy(app)
db.metadata.bind = db.engine # WTF do I need to do this for?!
from .routing import setup_routing
setup_routing(app)
from .errors import setup_errors
setup_errors(app)
|
bsd-3-clause
|
Python
|
18897bf535b4885b9dfbf0a014c92656e8ed74b7
|
Update main to return properly formatted data
|
avinassh/Laozi,avinassh/Laozi
|
main.py
|
main.py
|
import telegram
import tornado.ioloop
import tornado.web
from tornado.options import define, options
from goodreads_api import get_book_details_by_name, BookNotFound
from utils import get_formatted_book_data
from settings import TELEGRAM_ACCESS_TOKEN, WEBHOOK_URL
define("port", default=5000, help="run on the given port", type=int)
bot = telegram.Bot(token=TELEGRAM_ACCESS_TOKEN)
class IndexHandler(tornado.web.RequestHandler):
def get(self):
self.write('wink, wink')
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write('wink, wink')
def post(self):
data = tornado.escape.json_decode(self.request.body)
try:
message_id = data['message']['message_id']
chat_id = data['message']['chat']['id']
text = data['message']['text']
chat_type = data['message']['chat']['type']
if chat_type != 'group':
return
response = parse_command(text=text)
bot.sendMessage(reply_to_message_id=message_id,
chat_id=chat_id, text=response)
except KeyError:
pass
except telegram.error.TelegramError:
print(data)
return
class WebHookHandler(tornado.web.RequestHandler):
def get(self):
# one time only operation
response = bot.setWebhook(WEBHOOK_URL)
if not response:
return self.write('Setting up webhook has failed')
return self.write('Webhook has been successfully set')
def parse_command(text):
# Telegram usually sends the whole text as something like:
# '/ping hello' or '/ping@botname hello'
command, argument = text.split(' ', 1)
if command == '/book' or command == '/book@goodreadsbot':
return get_book_details(book_name=argument)
return 'Invalid command'
def get_book_details(book_name):
try:
book_data = get_book_details_by_name(book_name=book_name)
return get_formatted_book_data(book_data=book_data)
except BookNotFound:
return "I couldn't find the book, can you be more precise?"
def make_app():
return tornado.web.Application([
(r'/', IndexHandler),
(r'/duh', MainHandler),
(r'/setwebhook', WebHookHandler)
])
if __name__ == "__main__":
tornado.options.parse_command_line()
app = make_app()
app.listen(options.port)
tornado.ioloop.IOLoop.current().start()
|
import telegram
import tornado.ioloop
import tornado.web
from tornado.options import define, options
from goodreads_api import get_book_details_by_name, BookNotFound
from settings import TELEGRAM_ACCESS_TOKEN, WEBHOOK_URL
define("port", default=5000, help="run on the given port", type=int)
bot = telegram.Bot(token=TELEGRAM_ACCESS_TOKEN)
class IndexHandler(tornado.web.RequestHandler):
def get(self):
self.write('wink, wink')
class MainHandler(tornado.web.RequestHandler):
def get(self):
self.write('wink, wink')
def post(self):
data = tornado.escape.json_decode(self.request.body)
try:
message_id = data['message']['message_id']
chat_id = data['message']['chat']['id']
text = data['message']['text']
chat_type = data['message']['chat']['type']
if chat_type != 'group':
return
response = parse_command(text=text)
bot.sendMessage(reply_to_message_id=message_id,
chat_id=chat_id, text=response)
except KeyError:
pass
except telegram.error.TelegramError:
print(data)
return
class WebHookHandler(tornado.web.RequestHandler):
def get(self):
# one time only operation
response = bot.setWebhook(WEBHOOK_URL)
if not response:
return self.write('Setting up webhook has failed')
return self.write('Webhook has been successfully set')
def parse_command(text):
# Telegram usually sends the whole text as something like:
# '/ping hello' or '/ping@botname hello'
command, argument = text.split(' ', 1)
if command == '/book' or command == '/book@goodreadsbot':
return get_book_details(book_name=argument)
def get_book_details(book_name):
try:
book_data = get_book_details_by_name(book_name)
except BookNotFound:
return "I couldn't find the book, can you be more precise?"
def make_app():
return tornado.web.Application([
(r'/', IndexHandler),
(r'/duh', MainHandler),
(r'/setwebhook', WebHookHandler)
])
if __name__ == "__main__":
tornado.options.parse_command_line()
app = make_app()
app.listen(options.port)
tornado.ioloop.IOLoop.current().start()
|
mit
|
Python
|
f9aa61fa326f9737e2af971a420da9c0652612ae
|
revise BaseDoctype to add generic generation method
|
tLDP/python-tldp,tLDP/python-tldp,tLDP/python-tldp
|
tldp/doctypes/common.py
|
tldp/doctypes/common.py
|
#! /usr/bin/python
from __future__ import absolute_import, division, print_function
import os
from ..utils import logger
class SignatureChecker(object):
@classmethod
def signatureLocation(cls, f):
f.seek(0)
buf = f.read(1024).lower()
for sig in cls.signatures:
try:
sindex = buf.index(sig.lower())
logger.debug("Found signature %s in %s at %s; doctype %s.",
sig, f.name, sindex, cls)
return sindex
except ValueError:
logger.debug("Signature %s not found in %s for type %s",
sig, f.name, cls.__name__)
return None
class BaseDoctype(object):
def __init__(self, *args, **kwargs):
self.source = kwargs.get('source')
self.output = kwargs.get('output')
self.platform = kwargs.get('platform')
self.logdir = os.path.join(self.output.dirname, 'logs')
if os.path.exists(self.logdir):
logger.warning("Found existing logs directory: %s", self.logdir)
else:
os.mkdir(self.logdir)
def generate(self):
os.chdir(self.output.dirname)
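# Run every build step, collecting each step's boolean result so overall success can be reported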
vector = [self.output.clean(),
self.platform_check(),
self.create_htmls(),
self.create_pdf(),
self.create_txt(),
self.create_html(),
]
result = all(vector)
logger.info("%s: generation of all documents succeeded: %s", self.source.stem, result)
return result
#
# -- end of file
|
#! /usr/bin/python
from __future__ import absolute_import, division, print_function
import os
from ..utils import logger
class SignatureChecker(object):
@classmethod
def signatureLocation(cls, f):
f.seek(0)
buf = f.read(1024).lower()
for sig in cls.signatures:
try:
sindex = buf.index(sig.lower())
logger.debug("Found signature %s in %s at %s; doctype %s.",
sig, f.name, sindex, cls)
return sindex
except ValueError:
logger.debug("Signature %s not found in %s for type %s",
sig, f.name, cls.__name__)
return None
class BaseDoctype(object):
def __init__(self, *args, **kwargs):
self.source = kwargs.get('source')
self.output = kwargs.get('output')
self.platform = kwargs.get('platform')
self.logdir = os.path.join(self.output.dirname, 'logs')
if os.path.exists(self.logdir):
logger.warning("Found existing logs directory: %s", self.logdir)
else:
os.mkdir(self.logdir)
def generate(self):
os.chdir(self.output.dirname)
self.output.clear()
self.platform_check()
self.create_htmls()
self.create_pdf()
self.create_txt()
self.create_html()
#
# -- end of file
|
mit
|
Python
|
577ebb6d4dc037bc912aa11d525ca4f2f09e7940
|
set default schema to defm
|
SANDAG/pydefm,SANDAG/pydefm,SANDAG/defm,SANDAG/pydefm
|
db/log.py
|
db/log.py
|
from sqlalchemy.orm import sessionmaker
from forecast import util
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from pysandag.database import get_connection_string
import os
def new_run(name='runs'):
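# Create the run-log table in the defm schema if needed, insert a row of rate versions, and return its id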
Base = declarative_base()
table_name = name
class Run(Base):
__tablename__ = table_name
__table_args__ = {'schema': 'defm'}
# define columns for the table
id = Column(Integer, primary_key=True)
base_rate_version = Column(Integer)
birth_rate_version = Column(Integer)
death_rate_version = Column(Integer)
migration_rate_version = Column(Integer)
householder_rate_version = Column(Integer)
#metadata = MetaData(schema="defm")
db_dir = 'results/'
if not os.path.exists(db_dir):
os.makedirs(db_dir)
engine = create_engine(get_connection_string("model_config.yml", 'output_database')).execution_options(
schema_translate_map={
None: "defm", # no schema name -> "defm"
})
Base.metadata.schema = 'defm'
if not engine.has_table(table_name,schema='defm'):
Base.metadata.create_all(engine)
db_session = sessionmaker(bind=engine)
session = db_session()
# Rate versions from yml file
rate_versions = util.yaml_to_dict('model_config.yml', 'rate_versions')
# Insert versions in database
model_run = Run(
base_rate_version=rate_versions['population'],
birth_rate_version=rate_versions['birth'],
death_rate_version=rate_versions['death'],
migration_rate_version=rate_versions['migration'],
householder_rate_version=rate_versions['householder'])
session.add(model_run)
session.commit()
run_id = model_run.id
return run_id
def insert_run(db_name,model_run_id,df_results,table_name):
engine = create_engine(get_connection_string("model_config.yml", 'output_database'))
# Insert prediction in the population table
df_results['run_id'] = model_run_id # foreign key to run log table
df_results.to_sql(name=table_name, con=engine, schema='defm', if_exists = 'append', index=True)
df_results = df_results.drop('run_id', 1) # remove run_id
|
from sqlalchemy.orm import sessionmaker
from forecast import util
from sqlalchemy import Column, ForeignKey, Integer, String
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine
from sqlalchemy import MetaData
from pysandag.database import get_connection_string
import os
def new_run(name='runs'):
Base = declarative_base()
table_name = name
class Run(Base):
__tablename__ = table_name
__table_args__ = {'schema': 'defm'}
# define columns for the table
id = Column(Integer, primary_key=True)
base_rate_version = Column(Integer)
birth_rate_version = Column(Integer)
death_rate_version = Column(Integer)
migration_rate_version = Column(Integer)
householder_rate_version = Column(Integer)
db_dir = 'results/'
if not os.path.exists(db_dir):
os.makedirs(db_dir)
engine = create_engine(get_connection_string("model_config.yml", 'output_database'))
if not engine.has_table(table_name):
Base.metadata.create_all(engine)
db_session = sessionmaker(bind=engine)
session = db_session()
# Rate versions from yml file
rate_versions = util.yaml_to_dict('model_config.yml', 'rate_versions')
# Insert versions in database
model_run = Run(
base_rate_version=rate_versions['population'],
birth_rate_version=rate_versions['birth'],
death_rate_version=rate_versions['death'],
migration_rate_version=rate_versions['migration'],
householder_rate_version=rate_versions['householder'])
session.add(model_run)
session.commit()
run_id = model_run.id
return run_id
def insert_run(db_name,model_run_id,df_results,table_name):
engine = create_engine(get_connection_string("model_config.yml", 'output_database'))
# Insert prediction in the population table
df_results['run_id'] = model_run_id # foreign key to run log table
df_results.to_sql(name=table_name, con=engine, schema='defm', if_exists = 'append', index=True)
df_results = df_results.drop('run_id', 1) # remove run_id
|
unknown
|
Python
|
7c5dad065fc3cb391609c9dd9a8d1b3effc197b1
|
fix typo
|
un33k/problems
|
paths/recursive_total_path.py
|
paths/recursive_total_path.py
|
import cProfile
def numberOfPaths(nXm_matrix):
"""
Returns the total number of paths from the top left
to the bottom right by moving right and down in a 2D array
of size n x m, where cells contain only `1` or `0` and
valid paths may pass only through `1` cells.
"""
height = len(nXm_matrix)
width = len(nXm_matrix[0])
def calculate(matrix, matrix_h, matrix_w, next_h, next_w, last_value=0):
"""
Local calculation recursive function.
"""
# Stop condition 1
if matrix[next_h][next_w] == 0:
return last_value
# Stop condition 2
if next_h == matrix_h and next_w == matrix_w:
return last_value + 1
# Move right
if next_w < matrix_w:
last_value = calculate(matrix, matrix_h, matrix_w,
next_h, next_w + 1, last_value)
# Move down
if next_h < matrix_h:
last_value = calculate(matrix, matrix_h, matrix_w,
next_h + 1, next_w, last_value)
return last_value
count = calculate(nXm_matrix, height-1, width-1, 0, 0)
return count
def run_test():
"""
Test function.
"""
d2_matrix = [
[1,1,1,1,1,1],
[1,1,0,1,1,1],
[1,1,1,0,1,1],
[1,1,0,1,1,1],
[1,1,1,1,0,1],
]
count = numberOfPaths(d2_matrix)
print "---------------------"
print "Total Number of Paths = {}\n".format(count)
if __name__ == "__main__":
"""
Run the code and profile it.
"""
cProfile.run('run_test()')
|
import cProfile
def numberOfPaths(nXm_matrix):
"""
Returns the total number of paths from the top left
to the bottom right by moving right and down in a 2D array
of size n x m, where cells contain only `1` or `0` and
valid paths may pass only through `1` cells.
"""
height = len(nXm_matrix)
width = len(nXm_matrix[0])
def calculate(matrix, matrix_h, matrix_w, next_h, next_w, last_value=0):
"""
Local calculation recursive function.
"""
# Stop condition 1
if matrix[next_h][next_w] == 0:
return last_value
# Stop condition 2
if next_h == matrix_h and next_w == matrix_w:
return last_value + 1
# Move right
if next_w < matrix_w:
last_value = calculate(matrix, matrix_h, matrix_w,
next_h, next_w + 1, last_value)
# Move down
if next_h < matrix_h:
last_value = calculate(matrix, matrix_h, matrix_w,
next_h + 1, next_w, last_value)
# Final Result
return last_value
count = calculate(nXm_matrix, height-1, width-1, 0, 0)
return count
def run_test():
"""
Test function.
"""
d2_matrix = [
[1,1,1,1,1,1],
[1,1,0,1,1,1],
[1,1,1,0,1,1],
[1,1,0,1,1,1],
[1,1,1,1,0,1],
]
count = numberOfPaths(d2_matrix)
print "---------------------"
print "Total Number of Paths = {}\n".format(count)
if __name__ == "__main__":
"""
Run the code and profile it.
"""
cProfile.run('run_test()')
|
bsd-2-clause
|
Python
|
f4bedfed9a7324574695c065f8a1b7132d5f4708
|
update ftbfs logic
|
paultag/ethel,paultag/ethel
|
ethel/commands/build.py
|
ethel/commands/build.py
|
from ethel.runners.sbuild import sbuild
from ethel.utils import upload
import glob
import os
# target package firehose
def run(dsc, package, job, firehose):
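# Build with sbuild; upload the resulting .changes file only when the build did not FTBFS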
suite = job['suite']
arch = job['arch']
info, out, ftbfs = sbuild(dsc, suite, arch)
changes = "{source}*{arch}.changes".format(
source=package['source'],
arch=arch
)
changes = list(glob.glob(changes))
if changes == [] and not ftbfs:
print(out)
raise Exception("Um. No changes but no FTBFS.")
if not ftbfs:
changes = changes[0]
upload(changes, job['_id'])
return (info, out, ftbfs)
|
from ethel.runners.sbuild import sbuild
from ethel.utils import upload
import glob
import os
# target package firehose
def run(dsc, package, job, firehose):
suite = job['suite']
arch = job['arch']
info, out, ftbfs = sbuild(dsc, suite, arch)
changes = "{source}*{arch}.changes".format(
source=package['source'],
arch=arch
)
changes = list(glob.glob(changes))
if changes == [] and not ftbfs:
print(out)
raise Exception("Um. No changes but no FTBFS.")
changes = changes[0]
if os.path.exists(changes):
upload(changes, job['_id'])
return (info, out, ftbfs)
|
mit
|
Python
|
026f7d8c07d5ec4e307caa692c66a04653565f52
|
Fix bug where notebook JS functions were not converted
|
chenjiandongx/pyecharts,chenjiandongx/pyecharts,chenjiandongx/pyecharts
|
pyecharts/render/engine.py
|
pyecharts/render/engine.py
|
# coding=utf-8
import re
from jinja2 import Environment
from pyecharts.commons.types import Any, Optional
from ..commons.utils import write_utf8_html_file
from ..datasets import EXTRA, FILENAMES
from ..globals import CurrentConfig
class RenderEngine:
def __init__(self, env: Optional[Environment] = None):
self.env = env or CurrentConfig.GLOBAL_ENV
@staticmethod
def generate_js_link(chart: Any) -> Any:
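# Resolve each JS dependency to a full script URL: bundled FILENAMES on the chart's js_host first, then the EXTRA mirrors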
if not chart.js_host:
chart.js_host = CurrentConfig.ONLINE_HOST
links = []
for dep in chart.js_dependencies.items:
# TODO: if?
if dep in FILENAMES:
f, ext = FILENAMES[dep]
links.append("{}{}.{}".format(chart.js_host, f, ext))
else:
for url, files in EXTRA.items():
if dep in files:
f, ext = files[dep]
links.append("{}{}.{}".format(url, f, ext))
break
chart.dependencies = links
return chart
def render_chart_to_file(self, template_name: str, chart: Any, path: str):
"""
Render a chart or page to local html files.
:param chart: A Chart or Page object
:param path: The destination file which the html code write to
:param template_name: The name of template file.
"""
tpl = self.env.get_template(template_name)
html = tpl.render(chart=self.generate_js_link(chart))
html = re.sub(r'\\n|\\t|"?__-o-__"?', "", html)
write_utf8_html_file(path, html)
def render_chart_to_notebook(self, template_name, **kwargs) -> str:
tpl = self.env.get_template(template_name)
html = tpl.render(**kwargs)
html = re.sub(r'\\n|\\t|"?__-o-__"?', "", html)
return html
|
# coding=utf-8
import re
from jinja2 import Environment
from pyecharts.commons.types import Any, Optional
from ..commons.utils import write_utf8_html_file
from ..datasets import EXTRA, FILENAMES
from ..globals import CurrentConfig
class RenderEngine:
def __init__(self, env: Optional[Environment] = None):
self.env = env or CurrentConfig.GLOBAL_ENV
@staticmethod
def generate_js_link(chart: Any) -> Any:
if not chart.js_host:
chart.js_host = CurrentConfig.ONLINE_HOST
links = []
for dep in chart.js_dependencies.items:
# TODO: if?
if dep in FILENAMES:
f, ext = FILENAMES[dep]
links.append("{}{}.{}".format(chart.js_host, f, ext))
else:
for url, files in EXTRA.items():
if dep in files:
f, ext = files[dep]
links.append("{}{}.{}".format(url, f, ext))
break
chart.dependencies = links
return chart
def render_chart_to_file(self, template_name: str, chart: Any, path: str):
"""
Render a chart or page to local html files.
:param chart: A Chart or Page object
:param path: The destination file which the html code write to
:param template_name: The name of template file.
"""
tpl = self.env.get_template(template_name)
html = tpl.render(chart=self.generate_js_link(chart))
html = re.sub(r'\\n|\\t|"?__-o-__"?', "", html)
write_utf8_html_file(path, html)
def render_chart_to_notebook(self, template_name, **kwargs) -> str:
tpl = self.env.get_template(template_name)
return tpl.render(**kwargs)
|
mit
|
Python
|
c5d224818903ccad9624ac4cb3449af82faa55f9
|
fix typo
|
houqp/floyd-cli,houqp/floyd-cli,mckayward/floyd-cli,mckayward/floyd-cli
|
floyd/main.py
|
floyd/main.py
|
import click
import sys
from distutils.version import LooseVersion
import floyd
from floyd.cli.utils import get_cli_version
from floyd.cli.auth import login, logout
from floyd.cli.data import data
from floyd.cli.experiment import clone, delete, info, init, logs, output, status, stop
from floyd.cli.run import run, restart
from floyd.cli.version import upgrade, version
from floyd.client.version import VersionClient
from floyd.log import configure_logger
@click.group()
@click.option('-h', '--host', default='https://www.floydhub.com', help='Floyd server endpoint')
@click.option('-v', '--verbose', count=True, help='Turn on debug logging')
def cli(host, verbose):
"""
Floyd CLI interacts with FloydHub server and executes your commands.
More help is available under each command listed below.
"""
import raven
raven.Client(
dsn='https://d8669005bd2b4b1ba6387ec57e1ce660:[email protected]/226940',
release=get_cli_version(),
environment='prod',
processors=('raven.processors.SanitizePasswordsProcessor',))
floyd.floyd_host = host
configure_logger(verbose)
check_cli_version()
def check_cli_version():
"""
Check if the current cli version satisfies the server requirements
"""
server_version = VersionClient().get_cli_version()
current_version = get_cli_version()
if LooseVersion(current_version) < LooseVersion(server_version.min_version):
print("""
Your version of the CLI (%s) is no longer compatible with the server.""" % current_version)
if click.confirm('Do you want to upgrade to version %s now?' % server_version.latest_version):
from floyd.cli.version import pip_upgrade
pip_upgrade()
sys.exit(0)
else:
print("""Your can manually run:
pip install -U floyd-cli
to upgrade to the latest version (%s))""" % server_version.latest_version)
sys.exit(0)
elif LooseVersion(current_version) < LooseVersion(server_version.latest_version):
print("""
New version of CLI (%s) is now available. To upgrade run:
pip install -U floyd-cli
""" % server_version.latest_version)
def add_commands(cli):
cli.add_command(clone)
cli.add_command(data)
cli.add_command(delete)
cli.add_command(info)
cli.add_command(init)
cli.add_command(login)
cli.add_command(logout)
cli.add_command(logs)
cli.add_command(output)
cli.add_command(status)
cli.add_command(stop)
cli.add_command(restart)
cli.add_command(run)
cli.add_command(upgrade)
cli.add_command(version)
add_commands(cli)
|
import click
import sys
from distutils.version import LooseVersion
import floyd
from floyd.cli.utils import get_cli_version
from floyd.cli.auth import login, logout
from floyd.cli.data import data
from floyd.cli.experiment import clone, delete, info, init, logs, output, status, stop
from floyd.cli.run import run, restart
from floyd.cli.version import upgrade, version
from floyd.client.version import VersionClient
from floyd.log import configure_logger
@click.group()
@click.option('-h', '--host', default='https://www.floydhub.com', help='Floyd server endpoint')
@click.option('-v', '--verbose', count=True, help='Turn on debug logging')
def cli(host, verbose):
"""
Floyd CLI interacts with FloydHub server and executes your commands.
More help is available under each command listed below.
"""
import raven
raven.Client(
dsn='https://d8669005bd2b4b1ba6387ec57e1ce660:[email protected]/226940',
release=get_cli_version(),
environment='prod',
processors=('raven.processors.SanitizePasswordsProcessr',))
floyd.floyd_host = host
configure_logger(verbose)
check_cli_version()
def check_cli_version():
"""
Check if the current cli version satisfies the server requirements
"""
server_version = VersionClient().get_cli_version()
current_version = get_cli_version()
if LooseVersion(current_version) < LooseVersion(server_version.min_version):
print("""
Your version of CLI (%s) is no longer compatible with server.""" % current_version)
if click.confirm('Do you want to upgrade to version %s now?' % server_version.latest_version):
from floyd.cli.version import pip_upgrade
pip_upgrade()
sys.exit(0)
else:
print("""Your can manually run:
pip install -U floyd-cli
to upgrade to the latest version (%s))""" % server_version.latest_version)
sys.exit(0)
elif LooseVersion(current_version) < LooseVersion(server_version.latest_version):
print("""
New version of CLI (%s) is now available. To upgrade run:
pip install -U floyd-cli
""" % server_version.latest_version)
def add_commands(cli):
cli.add_command(clone)
cli.add_command(data)
cli.add_command(delete)
cli.add_command(info)
cli.add_command(init)
cli.add_command(login)
cli.add_command(logout)
cli.add_command(logs)
cli.add_command(output)
cli.add_command(status)
cli.add_command(stop)
cli.add_command(restart)
cli.add_command(run)
cli.add_command(upgrade)
cli.add_command(version)
add_commands(cli)
|
apache-2.0
|
Python
|
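The upgrade gate in check_cli_version above rests on LooseVersion comparing dotted version strings component-wise rather than lexicographically; a quick self-contained illustration of that ordering:

from distutils.version import LooseVersion

# Components are compared numerically left to right, so 0.9.10 < 0.10.0
# even though "0.9.10" > "0.10.0" as plain strings.
assert LooseVersion("0.9.10") < LooseVersion("0.10.0")
assert "0.9.10" > "0.10.0"

def needs_upgrade(current, minimum):
    return LooseVersion(current) < LooseVersion(minimum)

print(needs_upgrade("0.8.3", "0.9.0"))  # True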
41d3298eb716ad813a82f56fbfb2771373338c09
|
Make this test pass by checking for 61 which is what OS X uses as connection refused, apparently
|
lindenlab/eventlet,collinstocks/eventlet,lindenlab/eventlet,collinstocks/eventlet,lindenlab/eventlet,tempbottle/eventlet,tempbottle/eventlet
|
greentest/test__socket_errors.py
|
greentest/test__socket_errors.py
|
# Copyright (c) 2008-2009 AG Projects
# Author: Denis Bilenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from eventlet import api
if hasattr(api._threadlocal, 'hub'):
from eventlet.green import socket
else:
import socket
class TestSocketErrors(unittest.TestCase):
def test_connection_refused(self):
s = socket.socket()
try:
s.connect(('127.0.0.1', 81))
except socket.error, ex:
code, text = ex.args
assert code in [111, 61], (code, text)
assert 'refused' in text.lower(), (code, text)
if __name__=='__main__':
unittest.main()
|
# Copyright (c) 2008-2009 AG Projects
# Author: Denis Bilenko
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import unittest
from eventlet import api
if hasattr(api._threadlocal, 'hub'):
from eventlet.green import socket
else:
import socket
class TestSocketErrors(unittest.TestCase):
def test_connection_refused(self):
s = socket.socket()
try:
s.connect(('127.0.0.1', 81))
except socket.error, ex:
code, text = ex.args
assert code == 111, (code, text)
assert 'refused' in text.lower(), (code, text)
if __name__=='__main__':
unittest.main()
|
mit
|
Python
|
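The hard-coded [111, 61] in the fixed test corresponds to ECONNREFUSED on Linux and macOS respectively. A sketch of a more portable variant that compares against the errno constant instead of magic numbers (it assumes the chosen local port, 81 here, is actually closed):

import errno
import socket

s = socket.socket()
try:
    s.connect(("127.0.0.1", 81))
except OSError as ex:
    # errno.ECONNREFUSED is 111 on Linux and 61 on macOS
    assert ex.errno == errno.ECONNREFUSED, (ex.errno, ex.strerror)
finally:
    s.close()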
e25ff765e423511da57f04c96227d2ca69e7c149
|
Update focal_loss.py
|
mkocabas/focal-loss-keras
|
focal_loss.py
|
focal_loss.py
|
import tensorflow as tf
from keras import backend as K
'''
Compatible with tensorflow backend
'''
def focal_loss(gamma=2., alpha=.25):
def focal_loss_fixed(y_true, y_pred):
pt_1 = tf.where(tf.equal(y_true, 1), y_pred, tf.ones_like(y_pred))
pt_0 = tf.where(tf.equal(y_true, 0), y_pred, tf.zeros_like(y_pred))
return -K.mean(alpha * K.pow(1. - pt_1, gamma) * K.log(pt_1)) - K.mean((1 - alpha) * K.pow(pt_0, gamma) * K.log(1. - pt_0))
return focal_loss_fixed
|
from keras import backend as K
'''
Compatible with tensorflow backend
'''
def focal_loss(gamma=2., alpha=.25):
def focal_loss_fixed(y_true, y_pred):
pt_1 = tf.where(tf.equal(y_true, 1), y_pred, tf.ones_like(y_pred))
pt_0 = tf.where(tf.equal(y_true, 0), y_pred, tf.zeros_like(y_pred))
return -K.sum(alpha * K.pow(1. - pt_1, gamma) * K.log(pt_1))-K.sum((1-alpha) * K.pow( pt_0, gamma) * K.log(1. - pt_0))
return focal_loss_fixed
|
mit
|
Python
|
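For a sanity check of the formula above without a TensorFlow session, here is a NumPy restatement of the same mean-reduced focal loss (the clipping epsilon is an added safeguard, not part of the original):

import numpy as np

def focal_loss_np(y_true, y_pred, gamma=2.0, alpha=0.25, eps=1e-7):
    y_pred = np.clip(y_pred, eps, 1.0 - eps)   # guard the logs
    pt_1 = np.where(y_true == 1, y_pred, 1.0)  # p for positives, else 1
    pt_0 = np.where(y_true == 0, y_pred, 0.0)  # p for negatives, else 0
    return (-np.mean(alpha * (1.0 - pt_1) ** gamma * np.log(pt_1))
            - np.mean((1.0 - alpha) * pt_0 ** gamma * np.log(1.0 - pt_0)))

# With gamma=0 and alpha=0.5 this reduces to half the usual binary
# cross-entropy, which is an easy property to spot-check:
y_true = np.array([1.0, 0.0, 1.0, 0.0])
y_pred = np.array([0.9, 0.1, 0.6, 0.4])
print(focal_loss_np(y_true, y_pred))
print(focal_loss_np(y_true, y_pred, gamma=0.0, alpha=0.5))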
6b992aa75c0f553ca5066cda269e93d4f63ea4a5
|
Optimize banned host validation test
|
qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
|
corehq/util/tests/test_validation.py
|
corehq/util/tests/test_validation.py
|
from random import sample
from corehq.util.validation import is_url_or_host_banned
from django.test import TestCase
def sample_range(start, stop):
yield start
num_samples = min(8, stop - start)
for middle in sample(xrange(start + 1, stop), num_samples):
yield middle
yield stop
class ValidationTestCase(TestCase):
def testBannedHosts(self):
self.assertTrue(is_url_or_host_banned('anything.commcarehq.org'))
for i in sample_range(0, 255):
for j in sample_range(0, 255):
for k in sample_range(0, 255):
self.assertTrue(is_url_or_host_banned('10.%s.%s.%s' % (i, j, k)))
for i in sample_range(16, 31):
for j in sample_range(0, 255):
for k in sample_range(0, 255):
self.assertTrue(is_url_or_host_banned('172.%s.%s.%s' % (i, j, k)))
for i in sample_range(0, 255):
for j in sample_range(0, 255):
self.assertTrue(is_url_or_host_banned('192.168.%s.%s' % (i, j)))
self.assertTrue(is_url_or_host_banned('127.0.0.1'))
self.assertTrue(is_url_or_host_banned('localhost'))
self.assertFalse(is_url_or_host_banned('dimagi.com'))
|
from corehq.util.validation import is_url_or_host_banned
from django.test import TestCase
def inclusive_range(start, stop):
return range(start, stop + 1)
class ValidationTestCase(TestCase):
def testBannedHosts(self):
self.assertTrue(is_url_or_host_banned('anything.commcarehq.org'))
for i in inclusive_range(0, 255):
for j in inclusive_range(0, 255):
for k in inclusive_range(0, 255):
self.assertTrue(is_url_or_host_banned('10.%s.%s.%s' % (i, j, k)))
for i in inclusive_range(16, 31):
for j in inclusive_range(0, 255):
for k in inclusive_range(0, 255):
self.assertTrue(is_url_or_host_banned('172.%s.%s.%s' % (i, j, k)))
for i in inclusive_range(0, 255):
for j in inclusive_range(0, 255):
self.assertTrue(is_url_or_host_banned('192.168.%s.%s' % (i, j)))
self.assertTrue(is_url_or_host_banned('127.0.0.1'))
self.assertTrue(is_url_or_host_banned('localhost'))
self.assertFalse(is_url_or_host_banned('dimagi.com'))
|
bsd-3-clause
|
Python
|
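The point of sample_range in the optimized test above is to always keep both endpoints while thinning the interior, shrinking the exhaustive 256^3 sweep to roughly 10^3 probes without losing the boundary cases. A Python 3 rendering (xrange becomes range):

from random import sample

def sample_range(start, stop):
    yield start
    num_samples = min(8, stop - start)
    for middle in sample(range(start + 1, stop), num_samples):
        yield middle
    yield stop

values = sorted(sample_range(0, 255))
print(values[0], values[-1], len(values))  # 0 255 10 -- endpoints always kept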
a4b7be1a8c6d44c272ce25acf9513054d6bee4ad
|
Fix in listports to work with windows
|
Blinkinlabs/BlinkyPendant,timtomch/BlinkyPendant,Blinkinlabs/BlinkyPendant,Blinkinlabs/BlinkyPendant,timtomch/BlinkyPendant,timtomch/BlinkyPendant
|
python_loader/listports.py
|
python_loader/listports.py
|
"""BlinkyTape Python communication library.
This code assumes stock serialLoop() in the firmware.
Commands are issued in 3-byte blocks, with pixel data
encoded in RGB triplets in range 0-254, sent sequentially
and a triplet ending with a 255 causes the accumulated pixel
data to display (a show command).
Note that with the stock firmware changing the maximum brightness
over serial communication is impossible.
"""
from serial.tools import list_ports
import re
import platform
def listPorts():
allPorts = list_ports.comports()
ports = []
# Regular expression that identifies the serial port to use
# for OS/X:
if platform.system() == 'Darwin':
match = '/dev/cu\.usb*'
# For Linux:
elif platform.system() == 'Linux':
match = '/dev/ttyACM*'
# TODO: Windows ?
else:
match = '.'
for port in allPorts:
# If the port name is acceptable, add it to the list
if re.match(match, port[0]) != None:
ports.append(port[0])
return ports
# Example code
if __name__ == "__main__":
print listPorts()
|
"""BlinkyTape Python communication library.
This code assumes stock serialLoop() in the firmware.
Commands are issued in 3-byte blocks, with pixel data
encoded in RGB triplets in range 0-254, sent sequentially
and a triplet ending with a 255 causes the accumulated pixel
data to display (a show command).
Note that with the stock firmware changing the maximum brightness
over serial communication is impossible.
"""
from serial.tools import list_ports
import re
import platform
def listPorts():
allPorts = list_ports.comports()
ports = []
# Regular expression that identifies the serial port to use
# for OS/X:
if platform.system() == 'Darwin':
match = '/dev/cu\.usb*'
# For Linux:
elif platform.system() == 'Linux':
match = '/dev/ttyACM*'
# TODO: Windows ?
else:
match = "*"
for port in allPorts:
# If the port name is acceptable, add it to the list
if re.match(match, port[0]) != None:
ports.append(port[0])
return ports
# Example code
if __name__ == "__main__":
print listPorts()
|
mit
|
Python
|
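The one-character fix above matters because '*' is a shell glob wildcard, not a regular expression: as a regex it has nothing to repeat, so re.match raises, whereas '.' matches any port name. A quick demonstration:

import re

try:
    re.match('*', 'COM3')
except re.error as exc:
    print('bad pattern:', exc)      # nothing to repeat at position 0

print(bool(re.match('.', 'COM3')))  # True: '.' accepts any non-empty name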
c1ad95ee4680836fc359ee636a2252925a41b3a9
|
fix raw_id_fields on admin. closes #33
|
RDXT/django-knowledge,legrostdg/django-knowledge,zapier/django-knowledge,RDXT/django-knowledge,CantemoInternal/django-knowledge,legrostdg/django-knowledge,zapier/django-knowledge,inovasolutions/django-knowledge,CantemoInternal/django-knowledge,legrostdg/django-knowledge,RDXT/django-knowledge,inovasolutions/django-knowledge,7wonders/django-knowledge,7wonders/django-knowledge,zapier/django-knowledge,inovasolutions/django-knowledge,7wonders/django-knowledge,CantemoInternal/django-knowledge
|
knowledge/admin.py
|
knowledge/admin.py
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
class CategoryAdmin(admin.ModelAdmin):
list_display = [f.name for f in Category._meta.fields]
prepopulated_fields = {'slug': ('title', )}
admin.site.register(Category, CategoryAdmin)
class QuestionAdmin(admin.ModelAdmin):
list_display = [f.name for f in Question._meta.fields]
list_select_related = True
raw_id_fields = ['user']
admin.site.register(Question, QuestionAdmin)
class ResponseAdmin(admin.ModelAdmin):
list_display = [f.name for f in Response._meta.fields]
list_select_related = True
raw_id_fields = ['user', 'question']
admin.site.register(Response, ResponseAdmin)
|
from django.contrib import admin
from knowledge.models import Question, Response, Category
class CategoryAdmin(admin.ModelAdmin):
list_display = [f.name for f in Category._meta.fields]
prepopulated_fields = {'slug': ('title', )}
admin.site.register(Category, CategoryAdmin)
class QuestionAdmin(admin.ModelAdmin):
list_display = [f.name for f in Question._meta.fields]
list_select_related = True
admin.site.register(Question, QuestionAdmin)
class ResponseAdmin(admin.ModelAdmin):
list_display = [f.name for f in Response._meta.fields]
list_select_related = True
admin.site.register(Response, ResponseAdmin)
|
isc
|
Python
|
4c052b2429d7b48768f51cef0627e7a1772909b3
|
Fix import
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
dbaas/workflow/steps/tests/test_base_step.py
|
dbaas/workflow/steps/tests/test_base_step.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django.test import TestCase
from ..util.base import BaseStep
LOG = logging.getLogger(__name__)
class BaseStepTestCase(TestCase):
def setUp(self):
self.base_step = BaseStep()
def test_has_do_method(self):
self.assertTrue(hasattr(self.base_step, 'do'))
def test_has_undo_method(self):
self.assertTrue(hasattr(self.base_step, 'undo'))
def test_do_requires_workflow_dict(self):
try:
self.base_step.do()
except TypeError:
exception = True
self.assertTrue(exception)
def test_undo_requires_workflow_dict(self):
try:
self.base_step.undo()
except TypeError:
exception = True
self.assertTrue(exception)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import logging
from django.test import TestCase
from ..base import BaseStep
LOG = logging.getLogger(__name__)
class BaseStepTestCase(TestCase):
def setUp(self):
self.base_step = BaseStep()
def test_has_do_method(self):
self.assertTrue(hasattr(self.base_step, 'do'))
def test_has_undo_method(self):
self.assertTrue(hasattr(self.base_step, 'undo'))
def test_do_requires_workflow_dict(self):
try:
self.base_step.do()
except TypeError:
exception = True
self.assertTrue(exception)
def test_undo_requires_workflow_dict(self):
try:
self.base_step.undo()
except TypeError:
exception = True
self.assertTrue(exception)
|
bsd-3-clause
|
Python
|
b70e34d399b171b3d8e0731206dde54f8d029379
|
Add uuid to import
|
goller/casstest
|
casstest/casstest.py
|
casstest/casstest.py
|
# -*- coding: utf-8 -*-
import os
import uuid
from cassandra.io.libevreactor import LibevConnection
from cassandra.cluster import Cluster
seed = os.environ.get('SEED', '127.0.0.1')
port = int(os.environ.get('PORT', '9042'))
keyspace = os.environ.get('KEYSPACE', 'test')
def test_read(session, user_id, age):
read_query = 'SELECT name, age, user_id FROM users'
rows = session.execute(read_query)
for row in rows:
assert(row.name == "John O'Reilly")
assert(row.age == age)
assert(row.user_id == user_id)
def main():
print('Connecting to seed {0} on port {1}'.format(seed, port))
cluster = Cluster([seed], port=port)
cluster.connection_class = LibevConnection
print('Using keyspace {0}'.format(keyspace))
session = cluster.connect(keyspace)
user_id = uuid.uuid1()
create_query = """
INSERT INTO users (name, age, user_id)
VALUES (%s, %s, %s)
""", ("John O'Reilly", 42, user_id)
session.execute(create_query)
test_read(session, user_id, 42)
update_query = 'UPDATE users SET age = 84'
session.execute(update_query)
test_read(session, user_id, 84)
delete_query = 'DELETE name, age, user_id FROM users'
session.execute(delete_query)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import os
from cassandra.io.libevreactor import LibevConnection
from cassandra.cluster import Cluster
seed = os.environ.get('SEED', '127.0.0.1')
port = int(os.environ.get('PORT', '9042'))
keyspace = os.environ.get('KEYSPACE', 'test')
def test_read(session, user_id, age):
read_query = 'SELECT name, age, user_id FROM users'
rows = session.execute(read_query)
for row in rows:
assert(row.name == "John O'Reilly")
assert(row.age == age)
assert(row.user_id == user_id)
def main():
print('Connecting to seed {0} on port {1}'.format(seed, port))
cluster = Cluster([seed], port=port)
cluster.connection_class = LibevConnection
print('Using keyspace {0}'.format(keyspace))
session = cluster.connect(keyspace)
user_id = uuid.uuid1()
create_query = """
INSERT INTO users (name, age, user_id)
VALUES (%s, %s, %s)
""", ("John O'Reilly", 42, user_id)
session.execute(create_query)
test_read(session, user_id, 42)
update_query = 'UPDATE users SET age = 84'
session.execute(update_query)
test_read(session, user_id, 84)
delete_query = 'DELETE name, age, user_id FROM users'
session.execute(delete_query)
if __name__ == '__main__':
main()
|
bsd-3-clause
|
Python
|
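One caveat worth flagging in the snippet above: the trailing ", (...)" makes create_query a (query, params) tuple, so session.execute(create_query) is handed a tuple rather than a statement plus bound values. The usual driver idiom passes them as two arguments (a sketch only, not run against a live cluster):

create_query = """
    INSERT INTO users (name, age, user_id)
    VALUES (%s, %s, %s)
"""
params = ("John O'Reilly", 42, None)   # user_id would be uuid.uuid1() here
# session.execute(create_query, params)
print(type((create_query, params)))    # <class 'tuple'> -- the shape the original builds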
85680ed5792456a109583830bcbfbc30ef3b62c4
|
Complete 'About Lambdas' koans
|
JesusMtnez/devexperto-challenge,JesusMtnez/devexperto-challenge
|
jesusmtnez/python/koans/koans/about_lambdas.py
|
jesusmtnez/python/koans/koans/about_lambdas.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based slightly on the lambdas section of AboutBlocks in the Ruby Koans
#
from runner.koan import *
class AboutLambdas(Koan):
def test_lambdas_can_be_assigned_to_variables_and_called_explicitly(self):
add_one = lambda n: n + 1
self.assertEqual(11, add_one(10))
# ------------------------------------------------------------------
def make_order(self, order):
return lambda qty: str(qty) + " " + order + "s"
def test_accessing_lambda_via_assignment(self):
sausages = self.make_order('sausage')
eggs = self.make_order('egg')
self.assertEqual('3 sausages', sausages(3))
self.assertEqual('2 eggs', eggs(2))
def test_accessing_lambda_without_assignment(self):
self.assertEqual('39823 spams', self.make_order('spam')(39823))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Based slightly on the lambdas section of AboutBlocks in the Ruby Koans
#
from runner.koan import *
class AboutLambdas(Koan):
def test_lambdas_can_be_assigned_to_variables_and_called_explicitly(self):
add_one = lambda n: n + 1
self.assertEqual(__, add_one(10))
# ------------------------------------------------------------------
def make_order(self, order):
return lambda qty: str(qty) + " " + order + "s"
def test_accessing_lambda_via_assignment(self):
sausages = self.make_order('sausage')
eggs = self.make_order('egg')
self.assertEqual(__, sausages(3))
self.assertEqual(__, eggs(2))
def test_accessing_lambda_without_assignment(self):
self.assertEqual(__, self.make_order('spam')(39823))
|
mit
|
Python
|
d9719a20d4ee433aaab27a648888966d1ce0163b
|
Implement inversion counting with global variable
|
timpel/stanford-algs,timpel/stanford-algs
|
count-inversions/count_inversions.py
|
count-inversions/count_inversions.py
|
from random import randint
import sys
count = 0
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
global count
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] <= arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
count += len(arr1) - i
j += 1
return result
def main(arr_len):
#test_arr = [randint(0,arr_len) for n in range(arr_len)]
test_arr = [1,6,3,4,8,2,5,0,3,6,5,4,7,2,2,5,6,8,1]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
        print 'Format: python count_inversions.py <array-length>'
main(arr_len)
print count
|
from random import randint
import sys
def sort_and_count(arr):
n = len(arr)
if n == 1:
return arr
else:
first_half = arr[:n/2]
second_half = arr[n/2:]
return merge_and_count_split(sort_and_count(first_half), sort_and_count(second_half))
def merge_and_count_split(arr1, arr2):
i, j = 0, 0
result = []
while 1:
if i == len(arr1):
result.extend(arr2[j:])
break
if j == len(arr2):
result.extend(arr1[i:])
break
if (arr1[i] < arr2[j]):
result.append(arr1[i])
i += 1
else:
result.append(arr2[j])
j += 1
return result
def main(arr_len):
test_arr = [randint(0,arr_len) for n in range(arr_len)]
return sort_and_count(test_arr)
if __name__ == '__main__':
try:
arr_len = int(sys.argv[1])
except (IndexError, ValueError):
print 'Format: python merge-sort.py <array-length>'
print main(arr_len)
|
mit
|
Python
|
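A cheap way to validate the merge-based counter above is an O(n^2) brute force over the same hard-coded array; both methods should report the same inversion count:

def brute_force_inversions(arr):
    n = len(arr)
    return sum(1 for i in range(n)
                 for j in range(i + 1, n) if arr[i] > arr[j])

test_arr = [1, 6, 3, 4, 8, 2, 5, 0, 3, 6, 5, 4, 7, 2, 2, 5, 6, 8, 1]
print(brute_force_inversions(test_arr))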
17dceb0d66ac72547c6de6153d79d9bcd5dd9926
|
put _id and properties in the right places
|
NCI-GDC/gdcdatamodel,NCI-GDC/gdcdatamodel
|
gdcdatamodel/mappings/mappings.py
|
gdcdatamodel/mappings/mappings.py
|
from gdcdatamodel import node_avsc_json
from mapped_entities import (
file_tree, file_traversal,
participant_tree, participant_traversal,
annotation_tree, annotation_traversal,
ONE_TO_MANY, ONE_TO_ONE, annotation_tree
)
def _get_es_type(_type):
if 'long' in _type or 'int' in _type:
return 'long'
else:
return 'string'
def _munge_properties(source):
a = [n['fields'] for n in node_avsc_json if n['name'] == source]
if not a:
return
fields = [b['type'] for b in a[0] if b['name'] == 'properties']
fields[0][0]['fields'].append({
'name': '{}_id'.format(source),
'type': 'string'
})
return {b['name']: {
'type': _get_es_type(b['type']),
'index': 'not_analyzed'
} for b in fields[0][0]['fields']}
def _walk_tree(tree, mapping):
for k, v in [(k, v) for k, v in tree.items() if k != 'corr']:
corr, name = v['corr']
mapping[name] = {'properties': _munge_properties(k)}
_walk_tree(tree[k], mapping[name])
return mapping
def get_file_es_mapping(include_participant=True):
files = {"_id": {"path": "file_id"}}
files["properties"] = _walk_tree(file_tree, _munge_properties("file"))
if include_participant:
files["properties"]['participant'] = get_participant_es_mapping(False)
files["properties"]["participant"]["type"] = "nested"
return files
def get_participant_es_mapping(include_file=True):
participant = {"_id": {"path": "file_id"}}
participant["properties"] = _walk_tree(participant_tree, _munge_properties("participant"))
if include_file:
participant["properties"]['files'] = get_file_es_mapping(True)
participant["properties"]["files"]["type"] = "nested"
return participant
def get_annotation_es_mapping(include_file=True):
annotation = _walk_tree(annotation_tree, _munge_properties("annotation"))
annotation["_id"] = {"path": "annotation_id"}
if include_file:
annotation['files'] = get_file_es_mapping(False)
annotation["files"]["type"] = "nested"
return annotation
|
from gdcdatamodel import node_avsc_json
from mapped_entities import (
file_tree, file_traversal,
participant_tree, participant_traversal,
annotation_tree, annotation_traversal,
ONE_TO_MANY, ONE_TO_ONE, annotation_tree
)
def _get_es_type(_type):
if 'long' in _type or 'int' in _type:
return 'long'
else:
return 'string'
def _munge_properties(source):
a = [n['fields'] for n in node_avsc_json if n['name'] == source]
if not a:
return
fields = [b['type'] for b in a[0] if b['name'] == 'properties']
fields[0][0]['fields'].append({
'name': '{}_id'.format(source),
'type': 'string'
})
return {b['name']: {
'type': _get_es_type(b['type']),
'index': 'not_analyzed'
} for b in fields[0][0]['fields']}
def _walk_tree(tree, mapping):
for k, v in [(k, v) for k, v in tree.items() if k != 'corr']:
corr, name = v['corr']
mapping[name] = {'properties': _munge_properties(k)}
_walk_tree(tree[k], mapping[name])
return mapping
def get_file_es_mapping(include_participant=True):
files = _walk_tree(file_tree, _munge_properties("file"))
files["_id"] = {"path": "file_id"}
if include_participant:
files['participant'] = get_participant_es_mapping(False)
files["participant"]["type"] = "nested"
return files
def get_participant_es_mapping(include_file=True):
participant = _walk_tree(participant_tree, _munge_properties("participant"))
participant["_id"] = {"path": "participant_id"}
if include_file:
participant['files'] = get_file_es_mapping(True)
participant["files"]["type"] = "nested"
return participant
def get_annotation_es_mapping(include_file=True):
annotation = _walk_tree(annotation_tree, _munge_properties("annotation"))
annotation["_id"] = {"path": "annotation_id"}
if include_file:
annotation['files'] = get_file_es_mapping(False)
annotation["files"]["type"] = "nested"
return annotation
|
apache-2.0
|
Python
|
36b2ca696bf6955d54ade1f917a1caea59b2d2d1
|
update version to 0.7.4
|
Widukind/dlstats,Widukind/dlstats
|
dlstats/version.py
|
dlstats/version.py
|
VERSION = (0, 7, 4)
def version_str():
if len(VERSION) == 3:
return "%s.%s.%s" % VERSION
elif len(VERSION) == 4:
return "%s.%s.%s-%s" % VERSION
else:
raise IndexError("Incorrect format for the VERSION tuple")
|
VERSION = (0, 7, 3)
def version_str():
if len(VERSION) == 3:
return "%s.%s.%s" % VERSION
elif len(VERSION) == 4:
return "%s.%s.%s-%s" % VERSION
else:
raise IndexError("Incorrect format for the VERSION tuple")
|
agpl-3.0
|
Python
|
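For completeness, the second branch of version_str above exists for pre-release tags; with a 4-tuple it renders as a dash-suffixed string:

VERSION = (0, 7, 4, 'rc1')
print("%s.%s.%s-%s" % VERSION)  # 0.7.4-rc1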
bca4bb08fbfffe27214803429ad63f7800f36428
|
Update demo.py to show different stock codes
|
7forz/numpy_pandas_tushare_learning
|
demo.py
|
demo.py
|
#!/usr/bin/python3
# -*- encoding: utf-8 -*-
from indexes import *
import global_data
def main(stock='000001', date=global_data.NEWEST_TRADE_DATE, p_MA=5, p_MACD=(12,26,9),
p_RSI=6, p_KDJ=(9,3), p_MTM=(12,6), p_CCI=14):
"""
Example
stock: str, '000001',
date: str, '2017-08-18',
p_MA: int, 5
p_MACD: tuple, (12,26,9)
p_RSI: int, 6
p_KDJ: tuple, (9,3)
p_MTM: tuple, (12,6)
p_CCI: int, 14
"""
rsi = RSI(stock)
ma = MA(stock)
macd = MACD(stock)
mtm = MTM(stock)
kdj = KDJ(stock)
cci = CCI(stock)
global_data.add_data(stock) # download data to database
print('Demo for ', stock, date)
print('MA%s' % str(p_MA), ma.get_ma(date, p_MA))
print('MACD%s' % str(p_MACD), macd.get_macd(date, *p_MACD))
print('RSI%s' % str(p_RSI), rsi.get_rsi(date, p_RSI))
print('KDJ%s' % str(p_KDJ), kdj.get_kdj(date, *p_KDJ))
print('MTM%s' % str(p_MTM), mtm.get_mtm(date, *p_MTM))
print('CCI%s' % str(p_CCI), cci.get_cci(date, p_CCI))
# global_data.save_database(global_data.DB_FILE)
if __name__ == '__main__':
main(stock='000001')
main(stock='HK.00700')
|
#!/usr/bin/python3
# -*- encoding: utf-8 -*-
from indexes import *
import global_data
def main(stock='000001', date=global_data.NEWEST_TRADE_DATE, p_MA=5, p_MACD=(12,26,9),
p_RSI=6, p_KDJ=(9,3), p_MTM=(12,6), p_CCI=14):
"""
Example
stock: str, '000001',
date: str, '2017-08-18',
p_MA: int, 5
p_MACD: tuple, (12,26,9)
p_RSI: int, 6
p_KDJ: tuple, (9,3)
p_MTM: tuple, (12,6)
p_CCI: int, 14
"""
rsi = RSI(stock)
ma = MA(stock)
macd = MACD(stock)
mtm = MTM(stock)
kdj = KDJ(stock)
cci = CCI(stock)
global_data.add_data(stock) # download data to database
print('Demo for ', stock, date)
print('MA%s' % str(p_MA), ma.get_ma(date, p_MA))
print('MACD%s' % str(p_MACD), macd.get_macd(date, *p_MACD))
print('RSI%s' % str(p_RSI), rsi.get_rsi(date, p_RSI))
print('KDJ%s' % str(p_KDJ), kdj.get_kdj(date, *p_KDJ))
print('MTM%s' % str(p_MTM), mtm.get_mtm(date, *p_MTM))
print('CCI%s' % str(p_CCI), cci.get_cci(date, p_CCI))
# global_data.save_database(global_data.DB_FILE)
if __name__ == '__main__':
main()
|
agpl-3.0
|
Python
|
04713d6aa951b444098cddead17de35421135c99
|
adjust BNF conf + banner remover (#372)
|
medialab/hyphe,medialab/hyphe,medialab/hyphe,medialab/hyphe
|
hyphe_backend/lib/webarchives.py
|
hyphe_backend/lib/webarchives.py
|
import re
ARCHIVES_OPTIONS = {
"": {
"label": "Disabled",
"description": "crawl the live web, not any kind of web archive"
},
"archive.org": {
"label": "Web.Archive.org",
"description": "crawl worldwide web archives maintained by Archive.org",
"url_prefix": "https://web.archive.org/web/"
},
"bnf.fr": {
"label": "ArchivesInternet.BNF.fr",
"description": "crawl France's official web archives maintained by BNF",
"url_prefix": "http://pfcarchivesinternet.bnf.fr",
"proxy": "pfcarchivesinternet.bnf.fr:9115"
}
}
def validateOption(value):
return type(value) in [str, bytes, unicode] and value.lower() in [x.lower() for x in ARCHIVES_OPTIONS.keys()]
def validateOptions(values):
return all(validateOption(v) for v in values)
def validateArchiveDate(dt):
"""be a string or an int of the form YYYYSMMSDD with S being non numerical separators or empty and year comprised between 1980 and 2050."""
try:
valid_dt = re.sub(r"\D", "", str(dt))
if len(valid_dt) != 8:
return False
year = int(valid_dt[0:4])
month = int(valid_dt[4:6])
day = int(valid_dt[6:8])
if not (
1980 <= year <= 2050 and
1 <= month <= 12 and
1 <= day <= 31
):
return False
except:
return False
return True
RE_ARCHIVE_REDIRECT = r'function go\(\) \{.*document.location.href = "(%s/[^"]*)".*<p class="code shift red">Got an HTTP (\d+) response at crawl time</p>.*<p class="code">Redirecting to...</p>'
RE_BNF_ARCHIVES_PERMALINK = re.compile(r'<input id="permalink" class="BANNER_PERMALIEN_LINK_CUSTOMED" value="([^"]+)"')
RE_BNF_ARCHIVES_BANNER = re.compile(r'<!--\n\s+FILE ARCHIVED ON.*<!--\n\s+END.*?-->', re.DOTALL)
|
import re
ARCHIVES_OPTIONS = {
"": {
"label": "Disabled",
"description": "crawl the live web, not any kind of web archive"
},
"archive.org": {
"label": "Web.Archive.org",
"description": "crawl worldwide web archives maintained by Archive.org",
"url_prefix": "https://web.archive.org/web/"
},
"bnf.fr": {
"label": "ArchivesInternet.BNF.fr",
"description": "crawl France's official web archives maintained by BNF",
"url_prefix": "http://pcfarchivesinternet.bnf.fr",
"proxy": "pcfarchivesinternet.bnf.fr:8888"
}
}
def validateOption(value):
return type(value) in [str, bytes, unicode] and value.lower() in [x.lower() for x in ARCHIVES_OPTIONS.keys()]
def validateOptions(values):
return all(validateOption(v) for v in values)
def validateArchiveDate(dt):
"""be a string or an int of the form YYYYSMMSDD with S being non numerical separators or empty and year comprised between 1980 and 2050."""
try:
valid_dt = re.sub(r"\D", "", str(dt))
if len(valid_dt) != 8:
return False
year = int(valid_dt[0:4])
month = int(valid_dt[4:6])
day = int(valid_dt[6:8])
if not (
1980 <= year <= 2050 and
1 <= month <= 12 and
1 <= day <= 31
):
return False
except:
return False
return True
RE_ARCHIVE_REDIRECT = r'function go\(\) \{.*document.location.href = "(%s/[^"]*)".*<p class="code shift red">Got an HTTP (\d+) response at crawl time</p>.*<p class="code">Redirecting to...</p>'
RE_BNF_ARCHIVES_PERMALINK = re.compile(r'<input id="permalink" class="BANNER_PERMALIEN_LINK_CUSTOMED" value="([^"]+)"')
RE_BNF_ARCHIVES_BANNER = re.compile(r'<div id="MAIN_BANNER_BNF_CUSTOM".*$', re.DOTALL)
|
agpl-3.0
|
Python
|
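Spot checks of the validateArchiveDate contract above: every non-digit is stripped first, so any separator style works, but the result must then be exactly eight digits with a plausible year/month/day split:

import re

def normalize(dt):
    return re.sub(r"\D", "", str(dt))

print(normalize("2019-06-01"))     # '20190601' -> would be accepted
print(len(normalize("2019/6/1")))  # 7 -> rejected, not eight digits
print(normalize(20190601))         # plain ints are normalized the same way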
d99aebe4ded6f8b8663ca32a191c6c39e58b1517
|
bump version
|
cenkalti/kuyruk,cenkalti/kuyruk
|
kuyruk/__init__.py
|
kuyruk/__init__.py
|
from __future__ import absolute_import
import logging
from kuyruk.kuyruk import Kuyruk
from kuyruk.worker import Worker
from kuyruk.task import Task
from kuyruk.config import Config
__version__ = '0.14.5'
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings on startup
null_handler = NullHandler()
logging.getLogger('kuyruk').addHandler(null_handler)
logging.getLogger('pika').addHandler(null_handler)
|
from __future__ import absolute_import
import logging
from kuyruk.kuyruk import Kuyruk
from kuyruk.worker import Worker
from kuyruk.task import Task
from kuyruk.config import Config
__version__ = '0.14.4'
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings on startup
null_handler = NullHandler()
logging.getLogger('kuyruk').addHandler(null_handler)
logging.getLogger('pika').addHandler(null_handler)
|
mit
|
Python
|
8650f7380976671553db28e11ac145ac3b4b9d20
|
Update production.py
|
watchdogpolska/poradnia,watchdogpolska/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia,rwakulszowa/poradnia,watchdogpolska/poradnia,watchdogpolska/poradnia.siecobywatelska.pl,watchdogpolska/poradnia.siecobywatelska.pl,rwakulszowa/poradnia
|
poradnia/config/production.py
|
poradnia/config/production.py
|
# -*- coding: utf-8 -*-
'''
Production Configurations
'''
from .common import * # noqa
# SECRET KEY
SECRET_KEY = env.str('DJANGO_SECRET_KEY')
# END SECRET KEY
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
# EMAIL
EMAIL_BACKEND = env.email_url(default='consolemail://')
DEFAULT_FROM_EMAIL = env.str('DJANGO_DEFAULT_FROM_EMAIL',
'poradnia <[email protected]>')
EMAIL_HOST = env.str('DJANGO_EMAIL_HOST')
EMAIL_HOST_PASSWORD = env.str('DJANGO_EMAIL_HOST_PASSWORD')
EMAIL_HOST_USER = env.str('DJANGO_EMAIL_HOST_USER')
EMAIL_PORT = env.int('DJANGO_EMAIL_PORT')
EMAIL_SUBJECT_PREFIX = env.str('DJANGO_EMAIL_SUBJECT_PREFIX', '[poradnia] ')
EMAIL_USE_TLS = env.bool('DJANGO_EMAIL_USE_TLS', True)
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL
# TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# END TEMPLATE CONFIGURATION
# Your production stuff: Below this line define 3rd party library settings
# Set the value of your DSN
RAVEN_CONFIG = {
'dsn': env.str('RAVEN_DSN', 'http://example.com'),
}
INSTALLED_APPS += ('raven.contrib.django.raven_compat',)
MIDDLEWARE_CLASSES = (
"raven.contrib.django.raven_compat.middleware.Sentry404CatchMiddleware",
) + MIDDLEWARE_CLASSES
CACHES = {
'default': env.cache(),
}
|
# -*- coding: utf-8 -*-
'''
Production Configurations
'''
from .common import * # noqa
# SECRET KEY
SECRET_KEY = env.str('DJANGO_SECRET_KEY')
# END SECRET KEY
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
# EMAIL
EMAIL_BACKEND = env.email_url(default='consolemail://')
DEFAULT_FROM_EMAIL = env.str('DJANGO_DEFAULT_FROM_EMAIL',
'poradnia <[email protected]>')
EMAIL_HOST = env('DJANGO_EMAIL_HOST')
EMAIL_HOST_PASSWORD = env('DJANGO_EMAIL_HOST_PASSWORD')
EMAIL_HOST_USER = env('DJANGO_EMAIL_HOST_USER')
EMAIL_PORT = env.int('DJANGO_EMAIL_PORT')
EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[poradnia] ')
EMAIL_USE_TLS = env.bool('DJANGO_EMAIL_USE_TLS', defualt=True)
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL
# TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# END TEMPLATE CONFIGURATION
# Your production stuff: Below this line define 3rd party libary settings
# Ustaw wartość twojego DSN
RAVEN_CONFIG = {
'dsn': env.str('RAVEN_DSN', 'http://example.com'),
}
INSTALLED_APPS += ('raven.contrib.django.raven_compat',)
MIDDLEWARE_CLASSES = (
"raven.contrib.django.raven_compat.middleware.Sentry404CatchMiddleware",
) + MIDDLEWARE_CLASSES
CACHES = {
'default': env.cache(),
}
|
mit
|
Python
|
a73451272d832ac22e6afcff0bf3fbe416328b65
|
use correct file?
|
ppliu1979/libinjection,fengjian/libinjection,fengjian/libinjection,ppliu1979/libinjection,fengjian/libinjection,fengjian/libinjection,dijkstracula/libinjection,dijkstracula/libinjection,ppliu1979/libinjection,fengjian/libinjection,ppliu1979/libinjection,ppliu1979/libinjection,dijkstracula/libinjection,dijkstracula/libinjection,dijkstracula/libinjection,dijkstracula/libinjection,ppliu1979/libinjection,fengjian/libinjection,fengjian/libinjection
|
cicada/publishers.py
|
cicada/publishers.py
|
import logging
import os
import stat
import subprocess
class PublishArtifact(object):
"""
    Publish console output
    artifact is relative to workspace directory
href/linktext is made for future linking
"""
def __init__(self, artifact, destination, href, linktext):
self.artifact = artifact
self.destination = destination
self.href = href
self.linktext = linktext
def link(self):
return (self.href, self.linktext)
def run(self, workspace, project, jobname, start):
destdir = os.path.join(self.destination, project, jobname, str(start));
if not os.path.exists(destdir):
logging.info("Making destination directory of %s", destdir)
os.makedirs(destdir)
sourcedir = os.path.join(workspace, self.artifact)
        # create an empty file if it doesn't exist;
        # this works for files and directories
if not os.path.exists(sourcedir):
subprocess.call(['touch', '-a', sourcedir])
regular = False
if (stat.S_ISREG(os.stat(sourcedir).st_mode)):
regular = True
destdir = os.path.join(destdir, self.artifact)
logging.info('%s is %s file', sourcedir, str(regular))
if regular:
logging.info('Copying file %s to %s', sourcedir, destdir)
subprocess.call(['cp', sourcedir, destdir])
else:
logging.info('Copying directory %s to %s', sourcedir, destdir)
subprocess.call(['cp', '-r', sourcedir, destdir])
# portable? link to latest
latestdir = os.path.join(self.destination, project, jobname, 'latest');
subprocess.call(['rm', '-rf', latestdir])
subprocess.call(['ln', '-s', destdir, latestdir])
return destdir
|
import logging
import os
import stat
import subprocess
class PublishArtifact(object):
"""
Publish console ouput
artifct is relative to workspace directory
href/linktext is made for future linking
"""
def __init__(self, artifact, destination, href, linktext):
self.artifact = artifact
self.destination = destination
self.href = href
self.linktext = linktext
def link(self):
return (self.href, self.linktext)
def run(self, workspace, project, jobname, start):
destdir = os.path.join(self.destination, project, jobname, str(start));
if not os.path.exists(destdir):
os.makedirs(destdir)
sourcedir = os.path.join(workspace, self.artifact)
# create empty file if it doesnt exist
# this works for files and directories
if not os.path.exists(sourcedir):
subprocess.call(['touch', '-a', sourcedir])
regular = False
if (stat.S_ISREG(os.stat(sourcedir).st_mode)):
regular = True
destdir = os.path.join(destdir, self.destination)
logging.info('%s is %s file', sourcedir, str(regular))
if regular:
logging.info('Copying file %s to %s', sourcedir, destdir)
subprocess.call(['cp', sourcedir, destdir])
else:
logging.info('Copying directory %s to %s', sourcedir, destdir)
subprocess.call(['cp', '-r', sourcedir, destdir])
# portable? link to latest
latestdir = os.path.join(self.destination, project, jobname, 'latest');
subprocess.call(['rm', '-rf', latestdir])
subprocess.call(['ln', '-s', destdir, latestdir])
return destdir
|
bsd-3-clause
|
Python
|
eaba29ee7ea2a02eef180531e7efaf4c3cfebf31
|
Check for subscript like DD1
|
mph-/lcapy
|
lcapy/latex.py
|
lcapy/latex.py
|
import re
sub_super_pattern = re.compile(r"([_\^]){([a-zA-Z]+)([0-9]*)}")
class Latex(object):
words = ('in', 'out', 'ref', 'rms', 'load', 'source', 'avg',
'mean', 'peak', 'pp', 'min', 'max', 'src',
'cc', 'ee', 'dd', 'ss', 'ih', 'il', 'oh', 'ol')
def __init__(self, string):
self.str = string
def mathrm(self):
"""Place words in sub- or super-scripts inside a mathrm.
For example V_{rms} -> V_{\mathrm{rms}}"""
def foo(match):
word = match.group(2)
suffix = word + match.group(3)
if word.lower() in self.words:
suffix = r'{\mathrm{%s}}' % suffix
else:
suffix = r'{%s}' % suffix
return match.group(1) + suffix
return sub_super_pattern.sub(foo, self.str)
def __str__(self):
return self.mathrm()
def latex_str(string):
return Latex(string).__str__()
def format_label(s):
if s == '':
return s
# Pass math-mode labels verbatim.
if s[0] == '$' and s[-1] == '$':
return s
# With leading $ and no trailing $, e.g., v=$V1, try to
    # automagically convert to LaTeX string, otherwise pass
# verbatim. Currently, this only converts words in sub- or
# super- scripts to roman. TODO, handle more cases.
if s[0] == '$' and s[-1] != '$':
return '$' + latex_str(s[1:]) + '$'
    # If the label has _ or ^ it needs to be in math-mode.
if '_' in s or '^' in s or '\\left' in s:
return '$' + latex_str(s) + '$'
return s
|
import re
sub_super_pattern = re.compile(r"([_\^]){([\w]+)}")
class Latex(object):
words = ('in', 'out', 'ref', 'rms', 'load', 'source', 'avg',
'mean', 'peak', 'pp', 'min', 'max', 'src',
'cc', 'ee', 'dd', 'ss', 'ih', 'il', 'oh', 'ol')
def __init__(self, string):
self.str = string
def mathrm(self):
"""Place words in sub- or super-scripts inside a mathrm.
For example V_{rms} -> V_{\mathrm{rms}}"""
def foo(match):
fred = match.group(2)
if fred.lower() in self.words:
fred = r'{\mathrm{%s}}' % fred
else:
fred = r'{%s}' % fred
return match.group(1) + fred
return sub_super_pattern.sub(foo, self.str)
def __str__(self):
return self.mathrm()
def latex_str(string):
return Latex(string).__str__()
def format_label(s):
if s == '':
return s
# Pass math-mode labels verbatim.
if s[0] == '$' and s[-1] == '$':
return s
# With leading $ and no trailing $, e.g., v=$V1, try to
# automagically convert to LateX string, otherwise pass
# verbatim. Currently, this only converts words in sub- or
# super- scripts to roman. TODO, handle more cases.
if s[0] == '$' and s[-1] != '$':
return '$' + latex_str(s[1:]) + '$'
# If have + or ^ need to be in math-mode.
if '_' in s or '^' in s or '\\left' in s:
return '$' + latex_str(s) + '$'
return s
|
lgpl-2.1
|
Python
|
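The new pattern's third group is what lets a trailing digit ride along with a recognized word, e.g. the DD1 subscript named in the commit subject. A hypothetical usage sketch, assuming the lcapy.latex module as updated above:

from lcapy.latex import format_label  # module as shown in the new contents

print(format_label('$V_{dd1}$'))  # math-mode labels pass through verbatim
print(format_label('V_{dd1}'))    # -> $V_{\mathrm{dd1}}$: 'dd' is a known word, '1' rides along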
9cc013cc0fbff030be386ed3af5f6a826b97ca5f
|
make the module hook more robust
|
iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack
|
lib/spack/spack/hooks/module_file_generation.py
|
lib/spack/spack/hooks/module_file_generation.py
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.config
import spack.modules
import spack.modules.common
import llnl.util.tty as tty
def _for_each_enabled(spec, method_name):
"""Calls a method for each enabled module"""
enabled = spack.config.get('modules:enable')
if not enabled:
tty.debug('NO MODULE WRITTEN: list of enabled module files is empty')
return
for name in enabled:
generator = spack.modules.module_types[name](spec)
try:
getattr(generator, method_name)()
except RuntimeError as e:
msg = 'cannot perform the requested {0} operation on module files'
msg += ' [{1}]'
tty.warn(msg.format(method_name, str(e)))
def post_install(spec):
_for_each_enabled(spec, 'write')
def post_uninstall(spec):
_for_each_enabled(spec, 'remove')
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import spack.modules
import spack.modules.common
import llnl.util.tty as tty
try:
enabled = spack.config.get('modules:enable')
except KeyError:
tty.debug('NO MODULE WRITTEN: list of enabled module files is empty')
enabled = []
def _for_each_enabled(spec, method_name):
"""Calls a method for each enabled module"""
for name in enabled:
generator = spack.modules.module_types[name](spec)
try:
getattr(generator, method_name)()
except RuntimeError as e:
msg = 'cannot perform the requested {0} operation on module files'
msg += ' [{1}]'
tty.warn(msg.format(method_name, str(e)))
post_install = lambda spec: _for_each_enabled(spec, 'write')
post_uninstall = lambda spec: _for_each_enabled(spec, 'remove')
|
lgpl-2.1
|
Python
|
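The refactor above keeps a single _for_each_enabled helper and dispatches on a method name string; the core pattern is just getattr, as this tiny stand-in shows:

class Generator:
    def write(self):
        print("writing module file")

    def remove(self):
        print("removing module file")

for method_name in ("write", "remove"):
    getattr(Generator(), method_name)()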
7f77f822dcf05152634cda771cce934320d23ca1
|
use image data
|
pdebuyl-lab/RMPCDMD,laurensdeprez/RMPCDMD,pdebuyl-lab/RMPCDMD,laurensdeprez/RMPCDMD,pdebuyl/RMPCDMD,pdebuyl/RMPCDMD
|
experiments/01-single-dimer/plot_velocity.py
|
experiments/01-single-dimer/plot_velocity.py
|
#!/usr/bin/env python3
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('file', type=str, help='H5MD datafile')
parser.add_argument('--directed', action='store_true')
args = parser.parse_args()
import numpy as np
import h5py
import matplotlib.pyplot as plt
with h5py.File(args.file, 'r') as f:
r = f['particles/dimer/position/value'][...]
r_dt = f['particles/dimer/position/time'][()]
im = f['particles/dimer/image/value'][...]
v = f['particles/dimer/velocity/value'][...]
v_dt = f['particles/dimer/velocity/time'][()]
edges = f['particles/dimer/box/edges'][:].reshape((1,-1))
r += edges*im
assert abs(r_dt-v_dt) < 1e-12
assert r.shape[1]==2
assert r.shape[2]==3
assert v.shape[1]==2
assert v.shape[2]==3
time = np.arange(r.shape[0])*r_dt
v_com = v.mean(axis=1)
if args.directed:
unit_z = r[:,1,:]-r[:,0,:]
unit_z /= np.sqrt(np.sum(unit_z**2, axis=1)).reshape((-1,1))
vz = np.sum(v_com*unit_z, axis=1)
plt.plot(time, vz)
else:
plt.plot(time, v_com)
plt.show()
|
#!/usr/bin/env python3
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('file', type=str, help='H5MD datafile')
parser.add_argument('--directed', action='store_true')
args = parser.parse_args()
import numpy as np
import h5py
import matplotlib.pyplot as plt
with h5py.File(args.file, 'r') as f:
r = f['particles/dimer/position/value'][...]
r_dt = f['particles/dimer/position/time'][()]
v = f['particles/dimer/velocity/value'][...]
v_dt = f['particles/dimer/velocity/time'][()]
assert abs(r_dt-v_dt) < 1e-12
assert r.shape[1]==2
assert r.shape[2]==3
assert v.shape[1]==2
assert v.shape[2]==3
time = np.arange(r.shape[0])*r_dt
v_com = v.mean(axis=1)
if args.directed:
unit_z = r[:,1,:]-r[:,0,:]
unit_z /= np.sqrt(np.sum(unit_z**2, axis=1)).reshape((-1,1))
vz = np.sum(v_com*unit_z, axis=1)
plt.plot(time, vz)
else:
plt.plot(time, v_com)
plt.show()
|
bsd-3-clause
|
Python
|
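The new r += edges*im line is the standard unwrapping of periodic coordinates: adding the image count times the box edge recovers a continuous trajectory from wrapped positions. A one-dimensional sketch with made-up numbers:

import numpy as np

edges = np.array([[10.0]])    # box edge length, shaped for broadcasting
r = np.array([[9.5], [0.5]])  # wrapped positions
im = np.array([[0], [1]])     # the second particle crossed the boundary once
print(r + edges * im)         # [[ 9.5] [10.5]] -- continuous again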
02ae6cadac686c9c601bcad42c6b95b8811c3cfb
|
Add --ip and --port flag to example.
|
eronde/vim_suggest,eronde/vim_suggest,eronde/py_word_suggest,eronde/py_word_suggest
|
examples/commandline.py
|
examples/commandline.py
|
"""commandline.
Usage:
commandline.py lookup --lang=<lang> --word=<preword> [--ip=<redis-ip>] [--port=<redis-port>]
commandline.py -h | --help
Options:
-h --help Show this screen.
--lang=<lang> Language of suggested word.
--word=<preword> Pre-word of suggested word.
  --ip=<redis-ip>      IP of redis server (Default: 172.17.0.3)
  --port=<redis-port>  Port of redis server (Default: 6379)
"""
import redis
import py_word_suggest
# from config import REDIS_IP
from docopt import docopt
def main():
arguments = docopt(__doc__, version='commandline 0.0.1')
if arguments['--port']:
rp = arguments['--port']
else:
rp = 6379
if arguments['--ip']:
rs = arguments['--ip']
else:
rs = '172.17.0.3'
r = redis.StrictRedis(host=rs, port=rp, db=0)
try:
obj = py_word_suggest.Selector_redis(r)
except Exception as e:
print("{e} Fail to connect to: {ip}:{port}".format(e=e, ip=rs, port=rp))
exit(1)
if arguments['lookup']:
key = 'lang:{l}:gram:2:{w}'.format(l=arguments['--lang'], w=arguments['--word'])
try:
fetch = obj.gen_fetchWords(key)
except Exception as e:
print("{e}".format(e=e))
exit(1)
print("'{w}' has the following suggested words:\n".format(w=arguments['--word']))
print(list(obj.gen_suggestWord(*fetch)))
if __name__ == "__main__":
main()
|
"""commandline.
Usage:
commandline.py lookup --lang=<lang> --word=<preword>
commandline.py -h | --help
Options:
-h --help Show this screen.
--lang=<lang> Language of suggested word.
--word=<preword> Pre-word of suggested word.
"""
import redis
import py_word_suggest
# from config import REDIS_IP
from docopt import docopt
# r = redis.StrictRedis(host='py-word-suggest-redis', port=6379, db=0)
# r = redis.StrictRedis(host=REDIS_IP, port=6379, db=0)
rs = '172.17.0.3'
r = redis.StrictRedis(host=rs, port=6379, db=0)
def main():
try:
obj = py_word_suggest.Selector_redis(r)
except Exception as e:
print("{e} Fail to connect to: {ip}".format(e=e,ip=rs))
exit(1)
arguments = docopt(__doc__, version='commandline 0.0.1')
if arguments['lookup']:
key = 'lang:{l}:gram:2:{w}'.format(l=arguments['--lang'],w=arguments['--word'])
try:
fetch = obj.gen_fetchWords(key)
except Exception as e:
print("{e}".format(e=e))
exit(1)
print("'{w}' has the following suggested words:\n".format(w=arguments['--word']))
print(list(obj.gen_suggestWord(*fetch)))
if __name__ == "__main__":
main()
|
mit
|
Python
|
ca1fa126611f958ccd893f034df74e8821f41771
|
update prototype
|
sachinio/redalert,sachinio/redalert,sachinio/redalert,sachinio/redalert,sachinio/redalert,sachinio/redalert,sachinio/redalert
|
hardware/prototype/read_serial.py
|
hardware/prototype/read_serial.py
|
__author__ = 'sachinpatney'
import serial
import time
import binascii
ser = serial.Serial('/dev/ttyUSB0', baudrate=9600, timeout=1.0)
s = b''
def do(cmd):
if cmd == 'play':
print('Playing music ...')
while True:
bytesToRead = ser.inWaiting()
if bytesToRead > 0:
s += ser.read()
if binascii.hexlify(s) == b'7e':
w = ser.read(2)
s += w
l = int(binascii.hexlify(w), 16)
s += ser.read(l + 2)
s = binascii.hexlify(s)
if s[6:8] == b'90':
data = s[32:-2]
data = binascii.unhexlify(data).decode('utf-8')
print(data)
print(len(data))
do(data.strip())
else:
s = b''
else:
s = b''
time.sleep(0.3)
|
__author__ = 'sachinpatney'
import serial
import time
import binascii
ser = serial.Serial('/dev/ttyUSB0', baudrate=9600, timeout=1.0)
s = b''
def do(cmd):
if cmd == 'play':
print('Playing music ...')
while True:
bytesToRead = ser.inWaiting()
if bytesToRead > 0:
s += ser.read()
if binascii.hexlify(s) == b'7e':
w = ser.read(2)
s += w
l = int(binascii.hexlify(w), 16)
s += ser.read(l + 2)
s = binascii.hexlify(s)
if s[6:8] == b'90':
data = s[32:-2]
data = binascii.unhexlify(data).decode('utf-8')
print(data)
do(data.strip())
else:
s = b''
else:
s = b''
time.sleep(0.3)
|
mit
|
Python
|
2846b996783b896f69ec0870569c7e442ddcc652
|
fix imports, arguments
|
vlukes/sfepy,sfepy/sfepy,sfepy/sfepy,olivierverdier/sfepy,RexFuzzle/sfepy,vlukes/sfepy,BubuLK/sfepy,olivierverdier/sfepy,RexFuzzle/sfepy,sfepy/sfepy,BubuLK/sfepy,vlukes/sfepy,rc/sfepy,rc/sfepy,RexFuzzle/sfepy,lokik/sfepy,RexFuzzle/sfepy,rc/sfepy,lokik/sfepy,olivierverdier/sfepy,lokik/sfepy,lokik/sfepy,BubuLK/sfepy
|
sfepy/terms/terms_fibres.py
|
sfepy/terms/terms_fibres.py
|
from sfepy.terms.terms import *
from sfepy.terms.terms_base import VectorVector
from sfepy.terms.terms_hyperelastic_tl import HyperElasticTLBase
from sfepy.homogenization.utils import iter_sym
class FibresActiveTLTerm(VectorVector, HyperElasticTLBase):
r""":description: Hyperelastic active fibres term. Effective stress $S_{ij} =
A f_{\rm max} \exp{-(\frac{\epsilon - \varepsilon_{\rm opt}}{s})^2}$, where
$\epsilon = E_{ij} d_i d_j$ is the Green strain $\ull{E}$ projected to the
fibre direction $\ul{d}$.
:definition:
$\int_{\Omega} S_{ij}(\ul{u}) \delta E_{ij}(\ul{u};\ul{v})$ """
name = 'dw_tl_fib_a'
arg_types = ('material_1', 'material_2', 'material_3',
'material_4', 'material_5', 'virtual', 'state')
geometry = [(Volume, 'virtual')]
family_data_names = ['E']
def compute_crt_data( self, family_data, mode, **kwargs ):
pars = self.get_args(['material_1', 'material_2', 'material_3',
'material_4', 'material_5'], **kwargs)
fmax, eps_opt, s, fdir, act = pars
strainE = family_data[0]
eps = nm.zeros_like(fmax)
omega = nm.empty_like(strainE)
for ii, (ir, ic) in enumerate(iter_sym(fdir.shape[2])):
omega[...,ii,0] = fdir[...,ir,0] * fdir[...,ic,0]
eps[...,0,0] += omega[...,ii,0] * strainE[...,ii,0]
tau = act * fmax * nm.exp(-((eps - eps_opt) / s)**2.0)
if mode == 0:
out = omega * tau
else:
shape = list(strainE.shape)
shape[-1] = shape[-2]
out = nm.empty(shape, dtype=nm.float64)
for ir in range(omega.shape[2]):
for ic in range(omega.shape[2]):
out[...,ir,ic] = omega[...,ir,0] * omega[...,ic,0]
out[:] *= -2.0 * ((eps - eps_opt) / (s**2.0)) * tau
return out
|
from sfepy.terms.terms import *
from sfepy.terms.terms_hyperelastic_tl import HyperElasticTLBase
from sfepy.homogenization.utils import iter_sym
class FibresActiveTLTerm(HyperElasticTLBase):
r""":description: Hyperelastic active fibres term. Effective stress $S_{ij} =
A f_{\rm max} \exp{-(\frac{\epsilon - \varepsilon_{\rm opt}}{s})^2}$, where
$\epsilon = E_{ij} d_i d_j$ is the Green strain $\ull{E}$ projected to the
fibre direction $\ul{d}$.
:definition:
$\int_{\Omega} S_{ij}(\ul{u}) \delta E_{ij}(\ul{u};\ul{v})$ """
name = 'dw_tl_fib_a'
arg_types = ('material_1', 'material_2', 'material_3',
'material_4', 'material_5', 'virtual', 'state')
geometry = [(Volume, 'virtual')]
family_data_names = ['E']
def compute_crt_data( self, family_data, ap, vg, mode, **kwargs ):
pars = self.get_args(['material_1', 'material_2', 'material_3',
'material_4', 'material_5'], **kwargs)
fmax, eps_opt, s, fdir, act = pars
strainE = family_data[0]
eps = nm.zeros_like(fmax)
omega = nm.empty_like(strainE)
for ii, (ir, ic) in enumerate(iter_sym(fdir.shape[2])):
omega[...,ii,0] = fdir[...,ir,0] * fdir[...,ic,0]
eps[...,0,0] += omega[...,ii,0] * strainE[...,ii,0]
tau = act * fmax * nm.exp(-((eps - eps_opt) / s)**2.0)
if mode == 0:
out = omega * tau
else:
shape = list(strainE.shape)
shape[-1] = shape[-2]
out = nm.empty(shape, dtype=nm.float64)
for ir in range(omega.shape[2]):
for ic in range(omega.shape[2]):
out[...,ir,ic] = omega[...,ir,0] * omega[...,ic,0]
out[:] *= -2.0 * ((eps - eps_opt) / (s**2.0)) * tau
return out
|
bsd-3-clause
|
Python
|
207dd3b5f59ecc66c896cc0f3ad90e283fca5145
|
refactor the registration process
|
fretboardfreak/netify
|
src/app.py
|
src/app.py
|
"""The netify application object."""
# Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Flask
class NetifyApp(object):
"""The Netify Application object."""
flask_app = None
def __init__(self, config=None):
if self.flask_app is None: # First time init
self.__class__.flask_app = Flask(__name__)
self.registered_views = []
if config:
self.config = config
self.config.update_flask(self.flask_app)
def register_views(self, views):
"""Register the view classes against the flask app.
The "Method" name registered in the Flask app is the "name" field for
each View class.
"""
view_config = self.config.netify_views
enabled = [name.strip() for name in view_config['enabled'].split(',')]
for view in views:
view_cls = view.value
if view.name in enabled:
if view_cls.name in self.registered_views:
                    self.flask_app.logger.warning(
                        'Not registering view %s. A view has already '
                        'been registered for %s.' % (view.name, view_cls.name))
                    continue
view_opts = self.config.get_page_options(view_cls.name)
view_cls.register(self, **view_opts)
self.registered_views.append(view_cls.name)
def run(self, host=None, port=None, debug=None):
"""Run the Flask Server."""
self.flask_app.run(host, port, debug)
|
"""The netify application object."""
# Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Flask


class NetifyApp(object):
    """The Netify Application object."""

    flask_app = None

    def __init__(self, config=None):
        if self.flask_app is None:
            self.__class__.flask_app = Flask(__name__)
        if config and self.flask_app:
            config.update_flask(self.flask_app)

    def register_views(self, views):
        """Register the view classes against the flask app."""
        for view in views:
            view.register(self.flask_app)

    def run(self, host=None, port=None, debug=None):
        """Run the Flask Server."""
        self.flask_app.run(host, port, debug)
|
apache-2.0
|
Python
|
12720a225210afe73f301c2112f0a7ddcdc41bcb
|
Fix CSR loading in ACME example client script.
|
goofwear/letsencrypt,jsha/letsencrypt,kevinlondon/letsencrypt,DavidGarciaCat/letsencrypt,ahojjati/letsencrypt,jmaurice/letsencrypt,rugk/letsencrypt,rugk/letsencrypt,thanatos/lets-encrypt-preview,DavidGarciaCat/letsencrypt,thanatos/lets-encrypt-preview,beermix/letsencrypt,ghyde/letsencrypt,Jonadabe/letsencrypt,TheBoegl/letsencrypt,stewnorriss/letsencrypt,bestwpw/letsencrypt,twstrike/le_for_patching,skynet/letsencrypt,bsmr-misc-forks/letsencrypt,jtl999/certbot,tdfischer/lets-encrypt-preview,jmaurice/letsencrypt,riseofthetigers/letsencrypt,brentdax/letsencrypt,Hasimir/letsencrypt,stewnorriss/letsencrypt,riseofthetigers/letsencrypt,TheBoegl/letsencrypt,dietsche/letsencrypt,modulexcite/letsencrypt,sapics/letsencrypt,luorenjin/letsencrypt,BKreisel/letsencrypt,fmarier/letsencrypt,g1franc/lets-encrypt-preview,hlieberman/letsencrypt,modulexcite/letsencrypt,rutsky/letsencrypt,wteiken/letsencrypt,solidgoldbomb/letsencrypt,VladimirTyrin/letsencrypt,mitnk/letsencrypt,PeterMosmans/letsencrypt,Bachmann1234/letsencrypt,xgin/letsencrypt,Jonadabe/letsencrypt,mrb/letsencrypt,vcavallo/letsencrypt,bsmr-misc-forks/letsencrypt,jmhodges/letsencrypt,jsha/letsencrypt,jtl999/certbot,rlustin/letsencrypt,Hasimir/letsencrypt,armersong/letsencrypt,lmcro/letsencrypt,kuba/letsencrypt,xgin/letsencrypt,lbeltrame/letsencrypt,VladimirTyrin/letsencrypt,Bachmann1234/letsencrypt,ruo91/letsencrypt,sapics/letsencrypt,martindale/letsencrypt,piru/letsencrypt,Jadaw1n/letsencrypt,PeterMosmans/letsencrypt,deserted/letsencrypt,hlieberman/letsencrypt,mrb/letsencrypt,hsduk/lets-encrypt-preview,kuba/letsencrypt,sjerdo/letsencrypt,jmhodges/letsencrypt,rutsky/letsencrypt,goofwear/letsencrypt,kevinlondon/letsencrypt,Sveder/letsencrypt,rlustin/letsencrypt,ruo91/letsencrypt,letsencrypt/letsencrypt,sjerdo/letsencrypt,martindale/letsencrypt,BillKeenan/lets-encrypt-preview,lmcro/letsencrypt,mitnk/letsencrypt,Jadaw1n/letsencrypt,fmarier/letsencrypt,piru/letsencrypt,dietsche/letsencrypt,bestwpw/letsencrypt,stweil/letsencrypt,stweil/letsencrypt,deserted/letsencrypt,vcavallo/letsencrypt,hsduk/lets-encrypt-preview,beermix/letsencrypt,twstrike/le_for_patching,BKreisel/letsencrypt,Sveder/letsencrypt,tyagi-prashant/letsencrypt,g1franc/lets-encrypt-preview,BillKeenan/lets-encrypt-preview,skynet/letsencrypt,luorenjin/letsencrypt,brentdax/letsencrypt,wteiken/letsencrypt,armersong/letsencrypt,solidgoldbomb/letsencrypt,letsencrypt/letsencrypt,ghyde/letsencrypt,ahojjati/letsencrypt,tdfischer/lets-encrypt-preview,lbeltrame/letsencrypt,tyagi-prashant/letsencrypt
|
examples/acme_client.py
|
examples/acme_client.py
|
"""Example script showing how to use acme client API."""
import logging
import os
import pkg_resources
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import OpenSSL
from acme import client
from acme import messages
from acme import jose
logging.basicConfig(level=logging.DEBUG)
NEW_REG_URL = 'https://www.letsencrypt-demo.org/acme/new-reg'
BITS = 2048 # minimum for Boulder
DOMAIN = 'example1.com' # example.com is ignored by Boulder
# generate_private_key requires cryptography>=0.5
key = jose.JWKRSA(key=rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend()))
acme = client.Client(NEW_REG_URL, key)
regr = acme.register()
logging.info('Auto-accepting TOS: %s', regr.terms_of_service)
acme.update_registration(regr.update(
body=regr.body.update(agreement=regr.terms_of_service)))
logging.debug(regr)
authzr = acme.request_challenges(
identifier=messages.Identifier(typ=messages.IDENTIFIER_FQDN, value=DOMAIN),
new_authzr_uri=regr.new_authzr_uri)
logging.debug(authzr)
authzr, authzr_response = acme.poll(authzr)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, pkg_resources.resource_string(
'acme', os.path.join('testdata', 'csr.der')))
try:
acme.request_issuance(csr, (authzr,))
except messages.Error as error:
print ("This script is doomed to fail as no authorization "
"challenges are ever solved. Error from server: {0}".format(error))
|
"""Example script showing how to use acme client API."""
import logging
import os
import pkg_resources
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
import OpenSSL
from acme import client
from acme import messages
from acme import jose
logging.basicConfig(level=logging.DEBUG)
NEW_REG_URL = 'https://www.letsencrypt-demo.org/acme/new-reg'
BITS = 2048 # minimum for Boulder
DOMAIN = 'example1.com' # example.com is ignored by Boulder
# generate_private_key requires cryptography>=0.5
key = jose.JWKRSA(key=rsa.generate_private_key(
public_exponent=65537,
key_size=2048,
backend=default_backend()))
acme = client.Client(NEW_REG_URL, key)
regr = acme.register()
logging.info('Auto-accepting TOS: %s', regr.terms_of_service)
acme.update_registration(regr.update(
body=regr.body.update(agreement=regr.terms_of_service)))
logging.debug(regr)
authzr = acme.request_challenges(
identifier=messages.Identifier(typ=messages.IDENTIFIER_FQDN, value=DOMAIN),
new_authzr_uri=regr.new_authzr_uri)
logging.debug(authzr)
authzr, authzr_response = acme.poll(authzr)
csr = OpenSSL.crypto.load_certificate_request(
OpenSSL.crypto.FILETYPE_ASN1, pkg_resources.resource_string(
'acme.jose', os.path.join('testdata', 'csr.der')))
try:
acme.request_issuance(csr, (authzr,))
except messages.Error as error:
print ("This script is doomed to fail as no authorization "
"challenges are ever solved. Error from server: {0}".format(error))
|
apache-2.0
|
Python
|