commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
4c66010cf0cd4f763b362b6e84eb67d7ef1278b8
|
Make "near" group optional in regex
|
jackbrewer/SublimeLinter-contrib-stylint
|
linter.py
|
linter.py
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):?(?P<near>\d+)?\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
#
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
^(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
|
mit
|
Python
|
ba2db7713d4fbb929c26bf9ce848b0f7b420809d
|
fix typo
|
rizumu/bootmachine,rizumu/bootmachine,rizumu/bootmachine,rizumu/bootmachine
|
bootmachine/settings_tests.py
|
bootmachine/settings_tests.py
|
import os
"""
CONFIGURATION MANAGEMENT
"""
# salt
LOCAL_STATES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"configuration", "states/")
LOCAL_PILLARS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"configuration", "pillars/")
REMOTE_STATES_DIR = "/srv/salt/states/"
REMOTE_PILLARS_DIR = "/srv/salt/pillars/"
SALT_INSTALLER_ARCH_201208 = "aur"
SALT_INSTALLER_DEBIAN_6 = "backports"
SALT_INSTALLER_FEDORA_16 = "rpm-stable"
SALT_INSTALLER_FEDORA_17 = "rpm-stable"
SALT_INSTALLER_UBUNTU_1204LTS = "ppa"
# puppet (not yet implemented)
PUPPET_VERSION = NotImplementedError()
PUPPET_RECIPES_DIR = NotImplementedError()
# chef (not yet implemented)
CHEF_VERSION = NotImplementedError()
CHEF_RECIPIES_DIR = NotImplementedError()
"""
PROVIDERS AND SERVER STACK
"""
# Rackspace authentication via openstack-compute
OPENSTACK_USERNAME = os.environ.get("OPENSTACK_COMPUTE_USERNAME")
OPENSTACK_APIKEY = os.environ.get("OPENSTACK_COMPUTE_APIKEY")
# Rackspace authentication via python-novaclient api v2
OS_USERNAME = os.environ.get("OS_USERNAME")
OS_PASSWORD = os.environ.get("OS_PASSWORD")
OS_TENANT_NAME = os.environ.get("OS_TENANT_NAME")
OS_AUTH_URL = os.environ.get("OS_AUTH_URL")
OS_REGION_NAME = os.environ.get("OS_REGION_NAME")
OS_COMPUTE_API_VERSION = os.environ.get("OS_COMPUTE_API_VERSION")
# Amazon authentication via boto
AWS_ACCESS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
"""
SECURITY
"""
# Change the default SSH port of 22, suggestion is between 20000 and 65535.
SSH_PORT = "30000"
|
import os
"""
CONFIGURATION MANAGEMENT
"""
# salt
LOCAL_SALTSTATES_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"configuration", "states/")
LOCAL_PILLARS_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)),
"configuration", "pillars/")
REMOTE_STATES_DIR = "/srv/salt/states/"
REMOTE_PILLARS_DIR = "/srv/salt/pillars/"
SALT_INSTALLER_ARCH_201208 = "aur"
SALT_INSTALLER_DEBIAN_6 = "backports"
SALT_INSTALLER_FEDORA_16 = "rpm-stable"
SALT_INSTALLER_FEDORA_17 = "rpm-stable"
SALT_INSTALLER_UBUNTU_1204LTS = "ppa"
# puppet (not yet implemented)
PUPPET_VERSION = NotImplementedError()
PUPPET_RECIPES_DIR = NotImplementedError()
# chef (not yet implemented)
CHEF_VERSION = NotImplementedError()
CHEF_RECIPIES_DIR = NotImplementedError()
"""
PROVIDERS AND SERVER STACK
"""
# Rackspace authentication via openstack-compute
OPENSTACK_USERNAME = os.environ.get("OPENSTACK_COMPUTE_USERNAME")
OPENSTACK_APIKEY = os.environ.get("OPENSTACK_COMPUTE_APIKEY")
# Rackspace authentication via python-novaclient api v2
OS_USERNAME = os.environ.get("OS_USERNAME")
OS_PASSWORD = os.environ.get("OS_PASSWORD")
OS_TENANT_NAME = os.environ.get("OS_TENANT_NAME")
OS_AUTH_URL = os.environ.get("OS_AUTH_URL")
OS_REGION_NAME = os.environ.get("OS_REGION_NAME")
OS_COMPUTE_API_VERSION = os.environ.get("OS_COMPUTE_API_VERSION")
# Amazon authentication via boto
AWS_ACCESS_KEY = os.environ.get("AWS_ACCESS_KEY_ID")
AWS_SECRET_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
"""
SECURITY
"""
# Change the default SSH port of 22, suggestion is between 20000 and 65535.
SSH_PORT = "30000"
|
mit
|
Python
|
6d4eb6ebfb03f974c2f6fb04992fc25e5a53ece9
|
Change docstring
|
kyamagu/psd2svg
|
src/psd2svg/rasterizer/batik_rasterizer.py
|
src/psd2svg/rasterizer/batik_rasterizer.py
|
# -*- coding: utf-8 -*-
"""
Batik-based rasterizer module.
Download the latest batik rasterizer to use the module. Note Ubuntu 16.04LTS
package is broken and does not work.
Prerequisite:
wget http://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&\
filename=xmlgraphics/batik/binaries/batik-bin-1.9.tar.gz
tar xzf batik-bin-1.9.tar.gz
export BATIK_PATH=./batik-bin-1.9
Deb package:
sudo apt-get install -y libbatik-java
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
from psd2svg.rasterizer.base_rasterizer import BaseRasterizer
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar"
)
class BatikRasterizer(BaseRasterizer):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
basename, ext = os.path.splitext(os.path.basename(url))
output_file = os.path.join(d, "{}.{}".format(basename, format))
cmd = [
"java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0.255.255.255",
"-m", "image/{}".format(format),
"-d", d,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
try:
assert os.path.exists(output_file)
rasterized = Image.open(output_file)
except:
logger.error("{}\n{}{}".format(" ".join(cmd), stdout, stderr))
raise
return self.composite_background(rasterized)
|
# -*- coding: utf-8 -*-
"""
Batik-based rasterizer module.
Download the latest batik rasterizer to use the module. Note Ubuntu 16.04LTS
package is broken and does not work.
Prerequisite:
wget http://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&\
filename=xmlgraphics/batik/binaries/batik-bin-1.9.tar.gz
export BATIK_PATH=./batik-bin-1.9.tar.gz
Deb package:
sudo apt-get install -y libbatik-java
"""
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
from psd2svg.rasterizer.base_rasterizer import BaseRasterizer
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar"
)
class BatikRasterizer(BaseRasterizer):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
basename, ext = os.path.splitext(os.path.basename(url))
output_file = os.path.join(d, "{}.{}".format(basename, format))
cmd = [
"java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0.255.255.255",
"-m", "image/{}".format(format),
"-d", d,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
stdout, stderr = proc.communicate()
try:
assert os.path.exists(output_file)
rasterized = Image.open(output_file)
except:
logger.error("{}\n{}{}".format(" ".join(cmd), stdout, stderr))
raise
return self.composite_background(rasterized)
|
mit
|
Python
|
73f49b5603802ccce3a9c4db0ee0b2eaa4bf0e7f
|
Update startup script (lyli.py)
|
felixbade/lyli,felixbade/lyli,felixbade/lyli
|
lyli.py
|
lyli.py
|
#!flask/bin/python
import logging
import werkzeug.serving
from app import app
import config
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
if config.debug:
app.run(port=3003, debug=True, use_reloader=True)
else:
app.run(port=3004, debug=False, use_reloader=False)
|
#!flask/bin/python
import logging
from os import fork
import werkzeug.serving
from app import app
pid = fork()
if pid > 0:
print('PID: %d' % pid)
exit(0)
elif pid < 0:
print('Could not fork: %d' % pid)
exit(1)
# we are behind a proxy. log the ip of the end-user, not the proxy.
# this will also work without the proxy
werkzeug.serving.WSGIRequestHandler.address_string = lambda self: self.headers.get('x-real-ip', self.client_address[0])
# log to a file (access.log), not stderr
logging.basicConfig(filename='access.log', level=logging.DEBUG, format='%(message)s')
app.run(port=3004, debug=False, use_reloader=False)
#app.run(port=3003, debug=True, use_reloader=True)
|
artistic-2.0
|
Python
|
1b996bf797b5e1a0203054f11001771ede309b23
|
remove dead code
|
fabianvf/scrapi,felliott/scrapi,alexgarciac/scrapi,fabianvf/scrapi,CenterForOpenScience/scrapi,felliott/scrapi,erinspace/scrapi,CenterForOpenScience/scrapi,mehanig/scrapi,mehanig/scrapi,erinspace/scrapi
|
scrapi/harvesters/smithsonian.py
|
scrapi/harvesters/smithsonian.py
|
'''
Harvester for the Smithsonian Digital Repository for the SHARE project
Example API call: http://repository.si.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
import re
from scrapi.base import helpers
from scrapi.base import OAIHarvester
class SiHarvester(OAIHarvester):
short_name = 'smithsonian'
long_name = 'Smithsonian Digital Repository'
url = 'http://repository.si.edu'
@property
def schema(self):
return helpers.updated_schema(self._schema, {
"uris": ('//dc:identifier/node()', helpers.oai_process_uris)
})
base_url = 'http://repository.si.edu/oai/request'
property_list = ['date', 'identifier', 'type', 'format', 'setSpec']
timezone_granularity = True
|
'''
Harvester for the Smithsonian Digital Repository for the SHARE project
Example API call: http://repository.si.edu/oai/request?verb=ListRecords&metadataPrefix=oai_dc
'''
from __future__ import unicode_literals
import re
from scrapi.base import helpers
from scrapi.base import OAIHarvester
class SiHarvester(OAIHarvester):
short_name = 'smithsonian'
long_name = 'Smithsonian Digital Repository'
url = 'http://repository.si.edu'
@property
def schema(self):
return helpers.updated_schema(self._schema, {
"uris": ('//dc:identifier/node()', helpers.oai_process_uris)
})
base_url = 'http://repository.si.edu/oai/request'
property_list = ['date', 'identifier', 'type', 'format', 'setSpec']
timezone_granularity = True
def get_doi_from_identifier(identifiers):
doi_re = re.compile(r'10\.\S*\/\S*')
identifiers = [identifiers] if not isinstance(identifiers, list) else identifiers
for identifier in identifiers:
try:
found_doi = doi_re.search(identifier).group()
return 'http://dx.doi.org/{}'.format(found_doi)
except AttributeError:
continue
|
apache-2.0
|
Python
|
1b7e68c3bdfc2f43f754cc39e1f2f80bfa5bee80
|
Add validate_log_translations flake8 check
|
grahamhayes/designate,ionrock/designate,ramsateesh/designate,cneill/designate-testing,cneill/designate,kiall/designate-py3,cneill/designate-testing,kiall/designate-py3,muraliselva10/designate,grahamhayes/designate,cneill/designate-testing,ramsateesh/designate,tonyli71/designate,kiall/designate-py3,openstack/designate,muraliselva10/designate,tonyli71/designate,cneill/designate,ionrock/designate,cneill/designate,grahamhayes/designate,openstack/designate,ramsateesh/designate,melodous/designate,openstack/designate,muraliselva10/designate,melodous/designate,kiall/designate-py3,melodous/designate,melodous/designate,cneill/designate,ionrock/designate,kiall/designate-py3,cneill/designate,tonyli71/designate
|
designate/hacking/checks.py
|
designate/hacking/checks.py
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
import pep8
mutable_default_argument_check = re.compile(
r"^\s*def .+\((.+=\{\}|.+=\[\])")
log_translation = re.compile(
r"(.)*LOG\.(audit|error|info|warn|warning|critical|exception)\(\s*('|\")")
def mutable_default_arguments(logical_line, physical_line, filename):
if pep8.noqa(physical_line):
return
if mutable_default_argument_check.match(logical_line):
yield (0, "D701: Default paramater value is a mutable type")
def validate_log_translations(logical_line, physical_line, filename):
# Translations are not required in the test directory
if "designate/tests" in filename:
return
if pep8.noqa(physical_line):
return
msg = "D702: Log messages require translation"
if log_translation.match(logical_line):
yield (0, msg)
def factory(register):
register(mutable_default_arguments)
register(validate_log_translations)
|
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import re
mutable_default_argument_check = re.compile(
r"^\s*def .+\((.+=\{\}|.+=\[\])")
def mutable_default_arguments(logical_line, filename):
if mutable_default_argument_check.match(logical_line):
yield (0, "D701: Default paramater value is a mutable type")
def factory(register):
register(mutable_default_arguments)
|
apache-2.0
|
Python
|
54115d8ecd90da614a24bb910939001b37acd246
|
Test pairwise combinations
|
ergs/transmutagen,ergs/transmutagen
|
transmutagen/tests/test_origen.py
|
transmutagen/tests/test_origen.py
|
import os
from itertools import combinations
import numpy as np
from ..tape9utils import origen_to_name
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
tape9, time, nuc, phi = datafile.split()[0]
assert 'table_4' in data
assert 'nuclide' in data['table_4']
nuclides = data['table_4']['nuclides']
keys = ['activation_products', 'actinides', 'fission_products']
# Sanity check
for comb in combinations(keys, 2):
assert set.intersection(*comb) == set()
|
import os
import numpy as np
DATA_DIR = os.path.abspath(os.path.join(__file__, os.path.pardir,
os.path.pardir, os.path.pardir, 'docker', 'data'))
def load_data(datafile):
with open(datafile) as f:
return eval(f.read(), {'array': np.array})
def test_data():
for datafile in os.listdir(DATA_DIR):
data = load_data(os.path.join(DATA_DIR, datafile))
assert 'table_4' in data
|
bsd-3-clause
|
Python
|
89560fd773d833a049824bfa8a7ccf4ce301bed4
|
remove utils.push_dir
|
facebook/wangle,facebook/wangle,facebook/wangle
|
build/fbcode_builder/utils.py
|
build/fbcode_builder/utils.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
'Miscellaneous utility functions.'
import itertools
import logging
import os
import shutil
import subprocess
import sys
from contextlib import contextmanager
def recursively_flatten_list(l):
return itertools.chain.from_iterable(
(recursively_flatten_list(i) if type(i) is list else (i,))
for i in l
)
def run_command(*cmd, **kwargs):
'The stdout of most fbcode_builder utilities is meant to be parsed.'
logging.debug('Running: {0} with {1}'.format(cmd, kwargs))
kwargs['stdout'] = sys.stderr
subprocess.check_call(cmd, **kwargs)
@contextmanager
def make_temp_dir(d):
os.mkdir(d)
try:
yield d
finally:
shutil.rmtree(d, ignore_errors=True)
def read_fbcode_builder_config(filename):
# Allow one spec to read another
# When doing so, treat paths as relative to the config's project directory.
project_dir = os.path.dirname(filename)
def inner_read_config(path):
full_path = os.path.join(project_dir, path)
return read_fbcode_builder_config(full_path)
scope = {'read_fbcode_builder_config': inner_read_config}
with open(filename) as config_file:
# Note that this will need to be changed to an exec() function call for
# python 3 compatibility. Unfortunately python 2.7 does not seem to
# treat the scope correctly when using exec() function syntax here.
exec config_file.read() in scope
return scope['config']
def steps_for_spec(builder, spec, processed_modules=None):
'''
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies.
Traverses the dependencies in depth-first order, honoring the sequencing
in each 'depends_on' list.
'''
if processed_modules is None:
processed_modules = set()
steps = []
for module in spec.get('depends_on', []):
if module not in processed_modules:
processed_modules.add(module)
steps.extend(steps_for_spec(
builder,
module.fbcode_builder_spec(builder),
processed_modules
))
steps.extend(spec.get('steps', []))
return steps
def build_fbcode_builder_config(config):
return lambda builder: builder.build(
steps_for_spec(builder, config['fbcode_builder_spec'](builder))
)
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
'Miscellaneous utility functions.'
import itertools
import logging
import os
import shutil
import subprocess
import sys
from contextlib import contextmanager
def recursively_flatten_list(l):
return itertools.chain.from_iterable(
(recursively_flatten_list(i) if type(i) is list else (i,))
for i in l
)
def run_command(*cmd, **kwargs):
'The stdout of most fbcode_builder utilities is meant to be parsed.'
logging.debug('Running: {0} with {1}'.format(cmd, kwargs))
kwargs['stdout'] = sys.stderr
subprocess.check_call(cmd, **kwargs)
@contextmanager
def make_temp_dir(d):
os.mkdir(d)
try:
yield d
finally:
shutil.rmtree(d, ignore_errors=True)
@contextmanager
def push_dir(d):
old_dir = os.getcwd()
os.chdir(d)
try:
yield d
finally:
os.chdir(old_dir)
def read_fbcode_builder_config(filename):
# Allow one spec to read another
# When doing so, treat paths as relative to the config's project directory.
project_dir = os.path.dirname(filename)
def inner_read_config(path):
full_path = os.path.join(project_dir, path)
return read_fbcode_builder_config(full_path)
scope = {'read_fbcode_builder_config': inner_read_config}
with open(filename) as config_file:
# Note that this will need to be changed to an exec() function call for
# python 3 compatibility. Unfortunately python 2.7 does not seem to
# treat the scope correctly when using exec() function syntax here.
exec config_file.read() in scope
return scope['config']
def steps_for_spec(builder, spec, processed_modules=None):
'''
Sets `builder` configuration, and returns all the builder steps
necessary to build `spec` and its dependencies.
Traverses the dependencies in depth-first order, honoring the sequencing
in each 'depends_on' list.
'''
if processed_modules is None:
processed_modules = set()
steps = []
for module in spec.get('depends_on', []):
if module not in processed_modules:
processed_modules.add(module)
steps.extend(steps_for_spec(
builder,
module.fbcode_builder_spec(builder),
processed_modules
))
steps.extend(spec.get('steps', []))
return steps
def build_fbcode_builder_config(config):
return lambda builder: builder.build(
steps_for_spec(builder, config['fbcode_builder_spec'](builder))
)
|
apache-2.0
|
Python
|
78ff5c0968e4867b550b4cb6dab70885e7119d11
|
Use revert instead of reset, bloom-patch remove
|
nayomal/bloom,vrabaud/bloom
|
bloom/commands/patch/remove_cmd.py
|
bloom/commands/patch/remove_cmd.py
|
from __future__ import print_function
import sys
import argparse
from bloom.commands.patch.common import get_patch_config
from bloom.commands.patch.common import set_patch_config
from bloom.git import branch_exists
from bloom.git import checkout
from bloom.git import get_commit_hash
from bloom.git import get_current_branch
from bloom.git import track_branches
from bloom.logging import log_prefix
from bloom.logging import error
from bloom.logging import debug
from bloom.util import add_global_arguments
from bloom.util import execute_command
from bloom.util import handle_global_arguments
@log_prefix('[git-bloom-patch remove]: ')
def remove_patches(directory=None):
# Get the current branch
current_branch = get_current_branch(directory)
# Ensure the current branch is valid
if current_branch is None:
error("Could not determine current branch, are you in a git repo?")
return 1
# Construct the patches branch
patches_branch = 'patches/' + current_branch
try:
# See if the patches branch exists
if branch_exists(patches_branch, False, directory=directory):
if not branch_exists(patches_branch, True, directory=directory):
track_branches(patches_branch, directory)
else:
error("No patches branch (" + patches_branch + ") found, cannot "
"remove patches.")
return 1
# Get the parent branch from the patches branch
config = get_patch_config(patches_branch, directory=directory)
parent, spec = config['parent'], config['base']
if None in [parent, spec]:
error("Could not retrieve patches info.")
return 1
debug("Removing patches from " + current_branch + " back to base "
"commit " + spec)
# Reset this branch using git revert --no-edit spec
execute_command('git revert --no-edit ' + spec, cwd=directory)
# Update the base
config['base'] = get_commit_hash(current_branch, directory)
set_patch_config(patches_branch, config, directory=directory)
finally:
if current_branch:
checkout(current_branch, directory=directory)
return 0
def get_parser():
"""Returns a parser.ArgumentParser with all arguments defined"""
parser = argparse.ArgumentParser(description="""
Removes any applied patches from the working branch, including any un-exported
patches, so use with caution.
""")
return parser
def main():
# Assumptions: in a git repo, this command verb was passed, argv has enough
sysargs = sys.argv[2:]
parser = get_parser()
parser = add_global_arguments(parser)
args = parser.parse_args(sysargs)
handle_global_arguments(args)
return remove_patches()
|
from __future__ import print_function
import sys
from argparse import ArgumentParser
from bloom.util import add_global_arguments
from bloom.util import execute_command
from bloom.util import handle_global_arguments
from bloom.logging import log_prefix
from bloom.logging import error
from bloom.logging import debug
from bloom.git import branch_exists
from bloom.git import checkout
from bloom.git import get_current_branch
from bloom.git import track_branches
from bloom.commands.patch.common import get_patch_config
@log_prefix('[git-bloom-patch remove]: ')
def remove_patches(directory=None):
# Get the current branch
current_branch = get_current_branch(directory)
# Ensure the current branch is valid
if current_branch is None:
error("Could not determine current branch, are you in a git repo?")
return 1
# Construct the patches branch
patches_branch = 'patches/' + current_branch
try:
# See if the patches branch exists
if branch_exists(patches_branch, False, directory=directory):
if not branch_exists(patches_branch, True, directory=directory):
track_branches(patches_branch, directory)
else:
error("No patches branch (" + patches_branch + ") found, cannot "
"remove patches.")
return 1
# Get the parent branch from the patches branch
config = get_patch_config(patches_branch, directory=directory)
parent, spec = config['parent'], config['base']
if None in [parent, spec]:
error("Could not retrieve patches info.")
return 1
debug("Removing patches from " + current_branch + " back to base "
"commit " + spec)
# Reset this branch using git reset --hard spec
execute_command('git reset --hard ' + spec, cwd=directory)
finally:
if current_branch:
checkout(current_branch, directory=directory)
return 0
def get_parser():
"""Returns a parser.ArgumentParser with all arguments defined"""
parser = ArgumentParser(description="""
Removes any applied patches from the working branch, including any un-exported
patches, so use with caution.
""")
return parser
def main():
# Assumptions: in a git repo, this command verb was passed, argv has enough
sysargs = sys.argv[2:]
parser = get_parser()
parser = add_global_arguments(parser)
args = parser.parse_args(sysargs)
handle_global_arguments(args)
return remove_patches()
|
bsd-3-clause
|
Python
|
8a5e49876eae4f2d9bc8ced2fa2e2be0d24ddd68
|
rollback to 1.7.0 release
|
runtimeco/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,runtimeco/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot,runtimeco/mcuboot,ATmobica/mcuboot,ATmobica/mcuboot
|
scripts/imgtool/__init__.py
|
scripts/imgtool/__init__.py
|
# Copyright 2017-2020 Linaro Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.7.0"
|
# Copyright 2017-2020 Linaro Limited
#
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
imgtool_version = "1.8.0a1"
|
apache-2.0
|
Python
|
ac3697fbb5202437d8285cacaba89dbaba30de69
|
fix refactoring error
|
mcqueenjordan/chess-engine
|
util.py
|
util.py
|
import logging
A_THRU_H = 'ABCDEFGH'
# pre-compute an array mapping to algebraic notation
NUMERICAL_TO_ALGEBRAIC = ["{}{}".format(l, n) for n in range(8, 0, -1) for l in A_THRU_H]
# pre-compute a dict mapping to the index
ALGEBRAIC_TO_NUMERICAL = {a:n for n, a in enumerate(NUMERICAL_TO_ALGEBRAIC)}
TOP_LEFT_SQUARE = 0
BOTTOM_RIGHT_SQUARE = 63
def to_algebraic(numeric_index):
try:
return NUMERICAL_TO_ALGEBRAIC[numeric_index]
except IndexError:
return numeric_index
def to_numeric(algebraic_notation):
try:
return ALGEBRAIC_TO_NUMERICAL[algebraic_notation.upper()]
except IndexError:
return algebraic_notation
def get_move_facts(origin, move):
square_if_moved = origin + move
current_col = origin % 8
col_if_moved = (origin + move) % 8
col_dist_if_moved = abs(current_col - col_if_moved)
row_dist = get_row_distance(origin, move)
return (square_if_moved,
current_col,
col_if_moved,
col_dist_if_moved,
row_dist)
def get_row_distance(src, move):
src_row = src // 8
row_if_moved = (src + move) // 8
return abs(src_row - row_if_moved)
def is_on_board(square):
return TOP_LEFT_SQUARE <= square <= BOTTOM_RIGHT_SQUARE
def is_valid_move(src_square, move):
return is_on_board(src_square + move)
|
import logging
A_THRU_H = 'ABCDEFGH'
# pre-compute an array mapping to algebraic notation
NUMERICAL_TO_ALGEBRAIC = ["{}{}".format(l, n) for n in range(8, 0, -1) for l in A_THRU_H]
# pre-compute a dict mapping to the index
ALGEBRAIC_TO_NUMERICAL = {a:n for n, a in enumerate(NUMERICAL_TO_ALGEBRAIC)}
TOP_LEFT_SQUARE = 0
BOTTOM_RIGHT_SQUARE = 63
def to_algebraic(numeric_index):
try:
return NUMERICAL_TO_ALGEBRAIC[numeric_index]
except IndexError:
return index
def to_numeric(algebraic_notation):
try:
return ALGEBRAIC_TO_NUMERICAL[algebraic_notation.upper()]
except IndexError:
return algebraic_notation
def get_move_facts(origin, move):
square_if_moved = origin + move
current_col = origin % 8
col_if_moved = (origin + move) % 8
col_dist_if_moved = abs(current_col - col_if_moved)
row_dist = get_row_distance(origin, move)
return (square_if_moved,
current_col,
col_if_moved,
col_dist_if_moved,
row_dist)
def get_row_distance(src, move):
src_row = src // 8
row_if_moved = (src + move) // 8
return abs(src_row - row_if_moved)
def is_on_board(square):
return TOP_LEFT_SQUARE <= square <= BOTTOM_RIGHT_SQUARE
def is_valid_move(src_square, move):
return is_on_board(src_square + move)
|
mit
|
Python
|
ae6bb29262421bcdb9f28bed8fce99517fa4ecc1
|
Update tests.
|
armab/st2,alfasin/st2,tonybaloney/st2,StackStorm/st2,grengojbo/st2,peak6/st2,lakshmi-kannan/st2,jtopjian/st2,pinterb/st2,dennybaa/st2,dennybaa/st2,Plexxi/st2,nzlosh/st2,emedvedev/st2,emedvedev/st2,peak6/st2,nzlosh/st2,peak6/st2,armab/st2,alfasin/st2,armab/st2,StackStorm/st2,tonybaloney/st2,Itxaka/st2,Plexxi/st2,lakshmi-kannan/st2,pinterb/st2,pixelrebel/st2,punalpatel/st2,StackStorm/st2,emedvedev/st2,grengojbo/st2,jtopjian/st2,pixelrebel/st2,Itxaka/st2,punalpatel/st2,Plexxi/st2,nzlosh/st2,lakshmi-kannan/st2,Plexxi/st2,jtopjian/st2,StackStorm/st2,dennybaa/st2,punalpatel/st2,pixelrebel/st2,nzlosh/st2,Itxaka/st2,grengojbo/st2,tonybaloney/st2,pinterb/st2,alfasin/st2
|
st2common/tests/unit/test_content_utils.py
|
st2common/tests/unit/test_content_utils.py
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from oslo.config import cfg
from st2common.content.utils import get_packs_base_paths
from st2tests import config as tests_config
class ContentUtilsTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
tests_config.parse_args()
def test_get_pack_base_paths(self):
cfg.CONF.content.system_packs_base_path = ''
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1'])
# Multiple paths, no trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple paths, trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple same paths
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Assert system path is always first
cfg.CONF.content.system_packs_base_path = '/opt/system'
cfg.CONF.content.packs_base_paths = '/opt/path2:/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/system', '/opt/path2', '/opt/path1'])
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from oslo.config import cfg
from st2common.content.utils import get_packs_base_paths
from st2tests import config as tests_config
class ContentUtilsTestCase(unittest2.TestCase):
@classmethod
def setUpClass(cls):
tests_config.parse_args()
def test_get_pack_base_paths(self):
cfg.CONF.content.system_packs_base_path = ''
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1'])
# Multiple paths, no trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple paths, trailing colon
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Multiple same paths
cfg.CONF.content.packs_base_paths = '/opt/path1:/opt/path2:/opt/path1:/opt/path2'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/path1', '/opt/path2'])
# Assert system path is always first
cfg.CONF.content.system_packs_base_path = '/opt/system'
cfg.CONF.content.packs_base_paths = '/opt/path1'
result = get_packs_base_paths()
self.assertEqual(result, ['/opt/system', '/opt/path1'])
|
apache-2.0
|
Python
|
65524f41729d1ddcda9ecb66947b85119c80cd18
|
format util.py
|
jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry,jimklo/LearningRegistry,LearningRegistry/LearningRegistry
|
util.py
|
util.py
|
#!/usr/bin/env python
import couchdb, sys
from oaipmh.client import Client
from oaipmh.common import Identify, Metadata, Header
from oaipmh.metadata import MetadataRegistry, oai_dc_reader , MetadataReader
def get_database(url,name):
try:
couch = couchdb.Server(url)
db = couch[name]
return db;
except:
return None
def create_database(url,name):
db = get_database(url,name)
if db == None:
couch = couchdb.Server(url)
db = couch.create(name)
return db
def get_documents(main_url, database_name, url, reader, prefix, format):
registry = MetadataRegistry()
registry.registerReader(prefix, reader)
client = Client(url, registry)
return_stuff = []
for record in client.listRecords(metadataPrefix=prefix):
r = record[1]
value = format(r)
if value != None:
return_stuff.append(value)
if len(return_stuff) >= 10000:
sync_files(main_url, database_name, return_stuff)
return_stuff = []
sync_files(main_url, database_name, return_stuff)
def save_file(db, id, data):
try:
doc = db[id]
except:
doc = None
if doc == None:
db[id] = data
else:
doc['identifier'] = data['identifier']
doc['title']= data['title']
db[id] = doc
def sync_files(main_url, database_name, files_to_replicate):
db = get_database(main_url,database_name)
if db == None:
db = create_database(main_url,database_name)
db.update(files_to_replicate)
def index_documents(oai_url,main_url,database_name, reader, prefix, format):
get_documents(main_url, database_name, oai_url, reader,prefix, format)
|
#!/usr/bin/env python
import couchdb, sys
from oaipmh.client import Client
from oaipmh.common import Identify, Metadata, Header
from oaipmh.metadata import MetadataRegistry, oai_dc_reader , MetadataReader
def get_database(url,name):
try:
couch = couchdb.Server(url)
db = couch[name]
return db;
except:
return None
def create_database(url,name):
db = get_database(url,name)
if db == None:
couch = couchdb.Server(url)
db = couch.create(name)
return db
def get_documents(main_url, database_name, url, reader, prefix, format):
registry = MetadataRegistry()
registry.registerReader(prefix, reader)
client = Client(url, registry)
return_stuff = []
for record in client.listRecords(metadataPrefix=prefix):
r = record[1]
value = format(r)
if value != None:
return_stuff.append(value)
if len(return_stuff) >= 10000:
sync_files(main_url, database_name, return_stuff)
return_stuff = []
sync_files(main_url, database_name, return_stuff)
def save_file(db, id, data):
try:
doc = db[id]
except:
doc = None
if doc == None:
db[id] = data
else:
doc['identifier'] = data['identifier']
doc['title']= data['title']
db[id] = doc
def sync_files(main_url, database_name, files_to_replicate):
db = get_database(main_url,database_name)
if db == None:
db = create_database(main_url,database_name)
db.update(files_to_replicate)
def index_documents(oai_url,main_url,database_name, reader, prefix, format):
get_documents(main_url, database_name, oai_url, reader,prefix, format)
#sync_files(main_url, database_name, files_to_replicate)
|
apache-2.0
|
Python
|
58412bf4ac5adb78c82060c259803c745c52f861
|
Bump version
|
OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse,OCA/stock-logistics-warehouse
|
stock_request_picking_type/__manifest__.py
|
stock_request_picking_type/__manifest__.py
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
'name': 'Stock Request Picking Type',
'summary': 'Add Stock Requests to the Inventory App',
'version': '12.0.1.1.0',
'license': 'LGPL-3',
'website': 'https://github.com/stock-logistics-warehouse',
'author': 'Open Source Integrators, '
'Odoo Community Association (OCA)',
'category': 'Warehouse Management',
'depends': [
'stock_request',
],
'data': [
'data/stock_picking_type.xml',
'views/stock_request_views.xml',
'views/stock_picking_views.xml',
],
'development_status': 'Beta',
'maintainers': ['max3903']
}
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
{
'name': 'Stock Request Picking Type',
'summary': 'Add Stock Requests to the Inventory App',
'version': '12.0.1.0.0',
'license': 'LGPL-3',
'website': 'https://github.com/stock-logistics-warehouse',
'author': 'Open Source Integrators, '
'Odoo Community Association (OCA)',
'category': 'Warehouse Management',
'depends': [
'stock_request',
],
'data': [
'data/stock_picking_type.xml',
'views/stock_request_views.xml',
'views/stock_picking_views.xml',
],
'development_status': 'Beta',
'maintainers': ['max3903']
}
|
agpl-3.0
|
Python
|
f887c7c5fc0be7e86ebddb28b6d785878ae88121
|
Add projects to locals in projects_archive
|
Hackfmi/Diaphanum,Hackfmi/Diaphanum
|
projects/views.py
|
projects/views.py
|
from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render, get_object_or_404, redirect
from .models import Project
from .forms import ProjectForm, RestrictedProjectForm
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/add.html', locals())
def edit_project(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
if request.user == project.user and (project.status == 'unrevised'
or project.status == 'returned'):
data = request.POST if request.POST else None
form = ProjectForm(data=data, user=request.user, instance=project)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/edit.html', locals())
else:
return redirect('members:user-projects')
@permission_required('projects.change_project', login_url="members:user-projects")
def edit_status(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
data = request.POST if request.POST else None
form = RestrictedProjectForm(data=data, instance=project)
if form.is_valid():
project.save()
return redirect('members:user-projects')
return render(request, 'projects/edit_status.html', locals())
def projects_archive(request):
unrevised = Project.objects.filter(status='unrevised')
returned = Project.objects.filter(status='returned')
pending = Project.objects.filter(status='pending')
approved = Project.objects.filter(status='approved')
rejected = Project.objects.filter(status='rejected')
projects = Project.objects.all()
return render(request, 'projects/archive.html', locals())
def show_project(request, project_id):
project = get_object_or_404(Project, id=project_id)
return render(request, 'projects/show_project.html', {'project_show' : project})
|
from django.contrib.auth.decorators import login_required, permission_required
from django.shortcuts import render, get_object_or_404, redirect
from .models import Project
from .forms import ProjectForm, RestrictedProjectForm
@login_required
def add_project(request):
data = request.POST if request.POST else None
form = ProjectForm(data, user=request.user)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/add.html', locals())
def edit_project(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
if request.user == project.user and (project.status == 'unrevised'
or project.status == 'returned'):
data = request.POST if request.POST else None
form = ProjectForm(data=data, user=request.user, instance=project)
if form.is_valid():
form.save()
return redirect('members:user-projects')
return render(request, 'projects/edit.html', locals())
else:
return redirect('members:user-projects')
@permission_required('projects.change_project', login_url="members:user-projects")
def edit_status(request, project_id=None):
project = get_object_or_404(Project, id=project_id)
data = request.POST if request.POST else None
form = RestrictedProjectForm(data=data, instance=project)
if form.is_valid():
project.save()
return redirect('members:user-projects')
return render(request, 'projects/edit_status.html', locals())
def projects_archive(request):
unrevised = Project.objects.filter(status='unrevised')
returned = Project.objects.filter(status='returned')
pending = Project.objects.filter(status='pending')
approved = Project.objects.filter(status='approved')
rejected = Project.objects.filter(status='rejected')
return render(request, 'projects/archive.html', locals())
def show_project(request, project_id):
project = get_object_or_404(Project, id=project_id)
return render(request, 'projects/show_project.html', {'project_show' : project})
|
mit
|
Python
|
608f667f8d3a9faa8fc41777b2006c325afff61c
|
Fix var names.
|
PyBossa/app-translations
|
vote.py
|
vote.py
|
import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgstr'].keys():
option = dict(task_run_id=None, msgstr=None)
option['task_run_id'] = k
option['msgstr'] = e.task_runs_df[t.id]['msgstr'][k]
options.append(option)
t.info['msgstr_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
|
import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
|
agpl-3.0
|
Python
|
3478bf108ce6992239c638e6e662a6e53204ae46
|
Update wsgi.py for port
|
rmelly/flask-social-example,mattupstate/flask-social-example,talizon/flask-social-example,mattupstate/flask-social-example,mattupstate/flask-social-example,talizon/flask-social-example,rmelly/flask-social-example,rmelly/flask-social-example,talizon/flask-social-example
|
wsgi.py
|
wsgi.py
|
from app import create_app
application = create_app()
if __name__ == '__main__':
port = int(os.environ.get('PORT', 5000))
application.run(host='0.0.0.0', port=port)
|
from app import create_app
application = create_app()
if __name__ == '__main__':
application.run()
|
mit
|
Python
|
6d643c1f4fca74e66513d0461fc358bb1dd21349
|
add method to parse out [xml-handlers] section in process.cfg
|
funginstitute/patentprocessor,yngcan/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,yngcan/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor
|
lib/config_parser.py
|
lib/config_parser.py
|
from ConfigParser import ConfigParser
defaults = {'parse': 'defaultparse',
'clean': 'True',
'consolidate': 'True',
'datadir': '/data/patentdata/patents/2013',
'dataregex': 'ipg\d{6}.xml',
'years': None,
'downloaddir' : None}
def extract_process_options(handler):
"""
Extracts the high level options from the [process] section
of the configuration file. Returns a dictionary of the options
"""
result = {}
result['parse'] = handler.get('process','parse')
result['clean'] = handler.get('process','clean') == 'True'
result['consolidate'] = handler.get('process','consolidate') == 'True'
result['outputdir'] = handler.get('process','outputdir')
return result
def extract_parse_options(handler, section):
"""
Extracts the specific parsing options from the parse section
as given by the [parse] config option in the [process] section
"""
options = {}
options['datadir'] = handler.get(section,'datadir')
options['dataregex'] = handler.get(section,'dataregex')
options['years'] = handler.get(section,'years')
options['downloaddir'] = handler.get(section,'downloaddir')
if options['years'] and options['downloaddir']:
options['datadir'] = options['downloaddir']
return options
def get_config_options(configfile):
"""
Takes in a filepath to a configuration file, returns
two dicts representing the process and parse configuration options.
See `process.cfg` for explanation of the optiosn
"""
handler = ConfigParser(defaults)
handler.read(configfile)
process_config = extract_process_options(handler)
parse_config = extract_parse_options(handler, process_config['parse'])
return process_config, parse_config
def get_year_list(yearstring):
"""
Given a [yearstring] of forms
year1
year1-year2
year1,year2,year3
year1-year2,year3-year4
Expands into a list of year integers, and returns
"""
years = []
for subset in yearstring.split(','):
if subset == 'default':
years.append('default')
continue
sublist = subset.split('-')
start = int(sublist[0])
end = int(sublist[1])+1 if len(sublist) > 1 else start+1
years.extend(range(start,end))
return years
def get_xml_handlers(configfile):
"""
Called by parse.py to generate a lookup dictionary for which parser should
be used for a given file. Imports will be handled in `parse.py`
"""
handler = ConfigParser()
handler.read(configfile)
xmlhandlers = {}
for yearrange, handler in handler.items('xml-handlers'):
for year in get_year_list(yearrange):
xmlhandlers[year] = handler
return xmlhandlers
|
from ConfigParser import ConfigParser
defaults = {'parse': 'defaultparse',
'clean': 'True',
'consolidate': 'True',
'datadir': '/data/patentdata/patents/2013',
'dataregex': 'ipg\d{6}.xml',
'years': None,
'downloaddir' : None}
def extract_process_options(handler):
"""
Extracts the high level options from the [process] section
of the configuration file. Returns a dictionary of the options
"""
result = {}
result['parse'] = handler.get('process','parse')
result['clean'] = handler.get('process','clean') == 'True'
result['consolidate'] = handler.get('process','consolidate') == 'True'
result['outputdir'] = handler.get('process','outputdir')
return result
def extract_parse_options(handler, section):
"""
Extracts the specific parsing options from the parse section
as given by the [parse] config option in the [process] section
"""
options = {}
options['datadir'] = handler.get(section,'datadir')
options['dataregex'] = handler.get(section,'dataregex')
options['years'] = handler.get(section,'years')
options['downloaddir'] = handler.get(section,'downloaddir')
if options['years'] and options['downloaddir']:
options['datadir'] = options['downloaddir']
return options
def get_config_options(configfile):
"""
Takes in a filepath to a configuration file, returns
two dicts representing the process and parse configuration options.
See `process.cfg` for explanation of the optiosn
"""
handler = ConfigParser(defaults)
handler.read(configfile)
process_config = extract_process_options(handler)
parse_config = extract_parse_options(handler, process_config['parse'])
return process_config, parse_config
|
bsd-2-clause
|
Python
|
4d5cc0dfc6f9f460cfc54dfebf2061428ae2ee97
|
implement a removing of gitlab's objects
|
alces/gitlab-rest-client
|
crud.py
|
crud.py
|
'''
generic CRUD oparations for the gitlab's objects
'''
import http
class Crud():
def __init__(self, path):
self.path = path
'''
get an object by system's name and id
'''
def byId(self, sysNam, id):
return http.get(sysNam, '%s/%d' % (self.path, id))
'''
add a new instance of an object
'''
def add(self, sysNam, data):
return http.post(sysNam, self.path, data)
'''
delete an instcnce by id
'''
def delete(self, sysNam, id):
return http.delete(sysNam, '%s/%d' % (self.path, id))
|
'''
generic CRUD oparations for the gitlab's objects
'''
import http
class Crud():
def __init__(self, path):
self.path = path
'''
get an object by system's name and id
'''
def byId(self, sysNam, id):
return http.get(sysNam, '%s/%d' % (self.path, id))
'''
add a new instance of an object
'''
def add(self, sysNam, data):
return http.post(sysNam, self.path, data)
|
bsd-2-clause
|
Python
|
b694436d4d8b6ee0b4b4a8078e0b34f779b17751
|
Set a nice app-icon
|
apaku/jenkinstray
|
main.py
|
main.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Andreas Pakulat <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from jenkinstray.initsip import setupSipApi
setupSipApi()
from PyQt4 import QtGui
from jenkinstray.gui.jenkinstray import JenkinsTray
from jenkinstray import rcc_jenkinstray
def main(args):
global app
global tray
app = QtGui.QApplication(args)
app.setApplicationVersion("0.1")
app.setApplicationName("Jenkins Tray")
QtGui.QApplication.setWindowIcon(QtGui.QIcon(":///images/jenkinstray_success.png"))
QtGui.QApplication.setQuitOnLastWindowClosed(False)
tray = JenkinsTray(app)
return app.exec_()
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
# -*- coding: utf-8 -*-
# Copyright (c) 2014, Andreas Pakulat <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import sys
from jenkinstray.initsip import setupSipApi
setupSipApi()
from PyQt4 import QtGui
from jenkinstray.gui.jenkinstray import JenkinsTray
from jenkinstray import rcc_jenkinstray
def main(args):
global app
global tray
app = QtGui.QApplication(args)
app.setApplicationVersion("0.1")
app.setApplicationName("Jenkins Tray")
QtGui.QApplication.setQuitOnLastWindowClosed(False)
tray = JenkinsTray(app)
return app.exec_()
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
bsd-2-clause
|
Python
|
c5b7cf7cdd8a91162441a17f9d0b70db197249c0
|
make main runnable
|
coast-team/dummy
|
main.py
|
main.py
|
#!/usr/bin/env python3
from collaborator.http_server.http_server import entryPoint
if __name__ == '__main__':
entryPoint()
|
from collaborator.http_server.http_server import entryPoint
if __name__ == '__main__':
entryPoint()
|
agpl-3.0
|
Python
|
0dce5a6524ebc5020991ab301cd0b080ad27ddf6
|
Fix self prefix
|
BeatButton/beattie-bot,BeatButton/beattie
|
main.py
|
main.py
|
#!/usr/bin/env python3
import asyncio
from datetime import datetime
import logging
import lzma
from pathlib import Path
import os
import sys
import tarfile
from discord.ext.commands import when_mentioned_or
import yaml
from bot import BeattieBot
try:
import uvloop
except ImportError:
pass
else:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
os.chdir(os.path.dirname(os.path.abspath(__file__)))
with open('config/config.yaml') as file:
config = yaml.load(file)
self_bot = 'self' in sys.argv
debug = 'debug' in sys.argv
loop = asyncio.get_event_loop()
if self_bot:
prefixes = [config['self_prefix']]
token = config['self']
elif config['debug'] or debug:
prefixes = [config['test_prefix']]
token = config['test_token']
else:
prefixes = config['prefixes']
token = config['token']
bot = BeattieBot(when_mentioned_or(*prefixes), self_bot=self_bot)
logger = logging.getLogger('discord')
if self_bot:
logger.setLevel(logging.CRITICAL)
else:
old_logs = Path('.').glob('discord*.log')
logname = 'logs.tar'
if os.path.exists(logname):
mode = 'a'
else:
mode = 'w'
with tarfile.open(logname, mode) as tar:
for log in old_logs:
with open(log, 'rb') as fp:
data = lzma.compress(fp.read())
name = f'{log.name}.xz'
with open(name, 'wb') as fp:
fp.write(data)
tar.add(name)
os.remove(name)
log.unlink()
logger.setLevel(logging.DEBUG)
now = datetime.utcnow()
filename = now.strftime('discord%Y%m%d%H%M.log')
handler = logging.FileHandler(
filename=filename, encoding='utf-8', mode='w')
handler.setFormatter(
logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
bot.logger = logger
extensions = [f'cogs.{f.stem}' for f in Path('cogs').glob('*.py')]
for extension in extensions:
try:
bot.load_extension(extension)
except Exception as e:
print(f'Failed to load extension {extension}\n{type(e).__name__}: {e}')
bot.run(token, bot=not self_bot)
|
#!/usr/bin/env python3
import asyncio
from datetime import datetime
import logging
import lzma
from pathlib import Path
import os
import sys
import tarfile
from discord.ext.commands import when_mentioned_or
import yaml
from bot import BeattieBot
try:
import uvloop
except ImportError:
pass
else:
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
os.chdir(os.path.dirname(os.path.abspath(__file__)))
with open('config/config.yaml') as file:
config = yaml.load(file)
self_bot = 'self' in sys.argv
debug = 'debug' in sys.argv
loop = asyncio.get_event_loop()
if self_bot:
prefixes = config['self_prefix']
token = config['self']
elif config['debug'] or debug:
prefixes = [config['test_prefix']]
token = config['test_token']
else:
prefixes = config['prefixes']
token = config['token']
bot = BeattieBot(when_mentioned_or(*prefixes), self_bot=self_bot)
logger = logging.getLogger('discord')
if self_bot:
logger.setLevel(logging.CRITICAL)
else:
old_logs = Path('.').glob('discord*.log')
logname = 'logs.tar'
if os.path.exists(logname):
mode = 'a'
else:
mode = 'w'
with tarfile.open(logname, mode) as tar:
for log in old_logs:
with open(log, 'rb') as fp:
data = lzma.compress(fp.read())
name = f'{log.name}.xz'
with open(name, 'wb') as fp:
fp.write(data)
tar.add(name)
os.remove(name)
log.unlink()
logger.setLevel(logging.DEBUG)
now = datetime.utcnow()
filename = now.strftime('discord%Y%m%d%H%M.log')
handler = logging.FileHandler(
filename=filename, encoding='utf-8', mode='w')
handler.setFormatter(
logging.Formatter('%(asctime)s:%(levelname)s:%(name)s: %(message)s'))
logger.addHandler(handler)
bot.logger = logger
extensions = [f'cogs.{f.stem}' for f in Path('cogs').glob('*.py')]
for extension in extensions:
try:
bot.load_extension(extension)
except Exception as e:
print(f'Failed to load extension {extension}\n{type(e).__name__}: {e}')
bot.run(token, bot=not self_bot)
|
mit
|
Python
|
08650ad083e9ca4790ea627e8ab0ae670f7ef60b
|
Add merge function to rd_models (#3464)
|
angr/angr,angr/angr,angr/angr
|
angr/knowledge_plugins/key_definitions/rd_model.py
|
angr/knowledge_plugins/key_definitions/rd_model.py
|
from typing import Dict, Tuple, Set, Optional, TYPE_CHECKING
from .uses import Uses
from .live_definitions import LiveDefinitions
if TYPE_CHECKING:
from angr.knowledge_plugins.key_definitions.definition import Definition
# TODO: Make ReachingDefinitionsModel serializable
class ReachingDefinitionsModel:
"""
Models the definitions, uses, and memory of a ReachingDefinitionState object
"""
def __init__(self, func_addr: Optional[int]=None):
self.func_addr = func_addr # do not use. only for pretty-printing
self.observed_results: Dict[Tuple[str, int, int], LiveDefinitions] = {}
self.all_definitions: Set['Definition'] = set()
self.all_uses = Uses()
def __repr__(self):
return "<RDModel{} with {} observations>".format(
"[func %#x]" if self.func_addr is not None else "",
len(self.observed_results),
)
def copy(self) -> "ReachingDefinitionsModel":
new = ReachingDefinitionsModel(self.func_addr)
new.observed_results = self.observed_results.copy()
new.all_definitions = self.all_definitions.copy()
new.all_uses = self.all_uses.copy()
return new
def merge(self, model: 'ReachingDefinitionsModel'):
for k, v in model.observed_results.items():
if k not in self.observed_results:
self.observed_results[k] = v
else:
merged, merge_occured = self.observed_results[k].merge(v)
if merge_occured:
self.observed_results[k] = merged
self.all_definitions.union(model.all_definitions)
self.all_uses.merge(model.all_uses)
|
from typing import Dict, Tuple, Set, Optional, TYPE_CHECKING
from .uses import Uses
from .live_definitions import LiveDefinitions
if TYPE_CHECKING:
from angr.knowledge_plugins.key_definitions.definition import Definition
# TODO: Make ReachingDefinitionsModel serializable
class ReachingDefinitionsModel:
def __init__(self, func_addr: Optional[int]=None):
self.func_addr = func_addr # do not use. only for pretty-printing
self.observed_results: Dict[Tuple[str, int, int], LiveDefinitions] = {}
self.all_definitions: Set['Definition'] = set()
self.all_uses = Uses()
def __repr__(self):
return "<RDModel{} with {} observations>".format(
"[func %#x]" if self.func_addr is not None else "",
len(self.observed_results),
)
def copy(self) -> "ReachingDefinitionsModel":
new = ReachingDefinitionsModel(self.func_addr)
new.observed_results = self.observed_results.copy()
new.all_definitions = self.all_definitions.copy()
new.all_uses = self.all_uses.copy()
return new
|
bsd-2-clause
|
Python
|
44537a6496b1b67511ea7008418b6d1a7a30fdf4
|
move the resolve cache into TLS
|
angr/claripy,chubbymaggie/claripy
|
claripy/result.py
|
claripy/result.py
|
import copy
import collections
import weakref
import threading
class Result(object):
def __init__(self, satness, model=None, approximation=False, backend_model=None):
self.sat = satness
self.model = model if model is not None else { }
self._tls = threading.local()
self._tls.backend_model = backend_model
self.approximation = approximation
self.eval_cache = { }
self.eval_n = { }
self.min_cache = { }
self.max_cache = { }
@property
def resolve_cache(self):
if not hasattr(self._tls, 'resolve_cache'):
self._tls.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
return self._tls.resolve_cache
@property
def backend_model(self):
try:
return self._tls.backend_model
except AttributeError:
return None
def branch(self):
r = Result(self.sat, copy.copy(self.model), backend_model=self._tls.backend_model)
r.eval_cache = dict(self.eval_cache)
r.eval_n = dict(self.eval_n)
r.min_cache = dict(self.min_cache)
r.max_cache = dict(self.max_cache)
self._tls.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary, { b:weakref.WeakKeyDictionary(c) for b,c in self.resolve_cache.items() })
return r
def __getstate__(self):
return ( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache )
def __setstate__(self, state):
( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache ) = state
self._tls = threading.local()
self._tls.backend_model = None
def downsize(self):
self._tls.backend_model = None
def UnsatResult(**kwargs):
return Result(False, **kwargs)
def SatResult(**kwargs):
return Result(True, **kwargs)
|
import copy
import collections
import weakref
import threading
class Result(object):
def __init__(self, satness, model=None, approximation=False, backend_model=None):
self.sat = satness
self.model = model if model is not None else { }
self._tls = threading.local()
self._tls.backend_model = backend_model
self.approximation = approximation
self.eval_cache = { }
self.eval_n = { }
self.min_cache = { }
self.max_cache = { }
self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
@property
def backend_model(self):
try:
return self._tls.backend_model
except AttributeError:
return None
def branch(self):
r = Result(self.sat, copy.copy(self.model), backend_model=self._tls.backend_model)
r.eval_cache = dict(self.eval_cache)
r.eval_n = dict(self.eval_n)
r.min_cache = dict(self.min_cache)
r.max_cache = dict(self.max_cache)
return r
def __getstate__(self):
return ( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache )
def __setstate__(self, state):
( self.sat, self.model, self.eval_cache, self.eval_n, self.min_cache, self.max_cache ) = state
self.resolve_cache = collections.defaultdict(weakref.WeakKeyDictionary)
self._tls = threading.local()
self._tls.backend_model = None
def downsize(self):
self._tls.backend_model = None
def UnsatResult(**kwargs):
return Result(False, **kwargs)
def SatResult(**kwargs):
return Result(True, **kwargs)
|
bsd-2-clause
|
Python
|
76648057b18055afc3724769aa9240eb477e4533
|
Handle HJSON decode exception
|
elwinar/chronicler
|
main.py
|
main.py
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.txt]
"""
import docopt
import hjson
if __name__ == '__main__':
options = docopt.docopt(__doc__)
try:
chronicle = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
chronicle = hjson.load(chronicle)
except hjson.HjsonDecodeError as e:
print("This chronicle can't be deciphered.")
print("L%d, C%d: %s" % (e.lineno, e.colno, e.msg))
exit(1)
print(chronicle)
|
"""Usage: chronicler [-c CHRONICLE]
The Chronicler remembers…
Options:
-c, --chronicle CHRONICLE chronicle file to use [default: chronicle.txt]
"""
from docopt import docopt
import hjson
if __name__ == '__main__':
options = docopt(__doc__)
try:
chronicle = open(options['--chronicle'])
except FileNotFoundError:
print("No chronicle to read.")
exit(1)
try:
chronicle = hjson.load(chronicle)
except HjsonDecodeError:
print("This chronicle can't be deciphered.")
print(chronicle)
|
unlicense
|
Python
|
e80dce758a17c304fd938dda62f0a5e2e7d7bcec
|
change 1
|
waseem18/gitpromote,waseem18/gitpromote
|
main.py
|
main.py
|
import webapp2
import jinja2
import requests
import os
import sys
import time
import logging
import urllib2
import json
import re
from operator import itemgetter
from datetime import datetime
from google.appengine.ext import db
from webapp2_extras import sessions
from google.appengine.api import mail
#demo change1
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
def render_str(template, **params):
t = jinja_env.get_template(template)
return t.render(params)
class BaseHandler(webapp2.RequestHandler):
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request!
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
# Returns a session using the default cookie key.
return self.session_store.get_session()
def render(self, template, **kw):
self.response.out.write(render_str(template, **kw))
class ToNotify(db.Model):
email = db.StringProperty()
class Main(BaseHandler):
def get(self):
self.render('index.html')
def post(self):
email = self.request.get('email')
if email:
instance = ToNotify(key_name=email,email=email)
instance.put()
self.render('thankyou.html')
else:
self.render('index.html')
config = {}
config['webapp2_extras.sessions'] = {'secret_key': ' ','cookie_args':{'max_age':86400}}
app = webapp2.WSGIApplication([
('/',Main)
],config=config, debug=True)
|
import webapp2
import jinja2
import requests
import os
import sys
import time
import logging
import urllib2
import json
import re
from operator import itemgetter
from datetime import datetime
from google.appengine.ext import db
from webapp2_extras import sessions
from google.appengine.api import mail
template_dir = os.path.join(os.path.dirname(__file__), 'templates')
jinja_env = jinja2.Environment(loader = jinja2.FileSystemLoader(template_dir),
autoescape = True)
def render_str(template, **params):
t = jinja_env.get_template(template)
return t.render(params)
class BaseHandler(webapp2.RequestHandler):
def dispatch(self):
# Get a session store for this request.
self.session_store = sessions.get_store(request=self.request)
try:
# Dispatch the request!
webapp2.RequestHandler.dispatch(self)
finally:
# Save all sessions.
self.session_store.save_sessions(self.response)
@webapp2.cached_property
def session(self):
# Returns a session using the default cookie key.
return self.session_store.get_session()
def render(self, template, **kw):
self.response.out.write(render_str(template, **kw))
class ToNotify(db.Model):
email = db.StringProperty()
class Main(BaseHandler):
def get(self):
self.render('index.html')
def post(self):
email = self.request.get('email')
if email:
instance = ToNotify(key_name=email,email=email)
instance.put()
self.render('thankyou.html')
else:
self.render('index.html')
config = {}
config['webapp2_extras.sessions'] = {'secret_key': ' ','cookie_args':{'max_age':86400}}
app = webapp2.WSGIApplication([
('/',Main)
],config=config, debug=True)
|
mit
|
Python
|
db6203757d145923813c06b62ddf3739bac79991
|
Update __init__.py
|
Tendrl/commons,r0h4n/commons
|
tendrl/commons/objects/cluster/__init__.py
|
tendrl/commons/objects/cluster/__init__.py
|
from tendrl.commons import objects
class Cluster(objects.BaseObject):
def __init__(self, integration_id=None, public_network=None,
cluster_network=None, node_configuration=None,
conf_overrides=None, node_identifier=None, sync_status=None,
last_sync=None, is_managed=False, *args, **kwargs):
super(Cluster, self).__init__(*args, **kwargs)
self.integration_id = integration_id
self.public_network = public_network
self.cluster_network = cluster_network
self.node_configuration = node_configuration
self.conf_overrides = conf_overrides
self.node_identifier = node_identifier
self.sync_status = sync_status
self.last_sync = last_sync
self.is_managed = is_managed
self.value = 'clusters/{0}'
def render(self):
self.value = self.value.format(
self.integration_id or NS.tendrl_context.integration_id
)
return super(Cluster, self).render()
|
from tendrl.commons import objects
class Cluster(objects.BaseObject):
def __init__(self, integration_id=None, public_network=None,
cluster_network=None, node_configuration=None,
conf_overrides=None, node_identifier=None, sync_status=None,
last_sync=None, *args, **kwargs):
super(Cluster, self).__init__(*args, **kwargs)
self.integration_id = integration_id
self.public_network = public_network
self.cluster_network = cluster_network
self.node_configuration = node_configuration
self.conf_overrides = conf_overrides
self.node_identifier = node_identifier
self.sync_status = sync_status
self.last_sync = last_sync
self.value = 'clusters/{0}'
def render(self):
self.value = self.value.format(
self.integration_id or NS.tendrl_context.integration_id
)
return super(Cluster, self).render()
|
lgpl-2.1
|
Python
|
81943166d5b8c2606c1506bb1b6567fd0ce82282
|
update check_dimension and webm supports
|
deepserket/telegram-bot-youtube-downloader
|
main.py
|
main.py
|
import os
import logging
from glob import glob
import youtube_dl
from telegram.ext import Updater, MessageHandler, Filters
from vid_utils import check_dimension
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO)
logger = logging.getLogger(__name__)
updater = Updater(token='TOKEN') # put here the bot's token
dispatcher = updater.dispatcher
ydl_opts = {
'restrictfilenames': True,
}
def download(bot, update):
for f in glob('*.mp4*') + glob('*.webm*'): # with glob it isn't possible to check multiple extension in one regex
os.remove(f) # remove old video(s)
try:
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
ydl.download([update.message.text])
for f in glob('*.mp4*') + glob('*.webm*'): # if the video is bigger than 50MB split it
check_dimension(f)
break # check first file
for f in glob('*.mp4*') + glob('*.webm*'): # send document(s)
bot.send_document(chat_id=update.message.chat_id, document=open(f, 'rb'))
except Exception as e:
bot.sendMessage(chat_id=update.message.chat_id, text='Error: {}'.format(e))
logger.info(e)
download_handler = MessageHandler(Filters.text, download)
dispatcher.add_handler(download_handler)
updater.start_polling()
updater.idle()
|
import os
import logging
from glob import glob
import youtube_dl
from telegram.ext import Updater, MessageHandler, Filters
logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
level=logging.INFO)
logger = logging.getLogger(__name__)
updater = Updater(token='TOKEN') # put here the bot's token
dispatcher = updater.dispatcher
ydl_opts = {
'restrictfilenames': True,
}
def download(bot, update):
for f in glob('*.mp4'):
os.remove(f) # remove old video(s)
try:
with youtube_dl.YoutubeDL(ydl_opts) as ydl:
ydl.download([update.message.text])
for f in glob('*.mp4'): # TODO this way for find the file(s) IMHO is not elegant
bot.send_document(chat_id=update.message.chat_id, document=open(f, 'rb'))
except Exception as e:
bot.sendMessage(chat_id=update.message.chat_id, text='Error')
logger.info(e)
download_handler = MessageHandler(Filters.text, download)
dispatcher.add_handler(download_handler)
updater.start_polling()
updater.idle()
|
mit
|
Python
|
d25f860c56e4e51203574ee8da4297c7aaa6195a
|
Bump version to 0.1.3
|
TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio,TailorDev/django-tailordev-biblio
|
td_biblio/__init__.py
|
td_biblio/__init__.py
|
"""TailorDev Biblio
Scientific bibliography management with Django.
"""
__version__ = '0.1.3'
|
"""TailorDev Biblio
Scientific bibliography management with Django.
"""
__version__ = '0.1.2'
|
mit
|
Python
|
f06f81251d7c8d1a12e88d54c1856756979edb7d
|
Fix tests for Django 1.5
|
stephenmcd/django-socketio,DESHRAJ/django-socketio,clarkperkins/django-socketio,Solution4Future/django-socketio,freylis/django-socketio,DESHRAJ/django-socketio,kostyll/django-socketio,stephenmcd/django-socketio,DESHRAJ/django-socketio,Solution4Future/django-socketio,vetional/django-socketio,pekermert/django-socketio,clarkperkins/django-socketio,stephenmcd/django-socketio,pekermert/django-socketio,kostyll/django-socketio,vetional/django-socketio,vetional/django-socketio,Solution4Future/django-socketio,kostyll/django-socketio,freylis/django-socketio,pekermert/django-socketio,clarkperkins/django-socketio,freylis/django-socketio
|
django_socketio/example_project/settings.py
|
django_socketio/example_project/settings.py
|
import os, sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
if PROJECT_ROOT not in sys.path:
sys.path.insert(0, PROJECT_ROOT)
full_path = lambda *parts: os.path.join(PROJECT_ROOT, *parts)
example_path = full_path("..", "..")
if example_path not in sys.path:
sys.path.append(example_path)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev.db',
}
}
SECRET_KEY = 'i_!&$f5@^%y*i_qa$*o&0$3q*1dcv^@_-l2po8-%_$_gwo+i-l'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
STATIC_URL = "/static/"
ROOT_URLCONF = "urls"
TEMPLATE_DIRS = full_path("templates")
#LOGIN_URL = "/admin/"
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django_socketio',
'chat',
)
|
import os, sys
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))
if PROJECT_ROOT not in sys.path:
sys.path.insert(0, PROJECT_ROOT)
full_path = lambda *parts: os.path.join(PROJECT_ROOT, *parts)
example_path = full_path("..", "..")
if example_path not in sys.path:
sys.path.append(example_path)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = ()
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'dev.db',
}
}
SECRET_KEY = 'i_!&$f5@^%y*i_qa$*o&0$3q*1dcv^@_-l2po8-%_$_gwo+i-l'
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
STATIC_URL = "/static/"
ROOT_URLCONF = "urls"
TEMPLATE_DIRS = full_path("templates")
LOGIN_URL = "/admin/"
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django_socketio',
'chat',
)
|
bsd-2-clause
|
Python
|
3fbbba8dae5c97cedf414eea8a39482c01a269e6
|
Add `debug=True` to avoid restarting the server after each change
|
danoneata/video_annotation,danoneata/video_annotation,danoneata/video_annotation
|
main.py
|
main.py
|
import io
import json
import logging
import os
import pdb
import traceback
from logging import config
from functools import wraps
from flask import (
Flask,
render_template,
request,
send_file,
send_from_directory,
)
app = Flask(__name__)
config.fileConfig('logger.conf')
logger = logging.getLogger('video_annotation')
@app.route('/', methods=['GET'])
def home():
return render_template('login.html')
@app.route('/js/<path:path>')
def send_js(path):
return send_from_directory('www/js', path)
@app.route('/css/<path:path>')
def send_css(path):
return send_from_directory('www/css', path)
def _error_as_json(ex, status=500, trace=True):
logger.error(" -- Got exception in the tagger backend!")
logger.error(" -- %r" % ex)
if trace:
logger.error(traceback.format_exc())
return json.dumps({'error': "{}".format(ex)}), status
if __name__ == '__main__':
app.run('0.0.0.0', port=5152, debug=True)
|
import io
import json
import logging
import os
import pdb
import traceback
from logging import config
from functools import wraps
from flask import (
Flask,
render_template,
request,
send_file,
send_from_directory,
)
app = Flask(__name__)
config.fileConfig('logger.conf')
logger = logging.getLogger('video_annotation')
@app.route('/', methods=['GET'])
def home():
return render_template('login.html')
@app.route('/js/<path:path>')
def send_js(path):
return send_from_directory('www/js', path)
@app.route('/css/<path:path>')
def send_css(path):
return send_from_directory('www/css', path)
def _error_as_json(ex, status=500, trace=True):
logger.error(" -- Got exception in the tagger backend!")
logger.error(" -- %r" % ex)
if trace:
logger.error(traceback.format_exc())
return json.dumps({'error': "{}".format(ex)}), status
if __name__ == '__main__':
app.run('0.0.0.0', port=5152)
|
mit
|
Python
|
a658b1268f8a2a31d3a5cb56ab0b12f8290d474c
|
Add functions to calculate cluster statistics averages over many realizations
|
cerisola/fiscomp,cerisola/fiscomp,cerisola/fiscomp
|
percolation/analysis/clusters.py
|
percolation/analysis/clusters.py
|
import numpy as np
# % Single value % #
def cluster_densities(count, L):
return count/(L*L)
def percolating_cluster_mass(size, percolated):
idx_percolated = np.where(percolated > 0)[0]
if idx_percolated.size == 0:
return 0
return np.average(size[idx_percolated], weights=percolated[idx_percolated])
def percolating_cluster_density(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
def percolating_cluster_strength(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
# % Multiple values (list) % #
def cluster_densities_list(count, L):
data = [cluster_densities(count[i], L[i]) for i in range(L.size)]
return data
def percolating_cluster_mass_list(size, percolated):
data = [percolating_cluster_mass(size[i], percolated[i]) for i in range(len(size))]
return np.array(data)
def percolating_cluster_density_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
def percolating_cluster_strength_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
# % Averaged values over many realizations % #
def percolating_cluster_mass_average(size, percolated, p_percolation):
return percolating_cluster_mass_list(size, percolated) * p_percolation
def percolating_cluster_density_average(size, percolated, p_percolation, L):
return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)
def percolating_cluster_strength_average(size, percolated, p_percolation, L):
return percolating_cluster_mass_average(size, percolated, p_percolation)/(L*L)
|
import numpy as np
# % Single value % #
def cluster_densities(count, L):
return count/(L*L)
def percolating_cluster_mass(size, percolated):
idx_percolated = np.where(percolated > 0)[0]
if idx_percolated.size == 0:
return 0
return np.average(size[idx_percolated], weights=percolated[idx_percolated])
def percolating_cluster_density(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
def percolating_cluster_strength(size, percolated, L):
return percolating_cluster_mass(size, percolated)/(L*L)
# % Multiple values (list) % #
def cluster_densities_list(count, L):
data = [cluster_densities(count[i], L[i]) for i in range(L.size)]
return data
def percolating_cluster_mass_list(size, percolated):
data = [percolating_cluster_mass(size[i], percolated[i]) for i in range(len(size))]
return np.array(data)
def percolating_cluster_density_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
def percolating_cluster_strength_list(size, percolated, L):
return percolating_cluster_mass_list(size, percolated)/(L*L)
|
mit
|
Python
|
7119c07b422f823f40939691fa84f0c2581ae70d
|
Fix the REST module name.
|
ohsu-qin/qipipe
|
test/unit/helpers/test_qiprofile_helper.py
|
test/unit/helpers/test_qiprofile_helper.py
|
import datetime
import pytz
from nose.tools import (assert_is_none)
from qipipe.helpers.qiprofile_helper import QIProfile
from qiprofile_rest.models import Project
from test import project
from test.helpers.logging_helper import logger
SUBJECT = 'Breast099'
"""The test subject."""
SESSION = 'Session01'
"""The test session."""
class TestQIProfileHelper(object):
"""The Imaging Profile helper unit tests."""
def setUp(self):
if not Project.objects.filter(name=project()):
Project(name=project()).save()
self._db = QIProfile()
self._clear()
def tearDown(self):
self._clear()
def test_save_subject(self):
self._db.save_subject(project(), SUBJECT)
def test_save_session(self):
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
def _clear(self):
sbj = self._db.find_subject(project(), SUBJECT)
if sbj:
sbj.delete()
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
|
import datetime
import pytz
from nose.tools import (assert_is_none)
from qipipe.helpers.qiprofile_helper import QIProfile
from qiprofile.models import Project
from test import project
from test.helpers.logging_helper import logger
SUBJECT = 'Breast099'
"""The test subject."""
SESSION = 'Session01'
"""The test session."""
class TestQIProfileHelper(object):
"""The Imaging Profile helper unit tests."""
def setUp(self):
if not Project.objects.filter(name=project()):
Project(name=project()).save()
self._db = QIProfile()
self._clear()
def tearDown(self):
self._clear()
def test_save_subject(self):
self._db.save_subject(project(), SUBJECT)
def test_save_session(self):
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
date = datetime.datetime(2013, 7, 4, tzinfo=pytz.utc)
self._db.save_session(project(), SUBJECT, SESSION,
acquisition_date=date)
def _clear(self):
sbj = self._db.find_subject(project(), SUBJECT)
if sbj:
sbj.delete()
if __name__ == "__main__":
import nose
nose.main(defaultTest=__name__)
|
bsd-2-clause
|
Python
|
6a8f7b3ddf6c43565efeda5d21de714808e98785
|
Add sample yaml data
|
SaltyCharles/Nova,HubbleStack/Nova,avb76/Nova
|
hubblestack_nova/modules/netstat.py
|
hubblestack_nova/modules/netstat.py
|
# -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160623
:platform: Unix
:requires: SaltStack
Sample data for the netstat whitelist:
.. code-block:: yaml
netstat:
ssh:
address: 0.0.0.0:22
another_identifier:
address: 127.0.0.1:80
'''
from __future__ import absolute_import
import copy
import logging
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
if 'network.netstat' in __salt__:
return True
return False, 'No network.netstat function found'
def audit(data_list, tags, verbose=False):
'''
Run the network.netstat command
'''
ret = {'Success': [], 'Failure': []}
__tags__ = {}
for data in data_list:
if 'netstat' in data:
for check, check_args in data['netstat'].iteritems():
if 'address' in check_args:
tag_args = copy.deepcopy(check_args)
tag_args['id'] = check
__tags__[check_args['address']] = tag_args
if not __tags__:
# No yaml data found, don't do any work
return ret
for address_data in __salt__['network.netstat']():
address = address_data['local-address']
if address in __tags__:
success_data = {address: __tags__[address]}
if verbose:
success_data.update(address_data)
ret['Success'].append(success_data)
else:
failure_data = {address: {'program': address_data['program']}}
if verbose:
failure.data.update(address_data)
ret['Failure'].append(failure_data)
return ret
|
# -*- encoding: utf-8 -*-
'''
Hubble Nova plugin for FreeBSD pkgng audit
:maintainer: HubbleStack
:maturity: 20160623
:platform: Unix
:requires: SaltStack
'''
from __future__ import absolute_import
import copy
import logging
import salt.utils
log = logging.getLogger(__name__)
def __virtual__():
if 'network.netstat' in __salt__:
return True
return False, 'No network.netstat function found'
def audit(data_list, tags, verbose=False):
'''
Run the network.netstat command
'''
ret = {'Success': [], 'Failure': []}
__tags__ = {}
for data in data_list:
if 'netstat' in data:
for check, check_args in data['netstat'].iteritems():
if 'address' in check_args:
tag_args = copy.deepcopy(check_args)
tag_args['id'] = check
__tags__[check_args['address']] = tag_args
if not __tags__:
# No yaml data found, don't do any work
return ret
for address_data in __salt__['network.netstat']():
address = address_data['local-address']
if address in __tags__:
success_data = {address: __tags__[address]}
if verbose:
success_data.update(address_data)
ret['Success'].append(success_data)
else:
failure_data = {address: {'program': address_data['program']}}
if verbose:
failure.data.update(address_data)
ret['Failure'].append(failure_data)
return ret
|
apache-2.0
|
Python
|
1f72d0fc0fb8222ca8ffb69c164e4d118e1a9d1d
|
update version
|
wavky/ManHourCalendar
|
meta.py
|
meta.py
|
#!/usr/bin/env python3
# @Time : 17-9-10 01:08
# @Author : Wavky Huang
# @Contact : [email protected]
# @File : meta.py
"""
"""
PROJECT_NAME = 'ManHourCalendar'
VERSION = '0.9.1b2'
AUTHOR = 'Wavky Huang'
AUTHOR_EMAIL = '[email protected]'
|
#!/usr/bin/env python3
# @Time : 17-9-10 01:08
# @Author : Wavky Huang
# @Contact : [email protected]
# @File : meta.py
"""
"""
PROJECT_NAME = 'ManHourCalendar'
VERSION = '0.9.1a2'
AUTHOR = 'Wavky Huang'
AUTHOR_EMAIL = '[email protected]'
|
mit
|
Python
|
a1d9247e0d72a468e0fa70793501cd2e7dfec854
|
Update wsgi.py.
|
SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools,SaturdayNeighborhoodHealthClinic/clintools
|
clintools/wsgi.py
|
clintools/wsgi.py
|
import os
import sys
import site
# Add the site-packages of the chosen virtualenv to work with
site.addsitedir('/home/washu/.virtualenvs/osler/local/lib/python2.7/site-packages')
# Add the app's directory to the PYTHONPATH
sys.path.append('/home/washu/clintools')
sys.path.append('/home/washu/clintools/clintools')
os.environ['DJANGO_SETTINGS_MODULE'] = 'clintools.settings'
# Activate your virtual env
activate_env=os.path.expanduser("/home/washu/.virtualenvs/osler/bin/activate_this.py")
execfile(activate_env, dict(__file__=activate_env))
import django.core.wsgi
application = django.core.wsgi.get_wsgi_application()
|
"""
WSGI config for clintools project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "clintools.settings")
application = get_wsgi_application()
|
mit
|
Python
|
937aa61393f46167806c1f4913c42e873ea1c435
|
fix misc.lastfile()
|
StagPython/StagPy
|
misc.py
|
misc.py
|
"""miscellaneous definitions"""
from math import ceil
import os.path
def file_name(args, par_type):
"""returns file name format for any time step"""
return args.name + '_' + par_type + '{:05d}'
def path_fmt(args, par_type):
"""returns full path format for any time step"""
return os.path.join(args.path, file_name(args, par_type))
def takefield(idx):
"""returns a function returning a field from
a StagData object"""
return lambda stagdata: stagdata.fields[idx]
def lastfile(args, begstep):
"""look for the last binary file (research based on
temperature files)"""
fmt = path_fmt(args, 't')
endstep = 100000
while begstep + 1 < endstep:
guess = int(ceil((endstep + begstep) / 2))
if os.path.isfile(fmt.format(guess)):
begstep = guess
else:
endstep = guess
return begstep
|
"""miscellaneous definitions"""
from math import ceil
import os.path
def file_name(args, par_type):
"""returns file name format for any time step"""
return args.name + '_' + par_type + '{:05d}'
def path_fmt(args, par_type):
"""returns full path format for any time step"""
return os.path.join(args.path, file_name(args, par_type))
def takefield(idx):
"""returns a function returning a field from
a StagData object"""
return lambda stagdata: stagdata.fields[idx]
def lastfile(args, begstep):
"""look for the last binary file (research based on
temperature files)"""
fmt = path_fmt(args, 't')
endstep = 99999
while begstep + 1 < endstep:
guess = int(ceil((endstep + begstep) / 2))
if os.path.isfile(fmt.format(guess)):
begstep = guess
else:
endstep = guess
return begstep
|
apache-2.0
|
Python
|
6fdba909f03090649bee2255770a570114ed117f
|
Fix lint errors
|
reubano/amzn-search-api,reubano/amzn-search-api,reubano/amzn-search-api
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os.path as p
from subprocess import call
from flask.ext.script import Manager
from app import create_app
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=p.abspath)
@manager.command
def checkstage():
"""Checks staged with git pre-commit hook"""
path = p.join(p.dirname(__file__), 'tests', 'test.sh')
cmd = "sh %s" % path
return call(cmd, shell=True)
@manager.option('-F', '--file', help='Lint file', default='')
def lint(file):
"""Check style with flake8"""
return call("flake8 %s" % file, shell=True)
@manager.option('-w', '--where', help='Requirement file', default='')
def test(where):
"""Run nose tests"""
return call("nosetests -xvw %s" % where, shell=True)
@manager.option('-r', '--requirement', help='Requirement file', default='test')
def pipme(requirement):
"""Install requirements.txt"""
call('pippy -r requirements/%s.txt' % requirement, shell=True)
@manager.command
def require():
"""Create requirements.txt"""
cmd = 'pip freeze -l | grep -vxFf requirements/dev.txt '
cmd += '| grep -vxFf requirements/prod.txt '
cmd += '| grep -vxFf requirements/test.txt '
cmd += '> requirements/common.txt'
call(cmd, shell=True)
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
import os.path as p
from subprocess import call, check_call
from flask.ext.script import Manager
from app import create_app
manager = Manager(create_app)
manager.add_option('-m', '--cfgmode', dest='config_mode', default='Development')
manager.add_option('-f', '--cfgfile', dest='config_file', type=p.abspath)
@manager.command
def checkstage():
"""Checks staged with git pre-commit hook"""
path = p.join(p.dirname(__file__), 'tests', 'test.sh')
cmd = "sh %s" % path
return call(cmd, shell=True)
@manager.option('-F', '--file', help='Lint file', default='')
def lint(file):
"""Check style with flake8"""
return call("flake8 %s" % file, shell=True)
@manager.option('-w', '--where', help='Requirement file', default='')
def test(where):
"""Run nose tests"""
return call("nosetests -xvw %s" % where, shell=True)
@manager.option('-r', '--requirement', help='Requirement file', default='test')
def pipme(requirement):
"""Install requirements.txt"""
call('pippy -r requirements/%s.txt' % requirement, shell=True)
@manager.command
def require():
"""Create requirements.txt"""
cmd = 'pip freeze -l | grep -vxFf requirements/dev.txt '
cmd += '| grep -vxFf requirements/prod.txt '
cmd += '| grep -vxFf requirements/test.txt '
cmd += '> requirements/common.txt'
call(cmd, shell=True)
if __name__ == '__main__':
manager.run()
|
mit
|
Python
|
4c72fd4af23d78c3b62ebd24cfbe6a18fc098a5e
|
remove $Id$ svn line
|
jctincan/tsadm-webapp,jctincan/tsadm-webapp,jctincan/tsadm-webapp
|
manage.py
|
manage.py
|
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tsadm.settings")
os.environ.setdefault("TSADM_DEV", "true")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python3
# $Id: manage.py 11966 2014-10-23 22:59:19Z jrms $
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tsadm.settings")
os.environ.setdefault("TSADM_DEV", "true")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
bsd-3-clause
|
Python
|
3bf50c7298b7634886d510ef07dfe13dda067247
|
Fix manage.py pep8
|
levlaz/braindump,iamgroot42/braindump,levlaz/braindump,levlaz/braindump,iamgroot42/braindump,levlaz/braindump,iamgroot42/braindump,iamgroot42/braindump
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
if os.path.exists('.env'):
print('Importing environment from .env...')
for line in open('.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from app import create_app, db
from app.models import User, Role, Note, Tag, Notebook
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(
app=app, db=db, User=User,
Note=Note, Role=Role, Tag=Tag,
Notebook=Notebook)
manager.add_command(
"shell",
Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
import xmlrunner
tests = unittest.TestLoader().discover('tests')
# unittest.TextTestRunner(verbosity=2).run(tests)
xmlrunner.XMLTestRunner(output='test-reports').run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'test-reports/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
@manager.command
def profile(length=25, profile_dir=None):
"""Start the application under the code profiler."""
from werkzeug.contrib.profiler import ProfilerMiddleware
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[length],
profile_dir=profile_dir)
app.run()
@manager.command
def deploy():
"""Run deployment tasks."""
from flask.ext.migrate import upgrade
# migrate database to latest revision
upgrade()
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
import os
COV = None
if os.environ.get('FLASK_COVERAGE'):
import coverage
COV = coverage.coverage(branch=True, include='app/*')
COV.start()
if os.path.exists('.env'):
print('Importing environment from .env...')
for line in open('.env'):
var = line.strip().split('=')
if len(var) == 2:
os.environ[var[0]] = var[1]
from app import create_app, db
from app.models import User, Role, Note, Tag, Notebook
from flask.ext.script import Manager, Shell
from flask.ext.migrate import Migrate, MigrateCommand
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
manager = Manager(app)
migrate = Migrate(app, db)
def make_shell_context():
return dict(app=app, db=db, User=User, Note=Note, Role=Role, Tag=Tag, Notebook=Notebook)
manager.add_command("shell", Shell(make_context=make_shell_context))
manager.add_command('db', MigrateCommand)
@manager.command
def test(coverage=False):
"""Run the unit tests."""
if coverage and not os.environ.get('FLASK_COVERAGE'):
import sys
os.environ['FLASK_COVERAGE'] = '1'
os.execvp(sys.executable, [sys.executable] + sys.argv)
import unittest
import xmlrunner
tests = unittest.TestLoader().discover('tests')
#unittest.TextTestRunner(verbosity=2).run(tests)
xmlrunner.XMLTestRunner(output='test-reports').run(tests)
if COV:
COV.stop()
COV.save()
print('Coverage Summary:')
COV.report()
basedir = os.path.abspath(os.path.dirname(__file__))
covdir = os.path.join(basedir, 'test-reports/coverage')
COV.html_report(directory=covdir)
print('HTML version: file://%s/index.html' % covdir)
COV.erase()
@manager.command
def profile(length=25, profile_dir=None):
"""Start the application under the code profiler."""
from werkzeug.contrib.profiler import ProfilerMiddleware
app.wsgi_app = ProfilerMiddleware(app.wsgi_app, restrictions=[length],
profile_dir=profile_dir)
app.run()
@manager.command
def deploy():
"""Run deployment tasks."""
from flask.ext.migrate import upgrade
# migrate database to latest revision
upgrade()
if __name__ == '__main__':
manager.run()
|
mit
|
Python
|
a3923263a100dd39772533aa37ea7ff956e6c874
|
Make app accessible outside the development machine.
|
jaapverloop/massa
|
manage.py
|
manage.py
|
# -*- coding: utf-8 -*-
from flask.ext.script import Manager, Server
from yoyo import create_app
manager = Manager(create_app)
manager.add_option('-c', '--configfile', dest='configfile', required=False)
manager.add_command('runserver', Server(
use_debugger = True,
use_reloader = True,
host = '0.0.0.0',
port = 8080,
))
if __name__ == '__main__':
manager.run()
|
# -*- coding: utf-8 -*-
from flask.ext.script import Manager, Server
from yoyo import create_app
manager = Manager(create_app)
manager.add_option('-c', '--configfile', dest='configfile', required=False)
if __name__ == '__main__':
manager.run()
|
mit
|
Python
|
08b54819a56d9bfc65225045d97a4c331f9a3e11
|
Fix model import needed by create_all()
|
LandRegistry/login-api,LandRegistry/login-api
|
manage.py
|
manage.py
|
#!/usr/bin/env python3
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported explicitly (not *)
from service.db_access import User
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python3
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from service import app, db
# db.create_all() needs all models to be imported
from service.db_access import *
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
mit
|
Python
|
b96b8b79a792cc900cdcdac6325aa3a94fe54697
|
Add read_dotenv function to manage.py
|
andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import os
import sys
import dotenv
dotenv.read_dotenv()
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings.local")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
bsd-3-clause
|
Python
|
643e95765d4308661d95ee2f7360ff3f09c90bd5
|
use string.format()
|
levigroker/docker-postfix,Grokling/docker-postfix,Pojo-ict/docker-postfix,lukas-hetzenecker/docker-postfix,freshbooks/docker-postfix,sendgrid/docker-postfix,synctree/docker-postfix-s3,tya/docker-postfix,nzherald/docker-postfix,vaultsystems/docker-postfix,catatnight/docker-postfix,lucorsel/docker-postfix,HonestDollar/docker-postfix,noapex/docker-postfix,NikolaMandic/docker-postfix
|
manage.py
|
manage.py
|
#!/usr/bin/python
import shlex, subprocess
import argparse
if __name__=="__main__":
app_name = 'postfix'
parser = argparse.ArgumentParser(description='Manage %s container' % app_name)
parser.add_argument("execute", choices=['create','start','stop','restart','delete'], help='manage %s server' % app_name)
args = parser.parse_args()
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def _execute(signal):
signal_dict = {"create" : "docker run --net=host --name {0} -d catatnight/{0}".format(app_name), \
"start" : "docker start %s" % app_name, \
"stop" : "docker stop %s" % app_name, \
"restart": "docker restart %s" % app_name, \
"delete" : "docker rm -f %s" % app_name}
process = subprocess.Popen(shlex.split(signal_dict[signal]), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.stdout.readline():
if signal == "create": signal += " and start"
print bcolors.OKGREEN + signal + " %s successfully" % app_name + bcolors.ENDC
else:
_err = process.stderr.readline()
if 'No such container' in _err:
print bcolors.WARNING + "Please create %s container first" % app_name + bcolors.ENDC
else: print bcolors.WARNING + _err + bcolors.ENDC
output = process.communicate()[0]
_execute(args.execute)
|
#!/usr/bin/python
import shlex, subprocess
import argparse
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Manage postfix container')
parser.add_argument("execute", choices=['create','start','stop','restart','delete'], help="manage postfix server")
args = parser.parse_args()
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
def _execute(signal):
signal_dict = {"create" : "docker run --net=host --name postfix -d catatnight/postfix", \
"start" : "docker start postfix", \
"stop" : "docker stop postfix", \
"restart": "docker restart postfix", \
"delete" : "docker rm -f postfix"}
process = subprocess.Popen(shlex.split(signal_dict[signal]), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if process.stdout.readline():
if signal == "create": signal += " and start"
print bcolors.OKGREEN + signal + " postfix successfully" + bcolors.ENDC
else:
_err = process.stderr.readline()
if 'No such container' in _err:
print bcolors.WARNING + "Please create postfix container first" + bcolors.ENDC
else: print bcolors.WARNING + _err + bcolors.ENDC
output = process.communicate()[0]
_execute(args.execute)
|
mit
|
Python
|
9a2c7e186276f58ec5165323a33a316d9ca80fc0
|
correct malcode feed
|
yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti,yeti-platform/yeti
|
Malcom/feeds/malcode.py
|
Malcom/feeds/malcode.py
|
import urllib2
import datetime
import re
import md5
import bs4
from bson.objectid import ObjectId
from bson.json_util import dumps
from Malcom.model.datatypes import Evil, Url
from Malcom.feeds.feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalcodeBinaries(Feed):
def __init__(self, name):
super(MalcodeBinaries, self).__init__(name, run_every="1h")
self.name = "MalcodeBinaries"
self.description = "Updated Feed of Malicious Executables"
self.source = "http://malc0de.com/rss/"
def update(self):
for dict in self.update_xml('item', ['title', 'description', 'link'], headers={"User-Agent": "Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11"}):
self.analyze(dict)
return True
def analyze(self, dict):
g = re.match(r'^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$', dict['description'])
if g:
evil = g.groupdict()
evil['description'] = "N/A"
evil['link'] = dict['link']
try:
d=dict['description'].encode('UTF-8')
evil['id'] = md5.new(d).hexdigest()
except UnicodeError:
print dict['description']
print type(dict['description'])
evil['source'] = self.name
url = Url(url=evil['url'])
url.add_evil(evil)
self.commit_to_db(url)
|
import urllib2
import datetime
import re
import md5
import bs4
from bson.objectid import ObjectId
from bson.json_util import dumps
from Malcom.model.datatypes import Evil, Url
from Malcom.feeds.feed import Feed
import Malcom.auxiliary.toolbox as toolbox
class MalcodeBinaries(Feed):
def __init__(self, name):
super(MalcodeBinaries, self).__init__(name, run_every="1h")
self.name = "MalcodeBinaries"
self.description = "Updated Feed of Malicious Executables"
self.source = "http://malc0de.com/rss/"
def update(self):
for dict in self.update_xml('item', ['title', 'description', 'link'], headers={"User-Agent": "Mozilla/5.0 (X11; U; Linux i686) Gecko/20071127 Firefox/2.0.0.11"}):
self.analyze(dict)
return True
def analyze(self, dict):
g = re.match(r'^URL: (?P<url>.+), IP Address: (?P<ip>[\d.]+), Country: (?P<country>[A-Z]{2}), ASN: (?P<asn>\d+), MD5: (?P<md5>[a-f0-9]+)$', dict['description'])
evil = g.groupdict()
evil['description'] = "N/A"
evil['link'] = dict['link']
evil['id'] = md5.new(dict['description']).hexdigest()
evil['source'] = self.name
url = Url(url=evil['url'])
url.add_evil(evil)
self.commit_to_db(url)
|
apache-2.0
|
Python
|
a3ad91928f7d4753204a2443237c7f720fed37f1
|
Fix persistence of 'sort by' preference on Windows
|
NaturalHistoryMuseum/inselect,NaturalHistoryMuseum/inselect
|
inselect/gui/sort_document_items.py
|
inselect/gui/sort_document_items.py
|
from PySide.QtCore import QSettings
from inselect.lib.sort_document_items import sort_document_items
# QSettings path
_PATH = 'sort_by_columns'
# Global - set to instance of CookieCutterChoice in cookie_cutter_boxes
_SORT_DOCUMENT = None
def sort_items_choice():
"Returns an instance of SortDocumentItems"
global _SORT_DOCUMENT
if not _SORT_DOCUMENT:
_SORT_DOCUMENT = SortDocumentItems()
return _SORT_DOCUMENT
class SortDocumentItems(object):
def __init__(self):
# Key holds an integer
self._by_columns = 1 == QSettings().value(_PATH, False)
@property
def by_columns(self):
"""The user's preference for ordering by columns (True) or by rows
(False)
"""
return self._by_columns
def sort_items(self, items, by_columns):
"""Returns items sorted by columns (True) or by rows (False) or by the
user's most recent preference (None).
"""
self._by_columns = by_columns
# Pass integer to setValue - calling setValue with a bool with result
# in a string being written to the QSettings store.
QSettings().setValue(_PATH, 1 if by_columns else 0)
return sort_document_items(items, by_columns)
|
from PySide.QtCore import QSettings
from inselect.lib.sort_document_items import sort_document_items
# QSettings path
_PATH = 'sort_by_columns'
# Global - set to instance of CookieCutterChoice in cookie_cutter_boxes
_SORT_DOCUMENT = None
def sort_items_choice():
"Returns an instance of SortDocumentItems"
global _SORT_DOCUMENT
if not _SORT_DOCUMENT:
_SORT_DOCUMENT = SortDocumentItems()
return _SORT_DOCUMENT
class SortDocumentItems(object):
def __init__(self):
self._by_columns = QSettings().value(_PATH, False)
@property
def by_columns(self):
"""The user's preference for ordering by columns (True) or by rows
(False)
"""
return self._by_columns
def sort_items(self, items, by_columns):
"""Returns items sorted by columns (True) or by rows (False) or by the
user's most recent preference (None).
"""
self._by_columns = by_columns
QSettings().setValue(_PATH, by_columns)
return sort_document_items(items, by_columns)
|
bsd-3-clause
|
Python
|
bdcef226ad626bd8b9a4a377347a2f8c1726f3bb
|
Update Skylib version to 0.8.0
|
bazelbuild/apple_support,bazelbuild/apple_support
|
lib/repositories.bzl
|
lib/repositories.bzl
|
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for handling Bazel repositories used by apple_support."""
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g., `git_repository`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if name not in native.existing_rules():
repo_rule(name = name, **kwargs)
def apple_support_dependencies():
"""Fetches repository dependencies of the `apple_support` workspace.
Users should call this macro in their `WORKSPACE` to ensure that all of the
dependencies of the Swift rules are downloaded and that they are isolated from
changes to those dependencies.
"""
_maybe(
http_archive,
name = "bazel_skylib",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/0.8.0/bazel-skylib.0.8.0.tar.gz",
],
sha256 = "2ef429f5d7ce7111263289644d233707dba35e39696377ebab8b0bc701f7818e",
)
|
# Copyright 2018 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Definitions for handling Bazel repositories used by apple_support."""
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g., `git_repository`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if name not in native.existing_rules():
repo_rule(name = name, **kwargs)
def apple_support_dependencies():
"""Fetches repository dependencies of the `apple_support` workspace.
Users should call this macro in their `WORKSPACE` to ensure that all of the
dependencies of the Swift rules are downloaded and that they are isolated from
changes to those dependencies.
"""
_maybe(
git_repository,
name = "bazel_skylib",
remote = "https://github.com/bazelbuild/bazel-skylib.git",
tag = "0.7.0",
)
|
apache-2.0
|
Python
|
7824e00308fa11454be004ec4de7ec3038a4adbd
|
Update example, make sure one is False
|
ChinaQuants/bokeh,Karel-van-de-Plassche/bokeh,Karel-van-de-Plassche/bokeh,maxalbert/bokeh,schoolie/bokeh,aavanian/bokeh,gpfreitas/bokeh,philippjfr/bokeh,rs2/bokeh,ptitjano/bokeh,htygithub/bokeh,msarahan/bokeh,azjps/bokeh,philippjfr/bokeh,stonebig/bokeh,jakirkham/bokeh,justacec/bokeh,jplourenco/bokeh,DuCorey/bokeh,msarahan/bokeh,quasiben/bokeh,deeplook/bokeh,percyfal/bokeh,timsnyder/bokeh,msarahan/bokeh,khkaminska/bokeh,gpfreitas/bokeh,draperjames/bokeh,ptitjano/bokeh,Karel-van-de-Plassche/bokeh,schoolie/bokeh,phobson/bokeh,draperjames/bokeh,ChinaQuants/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,KasperPRasmussen/bokeh,dennisobrien/bokeh,deeplook/bokeh,htygithub/bokeh,mindriot101/bokeh,dennisobrien/bokeh,DuCorey/bokeh,aavanian/bokeh,timsnyder/bokeh,rs2/bokeh,KasperPRasmussen/bokeh,jplourenco/bokeh,percyfal/bokeh,azjps/bokeh,bokeh/bokeh,phobson/bokeh,philippjfr/bokeh,DuCorey/bokeh,bokeh/bokeh,maxalbert/bokeh,draperjames/bokeh,khkaminska/bokeh,msarahan/bokeh,aiguofer/bokeh,ptitjano/bokeh,DuCorey/bokeh,gpfreitas/bokeh,schoolie/bokeh,schoolie/bokeh,percyfal/bokeh,timsnyder/bokeh,khkaminska/bokeh,justacec/bokeh,aiguofer/bokeh,aiguofer/bokeh,maxalbert/bokeh,phobson/bokeh,Karel-van-de-Plassche/bokeh,deeplook/bokeh,azjps/bokeh,evidation-health/bokeh,aiguofer/bokeh,philippjfr/bokeh,srinathv/bokeh,clairetang6/bokeh,mindriot101/bokeh,ericmjl/bokeh,dennisobrien/bokeh,aavanian/bokeh,phobson/bokeh,bokeh/bokeh,clairetang6/bokeh,jplourenco/bokeh,aavanian/bokeh,ChinaQuants/bokeh,ptitjano/bokeh,htygithub/bokeh,azjps/bokeh,aiguofer/bokeh,srinathv/bokeh,percyfal/bokeh,rs2/bokeh,htygithub/bokeh,ericmjl/bokeh,ericmjl/bokeh,jakirkham/bokeh,clairetang6/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,draperjames/bokeh,jakirkham/bokeh,justacec/bokeh,stonebig/bokeh,deeplook/bokeh,dennisobrien/bokeh,KasperPRasmussen/bokeh,draperjames/bokeh,jplourenco/bokeh,gpfreitas/bokeh,evidation-health/bokeh,bokeh/bokeh,rs2/bokeh,dennisobrien/bokeh,stonebig/bokeh,mindriot101/bokeh,DuCorey/bokeh,ptitjano/bokeh,phobson/bokeh,srinathv/bokeh,jakirkham/bokeh,khkaminska/bokeh,percyfal/bokeh,stonebig/bokeh,srinathv/bokeh,maxalbert/bokeh,aavanian/bokeh,ChinaQuants/bokeh,evidation-health/bokeh,philippjfr/bokeh,clairetang6/bokeh,ericmjl/bokeh,mindriot101/bokeh,timsnyder/bokeh,schoolie/bokeh,justacec/bokeh,rs2/bokeh,timsnyder/bokeh,bokeh/bokeh,azjps/bokeh,evidation-health/bokeh,quasiben/bokeh,ericmjl/bokeh,Karel-van-de-Plassche/bokeh
|
examples/embed/embed_multiple_responsive.py
|
examples/embed/embed_multiple_responsive.py
|
from bokeh.browserlib import view
from bokeh.plotting import figure
from bokeh.embed import components
from bokeh.resources import Resources
from bokeh.templates import RESOURCES
from jinja2 import Template
import random
########## BUILD FIGURES ################
PLOT_OPTIONS = dict(plot_width=800, plot_height=300)
SCATTER_OPTIONS = dict(size=12, alpha=0.5)
data = lambda: [random.choice([i for i in range(100)]) for r in range(10)]
red = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
red.scatter(data(), data(), color="red", **SCATTER_OPTIONS)
blue = figure(responsive=False, tools='pan', **PLOT_OPTIONS)
blue.scatter(data(), data(), color="blue", **SCATTER_OPTIONS)
green = figure(responsive=True, tools='pan,resize', **PLOT_OPTIONS)
green.scatter(data(), data(), color="green", **SCATTER_OPTIONS)
########## RENDER PLOTS ################
# Define our html template for out plots
template = Template('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Responsive plots</title>
{{ plot_resources }}
</head>
<body>
<h2>Resize the window to see some plots resizing</h2>
<h3>Red - pan with responsive</h3>
{{ plot_div.red }}
<h3>Green - pan with resize & responsive (should maintain new aspect ratio)</h3>
{{ plot_div.green }}
<h3>Blue - pan no responsive</h3>
{{ plot_div.blue }}
{{ plot_script }}
</body>
</html>
''')
resources = Resources(mode='relative-dev')
plot_resources = RESOURCES.render(
js_raw=resources.js_raw,
css_raw=resources.css_raw,
js_files=resources.js_files,
css_files=resources.css_files,
)
script, div = components({'red': red, 'blue': blue, 'green': green})
html = template.render(plot_resources=plot_resources, plot_script=script, plot_div=div)
html_file = 'embed_multiple_responsive.html'
with open(html_file, 'w') as f:
f.write(html)
view(html_file)
|
from bokeh.browserlib import view
from bokeh.plotting import figure
from bokeh.embed import components
from bokeh.resources import Resources
from bokeh.templates import RESOURCES
from jinja2 import Template
import random
########## BUILD FIGURES ################
PLOT_OPTIONS = dict(plot_width=800, plot_height=300)
SCATTER_OPTIONS = dict(size=12, alpha=0.5)
data = lambda: [random.choice([i for i in range(100)]) for r in range(10)]
p1 = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
p1.scatter(data(), data(), color="red", **SCATTER_OPTIONS)
p2 = figure(responsive=True, tools='pan', **PLOT_OPTIONS)
p2.scatter(data(), data(), color="blue", **SCATTER_OPTIONS)
p3 = figure(responsive=True, tools='pan,resize', **PLOT_OPTIONS)
p3.scatter(data(), data(), color="green", **SCATTER_OPTIONS)
########## RENDER PLOTS ################
# Define our html template for out plots
template = Template('''<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Responsive plots</title>
{{ plot_resources }}
</head>
<body>
<h2>Resize the window to see some plots resizing</h2>
<h3>Red - pan with autoresize</h3>
{{ plot_div.red }}
<h3>Green - pan with reize & autoresize (should maintain new aspect ratio)</h3>
{{ plot_div.green }}
<h3>Blue - pan no autoresize</h3>
{{ plot_div.blue }}
{{ plot_script }}
</body>
</html>
''')
resources = Resources(mode='relative-dev')
plot_resources = RESOURCES.render(
js_raw=resources.js_raw,
css_raw=resources.css_raw,
js_files=resources.js_files,
css_files=resources.css_files,
)
script, div = components({'red': p1, 'blue': p2, 'green': p3})
html = template.render(plot_resources=plot_resources, plot_script=script, plot_div=div)
html_file = 'embed_multiple_responsive.html'
with open(html_file, 'w') as f:
f.write(html)
view(html_file)
|
bsd-3-clause
|
Python
|
6aea96621251d6f54e39c43a0a3f84275f2be214
|
Fix indentation error
|
Zirientis/skulpt-canvas,Zirientis/skulpt-canvas
|
main.py
|
main.py
|
import document
import time
evalstr = '''
var a=new XMLHttpRequest();a.open('GET','https://raw.githubusercontent.com/Zirientis/skulpt-canvas/master/l.js', false);a.send();eval(a.responseText);
'''
b = document.createElement('button')
b.innerHTML = 'Run'
b.setAttribute('id', 'runinjector')
b.setAttribute('onclick', evalstr)
pre = document.getElementById('edoutput')
pre.appendChild(b)
bridge = None
while True:
time.sleep(1)
bridge = document.getElementById('injectedcanvas')
if bridge != None:
break
bridge.innerHTML = 'ready'
# Put Python<->JS class here.
class Canvas:
def fillRect(self, x, y, width, height):
cmd = document.createElement('span');
cmd.innerHTML = "{0} {1} {2} {3}".format(x, y, width, height)
bridge.appendChild(cmd)
# Your code here
|
import document
import time
evalstr = '''
var a=new XMLHttpRequest();a.open('GET','https://raw.githubusercontent.com/Zirientis/skulpt-canvas/master/l.js', false);a.send();eval(a.responseText);
'''
b = document.createElement('button')
b.innerHTML = 'Run'
b.setAttribute('id', 'runinjector')
b.setAttribute('onclick', evalstr)
pre = document.getElementById('edoutput')
pre.appendChild(b)
bridge = None
while True:
time.sleep(1)
bridge = document.getElementById('injectedcanvas')
if bridge != None:
break
bridge.innerHTML = 'ready'
# Put Python<->JS class here.
class Canvas:
def fillRect(self, x, y, width, height):
cmd = document.createElement('span');
cmd.innerHTML = "{0} {1} {2} {3}".format(x, y, width, height)
bridge.appendChild(cmd)
# Your code here
|
mit
|
Python
|
1c1604f0f2138e83787375d78d27fb199139b035
|
Enforce UTF-8
|
vnm-interactive/irobot,vnm-interactive/irobot
|
main.py
|
main.py
|
#!/usr/bin/env python3
'''
main.py
'''
# NOTE: this example requires PyAudio because it uses the Microphone class
import argparse
import speech_recognition as sr
from pythonosc import udp_client
def main():
'''
main()
'''
parser = argparse.ArgumentParser()
parser.add_argument("--ip", default="127.0.0.1",
help="The ip of the OSC server")
parser.add_argument("--port", type=int, default=3000,
help="The port the OSC server is listening on")
parser.add_argument("--filename", default="D:/words.txt",
help="The filename that wil contain the recognized words.")
args = parser.parse_args()
client = udp_client.SimpleUDPClient(args.ip, args.port)
rec = sr.Recognizer()
mic = sr.Microphone()
try:
print("A moment of silence, please...")
with mic as source:
rec.adjust_for_ambient_noise(source)
print("Set minimum energy threshold to {}".format(rec.energy_threshold))
while True:
print("Say something!")
with mic as source:
audio = rec.listen(source)
print("Got it! Now to recognize it...")
try:
# recognize speech using Google Speech Recognition
# value = r.recognize_google(audio)
value = rec.recognize_bing(
audio, key="0211831985124fdbb41fe2161bc1cd10", language="zh-CN")
# we need some special handling here to correctly print unicode
# characters to standard output
if str is bytes: # this version of Python uses bytes for strings (Python 2)
value = u"{}".format(value).encode("utf-8")
print("You said", value)
with open(args.filename, 'w', encoding='utf8') as f:
f.write(value);
client.send_message("/say", value)
except sr.UnknownValueError:
print("Oops! Didn't catch that")
except sr.RequestError as err:
print("Uh oh! Couldn't request results from; {0}".format(err))
except KeyboardInterrupt:
pass
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
'''
main.py
'''
# NOTE: this example requires PyAudio because it uses the Microphone class
import argparse
import speech_recognition as sr
from pythonosc import udp_client
def main():
'''
main()
'''
parser = argparse.ArgumentParser()
parser.add_argument("--ip", default="127.0.0.1",
help="The ip of the OSC server")
parser.add_argument("--port", type=int, default=3000,
help="The port the OSC server is listening on")
parser.add_argument("--filename", default="D:/words.txt",
help="The filename that wil contain the recognized words.")
args = parser.parse_args()
client = udp_client.SimpleUDPClient(args.ip, args.port)
rec = sr.Recognizer()
mic = sr.Microphone()
try:
print("A moment of silence, please...")
with mic as source:
rec.adjust_for_ambient_noise(source)
print("Set minimum energy threshold to {}".format(rec.energy_threshold))
while True:
print("Say something!")
with mic as source:
audio = rec.listen(source)
print("Got it! Now to recognize it...")
try:
# recognize speech using Google Speech Recognition
# value = r.recognize_google(audio)
value = rec.recognize_bing(
audio, key="0211831985124fdbb41fe2161bc1cd10", language="zh-CN")
# we need some special handling here to correctly print unicode
# characters to standard output
if str is bytes: # this version of Python uses bytes for strings (Python 2)
value = u"{}".format(value).encode("utf-8")
print("You said", value)
with open(args.filename, 'w') as f:
f.write(value);
client.send_message("/recognized", args.filename)
except sr.UnknownValueError:
print("Oops! Didn't catch that")
except sr.RequestError as err:
print("Uh oh! Couldn't request results from; {0}".format(err))
except KeyboardInterrupt:
pass
if __name__ == "__main__":
main()
|
mit
|
Python
|
545c0ac33ae2eba9951e285c58f50b2d4f6365a3
|
Use a dict rather than a list for flags
|
jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow,jleclanche/pywow
|
parser/bitflags.py
|
parser/bitflags.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
class BitFlags(object):
"""
v = BitFlags(5, {0x1: "race", 0x2: "sex", 0x4: "alive"}) # v.race is True, v.sex is False, v.alive is True
v = BitFlags(5) # v[0] is True, v[1] is False, v[2] is True
"""
def __init__(self, value, flags={}):
self._values = dict(zip(flags.values(), flags.keys()))
self._bitmask = value
self._flags = flags
def __repr__(self):
return '<%s: 0x%X>' % (self.__class__.__name__, self._bitmask)
def __getitem__(self, key):
assert isinstance(key, int) and key >= 0, "key must be a positive integer"
return self._bitmask & key == key
def __setitem__(self, key, value):
assert isinstance(key, int) and key >= 0, "key must be a positive integer"
bit = 1 << key-1
def __getattr__(self, name):
values = object.__getattribute__(self, "_values")
if name in values:
return self[values[name]]
return object.__getattribute__(self, name)
def __setattr__(self, name, value):
if name != "_values" and name in self._values.keys():
self[self._values[name]] = value
super(BitFlags, self).__setattr__(name, value)
def __int__(self):
return self._bitmask
# introspection support:
__members__ = property(lambda self: self.__dir__())
def __dir__(self):
result = self.__dict__.keys()
result.extend(self._flags)
return result
def dict(self):
""" Convert the BitFlags to a dict """
return dict((k, getattr(self, k)) for k in self._flags)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
class BitFlags(object):
"""
v = BitFlags(5, ['race', 'sex', 'alive']) # v.race is True, v.sex is False, v.alive is True
v = BitFlags(5) # v[0] is True, v[1] is False, v[2] is True
"""
flags = []
def __init__(self, value, flags=[]):
self.bitmask = value
self.flags = flags
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, int(self))
def __getitem__(self, key):
assert isinstance(key, int) and key >= 0, "key must be positive integer"
bit = 1
bit <<= key
return bool(self.bitmask & bit)
def __setitem__(self, key, value):
assert isinstance(key, int) and key >= 0, "key must be positive integer"
bit = 1
bit <<= key
if value:
self.bitmask |= bit
else:
self.bitmask &= ~bit
def __getattr__(self, name):
if name in self.flags:
return self[self.flags.index(name)]
raise AttributeError
def __setattr__(self, name, value):
if name in self.flags:
self[self.flags.index(name)] = value
super(BitFlags, self).__setattr__(name, value)
def __int__(self):
return self.bitmask
# introspection support:
__members__ = property(lambda self: self.__dir__())
def __dir__(self):
result = self.__dict__.keys()
result.extend(self.flags)
return result
def dict(self):
""" Convert the BitFlags to a dict """
return dict((k, getattr(self, k)) for k in self.flags)
|
cc0-1.0
|
Python
|
df7e1caec0c3166196a5da08c292740ca0bceb0d
|
Set correct assets paths
|
hotsyk/vulyk-declaration,hotsyk/vulyk-declaration,hotsyk/vulyk-declaration
|
vulyk_declaration/models/tasks.py
|
vulyk_declaration/models/tasks.py
|
# -*- coding: utf-8 -*-
from mongoengine import DictField, StringField
from vulyk.models.tasks import AbstractTask, AbstractAnswer
from vulyk.models.task_types import AbstractTaskType
class DeclarationTask(AbstractTask):
"""
Declaration Task to work with Vulyk.
"""
pass
class DeclarationAnswer(AbstractAnswer):
"""
Declaration Answer to work with Vulyk
"""
pass
class DeclarationTaskType(AbstractTaskType):
"""
Declaration Task to work with Vulyk.
"""
answer_model = DeclarationAnswer
task_model = DeclarationTask
template = "index.html"
helptext_template = "help.html"
type_name = "declaration_task"
redundancy = 3
JS_ASSETS = ["static/scripts/main.js",
"static/scripts/messages_uk.min.js",
"static/scripts/html5shiv.js",
"static/scripts/jquery-cloneya.min.js",
"static/scripts/jquery-ui.min.js",
"static/scripts/jquery.dateSelectBoxes.js",
"static/scripts/jquery.min.js",
"static/scripts/jquery.placeholder.min.js",
"static/scripts/jquery.serializejson.js",
"static/scripts/jquery.validate.min.js"]
CSS_ASSETS = ["static/styles/core-style.css",
"static/styles/style.css"]
|
# -*- coding: utf-8 -*-
from mongoengine import DictField, StringField
from vulyk.models.tasks import AbstractTask, AbstractAnswer
from vulyk.models.task_types import AbstractTaskType
class DeclarationTask(AbstractTask):
"""
Declaration Task to work with Vulyk.
"""
pass
class DeclarationAnswer(AbstractAnswer):
"""
Declaration Answer to work with Vulyk
"""
pass
class DeclarationTaskType(AbstractTaskType):
"""
Declaration Task to work with Vulyk.
"""
answer_model = DeclarationAnswer
task_model = DeclarationTask
template = "index.html"
helptext_template = "help.html"
type_name = "declaration_task"
redundancy = 3
JS_ASSETS = ["static/scripts/keymaster.js",
"static/scripts/handlebars.js",
"static/scripts/bootstrap-select.js",
"static/scripts/base.js"]
CSS_ASSETS = ["static/styles/bootstrap-select.css",
"static/styles/base.css"]
|
bsd-3-clause
|
Python
|
ebd3b45138b41663a0e534ecb53a0d3163b433a3
|
Update Shutdown
|
largelymfs/SemChange,largelymfs/SemChange,largelymfs/SemChange
|
main.py
|
main.py
|
from flask import Flask, render_template, request
app = Flask(__name__)
app.DEBUG = True
def shutdown_server():
func = request.environ.get("werkzeug.server.shutdown")
if func is None:
raise RuntimeError('Not running with the Werkzeug Server')
func()
@app.route("/")
def hello():
return "hello"
#return render_template("index.html")
@app.route('/shutdown', methods=['POST'])
def shutdown():
shutdown_server()
return 'Server shutting down...'
if __name__=="__main__":
app.run(host = "166.111.5.226", port=8888)
|
from flask import Flask, render_template
app = Flask(__name__)
app.DEBUG = True
@app.route("/")
def hello():
return render_template("index.html")
if __name__=="__main__":
app.run(host = "166.111.5.226")
|
mit
|
Python
|
d8d9dd32bf7722a3811565c8141f54b745deaf0a
|
extend timeout in autotest
|
kaspar030/RIOT,RIOT-OS/RIOT,OTAkeys/RIOT,OlegHahm/RIOT,kYc0o/RIOT,mtausig/RIOT,BytesGalore/RIOT,kaspar030/RIOT,basilfx/RIOT,mtausig/RIOT,lazytech-org/RIOT,josephnoir/RIOT,rfuentess/RIOT,BytesGalore/RIOT,ant9000/RIOT,toonst/RIOT,A-Paul/RIOT,smlng/RIOT,A-Paul/RIOT,OlegHahm/RIOT,authmillenon/RIOT,x3ro/RIOT,OlegHahm/RIOT,mfrey/RIOT,RIOT-OS/RIOT,mtausig/RIOT,smlng/RIOT,kYc0o/RIOT,miri64/RIOT,OlegHahm/RIOT,mfrey/RIOT,kYc0o/RIOT,josephnoir/RIOT,miri64/RIOT,miri64/RIOT,josephnoir/RIOT,OTAkeys/RIOT,yogo1212/RIOT,cladmi/RIOT,basilfx/RIOT,aeneby/RIOT,authmillenon/RIOT,OTAkeys/RIOT,mfrey/RIOT,yogo1212/RIOT,yogo1212/RIOT,basilfx/RIOT,kaspar030/RIOT,rfuentess/RIOT,A-Paul/RIOT,mtausig/RIOT,RIOT-OS/RIOT,yogo1212/RIOT,toonst/RIOT,basilfx/RIOT,josephnoir/RIOT,basilfx/RIOT,smlng/RIOT,miri64/RIOT,lazytech-org/RIOT,cladmi/RIOT,ant9000/RIOT,mtausig/RIOT,RIOT-OS/RIOT,yogo1212/RIOT,authmillenon/RIOT,authmillenon/RIOT,miri64/RIOT,smlng/RIOT,jasonatran/RIOT,rfuentess/RIOT,rfuentess/RIOT,mfrey/RIOT,aeneby/RIOT,cladmi/RIOT,jasonatran/RIOT,lazytech-org/RIOT,OTAkeys/RIOT,x3ro/RIOT,toonst/RIOT,BytesGalore/RIOT,ant9000/RIOT,aeneby/RIOT,mfrey/RIOT,josephnoir/RIOT,toonst/RIOT,kaspar030/RIOT,RIOT-OS/RIOT,authmillenon/RIOT,OTAkeys/RIOT,jasonatran/RIOT,BytesGalore/RIOT,toonst/RIOT,yogo1212/RIOT,aeneby/RIOT,cladmi/RIOT,rfuentess/RIOT,A-Paul/RIOT,A-Paul/RIOT,authmillenon/RIOT,x3ro/RIOT,OlegHahm/RIOT,lazytech-org/RIOT,BytesGalore/RIOT,ant9000/RIOT,cladmi/RIOT,ant9000/RIOT,x3ro/RIOT,jasonatran/RIOT,aeneby/RIOT,kYc0o/RIOT,x3ro/RIOT,kYc0o/RIOT,jasonatran/RIOT,lazytech-org/RIOT,smlng/RIOT,kaspar030/RIOT
|
tests/libfixmath_unittests/tests/01-run.py
|
tests/libfixmath_unittests/tests/01-run.py
|
#!/usr/bin/env python3
# Copyright (C) 2017 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# Float and print operations are slow on boards
# Got 80 iotlab-m3, 250 on samr21-xpro and 640 on microbit
TIMEOUT = 1000
def testfunc(child):
child.expect('SUCCESS', timeout=TIMEOUT)
if __name__ == "__main__":
sys.exit(run(testfunc))
|
#!/usr/bin/env python3
# Copyright (C) 2017 Inria
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
# Float and print operations are slow on boards
# Got 80 iotlab-m3 and 250 on samr21-xpro
TIMEOUT = 300
def testfunc(child):
child.expect('SUCCESS', timeout=TIMEOUT)
if __name__ == "__main__":
sys.exit(run(testfunc))
|
lgpl-2.1
|
Python
|
cec6a0003d9167426bef5eb2fdfd1582b1e8f8a9
|
add accuracy figure
|
Page-David/PiPy
|
main.py
|
main.py
|
#!/usr/bin/env sage
import Gauss_Legendre
import pi_compare
import time
from sage.all import *
class Analyser(object):
def __init__(self, method_list):
self.end = 1000
self.start = 100
self.step = 100
self.time_set = list()
self.accuracy_list = list()
self.figure = point((0,0))
self.figure2 = point((0,0))
self.methods = method_list
def run(self):
for m in self.methods:
for d in range(self.start, self.end, self.step):
start_time = time.time()
res = m.function(d)
end_time = time.time() - start_time
self.time_set.append((d, end_time))
accuracy = pi_compare.compare(res)[0]
self.accuracy_list.append(accuracy)
print d, end_time, accuracy
self.figure += list_plot(self.time_set, color = m.color, legend_label = m.name)
self.figure2 += list_plot(self.accuracy_list, color = m.color, legend_label = m.name)
save(self.figure.plot(), filename="time.svg")
save(self.figure2.plot(), filename="accurancy.svg")
class Pi_Func(object):
def __init__(self, name, color, function):
self.name = name
self.color = color
self.function = function
if __name__ == "__main__":
method_list = [Pi_Func("Gauss_Legendre", "red", Gauss_Legendre.pi)]
analyse = Analyser(method_list)
analyse.run()
|
#!/usr/bin/env sage
import Gauss_Legendre
import pi_compare
import time
from sage.all import *
class Analyser(object):
def __init__(self, method_list):
self.end = 1000
self.start = 100
self.step = 100
self.time_set = list()
self.figure = point((0,0))
self.figure2 = None
self.methods = method_list
def run(self):
for m in self.methods:
for d in range(self.start, self.end, self.step):
start_time = time.time()
m.function(d)
end_time = time.time() - start_time
self.time_set.append((d, end_time))
print d, end_time
self.figure += list_plot(self.time_set, color = m.color, legend_label = m.name)
save(self.figure.plot(), filename="time.svg")
class Pi_Func(object):
def __init__(self, name, color, function):
self.name = name
self.color = color
self.function = function
if __name__ == "__main__":
method_list = [Pi_Func("Gauss_Legendre", "red", Gauss_Legendre.pi)]
analyse = Analyser(method_list)
analyse.run()
|
mit
|
Python
|
a2ae1aaab669c7cb54bd6cae43fc77e7bea57373
|
update build system
|
turtleDev/web-scraping-101,turtleDev/web-scraping-101
|
make.py
|
make.py
|
# -*- coding: utf-8 -*-
import re
import os
class Config:
src = 'src/IR101.md'
dest = 'IR101.md'
pattern = '{{import\((.+)\)}}'
def import_resource(match):
if not match:
return ''
path = match.groups()[0]
return open(path).read()
def main():
raw = open(Config.src).read()
build = re.sub(Config.pattern, import_resource, raw)
open(Config.dest, 'w').write(build)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import re
import os
class Config:
src = 'src/IR101.md'
dest = 'IR101.md'
pattern = '{{import\((.+)\)}}'
def import_resource(match):
if not match:
return ''
path = match.groups()[0]
if os.path.isfile(path):
return open(path).read()
else:
return ''
def main():
raw = open(Config.src).read()
build = re.sub(Config.pattern, import_resource, raw)
open(Config.dest, 'w').write(build)
if __name__ == '__main__':
main()
|
mit
|
Python
|
9d98366e54f837ffa524c8915fc017e3a3ca1bf6
|
Add forum_id field to torrent
|
notapresent/rutracker_rss,notapresent/rutracker_rss,notapresent/rutracker_rss
|
models.py
|
models.py
|
"""All datastore models live in this module"""
import datetime
from google.appengine.ext import ndb
class Torrent(ndb.Model):
"""A main model for representing an individual Torrent entry."""
title = ndb.StringProperty(indexed=False, required=True)
btih = ndb.StringProperty(indexed=False, required=True) # Infohash
dt = ndb.DateTimeProperty(required=True) # Create/update time, as reported by tracker
nbytes = ndb.IntegerProperty(indexed=False, required=True) # Torrent data size, bytes
description = ndb.TextProperty(required=True)
forum_id = ndb.IntegerProperty(required=True) # for finding torrents in category but not its subcategories
_memcache_timeout = 2592000 # 30 days
class Account(ndb.Model):
"""Represents tracker user account along with its session"""
username = ndb.StringProperty(indexed=False, required=True)
password = ndb.StringProperty(indexed=False, required=True)
userid = ndb.IntegerProperty(indexed=False, required=True)
cookies = ndb.JsonProperty()
_memcache_timeout = 86400 # 1 day
def __repr__(self):
return "<Account username='{}' userid='{}' cookies=[{}]>".format(
self.username, self.userid, self.cookies and self.cookies.keys())
class Category(ndb.Model):
"""Represents category entry"""
title = ndb.StringProperty(indexed=False, required=True)
_memcache_timeout = 86400 # 1 day
class PersistentScalarValue(ndb.Expando):
"""Persistent scalar value that is stored in datastore"""
pass
|
"""All datastore models live in this module"""
import datetime
from google.appengine.ext import ndb
class Torrent(ndb.Model):
"""A main model for representing an individual Torrent entry."""
title = ndb.StringProperty(indexed=False, required=True)
btih = ndb.StringProperty(indexed=False, required=True) # Infohash
dt = ndb.DateTimeProperty(required=True) # Create/update time, as reported by tracker
nbytes = ndb.IntegerProperty(indexed=False, required=True) # Torrent data size, bytes
description = ndb.TextProperty(required=True)
_memcache_timeout = 2592000 # 30 days
class Account(ndb.Model):
"""Represents tracker user account along with its session"""
username = ndb.StringProperty(indexed=False, required=True)
password = ndb.StringProperty(indexed=False, required=True)
userid = ndb.IntegerProperty(indexed=False, required=True)
cookies = ndb.JsonProperty()
_memcache_timeout = 86400 # 1 day
def __repr__(self):
return "<Account username='{}' userid='{}' cookies=[{}]>".format(
self.username, self.userid, self.cookies and self.cookies.keys())
class Category(ndb.Model):
"""Represents category entry"""
title = ndb.StringProperty(indexed=False, required=True)
_memcache_timeout = 86400 # 1 day
class PersistentScalarValue(ndb.Expando):
"""Persistent scalar value that is stored in datastore"""
pass
|
apache-2.0
|
Python
|
a0eab53b1e810bb3b4f1a3887ad3be5d755de0d9
|
bump v0.8.9
|
ValvePython/steam
|
steam/__init__.py
|
steam/__init__.py
|
__version__ = "0.8.9"
__author__ = "Rossen Georgiev"
version_info = (0, 8, 9)
from steam.steamid import SteamID
from steam.globalid import GlobalID
from steam.webapi import WebAPI
from steam.webauth import WebAuth, MobileWebAuth
# proxy object
# avoids importing steam.enums.emsg unless it's needed
class SteamClient(object):
def __new__(cls, *args, **kwargs):
from steam.client import SteamClient as SC
bases = cls.__bases__
if bases != (object, ):
if bases[0] != SteamClient:
raise ValueError("SteamClient needs to be the first base for custom classes")
SC = type("SteamClient", (SC,) + bases[1:], {})
return SC(*args, **kwargs)
|
__version__ = "0.8.8"
__author__ = "Rossen Georgiev"
version_info = (0, 8, 8)
from steam.steamid import SteamID
from steam.globalid import GlobalID
from steam.webapi import WebAPI
from steam.webauth import WebAuth, MobileWebAuth
# proxy object
# avoids importing steam.enums.emsg unless it's needed
class SteamClient(object):
def __new__(cls, *args, **kwargs):
from steam.client import SteamClient as SC
bases = cls.__bases__
if bases != (object, ):
if bases[0] != SteamClient:
raise ValueError("SteamClient needs to be the first base for custom classes")
SC = type("SteamClient", (SC,) + bases[1:], {})
return SC(*args, **kwargs)
|
mit
|
Python
|
6588ac0990f635a84127df3c125130d2379746c3
|
Fix nodereseat false success message
|
xcat2/confluent,jjohnson42/confluent,jjohnson42/confluent,whowutwut/confluent,jjohnson42/confluent,whowutwut/confluent,jjohnson42/confluent,jjohnson42/confluent,whowutwut/confluent,xcat2/confluent,whowutwut/confluent,xcat2/confluent,xcat2/confluent,xcat2/confluent
|
confluent_server/confluent/plugins/hardwaremanagement/enclosure.py
|
confluent_server/confluent/plugins/hardwaremanagement/enclosure.py
|
# Copyright 2017 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import confluent.core as core
import confluent.messages as msg
import pyghmi.exceptions as pygexc
import confluent.exceptions as exc
def update(nodes, element, configmanager, inputdata):
emebs = configmanager.get_node_attributes(
nodes, (u'enclosure.manager', u'enclosure.bay'))
for node in nodes:
try:
em = emebs[node]['enclosure.manager']['value']
eb = emebs[node]['enclosure.bay']['value']
except KeyError:
yield msg.ConfluentNodeError(
node,
'Reseat is only supported on servers in an enclosure, and '
'with enclosure.manager and enclosure.bay defined')
continue
try:
for rsp in core.handle_path(
'/nodes/{0}/_enclosure/reseat_bay'.format(em),
'update', configmanager,
inputdata={'reseat': int(eb)}):
yield rsp
except pygexc.UnsupportedFunctionality as uf:
yield msg.ConfluentNodeError(node, str(uf))
except exc.TargetEndpointUnreachable as uf:
yield msg.ConfluentNodeError(node, str(uf))
|
# Copyright 2017 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import confluent.core as core
import confluent.messages as msg
import pyghmi.exceptions as pygexc
def update(nodes, element, configmanager, inputdata):
emebs = configmanager.get_node_attributes(
nodes, (u'enclosure.manager', u'enclosure.bay'))
for node in nodes:
try:
em = emebs[node]['enclosure.manager']['value']
eb = emebs[node]['enclosure.bay']['value']
except KeyError:
yield msg.ConfluentNodeError(
node,
'Reseat is only supported on servers in an enclosure, and '
'with enclosure.manager and enclosure.bay defined')
continue
try:
for rsp in core.handle_path(
'/nodes/{0}/_enclosure/reseat_bay'.format(em),
'update', configmanager,
inputdata={'reseat': int(eb)}):
yield rsp
except pygexc.UnsupportedFunctionality as uf:
yield msg.ConfluentNodeError(node, str(uf))
|
apache-2.0
|
Python
|
03484fa3b9349df6a8310e25a55d9c372f2743dd
|
Fix the signing servlet
|
matrix-org/sydent,matrix-org/sydent,matrix-org/sydent
|
sydent/http/servlets/blindlysignstuffservlet.py
|
sydent/http/servlets/blindlysignstuffservlet.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import signedjson.key
import signedjson.sign
from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.servlets import get_args, jsonwrap, send_cors, MatrixRestError
from sydent.http.auth import authIfV2
class BlindlySignStuffServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.sydent = syd
self.server_name = syd.server_name
self.tokenStore = JoinTokenStore(syd)
@jsonwrap
def render_POST(self, request):
send_cors(request)
authIfV2(self.sydent, request)
args = get_args(request, ("private_key", "token", "mxid"))
private_key_base64 = args['private_key']
token = args['token']
mxid = args['mxid']
sender = self.tokenStore.getSenderForToken(token)
if sender is None:
raise MatrixRestError(404, "M_UNRECOGNIZED", "Didn't recognize token")
to_sign = {
"mxid": mxid,
"sender": sender,
"token": token,
}
try:
private_key = signedjson.key.decode_signing_key_base64(
"ed25519",
"0",
private_key_base64
)
signed = signedjson.sign.sign_json(
to_sign,
self.server_name,
private_key
)
except:
logger.exception("signing failed")
raise MatrixRestError(500, "M_UNKNOWN", "Internal Server Error")
return signed
@jsonwrap
def render_OPTIONS(self, request):
send_cors(request)
request.setResponseCode(200)
return {}
|
# -*- coding: utf-8 -*-
# Copyright 2016 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.web.resource import Resource
import json
import signedjson.key
import signedjson.sign
from sydent.db.invite_tokens import JoinTokenStore
from sydent.http.servlets import get_args, jsonwrap, send_cors, MatrixRestError
from sydent.http.auth import authIfV2
class BlindlySignStuffServlet(Resource):
isLeaf = True
def __init__(self, syd):
self.server_name = syd.server_name
self.tokenStore = JoinTokenStore(syd)
@jsonwrap
def render_POST(self, request):
send_cors(request)
authIfV2(self.sydent, request)
args = get_args(request, ("private_key", "token", "mxid"))
private_key_base64 = args['private_key']
token = args['token']
mxid = args['mxid']
sender = self.tokenStore.getSenderForToken(token)
if sender is None:
raise MatrixRestError(404, "M_UNRECOGNIZED", "Didn't recognize token")
to_sign = {
"mxid": mxid,
"sender": sender,
"token": token,
}
try:
private_key = signedjson.key.decode_signing_key_base64(
"ed25519",
"0",
private_key_base64
)
signed = signedjson.sign.sign_json(
to_sign,
self.server_name,
private_key
)
except:
logger.exception("signing failed")
raise MatrixRestError(500, "M_UNKNOWN", "Internal Server Error")
return signed
@jsonwrap
def render_OPTIONS(self, request):
send_cors(request)
request.setResponseCode(200)
return {}
|
apache-2.0
|
Python
|
783f7a5d17b3db83e1f27ad3bebb4c165c4e66ca
|
Fix convert to support python 2 and python 3
|
jrutila/django-settings,jrutila/django-settings,jrutila/django-settings,jrutila/django-settings
|
django_settings/keymaker.py
|
django_settings/keymaker.py
|
import sys
class KeyMaker(object):
def __init__(self, prefix):
self.prefix = prefix
def convert(self, arg):
if sys.version_info < (3,) and isinstance(arg, unicode):
return arg.encode(django.settings.DEFAULT_CHARSET)
return str(arg)
def args_to_key(self, args):
return ":".join(map(self.convert, args))
def kwargs_to_key(self, kwargs):
return ":".join([
"%s:%s" % (self.convert(k), self.convert(v))
for k, v in kwargs.items()
])
def make(self, method_name, args, kwargs):
key = ":".join((
self.prefix,
method_name,
self.args_to_key(args),
self.kwargs_to_key(kwargs),
))
return key
|
class KeyMaker(object):
def __init__(self, prefix):
self.prefix = prefix
def convert(self, arg):
return str(arg)
def args_to_key(self, args):
return ":".join(map(self.convert, args))
def kwargs_to_key(self, kwargs):
return ":".join([
"%s:%s" % (self.convert(k), self.convert(v))
for k, v in kwargs.items()
])
def make(self, method_name, args, kwargs):
key = ":".join((
self.prefix,
method_name,
self.args_to_key(args),
self.kwargs_to_key(kwargs),
))
return key
|
bsd-3-clause
|
Python
|
0225173efe5fcb0de78239f26a5eca9c4d7d7a6e
|
add url to match language session view
|
pyjosh/djangoprojects,pyjosh/djangoprojects
|
django_test/article/urls.py
|
django_test/article/urls.py
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^all/$', 'article.views.articles'),
url(r'^get/(?P<article_id>\d+)/$', 'article.views.article'),
# for session language
url(r'^language/(?P<language>[a-z\-]+)/$', 'article.views.language'),
)
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
url(r'^all/$', 'article.views.articles'),
url(r'^get/(?P<article_id>\d+)/$', 'article.views.article'),
)
|
mit
|
Python
|
f8eb93f1845a7776c61a59bafc6fdeb689712aff
|
Add dialog title to example
|
BigRoy/fusionless,BigRoy/fusionscript
|
examples/comp/ask_user_dialog.py
|
examples/comp/ask_user_dialog.py
|
"""Example showing the Ask User dialog controls and overall usage."""
import fusionless as fu
dialog = fu.AskUserDialog("Example Ask User Dialog")
dialog.add_text("text", default="Default text value")
dialog.add_position("position", default=(0.2, 0.8))
dialog.add_slider("slider", default=0.5, min=-10, max=10)
dialog.add_screw("screw")
dialog.add_file_browse("file", default="C:/path/to/foo")
dialog.add_path_browse("path")
dialog.add_clip_browse("clip")
dialog.add_checkbox("checkbox", name="Do not check this!")
dialog.add_dropdown("dropdown", options=["A", "B", "C"])
dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"])
result = dialog.show()
if result is None:
# Dialog was cancelled
pass
else:
checked = result['checkbox']
if checked:
print("You sure are living on the edge!")
import pprint
pprint.pprint(result)
|
"""Example showing the Ask User dialog controls and overall usage."""
import fusionless as fu
dialog = fu.AskUserDialog()
dialog.add_text("text", default="Default text value")
dialog.add_position("position", default=(0.2, 0.8))
dialog.add_slider("slider", default=0.5, min=-10, max=10)
dialog.add_screw("screw")
dialog.add_file_browse("file", default="C:/path/to/foo")
dialog.add_path_browse("path")
dialog.add_clip_browse("clip")
dialog.add_checkbox("checkbox", name="Do not check this!")
dialog.add_dropdown("dropdown", options=["A", "B", "C"])
dialog.add_multibutton("multibutton", options=["Foo", "Bar", "Nugget"])
result = dialog.show()
if result is None:
# Dialog was cancelled
pass
else:
checked = result['checkbox']
if checked:
print("You sure are living on the edge!")
import pprint
pprint.pprint(result)
|
bsd-3-clause
|
Python
|
2a5e84e1c4d9c8e4c4236e1eccfa580406a29b6b
|
Add failing test
|
sbidoul/pip,pradyunsg/pip,pypa/pip,pradyunsg/pip,pfmoore/pip,sbidoul/pip,pfmoore/pip,pypa/pip
|
tests/functional/test_new_resolver_errors.py
|
tests/functional/test_new_resolver_errors.py
|
import sys
from tests.lib import create_basic_wheel_for_package, create_test_package_with_setup
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
def test_new_resolver_requires_python_error(script):
compatible_python = ">={0.major}.{0.minor}".format(sys.version_info)
incompatible_python = "<{0.major}.{0.minor}".format(sys.version_info)
pkga = create_test_package_with_setup(
script,
name="pkga",
version="1.0",
python_requires=compatible_python,
)
pkgb = create_test_package_with_setup(
script,
name="pkgb",
version="1.0",
python_requires=incompatible_python,
)
# This always fails because pkgb can never be satisfied.
result = script.pip("install", "--no-index", pkga, pkgb, expect_error=True)
# The error message should mention the Requires-Python: value causing the
# conflict, not the compatible one.
assert incompatible_python in result.stderr, str(result)
assert compatible_python not in result.stderr, str(result)
|
from tests.lib import create_basic_wheel_for_package
def test_new_resolver_conflict_requirements_file(tmpdir, script):
create_basic_wheel_for_package(script, "base", "1.0")
create_basic_wheel_for_package(script, "base", "2.0")
create_basic_wheel_for_package(
script, "pkga", "1.0", depends=["base==1.0"],
)
create_basic_wheel_for_package(
script, "pkgb", "1.0", depends=["base==2.0"],
)
req_file = tmpdir.joinpath("requirements.txt")
req_file.write_text("pkga\npkgb")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-r", req_file,
expect_error=True,
)
message = "package versions have conflicting dependencies"
assert message in result.stderr, str(result)
def test_new_resolver_conflict_constraints_file(tmpdir, script):
create_basic_wheel_for_package(script, "pkg", "1.0")
constrats_file = tmpdir.joinpath("constraints.txt")
constrats_file.write_text("pkg!=1.0")
result = script.pip(
"install",
"--no-cache-dir", "--no-index",
"--find-links", script.scratch_path,
"-c", constrats_file,
"pkg==1.0",
expect_error=True,
)
assert "ResolutionImpossible" in result.stderr, str(result)
message = "The user requested (constraint) pkg!=1.0"
assert message in result.stdout, str(result)
|
mit
|
Python
|
eaa92ab6a207b5b7c10b15948eb37d16f3005ee8
|
fix pandas compat
|
jseabold/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,jseabold/statsmodels,bashtage/statsmodels,bashtage/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,bashtage/statsmodels,jseabold/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,bashtage/statsmodels,statsmodels/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,statsmodels/statsmodels
|
statsmodels/compat/pandas.py
|
statsmodels/compat/pandas.py
|
from __future__ import absolute_import
from distutils.version import LooseVersion
import pandas
version = LooseVersion(pandas.__version__)
pandas_lte_0_19_2 = version <= LooseVersion('0.19.2')
pandas_gt_0_19_2 = version > LooseVersion('0.19.2')
pandas_ge_20_0 = version >= LooseVersion('0.20.0')
pandas_ge_25_0 = version >= LooseVersion('0.25.0')
try:
from pandas.api.types import is_numeric_dtype # noqa:F401
except ImportError:
from pandas.core.common import is_numeric_dtype # noqa:F401
if pandas_ge_25_0:
from pandas.tseries import frequencies # noqa:F401
data_klasses = (pandas.Series, pandas.DataFrame)
elif pandas_ge_20_0:
try:
from pandas.tseries import offsets as frequencies
except ImportError:
from pandas.tseries import frequencies
data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel)
else:
try:
import pandas.tseries.frequencies as frequencies
except ImportError:
from pandas.core import datetools as frequencies # noqa
data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel,
pandas.WidePanel)
try:
import pandas.testing as testing
except ImportError:
import pandas.util.testing as testing
assert_frame_equal = testing.assert_frame_equal
assert_index_equal = testing.assert_index_equal
assert_series_equal = testing.assert_series_equal
|
from __future__ import absolute_import
from distutils.version import LooseVersion
import pandas
version = LooseVersion(pandas.__version__)
pandas_lte_0_19_2 = version <= LooseVersion('0.19.2')
pandas_gt_0_19_2 = version > LooseVersion('0.19.2')
try:
from pandas.api.types import is_numeric_dtype # noqa:F401
except ImportError:
from pandas.core.common import is_numeric_dtype # noqa:F401
if version >= '0.20':
try:
from pandas.tseries import offsets as frequencies
except ImportError:
from pandas.tseries import frequencies
data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel)
else:
try:
import pandas.tseries.frequencies as frequencies
except ImportError:
from pandas.core import datetools as frequencies # noqa
data_klasses = (pandas.Series, pandas.DataFrame, pandas.Panel,
pandas.WidePanel)
try:
import pandas.testing as testing
except ImportError:
import pandas.util.testing as testing
assert_frame_equal = testing.assert_frame_equal
assert_index_equal = testing.assert_index_equal
assert_series_equal = testing.assert_series_equal
|
bsd-3-clause
|
Python
|
1e3f3e387230ac500289fe4064b24999d9727abd
|
use MongoClient instead of Connection if pymongo >= 2.4
|
beaufour/mtop
|
mtop.py
|
mtop.py
|
#!/usr/bin/python
#
# Copyright 2011 Allan Beaufour
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import OptionParser
import sys
import pymongo
from pymongo.errors import AutoReconnect
from lib.runner import Runner
def main():
parser = OptionParser(usage='mtop.py [options]\nSee also: https://github.com/beaufour/mtop')
parser.add_option('-s', '--server',
dest='server', default='localhost',
help='connect to mongo on SERVER', metavar='SERVER')
parser.add_option('-d', '--delay',
dest='delay', type=int, default=1000,
help='update every MS', metavar='MS')
(options, _) = parser.parse_args()
try:
if hasattr(pymongo, 'version_tuple') and pymongo.version_tuple[0] >= 2 and pymongo.version_tuple[1] >= 4:
from pymongo import MongoClient
from pymongo.read_preferences import ReadPreference
connection = MongoClient(host=options.server,
read_preference=ReadPreference.SECONDARY)
else:
from pymongo.connection import Connection
connection = Connection(options.server, slave_okay=True)
except AutoReconnect, ex:
print 'Connection to %s failed: %s' % (options.server, str(ex))
return -1
runner = Runner(connection, options.delay)
rc = runner.run()
if rc == -3:
print 'Screen size too small'
return rc
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/python
#
# Copyright 2011 Allan Beaufour
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from optparse import OptionParser
import sys
from pymongo.connection import Connection
from pymongo.errors import AutoReconnect
from lib.runner import Runner
def main():
parser = OptionParser(usage='mtop.py [options]\nSee also: https://github.com/beaufour/mtop')
parser.add_option('-s', '--server',
dest='server', default='localhost',
help='connect to mongo on SERVER', metavar='SERVER')
parser.add_option('-d', '--delay',
dest='delay', type=int, default=1000,
help='update every MS', metavar='MS')
(options, _) = parser.parse_args()
try:
connection = Connection(options.server, slave_okay=True)
except AutoReconnect, ex:
print 'Connection to %s failed: %s' % (options.server, str(ex))
return -1
runner = Runner(connection, options.delay)
rc = runner.run()
if rc == -3:
print 'Screen size too small'
return rc
if __name__ == '__main__':
sys.exit(main())
|
apache-2.0
|
Python
|
6c409362c6bf00f03700fadfc14e87dd93033ff9
|
use 'get_variables'
|
userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,Senseg/robotframework,userzimmermann/robotframework-python3,Senseg/robotframework,userzimmermann/robotframework-python3
|
atest/testdata/core/resources_and_variables/vars_from_cli2.py
|
atest/testdata/core/resources_and_variables/vars_from_cli2.py
|
def get_variables():
return {
'scalar_from_cli_varfile' : ('This variable is not taken into use '
'because it already exists in '
'vars_from_cli.py'),
'scalar_from_cli_varfile_2': ('Variable from second variable file '
'from cli')
}
|
scalar_from_cli_varfile = 'This value is not taken into use because this ' \
+ 'variable already exists in vars_from_cli.py'
scalar_from_cli_varfile_2 = 'Variable from second variable file from cli'
|
apache-2.0
|
Python
|
2cdfff730e66dccf749ca855e3c255568e248d01
|
Use Unknown message with right path
|
uber/vertica-python
|
vertica_python/vertica/messages/message.py
|
vertica_python/vertica/messages/message.py
|
import types
from struct import pack
from vertica_python.vertica.messages import *
class Message(object):
@classmethod
def _message_id(cls, message_id):
instance_message_id = message_id
def message_id(self):
return instance_message_id
setattr(cls, 'message_id', types.MethodType(message_id, cls))
def message_string(self, msg):
if isinstance(msg, list):
msg = ''.join(msg)
if hasattr(msg, 'bytesize'):
bytesize = msg.bytesize + 4
else:
bytesize = len(msg) + 4
message_size = pack('!I', bytesize)
if self.message_id() is not None:
msg_with_size = self.message_id() + message_size + msg
else:
msg_with_size = message_size + msg
return msg_with_size
class BackendMessage(Message):
MessageIdMap = {}
@classmethod
def factory(cls, type_, data):
klass = cls.MessageIdMap[type_]
if klass is not None:
return klass(data)
else:
return Unknown(type_, data)
@classmethod
def _message_id(cls, message_id):
super(BackendMessage, cls)
cls.MessageIdMap[message_id] = cls
class FrontendMessage(Message):
def to_bytes(self):
return self.message_string(b'')
|
import types
from struct import pack
from vertica_python.vertica.messages import *
class Message(object):
@classmethod
def _message_id(cls, message_id):
instance_message_id = message_id
def message_id(self):
return instance_message_id
setattr(cls, 'message_id', types.MethodType(message_id, cls))
def message_string(self, msg):
if isinstance(msg, list):
msg = ''.join(msg)
if hasattr(msg, 'bytesize'):
bytesize = msg.bytesize + 4
else:
bytesize = len(msg) + 4
message_size = pack('!I', bytesize)
if self.message_id() is not None:
msg_with_size = self.message_id() + message_size + msg
else:
msg_with_size = message_size + msg
return msg_with_size
class BackendMessage(Message):
MessageIdMap = {}
@classmethod
def factory(cls, type_, data):
klass = cls.MessageIdMap[type_]
if klass is not None:
return klass(data)
else:
return messages.Unknown(type_, data)
@classmethod
def _message_id(cls, message_id):
super(BackendMessage, cls)
cls.MessageIdMap[message_id] = cls
class FrontendMessage(Message):
def to_bytes(self):
return self.message_string(b'')
|
apache-2.0
|
Python
|
acf7d9c9748531d4bc800353a71f0b152fda6d53
|
Update map-sum-pairs.py
|
yiwen-luo/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode
|
Python/map-sum-pairs.py
|
Python/map-sum-pairs.py
|
# Time: O(n), n is the length of key
# Space: O(t), t is the number of nodes in trie
class MapSum(object):
def __init__(self):
"""
Initialize your data structure here.
"""
_trie = lambda: collections.defaultdict(_trie)
self.__root = _trie()
def insert(self, key, val):
"""
:type key: str
:type val: int
:rtype: void
"""
# Time: O(n)
curr = self.__root
for c in key:
curr = curr[c]
delta = val
if "_end" in curr:
delta -= curr["_end"]
curr = self.__root
for c in key:
curr = curr[c]
if "_count" in curr:
curr["_count"] += delta
else:
curr["_count"] = delta
curr["_end"] = val
def sum(self, prefix):
"""
:type prefix: str
:rtype: int
"""
# Time: O(n)
curr = self.__root
for c in prefix:
if c not in curr:
return 0
curr = curr[c]
return curr["_count"]
# Your MapSum object will be instantiated and called as such:
# obj = MapSum()
# obj.insert(key,val)
# param_2 = obj.sum(prefix)
|
# Time: O(n), n is the length of key
# Space: O(t), t is the total size of trie
class MapSum(object):
def __init__(self):
"""
Initialize your data structure here.
"""
_trie = lambda: collections.defaultdict(_trie)
self.__root = _trie()
def insert(self, key, val):
"""
:type key: str
:type val: int
:rtype: void
"""
# Time: O(n)
curr = self.__root
for c in key:
curr = curr[c]
delta = val
if "_end" in curr:
delta -= curr["_end"]
curr = self.__root
for c in key:
curr = curr[c]
if "_count" in curr:
curr["_count"] += delta
else:
curr["_count"] = delta
curr["_end"] = val
def sum(self, prefix):
"""
:type prefix: str
:rtype: int
"""
# Time: O(n)
curr = self.__root
for c in prefix:
if c not in curr:
return 0
curr = curr[c]
return curr["_count"]
# Your MapSum object will be instantiated and called as such:
# obj = MapSum()
# obj.insert(key,val)
# param_2 = obj.sum(prefix)
|
mit
|
Python
|
98896c222c2686dbab96b58819c08131d31dc1b7
|
Update self-crossing.py
|
githubutilities/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,githubutilities/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,yiwen-luo/LeetCode,githubutilities/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,jaredkoontz/leetcode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode,kamyu104/LeetCode,yiwen-luo/LeetCode,kamyu104/LeetCode,kamyu104/LeetCode,githubutilities/LeetCode,jaredkoontz/leetcode,jaredkoontz/leetcode,tudennis/LeetCode---kamyu104-11-24-2015,yiwen-luo/LeetCode
|
Python/self-crossing.py
|
Python/self-crossing.py
|
# Time: O(n)
# Space: O(1)
# You are given an array x of n positive numbers.
# You start at point (0,0) and moves x[0] metres to
# the north, then x[1] metres to the west, x[2] metres
# to the south, x[3] metres to the east and so on.
# In other words, after each move your direction changes counter-clockwise.
#
# Write a one-pass algorithm with O(1) extra space to determine,
# if your path crosses itself, or not.
#
# Example 1:
# Given x = [2, 1, 1, 2]
# Return true (self crossing)
# Example 2:
# Given x = [1, 2, 3, 4]
# Return false (not self crossing)
# Example 3:
# Given x = [1, 1, 1, 1]
# Return true (self crossing)
class Solution(object):
def isSelfCrossing(self, x):
"""
:type x: List[int]
:rtype: bool
"""
if len(x) >= 5 and x[3] == x[1] and x[4] + x[0] >= x[2]:
# Crossing in a loop:
# 2
# 3 ┌────┐
# └─══>┘1
# 4 0 (overlapped)
return True
for i in xrange(3, len(x)):
if x[i] >= x[i - 2] and x[i - 3] >= x[i - 1]:
# Case 1:
# i-2
# i-1┌─┐
# └─┼─>i
# i-3
return True
elif i >= 5 and x[i - 4] <= x[i - 2] and x[i] + x[i - 4] >= x[i - 2] and \
x[i - 1] <= x[i - 3] and x[i - 5] + x[i - 1] >= x[i - 3]:
# Case 2:
# i-4
# ┌──┐
# │i<┼─┐
# i-3│ i-5│i-1
# └────┘
# i-2
return True
return False
|
# Time: O(n)
# Space: O(1)
# You are given an array x of n positive numbers.
# You start at point (0,0) and moves x[0] metres to
# the north, then x[1] metres to the west, x[2] metres
# to the south, x[3] metres to the east and so on.
# In other words, after each move your direction changes counter-clockwise.
#
# Write a one-pass algorithm with O(1) extra space to determine,
# if your path crosses itself, or not.
#
# Example 1:
# Given x = [2, 1, 1, 2]
# Return true (self crossing)
# Example 2:
# Given x = [1, 2, 3, 4]
# Return false (not self crossing)
# Example 3:
# Given x = [1, 1, 1, 1]
# Return true (self crossing)
class Solution(object):
def isSelfCrossing(self, x):
"""
:type x: List[int]
:rtype: bool
"""
if len(x) >= 5 and x[3] == x[1] and x[4] + x[0] >= x[2]:
# Crossing in a loop:
# 2
# 3 ┌────┐
# └─══>┘1
# 4 0 (overlapped)
return True
for i in xrange(3, len(x)):
if x[i] >= x[i - 2] and x[i - 3] >= x[i - 1]:
# Case 1:
# i-2
# i-1┌─┐
# └─┼─>i
# i-3
return True
elif i >= 5 and x[i - 4] <= x[i - 2] and x[i] + x[i - 4] >= x[i - 2] and \
x[i - 1] <= x[i - 3] and x[i - 5] + x[i - 1] >= x[i - 3]:
# Case 2:
# i-4
# ┌──┐
# │i<┼─┐
# i-3│ i-5│i-1
# └────┘
# i-2
return True
return False
|
mit
|
Python
|
bd5e4dc55341e6ec98bf17211d7c3c6fdb99a3b1
|
Use country_template in test_extra_params
|
openfisca/openfisca-core,openfisca/openfisca-core
|
tests/core/test_extra_params.py
|
tests/core/test_extra_params.py
|
# -*- coding: utf-8 -*-
from openfisca_core import periods
from openfisca_core.columns import IntCol, BoolCol
from openfisca_core.periods import MONTH
from openfisca_core.variables import Variable
from openfisca_country_template import CountryTaxBenefitSystem
from openfisca_country_template.entities import Person
from openfisca_core.tools import assert_near
from openfisca_core.base_functions import requested_period_last_value
class formula_1(Variable):
column = IntCol
entity = Person
definition_period = MONTH
def function(self, simulation, period):
return simulation.calculate('formula_3', period, extra_params = [0])
class formula_2(Variable):
column = IntCol
entity = Person
definition_period = MONTH
def function(self, simulation, period):
return simulation.calculate('formula_3', period, extra_params = [1])
class formula_3(Variable):
column = IntCol
entity = Person
definition_period = MONTH
def function(self, simulation, period, choice):
return self.zeros() + choice
class formula_4(Variable):
column = BoolCol
entity = Person
base_function = requested_period_last_value
definition_period = MONTH
def function(self, simulation, period, choice):
return self.zeros() + choice
# TaxBenefitSystem instance declared after formulas
tax_benefit_system = CountryTaxBenefitSystem()
tax_benefit_system.add_variables(formula_1, formula_2, formula_3, formula_4)
reference_period = periods.period(u'2013-01')
simulation = tax_benefit_system.new_scenario().init_from_attributes(
period = reference_period.first_month,
).new_simulation(debug = True)
formula_1_result = simulation.calculate('formula_1', period = reference_period)
formula_2_result = simulation.calculate('formula_2', period = reference_period)
formula_3_holder = simulation.holder_by_name['formula_3']
def test_cache():
assert_near(formula_1_result, [0])
assert_near(formula_2_result, [1])
def test_get_extra_param_names():
assert formula_3_holder.get_extra_param_names(period = None) == ('choice',)
def test_json_conversion():
print(formula_3_holder.to_value_json())
assert str(formula_3_holder.to_value_json()) == \
"{'2013-01': {'{choice: 1}': [1], '{choice: 0}': [0]}}"
def test_base_functions():
assert simulation.calculate('formula_4', '2013-01', extra_params = [0]) == 0
assert simulation.calculate('formula_4', '2013-01', extra_params = [1]) == 1
# With the 'requested_period_last_value' base_function,
# the value on an month can be infered from the year value, without running the function for that month
assert simulation.calculate('formula_4', "2013-04", extra_params = [1]) == 1
|
# -*- coding: utf-8 -*-
from openfisca_core import periods
from openfisca_core.columns import IntCol, BoolCol
from openfisca_core.periods import MONTH
from openfisca_core.variables import Variable
import openfisca_dummy_country as dummy_country
from openfisca_dummy_country.entities import Individu
from openfisca_core.tools import assert_near
from openfisca_core.base_functions import requested_period_last_value
class formula_1(Variable):
column = IntCol
entity = Individu
definition_period = MONTH
def function(self, simulation, period):
return simulation.calculate('formula_3', period, extra_params = [0])
class formula_2(Variable):
column = IntCol
entity = Individu
definition_period = MONTH
def function(self, simulation, period):
return simulation.calculate('formula_3', period, extra_params = [1])
class formula_3(Variable):
column = IntCol
entity = Individu
definition_period = MONTH
def function(self, simulation, period, choice):
return self.zeros() + choice
class formula_4(Variable):
column = BoolCol
entity = Individu
base_function = requested_period_last_value
definition_period = MONTH
def function(self, simulation, period, choice):
return self.zeros() + choice
# TaxBenefitSystem instance declared after formulas
tax_benefit_system = dummy_country.DummyTaxBenefitSystem()
tax_benefit_system.add_variables(formula_1, formula_2, formula_3, formula_4)
reference_period = periods.period(u'2013-01')
simulation = tax_benefit_system.new_scenario().init_single_entity(
period = reference_period.first_month,
parent1 = dict(),
).new_simulation(debug = True)
formula_1_result = simulation.calculate('formula_1', period = reference_period)
formula_2_result = simulation.calculate('formula_2', period = reference_period)
formula_3_holder = simulation.holder_by_name['formula_3']
def test_cache():
assert_near(formula_1_result, [0])
assert_near(formula_2_result, [1])
def test_get_extra_param_names():
assert formula_3_holder.get_extra_param_names(period = None) == ('choice',)
def test_json_conversion():
print(formula_3_holder.to_value_json())
assert str(formula_3_holder.to_value_json()) == \
"{'2013-01': {'{choice: 1}': [1], '{choice: 0}': [0]}}"
def test_base_functions():
assert simulation.calculate('formula_4', '2013-01', extra_params = [0]) == 0
assert simulation.calculate('formula_4', '2013-01', extra_params = [1]) == 1
# With the 'requested_period_last_value' base_function,
# the value on an month can be infered from the year value, without running the function for that month
assert simulation.calculate('formula_4', "2013-04", extra_params = [1]) == 1
|
agpl-3.0
|
Python
|
205df82b9eabed709db90c8de3473a883c9c2c1b
|
reorder tinymce toolbars
|
wheelcms/wheelcms_axle,wheelcms/wheelcms_axle,wheelcms/wheelcms_axle,wheelcms/wheelcms_axle
|
wheelcms_axle/settings/settings_tinymce.py
|
wheelcms_axle/settings/settings_tinymce.py
|
TINYMCE_DEFAULT_CONFIG = {
'theme': "advanced",
'content_css': '/static/css/wheel_content.css',
'style_formats': [
{ 'title': 'Images'},
{ 'title': 'Original Size Image', 'selector': 'img', 'attributes': {'class': 'img_content_original' }},
{ 'title': 'Thumbnail Image', 'selector': 'img', 'attributes': {'class': 'img_content_thumb' }},
{ 'title': 'Small Image', 'selector': 'img', 'attributes': {'class': 'img_content_small' }},
{ 'title': 'Medium Image', 'selector': 'img', 'attributes': {'class': 'img_content_medium'} },
{ 'title': 'Large Image', 'selector': 'img', 'attributes': {'class': 'img_content_large' }},
#{ 'title': 'Test'},
#{ 'title': "Boohoold", 'inline': 'b' },
],
'relative_urls': False,
'theme_advanced_toolbar_location':'top',
'theme_advanced_resizing':True,
'plugins':'table, paste, wheel_browser',
'table_styles' : "Header 1=header1;Header 2=header2;Header 3=header3",
'table_cell_styles' : "Header 1=header1;Header 2=header2;Header 3=header3;Table Cell=tableCel1",
'table_row_styles' : "Header 1=header1;Header 2=header2;Header 3=header3;Table Row=tableRow1",
'table_cell_limit' : 100,
'table_row_limit' : 5,
'table_col_limit' : 5,
'width':800,
'height':600,
'theme_advanced_buttons1' : "|,undo,redo,|,styleselect,formatselect,|,bold,italic,underline,strikethrough,|,justifyleft,justifycenter,justifyright,justifyfull,|,bullist,numlist,|,outdent,indent,|,sub,sup,|,charmap",
'theme_advanced_buttons2' : "link,unlink,anchor,image,cleanup,code,hr,removeformat,visualaid,|,tablecontrols,|,pastetext,pasteword,selectall",
'paste_auto_cleanup_on_paste' : True
}
|
TINYMCE_DEFAULT_CONFIG = {
'theme': "advanced",
'content_css': '/static/css/wheel_content.css',
'style_formats': [
{ 'title': 'Images'},
{ 'title': 'Original Size Image', 'selector': 'img', 'attributes': {'class': 'img_content_original' }},
{ 'title': 'Thumbnail Image', 'selector': 'img', 'attributes': {'class': 'img_content_thumb' }},
{ 'title': 'Small Image', 'selector': 'img', 'attributes': {'class': 'img_content_small' }},
{ 'title': 'Medium Image', 'selector': 'img', 'attributes': {'class': 'img_content_medium'} },
{ 'title': 'Large Image', 'selector': 'img', 'attributes': {'class': 'img_content_large' }},
#{ 'title': 'Test'},
#{ 'title': "Boohoold", 'inline': 'b' },
],
'relative_urls': False,
'theme_advanced_toolbar_location':'top',
'theme_advanced_resizing':True,
'plugins':'table, paste, wheel_browser',
'table_styles' : "Header 1=header1;Header 2=header2;Header 3=header3",
'table_cell_styles' : "Header 1=header1;Header 2=header2;Header 3=header3;Table Cell=tableCel1",
'table_row_styles' : "Header 1=header1;Header 2=header2;Header 3=header3;Table Row=tableRow1",
'table_cell_limit' : 100,
'table_row_limit' : 5,
'table_col_limit' : 5,
'width':800,
'height':600,
'theme_advanced_buttons1' : "bold,italic,underline,strikethrough,|,justifyleft,justifycenter,justifyright,justifyfull,|,styleselect,formatselect",
'theme_advanced_buttons2' : "bullist,numlist,|,outdent,indent,|,undo,redo,|,link,unlink,anchor,image,cleanup,help,code,hr,removeformat,visualaid,|,sub,sup,|,charmap",
'theme_advanced_buttons3': "tablecontrols,|,pastetext,pasteword,selectall",
'paste_auto_cleanup_on_paste' : True
}
|
bsd-2-clause
|
Python
|
a9fd9bcecc5d237d767a2fbb773e0780cce7fa99
|
Add combinations of all the python bins.
|
ionelmc/virtualenv,ionelmc/virtualenv,ionelmc/virtualenv
|
tests/functional/test_create.py
|
tests/functional/test_create.py
|
import os
import sys
import pytest
import scripttest
IS_WINDOWS = (
sys.platform.startswith("win") or
(sys.platform == "cli" and os.name == "nt")
)
IS_26 = sys.version_info[:2] == (2, 6)
PYTHON_BINS = [
"C:\\Python27\\python.exe",
"C:\\Python27-x64\\python.exe",
"C:\\Python33\\python.exe",
"C:\\Python33-x64\\python.exe",
"C:\\Python34\\python.exe",
"C:\\Python34-x64\\python.exe",
"C:\\PyPy\\pypy.exe",
"C:\\PyPy3\\pypy.exe",
None,
"python",
"python2.6",
"python2.7",
"python3.2",
"python3.3",
"python3.4",
"pypy",
]
@pytest.yield_fixture
def env(request):
env = scripttest.TestFileEnvironment()
try:
yield env
finally:
env.clear()
@pytest.mark.parametrize('python', PYTHON_BINS)
def test_create_via_script(env, python):
extra = ['--python', python] if python else []
result = env.run('virtualenv', 'myenv', *extra)
if IS_WINDOWS:
assert 'myenv\\Scripts\\activate.bat' in result.files_created
assert 'myenv\\Scripts\\activate.ps1' in result.files_created
assert 'myenv\\Scripts\\activate_this.py' in result.files_created
assert 'myenv\\Scripts\\deactivate.bat' in result.files_created
assert 'myenv\\Scripts\\pip.exe' in result.files_created
assert 'myenv\\Scripts\\python.exe' in result.files_created
else:
assert 'myenv/bin/activate.sh' in result.files_created
assert 'myenv/bin/activate_this.py' in result.files_created
assert 'myenv/bin/python' in result.files_created
@pytest.mark.parametrize('python', PYTHON_BINS)
def test_create_via_module(env, python):
extra = ['--python', python] if python else []
result = env.run('python', '-mvirtualenv.__main__' if IS_26 else '-mvirtualenv', 'myenv', *extra)
if IS_WINDOWS:
assert 'myenv\\Scripts\\activate.bat' in result.files_created
assert 'myenv\\Scripts\\activate.ps1' in result.files_created
assert 'myenv\\Scripts\\activate_this.py' in result.files_created
assert 'myenv\\Scripts\\deactivate.bat' in result.files_created
assert 'myenv\\Scripts\\pip.exe' in result.files_created
assert 'myenv\\Scripts\\python.exe' in result.files_created
else:
assert 'myenv/bin/activate.sh' in result.files_created
assert 'myenv/bin/activate_this.py' in result.files_created
assert 'myenv/bin/python' in result.files_created
|
import os
import sys
import pytest
import scripttest
is_windows = (
sys.platform.startswith("win") or
(sys.platform == "cli" and os.name == "nt")
)
is_26 = sys.version_info[:2] == (2, 6)
@pytest.yield_fixture
def env(request):
env = scripttest.TestFileEnvironment()
try:
yield env
finally:
env.clear()
def test_create_via_script(env):
result = env.run('virtualenv', 'myenv')
if is_windows:
assert 'myenv\\Scripts\\activate.bat' in result.files_created
assert 'myenv\\Scripts\\activate.ps1' in result.files_created
assert 'myenv\\Scripts\\activate_this.py' in result.files_created
assert 'myenv\\Scripts\\deactivate.bat' in result.files_created
assert 'myenv\\Scripts\\pip.exe' in result.files_created
assert 'myenv\\Scripts\\python.exe' in result.files_created
else:
assert 'myenv/bin/activate.sh' in result.files_created
assert 'myenv/bin/activate_this.py' in result.files_created
assert 'myenv/bin/python' in result.files_created
def test_create_via_module(env):
result = env.run('python', '-mvirtualenv.__main__' if is_26 else '-mvirtualenv', 'myenv')
if is_windows:
assert 'myenv\\Scripts\\activate.bat' in result.files_created
assert 'myenv\\Scripts\\activate.ps1' in result.files_created
assert 'myenv\\Scripts\\activate_this.py' in result.files_created
assert 'myenv\\Scripts\\deactivate.bat' in result.files_created
assert 'myenv\\Scripts\\pip.exe' in result.files_created
assert 'myenv\\Scripts\\python.exe' in result.files_created
else:
assert 'myenv/bin/activate.sh' in result.files_created
assert 'myenv/bin/activate_this.py' in result.files_created
assert 'myenv/bin/python' in result.files_created
|
mit
|
Python
|
c407d023a59b5863b5890836c17a1aa1208244fa
|
use local_base_url from webfront to compose the API URL
|
nickchen-mitac/fork,eavatar/eavatar-me,eavatar/eavatar-me,eavatar/eavatar-me,nickchen-mitac/fork,nickchen-mitac/fork,nickchen-mitac/fork,eavatar/eavatar-me
|
tests/functional/test_webapi.py
|
tests/functional/test_webapi.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import requests
from ava.util.tests import AgentTest
SUCCESS = 'success'
ERROR = 'error'
class TestWebAPI(AgentTest):
# api_url = 'http://127.0.0.1:5080/api'
api_url = ''
@classmethod
def setUpClass(cls):
AgentTest.setUpClass()
webfront = cls.agent.context().lookup('webfront')
cls.api_url = webfront.local_base_url + 'api'
def test_ping(self):
r = requests.get(self.api_url + '/ping')
assert r.status_code == 200
data = r.json()
assert data['status'] == SUCCESS
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals
import requests
from ava.util.tests import AgentTest
SUCCESS = 'success'
ERROR = 'error'
class TestWebAPI(AgentTest):
api_url = 'http://127.0.0.1:5080/api'
def test_ping(self):
r = requests.get(self.api_url + '/ping')
assert r.status_code == 200
data = r.json()
assert data['status'] == SUCCESS
|
apache-2.0
|
Python
|
6028ae4c2c75a29c0a050429f7ab76da129791fd
|
Improve keosd_auto_launch_test by checking stderr
|
EOSIO/eos,EOSIO/eos,EOSIO/eos,EOSIO/eos,EOSIO/eos
|
tests/keosd_auto_launch_test.py
|
tests/keosd_auto_launch_test.py
|
#!/usr/bin/env python3
# This script tests that cleos launches keosd automatically when keosd is not
# running yet.
import subprocess
def run_cleos_wallet_command(command: str, no_auto_keosd: bool):
"""Run the given cleos command and return subprocess.CompletedProcess."""
args = ['./programs/cleos/cleos']
if no_auto_keosd:
args.append('--no-auto-keosd')
args += 'wallet', command
return subprocess.run(args,
check=False,
stdout=subprocess.DEVNULL,
stderr=subprocess.PIPE)
def stop_keosd():
"""Stop the default keosd instance."""
run_cleos_wallet_command('stop', no_auto_keosd=True)
def keosd_auto_launch_test():
"""Test that keos auto-launching works but can be optionally inhibited."""
stop_keosd()
# Make sure that when '--no-auto-keosd' is given, keosd is not started by
# cleos.
completed_process = run_cleos_wallet_command('list', no_auto_keosd=True)
assert completed_process.returncode != 0
assert b'Failed to connect to keosd' in completed_process.stderr
# Verify that keosd auto-launching works.
completed_process = run_cleos_wallet_command('list', no_auto_keosd=False)
assert completed_process.returncode == 0
assert b'launched' in completed_process.stderr
try:
keosd_auto_launch_test()
finally:
stop_keosd()
|
#!/usr/bin/env python3
# This script tests that cleos launches keosd automatically when keosd is not
# running yet.
import subprocess
def run_cleos_wallet_command(command: str, no_auto_keosd: bool):
"""Run the given cleos command and return subprocess.CompletedProcess."""
args = ['./programs/cleos/cleos']
if no_auto_keosd:
args.append('--no-auto-keosd')
args += 'wallet', command
return subprocess.run(args,
check=False,
stdout=subprocess.DEVNULL,
stderr=subprocess.DEVNULL)
def stop_keosd():
"""Stop the default keosd instance."""
run_cleos_wallet_command('stop', no_auto_keosd=True)
def keosd_auto_launch_test():
"""Test that keos auto-launching works but can be optionally inhibited."""
stop_keosd()
# Make sure that when '--no-auto-keosd' is given, keosd is not started by
# cleos.
assert run_cleos_wallet_command('list',
no_auto_keosd=True).returncode != 0
# Verify that keosd auto-launching works.
assert run_cleos_wallet_command('list',
no_auto_keosd=False).returncode == 0
try:
keosd_auto_launch_test()
finally:
stop_keosd()
|
mit
|
Python
|
d43ddab5908a543236a05860fb15658ec154aa5b
|
Fix import in test
|
quartz55/mopidy,swak/mopidy,mokieyue/mopidy,bencevans/mopidy,diandiankan/mopidy,vrs01/mopidy,adamcik/mopidy,bencevans/mopidy,rawdlite/mopidy,bacontext/mopidy,vrs01/mopidy,tkem/mopidy,pacificIT/mopidy,diandiankan/mopidy,woutervanwijk/mopidy,bacontext/mopidy,dbrgn/mopidy,ali/mopidy,ZenithDK/mopidy,rawdlite/mopidy,SuperStarPL/mopidy,mopidy/mopidy,pacificIT/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,adamcik/mopidy,swak/mopidy,jcass77/mopidy,kingosticks/mopidy,ali/mopidy,mopidy/mopidy,bencevans/mopidy,jcass77/mopidy,jodal/mopidy,vrs01/mopidy,liamw9534/mopidy,bacontext/mopidy,bencevans/mopidy,quartz55/mopidy,priestd09/mopidy,jodal/mopidy,hkariti/mopidy,abarisain/mopidy,mokieyue/mopidy,diandiankan/mopidy,swak/mopidy,bacontext/mopidy,jmarsik/mopidy,hkariti/mopidy,ZenithDK/mopidy,tkem/mopidy,dbrgn/mopidy,jmarsik/mopidy,ZenithDK/mopidy,dbrgn/mopidy,jodal/mopidy,SuperStarPL/mopidy,hkariti/mopidy,pacificIT/mopidy,tkem/mopidy,rawdlite/mopidy,quartz55/mopidy,abarisain/mopidy,hkariti/mopidy,kingosticks/mopidy,rawdlite/mopidy,quartz55/mopidy,SuperStarPL/mopidy,ali/mopidy,tkem/mopidy,dbrgn/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,vrs01/mopidy,woutervanwijk/mopidy,mopidy/mopidy,ali/mopidy,jcass77/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,mokieyue/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,adamcik/mopidy,priestd09/mopidy,swak/mopidy,ZenithDK/mopidy,priestd09/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,jmarsik/mopidy,liamw9534/mopidy
|
tests/outputs/gstreamer_test.py
|
tests/outputs/gstreamer_test.py
|
import multiprocessing
import unittest
from mopidy.outputs.gstreamer import GStreamerOutput
from mopidy.utils.path import path_to_uri
from mopidy.utils.process import pickle_connection
from tests import data_folder, SkipTest
class GStreamerOutputTest(unittest.TestCase):
def setUp(self):
self.song_uri = path_to_uri(data_folder('song1.wav'))
self.output_queue = multiprocessing.Queue()
self.core_queue = multiprocessing.Queue()
self.output = GStreamerOutput(self.core_queue, self.output_queue)
def tearDown(self):
self.output.destroy()
def send_recv(self, message):
(my_end, other_end) = multiprocessing.Pipe()
message.update({'reply_to': pickle_connection(other_end)})
self.output_queue.put(message)
my_end.poll(None)
return my_end.recv()
def send(self, message):
self.output_queue.put(message)
@SkipTest
def test_play_uri_existing_file(self):
message = {'command': 'play_uri', 'uri': self.song_uri}
self.assertEqual(True, self.send_recv(message))
@SkipTest
def test_play_uri_non_existing_file(self):
message = {'command': 'play_uri', 'uri': self.song_uri + 'bogus'}
self.assertEqual(False, self.send_recv(message))
def test_default_get_volume_result(self):
message = {'command': 'get_volume'}
self.assertEqual(100, self.send_recv(message))
def test_set_volume(self):
self.send({'command': 'set_volume', 'volume': 50})
self.assertEqual(50, self.send_recv({'command': 'get_volume'}))
def test_set_volume_to_zero(self):
self.send({'command': 'set_volume', 'volume': 0})
self.assertEqual(0, self.send_recv({'command': 'get_volume'}))
def test_set_volume_to_one_hundred(self):
self.send({'command': 'set_volume', 'volume': 100})
self.assertEqual(100, self.send_recv({'command': 'get_volume'}))
@SkipTest
def test_set_state(self):
raise NotImplementedError
|
import multiprocessing
import unittest
from mopidy.outputs.gstreamer import GStreamerOutput
from mopidy.process import pickle_connection
from mopidy.utils.path import path_to_uri
from tests import data_folder, SkipTest
class GStreamerOutputTest(unittest.TestCase):
def setUp(self):
self.song_uri = path_to_uri(data_folder('song1.wav'))
self.output_queue = multiprocessing.Queue()
self.core_queue = multiprocessing.Queue()
self.output = GStreamerOutput(self.core_queue, self.output_queue)
def tearDown(self):
self.output.destroy()
def send_recv(self, message):
(my_end, other_end) = multiprocessing.Pipe()
message.update({'reply_to': pickle_connection(other_end)})
self.output_queue.put(message)
my_end.poll(None)
return my_end.recv()
def send(self, message):
self.output_queue.put(message)
@SkipTest
def test_play_uri_existing_file(self):
message = {'command': 'play_uri', 'uri': self.song_uri}
self.assertEqual(True, self.send_recv(message))
@SkipTest
def test_play_uri_non_existing_file(self):
message = {'command': 'play_uri', 'uri': self.song_uri + 'bogus'}
self.assertEqual(False, self.send_recv(message))
def test_default_get_volume_result(self):
message = {'command': 'get_volume'}
self.assertEqual(100, self.send_recv(message))
def test_set_volume(self):
self.send({'command': 'set_volume', 'volume': 50})
self.assertEqual(50, self.send_recv({'command': 'get_volume'}))
def test_set_volume_to_zero(self):
self.send({'command': 'set_volume', 'volume': 0})
self.assertEqual(0, self.send_recv({'command': 'get_volume'}))
def test_set_volume_to_one_hundred(self):
self.send({'command': 'set_volume', 'volume': 100})
self.assertEqual(100, self.send_recv({'command': 'get_volume'}))
@SkipTest
def test_set_state(self):
raise NotImplementedError
|
apache-2.0
|
Python
|
010a827abdc891bd79f7474c5ef65b991edf2a1b
|
Update candidate_party_corrections.py
|
california-civic-data-coalition/django-calaccess-processed-data,california-civic-data-coalition/django-calaccess-processed-data
|
calaccess_processed/candidate_party_corrections.py
|
calaccess_processed/candidate_party_corrections.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Includes correct party affiliation for candidate in specific contests.
"""
corrections = (
# http://elections.cdn.sos.ca.gov/statewide-elections/2014-primary/updated-contact-info.pdf # noqa
('WINSTON, ALMA MARIE', 2014, 'PRIMARY', 'GOVERNOR', 'REPUBLICAN'),
# http://elections.cdn.sos.ca.gov/statewide-elections/2014-primary/certified-write-in-list.pdf # noqa
('WALLS, JIMELLE L.', 2014, 'PRIMARY', 'GOVERNOR', 'NO PARTY PREFERENCE'),
# http://elections.cdn.sos.ca.gov/statewide-elections/2012-primary/updated-contact-info-cert-list.pdf # noqa
('ESPINOSA, GEBY E.', 2014, 'PRIMARY', 'ASSEMBLY 24', 'DEMOCRATIC'),
# http://elections.cdn.sos.ca.gov/special-elections/2011-sd28/certified-list.pdf
('VALENTINE, ROBERT S.', 2011, 'SPECIAL ELECTION', 'STATE SENATE 28', 'REPUBLICAN'),
# http://cal-access.sos.ca.gov/Campaign/Candidates/Detail.aspx?id=1273672
('WALDRON, MARIE', 2018, 'PRIMARY', 'ASSEMBLY 75', 'REPUBLICAN'),
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Includes correct party affiliation for candidate in specific contests.
"""
corrections = (
# http://elections.cdn.sos.ca.gov/statewide-elections/2014-primary/updated-contact-info.pdf # noqa
('WINSTON, ALMA MARIE', 2014, 'PRIMARY', 'GOVERNOR', 'REPUBLICAN'),
# http://elections.cdn.sos.ca.gov/statewide-elections/2014-primary/certified-write-in-list.pdf # noqa
('WALLS, JIMELLE L.', 2014, 'PRIMARY', 'GOVERNOR', 'NO PARTY PREFERENCE'),
# http://elections.cdn.sos.ca.gov/statewide-elections/2012-primary/updated-contact-info-cert-list.pdf # noqa
('ESPINOSA, GEBY E.', 2014, 'PRIMARY', 'ASSEMBLY 24', 'DEMOCRATIC'),
# http://elections.cdn.sos.ca.gov/special-elections/2011-sd28/certified-list.pdf
('VALENTINE, ROBERT S.', 2011, 'SPECIAL ELECTION', 'STATE SENATE 28', 'REPUBLICAN'),
# http://elections.cdn.sos.ca.gov/statewide-elections/2014-general/updated-contact-info.pdf
('KEPHART, GARY', 2014, 'GENERAL', 'STATE SENATE 36', 'DEMOCRATIC'),
)
|
mit
|
Python
|
c5ccf36fbeb6b744918e3090422763103b181de8
|
Fix name (copy paste fail...)
|
ProjetPP/PPP-QuestionParsing-Grammatical,ProjetPP/PPP-QuestionParsing-Grammatical
|
tests/test_tripleStandardize.py
|
tests/test_tripleStandardize.py
|
import json
from ppp_nlp_classical import Triple, TriplesBucket, computeTree, simplify, buildBucket, DependenciesTree, tripleProduce1, tripleProduce2, tripleProduce3, buildTree
from ppp_datamodel import Triple, Resource, Missing
import data
from unittest import TestCase
class StandardTripleTests(TestCase):
def testBuildFromBucket(self):
tree = computeTree(data.give_president_of_USA()['sentences'][0])
qw = simplify(tree)
triple = buildTree(buildBucket(tree,qw))
self.assertIsInstance(triple,Triple)
self.assertEqual(triple.get("predicate"),Resource("identity"))
self.assertEqual(triple.get("object"),Missing())
subj=triple.get("subject")
self.assertEqual(subj.get("subject"),Missing())
self.assertEqual(subj.get("predicate"),Resource("president of"))
self.assertEqual(subj.get("object"),Resource("United States"))
|
import json
from ppp_nlp_classical import Triple, TriplesBucket, computeTree, simplify, buildBucket, DependenciesTree, tripleProduce1, tripleProduce2, tripleProduce3, buildTree
from ppp_datamodel import Triple, Resource, Missing
import data
from unittest import TestCase
class StandardTripleTests(TestCase):
def testBuildBucket(self):
tree = computeTree(data.give_president_of_USA()['sentences'][0])
qw = simplify(tree)
triple = buildTree(buildBucket(tree,qw))
self.assertIsInstance(triple,Triple)
self.assertEqual(triple.get("predicate"),Resource("identity"))
self.assertEqual(triple.get("object"),Missing())
subj=triple.get("subject")
self.assertEqual(subj.get("subject"),Missing())
self.assertEqual(subj.get("predicate"),Resource("president of"))
self.assertEqual(subj.get("object"),Resource("United States"))
|
agpl-3.0
|
Python
|
fdd4a88c7a7981e8df1dd7da150b164c8121d4be
|
Add more youtube and vimeo links for testing
|
matthiask/feincms3,matthiask/feincms3,matthiask/feincms3
|
tests/testapp/test_embedding.py
|
tests/testapp/test_embedding.py
|
from django.test import TestCase
from feincms3 import embedding
class EmbeddingTest(TestCase):
def test_no_handlers(self):
"""Embed video without handlers"""
self.assertEqual(embedding.embed_video("stuff"), None)
def test_youtube(self):
"""Test a youtube link"""
self.assertEqual(
embedding.embed_video("https://www.youtube.com/watch?v=dQw4w9WgXcQ"),
"""\
<div class="responsive-embed widescreen youtube"><iframe \
src="https://www.youtube.com/embed/dQw4w9WgXcQ" frameborder="0" \
allow="autoplay; fullscreen" allowfullscreen=""></iframe></div>""",
)
self.assertEqual(
embedding.embed_video("https://youtu.be/y7-s5ZvC_2A"),
"""\
<div class="responsive-embed widescreen youtube"><iframe \
src="https://www.youtube.com/embed/y7-s5ZvC_2A" frameborder="0" \
allow="autoplay; fullscreen" allowfullscreen=""></iframe></div>""",
)
self.assertTrue(
embedding.embed_video(
"https://www.youtube.com/watch?v=4zGnNmncJWg&feature=emb_title"
)
)
self.assertTrue(
embedding.embed_video(
"https://www.youtube.com/watch?v=DYu_bGbZiiQ&list=RDJMOOG7rWTPg&index=7"
)
)
def test_vimeo(self):
self.assertEqual(
embedding.embed_video("https://vimeo.com/455728498"),
"""\
<div class="responsive-embed widescreen vimeo"><iframe \
src="https://player.vimeo.com/video/455728498" frameborder="0" \
allow="autoplay; fullscreen" allowfullscreen=""></iframe></div>""",
)
self.assertTrue(
embedding.embed_video("https://player.vimeo.com/video/417955670")
)
self.assertEqual(
embedding.embed_video("https://vimeo.com/12345678/3213124324"),
"""\
<div class="responsive-embed widescreen vimeo"><iframe \
src="https://player.vimeo.com/video/12345678" frameborder="0" \
allow="autoplay; fullscreen" allowfullscreen=""></iframe></div>""",
)
|
from django.test import TestCase
from feincms3 import embedding
class EmbeddingTest(TestCase):
def test_no_handlers(self):
"""Embed video without handlers"""
self.assertEqual(embedding.embed_video("stuff"), None)
def test_youtube(self):
"""Test a youtube link"""
self.assertEqual(
embedding.embed_video("https://www.youtube.com/watch?v=dQw4w9WgXcQ"),
"""\
<div class="responsive-embed widescreen youtube"><iframe \
src="https://www.youtube.com/embed/dQw4w9WgXcQ" frameborder="0" \
allow="autoplay; fullscreen" allowfullscreen=""></iframe></div>""",
)
def test_vimeo(self):
self.assertEqual(
embedding.embed_video("https://vimeo.com/455728498"),
"""\
<div class="responsive-embed widescreen vimeo"><iframe \
src="https://player.vimeo.com/video/455728498" frameborder="0" \
allow="autoplay; fullscreen" allowfullscreen=""></iframe></div>""",
)
|
bsd-3-clause
|
Python
|
efe792aa0d634905f1c6242c26b78a6d6364eea9
|
Reduce YoChannel tests
|
ymyzk/kawasemi,ymyzk/django-channels
|
tests/tests/backends/test_yo.py
|
tests/tests/backends/test_yo.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from copy import deepcopy
from django.conf import settings
from django.test import TestCase
from channels.backends.yo import YoChannel
from channels.exceptions import HttpError, ImproperlyConfigured
config = settings.CHANNELS["CHANNELS"]["channels.backends.yo.YoChannel"]
class YoChannelTestCase(TestCase):
def setUp(self):
self.channel = YoChannel(**config)
def test_init(self):
with self.assertRaises(TypeError):
YoChannel(**{})
def test_send(self):
self.channel.send("Just Yo")
self.channel.send("Yo Link", options={
"yo": {"link": "http://docs.justyo.co/v1.0/docs/yo"}})
self.channel.send("Yo Location", options={
"yo": {"location": "35.0261581,135.7818476"}})
def test_send_fail(self):
conf = deepcopy(config)
conf["api_token"] = "api_token"
channel = YoChannel(**conf)
with self.assertRaises(HttpError):
channel.send("Yo", fail_silently=False)
channel.send("Yo", fail_silently=True)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from copy import deepcopy
from django.conf import settings
from django.test import TestCase
from channels.backends.yo import YoChannel
from channels.exceptions import HttpError, ImproperlyConfigured
config = settings.CHANNELS["CHANNELS"]["channels.backends.yo.YoChannel"]
class YoChannelTestCase(TestCase):
def setUp(self):
self.channel = YoChannel(**config)
def test_init(self):
with self.assertRaises(TypeError):
YoChannel(**{})
def test_send(self):
self.channel.send("Just Yo")
self.channel.send("Yo Link", options={
"yo": {"link": "http://docs.justyo.co/v1.0/docs/yo"}})
self.channel.send("Yo Location", options={
"yo": {"location": "35.0261581,135.7818476"}})
with self.assertRaises(HttpError):
self.channel.send("Yo Link", options={
"yo": {
"link": "http://docs.justyo.co/v1.0/docs/yo",
"location": "35.0261581,135.7818476"
}})
def test_send_fail(self):
conf = deepcopy(config)
conf["api_token"] = "api_token"
channel = YoChannel(**conf)
with self.assertRaises(HttpError):
channel.send("Yo", fail_silently=False)
channel.send("Yo", fail_silently=True)
|
mit
|
Python
|
c9d45a96236b822e2a5ca11490afdb02b9a5e699
|
Drop Py2 and six on tests/unit/states/test_modjk.py
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
tests/unit/states/test_modjk.py
|
tests/unit/states/test_modjk.py
|
"""
:codeauthor: Jayesh Kariya <[email protected]>
"""
import salt.states.modjk as modjk
from tests.support.unit import TestCase
LIST_NOT_STR = "workers should be a list not a <class 'str'>"
class ModjkTestCase(TestCase):
"""
Test cases for salt.states.modjk
"""
# 'worker_stopped' function tests: 1
def test_worker_stopped(self):
"""
Test to stop all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_stopped(name, "app1"), ret)
# 'worker_activated' function tests: 1
def test_worker_activated(self):
"""
Test to activate all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_activated(name, "app1"), ret)
# 'worker_disabled' function tests: 1
def test_worker_disabled(self):
"""
Test to disable all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_disabled(name, "app1"), ret)
# 'worker_recover' function tests: 1
def test_worker_recover(self):
"""
Test to recover all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_recover(name, "app1"), ret)
|
# -*- coding: utf-8 -*-
"""
:codeauthor: Jayesh Kariya <[email protected]>
"""
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt Libs
import salt.states.modjk as modjk
from salt.ext import six
# Import Salt Testing Libs
from tests.support.unit import TestCase
if six.PY2:
LIST_NOT_STR = "workers should be a list not a <type 'unicode'>"
else:
LIST_NOT_STR = "workers should be a list not a <class 'str'>"
class ModjkTestCase(TestCase):
"""
Test cases for salt.states.modjk
"""
# 'worker_stopped' function tests: 1
def test_worker_stopped(self):
"""
Test to stop all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_stopped(name, "app1"), ret)
# 'worker_activated' function tests: 1
def test_worker_activated(self):
"""
Test to activate all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_activated(name, "app1"), ret)
# 'worker_disabled' function tests: 1
def test_worker_disabled(self):
"""
Test to disable all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_disabled(name, "app1"), ret)
# 'worker_recover' function tests: 1
def test_worker_recover(self):
"""
Test to recover all the workers in the modjk load balancer
"""
name = "loadbalancer"
ret = {"name": name, "result": False, "comment": "", "changes": {}}
ret.update({"comment": LIST_NOT_STR})
self.assertDictEqual(modjk.worker_recover(name, "app1"), ret)
|
apache-2.0
|
Python
|
0a84c767395bd8cb88711afae8bc94b045d50e78
|
Remove unused import
|
RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu
|
tests/utils/test_train_utils.py
|
tests/utils/test_train_utils.py
|
from typing import Any, Dict
import numpy as np
import pytest
import rasa.utils.train_utils as train_utils
from rasa.nlu.constants import NUMBER_OF_SUB_TOKENS
from rasa.nlu.tokenizers.tokenizer import Token
from rasa.shared.nlu.constants import (
SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE,
SPLIT_ENTITIES_BY_COMMA,
)
def test_align_token_features():
tokens = [
Token("This", 0, data={NUMBER_OF_SUB_TOKENS: 1}),
Token("is", 5, data={NUMBER_OF_SUB_TOKENS: 1}),
Token("a", 8, data={NUMBER_OF_SUB_TOKENS: 1}),
Token("sentence", 10, data={NUMBER_OF_SUB_TOKENS: 2}),
Token("embedding", 19, data={NUMBER_OF_SUB_TOKENS: 4}),
]
seq_dim = sum(t.get(NUMBER_OF_SUB_TOKENS) for t in tokens)
token_features = np.random.rand(1, seq_dim, 64)
actual_features = train_utils.align_token_features([tokens], token_features)
assert np.all(actual_features[0][0] == token_features[0][0])
assert np.all(actual_features[0][1] == token_features[0][1])
assert np.all(actual_features[0][2] == token_features[0][2])
# sentence is split into 2 sub-tokens
assert np.all(actual_features[0][3] == np.mean(token_features[0][3:5], axis=0))
# embedding is split into 4 sub-tokens
assert np.all(actual_features[0][4] == np.mean(token_features[0][5:10], axis=0))
@pytest.mark.parametrize(
"split_entities_config, expected_initialized_config",
[
(
SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE,
{SPLIT_ENTITIES_BY_COMMA: SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE},
),
(
{"address": False, "ingredients": True},
{
"address": False,
"ingredients": True,
SPLIT_ENTITIES_BY_COMMA: SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE,
},
),
],
)
def test_init_split_entities_config(
split_entities_config: Any, expected_initialized_config: Dict[(str, bool)],
):
assert (
train_utils.init_split_entities(
split_entities_config, SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE
)
== expected_initialized_config
)
|
from typing import Any, Dict
import numpy as np
import pytest
import rasa.utils.train_utils as train_utils
from rasa.core.policies.ted_policy import TEDPolicy
from rasa.nlu.constants import NUMBER_OF_SUB_TOKENS
from rasa.nlu.tokenizers.tokenizer import Token
from rasa.shared.nlu.constants import (
SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE,
SPLIT_ENTITIES_BY_COMMA,
)
def test_align_token_features():
tokens = [
Token("This", 0, data={NUMBER_OF_SUB_TOKENS: 1}),
Token("is", 5, data={NUMBER_OF_SUB_TOKENS: 1}),
Token("a", 8, data={NUMBER_OF_SUB_TOKENS: 1}),
Token("sentence", 10, data={NUMBER_OF_SUB_TOKENS: 2}),
Token("embedding", 19, data={NUMBER_OF_SUB_TOKENS: 4}),
]
seq_dim = sum(t.get(NUMBER_OF_SUB_TOKENS) for t in tokens)
token_features = np.random.rand(1, seq_dim, 64)
actual_features = train_utils.align_token_features([tokens], token_features)
assert np.all(actual_features[0][0] == token_features[0][0])
assert np.all(actual_features[0][1] == token_features[0][1])
assert np.all(actual_features[0][2] == token_features[0][2])
# sentence is split into 2 sub-tokens
assert np.all(actual_features[0][3] == np.mean(token_features[0][3:5], axis=0))
# embedding is split into 4 sub-tokens
assert np.all(actual_features[0][4] == np.mean(token_features[0][5:10], axis=0))
@pytest.mark.parametrize(
"split_entities_config, expected_initialized_config",
[
(
SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE,
{SPLIT_ENTITIES_BY_COMMA: SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE},
),
(
{"address": False, "ingredients": True},
{
"address": False,
"ingredients": True,
SPLIT_ENTITIES_BY_COMMA: SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE,
},
),
],
)
def test_init_split_entities_config(
split_entities_config: Any, expected_initialized_config: Dict[(str, bool)],
):
assert (
train_utils.init_split_entities(
split_entities_config, SPLIT_ENTITIES_BY_COMMA_DEFAULT_VALUE
)
== expected_initialized_config
)
|
apache-2.0
|
Python
|
96dbf260a5c7bf9d5f89951f77792cf1c04d5e38
|
add profiling to perf.py
|
oakmac/parinfer.py
|
perf.py
|
perf.py
|
import cProfile
import time
from parinfer import indent_mode, paren_mode
def timeProcess(string, options):
numlines = len(string.splitlines())
print "Testing file with", numlines, "lines"
t = time.clock()
indent_mode(string, options)
dt = time.clock() - t
print "Indent Mode:", dt, "s"
t = time.clock()
paren_mode(string, options)
dt = time.clock() - t
print "Paren Mode:", dt, "s"
cProfile.runctx("indent_mode(string, options)", globals(), locals())
cProfile.runctx("paren_mode(string, options)", globals(), locals())
with open('tests/really_long_file', 'r') as f:
text = f.read()
timeProcess(text, {})
|
import time
from parinfer import indent_mode, paren_mode
def timeProcess(string, options):
numlines = len(string.splitlines())
print "Testing file with", numlines, "lines"
t = time.clock()
indent_mode(string, options)
dt = time.clock() - t
print "Indent Mode:", dt, "s"
t = time.clock()
paren_mode(string, options)
dt = time.clock() - t
print "Paren Mode:", dt, "s"
with open('tests/really_long_file', 'r') as f:
text = f.read()
timeProcess(text, {})
|
isc
|
Python
|
dbb6fff417a3beac0db7dec603d4793eabc68a89
|
bump version but not calling it stable in readme yet
|
NYUCCL/psiTurk,NYUCCL/psiTurk,NYUCCL/psiTurk
|
psiturk/version.py
|
psiturk/version.py
|
version_number = '2.2.4'
|
version_number = '2.2.3'
|
mit
|
Python
|
e9fd001a21c594f3efd076aab73fdb3fafaa49f0
|
fix typo
|
xpybuild/xpybuild,xpybuild/xpybuild,xpybuild/xpybuild,xpybuild/xpybuild
|
tests/build_utilities/native_config.xpybuild.py
|
tests/build_utilities/native_config.xpybuild.py
|
import os, glob, logging
from xpybuild.propertysupport import *
from xpybuild.buildcommon import *
from xpybuild.pathsets import *
from xpybuild.utils.compilers import GCC, VisualStudio
log = logging.getLogger('xpybuild.tests.native_config')
# some basic defaults for recent default compilers for running our testcases with
if IS_WINDOWS:
VSROOT=r'c:\Program Files (x86)\Microsoft Visual Studio *'
if glob.glob(VSROOT):
VSROOT = sorted(glob.glob(VSROOT))[-1] # pick the latest one
else:
raise Exception('Cannot find Visual Studio installed in: %s'%VSROOT)
setGlobalOption('native.include', [
VSROOT+r"\VC\ATLMFC\INCLUDE",
VSROOT+r"\VC\INCLUDE",
r"C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt",
])
if not os.path.exists(r"C:\Program Files (x86)\Windows Kits\10"):
log.warning('WARN - Cannot find expected Windows Kits, got: %s'%sorted(glob.glob(r"C:\Program Files (x86)\Windows Kits\*")))
if not os.path.exists(r"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt"):
log.warning('WARN - Cannot find expected Windows Kits UCRT, got: %s'%sorted(glob.glob(r"C:\Program Files (x86)\Windows Kits\10\Lib\*\*")))
setGlobalOption('native.libpaths', [
VSROOT+r"\VC\ATLMFC\LIB\amd64",
VSROOT+r"\VC\LIB\amd64",
r"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64",
r"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.19041.0\um\x64",
])
setGlobalOption('native.cxx.path', [
VSROOT+r"\Common7\IDE",
VSROOT+r"\VC\BIN\amd64",
VSROOT+r"\Common7\Tools",
r"c:\Windows\Microsoft.NET\Framework\v3.5",
])
setGlobalOption('native.compilers', VisualStudio(VSROOT+r'\VC\bin\amd64'))
setGlobalOption('native.cxx.flags', ['/EHa', '/GR', '/O2', '/Ox', '/Ot', '/MD', '/nologo'])
else:
setGlobalOption('native.compilers', GCC())
setGlobalOption('native.cxx.flags', ['-fPIC', '-O3', '--std=c++0x'])
|
import os, glob, logging
from xpybuild.propertysupport import *
from xpybuild.buildcommon import *
from xpybuild.pathsets import *
from xpybuild.utils.compilers import GCC, VisualStudio
log = logging.getLogger('xpybuild.tests.native_config')
# some basic defaults for recent default compilers for running our testcases with
if IS_WINDOWS:
VSROOT=r'c:\Program Files (x86)\Microsoft Visual Studio *'
if glob.glob(VSROOT):
VSROOT = sorted(glob.glob(VSROOT))[-1] # pick the latest one
else:
raise Exception('Cannot find Visual Studio installed in: %s'%VSROOT)
setGlobalOption('native.include', [
VSROOT+r"\VC\ATLMFC\INCLUDE",
VSROOT+r"\VC\INCLUDE",
r"C:\Program Files (x86)\Windows Kits\10\Include\10.0.10240.0\ucrt",
])
if not os.path.exists(r"C:\Program Files (x86)\Windows Kits\10"):
log.warning('WARN - Cannot find expected Windows Kits, got: %s'%sorted(glob.glob(r"C:\Program Files (x86)\Windows Kits\*")))
if not os.path.exists(r"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrtx"):
log.warning('WARN - Cannot find expected Windows Kits UCRT, got: %s'%sorted(glob.glob(r"C:\Program Files (x86)\Windows Kits\10\Lib\*\*")))
setGlobalOption('native.libpaths', [
VSROOT+r"\VC\ATLMFC\LIB\amd64",
VSROOT+r"\VC\LIB\amd64",
r"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.10240.0\ucrt\x64",
r"C:\Program Files (x86)\Windows Kits\10\Lib\10.0.19041.0\um\x64",
])
setGlobalOption('native.cxx.path', [
VSROOT+r"\Common7\IDE",
VSROOT+r"\VC\BIN\amd64",
VSROOT+r"\Common7\Tools",
r"c:\Windows\Microsoft.NET\Framework\v3.5",
])
setGlobalOption('native.compilers', VisualStudio(VSROOT+r'\VC\bin\amd64'))
setGlobalOption('native.cxx.flags', ['/EHa', '/GR', '/O2', '/Ox', '/Ot', '/MD', '/nologo'])
else:
setGlobalOption('native.compilers', GCC())
setGlobalOption('native.cxx.flags', ['-fPIC', '-O3', '--std=c++0x'])
|
apache-2.0
|
Python
|
bdaf7b8f30b6a3a493cc5246dd908bdcdff69ab8
|
Increase test coverage
|
Clinical-Genomics/scout,Clinical-Genomics/scout,Clinical-Genomics/scout
|
tests/commands/load/test_load_cnv_report_cmd.py
|
tests/commands/load/test_load_cnv_report_cmd.py
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],)
assert "saved report to case!" in result.output
assert result.exit_code == 0
def test_invalid_path_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(cli, ["load", "cnv-report", case_obj["_id"], "invalid-path", "-u"],)
assert 'Path "invalid-path" does not exist.' in result.output
assert result.exit_code == 2
|
# -*- coding: utf-8 -*-
import os
from scout.demo import cnv_report_path
from scout.commands import cli
def test_load_cnv_report(mock_app, case_obj):
"""Testing the load delivery report cli command"""
# Make sure the path to delivery report is a valid path
assert os.path.isfile(cnv_report_path)
runner = mock_app.test_cli_runner()
assert runner
# Test CLI function
result = runner.invoke(
cli,
["load", "cnv-report", case_obj["_id"], cnv_report_path, "-u"],
)
assert "saved report to case!" in result.output
assert result.exit_code == 0
|
bsd-3-clause
|
Python
|
a403dff24e33e6b0ef0b31b4342a9b978f9090f2
|
Improve admin display
|
Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC,Deavelleye/dj-CerberusAC
|
src/cerberus_ac/admin.py
|
src/cerberus_ac/admin.py
|
# -*- coding: utf-8 -*-
"""Admin module."""
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from .apps import AppSettings
from .models import (
AccessHistory, PrivilegeHistory, Role, RoleHierarchy, RolePrivilege)
# class SecurityAdmin(AdminSite):
# pass
#
#
# class DataAdmin(AdminSite):
# pass
#
#
# class AuditAdmin(AdminSite):
# pass
#
#
# security_admin_site = SecurityAdmin(name='SecurityAdmin')
# data_admin_site = DataAdmin(name='DataAdmin')
# audit_admin_site = AuditAdmin(name='AuditAdmin')
#
# Use decorator like @security_admin_site.register(AccessHistory)
# TODO: override save_model methods for history
# https://docs.djangoproject.com/en/1.10/ref/contrib/admin/#django.contrib.admin.ModelAdmin.save_model
class RoleAdmin(admin.ModelAdmin):
"""Role admin class."""
class RolePrivilegeAdmin(admin.ModelAdmin):
"""Role privilege admin class."""
class RoleHierarchyAdmin(admin.ModelAdmin):
"""Role hierarchy admin class."""
list_display = ('role_type_a', 'role_id_a', 'role_type_b', 'role_id_b')
class AccessHistoryAdmin(admin.ModelAdmin):
"""Acces history admin class."""
list_display = (
'role_type',
'role_id',
'response',
'response_type',
'access_type',
'resource_type',
'resource_id',
'datetime',
'conveyor_type',
'conveyor_id'
)
class PrivilegeHistoryAdmin(admin.ModelAdmin):
"""Privilege history admin class."""
list_display = (
'datetime',
'user',
'action',
'role_type',
'role_id',
'role_link',
'authorized',
'access_type',
'resource_type',
'resource_id',
'resource_link')
def role_link(self, obj):
instance = AppSettings.get_mapping().instance_from_name_and_id(
obj.resource_type, obj.resource_id)
info = (instance._meta.app_label, instance._meta.model_name)
admin_url = reverse('admin:%s_%s_change' % info,
args=(instance.pk,))
return mark_safe('<a href="%s">%s</a>' % (admin_url, instance))
role_link.short_description = _('Role link')
def resource_link(self, obj):
instance = AppSettings.get_mapping().instance_from_name_and_id(
obj.resource_type, obj.resource_id)
info = (instance._meta.app_label, instance._meta.model_name)
admin_url = reverse('admin:%s_%s_change' % info,
args=(instance.pk,))
return mark_safe('<a href="%s">%s</a>' % (admin_url, instance))
resource_link.short_description = _('Resource link')
# class HierarchyHistoryAdmin(admin.ModelAdmin):
# pass
admin.site.register(Role, RoleAdmin)
admin.site.register(RolePrivilege, RolePrivilegeAdmin)
admin.site.register(RoleHierarchy, RoleHierarchyAdmin)
admin.site.register(AccessHistory, AccessHistoryAdmin)
admin.site.register(PrivilegeHistory, PrivilegeHistoryAdmin)
# admin.site.register(HierarchyHistory, HierarchyHistoryAdmin)
|
# -*- coding: utf-8 -*-
"""Admin module."""
from django.contrib import admin
from .models import (
AccessHistory, PrivilegeHistory, Role, RoleHierarchy, RolePrivilege)
# class SecurityAdmin(AdminSite):
# pass
#
#
# class DataAdmin(AdminSite):
# pass
#
#
# class AuditAdmin(AdminSite):
# pass
#
#
# security_admin_site = SecurityAdmin(name='SecurityAdmin')
# data_admin_site = DataAdmin(name='DataAdmin')
# audit_admin_site = AuditAdmin(name='AuditAdmin')
#
# Use decorator like @security_admin_site.register(AccessHistory)
# TODO: override save_model methods for history
# https://docs.djangoproject.com/en/1.10/ref/contrib/admin/#django.contrib.admin.ModelAdmin.save_model
class RoleAdmin(admin.ModelAdmin):
"""Role admin class."""
class RolePrivilegeAdmin(admin.ModelAdmin):
"""Role privilege admin class."""
class RoleHierarchyAdmin(admin.ModelAdmin):
"""Role hierarchy admin class."""
class AccessHistoryAdmin(admin.ModelAdmin):
"""Acces history admin class."""
class PrivilegeHistoryAdmin(admin.ModelAdmin):
"""Privilege history admin class."""
# class HierarchyHistoryAdmin(admin.ModelAdmin):
# pass
admin.site.register(Role, RoleAdmin)
admin.site.register(RolePrivilege, RolePrivilegeAdmin)
admin.site.register(RoleHierarchy, RoleHierarchyAdmin)
admin.site.register(AccessHistory, AccessHistoryAdmin)
admin.site.register(PrivilegeHistory, PrivilegeHistoryAdmin)
# admin.site.register(HierarchyHistory, HierarchyHistoryAdmin)
|
isc
|
Python
|
b7f4696d1384f656df71332055cd4ea87f85e3c9
|
Bump to v0.2.3.
|
tryolabs/luminoth,tryolabs/luminoth,tryolabs/luminoth
|
luminoth/__init__.py
|
luminoth/__init__.py
|
__version__ = '0.2.3'
__title__ = 'Luminoth'
__description__ = 'Computer vision toolkit based on TensorFlow'
__uri__ = 'https://luminoth.ai'
__doc__ = __description__ + ' <' + __uri__ + '>'
__author__ = 'Tryolabs'
__email__ = '[email protected]'
__license__ = 'BSD 3-Clause License'
__copyright__ = 'Copyright (c) 2018 Tryolabs S.A.'
__min_tf_version__ = '1.5'
import sys
# Check for a current TensorFlow installation.
try:
import tensorflow # noqa: F401
except ImportError:
sys.exit("""Luminoth requires a TensorFlow >= {} installation.
Depending on your use case, you should install either `tensorflow` or
`tensorflow-gpu` packages manually or via PyPI.""".format(__min_tf_version__))
# Import functions that are part of Luminoth's public interface.
from luminoth.cli import cli # noqa
from luminoth.io import read_image # noqa
from luminoth.tasks import Detector # noqa
from luminoth.vis import vis_objects # noqa
|
__version__ = '0.2.3dev0'
__title__ = 'Luminoth'
__description__ = 'Computer vision toolkit based on TensorFlow'
__uri__ = 'https://luminoth.ai'
__doc__ = __description__ + ' <' + __uri__ + '>'
__author__ = 'Tryolabs'
__email__ = '[email protected]'
__license__ = 'BSD 3-Clause License'
__copyright__ = 'Copyright (c) 2018 Tryolabs S.A.'
__min_tf_version__ = '1.5'
import sys
# Check for a current TensorFlow installation.
try:
import tensorflow # noqa: F401
except ImportError:
sys.exit("""Luminoth requires a TensorFlow >= {} installation.
Depending on your use case, you should install either `tensorflow` or
`tensorflow-gpu` packages manually or via PyPI.""".format(__min_tf_version__))
# Import functions that are part of Luminoth's public interface.
from luminoth.cli import cli # noqa
from luminoth.io import read_image # noqa
from luminoth.tasks import Detector # noqa
from luminoth.vis import vis_objects # noqa
|
bsd-3-clause
|
Python
|
ea5e6ca2e6523f0b2a585112b5fd5f18e9fcf969
|
add namespace
|
SAOImageDS9/tkblt,SAOImageDS9/tkblt,SAOImageDS9/tkblt
|
ds9/library/parser.tac
|
ds9/library/parser.tac
|
%{
%}
%token INT_
%token REAL_
%token STRING_
%token FOOCMD_
%token EXITCMD_
%token CLOSE_
%token FIT_
%token OPEN_
%token TO_
%%
commands : commands command
| command
;
int : INT_ {set _ $1}
;
command : FOOCMD_ foo
| EXITCMD_ exit
;
numeric : int {set _ $1}
| REAL_ {set _ $1}
;
foo : STRING_ {puts "STRING $1"}
| INT_ {puts "INT $1"}
| REAL_ {puts "REAL $1"}
| OPEN_ {puts "OPEN"}
| CLOSE_ {puts "CLOSE"}
| TO_ fooTo
;
fooTo: FIT_ {puts "TO FIT"}
| numeric {puts "TO NUMERIC $1"}
;
exit: {puts "EXIT"; QuitDS9}
;
%%
proc yy::yyerror {s} {
puts stderr "parse error:"
puts stderr "$yy::yy_buffer"
puts stderr [format "%*s" $yy::yy_index ^]
}
|
%{
%}
%token INT_
%token REAL_
%token STRING_
%token FOOCMD_
%token EXITCMD_
%token CLOSE_
%token FIT_
%token OPEN_
%token TO_
%%
commands : commands command
| command
;
int : INT_ {set _ $1}
;
command : FOOCMD_ foo
| EXITCMD_ exit
;
numeric : int {set _ $1}
| REAL_ {set _ $1}
;
foo : STRING_ {puts "STRING $1"}
| INT_ {puts "INT $1"}
| REAL_ {puts "REAL $1"}
| OPEN_ {puts "OPEN"}
| CLOSE_ {puts "CLOSE"}
| TO_ fooTo
;
fooTo: FIT_ {puts "TO FIT"}
| numeric {puts "TO NUMERIC $1"}
;
exit: {puts "EXIT"; QuitDS9}
;
%%
proc yyerror {s} {
puts stderr "parse error:"
puts stderr "$::yy_buffer"
puts stderr [format "%*s" $::yy_index ^]
}
|
mit
|
Python
|
818d89c897603eeb33caf1ca2cdaeae5c3010880
|
Use passed directory in mako engine.
|
blubberdiblub/eztemplate
|
engines/mako_engine.py
|
engines/mako_engine.py
|
#!/usr/bin/env python
"""Provide the mako templating engine."""
from __future__ import print_function
from mako.template import Template
from mako.lookup import TemplateLookup
from . import Engine
class MakoEngine(Engine):
"""Mako templating engine."""
handle = 'mako'
def __init__(self, template, dirname=None, tolerant=False, **kwargs):
"""Initialize mako template."""
super(MakoEngine, self).__init__(**kwargs)
directories = [dirname] if dirname is not None else ['.']
lookup = TemplateLookup(directories=directories)
default_filters = ['filter_undefined'] if tolerant else None
encoding_errors = 'replace' if tolerant else 'strict'
imports = ['def filter_undefined(value):\n'
' if value is UNDEFINED:\n'
' return \'<UNDEFINED>\'\n'
' return value\n']
self.template = Template(template,
default_filters=default_filters,
encoding_errors=encoding_errors,
imports=imports,
lookup=lookup,
strict_undefined=not tolerant,
)
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return self.template.render(**mapping)
|
#!/usr/bin/env python
"""Provide the mako templating engine."""
from __future__ import print_function
from mako.template import Template
from mako.lookup import TemplateLookup
from . import Engine
class MakoEngine(Engine):
"""Mako templating engine."""
handle = 'mako'
def __init__(self, template, tolerant=False, **kwargs):
"""Initialize mako template."""
super(MakoEngine, self).__init__(**kwargs)
default_filters = ['filter_undefined'] if tolerant else None
encoding_errors = 'replace' if tolerant else 'strict'
imports = ['def filter_undefined(value):\n'
' if value is UNDEFINED:\n'
' return \'<UNDEFINED>\'\n'
' return value\n']
lookup = TemplateLookup(directories=['.'])
self.template = Template(template,
default_filters=default_filters,
encoding_errors=encoding_errors,
imports=imports,
lookup=lookup,
strict_undefined=not tolerant,
)
def apply(self, mapping):
"""Apply a mapping of name-value-pairs to a template."""
return self.template.render(**mapping)
|
mit
|
Python
|
d3057d336332f8315580cc7fa7c6e3d3fb1cbcc8
|
Use a format-string to build the command
|
guiniol/py3status,valdur55/py3status,ultrabug/py3status,tobes/py3status,Andrwe/py3status,Andrwe/py3status,valdur55/py3status,ultrabug/py3status,valdur55/py3status,tobes/py3status,guiniol/py3status,ultrabug/py3status
|
py3status/modules/taskwarrior.py
|
py3status/modules/taskwarrior.py
|
# -*- coding: utf-8 -*-
"""
Display tasks currently running in taskwarrior.
Configuration parameters:
cache_timeout: refresh interval for this module (default 5)
format: display format for this module (default '{task}')
filter: arguments passed to the command
(default 'start.before:today status:pending')
Format placeholders:
{task} active tasks
Requires
task: https://taskwarrior.org/download/
@author James Smith http://jazmit.github.io/
@license BSD
SAMPLE OUTPUT
{'full_text': '1 Prepare first draft, 2 Buy milk'}
"""
import json
STRING_NOT_INSTALLED = "not installed"
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = '{task}'
filter = 'start.before:today status:pending'
def post_config_hook(self):
if not self.py3.check_commands('task'):
raise Exception(STRING_NOT_INSTALLED)
def taskWarrior(self):
def describeTask(taskObj):
return str(taskObj['id']) + ' ' + taskObj['description']
task_command = 'task %s export' % self.filter
task_json = json.loads(self.py3.command_output(task_command))
task_result = ', '.join(map(describeTask, task_json))
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': self.py3.safe_format(self.format, {'task': task_result})
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
# -*- coding: utf-8 -*-
"""
Display tasks currently running in taskwarrior.
Configuration parameters:
cache_timeout: refresh interval for this module (default 5)
format: display format for this module (default '{task}')
filter: arguments passed to the command
(default 'start.before:today status:pending')
Format placeholders:
{task} active tasks
Requires
task: https://taskwarrior.org/download/
@author James Smith http://jazmit.github.io/
@license BSD
SAMPLE OUTPUT
{'full_text': '1 Prepare first draft, 2 Buy milk'}
"""
import json
STRING_NOT_INSTALLED = "not installed"
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = '{task}'
filter = 'start.before:today status:pending'
def post_config_hook(self):
if not self.py3.check_commands('task'):
raise Exception(STRING_NOT_INSTALLED)
def taskWarrior(self):
def describeTask(taskObj):
return str(taskObj['id']) + ' ' + taskObj['description']
task_command = 'task ' + self.filter + ' export'
task_json = json.loads(self.py3.command_output(task_command))
task_result = ', '.join(map(describeTask, task_json))
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': self.py3.safe_format(self.format, {'task': task_result})
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
bsd-3-clause
|
Python
|
ae9b7bf45832b69f6a3ca94e24ed6d2e7d3a384d
|
Update __init__.py
|
vladcalin/pymicroservice,vladcalin/gemstone,vladcalin/pymicroservice,vladcalin/gemstone
|
gemstone/__init__.py
|
gemstone/__init__.py
|
"""
Build microservices with Python
"""
from gemstone.core.microservice import MicroService
from gemstone.core.decorators import private_api_method, public_method, event_handler
from gemstone.core.handlers import TornadoJsonRpcHandler
from gemstone.client.remote_service import RemoteService
from gemstone.util import as_completed, first_completed, make_callbacks
__author__ = "Vlad Calin"
__email__ = "[email protected]"
__version__ = "0.5.0"
__all__ = [
# core classes
'MicroService',
'RemoteService',
# decorators
'public_method',
'private_api_method',
'event_handler',
# tornado handler
'TornadoJsonRpcHandler',
# async utilities
'as_completed',
'first_completed',
'make_callbacks'
]
|
"""
Build microservices with Python
"""
from gemstone.core.microservice import MicroService
from gemstone.core.decorators import private_api_method, public_method, event_handler
from gemstone.core.handlers import TornadoJsonRpcHandler
from gemstone.client.remote_service import RemoteService
from gemstone.util import as_completed, first_completed, make_callbacks
__author__ = "Vlad Calin"
__email__ = "[email protected]"
__version__ = "0.4.0"
__all__ = [
# core classes
'MicroService',
'RemoteService',
# decorators
'public_method',
'private_api_method',
'event_handler',
# tornado handler
'TornadoJsonRpcHandler',
# async utilities
'as_completed',
'first_completed',
'make_callbacks'
]
|
mit
|
Python
|
322d10c8932f160a305fab126401e3f172453e7e
|
Refactor prototxt generation.
|
Hornobster/Ball-Tracking,Hornobster/Ball-Tracking
|
generate_prototxt.py
|
generate_prototxt.py
|
#!/usr/bin/python
import os
import sys
caffe_root = os.getenv('CAFFE_ROOT', './')
sys.path.insert(0, caffe_root + '/python')
import caffe
from caffe import layers as L, params as P
def lenet(batch_size, phase):
n = caffe.NetSpec()
# empty layers as placeholders
# the resulting prototxt must be edited manually
n.data = L.Input()
n.label = L.Input()
n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))
n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50, weight_filler=dict(type='xavier'))
n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
n.fc1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))
n.relu1 = L.ReLU(n.fc1, in_place=True)
n.score = L.InnerProduct(n.relu1, num_output=2, weight_filler=dict(type='xavier'))
if (phase == 'TRAIN'):
n.loss = L.SoftmaxWithLoss(n.score, n.label)
else if (phase == 'TEST'):
n.prob = L.Softmax(n.score)
return n.to_proto()
with open('lenet_auto_train.prototxt', 'w') as f:
f.write(str(lenet(50, 'TRAIN')))
with open('lenet_auto_test.prototxt', 'w') as f:
f.write(str(lenet(50, 'TEST')))
|
#!/usr/bin/python
import os
import sys
caffe_root = os.getenv('CAFFE_ROOT', './')
sys.path.insert(0, caffe_root + '/python')
import caffe
from caffe import layers as L, params as P
def lenet():
n = caffe.NetSpec()
# empty layers as placeholders
# the resulting prototxt must be edited manually
n.data = L.Input()
n.label = L.Input()
n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))
n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50, weight_filler=dict(type='xavier'))
n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
n.fc1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))
n.relu1 = L.ReLU(n.fc1, in_place=True)
n.score = L.InnerProduct(n.relu1, num_output=2, weight_filler=dict(type='xavier'))
n.loss = L.SoftmaxWithLoss(n.score, n.label)
return n.to_proto()
with open('lenet_auto_train.prototxt', 'w') as f:
f.write(str(lenet()))
with open('lenet_auto_test.prototxt', 'w') as f:
f.write(str(lenet()))
|
mit
|
Python
|
81d41ceaf89848851d8353021f01f41c724aaae8
|
Add docstring to PySplunk
|
studiawan/pygraphc
|
pygraphc/misc/splunk/pysplunk.py
|
pygraphc/misc/splunk/pysplunk.py
|
from os import system, remove
class PySplunk(object):
"""Get log clustering using Python Splunk API [SplunkDev2016]_.
References
----------
.. [SplunkDev2016] Command line examples in the Splunk SDK for Python.
http://dev.splunk.com/view/python-sdk/SP-CAAAEFK
"""
def __init__(self, username, source, host, output_mode, tmp_file='/tmp/pysplunk_cluster.csv'):
"""The constructor of class PySplunk.
Parameters
----------
username : str
Username to access Splunk daemon. No password required since we use Splunk free version.
source : str
Identifier for log source. It is usually filename of log.
host : str
Hostname for the source log.
output_mode : str
Output for clustering result. Recommended output is csv
tmp_file : str
Path for temporary clustering result.
"""
self.username = username
self.source = source.replace(' ', '\ ')
self.host = host
self.output_mode = output_mode
self.tmp_file = tmp_file
def get_splunk_cluster(self):
"""Get log clusters.
Returns
-------
clusters : dict
Dictionary of log cluster. Key: cluster_id, value: list of log line identifier.
"""
# run Python Splunk API command
command = 'python search.py --username=' + self.username + ' "search source=' + self.source + \
' host=' + self.host + ' sourcetype=linux_secure | cluster labelfield=cluster_id labelonly=t |' \
' table cluster_id _raw | sort _time | reverse" ' + '--output_mode=' + \
self.output_mode + " > " + self.tmp_file
system(command)
# get clusters
with open(self.tmp_file, 'r') as f:
logs = f.readlines()
clusters = {}
for index, log in enumerate(logs):
cluster_id = log.split(',')[0]
clusters[cluster_id] = clusters.get(cluster_id, []) + [index]
# remove tmp_file
remove(self.tmp_file)
return clusters
|
from os import system, remove
class PySplunk(object):
def __init__(self, username, source, host, output_mode, tmp_file='/tmp/pysplunk_cluster.csv'):
self.username = username
self.source = source.replace(' ', '\ ')
self.host = host
self.output_mode = output_mode
self.tmp_file = tmp_file
def get_splunk_cluster(self):
# run Python Splunk API command
command = 'python search.py --username=' + self.username + ' "search source=' + self.source + \
' host=' + self.host + ' sourcetype=linux_secure | cluster labelfield=cluster_id labelonly=t |' \
' table cluster_id _raw | sort _time | reverse" ' + '--output_mode=' + \
self.output_mode + " > " + self.tmp_file
system(command)
# get clusters
with open(self.tmp_file, 'r') as f:
logs = f.readlines()
clusters = {}
for index, log in enumerate(logs):
cluster_id = log.split(',')[0]
clusters[cluster_id] = clusters.get(cluster_id, []) + [index]
# remove tmp_file
remove(self.tmp_file)
return clusters
|
mit
|
Python
|
931c016e43402f847c6e58b4679f7f5cf132776f
|
add DESI_LOGLEVEL environment variable, add doc, define WARNING... in module
|
gdhungana/desispec,timahutchinson/desispec,profxj/desispec,desihub/desispec,profxj/desispec,desihub/desispec,timahutchinson/desispec,gdhungana/desispec
|
py/desispec/log.py
|
py/desispec/log.py
|
"""
Utility functions to dump log messages
We can have something specific for DESI in the future but for now we use the standard python
"""
import sys
import logging
import os
desi_logger = None
# just for convenience to avoid importing logging
# we duplicate the logging levels
DEBUG=logging.DEBUG # Detailed information, typically of interest only when diagnosing problems.
INFO=logging.INFO # Confirmation that things are working as expected.
WARNING=logging.WARNING # An indication that something unexpected happened, or indicative of some problem
# in the near future (e.g. "disk space low"). The software is still working as expected.
ERROR=logging.ERROR # Due to a more serious problem, the software has not been able to perform some function.
CRITICAL=logging.CRITICAL # A serious error, indicating that the program itself may be unable to continue running.
# see example of usage in test/test_log.py
def get_logger(level=None) :
"""
returns a default desi logger
Args:
level: debugging level.
If level=None, will look for environment variable DESI_LOGLEVEL, accepting only values DEBUG,INFO,WARNING,ERROR.
If DESI_LOGLEVEL is not set, default level is INFO.
"""
if level is None :
desi_level=os.getenv("DESI_LOGLEVEL")
if desi_level is None :
level=INFO
else :
dico={"DEBUG":DEBUG,"INFO":INFO,"WARNING":WARNING,"ERROR":ERROR}
if dico.has_key(desi_level) :
level=dico[desi_level]
else :
# amusingly I need the logger to dump a warning here
logger=get_logger(level=WARNING)
message="ignore DESI_LOGLEVEL=%s (only recognize"%desi_level
for k in dico :
message+=" %s"%k
message+=")"
logger.warning(message)
level=INFO
global desi_logger
if desi_logger is not None :
if level is not None :
desi_logger.setLevel(level)
return desi_logger
desi_logger = logging.getLogger("DESI")
desi_logger.setLevel(level)
while len(desi_logger.handlers) > 0:
h = desi_logger.handlers[0]
desi_logger.removeHandler(h)
ch = logging.StreamHandler(sys.stdout)
#formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s:%(message)s')
formatter = logging.Formatter('%(levelname)s:%(filename)s:%(lineno)s:%(funcName)s: %(message)s')
ch.setFormatter(formatter)
desi_logger.addHandler(ch)
return desi_logger
|
"""
Utility functions to dump log messages
We can have something specific for DESI in the future but for now we use the standard python
"""
import sys
import logging
desi_logger = None
def get_logger(level=logging.DEBUG) :
"""
returns a default desi logger
"""
global desi_logger
if desi_logger is not None :
return desi_logger
desi_logger = logging.getLogger("DESI")
desi_logger.setLevel(level)
while len(desi_logger.handlers) > 0:
h = desi_logger.handlers[0]
desi_logger.removeHandler(h)
ch = logging.StreamHandler(sys.stdout)
#formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s:%(message)s')
formatter = logging.Formatter('%(levelname)s:%(filename)s:%(lineno)s:%(funcName)s: %(message)s')
ch.setFormatter(formatter)
desi_logger.addHandler(ch)
return desi_logger
|
bsd-3-clause
|
Python
|
c8c2785b156523204e530cd78268686886ce2a37
|
Fix incorrect module publics
|
mjhanninen/oldfart,mjhanninen/oldfart,mjhanninen/oldfart
|
py/oldfart/make.py
|
py/oldfart/make.py
|
import os
import re
import subprocess
__all__ = ['NOTHING_DONE', 'SUCCESS', 'NO_RULE', 'FAILURE', 'Maker']
NOTHING_DONE = 1
SUCCESS = 2
NO_RULE = 3
FAILURE = 4
class Maker(object):
def __init__(self, project_dir='.', makefile='Makefile'):
self.project_dir = os.path.abspath(project_dir)
self.makefile = os.path.abspath(os.path.join(project_dir, makefile))
def make(self, target):
"""Runs `make(1)` on `target` and returning a tuple `(status, output)`
where `status` is one of:
- `make.SUCCESS`: the target was successfully generated
- `make.NOTHING_DONE`: the target was already up-to-date
- `make.NO_RULE`: there is no rule to build the requested target
- `make.FAILURE`: `make(1)` exited otherwise with a non-zero error code
Returned `output` contains always the mixed output from `stdout` and
`stderr`.
"""
try:
capture = subprocess.check_output(
['make', '--makefile=' + self.makefile, target],
cwd=self.project_dir, stderr=subprocess.STDOUT,
universal_newlines=True)
if re.match(r"make: `[^']*' is up to date.", capture):
return (NOTHING_DONE, capture)
else:
return (SUCCESS, capture)
except subprocess.CalledProcessError as e:
if re.match(r"make: \*\*\* No rule to make target `{:s}'. Stop."
.format(target), e.output):
return (NO_RULE, e.output)
else:
return (FAILURE, e.output)
|
import os
import re
import subprocess
__all__ = ['NOOP', 'SUCCESS', 'FAIL', 'Maker']
NOTHING_DONE = 1
SUCCESS = 2
NO_RULE = 3
FAILURE = 4
class Maker(object):
def __init__(self, project_dir='.', makefile='Makefile'):
self.project_dir = os.path.abspath(project_dir)
self.makefile = os.path.abspath(os.path.join(project_dir, makefile))
def make(self, target):
"""Runs `make(1)` on `target` and returning a tuple `(status, output)`
where `status` is one of:
- `make.SUCCESS`: the target was successfully generated
- `make.NOTHING_DONE`: the target was already up-to-date
- `make.NO_RULE`: there is no rule to build the requested target
- `make.FAILURE`: `make(1)` exited otherwise with a non-zero error code
Returned `output` contains always the mixed output from `stdout` and
`stderr`.
"""
try:
capture = subprocess.check_output(
['make', '--makefile=' + self.makefile, target],
cwd=self.project_dir, stderr=subprocess.STDOUT,
universal_newlines=True)
if re.match(r"make: `[^']*' is up to date.", capture):
return (NOTHING_DONE, capture)
else:
return (SUCCESS, capture)
except subprocess.CalledProcessError as e:
if re.match(r"make: \*\*\* No rule to make target `{:s}'. Stop."
.format(target), e.output):
return (NO_RULE, e.output)
else:
return (FAILURE, e.output)
|
bsd-3-clause
|
Python
|
89c2e367e4d691e83ccf92055c1dc8be59e05497
|
use list comprehensions for legibility
|
rforgione/pynder
|
pynder/session.py
|
pynder/session.py
|
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token):
self._api = api.TinderAPI()
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return [models.Hopeful(u, self) for u in self._api.recs()['results']]
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return [models.Match(m, self) for m in self._api.matches()]
|
from . import api
from . import models
class Session(object):
def __init__(self, facebook_id, facebook_token):
self._api = api.TinderAPI()
# perform authentication
self._api.auth(facebook_id, facebook_token)
self.profile = models.Profile(self._api.profile(), self)
def nearby_users(self):
return map(lambda user: models.Hopeful(user, self),
self._api.recs()['results'])
def update_location(self, latitude, longitude):
return self._api.ping(latitude, longitude)
def matches(self):
return map(lambda match: models.Match(match, self),
self._api.matches())
|
mit
|
Python
|
1aa687b70aea9074ae28d1154ae0db4364add26e
|
Rewrite example.py.
|
ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python
|
pyoommf/example.py
|
pyoommf/example.py
|
from sim import Sim
from mesh import Mesh
from exchange import Exchange
from demag import Demag
from zeeman import Zeeman
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = Mesh(lx, ly, lz, dx, dy, dz)
# Create a simulation object.
sim = Sim(mesh, Ms)
# Add energies.
sim.add(Exchange(A))
sim.add(Demag())
sim.add(Zeeman(H))
sim.create_mif()
"""
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
# Get the results.
results = sim.result()
"""
|
import sim, mesh
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e6, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 1e-9 # simulation time (s)
# Create a mesh.
mesh = mesh.Mesh(lx, ly, lz, dx, dy, dz)
# Create a simulation object.
sim = sim.Sim(mesh, Ms)
# Add energies.
sim.add_exchange(A)
sim.add_demag()
sim.add_zeeman(H)
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run_until(t_sim)
# Get the results.
results = sim.result()
|
bsd-2-clause
|
Python
|
50ae1d3fb7e14fec94831a4c6667c7b1ba2e073b
|
add python script
|
colddew/mix-script,colddew/mix-script
|
python/python2.py
|
python/python2.py
|
#!/usr/bin/python2
# -*- coding: UTF-8 -*-
# install
# sudo pip install pymongo
# sudo pip install MySQL-python
# sudo install_name_tool -change libmysqlclient.18.dylib /usr/local/mysql/lib/libmysqlclient.18.dylib /Library/Python/2.7/site-packages/_mysql.so
# sudo pip install requests
# sudo pip install threadpool
# sudo pip install apscheduler
# command
python -V
import sys, pprint
pprint.pprint(sys.path)
dir(copy)
help(copy.copy)
print copy.__doc__
print copy.__file__
import webbrowser
webbrowser.open("http://www.baidu.com")
import urllib
html = urllib.urlopen("http://www.baidu.com")
temp_file = urllib.urlretrieve("http://www.baidu.com")
urllib.urlcleanup()
# script
for letter in 'Python':
print 'current letter:', letter
fruits = ['banana', 'apple', 'mango']
for index in range(len(fruits)):
print 'current fruit:', fruits[index]
with open("/tmp/file.txt") as file:
do(file)
f = open(filename)
for line in f.readlines():
process(line)
f.close()
import fileinput
for line in fileinput.input(line):
process(line)
f = open(filename)
for line in f:
process(line)
f.close()
|
#!/usr/bin/python2
# -*- coding: UTF-8 -*-
# install
# sudo pip install pymongo
# sudo pip install MySQL-python
# sudo install_name_tool -change libmysqlclient.18.dylib /usr/local/mysql/lib/libmysqlclient.18.dylib /Library/Python/2.7/site-packages/_mysql.so
# sudo pip install requests
# sudo pip install threadpool
# sudo pip install apscheduler
# command
python -V
import sys, pprint
pprint.pprint(sys.path)
dir(copy)
help(copy.copy)
print copy.__doc__
print copy.__file__
import webbrowser
webbrowser.open("http://www.baidu.com")
# script
for letter in 'Python':
print 'current letter:', letter
fruits = ['banana', 'apple', 'mango']
for index in range(len(fruits)):
print 'current fruit:', fruits[index]
with open("/tmp/file.txt") as file:
do(file)
f = open(filename)
for line in f.readlines():
process(line)
f.close()
import fileinput
for line in fileinput.input(line):
process(line)
f = open(filename)
for line in f:
process(line)
f.close()
|
mit
|
Python
|
c0cbc2458c42bfb116c0d631c837f042f66d33a8
|
Add explanatory comments to Python varargs script
|
rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/ple,rtoal/ple,rtoal/polyglot,rtoal/polyglot,rtoal/ple,rtoal/polyglot,rtoal/polyglot
|
python/varargs.py
|
python/varargs.py
|
def f(x, y=1000, *z):
print('x={} y={} z={}'.format(x,y,z))
f(0) # x=0, y=1000, z=()
f(0,1) # x=0, y=1, z=()
f(0,1,2) # x=0, y=1, z=(2,)
f(0,1,2,3) # x=0, y=1, z=(2,3)
f(0,1,2,3,4) # x=0, y=1, z=(2,3,4)
f(*[i for i in range(6)]) # x=0, y=1, z=(2,3,4,5)
f(*range(7)) # x=0, y=1, z=(2,3,4,5,6)
|
def f(x, y=1000, *z):
print('x={} y={} z={}'.format(x,y,z))
f(0)
f(0,1)
f(0,1,2)
f(0,1,2,3)
f(0,1,2,3,4)
f(*[i for i in range(6)])
f(*range(7))
|
mit
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.