commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
643c8bf95bd5ac0df32eed39beb7124badd723ed
|
allow extra args in subprocess dispatcher
|
genome/flow-core,genome/flow-core,genome/flow-core
|
lib/amqp_service/dispatcher/subprocess_dispatcher.py
|
lib/amqp_service/dispatcher/subprocess_dispatcher.py
|
import logging
import subprocess
from amqp_service.dispatcher import util
LOG = logging.getLogger(__name__)
class SubprocessDispatcher(object):
def launch_job(self, command, arguments=[],
wrapper=None, wrapper_arguments=[], environment={},
stdout=None, stderr=None, **kwargs):
command_list = []
if wrapper:
command_list.append(wrapper)
command_list.extend(wrapper_arguments)
command_list.append(command)
command_list.extend(arguments)
with util.environment(environment):
LOG.debug('executing subprocess using command_list: %s',
command_list)
exit_code = subprocess.call(command_list,
stdout=stdout, stderr=stderr)
if exit_code > 0:
# XXX get error message
LOG.debug('failed to execute subprocess job, exit_code = %d',
exit_code)
return False, exit_code
else:
LOG.debug('succesfully executed subprocess job')
return True, exit_code
|
import logging
import subprocess
from amqp_service.dispatcher import util
LOG = logging.getLogger(__name__)
class SubprocessDispatcher(object):
def launch_job(self, command, arguments=[],
wrapper=None, wrapper_arguments=[], environment={},
stdout=None, stderr=None):
command_list = []
if wrapper:
command_list.append(wrapper)
command_list.extend(wrapper_arguments)
command_list.append(command)
command_list.extend(arguments)
with util.environment(environment):
LOG.debug('executing subprocess using command_list: %s',
command_list)
exit_code = subprocess.call(command_list,
stdout=stdout, stderr=stderr)
if exit_code > 0:
# XXX get error message
LOG.debug('failed to execute subprocess job, exit_code = %d',
exit_code)
return False, exit_code
else:
LOG.debug('succesfully executed subprocess job')
return True, exit_code
|
agpl-3.0
|
Python
|
054dc32d30ca9175a6c8b40af52491b8e3a98978
|
Debug the URL that's being requested
|
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
|
heufybot/modules/util/webutils.py
|
heufybot/modules/util/webutils.py
|
from twisted.plugin import IPlugin
from twisted.python import log
from heufybot.moduleinterface import BotModule, IBotModule
from heufybot.utils.logutils import logExceptionTrace
from zope.interface import implements
import logging, re, requests
class WebUtils(BotModule):
implements(IPlugin, IBotModule)
name = "WebUtils"
canDisable = False
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("fetch-url", 1, self.fetchURL) ]
def fetchURL(self, url, params = None, extraHeaders = None):
headers = { "user-agent": "Mozilla/5.0" }
if extraHeaders:
headers.update(extraHeaders)
try:
request = requests.get(url, params=params, headers=headers)
pageType = request.headers["content-type"]
if not re.match("^(text/.*|application/((rss|atom|rdf)\+)?xml(;.*)?|application/(.*)json(;.*)?)$", pageType):
# Make sure we don't download any unwanted things
return None
log.msg(request.url, level=logging.DEBUG)
return request
except requests.RequestException as ex:
logExceptionTrace("Error while fetching from {}: {}".format(url, ex))
return None
webutils = WebUtils()
|
from twisted.plugin import IPlugin
from heufybot.moduleinterface import BotModule, IBotModule
from heufybot.utils.logutils import logExceptionTrace
from zope.interface import implements
import re, requests
class WebUtils(BotModule):
implements(IPlugin, IBotModule)
name = "WebUtils"
canDisable = False
def hookBot(self, bot):
self.bot = bot
def actions(self):
return [ ("fetch-url", 1, self.fetchURL) ]
def fetchURL(self, url, params = None, extraHeaders = None):
headers = { "user-agent": "Mozilla/5.0" }
if extraHeaders:
headers.update(extraHeaders)
try:
request = requests.get(url, params=params, headers=headers)
pageType = request.headers["content-type"]
if not re.match("^(text/.*|application/((rss|atom|rdf)\+)?xml(;.*)?|application/(.*)json(;.*)?)$", pageType):
# Make sure we don't download any unwanted things
return None
return request
except requests.RequestException as ex:
logExceptionTrace("Error while fetching from {}: {}".format(url, ex))
return None
webutils = WebUtils()
|
mit
|
Python
|
ab51dd3c6c649e582deeb2309a88738b45bccba8
|
clean up formatting of logger
|
krdyke/OGP-metadata-py,krdyke/OGP-metadata-py
|
src/logger.py
|
src/logger.py
|
"""
create simple logger class to output results both to text file and display
"""
import os.path
import csv
import time
class Logger(object):
def __init__(self, output_location):
self.filename = '__ogp-mdt-log-' + str(time.time()).replace('.', '') + '.csv'
self.csvfile = open(os.path.join(output_location, self.filename), mode='a')
self.log = csv.writer(self.csvfile)
def write(self, filename, message):
s = os.path.split(filename)
self.log.writerow([s[0], s[1], message])
def close(self):
self.csvfile.close()
|
"""
create simple logger class to output results both to text file and display
"""
import os.path,csv,time
class Logger(object):
def __init__(self,OUTPUT_LOCATION):
self.filename = '__ogp-mdt-log-' + str(time.time()).replace('.','') + '.csv'
self.csvfile = open(os.path.join(OUTPUT_LOCATION, self.filename), mode='a')
self.log = csv.writer(self.csvfile)
def write(self, filename,message):
s = os.path.split(filename)
self.log.writerow([s[0],s[1],message])
def close(self):
self.csvfile.close()
|
mit
|
Python
|
ebdb3a510718288f5db14539d7261f10abb59c96
|
Fix a small typo error in clusterdemo.py (#945)
|
mininet/mininet,mininet/mininet,mininet/mininet
|
examples/clusterdemo.py
|
examples/clusterdemo.py
|
#!/usr/bin/python
"clusterdemo.py: demo of Mininet Cluster Edition prototype"
from mininet.examples.cluster import ( MininetCluster, SwitchBinPlacer,
RemoteLink )
# ^ Could also use: RemoteSSHLink, RemoteGRELink
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.examples.clustercli import ClusterCLI as CLI
def demo():
"Simple Demo of Cluster Mode"
servers = [ 'localhost', 'ubuntu2', 'ubuntu3' ]
topo = TreeTopo( depth=3, fanout=3 )
net = MininetCluster( topo=topo, servers=servers, link=RemoteLink,
placement=SwitchBinPlacer )
net.start()
CLI( net )
net.stop()
if __name__ == '__main__':
setLogLevel( 'info' )
demo()
|
#!/usr/bin/python
"clusterdemo.py: demo of Mininet Cluster Edition prototype"
from mininet.examples.cluster import ( MininetCluster, SwitchBinPlacer,
RemoteLink )
# ^ Could also use: RemoteSSHLink, RemoteGRELink
from mininet.topolib import TreeTopo
from mininet.log import setLogLevel
from mininet.examples.clustercli import ClusterCLI as CLI
def demo():
"Simple Demo of Cluster Mode"
servers = [ 'localhost', 'ubuntu2', 'ubuntu3' ]
topo = TreeTopo( depth=3, fanout=3 )
net = MininetCluster( topo=topo, servers=servers, Link=RemoteLink,
placement=SwitchBinPlacer )
net.start()
CLI( net )
net.stop()
if __name__ == '__main__':
setLogLevel( 'info' )
demo()
|
bsd-3-clause
|
Python
|
5dbaf2f519c573dbeb239be0d21282ad432339e8
|
Fix order in base
|
cryvate/project-euler,cryvate/project-euler
|
project_euler/library/base.py
|
project_euler/library/base.py
|
from typing import List
def number_to_list(number: int, base: int = 10) -> List[int]:
if number < 0:
raise ValueError(f'Cannot convert {number} to list, must be positive.')
if base <= 0:
raise ValueError(f'Cannot convert to base {base}.')
digits = []
while number > 0:
digits.append(number % base)
number //= base
return list(reversed(digits))
def list_to_number(representation: List[int], base: int = 10) -> int:
accumulate = 0
for digit in representation:
accumulate = accumulate * base + digit
return accumulate
def is_permutation(self: int, other: int, base: int = 10) -> bool:
if self // other >= base or other // self >= base:
return False
else:
return sorted(number_to_list(self, base)) == \
sorted(number_to_list(other, base))
|
from typing import List
def number_to_list(number: int, base: int = 10) -> List[int]:
if number < 0:
raise ValueError(f'Cannot convert {number} to list, must be positive.')
if base <= 0:
raise ValueError(f'Cannot convert to base {base}.')
digits = []
while number > 0:
digits.append(number % base)
number //= base
return digits
def list_to_number(representation: List[int], base: int = 10) -> int:
accumulate = 0
for digit in representation:
accumulate = accumulate * base + digit
return accumulate
def is_permutation(self: int, other: int, base: int = 10) -> bool:
if self // other >= base or other // self >= base:
return False
else:
return sorted(number_to_list(self, base)) == \
sorted(number_to_list(other, base))
|
mit
|
Python
|
24b868f99d40e5309fc4a8f8e1ca9d9ca00524ea
|
move init code into its own function
|
fretboardfreak/netify
|
src/app.py
|
src/app.py
|
# Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The netify application object."""
import os
from flask import Flask
DEFAULT_SECRET_KEY_SIZE = 64 # bytes -> 64 * 8 = 512bits
APP = None # Singleton Flask App
def init():
global APP
APP = Flask(__name__)
APP.config.from_object(__name__)
APP.config.update(dict(SECRET_KEY=os.urandom(DEFAULT_SECRET_KEY_SIZE)))
return APP
def run(host=None, port=None, debug=None):
APP.run(host, port, debug)
|
# Copyright 2015 Curtis Sand
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The netify application object."""
import os
from flask import Flask
DEFAULT_SECRET_KEY_SIZE = 64 # bytes -> 64 * 8 = 512bits
APP = Flask(__name__)
APP.config.from_object(__name__)
APP.config.update(dict(SECRET_KEY=os.urandom(DEFAULT_SECRET_KEY_SIZE)))
|
apache-2.0
|
Python
|
e1cb37a061f1522e027004d5ed2aca572223a4a2
|
Update cooler test utility
|
hms-dbmi/clodius,hms-dbmi/clodius
|
test/utils.py
|
test/utils.py
|
import h5py
import logging
logger = logging.getLogger(__name__)
def get_cooler_info(file_path):
"""Get information of a cooler file.
Args:
file_path (str): Path to a cooler file.
Returns:
dict: Dictionary containing basic information about the cooler file.
"""
TILE_SIZE = 256
CHROM_CUM_LEN = 0
with h5py.File(file_path, 'r') as f:
max_zoom = f.attrs.get('max-zoom')
if max_zoom is None:
logger.info('no zoom found')
raise ValueError(
'The `max_zoom` attribute is missing.'
)
total_length = CHROM_CUM_LEN
max_zoom = f.attrs['max-zoom']
bin_size = int(f[str(max_zoom)].attrs['bin-size'])
max_width = bin_size * TILE_SIZE * 2**max_zoom
info = {
'min_pos': [0.0, 0.0],
'max_pos': [total_length, total_length],
'max_zoom': max_zoom,
'max_width': max_width,
'bins_per_dimension': TILE_SIZE,
}
return info
|
import h5py
import logging
logger = logging.getLogger(__name__)
def get_cooler_info(file_path):
"""Get information of a cooler file.
Args:
file_path (str): Path to a cooler file.
Returns:
dict: Dictionary containing basic information about the cooler file.
"""
with h5py.File(file_path, 'r') as f:
max_zoom = f.attrs.get('max-zoom')
if max_zoom is None:
logger.info('no zoom found')
raise ValueError(
'The `max_zoom` attribute is missing.'
)
total_length = int(CHROM_CUM_LEN[-1])
max_zoom = f.attrs['max-zoom']
bin_size = int(f[str(max_zoom)].attrs['bin-size'])
max_width = bin_size * TILE_SIZE * 2**max_zoom
info = {
'min_pos': [0.0, 0.0],
'max_pos': [total_length, total_length],
'max_zoom': max_zoom,
'max_width': max_width,
'bins_per_dimension': TILE_SIZE,
}
return info
|
mit
|
Python
|
b149d3e2be52a9876815b4599164210f086cf0c0
|
update TODO objectives
|
MrXlVii/crypto_project
|
testCaesar.py
|
testCaesar.py
|
import unittest
import Caesar
class TestCryptMethods(unittest.TestCase):
"""Tests for Caesar.py"""
cryptInput = ['encrypt', 'Encrypt', 'decrypt', 'Decrypt', 'blah', 'WHOCARES']
encryptInput = ['foo', 'bar', 'Hello World', 'xyz', '101010111']
decryptInput = ['ktt', 'gfw', 'Mjqqt Btwqi', 'cde', '101010111']
def setUp(self):
pass
def test_crypt(self):
result = []
for i in range(len(self.cryptInput)):
result.append(Caesar.crypt(self.cryptInput[i]))
self.assertTrue(result[0])
self.assertTrue(result[1])
self.assertFalse(result[2])
self.assertFalse(result[3])
#self.assertRaises
#self.assertRaises
def test_encryption(self):
for i in range(len(encryptInput)):
result.append(Caesar.encryption(self.encryptInput[i]))
#TODO: test encryption runs appropriately
def test_decryption(self):
pass
#TODO: test decryption runs appropriately
if __name__ == "__main__":
unittest.main()
|
import unittest
import Caesar
class TestCryptMethods(unittest.TestCase):
"""Tests for Caesar.py"""
cryptInput = ['encrypt', 'Encrypt', 'decrypt', 'Decrypt', 'blah', 'WHOCARES']
encryptInput = ['foo', 'bar', 'Hello World', '342', '101010111']
decryptInput = ['ktt', 'gfw', 'Mjqqt%\twqi', '897', '656565666']
def setUp(self):
pass
def test_crypt(self):
result = []
for i in range(len(self.cryptInput)):
result.append(Caesar.crypt(self.cryptInput[i]))
self.assertTrue(result[0])
self.assertTrue(result[1])
self.assertFalse(result[2])
self.assertFalse(result[3])
#self.assertRaises
#self.assertRaises
def test_encryption(self):
for i in range(len(encryptInput)):
result[i] = Caesar.crypt(encryptInput[i])
def test_decryption(self):
pass
if __name__ == "__main__":
unittest.main()
|
mit
|
Python
|
cf4debe97d48d42ac28fb8e2d328a8583e81a007
|
Fix version_info comparison
|
wbond/asn1crypto
|
dev/ci.py
|
dev/ci.py
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import sys
from .tests import run as run_tests
if sys.version_info >= (2, 7):
from .lint import run as run_lint
def run():
"""
Runs the linter and tests
:return:
A bool - if the linter and tests ran successfully
"""
print('Python ' + sys.version.replace('\n', ''))
if sys.version_info >= (2, 7):
print('')
lint_result = run_lint()
else:
lint_result = True
print('\nRunning tests')
sys.stdout.flush()
tests_result = run_tests()
return lint_result and tests_result
|
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
import sys
from .tests import run as run_tests
if sys.version_info > (2, 6):
from .lint import run as run_lint
def run():
"""
Runs the linter and tests
:return:
A bool - if the linter and tests ran successfully
"""
print('Python ' + sys.version.replace('\n', ''))
if sys.version_info > (2, 6):
print('')
lint_result = run_lint()
else:
lint_result = True
print('\nRunning tests')
sys.stdout.flush()
tests_result = run_tests()
return lint_result and tests_result
|
mit
|
Python
|
22ae21ab43c1f94807e282b7d50987af13a6a9d6
|
Exclude ps1 modules from the TestModules unittest
|
thaim/ansible,thaim/ansible
|
test/units/TestModules.py
|
test/units/TestModules.py
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
(path, ext) = os.path.splitext(filename)
if ext != ".ps1":
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
# -*- coding: utf-8 -*-
import os
import ast
import unittest
from ansible import utils
class TestModules(unittest.TestCase):
def list_all_modules(self):
paths = utils.plugins.module_finder._get_paths()
paths = [x for x in paths if os.path.isdir(x)]
module_list = []
for path in paths:
for (dirpath, dirnames, filenames) in os.walk(path):
for filename in filenames:
module_list.append(os.path.join(dirpath, filename))
return module_list
def test_ast_parse(self):
module_list = self.list_all_modules()
ERRORS = []
# attempt to parse each module with ast
for m in module_list:
try:
ast.parse(''.join(open(m)))
except Exception, e:
ERRORS.append((m, e))
assert len(ERRORS) == 0, "get_docstring errors: %s" % ERRORS
|
mit
|
Python
|
7f00ff930b70b9c0ca00874d00704cdf540e3939
|
correct errors
|
yanadsl/ML-Autocar
|
serial_test.py
|
serial_test.py
|
import datetime
import sys
import pigpio
import time
import math
time_record = int(time.time() * 1000)
time_limit = 50
pi = pigpio.pi()
sensor_message_size = 7
sensor_signal_pin = 4
dead_pin = 17
pi.set_mode(sensor_signal_pin, pigpio.OUTPUT)
h1 = pi.serial_open("/dev/ttyAMA0", 9600)
pi.serial_write_byte(h1, 10 * 2)
pi.write(sensor_signal_pin, pigpio.LOW)
print("start")
sita = 1
try:
while True:
while (int(time.time() * 1000) - time_record) <= time_limit:
time.sleep(0.002)
time_record = int(time.time() * 1000)
distance = []
pi.serial_read(h1) # clear any redauntancy data
pi.write(sensor_signal_pin, pigpio.HIGH)
while pi.serial_data_available(h1) < sensor_message_size - 1:
# print( pi.serial_data_available(h1))
time.sleep(0.0007)
(b, d) = pi.serial_read(h1, sensor_message_size)
pi.write(sensor_signal_pin, pigpio.LOW)
sets = []
for a in d:
sets.append(int(a) / 2.0)
if pi.read(dead_pin) == pigpio.LOW:
print("dead")
if (abs(sets[2] - sets[1]) < 7 and sets[2] < 40) or (abs(sets[4] - sets[5]) < 7 and set[4] < 40):
print("修正FIXED")
if sets[2] < 40:
a = sets[1]+0.5
b = sets[2]
else:
a = sets[5]+0.5
b = sets[4]
c = math.sqrt(a ** 2 + b ** 2 - 2 * a * b * math.cos(math.pi * 25 / 180))
sita = math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c))
ans = a * math.sin(math.pi - sita) / math.sin(sita - math.pi * 25 / 180)
sets[3] = round(ans, 1)
print([sets[0], sets[1], sets[2], sets[3], sets[4], sets[5], sets[6]], round(math.degrees(sita), 1))
# distance = normalize(distance)
except KeyboardInterrupt:
pi.serial_close(h1)
sys.exit(0)
|
import datetime
import sys
import pigpio
import time
import math
time_record = int(time.time() * 1000)
time_limit = 50
pi = pigpio.pi()
sensor_message_size = 7
sensor_signal_pin = 4
dead_pin = 17
pi.set_mode(sensor_signal_pin, pigpio.OUTPUT)
h1 = pi.serial_open("/dev/ttyAMA0", 9600)
pi.serial_write_byte(h1, 10 * 2)
pi.write(sensor_signal_pin, pigpio.LOW)
print("start")
sita = 1
try:
while True:
while (int(time.time() * 1000) - time_record) <= time_limit:
time.sleep(0.002)
time_record = int(time.time() * 1000)
distance = []
pi.serial_read(h1) # clear any redauntancy data
pi.write(sensor_signal_pin, pigpio.HIGH)
while pi.serial_data_available(h1) < sensor_message_size - 1:
# print( pi.serial_data_available(h1))
time.sleep(0.0007)
(b, d) = pi.serial_read(h1, sensor_message_size)
pi.write(sensor_signal_pin, pigpio.LOW)
sets = []
for a in d:
sets.append(int(a) / 2.0)
if pi.read(dead_pin) == pigpio.LOW:
print("dead")
if not (abs(sets[2] - sets[1]) > 7 and abs(sets[4] - sets[5]) > 7):
print("SMALLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLLER")
if sets[2] < 40:
a = sets[1]+0.5
b = sets[2]
else:
a = sets[5]+0.5
b = sets[4]
c = math.sqrt(a ** 2 + b ** 2 - 2 * a * b * math.cos(math.pi * 25 / 180))
sita = math.acos((b ** 2 + c ** 2 - a ** 2) / (2 * b * c))
ans = a * math.sin(math.pi - sita) / math.sin(sita - math.pi * 25 / 180)
sets[3] = round(ans, 1)
print([sets[0], sets[1], sets[2], sets[3], sets[4], sets[5], sets[6]], round(math.degrees(sita), 1))
# distance = normalize(distance)
except KeyboardInterrupt:
pi.serial_close(h1)
sys.exit(0)
|
mit
|
Python
|
dba06078985716bda0a0d3a6ab26d0fad73b4c73
|
add a flags parameter to test() to allow passing in during interactive sessions
|
matthew-brett/draft-statsmodels,matthew-brett/draft-statsmodels
|
lib/neuroimaging/algorithms/statistics/__init__.py
|
lib/neuroimaging/algorithms/statistics/__init__.py
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
def test(level=1, verbosity=1, flags=[]):
from neuroimaging.utils.test_decorators import set_flags
set_flags(flags)
from numpy.testing import NumpyTest
return NumpyTest().test(level, verbosity)
|
"""
TODO
"""
__docformat__ = 'restructuredtext'
def test(level=1, verbosity=1):
from numpy.testing import NumpyTest
return NumpyTest().test(level, verbosity)
|
bsd-3-clause
|
Python
|
1c9f12f808ffa0f1d4f16ea9f35021a83126243f
|
Update test Solr download script to work with default Python 3
|
upayavira/pysolr,mylanium/pysolr,mbeacom/pysolr,shasha79/pysolr,django-searchstack/skisolr,toastdriven/pysolr,CANTUS-Project/pysolr-tornado,toastdriven/pysolr,mylanium/pysolr,mbeacom/pysolr,upayavira/pysolr,django-haystack/pysolr,swistakm/pysolr,CANTUS-Project/pysolr-tornado,swistakm/pysolr,django-haystack/pysolr,rokaka/pysolr,rokaka/pysolr,django-searchstack/skisolr,shasha79/pysolr
|
get-solr-download-url.py
|
get-solr-download-url.py
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import, print_function, unicode_literals
import sys
import requests
# Try to import urljoin from the Python 3 reorganized stdlib first:
try:
from urllib.parse import urljoin
except ImportError:
from urlparse import urljoin
if len(sys.argv) != 2:
print('Usage: %s SOLR_VERSION' % sys.argv[0], file=sys.stderr)
sys.exit(1)
solr_version = sys.argv[1]
tarball = 'solr-{0}.tgz'.format(solr_version)
dist_path = 'lucene/solr/{0}/{1}'.format(solr_version, tarball)
download_url = urljoin('http://archive.apache.org/dist/', dist_path)
mirror_response = requests.get("http://www.apache.org/dyn/mirrors/mirrors.cgi/%s?asjson=1" % dist_path)
if mirror_response.ok:
mirror_data = mirror_response.json()
download_url = urljoin(mirror_data['preferred'], mirror_data['path_info'])
print(download_url)
|
#!/usr/bin/env python
# encoding: utf-8
from __future__ import absolute_import, print_function, unicode_literals
import sys
import requests
# Try to import urljoin from the Python 3 reorganized stdlib first:
try:
from urlparse.parse import urljoin
except ImportError:
from urlparse import urljoin
if len(sys.argv) != 2:
print('Usage: %s SOLR_VERSION' % sys.argv[0], file=sys.stderr)
sys.exit(1)
solr_version = sys.argv[1]
tarball = 'solr-{0}.tgz'.format(solr_version)
dist_path = 'lucene/solr/{0}/{1}'.format(solr_version, tarball)
download_url = urljoin('http://archive.apache.org/dist/', dist_path)
mirror_response = requests.get("http://www.apache.org/dyn/mirrors/mirrors.cgi/%s?asjson=1" % dist_path)
if mirror_response.ok:
mirror_data = mirror_response.json()
download_url = urljoin(mirror_data['preferred'], mirror_data['path_info'])
print(download_url)
|
bsd-3-clause
|
Python
|
f9b7fa7fd7a75c5fb85fae545ed05080a162e967
|
Add sphinx markup to test_input.py
|
raspearsy/bme590hrm
|
test_input.py
|
test_input.py
|
import pandas as pd
import numpy as np
from read_file import input_dataframe
# requires test .csv file containing 2 columns w/ 1 row string header and all below rows float/int
def test_ecg_dataframe_size():
""".. function:: test_ecg_dataframe_size()
Test size of dataframe.
"""
ecg_dataframe = input_dataframe("testfile1.csv")
assert ecg_dataframe.shape[1] == 2
def test_ecg_dataframe_type():
""".. function:: test_ecg_dataframe_type()
Test type of dataframe.
"""
ecg_dataframe = input_dataframe("testfile1.csv")
assert isinstance(ecg_dataframe, pd.DataFrame)
assert isinstance(ecg_dataframe.time[0], np.float64) or isinstance(ecg_dataframe.time[0], np.int64)
assert isinstance(ecg_dataframe.voltage[0], np.float64) or isinstance(ecg_dataframe.voltage[0], np.int64)
def test_exception_nofile():
""".. function:: test_exception_nofile()
Test that file can be found.
"""
try:
input_dataframe("")
assert False
except FileNotFoundError:
assert True
def test_exception_nonnumeric_values():
""".. function:: test_exception_nonnumeric_values()
Test for non-numeric values.
"""
try:
ecg_nonnumeric_dataframe = input_dataframe("test_non_numeric.csv")
pd.to_numeric(ecg_nonnumeric_dataframe['time'])
pd.to_numeric(ecg_nonnumeric_dataframe['voltage'])
assert False
except ValueError:
assert True
def test_exception_empty_file():
""".. function:: test_exception_empty_file()
Test if dataframe is empty.
"""
assert len(input_dataframe("test_data_empty.csv")) == 0
|
import pandas as pd
import numpy as np
from read_file import input_dataframe
# requires test .csv file containing 2 columns w/ 1 row string header and all below rows float/int
def test_ecg_dataframe_size():
ecg_dataframe = input_dataframe("testfile1.csv")
assert ecg_dataframe.shape[1] == 2
def test_ecg_dataframe_type():
ecg_dataframe = input_dataframe("testfile1.csv")
assert isinstance(ecg_dataframe, pd.DataFrame)
assert isinstance(ecg_dataframe.time[0], np.float64) or isinstance(ecg_dataframe.time[0], np.int64)
assert isinstance(ecg_dataframe.voltage[0], np.float64) or isinstance(ecg_dataframe.voltage[0], np.int64)
def test_exception_nofile():
try:
input_dataframe("")
assert False
except FileNotFoundError:
assert True
def test_exception_nonnumeric_values():
try:
ecg_nonnumeric_dataframe = input_dataframe("test_non_numeric.csv")
pd.to_numeric(ecg_nonnumeric_dataframe['time'])
pd.to_numeric(ecg_nonnumeric_dataframe['voltage'])
assert False
except ValueError:
assert True
def test_exception_empty_file():
assert len(input_dataframe("test_data_empty.csv")) == 0
|
mit
|
Python
|
6143888a2fa396b868ed44c3ceab765a95abea45
|
Check function must always use all parameters.
|
knub/skypyblue
|
tests/constraint_tests.py
|
tests/constraint_tests.py
|
from unittest import TestCase
from skypyblue.core import ConstraintSystem
from skypyblue.models import Method, Constraint, Strength
try:
from unittest.mock import MagicMock as Mock
except ImportError as e:
from mock import Mock
class ConstraintTests(TestCase):
def setUp(self):
self.cs = ConstraintSystem()
self.vars = self.cs.create_variables(["v1", "v2", "v3"], [4, 5, 3])
self.v1, self.v2, self.v3 = self.vars
m1_2 = Method(self.v1, self.v2, lambda x: x // 2)
m1_3 = Method(self.v1, self.v3, lambda x: x // 3)
self.cn = Constraint(lambda v1, v2, v3: True, Strength.STRONG, self.vars, [m1_3, m1_2])
self.cs.add_constraint(self.cn)
def tearDown(self):
pass
def test_adding_enforced_to_pplan(self):
self.cn.is_enforced = Mock(return_value = True)
self.assertIsNone(self.cn.mark)
mark = self.cs.marker.new_mark()
pplan = self.cn.add_to_pplan([], mark)
self.assertEqual([self.cn], pplan)
self.assertEqual(mark, self.cn.mark)
def test_adding_unenforced_to_pplan(self):
self.cn.is_enforced = Mock(return_value = False)
self.assertIsNone(self.cn.mark)
pplan = self.cn.add_to_pplan([], self.cs.marker.new_mark())
self.assertEqual([], pplan)
self.assertIsNone(self.cn.mark)
def test_adding_with_the_same_mark(self):
self.cn.is_enforced = Mock(return_value = True)
mark = self.cs.marker.new_mark()
self.cn.mark = mark
pplan = self.cn.add_to_pplan([], mark)
self.assertEqual([], pplan)
self.assertEqual(mark, self.cn.mark)
def test_adding_with_other_mark(self):
self.cn.is_enforced = Mock(return_value = True)
mark1 = self.cs.marker.new_mark()
mark2 = self.cs.marker.new_mark()
self.cn.mark = mark1
pplan = self.cn.add_to_pplan([], mark2)
self.assertEqual([self.cn], pplan)
self.assertEqual(mark2, self.cn.mark)
|
from unittest import TestCase
from skypyblue.core import ConstraintSystem
from skypyblue.models import Method, Constraint, Strength
try:
from unittest.mock import MagicMock as Mock
except ImportError as e:
from mock import Mock
class ConstraintTests(TestCase):
def setUp(self):
self.cs = ConstraintSystem()
self.vars = self.cs.create_variables(["v1", "v2", "v3"], [4,5,3])
self.v1, self.v2, self.v3 = self.vars
m1_2 = Method(self.v1, self.v2, lambda x: x // 2)
m1_3 = Method(self.v1, self.v3, lambda x: x // 3)
self.cn = Constraint(lambda: True, Strength.STRONG, self.vars, [m1_3, m1_2])
self.cs.add_constraint(self.cn)
def tearDown(self):
pass
def test_adding_enforced_to_pplan(self):
self.cn.is_enforced = Mock(return_value = True)
self.assertIsNone(self.cn.mark)
mark = self.cs.marker.new_mark()
pplan = self.cn.add_to_pplan([], mark)
self.assertEqual([self.cn], pplan)
self.assertEqual(mark, self.cn.mark)
def test_adding_unenforced_to_pplan(self):
self.cn.is_enforced = Mock(return_value = False)
self.assertIsNone(self.cn.mark)
pplan = self.cn.add_to_pplan([], self.cs.marker.new_mark())
self.assertEqual([], pplan)
self.assertIsNone(self.cn.mark)
def test_adding_with_the_same_mark(self):
self.cn.is_enforced = Mock(return_value = True)
mark = self.cs.marker.new_mark()
self.cn.mark = mark
pplan = self.cn.add_to_pplan([], mark)
self.assertEqual([], pplan)
self.assertEqual(mark, self.cn.mark)
def test_adding_with_other_mark(self):
self.cn.is_enforced = Mock(return_value = True)
mark1 = self.cs.marker.new_mark()
mark2 = self.cs.marker.new_mark()
self.cn.mark = mark1
pplan = self.cn.add_to_pplan([], mark2)
self.assertEqual([self.cn], pplan)
self.assertEqual(mark2, self.cn.mark)
|
mit
|
Python
|
f52a19fe28fa84b3d83ab20998fd678c795490dc
|
Remove generated code
|
davidmogar/lexgen,davidmogar/lexgen
|
lexgen/__init__.py
|
lexgen/__init__.py
|
__author__ = 'David'
|
mit
|
Python
|
|
aff805625f465421277447e5bd2a53a552dd175f
|
Fix assertion and error
|
charanpald/APGL
|
exp/util/MCEvaluator.py
|
exp/util/MCEvaluator.py
|
import numpy
import numpy.testing as nptst
class MCEvaluator(object):
"""
A class to evaluate machine learning performance for the matrix completion
problem.
"""
def __init__(self):
pass
@staticmethod
def meanSqError(testX, predX):
"""
Find the mean squared error between two sparse matrices testX and predX.
Note that the matrices must have nonzero elements in the same places.
"""
#Note that some predictions might be zero
assert numpy.in1d(predX.nonzero()[0], testX.nonzero()[0]).all()
assert numpy.in1d(predX.nonzero()[1], testX.nonzero()[1]).all()
diff = testX - predX
error = numpy.sum(diff.data**2)/testX.data.shape[0]
return error
@staticmethod
def rootMeanSqError(testX, predX):
"""
Find the root mean squared error between two sparse matrices testX and predX.
"""
return numpy.sqrt(MCEvaluator.meanSqError(testX, predX))
|
import numpy
import numpy.testing as nptst
class MCEvaluator(object):
"""
A class to evaluate machine learning performance for the matrix completion
problem.
"""
def __init__(self):
pass
@staticmethod
def meanSqError(testX, predX):
"""
Find the mean squared error between two sparse matrices testX and predX.
Note that the matrices must have nonzero elements in the same places.
"""
nptst.assert_array_equal(testX.nonzero()[0], predX.nonzero()[0])
nptst.assert_array_equal(testX.nonzero()[1], predX.nonzero()[1])
diff = testX - predX
if diff.data.shape[0] != 0:
return numpy.mean(diff.data**2)
else:
return 0
@staticmethod
def rootMeanSqError(testX, predX):
"""
Find the root mean squared error between two sparse matrices testX and predX.
"""
return numpy.sqrt(MCEvaluator.meanSqError(testX, predX))
|
bsd-3-clause
|
Python
|
b7256a0696331b5b0889708449ebb93ef90fab4a
|
add language and save function.
|
imwithye/git-ignore,imwithye/git-ignore
|
git-ignore.py
|
git-ignore.py
|
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <[email protected]>
#
# Distributed under terms of the MIT license.
import sys
def language(languages):
print languages
def save(filename):
print filename
# print usage
def usage():
print "usage: git ignore <subcommand>"
print
print "Available subcommands are:"
print " language Add gitignore files. Try use 'git ignore language Python C'"
print " save Save current .gitignore file as a template"
print " usage Show this help message and exit"
print " version Show version and exit"
print
print "http://github.com/imwithye/git-ignore"
print "git ignore, copyright Ciel <[email protected]>"
# print version
def version():
print "git ignore, version 0.1."
print
print "http://github.com/imwithye/git-ignore"
print "git ignore, copyright Ciel <[email protected]>"
# subcommand router
def select( argv ):
if argv[1] == "language":
language(argv[2:])
exit()
elif argv[1] == "save":
save(argv[2:])
exit()
elif argv[1] == "help" or argv[1] == "usage":
usage()
exit()
elif argv[1] == "version":
version()
exit()
else:
print "unknown subcommand"
usage()
exit()
if __name__ == "__main__":
if len(sys.argv)==1:
sys.argv.append("usage")
select(sys.argv)
|
#! /usr/bin/env python2
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 Ciel <[email protected]>
#
# Distributed under terms of the MIT license.
import sys
# print version
def version():
print "git ignore, version 0.1."
print
print "http://github.com/imwithye/git-ignore"
print "git ignore, copyright Ciel <[email protected]>"
# print usage
def usage():
print "usage: git ignore <subcommand>"
print
print "Available subcommands are:"
print " language Add gitignore files. Try use 'git ignore language Python C'"
print " save Save current .gitignore file as a template"
print " usage Show this help message and exit"
print " version Show version and exit"
print
print "http://github.com/imwithye/git-ignore"
print "git ignore, copyright Ciel <[email protected]>"
# subcommand router
def select( argv ):
if argv[1] == "language":
print "language"
elif argv[1] == "save":
print "save"
elif argv[1] == "help" or argv[1] == "usage":
usage()
exit()
elif argv[1] == "version":
version()
exit()
else:
print "unknown subcommand"
usage()
exit()
if __name__ == "__main__":
if len(sys.argv)==1:
sys.argv.append("usage")
select(sys.argv)
|
mit
|
Python
|
b6ab29eed44fa0b63043d1481d835a1b25418a22
|
Remove unused code
|
jcass77/mopidy,bencevans/mopidy,jmarsik/mopidy,jmarsik/mopidy,quartz55/mopidy,mopidy/mopidy,SuperStarPL/mopidy,woutervanwijk/mopidy,ZenithDK/mopidy,ali/mopidy,pacificIT/mopidy,mokieyue/mopidy,tkem/mopidy,dbrgn/mopidy,dbrgn/mopidy,rawdlite/mopidy,tkem/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,diandiankan/mopidy,bacontext/mopidy,jodal/mopidy,bacontext/mopidy,hkariti/mopidy,bencevans/mopidy,ZenithDK/mopidy,swak/mopidy,bencevans/mopidy,tkem/mopidy,diandiankan/mopidy,adamcik/mopidy,bacontext/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,woutervanwijk/mopidy,dbrgn/mopidy,hkariti/mopidy,SuperStarPL/mopidy,hkariti/mopidy,quartz55/mopidy,diandiankan/mopidy,adamcik/mopidy,vrs01/mopidy,adamcik/mopidy,priestd09/mopidy,swak/mopidy,jmarsik/mopidy,jcass77/mopidy,tkem/mopidy,bacontext/mopidy,ali/mopidy,priestd09/mopidy,swak/mopidy,bencevans/mopidy,mopidy/mopidy,pacificIT/mopidy,liamw9534/mopidy,ali/mopidy,priestd09/mopidy,kingosticks/mopidy,pacificIT/mopidy,mopidy/mopidy,vrs01/mopidy,jodal/mopidy,swak/mopidy,diandiankan/mopidy,kingosticks/mopidy,dbrgn/mopidy,rawdlite/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,quartz55/mopidy,hkariti/mopidy,ali/mopidy,mokieyue/mopidy,jodal/mopidy,mokieyue/mopidy,ZenithDK/mopidy,jcass77/mopidy,liamw9534/mopidy,SuperStarPL/mopidy,vrs01/mopidy,ZenithDK/mopidy,rawdlite/mopidy,rawdlite/mopidy,vrs01/mopidy,kingosticks/mopidy,quartz55/mopidy
|
tests/http/test_router.py
|
tests/http/test_router.py
|
from __future__ import unicode_literals
import unittest
import mock
from mopidy import http
class TestRouter(http.Router):
name = 'test'
class TestRouterMissingName(http.Router):
pass
class HttpRouterTest(unittest.TestCase):
def setUp(self):
self.config = {
'http': {
'hostname': '127.0.0.1',
'port': 6680,
'static_dir': None,
'zeroconf': '',
}
}
self.core = mock.Mock()
def test_keeps_reference_to_config_and_core(self):
router = TestRouter(self.config, self.core)
self.assertIs(router.config, self.config)
self.assertIs(router.core, self.core)
def test_undefined_name_raises_error(self):
with self.assertRaises(ValueError):
TestRouterMissingName(self.config, self.core)
def test_undefined_request_handlers_raises_error(self):
router = TestRouter(self.config, self.core)
with self.assertRaises(NotImplementedError):
router.get_request_handlers()
|
from __future__ import unicode_literals
import os
import unittest
import mock
from mopidy import http
class TestRouter(http.Router):
name = 'test'
static_file_path = os.path.join(os.path.dirname(__file__), 'static')
class TestRouterMissingName(http.Router):
pass
class HttpRouterTest(unittest.TestCase):
def setUp(self):
self.config = {
'http': {
'hostname': '127.0.0.1',
'port': 6680,
'static_dir': None,
'zeroconf': '',
}
}
self.core = mock.Mock()
def test_keeps_reference_to_config_and_core(self):
router = TestRouter(self.config, self.core)
self.assertIs(router.config, self.config)
self.assertIs(router.core, self.core)
def test_undefined_name_raises_error(self):
with self.assertRaises(ValueError):
TestRouterMissingName(self.config, self.core)
def test_undefined_request_handlers_raises_error(self):
router = TestRouter(self.config, self.core)
with self.assertRaises(NotImplementedError):
router.get_request_handlers()
|
apache-2.0
|
Python
|
9153fc157866b071b3a4322a5e68171f82abe6fd
|
reduce redundancy in settings.py
|
uw-it-aca/pivot,uw-it-aca/pivot,uw-it-aca/pivot,uw-it-aca/pivot,uw-it-aca/pivot
|
docker/settings.py
|
docker/settings.py
|
from .base_settings import *
import os
from django.urls import reverse_lazy
ALLOWED_HOSTS = ['*']
if os.getenv("ENV") == "localdev":
DEBUG = True
else:
DEBUG = False
INSTALLED_APPS += [
'pivot', 'templatetag_handlebars', 'compressor',
'django.contrib.humanize',
'django_user_agents',
]
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': 'pivot/bundles/',
'STATS_FILE': os.path.join(BASE_DIR, 'pivot', 'static', 'webpack-stats.json'),
}
}
CSV_ROOT = os.path.join(BASE_DIR, "data/")
STATIC_ROOT = 'static/'
COMPRESS_ROOT = 'static/'
COMPRESS_PRECOMPILERS = (('text/less', 'lessc {infile} {outfile}'),)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
TEMPLATES[0]['OPTIONS']['context_processors'].append('pivot.context_processors.google_analytics')
GOOGLE_ANALYTICS_KEY = os.getenv("GOOGLE_ANALYTICS_KEY", default=" ")
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
|
from .base_settings import *
import os
from django.urls import reverse_lazy
# BASE_DIR = os.path.dirname(os.path.dirname(__file__))
ALLOWED_HOSTS = ['*']
if os.getenv("ENV") == "localdev":
DEBUG = True
else:
DEBUG = False
SECRET_KEY = os.getenv('DJANGO_SECRET_KEY', 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx')
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pivot', 'templatetag_handlebars', 'compressor',
]
INSTALLED_APPS += [
'django_prometheus',
'django.contrib.humanize',
'django_user_agents',
# 'supporttools',
# 'rc_django',
]
INSTALLED_APPS += ['uw_saml',]
WEBPACK_LOADER = {
'DEFAULT': {
'BUNDLE_DIR_NAME': 'pivot/bundles/',
'STATS_FILE': os.path.join(BASE_DIR, 'pivot', 'static', 'webpack-stats.json'),
}
}
CSV_ROOT = os.path.join(BASE_DIR, "data/")
STATIC_ROOT = 'static/'
COMPRESS_ROOT = 'static/'
COMPRESS_PRECOMPILERS = (('text/less', 'lessc {infile} {outfile}'),)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'pivot.context_processors.google_analytics',
# 'supporttools.context_processors.supportools_globals',
],
},
},
]
WSGI_APPLICATION = 'project.wsgi.application'
GOOGLE_ANALYTICS_KEY = os.getenv("GOOGLE_ANALYTICS_KEY", default=" ")
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
|
apache-2.0
|
Python
|
094ff00f180926d44e2102f119beb33354fc7122
|
Remove unused import
|
wintersandroid/tvrenamr,ghickman/tvrenamr
|
tests/base.py
|
tests/base.py
|
import logging
import os
import shutil
from tvrenamr.config import Config
from tvrenamr.main import File, TvRenamr
logging.disable(logging.CRITICAL)
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = os.path.abspath(os.path.dirname(__file__))
def join_path(path):
return os.path.join(self.path, path)
self.files = join_path('files')
self.subfolder = join_path('subfolder')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not os.path.exists(self.files):
os.mkdir(self.files)
if not os.path.exists(self.subfolder):
os.mkdir(self.subfolder)
for path in (self.files, self.subfolder):
self.build_files(path)
# instantiate tvr
self.config = Config()
self.config.config['defaults']['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config, cache=False)
self._file = File('The Big Bang Theory', '3', ['01'], '.mp4')
self._file.episodes[0].title = 'The Electric Can Opener Fluctuation'
def teardown(self):
shutil.rmtree(self.files)
shutil.rmtree(self.subfolder)
def build_files(self, path):
# build the file list
with open(os.path.join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
filepath = os.path.abspath(os.path.join(path, fn.strip()))
with open(filepath, 'w') as f:
f.write('')
|
import logging
import os
import shutil
from tvrenamr.config import Config
from tvrenamr.main import File, TvRenamr
from . import mock_requests # noqa
logging.disable(logging.CRITICAL)
class BaseTest(object):
def setup(self):
# absolute path to the file is pretty useful
self.path = os.path.abspath(os.path.dirname(__file__))
def join_path(path):
return os.path.join(self.path, path)
self.files = join_path('files')
self.subfolder = join_path('subfolder')
self.organised = join_path('organised')
self.renamed = join_path('renamed')
# if `file` isn't there, make it
if not os.path.exists(self.files):
os.mkdir(self.files)
if not os.path.exists(self.subfolder):
os.mkdir(self.subfolder)
for path in (self.files, self.subfolder):
self.build_files(path)
# instantiate tvr
self.config = Config()
self.config.config['defaults']['renamed'] = self.files
self.tv = TvRenamr(self.files, self.config, cache=False)
self._file = File('The Big Bang Theory', '3', ['01'], '.mp4')
self._file.episodes[0].title = 'The Electric Can Opener Fluctuation'
def teardown(self):
shutil.rmtree(self.files)
shutil.rmtree(self.subfolder)
def build_files(self, path):
# build the file list
with open(os.path.join(self.path, 'file_list'), 'r') as f:
for fn in f.readlines():
filepath = os.path.abspath(os.path.join(path, fn.strip()))
with open(filepath, 'w') as f:
f.write('')
|
mit
|
Python
|
0c0a1d0ec480c7df9dd8821d40af7791e46db453
|
Fix for wrong test: create_semester_accounts
|
agdsn/pycroft,agdsn/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,lukasjuhrich/pycroft,agdsn/pycroft,agdsn/pycroft,agdsn/pycroft
|
tests/lib/test_finance.py
|
tests/lib/test_finance.py
|
# Copyright (c) 2013 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from tests import OldPythonTestCase
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester, import_csv
from pycroft.lib.config import get, config
from pycroft.model.finance import FinanceAccount, Journal, JournalEntry
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date, datetime
class Test_010_Semester(OldPythonTestCase):
def test_0010_create_semester_accounts(self):
"""
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName",
2500, 1500,
date(2013, 9, 1),
date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
new_created_account = FinanceAccount.q.filter(
FinanceAccount.semester == new_semester,
FinanceAccount.tag == account["tag"]).first()
self.assertEqual(new_created_account.name, account["name"])
self.assertEqual(new_created_account.type, account["type"])
session.session.commit()
|
# Copyright (c) 2013 The Pycroft Authors. See the AUTHORS file.
# This file is part of the Pycroft project and licensed under the terms of
# the Apache License, Version 2.0. See the LICENSE file for details.
from tests import OldPythonTestCase
__author__ = 'felix_kluge'
from pycroft.lib.finance import create_semester
from pycroft.lib.config import get,config
from pycroft.model.finance import FinanceAccount
from sqlalchemy.orm import backref
from pycroft.model import session
import time
from datetime import date
class Test_010_Semester(OldPythonTestCase):
def test_0010_create_semester_accounts(self):
"""
This test should verify that all semester-related finance-accounts have
been created.
"""
new_semester = create_semester("NewSemesterName", 2500, 1500, date(2013, 9, 1), date(2014, 2, 1))
config._configpath = "../tests/example/test_config.json"
for account in config["finance"]["semester_accounts"]:
for new_account in new_semester.accounts:
if(new_account.tag == account["tag"]):
new_account_equivalent = new_account
compare_account = FinanceAccount(type=account["type"],name=account["name"],semester=new_semester,tag=account["tag"])
self.assertEqual(new_account_equivalent.name, compare_account.name)
self.assertEqual(new_account_equivalent.type, compare_account.type)
|
apache-2.0
|
Python
|
38de328a3899b96542f702a51eb180efb47a5556
|
Fix town regroup bugs
|
credis/geo-django-fla
|
geodjangofla/management/commands/fixtowns.py
|
geodjangofla/management/commands/fixtowns.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This code is free software; you can redistribute it and/or modify it
# under the terms of the BSD License (see the file COPYING included with
# the distribution).
import os
from optparse import make_option
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.gdal import DataSource
from django.core.management.base import BaseCommand, CommandError
from geodjangofla import settings
from geodjangofla import models
from geodjangofla.utils import dbf
class Command(BaseCommand):
help = 'Regroupe les arrondissements en une seule commune'
def handle(self, *args, **options):
limits = {}
for commune in models.Commune.objects.filter(
nom_comm__endswith='-ARRONDISSEMENT').all():
items = commune.nom_comm.split('--')
if len(items) < 3:
items = commune.nom_comm.split('-')
nb_ardt = items[-2]
nom_comm = "-".join(items[0:-2])
if nom_comm.endswith('-'):
nom_comm = nom_comm[:-1]
key = (nom_comm, commune.insee_com[0:2])
if key not in limits:
limits[key] = commune.limite
else:
limits[key] = limits[key].union(commune.limite)
if nb_ardt[0:2] != '1E':
commune.delete()
continue
commune.nom_comm = nom_comm
commune.save()
for nom_comm, dpt in limits:
print nom_comm, dpt
com = models.Commune.objects.get(nom_comm__startswith=nom_comm,
insee_com__startswith=dpt)
com.limite = limits[(nom_comm, dpt)]
com.save()
self.stdout.write('Regroup done\n')
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This code is free software; you can redistribute it and/or modify it
# under the terms of the BSD License (see the file COPYING included with
# the distribution).
import os
from optparse import make_option
from django.contrib.gis.geos import GEOSGeometry
from django.contrib.gis.gdal import DataSource
from django.core.management.base import BaseCommand, CommandError
from geodjangofla import settings
from geodjangofla import models
from geodjangofla.utils import dbf
class Command(BaseCommand):
help = 'Regroupe les arrondissements en une seule commune'
def handle(self, *args, **options):
for commune in models.Commune.objects.filter(
nom_comm__endswith='-ARRONDISSEMENT').all():
items = commune.nom_comm.split('--')
if len(items) < 3:
items = commune.nom_comm.split('-')
nb_ardt = items[-2]
if nb_ardt[0:2] != '1E':
commune.delete()
continue
commune.nom_comm = "-".join(items[0:-2])
if commune.nom_comm.endswith('-'):
commune.nom_comm = commune.nom_comm[:-1]
commune.save()
self.stdout.write('Regroup done\n')
|
bsd-3-clause
|
Python
|
928a428a79e574edc9e00416335eccba139dffdd
|
change csv lists to python sets
|
Guokr1991/ProstateSensitivityAnalysis
|
nn27.py
|
nn27.py
|
'''
nn27.py - create nearest neighbor sets for the 27 prostate regions
'''
__author__ = 'Mark L. Palmeri'
__email__ = '[email protected]'
__date__ = '2014-01-05'
r1p = set(['2p', '2a', '1a', '7p', '7a', '3p'])
r2p = set(['1p', '1a', '2a', '4p'])
r3p = set(['4p', '3a', '9p', '5p', '1p'])
r4p = set(['4a', '3p', '3a', '2p', '6p'])
r5p = set(['6p', '11p', '5a', '15as', '3p', '6p'])
r6p = set(['5p', '6a', '5a', '4p'])
r7p = set(['1p', '8p', '7a', '8a', '1a', '9p'])
r8p = set(['7p', '8a', '10p', '7a'])
r9p = set(['10p', '3p', '9a', '7a', '11p'])
r10p = set(['9p', '10a', '9a', '8p', '12p'])
r11p = set(['12p', '5p', '11a', '15as', '12a', '9p'])
r12p = set(['11p', '12a', '11a', '10p'])
r1a = set(['13as', '2a', '7a', '3a'])
r2a = set(['1a', '13as', '2p', '4a'])
r3a = set(['5a', '4p', '3p', '14as', '5a', '1p'])
r4a = set(['3a', '14as', '4p', '2a', '6a'])
r5a = set(['15as', '6a', '3a', '5p'])
r6a = set(['5a', '15as', '6p', '4a'])
r7a = set(['1a', '13as', '8a', '7p', '9a'])
r8a = set(['8p', '7a', '13as', '10a'])
r9a = set(['10a', '14as', '7a', '11a'])
r10a = set(['10p', '8a', '12a', '9a'])
r11a = set(['12a', '15as', '12p', '9a'])
r12a = set(['11a', '15as', '12p', '10a'])
r13as = set(['1a', '7a', '8a', '2a', '14as'])
r14as = set(['3a', '9a', '4a', '10a', '13as', '15a'])
r15as = set(['6a', '12a', '5a', '11a', '14as'])
|
1p,2p,2a,1a,7p,7a,3p
2p,1p,1a,2a,4p
3p,4p,3a,9p,5p,1p,
4p,4a,3p,3a,2p,6p,
5p,6p,11p,5a,15as,3p,6p
6p,5p,6a,5a,4p,,
7p,1p,8p,7a,8a,1a,9p
8p,7p,8a,10p,7a,
9p,10p,3p,9a,7a,11p,
10p,9p,10a,9a,8p,12p,
11p,12p,5p,11a,15as,12a,9p
12p,11p,12a,11a,10p,
1a,13as,2a,7a,3a,
2a,1a,13as,2p,4a,
3a,5a,4p,3p,14as,5a,1p
4a,3a,14as,4p,2a,6a,
5a,15as,6a,3a,5p,
6a,5a,15as,6p,4a,
7a,1a,13as,8a,7p,9a,
8a,8p,7a,13as,10a,
9a,10a,14as,7a,11a
10a,10p,8a,12a,9a
11a,12a,15as,12p,9a
12a,11a,15as,12p,10a
13as,1a,7a,8a,2a,14as,
14as,3a,9a,4a,10a,13as,15a
15as,6a,12a,5a,11a,14as
|
apache-2.0
|
Python
|
211aff9fd57775a87409e1eab50f8803a9efe9f7
|
add imports for chart and effects
|
J216/gimp_be,J216/gimp_be
|
gimp_be/draw/__init__.py
|
gimp_be/draw/__init__.py
|
from gimp_be.draw.draw import *
from gimp_be.draw.tree import *
from gimp_be.draw.polygon import *
from gimp_be.draw.effects import *
from gimp_be.draw.chart import *
|
from gimp_be.draw.draw import *
from gimp_be.draw.tree import *
from gimp_be.draw.polygon import *
|
mit
|
Python
|
d362d3770b999293db854a08e9627cfb96557544
|
disable pagination
|
tschaume/global_gitfeed_api,tschaume/global_gitfeed_api
|
api/__init__.py
|
api/__init__.py
|
import os, bcrypt
from eve import Eve
from flask.ext.bootstrap import Bootstrap
from eve_docs import eve_docs
from eve.auth import BasicAuth
class BCryptAuth(BasicAuth):
def check_auth(self, username, password, allowed_roles, resource, method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
return (
account and
bcrypt.hashpw(password, account['password']) == account['password']
)
accounts = {
'public_methods': [],
'public_item_methods': [],
'schema': {
'username': {
'type': 'string',
'minlength': 5,
'required': True,
'unique': True
},
'password': {
'type': 'string',
'required': True
}
}
}
gitcommits = {
'datasource': {
'default_sort': [('datetime',1)],
},
'schema': {
'project': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'message': {
'type': 'string',
'minlength': 5,
'required': True,
},
'datetime': {
'type': 'datetime',
'required': True,
},
'sha1': {
'type': 'string',
'required': True,
},
'deletions': {
'type': 'integer',
'required': True,
},
'lines': {
'type': 'integer',
'required': True,
},
'insertions': {
'type': 'integer',
'required': True,
},
'files': {
'type': 'integer',
'required': True,
},
}
}
settings = {
#'SERVER_NAME': '127.0.0.1:5000', # dev
'SERVER_NAME': 'api.the-huck.com', # prod
'MONGO_HOST': 'localhost',
'MONGO_PORT': '27017',
#'MONGO_USERNAME': 'user',
#'MONGO_PASSWORD': 'user',
'MONGO_DBNAME': 'apieve',
'RESOURCE_METHODS': ['GET', 'POST', 'DELETE'],
'ITEM_METHODS': ['GET', 'PATCH', 'PUT', 'DELETE'],
'PUBLIC_METHODS': ['GET'],
'PUBLIC_ITEM_METHODS': ['GET'],
'CACHE_CONTROL': 'max-age=0',
'CACHE_EXPIRES': 0,
'PAGINATION': False,
'DOMAIN': {
'accounts': accounts,
'gitcommits': gitcommits
}
}
app = Eve(auth=BCryptAuth, settings=settings)
Bootstrap(app)
app.register_blueprint(eve_docs, url_prefix='/docs')
|
import os, bcrypt
from eve import Eve
from flask.ext.bootstrap import Bootstrap
from eve_docs import eve_docs
from eve.auth import BasicAuth
class BCryptAuth(BasicAuth):
def check_auth(self, username, password, allowed_roles, resource, method):
accounts = app.data.driver.db['accounts']
account = accounts.find_one({'username': username})
return (
account and
bcrypt.hashpw(password, account['password']) == account['password']
)
accounts = {
'public_methods': [],
'public_item_methods': [],
'schema': {
'username': {
'type': 'string',
'minlength': 5,
'required': True,
'unique': True
},
'password': {
'type': 'string',
'required': True
}
}
}
gitcommits = {
'datasource': {
'default_sort': [('datetime',1)],
},
'schema': {
'project': {
'type': 'string',
'minlength': 3,
'maxlength': 50,
'required': True,
},
'message': {
'type': 'string',
'minlength': 5,
'required': True,
},
'datetime': {
'type': 'datetime',
'required': True,
},
'sha1': {
'type': 'string',
'required': True,
},
'deletions': {
'type': 'integer',
'required': True,
},
'lines': {
'type': 'integer',
'required': True,
},
'insertions': {
'type': 'integer',
'required': True,
},
'files': {
'type': 'integer',
'required': True,
},
}
}
settings = {
#'SERVER_NAME': '127.0.0.1:5000', # dev
'SERVER_NAME': 'api.the-huck.com', # prod
'MONGO_HOST': 'localhost',
'MONGO_PORT': '27017',
#'MONGO_USERNAME': 'user',
#'MONGO_PASSWORD': 'user',
'MONGO_DBNAME': 'apieve',
'RESOURCE_METHODS': ['GET', 'POST', 'DELETE'],
'ITEM_METHODS': ['GET', 'PATCH', 'PUT', 'DELETE'],
'PUBLIC_METHODS': ['GET'],
'PUBLIC_ITEM_METHODS': ['GET'],
'CACHE_CONTROL': 'max-age=0',
'CACHE_EXPIRES': 0,
'DOMAIN': {
'accounts': accounts,
'gitcommits': gitcommits
}
}
app = Eve(auth=BCryptAuth, settings=settings)
Bootstrap(app)
app.register_blueprint(eve_docs, url_prefix='/docs')
|
mit
|
Python
|
248ad807e98aa379f24cb41b6fcf0af753c4f169
|
Test _assert_eq_nan with scalars and 0-D arrays
|
dask-image/dask-ndmeasure
|
tests/test__test_utils.py
|
tests/test__test_utils.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask.array as da
import dask_ndmeasure._test_utils
nan = np.nan
@pytest.mark.parametrize("match, a, b", [
[True] + 2 * [np.array(2)[()]],
[True] + 2 * [np.array(nan)[()]],
[True] + 2 * [np.array(2)],
[True] + 2 * [np.array(nan)],
[True] + [np.array(1.0), da.ones(tuple(), chunks=tuple())],
[True] + 2 * [np.random.randint(10, size=(15, 16))],
[True] + 2 * [da.random.randint(10, size=(15, 16), chunks=(5, 5))],
[True, np.array([2, nan]), np.array([2, nan])],
[False, np.array([2, nan]), np.array([3, nan])],
[False, np.array([2, nan]), np.array([2, 3])],
[True, np.array([2, 3]), da.from_array(np.array([2, 3]), chunks=1)],
[True, np.array([nan]), da.from_array(np.array([nan]), chunks=1)],
[False, np.array([2]), da.from_array(np.array([nan]), chunks=1)],
[False, np.array([nan]), da.from_array(np.array([2]), chunks=1)],
[True, np.array([2, nan]), da.from_array(np.array([2, nan]), chunks=1)],
[False, np.array([2, nan]), da.from_array(np.array([3, nan]), chunks=1)],
[False, np.array([2, nan]), da.from_array(np.array([2, 3]), chunks=1)],
])
def test_assert_eq_nan(match, a, b):
if match:
dask_ndmeasure._test_utils._assert_eq_nan(a, b)
else:
with pytest.raises(AssertionError):
dask_ndmeasure._test_utils._assert_eq_nan(a, b)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import pytest
import numpy as np
import dask.array as da
import dask_ndmeasure._test_utils
nan = np.nan
@pytest.mark.parametrize("match, a, b", [
[True] + 2 * [np.random.randint(10, size=(15, 16))],
[True] + 2 * [da.random.randint(10, size=(15, 16), chunks=(5, 5))],
[True, np.array([2, nan]), np.array([2, nan])],
[False, np.array([2, nan]), np.array([3, nan])],
[False, np.array([2, nan]), np.array([2, 3])],
[True, np.array([2, 3]), da.from_array(np.array([2, 3]), chunks=1)],
[True, np.array([nan]), da.from_array(np.array([nan]), chunks=1)],
[False, np.array([2]), da.from_array(np.array([nan]), chunks=1)],
[False, np.array([nan]), da.from_array(np.array([2]), chunks=1)],
[True, np.array([2, nan]), da.from_array(np.array([2, nan]), chunks=1)],
[False, np.array([2, nan]), da.from_array(np.array([3, nan]), chunks=1)],
[False, np.array([2, nan]), da.from_array(np.array([2, 3]), chunks=1)],
])
def test_assert_eq_nan(match, a, b):
if match:
dask_ndmeasure._test_utils._assert_eq_nan(a, b)
else:
with pytest.raises(AssertionError):
dask_ndmeasure._test_utils._assert_eq_nan(a, b)
|
bsd-3-clause
|
Python
|
8533400ecf69aac64c6210cb9fca1dfe90d0e6b7
|
work around hanging issue on Windows (#27)
|
osrf/osrf_pycommon
|
tests/test_code_format.py
|
tests/test_code_format.py
|
import os
import sys
import subprocess
def test_flake8():
"""Test source code for pyFlakes and PEP8 conformance"""
this_dir = os.path.dirname(os.path.abspath(__file__))
source_dir = os.path.join(this_dir, '..', 'osrf_pycommon')
cmd = ['flake8', source_dir, '--count']
# work around for https://gitlab.com/pycqa/flake8/issues/179
cmd.extend(['--jobs', '1'])
if sys.version_info < (3,4):
# Unless Python3, skip files with new syntax, like `yield from`
cmd.append('--exclude=*async_execute_process_asyncio/impl.py')
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
print(stdout)
assert p.returncode == 0, \
"Command '{0}' returned non-zero exit code '{1}'".format(' '.join(cmd), p.returncode)
|
import os
import sys
import subprocess
def test_flake8():
"""Test source code for pyFlakes and PEP8 conformance"""
this_dir = os.path.dirname(os.path.abspath(__file__))
source_dir = os.path.join(this_dir, '..', 'osrf_pycommon')
cmd = ['flake8', source_dir, '--count']
if sys.version_info < (3,4):
# Unless Python3, skip files with new syntax, like `yield from`
cmd.append('--exclude=*async_execute_process_asyncio/impl.py')
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
stdout, stderr = p.communicate()
print(stdout)
assert p.returncode == 0, \
"Command '{0}' returned non-zero exit code '{1}'".format(' '.join(cmd), p.returncode)
|
apache-2.0
|
Python
|
63483418d2169cde88649a29754846d30c6cb5c4
|
Improve fuel.downloaders.basic unit tests
|
codeaudit/fuel,chrishokamp/fuel,glewis17/fuel,codeaudit/fuel,janchorowski/fuel,udibr/fuel,harmdevries89/fuel,dribnet/fuel,harmdevries89/fuel,mjwillson/fuel,dmitriy-serdyuk/fuel,dhruvparamhans/fuel,vdumoulin/fuel,aalmah/fuel,capybaralet/fuel,orhanf/fuel,bouthilx/fuel,laurent-dinh/fuel,mjwillson/fuel,aalmah/fuel,EderSantana/fuel,glewis17/fuel,mila-udem/fuel,dhruvparamhans/fuel,rizar/fuel,dwf/fuel,hantek/fuel,jbornschein/fuel,rodrigob/fuel,orhanf/fuel,ejls/fuel,jbornschein/fuel,udibr/fuel,hantek/fuel,capybaralet/fuel,rodrigob/fuel,lamblin/fuel,lamblin/fuel,dwf/fuel,markusnagel/fuel,dmitriy-serdyuk/fuel,rizar/fuel,mila-udem/fuel,laurent-dinh/fuel,dribnet/fuel,EderSantana/fuel,chrishokamp/fuel,vdumoulin/fuel,bouthilx/fuel,janchorowski/fuel,ejls/fuel,markusnagel/fuel
|
tests/test_downloaders.py
|
tests/test_downloaders.py
|
import hashlib
import os
from fuel.downloaders.base import download, default_manager
iris_url = ('https://archive.ics.uci.edu/ml/machine-learning-databases/' +
'iris/iris.data')
iris_hash = "6f608b71a7317216319b4d27b4d9bc84e6abd734eda7872b71a458569e2656c0"
def test_download_no_path():
download(iris_url)
with open('iris.data', 'r') as f:
assert hashlib.sha256(f.read()).hexdigest() == iris_hash
os.remove('iris.data')
def test_download_path_is_dir():
os.mkdir('tmp')
download(iris_url, 'tmp')
with open('tmp/iris.data', 'r') as f:
assert hashlib.sha256(f.read()).hexdigest() == iris_hash
os.remove('tmp/iris.data')
os.rmdir('tmp')
def test_download_path_is_file():
download(iris_url, 'iris_tmp.data')
with open('iris_tmp.data', 'r') as f:
assert hashlib.sha256(f.read()).hexdigest() == iris_hash
os.remove('iris_tmp.data')
def test_default_manager_save():
class DummyArgs:
pass
args = DummyArgs()
args.directory = '.'
args.clear = False
default_manager([iris_url], ['iris.data'])(args)
with open('iris.data', 'r') as f:
assert hashlib.sha256(f.read()).hexdigest() == iris_hash
os.remove('iris.data')
def test_default_manager_clear():
open('tmp.data', 'a').close()
class DummyArgs:
pass
args = DummyArgs()
args.directory = '.'
args.clear = True
default_manager([None], ['tmp.data'])(args)
assert not os.path.isfile('tmp.data')
|
import os
from fuel.downloaders.base import download, default_manager
iris_url = ('https://archive.ics.uci.edu/ml/machine-learning-databases/' +
'iris/iris.data')
iris_first_line = '5.1,3.5,1.4,0.2,Iris-setosa\n'
def test_download_no_path():
download(iris_url)
with open('iris.data') as f:
first_line = f.readline()
assert first_line == iris_first_line
os.remove('iris.data')
def test_download_path_is_dir():
os.mkdir('tmp')
download(iris_url, 'tmp')
with open('tmp/iris.data') as f:
first_line = f.readline()
assert first_line == iris_first_line
os.remove('tmp/iris.data')
os.rmdir('tmp')
def test_download_path_is_file():
download(iris_url, 'iris_tmp.data')
with open('iris_tmp.data') as f:
first_line = f.readline()
assert first_line == iris_first_line
os.remove('iris_tmp.data')
def test_default_manager_save():
class DummyArgs:
pass
args = DummyArgs()
args.directory = '.'
args.clear = False
default_manager([iris_url], ['iris.data'])(args)
with open('iris.data') as f:
first_line = f.readline()
assert first_line == iris_first_line
os.remove('iris.data')
def test_default_manager_clear():
open('tmp.data', 'a').close()
class DummyArgs:
pass
args = DummyArgs()
args.directory = '.'
args.clear = True
default_manager([None], ['tmp.data'])(args)
assert not os.path.isfile('tmp.data')
|
mit
|
Python
|
169a8612eb06410a5ae7e110227f7bea010d2ba9
|
Make stdout and stderr into strings.
|
YPlan/treepoem
|
tests/test_ghostscript.py
|
tests/test_ghostscript.py
|
import subprocess
import unittest
class GhostscriptTest(unittest.TestCase):
def test_installed(self):
process = subprocess.Popen(
['gs', '--version'],
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = process.communicate()
self.assertEqual(process.returncode, 0)
self.assertEqual(str(stderr), "")
self.assertRegexpMatches(str(stdout), r'9\.\d\d')
|
import subprocess
import unittest
class GhostscriptTest(unittest.TestCase):
def test_installed(self):
process = subprocess.Popen(
['gs', '--version'],
stdin=None,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
stdout, stderr = process.communicate()
self.assertEqual(process.returncode, 0)
self.assertEqual(stderr, "")
self.assertRegexpMatches(stdout, r'9\.\d\d')
|
mit
|
Python
|
ba37080645153d66a8ae1c8df10312806999f8ec
|
Add use of fmisid to tests.
|
kipe/fmi
|
tests/test_observation.py
|
tests/test_observation.py
|
import unittest
from datetime import datetime
from dateutil.tz import tzutc
from fmi import FMI
class TestObservations(unittest.TestCase):
def test_lappeenranta(self):
now = datetime.now(tz=tzutc())
f = FMI(place='Lappeenranta')
for point in f.observations():
assert point.time < now
assert isinstance(point.temperature, float)
for point in f.observations(fmisid=101237):
assert point.time < now
assert isinstance(point.temperature, float)
|
import unittest
from datetime import datetime
from dateutil.tz import tzutc
from fmi import FMI
class TestObservations(unittest.TestCase):
def test_lappeenranta(self):
now = datetime.now(tz=tzutc())
f = FMI(place='Lappeenranta')
for point in f.observations():
assert point.time < now
assert isinstance(point.temperature, float)
|
mit
|
Python
|
9369f72c4fe9a544e24f10a1db976589dc013424
|
Add dependency on apache module
|
kkampardi/Plinth,harry-7/Plinth,kkampardi/Plinth,harry-7/Plinth,harry-7/Plinth,kkampardi/Plinth,kkampardi/Plinth,harry-7/Plinth,kkampardi/Plinth,harry-7/Plinth
|
plinth/modules/sso/__init__.py
|
plinth/modules/sso/__init__.py
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security', 'apache']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
|
#
# This file is part of Plinth.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Plinth module to configure Single Sign On services.
"""
from plinth import actions
from django.utils.translation import ugettext_lazy as _
version = 1
is_essential = True
depends = ['security']
name = _('Single Sign On')
managed_packages = ['libapache2-mod-auth-pubtkt', 'openssl', 'python3-openssl']
def setup(helper, old_version=None):
"""Install the required packages"""
helper.install(managed_packages)
actions.superuser_run('auth-pubtkt', ['enable-mod'])
actions.superuser_run('auth-pubtkt', ['create-key-pair'])
|
agpl-3.0
|
Python
|
3de616fa0bfc8e075e4c7aa4a5e8e9108e168f7c
|
fix bugs
|
xmaruto/mcord,zdw/xos,cboling/xos,wathsalav/xos,wathsalav/xos,opencord/xos,zdw/xos,opencord/xos,open-cloud/xos,xmaruto/mcord,open-cloud/xos,cboling/xos,cboling/xos,cboling/xos,jermowery/xos,wathsalav/xos,xmaruto/mcord,cboling/xos,wathsalav/xos,open-cloud/xos,jermowery/xos,jermowery/xos,zdw/xos,jermowery/xos,zdw/xos,xmaruto/mcord,opencord/xos
|
plstackapi/planetstack/urls.py
|
plstackapi/planetstack/urls.py
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from plstackapi.planetstack.views.roles import RoleListCreate, RoleRetrieveUpdateDestroy
from plstackapi.planetstack.views.sites import SiteListCreate, SiteRetrieveUpdateDestroy
from plstackapi.planetstack.views.api_root import api_root
from plstackapi.planetstack.models import Site
from rest_framework import generics
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'planetstack.views.home', name='home'),
# url(r'^planetstack/', include('planetstack.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^plstackapi/$', api_root),
url(r'^plstackapi/roles/$', RoleListCreate.as_view(), name='role-list'),
url(r'^plstackapi/roles/(?P<pk>[a-zA-Z0-9]+)/$', RoleRetrieveUpdateDestroy.as_view(), name='role-detail'),
url(r'^plstackapi/sites/$', SiteListCreate.as_view(), name='site-list'),
url(r'^plstackapi/sites/(?P<pk>[0-9]+)/$', SiteRetrieveUpdateDestroy.as_view(), name='site-detail'),
#url(r'^plstackapi/slices/$', views.SliceList.as_view(), name='slice-list'),
#url(r'^plstackapi/slices/(?P<pk>[0-9]+)/$', views.SliceDetail.as_view(), name='slice-detail'),
#url(r'^plstackapi/slivers/$', views.SliverList.as_view()),
#url(r'^plstackapi/slivers/(?P<pk>[0-9]+)/$', views.SliverDetail.as_view()),
#url(r'^plstackapi/nodes/$', views.NodeList.as_view(), name='node-list'),
#url(r'^plstackapi/nodes/(?P<pk>[0-9]+)/$', views.NodeDetail.as_view(), name='node-detail'),
#url(r'^plstackapi/deploymentnetworks/$', views.DeploymentNetworkList.as_view(), name='deploymentnetwork-list'),
#url(r'^plstackapi/deploymentnetworks/(?P<pk>[0-9]+)/$', views.DeploymentNetworkDetail.as_view(), name='deploymentnetwork-detail'),
#url(r'^plstackapi/sitedeploymentnetworks/$', views.SiteDeploymentNetworkList.as_view(), name='sitedeploymentnetwork-list'),
#url(r'^plstackapi/sitedeploymentnetworks/(?P<pk>[0-9]+)/$', views.SiteDeploymentNetworkDetail.as_view(), name='sitedeploymentnetwork-detail'),
#Adding in rest_framework urls
url(r'^plstackapi/', include('rest_framework.urls', namespace='rest_framework')),
)
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from plstackapi.planetstack.views.roles import RoleListCreate, RoleRetrieveUpdateDestroy
from plstackapi.planetstack.views.roles import SiteListCreate, SiteRetrieveUpdateDestroy
from plstackapi.planetstack.views.api_root import api_root
from plstackapi.planetstack.models import Site
from rest_framework import generics
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'planetstack.views.home', name='home'),
# url(r'^planetstack/', include('planetstack.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^plstackapi/$', api_root),
url(r'^plstackapi/roles/$', RoleListCreate.as_view(), name='role-list'),
url(r'^plstackapi/roles/(?P<pk>[a-zA-Z0-9]+)/$', RoleRetrieveUpdateDestroy.as_view(), name='role-detail'),
url(r'^plstackapi/sites/$', SiteListCreate.as_view(), name='site-list'),
url(r'^plstackapi/sites/(?P<pk>[0-9]+)/$', SiteRetrieveUpdateDestroy.as_view(), name='site-detail'),
#url(r'^plstackapi/slices/$', views.SliceList.as_view(), name='slice-list'),
#url(r'^plstackapi/slices/(?P<pk>[0-9]+)/$', views.SliceDetail.as_view(), name='slice-detail'),
#url(r'^plstackapi/slivers/$', views.SliverList.as_view()),
#url(r'^plstackapi/slivers/(?P<pk>[0-9]+)/$', views.SliverDetail.as_view()),
#url(r'^plstackapi/nodes/$', views.NodeList.as_view(), name='node-list'),
#url(r'^plstackapi/nodes/(?P<pk>[0-9]+)/$', views.NodeDetail.as_view(), name='node-detail'),
#url(r'^plstackapi/deploymentnetworks/$', views.DeploymentNetworkList.as_view(), name='deploymentnetwork-list'),
#url(r'^plstackapi/deploymentnetworks/(?P<pk>[0-9]+)/$', views.DeploymentNetworkDetail.as_view(), name='deploymentnetwork-detail'),
#url(r'^plstackapi/sitedeploymentnetworks/$', views.SiteDeploymentNetworkList.as_view(), name='sitedeploymentnetwork-list'),
#url(r'^plstackapi/sitedeploymentnetworks/(?P<pk>[0-9]+)/$', views.SiteDeploymentNetworkDetail.as_view(), name='sitedeploymentnetwork-detail'),
#Adding in rest_framework urls
url(r'^plstackapi/', include('rest_framework.urls', namespace='rest_framework')),
)
|
apache-2.0
|
Python
|
6e87102251f6448ffa7b9c662ace3b50b00b69b2
|
Test for git config.
|
charanpald/APGL
|
apgl/data/ExamplesGenerator.py
|
apgl/data/ExamplesGenerator.py
|
'''
A simple class which can be used to generate test sets of examples.
'''
#import numpy
import numpy.random
class ExamplesGenerator():
def __init__(self):
pass
def generateBinaryExamples(self, numExamples=100, numFeatures=10, noise=0.4):
"""
Generate a certain number of examples with a uniform distribution between 0 and 1. Create
binary -/+ 1 labels. Must have more than 1 example and feature.
"""
if numExamples == 0 or numFeatures == 0:
raise ValueError("Cannot generate empty dataset")
X = numpy.random.rand(numExamples, numFeatures)
c = numpy.random.rand(numFeatures)
y = numpy.sign((X.dot(c)) - numpy.mean(X.dot(c)) + numpy.random.randn(numExamples)*noise)
return X, y
def generateRandomBinaryExamples(self, numExamples=100, numFeatures=10):
"""
Generate a certain number of examples with a uniform distribution between 0 and 1. Create
binary -/+ 1 labels
"""
X = numpy.random.rand(numExamples, numFeatures)
y = (numpy.random.rand(numExamples)>0.5)*2 - 1
return X, y
|
'''
A simple class which can be used to generate test sets of examples.
'''
#import numpy
import numpy.random
class ExamplesGenerator():
def __init__(self):
pass
def generateBinaryExamples(self, numExamples=100, numFeatures=10, noise=0.4):
"""
Generate a certain number of examples with a uniform distribution between 0 and 1. Create
binary -/+ 1 labels
"""
if numExamples == 0 or numFeatures == 0:
raise ValueError("Cannot generate empty dataset")
X = numpy.random.rand(numExamples, numFeatures)
c = numpy.random.rand(numFeatures)
y = numpy.sign((X.dot(c)) - numpy.mean(X.dot(c)) + numpy.random.randn(numExamples)*noise)
return X, y
def generateRandomBinaryExamples(self, numExamples=100, numFeatures=10):
"""
Generate a certain number of examples with a uniform distribution between 0 and 1. Create
binary -/+ 1 labels
"""
X = numpy.random.rand(numExamples, numFeatures)
y = (numpy.random.rand(numExamples)>0.5)*2 - 1
return X, y
|
bsd-3-clause
|
Python
|
25870e710ca51a6fe373677f2d2889a0df3641ca
|
Revert "test commit heroku"
|
codeforamerica/westsac-urban-land-locator,codeforamerica/westsac-urban-land-locator,codeforamerica/westsac-urban-land-locator,codeforamerica/westsac-urban-land-locator
|
farmsList/public/api.py
|
farmsList/public/api.py
|
import jsonpickle
from flask import Blueprint
from farmsList.public.models import Parcel
blueprint = Blueprint('api', __name__, url_prefix='/api',
static_folder="../static")
@blueprint.route("/parcel/", methods=["GET", "POST"])
def api_parcel():
parcelData = Parcel.query.filter(Parcel.listedToPublic == True).all()
return jsonpickle.encode(parcelData, unpicklable=False, make_refs=False)
|
import jsonpickle
from flask import Blueprint
from farmsList.public.models import Parcel
blueprint = Blueprint('api', __name__, url_prefix='/api',
static_folder="../static")
@blueprint.route("/parcel/", methods=["GET", "POST"])
def api_parcel():
print "HELLO"
parcelData = Parcel.query.filter(Parcel.listedToPublic == True).all()
print "HELLO 2"
return jsonpickle.encode(parcelData, unpicklable=False, make_refs=False)
|
bsd-3-clause
|
Python
|
57c0668b9dd11845fb2d845ecbcb0a8aae19eb9c
|
Add default params to template
|
mweb/python,exercism/python,behrtam/xpython,exercism/xpython,pheanex/xpython,exercism/xpython,exercism/python,smalley/python,N-Parsons/exercism-python,smalley/python,behrtam/xpython,jmluy/xpython,N-Parsons/exercism-python,mweb/python,pheanex/xpython,jmluy/xpython
|
exercises/scale-generator/scale_generator.py
|
exercises/scale-generator/scale_generator.py
|
class Scale(object):
def __init__(self, tonic, scale_name, pattern=None):
pass
|
class Scale(object):
def __init__(self):
pass
|
mit
|
Python
|
2b3431f302cf08d3892eb613079df905ba1f68cb
|
Add tests to check content page rendering
|
pycontw/pycontw2016,uranusjr/pycontw2016,uranusjr/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,uranusjr/pycontw2016,uranusjr/pycontw2016,pycontw/pycontw2016
|
src/core/tests.py
|
src/core/tests.py
|
import os
import pytest
from django.test import override_settings
from django.utils.translation import activate
from core.utils import collect_language_codes
def test_locale_fallback_middleware(client, settings):
response = client.get('/en/', follow=True)
assert response.redirect_chain == [('/en-us/', 302)]
@override_settings(USE_I18N=False)
def test_locale_fallback_middleware_no_i18n(client, settings):
response = client.get('/en/')
assert response.status_code == 404
def test_collect_language_codes():
assert collect_language_codes('zh-tw') == ['zh-tw', 'zh', 'en-us', 'en']
assert collect_language_codes('zh') == ['zh', 'en-us', 'en']
assert collect_language_codes('en-us') == ['en-us', 'en', 'en-us', 'en']
assert collect_language_codes('en') == ['en', 'en-us', 'en']
def test_index_page(client):
response = client.get('/en-us/')
assert response.status_code == 200
assert 'PyCon' in response.content.decode('utf-8')
@pytest.mark.parametrize('path,expected', [
('/en-us/speaking/cfp/', 200),
('/en-us/speaking/talk/', 200),
('/en-us/speaking/base/', 404),
('/en-us/speaking/_base/', 404),
])
def test_speaking_pages(client, path, expected):
assert client.get(path).status_code == expected
def content_page_path_gen():
from django.conf import settings
for template_setting in settings.TEMPLATES:
for template_dir in template_setting['DIRS']:
for lang in ['en', 'zh']:
contents_path = os.path.join(template_dir, 'contents', lang)
os.chdir(contents_path)
for dirpath, _, filenames in os.walk('.'):
if os.path.basename(dirpath).startswith('_'):
continue
for filename in filenames:
if filename.startswith('_'):
continue
root, ext = os.path.splitext(filename)
if ext != '.html':
continue
comps = [c for c in dirpath.split(os.sep) if c != '.']
yield '/'.join([''] + comps + [root, ''])
@pytest.fixture(params=content_page_path_gen())
def content_page_path(request):
return request.param
def language_gen():
from django.conf import settings
for lang_code, _ in settings.LANGUAGES:
yield lang_code
@pytest.fixture(params=language_gen())
def language(request):
return request.param
def test_content_pages(client, language, content_page_path):
activate(language)
path = '/' + language + '/' + content_page_path
response = client.get(path)
assert response.status_code == 200, path
|
import pytest
from django.test import override_settings
from core.utils import collect_language_codes
def test_locale_fallback_middleware(client, settings):
response = client.get('/en/', follow=True)
assert response.redirect_chain == [('/en-us/', 302)]
@override_settings(USE_I18N=False)
def test_locale_fallback_middleware_no_i18n(client, settings):
response = client.get('/en/')
assert response.status_code == 404
def test_collect_language_codes():
assert collect_language_codes('zh-tw') == ['zh-tw', 'zh', 'en-us', 'en']
assert collect_language_codes('zh') == ['zh', 'en-us', 'en']
assert collect_language_codes('en-us') == ['en-us', 'en', 'en-us', 'en']
assert collect_language_codes('en') == ['en', 'en-us', 'en']
def test_index_page(client):
response = client.get('/en-us/')
assert response.status_code == 200
assert 'PyCon' in response.content.decode('utf-8')
@pytest.mark.parametrize('path,expected', [
('/en-us/speaking/cfp/', 200),
('/en-us/speaking/talk/', 200),
('/en-us/speaking/base/', 404),
('/en-us/speaking/_base/', 404),
])
def test_speaking_pages(client, path, expected):
assert client.get(path).status_code == expected
|
mit
|
Python
|
ee98b5a5c6b82671738bc60e68ea87d838c5400f
|
Improve the migration for unique data source name
|
ninneko/redash,EverlyWell/redash,hudl/redash,useabode/redash,pubnative/redash,chriszs/redash,akariv/redash,alexanderlz/redash,easytaxibr/redash,pubnative/redash,amino-data/redash,rockwotj/redash,ninneko/redash,guaguadev/redash,rockwotj/redash,denisov-vlad/redash,imsally/redash,ninneko/redash,useabode/redash,crowdworks/redash,EverlyWell/redash,jmvasquez/redashtest,hudl/redash,denisov-vlad/redash,moritz9/redash,stefanseifert/redash,alexanderlz/redash,alexanderlz/redash,chriszs/redash,rockwotj/redash,M32Media/redash,vishesh92/redash,stefanseifert/redash,easytaxibr/redash,EverlyWell/redash,pubnative/redash,44px/redash,ninneko/redash,hudl/redash,amino-data/redash,M32Media/redash,moritz9/redash,guaguadev/redash,chriszs/redash,chriszs/redash,44px/redash,imsally/redash,EverlyWell/redash,amino-data/redash,44px/redash,guaguadev/redash,guaguadev/redash,denisov-vlad/redash,44px/redash,easytaxibr/redash,hudl/redash,getredash/redash,moritz9/redash,ninneko/redash,imsally/redash,imsally/redash,M32Media/redash,jmvasquez/redashtest,easytaxibr/redash,crowdworks/redash,stefanseifert/redash,akariv/redash,moritz9/redash,pubnative/redash,useabode/redash,getredash/redash,crowdworks/redash,pubnative/redash,vishesh92/redash,jmvasquez/redashtest,amino-data/redash,denisov-vlad/redash,getredash/redash,M32Media/redash,vishesh92/redash,guaguadev/redash,jmvasquez/redashtest,easytaxibr/redash,stefanseifert/redash,useabode/redash,akariv/redash,akariv/redash,akariv/redash,alexanderlz/redash,stefanseifert/redash,getredash/redash,vishesh92/redash,getredash/redash,rockwotj/redash,denisov-vlad/redash,crowdworks/redash,jmvasquez/redashtest
|
migrations/0020_change_ds_name_to_non_uniqe.py
|
migrations/0020_change_ds_name_to_non_uniqe.py
|
from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
# In some cases it's a constraint:
db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
# In others only an index:
db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
|
from redash.models import db
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
if __name__ == '__main__':
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
# Change the uniqueness constraint on data source name to be (org, name):
success = False
for index_name in ['unique_name', 'data_sources_name']:
try:
print "Trying to remove data source name uniqueness index with the name: {}".format(index_name)
migrate(migrator.drop_index("data_sources", index_name))
print "Success!"
success = True
break
except peewee.ProgrammingError:
db.close_db(None)
if not success:
print "Failed removing uniqueness constraint on data source name."
print "Please verify its name in the schema, update the migration and run again."
exit(1)
migrate(
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
)
db.close_db(None)
|
bsd-2-clause
|
Python
|
791546d9fa1fc0317dc613e0ba7e74ca1cbf8210
|
Update __init__.py
|
closeio/flask-admin,jmagnusson/flask-admin,flask-admin/flask-admin,lifei/flask-admin,rochacbruno/flask-admin,quokkaproject/flask-admin,closeio/flask-admin,rochacbruno/flask-admin,jmagnusson/flask-admin,quokkaproject/flask-admin,rochacbruno/flask-admin,ArtemSerga/flask-admin,flask-admin/flask-admin,ArtemSerga/flask-admin,lifei/flask-admin,quokkaproject/flask-admin,flask-admin/flask-admin,jmagnusson/flask-admin,lifei/flask-admin,closeio/flask-admin,flask-admin/flask-admin,jmagnusson/flask-admin,rochacbruno/flask-admin,quokkaproject/flask-admin,closeio/flask-admin,ArtemSerga/flask-admin,ArtemSerga/flask-admin,lifei/flask-admin
|
flask_admin/__init__.py
|
flask_admin/__init__.py
|
__version__ = '1.4.2'
__author__ = 'Flask-Admin team'
__email__ = '[email protected]'
from .base import expose, expose_plugview, Admin, BaseView, AdminIndexView
|
__version__ = '1.4.1'
__author__ = 'Flask-Admin team'
__email__ = '[email protected]'
from .base import expose, expose_plugview, Admin, BaseView, AdminIndexView
|
bsd-3-clause
|
Python
|
558085de2a32ef14d7b9ef2884f34699b7f7c39b
|
Update test.py
|
liontoolkit/test
|
tests/test.py
|
tests/test.py
|
import sys
sys.path.append('..')
from nose.tools import with_setup
from MyModule import MyModule
count = 0
def setup_module():
print('<<<Setup Module>>>')
def teardown_module():
print('<<<Teardown Module>>>')
def setup_function():
print('<<<Setup Function>>>')
global count
count = 1
def teardown_function():
print('<<<Teardown Function>>>')
global count
count = 0
@with_setup(setup_function, teardown_function)
def test_function():
print('<<<Test function>>>')
global count
assert count == 1
@with_setup(setup_function, teardown_function)
def test_generator():
print('<<<Test generator>>>')
for i in range(5):
yield generator_function, 2, i
def setup_generator_function():
print('<<<Setup generator_function>>>')
def teardown_generator_function():
print('<<<Teardown generator_function>>>')
@with_setup(setup_generator_function, teardown_generator_function)
def generator_function(step,i):
print('<<<generator_function {}>>>'.format(i+1))
global count
count = count + step
assert count == 1 + step*(i+1)
class TestMyModule:
@classmethod
def setup_class(cls):
print('<<<Setup Class>>>')
cls.obj = MyModule()
@classmethod
def teardown_class(cls):
print('<<<Teardown Class>>>')
del cls.obj
def setup(self):
print('<<<Setup Method>>>')
self.obj = self.__class__.obj
def teardown(self):
print('<<<Teardown Method>>>')
self.obj = None
def test___init__(self):
print('<<<Test __init__>>>')
assert self.obj.number1 == 1
assert self.obj.number2 == 2
def test_my_function(self):
print('<<<Test my_function>>>')
assert self.obj.my_function() == self.obj.number1 + self.obj.number2
assert self.obj.my_function(3,4) == 7
assert self.obj.my_function(num1=5) == 5 + self.obj.number2
assert self.obj.my_function(num2=3) == self.obj.number1 + 3
assert self.obj.my_function(10,-5) == 5
assert self.obj.my_function(-10,-5) == -15
|
from nose.tools import with_setup
from ..MyModule import MyModule
count = 0
def setup_module():
print('<<<Setup Module>>>')
def teardown_module():
print('<<<Teardown Module>>>')
def setup_function():
print('<<<Setup Function>>>')
global count
count = 1
def teardown_function():
print('<<<Teardown Function>>>')
global count
count = 0
@with_setup(setup_function, teardown_function)
def test_function():
print('<<<Test function>>>')
global count
assert count == 1
@with_setup(setup_function, teardown_function)
def test_generator():
print('<<<Test generator>>>')
for i in range(5):
yield generator_function, 2, i
def setup_generator_function():
print('<<<Setup generator_function>>>')
def teardown_generator_function():
print('<<<Teardown generator_function>>>')
@with_setup(setup_generator_function, teardown_generator_function)
def generator_function(step,i):
print('<<<generator_function {}>>>'.format(i+1))
global count
count = count + step
assert count == 1 + step*(i+1)
class TestMyModule:
@classmethod
def setup_class(cls):
print('<<<Setup Class>>>')
cls.obj = MyModule()
@classmethod
def teardown_class(cls):
print('<<<Teardown Class>>>')
del cls.obj
def setup(self):
print('<<<Setup Method>>>')
self.obj = self.__class__.obj
def teardown(self):
print('<<<Teardown Method>>>')
self.obj = None
def test___init__(self):
print('<<<Test __init__>>>')
assert self.obj.number1 == 1
assert self.obj.number2 == 2
def test_my_function(self):
print('<<<Test my_function>>>')
assert self.obj.my_function() == self.obj.number1 + self.obj.number2
assert self.obj.my_function(3,4) == 7
assert self.obj.my_function(num1=5) == 5 + self.obj.number2
assert self.obj.my_function(num2=3) == self.obj.number1 + 3
assert self.obj.my_function(10,-5) == 5
assert self.obj.my_function(-10,-5) == -15
|
mpl-2.0
|
Python
|
53d75c14d79b92ad1fdf9c99b0773db6427d4294
|
update to reveal.js 3.2.0
|
humrochagf/flask-reveal,humrochagf/flask-reveal
|
flask_reveal/tools/commands/installreveal.py
|
flask_reveal/tools/commands/installreveal.py
|
# -*- coding: utf-8 -*-
import argparse
import os
from urllib import request
import flask_reveal
from flask_reveal.tools.helpers import extract_file, move_and_replace
class InstallReveal(argparse.ArgumentParser):
info = ({
'prog': 'installreveal',
'description': 'installs Reveal.js',
})
def __init__(self):
super(InstallReveal, self).__init__(**self.info)
self.url = None
self.path = None
self.add_argument('-u', '--url', action='store')
self.add_argument('-p', '--path', action='store')
def parse_args(self, args=None, namespace=None):
super(InstallReveal, self).parse_args(args, self)
if not self.url and not self.path:
self.url = 'https://github.com/hakimel/reveal.js/' + \
'archive/3.2.0.tar.gz'
def run(self, args=None):
self.parse_args(args)
if self.url:
try:
response = request.urlretrieve(self.url)
self.path = response[0]
except Exception:
raise
move_and_replace(
extract_file(self.path),
os.path.join(os.path.dirname(flask_reveal.__file__), 'static/')
)
command = InstallReveal()
|
# -*- coding: utf-8 -*-
import argparse
import os
from urllib import request
import flask_reveal
from flask_reveal.tools.helpers import extract_file, move_and_replace
class InstallReveal(argparse.ArgumentParser):
info = ({
'prog': 'installreveal',
'description': 'installs Reveal.js',
})
def __init__(self):
super(InstallReveal, self).__init__(**self.info)
self.url = None
self.path = None
self.add_argument('-u', '--url', action='store')
self.add_argument('-p', '--path', action='store')
def parse_args(self, args=None, namespace=None):
super(InstallReveal, self).parse_args(args, self)
if not self.url and not self.path:
self.url = 'https://github.com/hakimel/reveal.js/' + \
'archive/3.1.0.tar.gz'
def run(self, args=None):
self.parse_args(args)
if self.url:
try:
response = request.urlretrieve(self.url)
self.path = response[0]
except Exception:
raise
move_and_replace(
extract_file(self.path),
os.path.join(os.path.dirname(flask_reveal.__file__), 'static/')
)
command = InstallReveal()
|
mit
|
Python
|
a7db7bdb277eed65c93fbc9f5e9e923487711071
|
Update file_system_storage.py
|
ArabellaTech/django-image-diet
|
image_diet/file_system_storage.py
|
image_diet/file_system_storage.py
|
import os
from image_diet import settings
from django.conf import settings as main_settings
from django.contrib.staticfiles.storage import StaticFilesStorage
class ImageDietFileSystemStorage(StaticFilesStorage):
def post_process(self, files, *args, **kwargs):
results = []
print 'test'
print settings
die
if settings.DIET_COMPRESS_STATIC_IMAGES:
if 'image_diet' not in main_settings.INSTALLED_APPS:
raise NotImplementedError("You need to install django_image_diet to use DIET_COMPRESS_STATIC_IMAGES")
from image_diet.diet import squeeze
for f in files:
processed_file = squeeze(os.path.join(main_settings.STATIC_ROOT, f))
results.append([f, processed_file, True if processed_file is not None else False])
return results
|
import os
from image_diet import settings
from django.conf import settings as main_settings
from django.contrib.staticfiles.storage import StaticFilesStorage
class ImageDietFileSystemStorage(StaticFilesStorage):
def post_process(self, files, *args, **kwargs):
results = []
print settings
die
if settings.DIET_COMPRESS_STATIC_IMAGES:
if 'image_diet' not in main_settings.INSTALLED_APPS:
raise NotImplementedError("You need to install django_image_diet to use DIET_COMPRESS_STATIC_IMAGES")
from image_diet.diet import squeeze
for f in files:
processed_file = squeeze(os.path.join(main_settings.STATIC_ROOT, f))
results.append([f, processed_file, True if processed_file is not None else False])
return results
|
mit
|
Python
|
9c3b3cb541e8d42d1206fabae83fbef4a249f3ec
|
bump version
|
slash-testing/backslash-python,vmalloc/backslash-python
|
backslash/__version__.py
|
backslash/__version__.py
|
__version__ = "2.4.0"
|
__version__ = "2.3.1"
|
bsd-3-clause
|
Python
|
9834fab5a7e061f0eb1cb7b737cec8d2b23b4c7c
|
Declare numpydoc template in setup_package.py
|
astropy/astropy-helpers,Cadair/astropy-helpers,bsipocz/astropy-helpers,bsipocz/astropy-helpers,dpshelio/astropy-helpers,bsipocz/astropy-helpers,Cadair/astropy-helpers,astropy/astropy-helpers,dpshelio/astropy-helpers
|
astropy_helpers/extern/setup_package.py
|
astropy_helpers/extern/setup_package.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
def get_package_data():
return {'astropy_helpers.extern': ['automodapi/templates/*/*.rst', 'numpydoc/templates/*.rst']}
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
def get_package_data():
return {'astropy_helpers.extern': ['automodapi/templates/*/*.rst']}
|
bsd-3-clause
|
Python
|
220708945a18b5d876cdff32dcfb9b8b8971d85b
|
Add time/space complexity
|
bowen0701/algorithms_data_structures
|
lc017_letter_combinations_of_a_phone_number.py
|
lc017_letter_combinations_of_a_phone_number.py
|
"""Leetcode 17. Letter Combinations of a Phone Number
Medium
URL: https://leetcode.com/problems/letter-combinations-of-a-phone-number/
Given a string containing digits from 2-9 inclusive, return all possible
letter combinations that the number could represent.
A mapping of digit to letters (just like on the telephone buttons) is
given below. Note that 1 does not map to any letters.
Example:
Input: "23"
Output: ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
Note:
Although the above answer is in lexicographical order,
your answer could be in any order you want.
"""
class Solution(object):
def letterCombinations(self, digits):
"""
:type digits: str
:rtype: List[str]
Time complexity: O(n * m^2)
- n is the length of digits,
- m is the mean length of digit's letters, basically 3.
Space complexity: O(m^n).
"""
# Store digit->letter-list dict.
d2l_d = {
'2': ['a', 'b', 'c'],
'3': ['d', 'e', 'f'],
'4': ['g', 'h', 'i'],
'5': ['j', 'k' ,'l'],
'6': ['m', 'n', 'o'],
'7': ['p', 'q', 'r', 's'],
'8': ['t', 'u', 'v'],
'9': ['w', 'x', 'y', 'z']
}
# Edge cases handling.
if not digits:
return []
if len(digits) == 1:
return d2l_d[digits]
# Initialize output lc by the 0th digit's letter-list.
lc = d2l_d[digits[0]]
# Run for loop over digits starting from index i = 1.
# - Get the ith digit's letter list li.
# - Use list comprehension to combine lc and li to replace lc.
for i in range(1, len(digits)):
li = d2l_d[digits[i]]
lc = [m + n for m in lc for n in li]
return lc
def main():
digits = "23"
# Output: ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
print Solution().letterCombinations(digits)
if __name__ == '__main__':
main()
|
"""Leetcode 17. Letter Combinations of a Phone Number
Medium
URL: https://leetcode.com/problems/letter-combinations-of-a-phone-number/
Given a string containing digits from 2-9 inclusive, return all possible
letter combinations that the number could represent.
A mapping of digit to letters (just like on the telephone buttons) is
given below. Note that 1 does not map to any letters.
Example:
Input: "23"
Output: ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
Note:
Although the above answer is in lexicographical order,
your answer could be in any order you want.
"""
class Solution(object):
def letterCombinations(self, digits):
"""
:type digits: str
:rtype: List[str]
"""
# Store digit->letter-list dict.
d2l_d = {
'2': ['a', 'b', 'c'],
'3': ['d', 'e', 'f'],
'4': ['g', 'h', 'i'],
'5': ['j', 'k' ,'l'],
'6': ['m', 'n', 'o'],
'7': ['p', 'q', 'r', 's'],
'8': ['t', 'u', 'v'],
'9': ['w', 'x', 'y', 'z']
}
# Edge cases handling.
if not digits:
return []
if len(digits) == 1:
return d2l_d[digits]
# Initialize output lc by the 0th digit's letter-list.
lc = d2l_d[digits[0]]
# Run for loop over digits starting from index i = 1.
# - Get the ith digit's letter list li.
# - Use list comprehension to combine lc and li to replace lc.
for i in range(1, len(digits)):
li = d2l_d[digits[i]]
lc = [m + n for m in lc for n in li]
return lc
def main():
digits = "23"
# Output: ["ad", "ae", "af", "bd", "be", "bf", "cd", "ce", "cf"].
print Solution().letterCombinations(digits)
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
8161561499e813c8821d6f810e6e28d4e3984922
|
Update __init__.py
|
delitamakanda/socialite,delitamakanda/socialite,delitamakanda/socialite
|
app/__init__.py
|
app/__init__.py
|
from flask import Flask, render_template
from flask_assets import Bundle, Environment
from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.pagedown import PageDown
from flask.ext.flatpages import FlatPages
from flask.ext.cache import Cache
from flask_sitemap import Sitemap
from config import config
mail = Mail()
moment = Moment()
pagedown = PageDown()
pages = FlatPages()
db = SQLAlchemy()
cache = Cache(config={'CACHE_TYPE': 'simple'})
ext = Sitemap()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
assets = Environment(app)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask.ext.sslify import SSLify
sslify = SSLify(app)
mail.init_app(app)
moment.init_app(app)
pagedown.init_app(app)
pages.init_app(app)
cache.init_app(app)
ext.init_app(app)
db.init_app(app)
login_manager.init_app(app)
from .main import main as main_blueprint
from .auth import auth as auth_blueprint
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(main_blueprint)
app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
root_js = Bundle(
'js/vendors/jquery-2.1.1.min.js',
#'js/vendors/materialize.min.js',
'js/app.js',
'js/init.js',
output='dist/bundle.js')
root_css = Bundle(
'css/vendors/reset.css',
'css/vendors/materialize.min.css',
'css/style.css',
'css/common.css',
'css/app.css',
output='dist/styles.css')
assets.register('root_js', root_js)
assets.register('root_css', root_css)
return app
|
from flask import Flask, render_template
from flask_assets import Bundle, Environment
from flask_socketio import SocketIO, emit
from flask.ext.mail import Mail
from flask.ext.login import LoginManager
from flask.ext.moment import Moment
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.pagedown import PageDown
from flask.ext.flatpages import FlatPages
from flask.ext.cache import Cache
from flask_sitemap import Sitemap
from config import config
mail = Mail()
moment = Moment()
pagedown = PageDown()
pages = FlatPages()
db = SQLAlchemy()
cache = Cache(config={'CACHE_TYPE': 'simple'})
ext = Sitemap()
socketio = SocketIO()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
def create_app(config_name):
app = Flask(__name__)
assets = Environment(app)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
from flask.ext.sslify import SSLify
sslify = SSLify(app)
mail.init_app(app)
moment.init_app(app)
pagedown.init_app(app)
pages.init_app(app)
cache.init_app(app)
ext.init_app(app)
socketio.init_app(app)
db.init_app(app)
login_manager.init_app(app)
from .main import main as main_blueprint
from .auth import auth as auth_blueprint
from .api_1_0 import api as api_1_0_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
app.register_blueprint(main_blueprint)
app.register_blueprint(api_1_0_blueprint, url_prefix='/api/v1.0')
root_js = Bundle(
'js/vendors/jquery-2.1.1.min.js',
#'js/vendors/materialize.min.js',
'js/app.js',
'js/init.js',
output='dist/bundle.js')
root_css = Bundle(
'css/vendors/reset.css',
'css/vendors/materialize.min.css',
'css/style.css',
'css/common.css',
'css/app.css',
output='dist/styles.css')
assets.register('root_js', root_js)
assets.register('root_css', root_css)
return app
|
mit
|
Python
|
65f8dc529a78593621f4f2b2477707b8e877f0d3
|
Add Mooseman to privileged users. --autopull
|
Charcoal-SE/SmokeDetector,NickVolynkin/SmokeDetector,Charcoal-SE/SmokeDetector,ArtOfCode-/SmokeDetector,NickVolynkin/SmokeDetector,ArtOfCode-/SmokeDetector
|
globalvars.py
|
globalvars.py
|
import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = { "electronics.stackexchange.com" : "ElectronicsGood.txt",
"gaming.stackexchange.com" : "GamingGood.txt", "german.stackexchange.com" : "GermanGood.txt",
"italian.stackexchange.com" : "ItalianGood.txt", "math.stackexchange.com" : "MathematicsGood.txt",
"spanish.stackexchange.com" : "SpanishGood.txt", "stats.stackexchange.com" : "StatsGood.txt" }
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = { charcoal_room_id: ["117490", "66258", "31768","103081","73046","88521","59776", "31465"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832"] }
smokeDetector_user_id = { charcoal_room_id: "120914", meta_tavern_room_id: "266345" }
commit = os.popen("git log --pretty=format:'%h' -n 1").read()
commit_with_author = os.popen("git log --pretty=format:'%h (%cn: *%s*)' -n 1").read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
|
import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = { "electronics.stackexchange.com" : "ElectronicsGood.txt",
"gaming.stackexchange.com" : "GamingGood.txt", "german.stackexchange.com" : "GermanGood.txt",
"italian.stackexchange.com" : "ItalianGood.txt", "math.stackexchange.com" : "MathematicsGood.txt",
"spanish.stackexchange.com" : "SpanishGood.txt", "stats.stackexchange.com" : "StatsGood.txt" }
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = { charcoal_room_id: ["117490", "66258", "31768","103081","73046","88521","59776", "31465"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389"] }
smokeDetector_user_id = { charcoal_room_id: "120914", meta_tavern_room_id: "266345" }
commit = os.popen("git log --pretty=format:'%h' -n 1").read()
commit_with_author = os.popen("git log --pretty=format:'%h (%cn: *%s*)' -n 1").read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
|
apache-2.0
|
Python
|
b518de210dc3ae075beea60a06e981844ecff3d8
|
fix lint errors
|
happyraul/tv
|
app/__init__.py
|
app/__init__.py
|
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.openid import OpenID
from flask.ext.mail import Mail
from config import config, basedir
db = SQLAlchemy()
mail = Mail()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
oid = OpenID(fs_store_path=os.path.join(basedir, 'tmp'))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
mail.init_app(app)
db.init_app(app)
login_manager.init_app(app)
oid.init_app(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
return app
|
import os
from flask import Flask, render_template
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.openid import OpenID
from flask.ext.mail import Mail
from config import config, basedir
db = SQLAlchemy()
mail = Mail()
login_manager = LoginManager()
login_manager.session_protection = 'strong'
login_manager.login_view = 'auth.login'
oid = OpenID(fs_store_path=os.path.join(basedir, 'tmp'))
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
mail.init_app(app)
db.init_app(app)
login_manager.init_app(app)
oid.init_app(app)
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint, url_prefix='/auth')
return app
|
apache-2.0
|
Python
|
10bbd489e4123363ee4ecafe0ca43151f52a9813
|
Create compatibility settings/functions for pluggable auth user functionality. See #24
|
vyscond/django-all-access,dpoirier/django-all-access,dpoirier/django-all-access,iXioN/django-all-access,mlavin/django-all-access,vyscond/django-all-access,mlavin/django-all-access,iXioN/django-all-access
|
allaccess/compat.py
|
allaccess/compat.py
|
"Python and Django compatibility functions."
from __future__ import unicode_literals
from django.conf import settings
AUTH_USER_MODEL = getattr(settings, 'AUTH_USER_MODEL', 'auth.User')
try:
from django.utils.crypto import get_random_string
except ImportError: # pragma: no cover
# Backport implementation from Django 1.4
import hashlib
import random
import string
import time
try:
random = random.SystemRandom()
using_sysrandom = True
except NotImplementedError:
import warnings
warnings.warn('A secure pseudo-random number generator is not available '
'on your system. Falling back to Mersenne Twister.')
using_sysrandom = False
def get_random_string(length=12, allowed_chars=string.ascii_letters + string.digits):
"Returns a securely generated random string."
if not using_sysrandom:
# Re-seed random
bytes = b"{0}{1}{2}".format(random.getstate(), time.time(), settings.SECRET_KEY)
random.seed(hashlib.sha256(bytes).digest())
return ''.join([random.choice(allowed_chars) for i in range(length)])
try:
from django.contrib.auth import get_user_model
except ImportError: # pragma: no cover
# Django < 1.5
from django.contrib.auth.models import User
get_user_model = lambda: User
|
"Python and Django compatibility functions."
from __future__ import unicode_literals
from django.conf import settings
try:
from django.utils.crypto import get_random_string
except ImportError: # pragma: no cover
# Backport implementation from Django 1.4
import hashlib
import random
import string
import time
try:
random = random.SystemRandom()
using_sysrandom = True
except NotImplementedError:
import warnings
warnings.warn('A secure pseudo-random number generator is not available '
'on your system. Falling back to Mersenne Twister.')
using_sysrandom = False
def get_random_string(length=12, allowed_chars=string.ascii_letters + string.digits):
"Returns a securely generated random string."
if not using_sysrandom:
# Re-seed random
bytes = b"{0}{1}{2}".format(random.getstate(), time.time(), settings.SECRET_KEY)
random.seed(hashlib.sha256(bytes).digest())
return ''.join([random.choice(allowed_chars) for i in range(length)])
|
bsd-2-clause
|
Python
|
15f00997113ecb87de9daf636738bc0b51686918
|
Fix assertion in AutocompleteList.choices_for_request for empty lists
|
Eraldo/django-autocomplete-light,yourlabs/django-autocomplete-light,Perkville/django-autocomplete-light,shubhamdipt/django-autocomplete-light,shubhamdipt/django-autocomplete-light,Visgean/django-autocomplete-light,luzfcb/django-autocomplete-light,blueyed/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,dsanders11/django-autocomplete-light,yourlabs/django-autocomplete-light,blueyed/django-autocomplete-light,Eraldo/django-autocomplete-light,shubhamdipt/django-autocomplete-light,luzfcb/django-autocomplete-light,Perkville/django-autocomplete-light,luzfcb/django-autocomplete-light,Perkville/django-autocomplete-light,Visgean/django-autocomplete-light,blueyed/django-autocomplete-light,Eraldo/django-autocomplete-light,dsanders11/django-autocomplete-light,luzfcb/django-autocomplete-light,Visgean/django-autocomplete-light,Eraldo/django-autocomplete-light,yourlabs/django-autocomplete-light,Perkville/django-autocomplete-light,Visgean/django-autocomplete-light,dsanders11/django-autocomplete-light,shubhamdipt/django-autocomplete-light
|
autocomplete_light/autocomplete/list.py
|
autocomplete_light/autocomplete/list.py
|
from __future__ import unicode_literals
from django.utils.encoding import force_text
__all__ = ('AutocompleteList',)
class AutocompleteList(object):
"""
Simple Autocomplete implementation which expects :py:attr:`choices` to be a
list of string choices.
.. py:attribute:: choices
List of string choices.
.. py:attribute:: limit_choices
The maximum of items to suggest from :py:attr:`choices`.
.. py:attribute:: order_by
:py:meth:`~.list.AutocompleteList.order_choices` will use this against
:py:attr:`choices` as an argument :py:func:`sorted`.
It was mainly used as a starter for me when doing test-driven development
and to ensure that the Autocomplete pattern would be concretely simple and
yet powerful.
"""
limit_choices = 20
order_by = lambda cls, choice: force_text(choice).lower()
def choices_for_values(self):
"""
Return any :py:attr:`choices` that is in :py:attr:`values`.
"""
values_choices = []
for choice in self.choices:
if choice in self.values:
values_choices.append(choice)
return self.order_choices(values_choices)
def choices_for_request(self):
"""
Return any :py:attr:`choices` that contains the search string. It is
case insensitive and ignores spaces.
"""
assert self.choices is not None, 'autocomplete.choices is not set'
requests_choices = []
q = self.request.GET.get('q', '').lower().strip()
for choice in self.choices:
if q in force_text(choice).lower():
requests_choices.append(choice)
return self.order_choices(requests_choices)[0:self.limit_choices]
def order_choices(self, choices):
"""
Run :py:func:`sorted` against ``choices`` and :py:attr:`order_by`.
"""
return sorted(choices, key=self.order_by)
|
from __future__ import unicode_literals
from django.utils.encoding import force_text
__all__ = ('AutocompleteList',)
class AutocompleteList(object):
"""
Simple Autocomplete implementation which expects :py:attr:`choices` to be a
list of string choices.
.. py:attribute:: choices
List of string choices.
.. py:attribute:: limit_choices
The maximum of items to suggest from :py:attr:`choices`.
.. py:attribute:: order_by
:py:meth:`~.list.AutocompleteList.order_choices` will use this against
:py:attr:`choices` as an argument :py:func:`sorted`.
It was mainly used as a starter for me when doing test-driven development
and to ensure that the Autocomplete pattern would be concretely simple and
yet powerful.
"""
limit_choices = 20
order_by = lambda cls, choice: force_text(choice).lower()
def choices_for_values(self):
"""
Return any :py:attr:`choices` that is in :py:attr:`values`.
"""
values_choices = []
for choice in self.choices:
if choice in self.values:
values_choices.append(choice)
return self.order_choices(values_choices)
def choices_for_request(self):
"""
Return any :py:attr:`choices` that contains the search string. It is
case insensitive and ignores spaces.
"""
assert self.choices, 'autocomplete.choices is not set'
requests_choices = []
q = self.request.GET.get('q', '').lower().strip()
for choice in self.choices:
if q in force_text(choice).lower():
requests_choices.append(choice)
return self.order_choices(requests_choices)[0:self.limit_choices]
def order_choices(self, choices):
"""
Run :py:func:`sorted` against ``choices`` and :py:attr:`order_by`.
"""
return sorted(choices, key=self.order_by)
|
mit
|
Python
|
d92cfdf6644663a6e615e032c6bc6ca52bed3edb
|
Add DragonLord to privileged users
|
ArtOfCode-/SmokeDetector,NickVolynkin/SmokeDetector,Charcoal-SE/SmokeDetector,NickVolynkin/SmokeDetector,ArtOfCode-/SmokeDetector,Charcoal-SE/SmokeDetector
|
globalvars.py
|
globalvars.py
|
import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = {"electronics.stackexchange.com": "ElectronicsGood.txt",
"gaming.stackexchange.com": "GamingGood.txt", "german.stackexchange.com": "GermanGood.txt",
"italian.stackexchange.com": "ItalianGood.txt", "math.stackexchange.com": "MathematicsGood.txt",
"spanish.stackexchange.com": "SpanishGood.txt", "stats.stackexchange.com": "StatsGood.txt"}
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = {charcoal_room_id: ["117490", "66258", "31768", "103081", "73046", "88521", "59776", "31465"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832", "160017"]}
smokeDetector_user_id = {charcoal_room_id: "120914", meta_tavern_room_id: "266345"}
commit = os.popen("git log --pretty=format:'%h' -n 1").read()
commit_with_author = os.popen("git log --pretty=format:'%h (%cn: *%s*)' -n 1").read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
|
import os
from datetime import datetime
from ChatExchange.chatexchange.client import Client
import HTMLParser
class GlobalVars:
false_positives = []
whitelisted_users = []
blacklisted_users = []
ignored_posts = []
auto_ignored_posts = []
startup_utc = datetime.utcnow().strftime("%H:%M:%S")
latest_questions = []
blockedTime = 0
charcoal_room_id = "11540"
meta_tavern_room_id = "89"
site_filename = {"electronics.stackexchange.com": "ElectronicsGood.txt",
"gaming.stackexchange.com": "GamingGood.txt", "german.stackexchange.com": "GermanGood.txt",
"italian.stackexchange.com": "ItalianGood.txt", "math.stackexchange.com": "MathematicsGood.txt",
"spanish.stackexchange.com": "SpanishGood.txt", "stats.stackexchange.com": "StatsGood.txt"}
parser = HTMLParser.HTMLParser()
wrap = Client("stackexchange.com")
wrapm = Client("meta.stackexchange.com")
privileged_users = {charcoal_room_id: ["117490", "66258", "31768", "103081", "73046", "88521", "59776", "31465"],
meta_tavern_room_id: ["259867", "244519", "244382", "194047", "158100", "178438", "237685",
"215468", "229438", "180276", "161974", "244382", "186281", "266094",
"245167", "230261", "213575", "241919", "203389", "202832"]}
smokeDetector_user_id = {charcoal_room_id: "120914", meta_tavern_room_id: "266345"}
commit = os.popen("git log --pretty=format:'%h' -n 1").read()
commit_with_author = os.popen("git log --pretty=format:'%h (%cn: *%s*)' -n 1").read()
on_master = os.popen("git rev-parse --abbrev-ref HEAD").read().strip() == "master"
charcoal_hq = None
tavern_on_the_meta = None
s = ""
s_reverted = ""
specialrooms = []
bayesian_testroom = None
apiquota = -1
bodyfetcher = None
|
apache-2.0
|
Python
|
cc1000824237cd74dec3e0ff210ee08020c2cd92
|
add config ini to ament_mypy site package (#182)
|
ament/ament_lint,ament/ament_lint,ament/ament_lint
|
ament_mypy/setup.py
|
ament_mypy/setup.py
|
from setuptools import find_packages
from setuptools import setup
setup(
name='ament_mypy',
version='0.7.3',
packages=find_packages(exclude=['test']),
install_requires=['setuptools'],
package_data={'': [
'configuration/ament_mypy.ini',
]},
zip_safe=False,
author='Ted Kern',
author_email='[email protected]',
maintainer='Ted Kern',
maintainer_email='[email protected]',
url='https://github.com/ament/ament_lint',
download_url='https://github.com/ament/ament_lint/releases',
keywords=['ROS'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development',
],
description='Check Python static typing using mypy.',
long_description="""\
The ability to check code for user specified static typing with mypy.""",
license='Apache License, Version 2.0',
tests_require=['pytest'],
entry_points={
'console_scripts': [
'ament_mypy = ament_mypy.main:main',
],
},
)
|
from setuptools import find_packages
from setuptools import setup
setup(
name='ament_mypy',
version='0.7.3',
packages=find_packages(exclude=['test']),
install_requires=['setuptools'],
zip_safe=False,
author='Ted Kern',
author_email='[email protected]',
maintainer='Ted Kern',
maintainer_email='[email protected]',
url='https://github.com/ament/ament_lint',
download_url='https://github.com/ament/ament_lint/releases',
keywords=['ROS'],
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
'Topic :: Software Development',
],
description='Check Python static typing using mypy.',
long_description="""\
The ability to check code for user specified static typing with mypy.""",
license='Apache License, Version 2.0',
tests_require=['pytest'],
entry_points={
'console_scripts': [
'ament_mypy = ament_mypy.main:main',
],
},
)
|
apache-2.0
|
Python
|
1ec0b7bf12b8d0ea452caa9aad17535a2fd745d8
|
Optimise for readability
|
eugene-eeo/scell
|
scell/core.py
|
scell/core.py
|
"""
scell.core
~~~~~~~~~~
Provides abstractions over lower level APIs and
file objects and their interests.
"""
from select import select as _select
from collections import namedtuple
def select(rl, wl, timeout=None):
"""
Returns the file objects ready for reading/writing
from the read-list (*rl*) and write-list (*wl*),
subject to *timeout* in seconds.
:param rl: Objects interested in readability.
:param wl: Objects interested in writability.
:param timeout: Maximum blocking time in seconds,
*None* for no timeout.
"""
if not (rl or wl):
return [], []
readers, writers, _ = _select(rl, wl, (), timeout)
return readers, writers
_Monitored = namedtuple('Monitored', 'fp,wants_read,wants_write,callback')
_Event = namedtuple('Event', 'monitored,readable,writable,fp,callback,ready')
class Monitored(_Monitored):
"""
Represents the interests of a file handle *fp*,
and whether it *wants_read* and or *wants_write*,
as well as an attached *callback*.
"""
__slots__ = ()
class Event(_Event):
"""
Represents the readability or writability
of a *monitored* file object.
"""
__slots__ = ()
def __new__(cls, monitored, readable, writable):
ready = (
readable >= monitored.wants_read and
writable >= monitored.wants_write
)
return super(Event, cls).__new__(
cls,
monitored,
readable,
writable,
fp=monitored.fp,
callback=monitored.callback,
ready=ready,
)
|
"""
scell.core
~~~~~~~~~~
Provides abstractions over lower level APIs and
file objects and their interests.
"""
from select import select as _select
from collections import namedtuple
def select(rl, wl, timeout=None):
"""
Returns the file objects ready for reading/writing
from the read-list (*rl*) and write-list (*wl*),
subject to *timeout* in seconds.
:param rl: Objects interested in readability.
:param wl: Objects interested in writability.
:param timeout: Maximum blocking time in seconds,
*None* for no timeout.
"""
if not (rl or wl):
return [], []
readers, writers, _ = _select(rl, wl, (), timeout)
return readers, writers
class Monitored(namedtuple('_Monitored', 'fp,wants_read,wants_write,callback')):
"""
Represents the interests of a file handle *fp*,
and whether it *wants_read* and or *wants_write*,
as well as an attached *callback*.
"""
__slots__ = ()
class Event(namedtuple('_Event', 'monitored,readable,writable,fp,callback,ready')):
"""
Represents the readability or writability
of a *monitored* file object.
"""
__slots__ = ()
def __new__(cls, monitored, readable, writable):
ready = (
readable >= monitored.wants_read and
writable >= monitored.wants_write
)
return super(Event, cls).__new__(
cls,
monitored,
readable,
writable,
fp=monitored.fp,
callback=monitored.callback,
ready=ready,
)
|
mit
|
Python
|
9f0837d387c7303d5c8c925a9989ca77a1a96e3e
|
Bump version after keras model fix
|
iskandr/fancyimpute,hammerlab/fancyimpute
|
fancyimpute/__init__.py
|
fancyimpute/__init__.py
|
from __future__ import absolute_import, print_function, division
from .solver import Solver
from .nuclear_norm_minimization import NuclearNormMinimization
from .iterative_imputer import IterativeImputer
from .matrix_factorization import MatrixFactorization
from .iterative_svd import IterativeSVD
from .simple_fill import SimpleFill
from .soft_impute import SoftImpute
from .scaler import BiScaler
from .knn import KNN
from .similarity_weighted_averaging import SimilarityWeightedAveraging
__version__ = "0.4.3"
__all__ = [
"Solver",
"NuclearNormMinimization",
"MatrixFactorization",
"IterativeSVD",
"SimpleFill",
"SoftImpute",
"BiScaler",
"KNN",
"SimilarityWeightedAveraging",
"IterativeImputer"
]
|
from __future__ import absolute_import, print_function, division
from .solver import Solver
from .nuclear_norm_minimization import NuclearNormMinimization
from .iterative_imputer import IterativeImputer
from .matrix_factorization import MatrixFactorization
from .iterative_svd import IterativeSVD
from .simple_fill import SimpleFill
from .soft_impute import SoftImpute
from .scaler import BiScaler
from .knn import KNN
from .similarity_weighted_averaging import SimilarityWeightedAveraging
__version__ = "0.4.2"
__all__ = [
"Solver",
"NuclearNormMinimization",
"MatrixFactorization",
"IterativeSVD",
"SimpleFill",
"SoftImpute",
"BiScaler",
"KNN",
"SimilarityWeightedAveraging",
"IterativeImputer"
]
|
apache-2.0
|
Python
|
5b7abc62a541622b007da367e52488eab72f2b5a
|
Fix font usage.
|
rsmith-nl/scripts,rsmith-nl/scripts
|
graph-deps.py
|
graph-deps.py
|
#!/usr/bin/env python3
# file: graph-deps.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <[email protected]>
# Created: 2017-04-27 13:50:28 +0200
# Last modified: 2018-03-10 22:50:33 +0100
#
# To the extent possible under law, R.F. Smith has waived all copyright and
# related or neighboring rights to graph-deps.py. This work is published
# from the Netherlands. See http://creativecommons.org/publicdomain/zero/1.0/
"""
Creates a graph of FreeBSD package dependencies.
Use it like this:
pkg info -dx py27- | python3 graph-deps.py | dot -o py27-deps.pdf -Tpdf
This will output a graphviz digraph for all Python 2.7 packages on stdout,
which is processed by the “dot” program from the graphics/graphviz port and
turned into a PDF rendering of the graph.
"""
import sys
if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
print(__doc__)
exit(0)
output = ['digraph deps {', 'rankdir=LR;', 'fontname="Alegreya";', 'concentratr=true;',
'node [shape=box, fontname="Alegreya"];']
parent = None
try:
for ln in sys.stdin:
pkgname = ln.strip()
if pkgname.endswith(':'):
pkgname = pkgname[:-1]
output.append('"{0}" [label="{0}"];'.format(pkgname))
if ln[0] not in ' \t': # parent
parent = pkgname
else:
output.append('"{}" -> "{}";'.format(parent, pkgname))
except KeyboardInterrupt:
print('\n', __doc__)
else:
print('\n'.join(output) + '}')
|
#!/usr/bin/env python3
# file: graph-deps.py
# vim:fileencoding=utf-8:fdm=marker:ft=python
#
# Author: R.F. Smith <[email protected]>
# Created: 2017-04-27 13:50:28 +0200
# Last modified: 2017-06-04 13:38:06 +0200
#
# To the extent possible under law, R.F. Smith has waived all copyright and
# related or neighboring rights to graph-deps.py. This work is published
# from the Netherlands. See http://creativecommons.org/publicdomain/zero/1.0/
"""
Creates a graph of FreeBSD package dependencies.
Use it like this:
pkg info -dx py27- | python3 graph-deps.py | dot -o py27-deps.pdf -Tpdf
This will output a graphviz digraph for all Python 2.7 packages on stdout,
which is processed by the “dot” program from the graphics/graphviz port and
turned into a PDF rendering of the graph.
"""
import sys
if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
print(__doc__)
exit(0)
output = ['digraph deps {', 'rankdir=LR;', 'node [shape=box];']
parent = None
try:
for ln in sys.stdin:
pkgname = ln.strip()
if pkgname.endswith(':'):
pkgname = pkgname[:-1]
output.append('"{0}" [label="{0}"];'.format(pkgname))
if ln[0] not in ' \t': # parent
parent = pkgname
else:
output.append('"{}" -> "{}";'.format(parent, pkgname))
except KeyboardInterrupt:
print('\n', __doc__)
else:
print('\n'.join(output) + '}')
|
mit
|
Python
|
8408f5431e56309d95076db16c86b0aa2ef044ba
|
Decrease number of messages from MoveToFort worker
|
halsafar/PokemonGo-Bot,DBa2016/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,dtee/PokemonGo-Bot,heihachi/PokemonGo-Bot,goedzo/PokemonGo-Bot,lythien/pokemongo,Gobberwart/PokemonGo-Bot,cmezh/PokemonGo-Bot,DBa2016/PokemonGo-Bot,dtee/PokemonGo-Bot,heihachi/PokemonGo-Bot,halsafar/PokemonGo-Bot,goedzo/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,goedzo/PokemonGo-Bot,Gobberwart/PokemonGo-Bot,goshan/PokemonGo-Bot,goshan/PokemonGo-Bot,lythien/pokemongo,cmezh/PokemonGo-Bot,cmezh/PokemonGo-Bot,dtee/PokemonGo-Bot,heihachi/PokemonGo-Bot,DBa2016/PokemonGo-Bot,goedzo/PokemonGo-Bot,dtee/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,lythien/pokemongo,lythien/pokemongo,Gobberwart/PokemonGo-Bot,halsafar/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,pengzhangdev/PokemonGo-Bot,heihachi/PokemonGo-Bot,DBa2016/PokemonGo-Bot,halsafar/PokemonGo-Bot,cmezh/PokemonGo-Bot
|
pokemongo_bot/event_manager.py
|
pokemongo_bot/event_manager.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from sys import stdout
class EventNotRegisteredException(Exception):
pass
class EventMalformedException(Exception):
pass
class EventHandler(object):
def __init__(self):
pass
def handle_event(self, event, kwargs):
raise NotImplementedError("Please implement")
class EventManager(object):
def __init__(self, *handlers):
self._registered_events = dict()
self._handlers = list(handlers) or []
self._last_event = None
def event_report(self):
for event, parameters in self._registered_events.iteritems():
print '-'*80
print 'Event: {}'.format(event)
if parameters:
print 'Parameters:'
for parameter in parameters:
print '* {}'.format(parameter)
def add_handler(self, event_handler):
self._handlers.append(event_handler)
def register_event(self, name, parameters=[]):
self._registered_events[name] = parameters
def emit(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
raise ArgumentError('Event needs a sender!')
levels = ['info', 'warning', 'error', 'critical', 'debug']
if not level in levels:
raise ArgumentError('Event level needs to be in: {}'.format(levels))
if event not in self._registered_events:
raise EventNotRegisteredException("Event %s not registered..." % event)
if (event == self._last_event) and (event in ["moving_to_fort", "moving_to_lured_fort"]):
stdout.write("\033[1A\033[0K\r")
stdout.flush()
if level == "info" and formatted: self._last_event = event
# verify params match event
parameters = self._registered_events[event]
if parameters:
for k, v in data.iteritems():
if k not in parameters:
raise EventMalformedException("Event %s does not require parameter %s" % (event, k))
formatted_msg = formatted.format(**data)
# send off to the handlers
for handler in self._handlers:
handler.handle_event(event, sender, level, formatted_msg, data)
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
class EventNotRegisteredException(Exception):
pass
class EventMalformedException(Exception):
pass
class EventHandler(object):
def __init__(self):
pass
def handle_event(self, event, kwargs):
raise NotImplementedError("Please implement")
class EventManager(object):
def __init__(self, *handlers):
self._registered_events = dict()
self._handlers = list(handlers) or []
def event_report(self):
for event, parameters in self._registered_events.iteritems():
print '-'*80
print 'Event: {}'.format(event)
if parameters:
print 'Parameters:'
for parameter in parameters:
print '* {}'.format(parameter)
def add_handler(self, event_handler):
self._handlers.append(event_handler)
def register_event(self, name, parameters=[]):
self._registered_events[name] = parameters
def emit(self, event, sender=None, level='info', formatted='', data={}):
if not sender:
raise ArgumentError('Event needs a sender!')
levels = ['info', 'warning', 'error', 'critical', 'debug']
if not level in levels:
raise ArgumentError('Event level needs to be in: {}'.format(levels))
if event not in self._registered_events:
raise EventNotRegisteredException("Event %s not registered..." % event)
# verify params match event
parameters = self._registered_events[event]
if parameters:
for k, v in data.iteritems():
if k not in parameters:
raise EventMalformedException("Event %s does not require parameter %s" % (event, k))
formatted_msg = formatted.format(**data)
# send off to the handlers
for handler in self._handlers:
handler.handle_event(event, sender, level, formatted_msg, data)
|
mit
|
Python
|
05835304797c9486d0c715d5a07d02fffd676b67
|
Fix test to account for new composition
|
johnwlockwood/stream_tap,johnwlockwood/iter_karld_tools,johnwlockwood/stream_tap,johnwlockwood/karl_data
|
karld/tests/test_run_together.py
|
karld/tests/test_run_together.py
|
from itertools import islice
import string
import unittest
from mock import patch, Mock
from ..run_together import csv_file_to_file
class TestCSVFileToFile(unittest.TestCase):
def setUp(self):
self.csv_contents = iter([
'a,b',
'c,d',
'e,f',
])
self.csv_list = (
[u'a', u'b'],
[u'c', u'd'],
[u'e', u'f'],
)
@patch('karld.run_together.ensure_dir')
@patch('karld.run_together.write_as_csv')
@patch('karld.run_together.i_read_buffered_file')
@patch('karld.run_together.csv_reader')
def test_csv_file_to_file(self,
mock_csv_reader,
mock_file_reader,
mock_out_csv,
mock_ensure_dir):
"""
Ensure csv_file_to_file ensures the out directory,
then writes as csv to a filename the same as the input
filename, but lowercase with a prefix and to the out directory
the data from the input file as called with
the csv_row_consumer.
"""
def out_csv(rows, out_file):
return list(islice(rows, 3))
mock_out_csv.side_effect = out_csv
mock_file_reader.return_value = self.csv_contents
mock_csv_reader.return_value = self.csv_list
def row_consumer(rows):
for row in rows:
yield map(string.upper, row)
mock_row_consumer = Mock(side_effect=row_consumer)
out_prefix = "yeah_"
out_dir = "out"
file_path_name = ("in/File.csv", "File.csv")
csv_file_to_file(mock_row_consumer,
out_prefix,
out_dir,
file_path_name)
mock_ensure_dir.assert_called_once_with("out")
self.assertIn('out/yeah_file.csv', mock_out_csv.call_args[0])
mock_row_consumer.assert_called_once_with(self.csv_list)
|
from itertools import islice
import string
import unittest
from mock import patch, Mock
from ..run_together import csv_file_to_file
class TestCSVFileToFile(unittest.TestCase):
def setUp(self):
self.csv_contents = iter([
['a', 'b'],
['c', 'd'],
['e', 'f'],
])
@patch('karld.run_together.ensure_dir')
@patch('karld.run_together.write_as_csv')
@patch('karld.run_together.i_get_csv_data')
def test_csv_file_to_file(self,
mock_in_csv,
mock_out_csv,
mock_ensure_dir):
"""
Ensure csv_file_to_file ensures the out directory,
then writes as csv to a filename the same as the input
filename, but lowercase with a prefix and to the out directory
the data from the input file as called with
the csv_row_consumer.
"""
def out_csv(rows, out_file):
return list(islice(rows, 3))
mock_out_csv.side_effect = out_csv
mock_in_csv.return_value = self.csv_contents
def row_consumer(rows):
for row in rows:
yield map(string.upper, row)
mock_row_consumer = Mock(side_effect=row_consumer)
out_prefix = "yeah_"
out_dir = "out"
file_path_name = ("in/File.csv", "File.csv")
csv_file_to_file(mock_row_consumer,
out_prefix,
out_dir,
file_path_name)
mock_ensure_dir.assert_called_once_with("out")
self.assertIn('out/yeah_file.csv', mock_out_csv.call_args[0])
mock_row_consumer.assert_called_once_with(self.csv_contents)
|
apache-2.0
|
Python
|
cb202e49d2b96dd46d322bb2c9ef21eb3cce05f7
|
Update Google OAuth to user requests_oauthlib
|
alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality,alexisrolland/data-quality
|
api/init/security/oauth/google.py
|
api/init/security/oauth/google.py
|
import json
import os
from flask import redirect, request, session
from flask_restplus import Namespace, Resource
from requests_oauthlib import OAuth2Session
from security.token import get_jwt_token, TokenType, get_token_redirect_response
# OAuth endpoints given in the Google API documentation
AUTHORIZATION_URI = 'https://accounts.google.com/o/oauth2/auth'
TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
USER_PROFILE_URI = 'https://www.googleapis.com/auth/userinfo.profile'
USER_EMAIL_URI = 'https://www.googleapis.com/auth/userinfo.email'
SCOPE = [USER_PROFILE_URI, USER_EMAIL_URI]
# OAuth application configuration created on Google
client_id = os.environ['GOOGLE_CLIENT_ID']
client_secret = os.environ['GOOGLE_CLIENT_SECRET']
redirect_uri = os.environ['GOOGLE_REDIRECT_URI']
def get_user_info(google_session: object):
"""Gets user profile using OAuth session."""
user_profile = google_session.get(USER_PROFILE_URI).content.decode('utf-8')
user_profile = json.loads(user_profile)
print(user_profile)
return user_profile
def register_google_oauth(namespace: Namespace):
"""Registers all endpoints used for Google OAuth authentication."""
@namespace.route('/security/oauth/google')
@namespace.doc()
class googleOAuth(Resource):
"""Defines resource to redirect user to Google OAuth page."""
def get(self):
"""Redirects user to Google OAuth page."""
google_session = OAuth2Session(client_id, redirect_uri=redirect_uri, scope=SCOPE)
url, state = google_session.authorization_url(AUTHORIZATION_URI)
# State is used to prevent CSRF, keep this for later.
session['oauth_state'] = state
return redirect(url)
@namespace.route('/security/oauth/google/callback')
@namespace.doc()
class googleOAuthCallback(Resource):
"""Defines resource to handle callback from Google OAuth."""
def get(self):
"""Handles Google OAuth callback and fetch user access token."""
google_session = OAuth2Session(client_id, state=session['oauth_state'])
token = google_session.fetch_token(TOKEN_URI, client_secret=client_secret, authorization_response=request.url)
# Persist token in session
# session['oauth_token'] = token
user_info = get_user_info(google_session)
jwt = get_jwt_token(TokenType.GOOGLE, user_info['email'], user_info, token)
return get_token_redirect_response(jwt)
|
import os
import json
import requests
from flask import redirect, request
from flask_restplus import Namespace, Resource
from google_auth_oauthlib.flow import Flow
from security.token import get_jwt_token, TokenType, get_token_redirect_response
# pylint: disable=unused-variable
google_redirect_url = os.environ['GOOGLE_REDIRECT_URI']
USER_PROFILE = 'https://www.googleapis.com/oauth2/v1/userinfo?alt=json&access_token={}'
SCOPES = [
'https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile'
]
CLIENT_CONFIG = {
'installed': {
'auth_uri': 'https://accounts.google.com/o/oauth2/auth',
'token_uri': 'https://accounts.google.com/o/oauth2/token',
'redirect_uris': [google_redirect_url],
'client_id': os.environ['GOOGLE_CLIENT_ID'],
'client_secret': os.environ['GOOGLE_CLIENT_SECRET']
}
}
# Create an OAuth flow (Authorization Code) from client id and client secret
flow = Flow.from_client_config(CLIENT_CONFIG, SCOPES)
# Redirect the user back after login
flow.redirect_uri = google_redirect_url
def get_user_info(oauth_token: str):
"""Gets the user profile of the user after login using the corresponding OAuth token"""
request_url = USER_PROFILE.format(oauth_token)
response = requests.get(request_url)
return json.loads(response.text)
def register_google_oauth(namespace: Namespace):
"""Registers all endpoints used for Google OAuth authentication"""
@namespace.route('/security/oauth/google')
@namespace.doc()
class GoogleOAuth(Resource):
"""Defines resource to redirect user to Google OAuth page."""
def get(self):
"""Redirects user to Google OAuth page."""
url, _ = flow.authorization_url()
return redirect(url)
@namespace.route('/security/oauth/google/callback')
@namespace.doc()
class GoogleOAuthCallback(Resource):
"""Defines resource to handle callback from Google OAuth."""
def get(self):
"""Handles Google OAuth callback and fetch user access token."""
code = request.args.get('code')
token = flow.fetch_token(code=code)
user_info = get_user_info(token['access_token'])
jwt = get_jwt_token(TokenType.Google, user_info['email'], user_info, token)
return get_token_redirect_response(jwt)
|
apache-2.0
|
Python
|
25e7574b6d58444ba81b3ad9321662e3a1a6b7e8
|
Apply some PEP8 cleanup
|
OCA/product-variant,OCA/product-variant,OCA/product-variant
|
product_variant_sale_price/models/product_product.py
|
product_variant_sale_price/models/product_product.py
|
# -*- coding: utf-8 -*-
# © 2016 Sergio Teruel <[email protected]>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
class ProductTemplate(models.Model):
_inherit = "product.template"
@api.multi
def write(self, vals):
res = super(ProductTemplate, self).write(vals)
if 'list_price' in vals:
for variant in self.mapped('product_variant_ids'):
variant._onchange_lst_price()
return res
class ProductProduct(models.Model):
_inherit = "product.product"
@api.multi
@api.depends('fix_price')
def _compute_lst_price(self):
for product in self:
price = product.fix_price or product.list_price
if 'uom' in self.env.context:
uom = product.uos_id or product.uom_id
price = uom._compute_price(
product.uom_id.id, price, self.env.context['uom'])
product.lst_price = price
@api.multi
def _inverse_product_lst_price(self):
for product in self:
vals = {}
if 'uom' in self.env.context:
uom = product.uos_id or product.uom_id
vals['fix_price'] = uom._compute_price(
product.uom_id.id,
product.lst_price, self.env.context['uom'])
else:
vals['fix_price'] = product.lst_price
product.write(vals)
lst_price = fields.Float(
compute='_compute_lst_price',
inverse='_inverse_product_lst_price',
)
fix_price = fields.Float(string='Fix Price')
|
# -*- coding: utf-8 -*-
# © 2016 Sergio Teruel <[email protected]>
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
class ProductTemplate(models.Model):
_inherit = "product.template"
@api.multi
def write(self, vals):
res = super(ProductTemplate, self).write(vals)
if 'list_price' in vals:
for product in self:
for variant in product.mapped('product_variant_ids'):
variant._onchange_lst_price()
return res
class ProductProduct(models.Model):
_inherit = "product.product"
@api.multi
@api.depends('fix_price')
def _compute_lst_price(self):
for product in self:
price = product.fix_price or product.list_price
if 'uom' in self.env.context:
uom = product.uos_id or product.uom_id
price = uom._compute_price(
product.uom_id.id, price, self.env.context['uom'])
product.lst_price = price
@api.multi
def _inverse_product_lst_price(self):
for product in self:
vals={}
if 'uom' in self.env.context:
uom = product.uos_id or product.uom_id
vals['fix_price'] = uom._compute_price(product.uom_id.id,
product.lst_price, self.env.context['uom'])
else:
vals['fix_price'] = product.lst_price
product.write(vals)
lst_price = fields.Float(
compute='_compute_lst_price',
inverse='_inverse_product_lst_price',
)
fix_price = fields.Float(string='Fix Price')
|
agpl-3.0
|
Python
|
93181a9a8df89c9ed1ff1e06672cc592a2b689dc
|
Fix deadcode
|
ktok07b6/polyphony,ktok07b6/polyphony,ktok07b6/polyphony
|
polyphony/compiler/deadcode.py
|
polyphony/compiler/deadcode.py
|
from .env import env
from .ir import *
from logging import getLogger
logger = getLogger(__name__)
class DeadCodeEliminator(object):
def process(self, scope):
if scope.is_namespace() or scope.is_class():
return
usedef = scope.usedef
for blk in scope.traverse_blocks():
dead_stms = []
for stm in blk.stms:
if stm.is_a([MOVE, PHIBase]):
if stm.is_a(MOVE) and stm.src.is_a([TEMP, ATTR]) and stm.src.symbol().is_param():
continue
if stm.is_a(MOVE) and stm.src.is_a(CALL):
continue
defvars = usedef.get_vars_defined_at(stm)
for var in defvars:
if not var.is_a(TEMP):
break
uses = usedef.get_stms_using(var.symbol())
if uses:
break
else:
dead_stms.append(stm)
for stm in dead_stms:
blk.stms.remove(stm)
logger.debug('removed dead code: ' + str(stm))
if stm.is_a(MOVE):
var = stm.dst
elif stm.is_a(PHIBase):
var = stm.var
if var.is_a([TEMP, ATTR]) and var.symbol().typ.is_seq():
memnode = var.symbol().typ.get_memnode()
env.memref_graph.remove_node(memnode)
|
from .env import env
from .ir import *
from logging import getLogger
logger = getLogger(__name__)
class DeadCodeEliminator(object):
def process(self, scope):
if scope.is_namespace() or scope.is_class() or scope.is_method():
return
usedef = scope.usedef
for blk in scope.traverse_blocks():
dead_stms = []
for stm in blk.stms:
if stm.is_a([MOVE, PHIBase]):
if stm.is_a(MOVE) and stm.src.is_a([TEMP, ATTR]) and stm.src.symbol().is_param():
continue
if stm.is_a(MOVE) and stm.src.is_a(CALL):
continue
defvars = usedef.get_vars_defined_at(stm)
for var in defvars:
if not var.is_a(TEMP):
break
uses = usedef.get_stms_using(var.symbol())
if uses:
break
else:
dead_stms.append(stm)
for stm in dead_stms:
blk.stms.remove(stm)
logger.debug('removed dead code: ' + str(stm))
if stm.is_a(MOVE):
var = stm.dst
elif stm.is_a(PHIBase):
var = stm.var
if var.is_a([TEMP, ATTR]) and var.symbol().typ.is_seq():
memnode = var.symbol().typ.get_memnode()
env.memref_graph.remove_node(memnode)
|
mit
|
Python
|
e1e4d36096fe2c8cea92b77feabc60d94ac4310a
|
Break class now inherits behaviour from KitchenTimer.
|
doughgle/pomodoro_evolved,doughgle/pomodoro_evolved,doughgle/pomodoro_evolved
|
pomodoro_evolved/rest_break.py
|
pomodoro_evolved/rest_break.py
|
from kitchen_timer import KitchenTimer, AlreadyRunningError, TimeAlreadyUp, NotRunningError
from math import ceil
class BreakAlreadySkipped(Exception): pass
class BreakCannotBeSkippedOnceStarted(Exception): pass
class BreakAlreadyStarted(Exception): pass
class BreakNotStarted(Exception): pass
class BreakAlreadyTerminated(Exception): pass
class Break(KitchenTimer):
'''
Models a timed rest break with a default duration of 5 minutes.
Allows the break to be skipped before starting.
Does not allow the break to be restarted after time's up or it's stopped.
'''
IDLE = "IDLE"
RUNNING = "RUNNING"
SKIPPED = "SKIPPED"
STOPPED = "STOPPED"
TIMEUP = "TIMEUP"
def __init__(self, whenTimeup, durationInMins=5):
self._state = self.IDLE
self._canSkip = True
super(Break, self).__init__(whenTimeup, durationInMins)
def skip(self):
'''
Skips this break forever.
'''
if self._canSkip:
self._state = self.SKIPPED
else:
raise BreakCannotBeSkippedOnceStarted()
def start(self):
'''
Starts the break counting down from the given durationInMins.
'''
if self.wasSkipped():
raise BreakAlreadySkipped()
if self.isStopped():
raise BreakAlreadyTerminated()
try:
super(Break, self).start()
except AlreadyRunningError:
raise BreakAlreadyStarted()
except TimeAlreadyUp:
raise BreakAlreadyTerminated()
else:
self._canSkip = False
def stop(self):
'''
Stops the break forever. Restarting is forbidden.
'''
if self.wasSkipped():
raise BreakAlreadySkipped()
try:
super(Break, self).stop()
except NotRunningError:
raise BreakNotStarted()
def wasSkipped(self):
return self._state == self.SKIPPED
@property
def timeRemaining(self):
'''
Returns the number of whole seconds remaining.
'''
return ceil(super(Break, self).timeRemaining)
|
from kitchen_timer import KitchenTimer, AlreadyRunningError, TimeAlreadyUp, NotRunningError
from math import ceil
class BreakAlreadySkipped(Exception): pass
class BreakCannotBeSkippedOnceStarted(Exception): pass
class BreakAlreadyStarted(Exception): pass
class BreakNotStarted(Exception): pass
class BreakAlreadyTerminated(Exception): pass
class Break(object):
'''
Models a timed rest break with a default duration of 5 minutes.
Allows the break to be skipped before starting.
Does not allow the break to be restarted after time's up or it's stopped.
'''
IDLE = "IDLE"
RUNNING = "RUNNING"
SKIPPED = "SKIPPED"
STOPPED = "STOPPED"
TIMEUP = "TIMEUP"
def __init__(self, whenTimeup, durationInMins=5):
self._state = self.IDLE
self._canSkip = True
self._timer = KitchenTimer(whenTimeup, durationInMins)
def skip(self):
'''
Skips this break forever.
'''
if self._canSkip:
self._state = self.SKIPPED
else:
raise BreakCannotBeSkippedOnceStarted()
def start(self):
'''
Starts the break counting down from the given durationInMins.
'''
if self.wasSkipped():
raise BreakAlreadySkipped()
if self._timer.isStopped():
raise BreakAlreadyTerminated
try:
self._timer.start()
except AlreadyRunningError:
raise BreakAlreadyStarted()
except TimeAlreadyUp:
raise BreakAlreadyTerminated()
else:
self._canSkip = False
def stop(self):
'''
Stops the break forever. Restarting is forbidden.
'''
if self.wasSkipped():
raise BreakAlreadySkipped()
try:
self._timer.stop()
except NotRunningError:
raise BreakNotStarted()
def isRunning(self):
return self._timer.isRunning()
def wasSkipped(self):
return self._state == self.SKIPPED
@property
def timeRemaining(self):
'''
Returns the number of whole seconds remaining.
'''
return ceil(self._timer.timeRemaining)
|
mit
|
Python
|
ca15e6523bd34e551528dce6c6ee3dcb70cf7806
|
Use sed inline (unsure why mv was used originally).
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
pyinfra/modules/util/files.py
|
pyinfra/modules/util/files.py
|
# pyinfra
# File: pyinfra/modules/util/files.py
# Desc: common functions for handling the filesystem
from types import NoneType
def ensure_mode_int(mode):
# Already an int (/None)?
if isinstance(mode, (int, NoneType)):
return mode
try:
# Try making an int ('700' -> 700)
return int(mode)
except (TypeError, ValueError):
pass
# Return as-is (ie +x which we don't need to normalise, it always gets run)
return mode
def sed_replace(state, filename, line, replace, flags=None):
flags = ''.join(flags) if flags else ''
line = line.replace('/', '\/')
replace = replace.replace('/', '\/')
return 'sed -i "s/{0}/{1}/{2}" {3}'.format(
line, replace, flags, filename
)
def chmod(target, mode, recursive=False):
return 'chmod {0}{1} {2}'.format(('-R ' if recursive else ''), mode, target)
def chown(target, user, group=None, recursive=False):
command = 'chown'
user_group = None
if user and group:
user_group = '{0}:{1}'.format(user, group)
elif user:
user_group = user
elif group:
command = 'chgrp'
user_group = group
return '{0}{1} {2} {3}'.format(
command,
' -R' if recursive else '',
user_group,
target
)
|
# pyinfra
# File: pyinfra/modules/util/files.py
# Desc: common functions for handling the filesystem
from types import NoneType
def ensure_mode_int(mode):
# Already an int (/None)?
if isinstance(mode, (int, NoneType)):
return mode
try:
# Try making an int ('700' -> 700)
return int(mode)
except (TypeError, ValueError):
pass
# Return as-is (ie +x which we don't need to normalise, it always gets run)
return mode
def sed_replace(state, filename, line, replace, flags=None):
flags = ''.join(flags) if flags else ''
line = line.replace('/', '\/')
replace = replace.replace('/', '\/')
temp_filename = state.get_temp_filename()
return 'sed "s/{0}/{1}/{2}" {3} > {4} && mv {4} {3}'.format(
line, replace, flags, filename, temp_filename
)
def chmod(target, mode, recursive=False):
return 'chmod {0}{1} {2}'.format(('-R ' if recursive else ''), mode, target)
def chown(target, user, group=None, recursive=False):
command = 'chown'
user_group = None
if user and group:
user_group = '{0}:{1}'.format(user, group)
elif user:
user_group = user
elif group:
command = 'chgrp'
user_group = group
return '{0}{1} {2} {3}'.format(
command,
' -R' if recursive else '',
user_group,
target
)
|
mit
|
Python
|
f015c3e5973c9424734ff6181563ee7905c73428
|
fix version pattern
|
amoskong/scylla-cluster-tests,scylladb/scylla-cluster-tests,amoskong/scylla-cluster-tests,scylladb/scylla-cluster-tests,scylladb/scylla-longevity-tests,amoskong/scylla-cluster-tests,amoskong/scylla-cluster-tests,scylladb/scylla-cluster-tests,scylladb/scylla-cluster-tests,scylladb/scylla-cluster-tests,amoskong/scylla-cluster-tests,scylladb/scylla-longevity-tests,scylladb/scylla-longevity-tests
|
sdcm/utils.py
|
sdcm/utils.py
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright (c) 2017 ScyllaDB
import logging
import re
import os
import glob
from avocado.utils import process
def _remote_get_hash(remoter, file_path):
try:
result = remoter.run('md5sum {}'.format(file_path), verbose=True)
return result.stdout.strip().split()[0]
except Exception as details:
test_logger = logging.getLogger('avocado.test')
test_logger.error(str(details))
return None
def _remote_get_file(remoter, src, dst):
result = remoter.run('curl -L {} -o {}'.format(src, dst), ignore_status=True)
def remote_get_file(remoter, src, dst, hash_expected=None, retries=1):
if not hash_expected:
_remote_get_file(remoter, src, dst)
return
while retries > 0 and _remote_get_hash(remoter, dst) != hash_expected:
_remote_get_file(remoter, src, dst)
retries -= 1
#assert _remote_get_hash(remoter, dst) == hash_expected
def get_monitor_version(full_version, clone=False):
"""
Detect matched dashboard version from scylla version.
:param full_version: version info returned by `scylla --version`
:param clone: force to clone scylla-grafana-monitoring project
:return: dashboard version (eg: 1.7, 2.0, master)
"""
if not os.path.exists('scylla-grafana-monitoring/') or clone:
process.run('rm -rf scylla-grafana-monitoring/')
process.run('git clone https://github.com/scylladb/scylla-grafana-monitoring/')
process.run('cp -r scylla-grafana-monitoring/grafana data_dir/')
if not full_version or '666.development' in full_version:
ret = 'master'
else:
ret = re.findall("-(\d+\.\d+)", full_version)[0]
# We only add dashboard for release version, let's use master for pre-release version
jsons = glob.glob('data_dir/grafana/*.%s.json' % ret)
if not jsons:
ret = 'master'
return ret
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# See LICENSE for more details.
#
# Copyright (c) 2017 ScyllaDB
import logging
import re
import os
import glob
from avocado.utils import process
def _remote_get_hash(remoter, file_path):
try:
result = remoter.run('md5sum {}'.format(file_path), verbose=True)
return result.stdout.strip().split()[0]
except Exception as details:
test_logger = logging.getLogger('avocado.test')
test_logger.error(str(details))
return None
def _remote_get_file(remoter, src, dst):
result = remoter.run('curl -L {} -o {}'.format(src, dst), ignore_status=True)
def remote_get_file(remoter, src, dst, hash_expected=None, retries=1):
if not hash_expected:
_remote_get_file(remoter, src, dst)
return
while retries > 0 and _remote_get_hash(remoter, dst) != hash_expected:
_remote_get_file(remoter, src, dst)
retries -= 1
#assert _remote_get_hash(remoter, dst) == hash_expected
def get_monitor_version(full_version, clone=False):
"""
Detect matched dashboard version from scylla version.
:param full_version: version info returned by `scylla --version`
:param clone: force to clone scylla-grafana-monitoring project
:return: dashboard version (eg: 1.7, 2.0, master)
"""
if not os.path.exists('scylla-grafana-monitoring/') or clone:
process.run('rm -rf scylla-grafana-monitoring/')
process.run('git clone https://github.com/scylladb/scylla-grafana-monitoring/')
process.run('cp -r scylla-grafana-monitoring/grafana data_dir/')
if not full_version or '666.development' in full_version:
ret = 'master'
else:
ret = re.findall("^\w+.\w+", full_version)[0]
# We only add dashboard for release version, let's use master for pre-release version
jsons = glob.glob('data_dir/grafana/*.%s.json' % ret)
if not jsons:
ret = 'master'
return ret
|
agpl-3.0
|
Python
|
c1a263107cac6f55ce01ea5f260c005d307398e7
|
add env vars to ping.json
|
ministryofjustice/laa-legal-adviser-api,ministryofjustice/laa-legal-adviser-api,ministryofjustice/laa-legal-adviser-api
|
laalaa/apps/healthcheck/views.py
|
laalaa/apps/healthcheck/views.py
|
import os
import requests
from django.http import JsonResponse
from django.conf import settings
def ping(request):
res = {
"version_number": os.environ.get('APPVERSION'),
"build_date": os.environ.get('APP_BUILD_DATE'),
"commit_id": os.environ.get('APP_GIT_COMMIT'),
"build_tag": os.environ.get('APP_BUILD_TAG')
}
return JsonResponse(res)
def healthcheck(request):
# Default status is `DOWN` for all services
health = {
'postcodeinfo': {
'status': 'DOWN',
'endpoint': settings.POSTCODEINFO_API_URL
}
}
# Test postcodeinfo
try:
headers = {'Authorization': 'Token {0}'.format(
settings.POSTCODEINFO_AUTH_TOKEN)}
req = requests.get(
'{0}/addresses/?postcode=sw1a1aa'.format(
settings.POSTCODEINFO_API_URL),
headers=headers)
if req.status_code == 200:
health['postcodeinfo']['status'] = 'UP'
except:
pass
if health['postcodeinfo']['status'] == 'UP':
return JsonResponse(health, status=200)
else:
return JsonResponse(health, status=503)
|
import requests
from django.http import JsonResponse
from django.conf import settings
def ping(request):
res = {
"version_number": None,
"build_date": None,
"commit_id": None,
"build_tag": None
}
# Get version details
try:
res['version_number'] = str(open("{0}/../VERSION".format(settings.PROJECT_ROOT)).read().strip())
res['commit_id'] = res['version_number']
res['build'] = res['version_number']
except IOError:
pass
# Get build tag
try:
res['build_tag'] = str(open("{0}/../BUILD_TAG".format(settings.PROJECT_ROOT)).read().strip())
except IOError:
pass
# Get build date
try:
res['build_date'] = str(open("{0}/../BUILD_DATE".format(settings.PROJECT_ROOT)).read().strip())
except IOError:
pass
return JsonResponse(res)
def healthcheck(request):
# Default status is `DOWN` for all services
health = {
'postcodeinfo': {
'status': 'DOWN',
'endpoint': settings.POSTCODEINFO_API_URL
}
}
# Test postcodeinfo
try:
headers = {'Authorization': 'Token {0}'.format(
settings.POSTCODEINFO_AUTH_TOKEN)}
req = requests.get(
'{0}/addresses/?postcode=sw1a1aa'.format(
settings.POSTCODEINFO_API_URL),
headers=headers)
if req.status_code == 200:
health['postcodeinfo']['status'] = 'UP'
except:
pass
if health['postcodeinfo']['status'] == 'UP':
return JsonResponse(health, status=200)
else:
return JsonResponse(health, status=503)
|
mit
|
Python
|
5a4d9255c59be0d5dda8272e0e7ced71822f4d40
|
Fix memory issues by just trying every number
|
CubicComet/exercism-python-solutions
|
prime-factors/prime_factors.py
|
prime-factors/prime_factors.py
|
def prime_factors(n):
factors = []
factor = 2
while n != 1:
while n % factor == 0:
factors += [factor]
n //= factor
factor += 1
return factors
|
import sieve
def prime_factors(n):
primes = sieve.sieve(n)
factors = []
for p in primes:
while n % p == 0:
factors += [p]
n //= p
return factors
|
agpl-3.0
|
Python
|
8ea3350c6944946b60732308c912dc240952237c
|
Revert "Set the right recalbox.log path"
|
recalbox/recalbox-manager,recalbox/recalbox-manager,recalbox/recalbox-manager,sveetch/recalbox-manager,sveetch/recalbox-manager,sveetch/recalbox-manager,sveetch/recalbox-manager,recalbox/recalbox-manager,sveetch/recalbox-manager,recalbox/recalbox-manager
|
project/settings_production.py
|
project/settings_production.py
|
from .settings import *
# Update SITE infos to use the common port 80 to publish the webapp
SITE_FIXED = {
'name': "Recalbox Manager",
'ip': None, # If 'None' find the ip automatically. Use a string to define another ip/hostname
'port': None, # If 'None' no port is added to hostname, so the server have to be reachable from port 80
}
# Production path to the Recalbox logs file
RECALBOX_LOGFILE_PATH = "/root/recalbox.log"
# Use packaged assets
ASSETS_PACKAGED = True
|
from .settings import *
# Update SITE infos to use the common port 80 to publish the webapp
SITE_FIXED = {
'name': "Recalbox Manager",
'ip': None, # If 'None' find the ip automatically. Use a string to define another ip/hostname
'port': None, # If 'None' no port is added to hostname, so the server have to be reachable from port 80
}
# Production path to the Recalbox logs file
RECALBOX_LOGFILE_PATH = "/recalbox/share/system/logs"
# Use packaged assets
ASSETS_PACKAGED = True
|
mit
|
Python
|
812efd4b5addeee879e91c6c660ac2a1a2adfe5d
|
mueve logica de avance de un paso a una funcion
|
uchileFI3104B-2015B/demo-crank-nicolson
|
heat.py
|
heat.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Este script resuelve un problema simple de diffusion en 1D.
La ecuación a resover es:
dT/dt = d2T/dx2; T(0,x) = sin(pi * x); T(t, 0) = T(t, 1) = 0
'''
from __future__ import division
import numpy as np
def inicializa_T(T, N_steps, h):
'''
Rellena T con las condiciones iniciales del problema.
Se asegura que las condiciones en los bordes sean cero.
'''
for i in range(N_steps):
x = i * h
T[i] = np.sin(np.pi * x)
T[0] = 0
T[-1] = 0
def calcula_b(b, N_steps, r):
for j in range(1, N_steps - 1):
b[j] = r * T[j+1] + (1-2*r) * T[j] + r * T[j-1]
def calcula_alpha_y_beta(alhpa, beta, b, r, N_Steps):
Aplus = -1 * r
Acero = (1+2 * r)
Aminus = -1 * r
alpha[0] = 0
beta[0] = 0 # viene de la condicion de borde T(t, 0) = 0
for i in range(1, N_steps):
alpha[i] = -Aplus / (Acero + Aminus*alpha[i-1])
beta[i] = (b[i] - Aminus*beta[i-1]) / (Aminus*alpha[i-1] + Acero)
def avanza_paso_temporal(T, T_next, alpha, beta, N_steps):
T_next[0] = 0
T_next[-1] = 0
for i in range(N_steps - 2, 0, -1):
T_next[i] = alpha[i] * T[i+1] + beta[i]
# T = T_next.copy() # Esto no funciona, hacerlo fuera de la funcion
# Main
# setup
N_steps = 5
h = 1 / (N_steps - 1)
dt = h**2 / 2 # Este es el máximo teórico para el metodo explicito
r = dt / 2 / h**2
T = np.zeros(N_steps)
T_next = np.zeros(N_steps)
b = np.zeros(N_steps)
alpha = np.zeros(N_steps)
beta = np.zeros(N_steps)
inicializa_T(T, N_steps, h)
calcula_b(b, N_steps, r)
calcula_alpha_y_beta(alpha, beta, b, r, N_steps)
# Avanza T en el tiempo
# T_next[0] = 0
# T_next[-1] = 0
# for i in range(N_steps - 2, 0, -1):
# T_next[i] = alpha[i] * T[i+1] + beta[i]
avanza_paso_temporal(T, T_next, alpha, beta, N_steps)
T = T_next.copy()
print T
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Este script resuelve un problema simple de diffusion en 1D.
La ecuación a resover es:
dT/dt = d2T/dx2; T(0,x) = sin(pi * x); T(t, 0) = T(t, 1) = 0
'''
from __future__ import division
import numpy as np
def inicializa_T(T, N_steps, h):
'''
Rellena T con las condiciones iniciales del problema.
Se asegura que las condiciones en los bordes sean cero.
'''
for i in range(N_steps):
x = i * h
T[i] = np.sin(np.pi * x)
T[0] = 0
T[-1] = 0
def calcula_b(b, N_steps, r):
for j in range(1, N_steps - 1):
b[j] = r * T[j+1] + (1-2*r) * T[j] + r * T[j-1]
def calcula_alpha_y_beta(alhpa, beta, b, r, N_Steps):
Aplus = -1 * r
Acero = (1+2 * r)
Aminus = -1 * r
alpha[0] = 0
beta[0] = 0 # viene de la condicion de borde T(t, 0) = 0
for i in range(1, N_steps):
alpha[i] = -Aplus / (Acero + Aminus*alpha[i-1])
beta[i] = (b[i] - Aminus*beta[i-1]) / (Aminus*alpha[i-1] + Acero)
# Main
# setup
N_steps = 5
h = 1 / (N_steps - 1)
dt = h**2 / 2 # Este es el máximo teórico para el metodo explicito
r = dt / 2 / h**2
T = np.zeros(N_steps)
T_next = np.zeros(N_steps)
b = np.zeros(N_steps)
alpha = np.zeros(N_steps)
beta = np.zeros(N_steps)
inicializa_T(T, N_steps, h)
calcula_b(b, N_steps, r)
calcula_alpha_y_beta(alpha, beta, b, r, N_steps)
# Avanza T en el tiempo
T_next[0] = 0
T_next[-1] = 0
for i in range(N_steps - 2, 0, -1):
T_next[i] = alpha[i] * T[i+1] + beta[i]
|
mit
|
Python
|
eb66cae55dee3b401cd84a71f9906cdb42a217bc
|
Update __init__.py
|
williamFalcon/pytorch-lightning,williamFalcon/pytorch-lightning
|
pytorch_lightning/__init__.py
|
pytorch_lightning/__init__.py
|
"""Root package info."""
__version__ = '0.9.0rc4'
__author__ = 'William Falcon et al.'
__author_email__ = '[email protected]'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__
__homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning'
# this has to be simple string, see: https://github.com/pypa/twine/issues/522
__docs__ = (
"PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers."
" Scale your models. Write less boilerplate."
)
__long_docs__ = """
Lightning is a way to organize your PyTorch code to decouple the science code from the engineering.
It's more of a style-guide than a framework.
In Lightning, you organize your code into 3 distinct categories:
1. Research code (goes in the LightningModule).
2. Engineering code (you delete, and is handled by the Trainer).
3. Non-essential research code (logging, etc. this goes in Callbacks).
Although your research/production project might start simple, once you add things like GPU AND TPU training,
16-bit precision, etc, you end up spending more time engineering than researching.
Lightning automates AND rigorously tests those parts for you.
Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts.
Documentation
-------------
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
import logging as python_logging
_logger = python_logging.getLogger("lightning")
_logger.addHandler(python_logging.StreamHandler())
_logger.setLevel(python_logging.INFO)
try:
# This variable is injected in the __builtins__ by the build
# process. It used to enable importing subpackages of skimage when
# the binaries are not built
__LIGHTNING_SETUP__
except NameError:
__LIGHTNING_SETUP__ = False
if __LIGHTNING_SETUP__:
import sys # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
from pytorch_lightning.core import LightningDataModule, LightningModule
from pytorch_lightning.core.step_result import TrainResult, EvalResult
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.trainer import Trainer
from pytorch_lightning.utilities.seed import seed_everything
from pytorch_lightning import metrics
__all__ = [
'Trainer',
'LightningDataModule',
'LightningModule',
'Callback',
'seed_everything',
'metrics',
'EvalResult',
'TrainResult',
]
# necessary for regular bolts imports. Skip exception since bolts is not always installed
try:
from pytorch_lightning import bolts
except ImportError:
pass
# __call__ = __all__
# for compatibility with namespace packages
__import__('pkg_resources').declare_namespace(__name__)
|
"""Root package info."""
__version__ = '0.9.0rc3'
__author__ = 'William Falcon et al.'
__author_email__ = '[email protected]'
__license__ = 'Apache-2.0'
__copyright__ = 'Copyright (c) 2018-2020, %s.' % __author__
__homepage__ = 'https://github.com/PyTorchLightning/pytorch-lightning'
# this has to be simple string, see: https://github.com/pypa/twine/issues/522
__docs__ = (
"PyTorch Lightning is the lightweight PyTorch wrapper for ML researchers."
" Scale your models. Write less boilerplate."
)
__long_docs__ = """
Lightning is a way to organize your PyTorch code to decouple the science code from the engineering.
It's more of a style-guide than a framework.
In Lightning, you organize your code into 3 distinct categories:
1. Research code (goes in the LightningModule).
2. Engineering code (you delete, and is handled by the Trainer).
3. Non-essential research code (logging, etc. this goes in Callbacks).
Although your research/production project might start simple, once you add things like GPU AND TPU training,
16-bit precision, etc, you end up spending more time engineering than researching.
Lightning automates AND rigorously tests those parts for you.
Overall, Lightning guarantees rigorously tested, correct, modern best practices for the automated parts.
Documentation
-------------
- https://pytorch-lightning.readthedocs.io/en/latest
- https://pytorch-lightning.readthedocs.io/en/stable
"""
import logging as python_logging
_logger = python_logging.getLogger("lightning")
_logger.addHandler(python_logging.StreamHandler())
_logger.setLevel(python_logging.INFO)
try:
# This variable is injected in the __builtins__ by the build
# process. It used to enable importing subpackages of skimage when
# the binaries are not built
__LIGHTNING_SETUP__
except NameError:
__LIGHTNING_SETUP__ = False
if __LIGHTNING_SETUP__:
import sys # pragma: no-cover
sys.stdout.write(f'Partial import of `{__name__}` during the build process.\n') # pragma: no-cover
# We are not importing the rest of the lightning during the build process, as it may not be compiled yet
else:
from pytorch_lightning.core import LightningDataModule, LightningModule
from pytorch_lightning.core.step_result import TrainResult, EvalResult
from pytorch_lightning.callbacks import Callback
from pytorch_lightning.trainer import Trainer
from pytorch_lightning.utilities.seed import seed_everything
from pytorch_lightning import metrics
__all__ = [
'Trainer',
'LightningDataModule',
'LightningModule',
'Callback',
'seed_everything',
'metrics',
'EvalResult',
'TrainResult',
]
# necessary for regular bolts imports. Skip exception since bolts is not always installed
try:
from pytorch_lightning import bolts
except ImportError:
pass
# __call__ = __all__
# for compatibility with namespace packages
__import__('pkg_resources').declare_namespace(__name__)
|
apache-2.0
|
Python
|
a5f274b5a3dbb72e109184b7a3c56b2a1dac13b4
|
Enable WebForm page
|
StrellaGroup/frappe,mhbu50/frappe,yashodhank/frappe,almeidapaulopt/frappe,mhbu50/frappe,almeidapaulopt/frappe,frappe/frappe,yashodhank/frappe,yashodhank/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,frappe/frappe,StrellaGroup/frappe,almeidapaulopt/frappe,mhbu50/frappe,mhbu50/frappe,frappe/frappe,yashodhank/frappe
|
frappe/website/serve.py
|
frappe/website/serve.py
|
import frappe
from frappe import _
from frappe.utils import cstr
from frappe.website.page_controllers.document_page import DocumentPage
from frappe.website.page_controllers.list_page import ListPage
from frappe.website.page_controllers.not_permitted_page import NotPermittedPage
from frappe.website.page_controllers.print_page import PrintPage
from frappe.website.page_controllers.template_page import TemplatePage
from frappe.website.page_controllers.static_page import StaticPage
from frappe.website.page_controllers.web_form import WebFormPage
from frappe.website.redirect import resolve_redirect
from frappe.website.render import build_response, resolve_path
def get_response(path=None, http_status_code=200):
"""render html page"""
query_string = None
if not path:
path = frappe.local.request.path
query_string = frappe.local.request.query_string
try:
path = path.strip('/ ')
resolve_redirect(path, query_string)
path = resolve_path(path)
# there is no way to determine the type of the page based on the route
# so evaluate each type of page sequentially
response = StaticPage(path, http_status_code).get()
if not response:
response = TemplatePage(path, http_status_code).get()
if not response:
response = ListPage(path, http_status_code).get()
if not response:
response = WebFormPage(path, http_status_code).get()
if not response:
response = DocumentPage(path, http_status_code).get()
if not response:
response = PrintPage(path, http_status_code).get()
if not response:
response = TemplatePage('404', 404).get()
except frappe.Redirect:
return build_response(path, "", 301, {
"Location": frappe.flags.redirect_location or (frappe.local.response or {}).get('location'),
"Cache-Control": "no-store, no-cache, must-revalidate"
})
except frappe.PermissionError as e:
frappe.local.message = cstr(e)
response = NotPermittedPage(path, http_status_code).get()
except Exception as e:
response = TemplatePage('error', getattr(e, 'http_status_code', None) or http_status_code).get()
return response
|
import frappe
from frappe import _
from frappe.utils import cstr
from frappe.website.page_controllers.document_page import DocumentPage
from frappe.website.page_controllers.list_page import ListPage
from frappe.website.page_controllers.not_permitted_page import NotPermittedPage
from frappe.website.page_controllers.print_page import PrintPage
from frappe.website.page_controllers.template_page import TemplatePage
from frappe.website.page_controllers.static_page import StaticPage
from frappe.website.redirect import resolve_redirect
from frappe.website.render import build_response, resolve_path
def get_response(path=None, http_status_code=200):
"""render html page"""
query_string = None
if not path:
path = frappe.local.request.path
query_string = frappe.local.request.query_string
try:
path = path.strip('/ ')
resolve_redirect(path, query_string)
path = resolve_path(path)
data = None
# there is no way to determine the type of the page based on the route
# so evaluate each type of page sequentially
response = StaticPage(path, http_status_code).get()
if not response:
response = TemplatePage(path, http_status_code).get()
if not response:
response = ListPage(path, http_status_code).get()
if not response:
response = DocumentPage(path, http_status_code).get()
if not response:
response = PrintPage(path, http_status_code).get()
if not response:
response = TemplatePage('404', 404).get()
except frappe.Redirect:
return build_response(path, "", 301, {
"Location": frappe.flags.redirect_location or (frappe.local.response or {}).get('location'),
"Cache-Control": "no-store, no-cache, must-revalidate"
})
except frappe.PermissionError as e:
frappe.local.message = cstr(e)
response = NotPermittedPage(path, http_status_code).get()
except Exception as e:
response = TemplatePage('error', getattr(e, 'http_status_code', None) or http_status_code).get()
return response
|
mit
|
Python
|
fd4688cc899b08253cc50b345bb7e836081783d8
|
Add Beta and Binomial to automatically imported nodes
|
jluttine/bayespy,SalemAmeen/bayespy,fivejjs/bayespy,bayespy/bayespy
|
bayespy/inference/vmp/nodes/__init__.py
|
bayespy/inference/vmp/nodes/__init__.py
|
######################################################################
# Copyright (C) 2011,2012 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
# Import some most commonly used nodes
from . import *
from .binomial import Binomial
from .categorical import Categorical
from .beta import Beta
from .dirichlet import Dirichlet
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
from .mixture import Mixture
from .dot import Dot
from .dot import SumMultiply
|
######################################################################
# Copyright (C) 2011,2012 Jaakko Luttinen
#
# This file is licensed under Version 3.0 of the GNU General Public
# License. See LICENSE for a text of the license.
######################################################################
######################################################################
# This file is part of BayesPy.
#
# BayesPy is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# BayesPy is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with BayesPy. If not, see <http://www.gnu.org/licenses/>.
######################################################################
# Import some most commonly used nodes
from . import *
from .gaussian import Gaussian, GaussianARD
from .wishart import Wishart
from .gamma import Gamma
from .dirichlet import Dirichlet
from .categorical import Categorical
from .dot import Dot, SumMultiply
from .mixture import Mixture
from .gaussian_markov_chain import GaussianMarkovChain
from .gaussian_markov_chain import VaryingGaussianMarkovChain
from .gaussian_markov_chain import SwitchingGaussianMarkovChain
from .categorical_markov_chain import CategoricalMarkovChain
|
mit
|
Python
|
769a334675cc451c6de07ed21e23ffd4480088df
|
Add time/space complexity
|
bowen0701/algorithms_data_structures
|
lc0041_first_missing_positive.py
|
lc0041_first_missing_positive.py
|
"""Leetcode 41. First Missing Positive
Hard
URL: https://leetcode.com/problems/first-missing-positive/
Given an unsorted integer array, find the smallest missing positive integer.
Example 1:
Input: [1,2,0]
Output: 3
Example 2:
Input: [3,4,-1,1]
Output: 2
Example 3:
Input: [7,8,9,11,12]
Output: 1
Note:
Your algorithm should run in O(n) time and uses constant extra space.
"""
class Solution(object):
def firstMissingPositive(self, nums):
"""
:type nums: List[int]
:rtype: int
Time complexity: O(n).
Space complexity: O(1).
"""
n = len(nums)
for i in range(n):
# Keep swapping old & new nums[i]'s to their correct positions.
while nums[i] > 0 and nums[i] <= n and nums[i] != nums[nums[i] - 1]:
# If nums[i] = k, swap it and nums[k - 1] at correct position k - 1.
nums[nums[i] - 1], nums[i] = nums[i], nums[nums[i] - 1]
# Check each updated elements in nums with true positive integer.
for i in range(n):
if i + 1 != nums[i]:
return i + 1
# If all elements in nums are correct, return the last one plus one.
return n + 1
def main():
# Ans: 3
nums = [1,2,0]
print Solution().firstMissingPositive(nums)
# Ans: 2
nums = [3,4,-1,1]
print Solution().firstMissingPositive(nums)
# Ans: 1
nums = [7,8,9,11,12]
print Solution().firstMissingPositive(nums)
# Ans: 1
nums = []
print Solution().firstMissingPositive(nums)
# Ans: 2
nums = [1]
print Solution().firstMissingPositive(nums)
# Ans: 3
nums = [-1,4,2,1,9,10]
print Solution().firstMissingPositive(nums)
if __name__ == '__main__':
main()
|
"""Leetcode 41. First Missing Positive
Hard
URL: https://leetcode.com/problems/first-missing-positive/
Given an unsorted integer array, find the smallest missing positive integer.
Example 1:
Input: [1,2,0]
Output: 3
Example 2:
Input: [3,4,-1,1]
Output: 2
Example 3:
Input: [7,8,9,11,12]
Output: 1
Note:
Your algorithm should run in O(n) time and uses constant extra space.
"""
class Solution(object):
def firstMissingPositive(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
n = len(nums)
for i in range(n):
# Keep swapping old & new nums[i] to their correct positions.
while nums[i] > 0 and nums[i] <= n and nums[i] != nums[nums[i] - 1]:
# If nums[i] = k, swap it and nums[k - 1], with correct position k - 1.
nums[nums[i] - 1], nums[i] = nums[i], nums[nums[i] - 1]
# Check each updated elements in nums with true positive integer.
for i in range(n):
if i + 1 != nums[i]:
return i + 1
# If all elements in nums are correct, return the last one plus one.
return n + 1
def main():
# Ans: 3
nums = [1,2,0]
print Solution().firstMissingPositive(nums)
# Ans: 2
nums = [3,4,-1,1]
print Solution().firstMissingPositive(nums)
# Ans: 1
nums = [7,8,9,11,12]
print Solution().firstMissingPositive(nums)
# Ans: 1
nums = []
print Solution().firstMissingPositive(nums)
# Ans: 2
nums = [1]
print Solution().firstMissingPositive(nums)
# Ans: 3
nums = [-1,4,2,1,9,10]
print Solution().firstMissingPositive(nums)
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
f33b294d60ffbfb5351d4579e38ea197e2c3787a
|
Complete reverse iter sol
|
bowen0701/algorithms_data_structures
|
lc0482_license_key_formatting.py
|
lc0482_license_key_formatting.py
|
"""Leecode 482. License Key Formatting
Easy
URL: https://leetcode.com/problems/license-key-formatting/
You are given a license key represented as a string S which consists only
alphanumeric character and dashes. The string is separated into N+1 groups
by N dashes.
Given a number K, we would want to reformat the strings such that each group
contains exactly K characters, except for the first group which could be
shorter than K, but still must contain at least one character. Furthermore,
there must be a dash inserted between two groups and all lowercase letters
should be converted to uppercase.
Given a non-empty string S and a number K, format the string according to the
rules described above.
Example 1:
Input: S = "5F3Z-2e-9-w", K = 4
Output: "5F3Z-2E9W"
Explanation: The string S has been split into two parts, each part has 4
characters.
Note that the two extra dashes are not needed and can be removed.
Example 2:
Input: S = "2-5g-3-J", K = 2
Output: "2-5G-3J"
Explanation: The string S has been split into three parts, each part has 2
characters except the first part as it could be shorter as mentioned above.
Note:
- The length of string S will not exceed 12,000, and K is a positive integer.
- String S consists only of alphanumerical characters (a-z and/or A-Z and/or 0-9)
and dashes(-).
- String S is non-empty.
"""
class SolutionReverseIter(object):
def licenseKeyFormatting(self, S, K):
"""
:type S: str
:type K: int
:rtype: str
"""
# Upper case and drop dash.
S_nodashes = S.upper().replace('-','')
size_nodashes = len(S_nodashes)
# Reversely iterate through no-dashed list, concat to string until K chars.
res_ls = [''] * (size_nodashes // K + (size_nodashes % K > 0))
cur_idx = len(res_ls) - 1
cur_counter = 0
for i in range(size_nodashes - 1, -1, -1):
if cur_counter < K:
res_ls[cur_idx] = S_nodashes[i] + res_ls[cur_idx]
cur_counter += 1
else:
cur_idx -= 1
res_ls[cur_idx] = S_nodashes[i] + res_ls[cur_idx]
cur_counter = 1
return '-'.join(res_ls)
def main():
# Output: "5F3Z-2E9W"
S = "5F3Z-2e-9-w"
K = 4
print SolutionReverseIter().licenseKeyFormatting(S, K)
# Output: "5F3Z-2E9W"
S = "2-5g-3-J"
K = 2
print SolutionReverseIter().licenseKeyFormatting(S, K)
if __name__ == '__main__':
main()
|
"""Leecode 482. License Key Formatting
Easy
URL: https://leetcode.com/problems/license-key-formatting/
You are given a license key represented as a string S which consists only
alphanumeric character and dashes. The string is separated into N+1 groups
by N dashes.
Given a number K, we would want to reformat the strings such that each group
contains exactly K characters, except for the first group which could be
shorter than K, but still must contain at least one character. Furthermore,
there must be a dash inserted between two groups and all lowercase letters
should be converted to uppercase.
Given a non-empty string S and a number K, format the string according to the
rules described above.
Example 1:
Input: S = "5F3Z-2e-9-w", K = 4
Output: "5F3Z-2E9W"
Explanation: The string S has been split into two parts, each part has 4
characters.
Note that the two extra dashes are not needed and can be removed.
Example 2:
Input: S = "2-5g-3-J", K = 2
Output: "2-5G-3J"
Explanation: The string S has been split into three parts, each part has 2
characters except the first part as it could be shorter as mentioned above.
Note:
- The length of string S will not exceed 12,000, and K is a positive integer.
- String S consists only of alphanumerical characters (a-z and/or A-Z and/or 0-9)
and dashes(-).
- String S is non-empty.
"""
class Solution(object):
def licenseKeyFormatting(self, S, K):
"""
:type S: str
:type K: int
:rtype: str
"""
pass
def main():
pass
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
dc95d6766d305f2126c158f50417e29d0c47ce3f
|
Change doc route
|
odtvince/APITaxi,odtvince/APITaxi,odtvince/APITaxi,l-vincent-l/APITaxi,l-vincent-l/APITaxi,openmaraude/APITaxi,odtvince/APITaxi,openmaraude/APITaxi
|
backoffice_operateurs/__init__.py
|
backoffice_operateurs/__init__.py
|
# -*- coding: utf8 -*-
VERSION = (0, 1, 0)
__author__ = 'Vincent Lara'
__contact__ = "[email protected]"
__homepage__ = "https://github.com/"
__version__ = ".".join(map(str, VERSION))
from flask import Flask, make_response
from flask.ext.security import Security, SQLAlchemyUserDatastore
from flask.ext.script import Manager
from flask.ext.security.utils import verify_and_update_password
from flask.ext import restful
from flask_bootstrap import Bootstrap
import os
from models import db
from models import security as security_models, taxis as taxis_models,\
administrative as administrative_models
from flask.ext.restplus import Api, apidoc
app = Flask(__name__)
app.config.from_object('default_settings')
if 'BO_OPERATEURS_CONFIG_FILE' in os.environ:
app.config.from_envvar('BO_OPERATEURS_CONFIG_FILE')
db.init_app(app)
user_datastore = SQLAlchemyUserDatastore(db, security_models.User,
security_models.Role)
security = Security(app, user_datastore)
api = Api(app, ui=False)
api.model(taxis_models.ADS, taxis_models.ADS.marshall_obj())
ns = api.namespace('ADS', description="Description ADS")
@app.route('/doc/', endpoint='doc')
def swagger_ui():
return apidoc.ui_for(api)
from views import ads
from views import conducteur
from views import zupc
from views import home
app.register_blueprint(ads.mod)
app.register_blueprint(conducteur.mod)
app.register_blueprint(zupc.mod)
app.register_blueprint(home.mod)
app.register_blueprint(apidoc.apidoc)
@api.representation('text/html')
def output_html(data, code=200, headers=None):
resp = make_response(data, code)
resp.headers.extend(headers or {})
return resp
@app.login_manager.request_loader
def load_user_from_request(request):
auth = request.headers.get('Authorization')
if not auth or auth.count(':') != 1:
return None
login, password = auth.split(':')
user = user_datastore.get_user(login.strip())
if user is None:
return None
if not verify_and_update_password(password.strip(), user):
return None
if not user.is_active():
return None
return user
Bootstrap(app)
manager = Manager(app)
|
# -*- coding: utf8 -*-
VERSION = (0, 1, 0)
__author__ = 'Vincent Lara'
__contact__ = "[email protected]"
__homepage__ = "https://github.com/"
__version__ = ".".join(map(str, VERSION))
from flask import Flask, make_response
from flask.ext.security import Security, SQLAlchemyUserDatastore
from flask.ext.script import Manager
from flask.ext.security.utils import verify_and_update_password
from flask.ext import restful
from flask_bootstrap import Bootstrap
import os
from models import db
from models import security as security_models, taxis as taxis_models,\
administrative as administrative_models
from flask.ext.restplus import Api
app = Flask(__name__)
app.config.from_object('default_settings')
if 'BO_OPERATEURS_CONFIG_FILE' in os.environ:
app.config.from_envvar('BO_OPERATEURS_CONFIG_FILE')
db.init_app(app)
user_datastore = SQLAlchemyUserDatastore(db, security_models.User,
security_models.Role)
security = Security(app, user_datastore)
api = Api(app)
api.model(taxis_models.ADS, taxis_models.ADS.marshall_obj())
ns = api.namespace('ADS', description="Description ADS")
from views import ads
from views import conducteur
from views import zupc
from views import home
app.register_blueprint(ads.mod)
app.register_blueprint(conducteur.mod)
app.register_blueprint(zupc.mod)
app.register_blueprint(home.mod)
@api.representation('text/html')
def output_html(data, code=200, headers=None):
resp = make_response(data, code)
resp.headers.extend(headers or {})
return resp
@app.login_manager.request_loader
def load_user_from_request(request):
auth = request.headers.get('Authorization')
if not auth or auth.count(':') != 1:
return None
login, password = auth.split(':')
user = user_datastore.get_user(login.strip())
if user is None:
return None
if not verify_and_update_password(password.strip(), user):
return None
if not user.is_active():
return None
return user
Bootstrap(app)
manager = Manager(app)
|
agpl-3.0
|
Python
|
aa4061887fc750dd63cd226e3fa45f0b56ec2462
|
Update server.py
|
mikelambson/tcid,mikelambson/tcid,mikelambson/tcid,mikelambson/tcid
|
site/server.py
|
site/server.py
|
#Import flask libraries
import json, re, os, datetime, logging;#Import general libraries
from flask import Flask, jsonify, request, render_template, send_from_directory;
from flask_socketio import SocketIO, send, emit, join_room, leave_room, close_room;
from flask_mail import Mail, Message;
from flask_socketio import join_room;
from flask_sqlalchemy import SQLAlchemy;
from sqlalchemy import create_engine;#Engine handler
#from PIL import Image;
#from logging.handlers import RotatingFileHandler;
#from logging import Formatter;
#import environment, recorder;#Import custom libraries
FlaskServer = Flask(__name__);#Dynamic web server
Interactive = SocketIO(FlaskServer);#Socket handler
FlaskServer.config.from_object(os.getenv("SERVER_ENV") if os.getenv("SERVER_ENV") else "environment.Testing");
Mailer = Mail(FlaskServer);#Mail handler
DB = SQLAlchemy(FlaskServer);#Sqlalchemy database handler
import models;
#engine = create_engine('mysqli://tcid:tcid@localhost/tcid);#set username, password, and database
@app.route('/')
def hello_world():
return 'Hello, World!'
|
#Import flask libraries
import json, re, os, datetime, logging;#Import general libraries
from flask import Flask, jsonify, request, render_template, send_from_directory;
from flask_socketio import SocketIO, send, emit, join_room, leave_room, close_room;
from flask_mail import Mail, Message;
from flask_socketio import join_room;
from flask_sqlalchemy import SQLAlchemy;
from sqlalchemy import create_engine;#Engine handler
#from PIL import Image;
from logging.handlers import RotatingFileHandler;
from logging import Formatter;
import environment, recorder;#Import custom libraries
FlaskServer = Flask(__name__);#Dynamic web server
Interactive = SocketIO(FlaskServer);#Socket handler
FlaskServer.config.from_object(os.getenv("SERVER_ENV") if os.getenv("SERVER_ENV") else "environment.Testing");
Mailer = Mail(FlaskServer);#Mail handler
DB = SQLAlchemy(FlaskServer);#Sqlalchemy database handler
import models;
#engine = create_engine('mysqli://tcid:tcid@localhost/tcid);#set username, password, and database
@app.route('/')
def hello_world():
return 'Hello, World!'
|
bsd-3-clause
|
Python
|
ec149e2e6b56f201ed154eaeecab2f651fe70351
|
Update docstrings.
|
makism/dyfunconn
|
dyfunconn/graphs/laplacian_energy.py
|
dyfunconn/graphs/laplacian_energy.py
|
# -*- coding: utf-8 -*-
""" Laplcian Energy
The Laplcian energy (LE) for a graph :math:`G` is computed as
.. math::
LE(G) = \\sum_{i=1}^n | { \\mu_{i} - \\frac{2m}{n} } |
ξ(A_1, A_2 ; t) = ‖exp(-tL_1 ) - exp(-tL_2 )‖_F^2
Where :math:`\mu_i` denote the eigenvalue associated with the node of the Laplcian
matrix of :math:`G` (Laplcian spectrum) and :math:`\\frac{2m}{n}` the average vertex degree.
For a details please go through the original work (Gutman2006_).
|
-----
.. [Gutman2006] Gutman, I., & Zhou, B. (2006). Laplacian energy of a graph. Linear Algebra and its applications, 414(1), 29-37.
"""
# Author: Avraam Marimpis <[email protected]>"
import numpy as np
import scipy
from scipy import sparse
import bct
def laplacian_energy(mtx):
""" Laplacian Energy
Parameters
----------
mtx : array-like, shape(N, N)
Symmetric, weighted and undirected connectivity matrix.
Returns
-------
le : float
The Laplacian Energy.
"""
lmtx = scipy.sparse.csgraph.laplacian(mtx, normed=False)
w, v = np.linalg.eig(lmtx)
avg_degree = np.mean(bct.degrees_und(mtx))
le = np.sum(np.abs(w - avg_degree))
return le
|
# -*- coding: utf-8 -*-
""" Laplcian Energy
The Laplcian energy (LE) for a graph :math:`G` is computed as
.. math::
LE(G) = \sum_{i=1}^n | {\mu_i - \frac{2m}{n}} |
ξ(A_1, A_2 ; t) = ‖exp(-tL_1 ) - exp(-tL_2 )‖_F^2
Where :math:``\mu_i` denote the eigenvalue associated with the node of the Laplcian
matrix of :math:`G` (Laplcian spectrum) and :math:`\frac{2m}{n}` the average vertex degree.
For a details please go through the original work (Gutman2006_).
|
-----
.. [Gutman2006] Gutman, I., & Zhou, B. (2006). Laplacian energy of a graph. Linear Algebra and its applications, 414(1), 29-37.
"""
# Author: Avraam Marimpis <[email protected]>"
import numpy as np
import scipy
from scipy import sparse
import bct
def laplacian_energy(mtx):
""" Laplacian Energy
Parameters
----------
mtx : array-like, shape(N, N)
Symmetric, weighted and undirected connectivity matrix.
Returns
-------
le : float
The Laplacian Energy.
"""
lmtx = scipy.sparse.csgraph.laplacian(mtx, normed=False)
w, v = np.linalg.eig(lmtx)
avg_degree = np.mean(bct.degrees_und(mtx))
le = np.sum(np.abs(w - avg_degree))
return le
|
bsd-3-clause
|
Python
|
cd3e129c1951dbb1d2d99d454b1e07d96d1d5497
|
Support multi or non-multi mappers for bowtie alignments
|
lbeltrame/bcbio-nextgen,vladsaveliev/bcbio-nextgen,SciLifeLab/bcbio-nextgen,hjanime/bcbio-nextgen,vladsaveliev/bcbio-nextgen,fw1121/bcbio-nextgen,lpantano/bcbio-nextgen,SciLifeLab/bcbio-nextgen,SciLifeLab/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,biocyberman/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,verdurin/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,lbeltrame/bcbio-nextgen,elkingtonmcb/bcbio-nextgen,biocyberman/bcbio-nextgen,brainstorm/bcbio-nextgen,hjanime/bcbio-nextgen,mjafin/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,brainstorm/bcbio-nextgen,chapmanb/bcbio-nextgen,a113n/bcbio-nextgen,fw1121/bcbio-nextgen,hjanime/bcbio-nextgen,gifford-lab/bcbio-nextgen,mjafin/bcbio-nextgen,Cyberbio-Lab/bcbio-nextgen,gifford-lab/bcbio-nextgen,brainstorm/bcbio-nextgen,biocyberman/bcbio-nextgen,verdurin/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,lbeltrame/bcbio-nextgen,a113n/bcbio-nextgen,vladsaveliev/bcbio-nextgen,mjafin/bcbio-nextgen,lpantano/bcbio-nextgen,a113n/bcbio-nextgen,lpantano/bcbio-nextgen,guillermo-carrasco/bcbio-nextgen,fw1121/bcbio-nextgen,chapmanb/bcbio-nextgen,chapmanb/bcbio-nextgen,gifford-lab/bcbio-nextgen,verdurin/bcbio-nextgen
|
bcbio/ngsalign/bowtie.py
|
bcbio/ngsalign/bowtie.py
|
"""Next gen sequence alignments with Bowtie (http://bowtie-bio.sourceforge.net).
"""
import os
import subprocess
from bcbio.utils import file_transaction
galaxy_location_file = "bowtie_indices.loc"
def align(fastq_file, pair_file, ref_file, out_base, align_dir, config):
"""Before a standard or paired end alignment with bowtie.
"""
qual_format = config["algorithm"].get("quality_format", None)
if qual_format is None or qual_format.lower() == "illumina":
qual_flags = ["--phred64-quals"]
else:
qual_flags = []
multi_mappers = config["algorithm"].get("multiple_mappers", True)
multi_flags = ["-M", 1] if multi_mappers else ["-m", 1]
out_file = os.path.join(align_dir, "%s.sam" % out_base)
if not os.path.exists(out_file):
cl = [config["program"]["bowtie"]]
cl += qual_flags
cl += multi_flags
cl += ["-q",
"-v", config["algorithm"]["max_errors"],
"-k", 1,
"-X", 1000, # matches bwa sampe default size
"--best",
"--strata",
"--sam",
ref_file]
if pair_file:
cl += ["-1", fastq_file, "-2", pair_file]
else:
cl += [fastq_file]
cl += [out_file]
cl = [str(i) for i in cl]
with file_transaction(out_file):
subprocess.check_call(cl)
return out_file
|
"""Next gen sequence alignments with Bowtie (http://bowtie-bio.sourceforge.net).
"""
import os
import subprocess
from bcbio.utils import file_transaction
galaxy_location_file = "bowtie_indices.loc"
def align(fastq_file, pair_file, ref_file, out_base, align_dir, config):
"""Before a standard or paired end alignment with bowtie.
"""
qual_format = config["algorithm"].get("quality_format", None)
if qual_format is None or qual_format.lower() == "illumina":
qual_flags = ["--phred64-quals"]
else:
qual_flags = []
out_file = os.path.join(align_dir, "%s.sam" % out_base)
if not os.path.exists(out_file):
cl = [config["program"]["bowtie"]]
cl += qual_flags
cl += ["-q",
"-v", config["algorithm"]["max_errors"],
"-k", 1,
"-X", 1000, # matches bwa sampe default size
"-M", 1,
"--best",
"--strata",
"--sam",
ref_file]
if pair_file:
cl += ["-1", fastq_file, "-2", pair_file]
else:
cl += [fastq_file]
cl += [out_file]
cl = [str(i) for i in cl]
with file_transaction(out_file):
subprocess.check_call(cl)
return out_file
|
mit
|
Python
|
a6283772b07a29faa54a8c141947e19005bef61e
|
append max and min to entire dataset
|
navierula/Research-Fall-2017
|
minMaxCalc.py
|
minMaxCalc.py
|
import pandas as pd
# read in dataset
xl = pd.ExcelFile("data/130N_Cycles_1-47.xlsx")
df = xl.parse("Specimen_RawData_1")
df
"""
This is what the dataset currently looks like - it has 170,101 rows and two columns.
The dataset contains data from 47 cycles following an experiment.
The output of these experiments form the two columns:<br>
- time (seconds)
- load (exerted force, in Newtons)
My task is to find the local maxima and minima in the dataset, and mark these values in a
database. Initially, the database will consist of four columns: time, load, max, and min.
It can be modified or condensed later on to fit further requirements.
This is the criteria I will use to find the maxima:
- write each row in the db to a cache
- initialize a flag value to false
- if the force in the previous row is smaller than the force in the next row, write the new row to the cache (leave the flag as false)
- if the force in the previous row is bigger than the force in the next row, write the new row to cache and mark it as a max cycle
(change the flag to true)
This is the criteria I will use to find the minima:
- write each row in the db to a cache
- initialize a flag value to false
- if the force in the previous row is bigger than the force in the next row, write the new row to the cache (leave the flag as false)
- if the force in the previous row is smaller than the force in the next row, write the new row to the cache and mark it as a min cycle
(change the flag to true)
"""
# append data from time column to list
time = []
for item in df.index:
time.append(df["Time"][item])
# append data from load column to list
load = []
for item in df.index:
load.append(df["Load"][item])
# create list of tuples for time and load
data = []
for i, j in zip(time, load):
data.append((i,j))
##############################################################################
# create db model
db = []
# create cache store
cache = []
load.sort(key=float) # previously key = int
totals = []
for count, items in enumerate(load):
counter = count + 1
last_object = (counter, load[count], load[(len(load)-1) - count])
totals.append(last_object)
our_totals = totals[:47]
#print(our_totals)
combine_data = []
for i in data:
for j in our_totals:
if i[1] == j[1]:
combine_data.append(i + ("min",))
if i[1] == j[2]:
combine_data.append(i + ("max",))
else:
combine_data.append(i)
print(combine_data)
with open("cycleStartEnd.txt", "w") as fp:
for item in totals[:47]:
fp.write("Cycle: %s" % item[0] + "\n")
fp.write("Starting force: %s" % item[1] + "\n")
fp.write("Ending force: %s" % item[2] + "\n\n")
|
import pandas as pd
# read in dataset
xl = pd.ExcelFile("data/130N_Cycles_1-47.xlsx")
df = xl.parse("Specimen_RawData_1")
df
"""
This is what the dataset currently looks like - it has 170,101 rows and two columns.
The dataset contains data from 47 cycles following an experiment.
The output of these experiments form the two columns:<br>
- time (seconds)
- load (exerted force, in Newtons)
My task is to find the local maxima and minima in the dataset, and mark these values in a
database. Initially, the database will consist of four columns: time, load, max, and min.
It can be modified or condensed later on to fit further requirements.
This is the criteria I will use to find the maxima:
- write each row in the db to a cache
- initialize a flag value to false
- if the force in the previous row is smaller than the force in the next row, write the new row to the cache (leave the flag as false)
- if the force in the previous row is bigger than the force in the next row, write the new row to cache and mark it as a max cycle
(change the flag to true)
This is the criteria I will use to find the minima:
- write each row in the db to a cache
- initialize a flag value to false
- if the force in the previous row is bigger than the force in the next row, write the new row to the cache (leave the flag as false)
- if the force in the previous row is smaller than the force in the next row, write the new row to the cache and mark it as a min cycle
(change the flag to true)
"""
# append data from time column to list
time = []
for item in df.index:
time.append(df["Time"][item])
# append data from load column to list
load = []
for item in df.index:
load.append(df["Load"][item])
# create list of tuples for time and load
data = []
for i, j in zip(time, load):
data.append((i,j))
##############################################################################
# create db model
db = []
# create cache store
cache = []
load.sort(key=float) # previously key = int
totals = []
for count, items in enumerate(load):
counter = count + 1
last_object = (counter, load[count], load[(len(load)-1) - count])
totals.append(last_object)
our_totals = totals[:47]
print(our_totals)
combine_data = []
for i in data:
for j in our_totals:
if i[1] == j[1]:
with open("cycleStartEnd.txt", "w") as fp:
for item in totals[:47]:
fp.write("Cycle: %s" % item[0] + "\n")
fp.write("Starting force: %s" % item[1] + "\n")
fp.write("Ending force: %s" % item[2] + "\n\n")
|
mit
|
Python
|
fff56b52afb40ee0a69c9a84b847f7ccc0836bd6
|
Update some admin list parameters.
|
gauravjns/taiga-back,CoolCloud/taiga-back,joshisa/taiga-back,crr0004/taiga-back,dycodedev/taiga-back,gam-phon/taiga-back,astronaut1712/taiga-back,astagi/taiga-back,19kestier/taiga-back,bdang2012/taiga-back-casting,CoolCloud/taiga-back,seanchen/taiga-back,coopsource/taiga-back,gam-phon/taiga-back,Tigerwhit4/taiga-back,gam-phon/taiga-back,taigaio/taiga-back,gauravjns/taiga-back,CoolCloud/taiga-back,19kestier/taiga-back,seanchen/taiga-back,EvgeneOskin/taiga-back,frt-arch/taiga-back,obimod/taiga-back,Rademade/taiga-back,astagi/taiga-back,frt-arch/taiga-back,coopsource/taiga-back,xdevelsistemas/taiga-back-community,jeffdwyatt/taiga-back,obimod/taiga-back,astronaut1712/taiga-back,Zaneh-/bearded-tribble-back,joshisa/taiga-back,dayatz/taiga-back,WALR/taiga-back,obimod/taiga-back,crr0004/taiga-back,jeffdwyatt/taiga-back,CMLL/taiga-back,CMLL/taiga-back,WALR/taiga-back,rajiteh/taiga-back,forging2012/taiga-back,taigaio/taiga-back,coopsource/taiga-back,jeffdwyatt/taiga-back,xdevelsistemas/taiga-back-community,astronaut1712/taiga-back,Rademade/taiga-back,EvgeneOskin/taiga-back,seanchen/taiga-back,CoolCloud/taiga-back,gam-phon/taiga-back,astronaut1712/taiga-back,Rademade/taiga-back,forging2012/taiga-back,bdang2012/taiga-back-casting,gauravjns/taiga-back,Tigerwhit4/taiga-back,19kestier/taiga-back,Rademade/taiga-back,astagi/taiga-back,rajiteh/taiga-back,dayatz/taiga-back,dycodedev/taiga-back,EvgeneOskin/taiga-back,gauravjns/taiga-back,frt-arch/taiga-back,bdang2012/taiga-back-casting,dycodedev/taiga-back,coopsource/taiga-back,forging2012/taiga-back,obimod/taiga-back,bdang2012/taiga-back-casting,astagi/taiga-back,jeffdwyatt/taiga-back,Tigerwhit4/taiga-back,Zaneh-/bearded-tribble-back,xdevelsistemas/taiga-back-community,dayatz/taiga-back,CMLL/taiga-back,rajiteh/taiga-back,crr0004/taiga-back,CMLL/taiga-back,joshisa/taiga-back,joshisa/taiga-back,WALR/taiga-back,WALR/taiga-back,seanchen/taiga-back,dycodedev/taiga-back,crr0004/taiga-back,Rademade/taiga-back,EvgeneOskin/taiga-back,taigaio/taiga-back,Zaneh-/bearded-tribble-back,forging2012/taiga-back,rajiteh/taiga-back,Tigerwhit4/taiga-back
|
greenmine/scrum/admin.py
|
greenmine/scrum/admin.py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from greenmine.scrum import models
import reversion
class MilestoneInline(admin.TabularInline):
model = models.Milestone
fields = ('name', 'owner', 'estimated_start', 'estimated_finish', 'closed', 'disponibility', 'order')
sortable_field_name = 'order'
extra = 0
class UserStoryInline(admin.TabularInline):
model = models.UserStory
fields = ('subject', 'order')
sortable_field_name = 'order'
extra = 0
def get_inline_instances(self, request, obj=None):
if obj:
return obj.user_stories.filter(mileston__isnone=True)
else:
return models.UserStory.objects.none()
class ProjectAdmin(reversion.VersionAdmin):
list_display = ["name", "owner"]
inlines = [MilestoneInline, UserStoryInline]
admin.site.register(models.Project, ProjectAdmin)
class MilestoneAdmin(reversion.VersionAdmin):
list_display = ["name", "project", "owner", "closed", "estimated_start", "estimated_finish"]
admin.site.register(models.Milestone, MilestoneAdmin)
class UserStoryAdmin(reversion.VersionAdmin):
list_display = ["ref", "milestone", "project", "owner", 'status', 'is_closed']
admin.site.register(models.UserStory, UserStoryAdmin)
class ChangeAdmin(reversion.VersionAdmin):
list_display = ["id", "change_type", "project", "owner"]
admin.site.register(models.Change, ChangeAdmin)
class ChangeAttachmentAdmin(reversion.VersionAdmin):
list_display = ["id", "change", "owner"]
admin.site.register(models.ChangeAttachment, ChangeAttachmentAdmin)
class TaskAdmin(reversion.VersionAdmin):
list_display = ["subject", "user_story"]
class IssueAdmin(reversion.VersionAdmin):
list_display = ["subject", "type"]
class SeverityAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class PriorityAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class PointsAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class IssueTypeAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class IssueStatusAdmin(admin.ModelAdmin):
list_display = ["name", "order", "is_closed", "project"]
class TaskStatusAdmin(admin.ModelAdmin):
list_display = ["name", "order", "is_closed", "project"]
class UserStoryStatusAdmin(admin.ModelAdmin):
list_display = ["name", "order", "is_closed", "project"]
admin.site.register(models.Task, TaskAdmin)
admin.site.register(models.Issue, IssueAdmin)
admin.site.register(models.Severity, SeverityAdmin)
admin.site.register(models.IssueStatus, IssueStatusAdmin)
admin.site.register(models.TaskStatus, TaskStatusAdmin)
admin.site.register(models.UserStoryStatus, UserStoryStatusAdmin)
admin.site.register(models.Priority, PriorityAdmin)
admin.site.register(models.IssueType, IssueTypeAdmin)
admin.site.register(models.Points, PointsAdmin)
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from guardian.admin import GuardedModelAdmin
from greenmine.scrum import models
import reversion
class MilestoneInline(admin.TabularInline):
model = models.Milestone
fields = ('name', 'owner', 'estimated_start', 'estimated_finish', 'closed', 'disponibility', 'order')
sortable_field_name = 'order'
extra = 0
class UserStoryInline(admin.TabularInline):
model = models.UserStory
fields = ('subject', 'order')
sortable_field_name = 'order'
extra = 0
def get_inline_instances(self, request, obj=None):
if obj:
return obj.user_stories.filter(mileston__isnone=True)
else:
return models.UserStory.objects.none()
class ProjectAdmin(reversion.VersionAdmin):
list_display = ["name", "owner"]
inlines = [MilestoneInline, UserStoryInline]
admin.site.register(models.Project, ProjectAdmin)
class MilestoneAdmin(reversion.VersionAdmin):
list_display = ["name", "project", "owner", "closed", "estimated_start", "estimated_finish"]
admin.site.register(models.Milestone, MilestoneAdmin)
class UserStoryAdmin(reversion.VersionAdmin):
list_display = ["ref", "milestone", "project", "owner"]
admin.site.register(models.UserStory, UserStoryAdmin)
class ChangeAdmin(reversion.VersionAdmin):
list_display = ["id", "change_type", "project", "owner"]
admin.site.register(models.Change, ChangeAdmin)
class ChangeAttachmentAdmin(reversion.VersionAdmin):
list_display = ["id", "change", "owner"]
admin.site.register(models.ChangeAttachment, ChangeAttachmentAdmin)
class TaskAdmin(reversion.VersionAdmin):
list_display = ["subject", "user_story"]
class IssueAdmin(reversion.VersionAdmin):
list_display = ["subject", "type"]
class SeverityAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class PriorityAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class PointsAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class IssueTypeAdmin(admin.ModelAdmin):
list_display = ["name", "order", "project"]
class IssueStatusAdmin(admin.ModelAdmin):
list_display = ["name", "order", "is_closed", "project"]
class TaskStatusAdmin(admin.ModelAdmin):
list_display = ["name", "order", "is_closed", "project"]
class UserStoryStatusAdmin(admin.ModelAdmin):
list_display = ["name", "order", "is_closed", "project"]
admin.site.register(models.Task, TaskAdmin)
admin.site.register(models.Issue, IssueAdmin)
admin.site.register(models.Severity, SeverityAdmin)
admin.site.register(models.IssueStatus, IssueStatusAdmin)
admin.site.register(models.TaskStatus, TaskStatusAdmin)
admin.site.register(models.UserStoryStatus, UserStoryStatusAdmin)
admin.site.register(models.Priority, PriorityAdmin)
admin.site.register(models.IssueType, IssueTypeAdmin)
admin.site.register(models.Points, PointsAdmin)
|
agpl-3.0
|
Python
|
f66a679a1ca8f78a12567a1d8acfe04ca2778ce3
|
allow removal of genomes and fragments in admin
|
ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge
|
src/edge/admin.py
|
src/edge/admin.py
|
from django.contrib import admin
from edge.models import Genome, Fragment
class Genome_Admin(admin.ModelAdmin):
list_display = ('id', 'name', 'notes', 'parent', 'created_on')
search_fields = ('name',)
fields = ('name', 'notes', 'active')
actions = None
def has_add_permission(self, request):
return False
admin.site.register(Genome, Genome_Admin)
class Fragment_Admin(admin.ModelAdmin):
list_display = ('id', 'name', 'circular', 'parent', 'created_on')
search_fields = ('name',)
fields = ('name', 'circular', 'active')
actions = None
def has_add_permission(self, request):
return False
admin.site.register(Fragment, Fragment_Admin)
|
from django.contrib import admin
from edge.models import Genome, Fragment
class Genome_Admin(admin.ModelAdmin):
list_display = ('id', 'name', 'notes', 'parent', 'created_on')
search_fields = ('name',)
fields = ('name', 'notes', 'active')
actions = None
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
def delete_model(self, request, obj):
raise Exception("Not allowed")
admin.site.register(Genome, Genome_Admin)
class Fragment_Admin(admin.ModelAdmin):
list_display = ('id', 'name', 'circular', 'parent', 'created_on')
search_fields = ('name',)
fields = ('name', 'circular', 'active')
actions = None
def has_add_permission(self, request):
return False
def has_delete_permission(self, request, obj=None):
return False
def delete_model(self, request, obj):
raise Exception("Not allowed")
admin.site.register(Fragment, Fragment_Admin)
|
mit
|
Python
|
ed21e865f346b700c48458f22e3d3f1841f63451
|
Fix JSON encoder to work with Decimal fields
|
jimbobhickville/swd6,jimbobhickville/swd6,jimbobhickville/swd6
|
api/swd6/api/app.py
|
api/swd6/api/app.py
|
import flask
import flask_cors
from sqlalchemy_jsonapi import flaskext as flask_jsonapi
import logging
from swd6.config import CONF
from swd6.db.models import db
logging.basicConfig(level=logging.DEBUG)
app = flask.Flask(__name__)
app.config['DEBUG'] = True
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = CONF.db.uri
app.config['SERVER_NAME'] = CONF.api.host
app.logger.setLevel(logging.DEBUG)
flask_cors.CORS(app, origins=CONF.api.cors_hosts)
logging.getLogger('flask_cors').level = logging.DEBUG
db.init_app(app)
import json
import uuid
import datetime
import decimal
class JSONAPIEncoder(json.JSONEncoder):
""" JSONEncoder Implementation that allows for UUID and datetime """
def default(self, value):
"""
Handle UUID, datetime, decimal, and callables.
:param value: Value to encode
"""
if isinstance(value, uuid.UUID):
return str(value)
elif isinstance(value, datetime.datetime):
return value.isoformat()
elif isinstance(value, decimal.Decimal):
return str(value)
elif callable(value):
return str(value)
return json.JSONEncoder.default(self, value)
flask_jsonapi.FlaskJSONAPI.json_encoder = JSONAPIEncoder
api = flask_jsonapi.FlaskJSONAPI(app, db)
|
import flask
import flask_cors
from sqlalchemy_jsonapi import flaskext as flask_jsonapi
import logging
from swd6.config import CONF
from swd6.db.models import db
logging.basicConfig(level=logging.DEBUG)
app = flask.Flask(__name__)
app.config['DEBUG'] = True
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = CONF.db.uri
app.config['SERVER_NAME'] = CONF.api.host
app.logger.setLevel(logging.DEBUG)
flask_cors.CORS(app, origins=CONF.api.cors_hosts)
logging.getLogger('flask_cors').level = logging.DEBUG
db.init_app(app)
api = flask_jsonapi.FlaskJSONAPI(app, db)
|
apache-2.0
|
Python
|
efe79ebfa2b023e6971244b7f3c803a09dd6d2c7
|
change check to skipp
|
FESOM/pyfesom,FESOM/pyfesom
|
tools/pcdo.py
|
tools/pcdo.py
|
import glob
from joblib import Parallel, delayed
import os
import click
def cdo_command(ifile, opath, command, ext, skip):
if opath != 'no':
ofile = os.path.join(opath, '{}_{}.nc'.format(os.path.basename(ifile)[:-3], ext))
else:
ofile = ' '
if skip:
if os.path.isfile(ofile):
print('File {} exist, --checko flag is present, skipping'.format(ofile))
return
print('cdo {} {} {}'.format(command, ifile, ofile))
os.system('cdo {} {} {}'.format(command, ifile, ofile))
@click.command()
@click.argument('ipath', nargs=-1, type=click.Path(exists=True), required=True)
@click.argument('opath', nargs=1, required=False, default='no')
@click.option('--ncore', '-n', default=2, help = 'Number of cores (parallel processes)', show_default=True)
@click.option('--cdo','-c', required=True, help = 'CDO command as a string !!!IN QUOTATION MARKS!!!, eg \" monmean -shifttime,-12hour \"')
@click.option('--ext','-e', default='tm', required=False,show_default=True,
help='Extention to be used for the output file.')
@click.option('--skip', '-s', is_flag=True,
help='Skip the calculation if the output file already exist.')
def pcdo(ipath, opath, ncore, cdo, ext, skip):
'''
Runs several (-n) cdo processes in paralel. Input (ipath) is a list (wildcard) of files. The cdo command (-c) is
executed for every file and the output files with extention (-e) will be written to the output path (opath).
Example:
python pcdo.py
ipath - Input files, must be the path with wildcards (e.g. /path/to/files/temp_fesom_193[3-7]????.nc)
opath - Path where the output will be stored or "no" for operators that do not require output file.
'''
Parallel(n_jobs=ncore)(delayed(cdo_command)(l, opath, cdo, ext, skip) for l in ipath)
if __name__ == '__main__':
pcdo()
|
import glob
from joblib import Parallel, delayed
import os
import click
def cdo_comand(ifile, opath, command, ext, checko):
if opath != 'no':
ofile = os.path.join(opath, '{}_tm.nc'.format(os.path.basename(ifile)[:-3]))
else:
ofile = ' '
if checko:
if os.path.isfile(ofile):
print('File {} exist, --checko flag is present, skipping'.format(ofile))
return
print('cdo {} {} {}'.format(command, ifile, ofile))
os.system('cdo {} {} {}'.format(command, ifile, ofile))
@click.command()
@click.argument('ipath', nargs=-1, type=click.Path(exists=True), required=True)
@click.argument('opath', nargs=1, required=False, default='no')
@click.option('--ncore', '-n', default=2, help = 'Number of cores (parallel processes)', show_default=True)
@click.option('--cdo', required=True, help = 'String of cdo commands !!!IN QUOTATION MARKS!!!, eg \" monmean -shifttime,-12hour \"')
@click.option('--ext','-e', default='tm', required=False,show_default=True,
help='Extention to be used for the output file.')
@click.option('--checko', '-c', is_flag=True,
help='Skip the calculation if the output file already exist.')
def pcdo(ipath, opath, ncore, cdo, ext, checko):
'''
ipath - Input files, must be the path with wildcards (e.g. /path/to/files/temp_fesom_193[3-7]????.nc)
opath - Path where the output will be stored or "no" for operators that do not require output file.
'''
Parallel(n_jobs=ncore)(delayed(cdo_comand)(l, opath, cdo, ext, checko) for l in ipath)
if __name__ == '__main__':
pcdo()
|
mit
|
Python
|
eb368c11b7d0e481c6539130c34cb0b04c8f57a6
|
add prompt number
|
faycheng/tpl,faycheng/tpl
|
tpl/prompt.py
|
tpl/prompt.py
|
# -*- coding:utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import prompt_toolkit
from prompt_toolkit.history import FileHistory
from prompt_toolkit.completion import Completion, Completer
from tpl import path
class WordMatchType(object):
CONTAINS = 'CONTAINES'
STARTSWITH = 'STARTSWITH'
class WordCompleter(Completer):
def __init__(self, words=None, history=None, match_type=WordMatchType.CONTAINS):
self.words = words or []
self.history = history or []
self.match_type = match_type
def match(self, word_before_cursor, word):
if self.match_type == WordMatchType.CONTAINS:
return word_before_cursor in word
# TODO 需要做一下去重,避免 words 和 history yield 了相同的 completions
def get_completions(self, document, complete_event):
word_before_cursor = document.text_before_cursor.lower()
for word in self.words:
if self.match(word_before_cursor, word):
display_meta = ' custom'
yield Completion(word, -len(word_before_cursor), display_meta=display_meta)
for record in self.history:
if self.match(word_before_cursor, record):
display_meta = ' history'
yield Completion(record, -len(word_before_cursor), display_meta=display_meta)
history = FileHistory(os.path.join(path.HOME, '.templates', 'tpl.history'))
def prompt_str(message, default=None, multiline=False):
completer = WordCompleter(words=[], history=history)
res = prompt_toolkit.prompt(message, default=default or '', history=history, completer=completer, multiline=multiline)
return str(res)
def prompt_number(message, default=None):
res = prompt_toolkit.prompt(message, default=default or '', history=history)
return int(res)
def prompt_path():
pass
|
# -*- coding:utf-8 -*-
import os
import sys
sys.path.insert(0, os.path.dirname(os.path.dirname(__file__)))
import prompt_toolkit
from prompt_toolkit.history import FileHistory
from prompt_toolkit.completion import Completion, Completer
from tpl import path
class WordMatchType(object):
CONTAINS = 'CONTAINES'
STARTSWITH = 'STARTSWITH'
class WordCompleter(Completer):
def __init__(self, words=None, history=None, match_type=WordMatchType.CONTAINS):
self.words = words or []
self.history = history or []
self.match_type = match_type
def match(self, word_before_cursor, word):
if self.match_type == WordMatchType.CONTAINS:
return word_before_cursor in word
# TODO 需要做一下去重,避免 words 和 history yield 了相同的 completions
def get_completions(self, document, complete_event):
word_before_cursor = document.text_before_cursor.lower()
for word in self.words:
if self.match(word_before_cursor, word):
display_meta = ' custom'
yield Completion(word, -len(word_before_cursor), display_meta=display_meta)
for record in self.history:
if self.match(word_before_cursor, record):
display_meta = ' history'
yield Completion(record, -len(word_before_cursor), display_meta=display_meta)
history = FileHistory(os.path.join(path.HOME, '.templates', 'tpl.history'))
def prompt_str(message, default=None, multiline=False):
completer = WordCompleter(words=[], history=history)
res = prompt_toolkit.prompt(message, default=default or '', history=history, completer=completer, multiline=multiline)
return str(res)
def prompt_path():
pass
|
mit
|
Python
|
63fddd07e3b110c06c7369fa9d815e79384ef27e
|
update try_pandas.py
|
hanhanwu/Hanhan_Data_Science_Practice,hanhanwu/Hanhan_Data_Science_Practice,hanhanwu/Hanhan_Data_Science_Practice,hanhanwu/Hanhan_Data_Science_Practice
|
try_pandas.py
|
try_pandas.py
|
# I'm using Spark Cloud Community Edition, sicne my own machine cannot have the right numpy for pandas...
# So, in this code, so features could only be used in Spark Cloud Python Notebook
# Try pandas :)
# cell 1 - load the data (I upload the .csv into Spark Cloud first)
import pandas as pd
import numpy as np
## The path here is the .csv file path in HDFS
pdata = sqlContext.read.format('csv').load("/FileStore/tables/[file name in HDFS]",
index_col="ID", header =True).toPandas()
# cell 2 - Bollean Indexing
pdata.loc[(pdata["Gender"]=="Female") & (pdata["Salary_Account"]=="ICICI Bank") & (pdata["Mobile_Verified"]=="Y"),
["Gender", "Salary_Account", "Mobile_Verified"]]
# cell 3 - apply function, similar to R apply()
def get_missing_data(x):
return sum(x.isnull())
print "find missing data for each column:"
print pdata.apply(get_missing_data, axis = 0)
print "find missing data for each row:"
print pdata.apply(get_missing_data, axis = 1)
# cell 4 - fillna(), updating missing values with the overall mean/mode/median of the column
from scipy.stats import mode
# check the mode
mode(pdata['Gender'])[0][0]
pdata['Gender'].fillna(mode(pdata['Gender'])[0][0], inplace=True)
pdata.apply(get_missing_data, axis=0)
# cell 5 - create Excel style pivot table, I really like this
# check data type first
pdata.dtypes
# convert Monthly_Income into numerical data
pdata['Monthly_Income'] = pdata['Monthly_Income'].astype(float)
pdata.dtypes
pivot_t = pdata.pivot_table(values=['Monthly_Income'], index=['Gender', 'Mobile_Verified', 'Device_Type'], aggfunc = np.mean)
print pivot_t
|
# I'm using Spark Cloud Community Edition, sicne my own machine cannot have the right numpy for pandas...
# So, in this code, so features could only be used in Spark Cloud Python Notebook
# Try pandas :)
# cell 1 - load the data (I upload the .csv into Spark Cloud first)
import pandas as pd
import numpy as np
## The path here is the .csv file path in HDFS
pdata = sqlContext.read.format('csv').load("/FileStore/tables/[file name in HDFS]",
index_col="ID", header =True).toPandas()
# cell 2 - Bollean Indexing
pdata.loc[(pdata["Gender"]=="Female") & (pdata["Salary_Account"]=="ICICI Bank") & (pdata["Mobile_Verified"]=="Y"),
["Gender", "Salary_Account", "Mobile_Verified"]]
# cell 3 - apply function, similar to R apply()
def get_missing_data(x):
return sum(x.isnull())
print "find missing data for each column:"
print pdata.apply(get_missing_data, axis = 0)
print "find missing data for each row:"
print pdata.apply(get_missing_data, axis = 1)
# cell 4 - fillna(), updating missing values with the overall mean/mode/median of the column
from scipy.stats import mode
# check the mode
mode(pdata['Gender'])[0][0]
pdata['Gender'].fillna(mode(pdata['Gender'])[0][0], inplace=True)
pdata.apply(get_missing_data, axis=0)
|
mit
|
Python
|
31caf3d6366cdc3669eb72007a1a6a45bffe2ce3
|
Update at 2017-07-23 11-30-32
|
amoshyc/tthl-code
|
plot.py
|
plot.py
|
from sys import argv
from pathlib import Path
import matplotlib as mpl
mpl.use('Agg')
import seaborn as sns
sns.set_style("darkgrid")
import matplotlib.pyplot as plt
import pandas as pd
# from keras.utils import plot_model
# plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=False)
def plot_svg(log, name):
df = pd.read_csv(log)
graph = Path('./graph/')
loss_path = graph / (name + '_loss.svg')
acc_path = graph / (name + '_acc.svg')
keys = ['loss', 'val_loss']
ax = df[keys][:22].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('loss(binary crossentropy)')
plt.savefig(str(loss_path))
keys = ['binary_accuracy', 'val_binary_accuracy']
ax = df[keys][:22].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('accuracy')
plt.savefig(str(acc_path))
if __name__ == '__main__':
log, name = argv[1], argv[2]
plot_svg(log, name)
|
from sys import argv
from pathlib import Path
import matplotlib as mpl
mpl.use('Agg')
import seaborn as sns
sns.set_style("darkgrid")
import matplotlib.pyplot as plt
import pandas as pd
# from keras.utils import plot_model
# plot_model(model, to_file='model.png', show_shapes=True, show_layer_names=False)
def plot_svg(log, name):
df = pd.read_csv(log)
graph = Path('./graph/')
loss_path = graph / (name + '_loss.svg')
acc_path = graph / (name + '_acc.svg')
keys = ['loss', 'val_loss']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('loss(binary crossentropy)')
plt.savefig(str(loss_path))
keys = ['binary_accuracy', 'val_binary_accuracy']
ax = df[keys].plot(kind='line')
ax.set_xlabel('epoch')
ax.set_ylabel('accuracy')
plt.savefig(str(acc_path))
if __name__ == '__main__':
log, name = argv[1], argv[2]
plot_svg(log, name)
|
apache-2.0
|
Python
|
e2330caffae04bc31376a2e0f66f0e86ebf92532
|
Add my own K-nearest-neighbor algorithm
|
a-holm/MachinelearningAlgorithms,a-holm/MachinelearningAlgorithms
|
kNearestNeighbors/howItWorksKNearestNeighbors.py
|
kNearestNeighbors/howItWorksKNearestNeighbors.py
|
# -*- coding: utf-8 -*-
"""K Nearest Neighbors classification for machine learning.
This file demonstrate knowledge of K Nearest Neighbors classification. By
building the algorithm from scratch.
The idea of K Nearest Neighbors classification is to best divide and separate
the data based on clustering the data and classifying based on the proximity
to it's K closest neighbors and their classifications.
'Closeness' is measured by the euclidean distance.
dataset is breast cancer data from: http://archive.ics.uci.edu/ml/datasets.html
Example:
$ python howItWorksKNearestNeighbors.py
Todo:
*
"""
from collections import Counter
import numpy as np
# import matplotlib.pyplot as plt
from matplotlib import style
# from math import sqrt
import warnings
style.use('fivethirtyeight')
# hardcoded testdata
dataset = {'k': [[1, 2], [2, 3], [3, 1]], 'r': [[6, 5], [7, 7], [8, 6]]}
new_features = [5, 7]
# [[plt.scatter(ii[0], ii[1], s=100, color=i) for ii in dataset[i]] for i in dataset]
# plt.scatter(new_features[0], new_features[1], s=100)
# plt.show()
def k_nearest_neighbors(data, predict, k=3):
"""Function to calculate k nearest neighbors.
Based on the parameter 'predict' we find the points in the local proximity
of the training data and their label. In a larger dataset it would make
sense to specify a radius to avoid going over all data points each time,
but with the current dataset it does not matter so I avoid it to simplify.
Args:
data (dictionary): a dictionary where the keys are labels and the
values are a list of lists of features.
predict (list): a list of features that we will classify
k (int): an int that is the amount of neighbors to be counted. Should
be an odd number and higher than len(data) to avoid errors.
Returns:
str: The return value. The label that the predicted parameter has.
"""
if len(data) >= k:
warnings.warn('K is set to a value less than total voting groups')
distances = []
for group in data:
for features in data[group]:
# euclidean_distance = np.sqrt(np.sum((np.array(features)-np.array(predict))**2))
euclidean_distance = np.linalg.norm(np.array(features) - np.array(predict)) # faster
distances.append([euclidean_distance, group])
votes = [i[1] for i in sorted(distances)[:k]]
print(Counter(votes).most_common(1))
vote_result = Counter(votes).most_common(1)[0][0]
return vote_result
result = k_nearest_neighbors(dataset, new_features, k=5)
print(result)
|
mit
|
Python
|
|
9f8cf1c321e9c325fbaaaba3a6ef30c7cf3dd211
|
Hide deleted course updates from mobile API.
|
ak2703/edx-platform,lduarte1991/edx-platform,DNFcode/edx-platform,defance/edx-platform,pabloborrego93/edx-platform,Edraak/circleci-edx-platform,dsajkl/123,chand3040/cloud_that,antoviaque/edx-platform,ahmedaljazzar/edx-platform,jamiefolsom/edx-platform,Edraak/edx-platform,OmarIthawi/edx-platform,alu042/edx-platform,analyseuc3m/ANALYSE-v1,JioEducation/edx-platform,Edraak/edraak-platform,jonathan-beard/edx-platform,sameetb-cuelogic/edx-platform-test,vikas1885/test1,kmoocdev2/edx-platform,ampax/edx-platform-backup,etzhou/edx-platform,proversity-org/edx-platform,shubhdev/edx-platform,10clouds/edx-platform,B-MOOC/edx-platform,atsolakid/edx-platform,atsolakid/edx-platform,waheedahmed/edx-platform,hastexo/edx-platform,naresh21/synergetics-edx-platform,valtech-mooc/edx-platform,ampax/edx-platform-backup,ESOedX/edx-platform,halvertoluke/edx-platform,Endika/edx-platform,jjmiranda/edx-platform,jswope00/griffinx,MSOpenTech/edx-platform,jelugbo/tundex,ubc/edx-platform,mushtaqak/edx-platform,angelapper/edx-platform,shurihell/testasia,eestay/edx-platform,CourseTalk/edx-platform,EDUlib/edx-platform,y12uc231/edx-platform,jamesblunt/edx-platform,shabab12/edx-platform,utecuy/edx-platform,cselis86/edx-platform,edry/edx-platform,jelugbo/tundex,itsjeyd/edx-platform,SivilTaram/edx-platform,unicri/edx-platform,deepsrijit1105/edx-platform,olexiim/edx-platform,wwj718/edx-platform,Livit/Livit.Learn.EdX,devs1991/test_edx_docmode,Edraak/edx-platform,philanthropy-u/edx-platform,chauhanhardik/populo,kmoocdev/edx-platform,shubhdev/openedx,zhenzhai/edx-platform,jazztpt/edx-platform,TeachAtTUM/edx-platform,MSOpenTech/edx-platform,CredoReference/edx-platform,kamalx/edx-platform,ovnicraft/edx-platform,simbs/edx-platform,MSOpenTech/edx-platform,unicri/edx-platform,wwj718/ANALYSE,J861449197/edx-platform,jelugbo/tundex,shubhdev/edx-platform,UXE/local-edx,teltek/edx-platform,xinjiguaike/edx-platform,DNFcode/edx-platform,JCBarahona/edX,ahmadio/edx-platform,xinjiguaike/edx-platform,jazkarta/edx-platform-for-isc,TeachAtTUM/edx-platform,jzoldak/edx-platform,nttks/jenkins-test,shubhdev/edx-platform,jruiperezv/ANALYSE,doismellburning/edx-platform,andyzsf/edx,chudaol/edx-platform,CredoReference/edx-platform,edx-solutions/edx-platform,jswope00/griffinx,jruiperezv/ANALYSE,pabloborrego93/edx-platform,chauhanhardik/populo,Semi-global/edx-platform,leansoft/edx-platform,eemirtekin/edx-platform,arbrandes/edx-platform,shurihell/testasia,jolyonb/edx-platform,sameetb-cuelogic/edx-platform-test,mtlchun/edx,polimediaupv/edx-platform,romain-li/edx-platform,playm2mboy/edx-platform,doganov/edx-platform,4eek/edx-platform,fly19890211/edx-platform,Softmotions/edx-platform,y12uc231/edx-platform,teltek/edx-platform,OmarIthawi/edx-platform,cognitiveclass/edx-platform,Ayub-Khan/edx-platform,jbassen/edx-platform,alu042/edx-platform,kamalx/edx-platform,utecuy/edx-platform,mbareta/edx-platform-ft,bigdatauniversity/edx-platform,JCBarahona/edX,jazkarta/edx-platform,chauhanhardik/populo_2,utecuy/edx-platform,vasyarv/edx-platform,andyzsf/edx,zerobatu/edx-platform,B-MOOC/edx-platform,mitocw/edx-platform,nikolas/edx-platform,Semi-global/edx-platform,shashank971/edx-platform,antoviaque/edx-platform,Endika/edx-platform,eemirtekin/edx-platform,eemirtekin/edx-platform,vismartltd/edx-platform,benpatterson/edx-platform,rismalrv/edx-platform,chudaol/edx-platform,chauhanhardik/populo,ampax/edx-platform,kamalx/edx-platform,teltek/edx-platform,jazkarta/edx-platform,DNFcode/edx-platform,TeachAtTUM/edx-platform,UXE/local-edx,etzhou/edx-platform,zadgroup/edx-platform,pomegranited/edx-platform,jazkarta/edx-platform,knehez/edx-platform,mcgachey/edx-platform,kursitet/edx-platform,waheedahmed/edx-platform,jzoldak/edx-platform,DefyVentures/edx-platform,cecep-edu/edx-platform,xuxiao19910803/edx-platform,nanolearningllc/edx-platform-cypress,jswope00/griffinx,chudaol/edx-platform,polimediaupv/edx-platform,doganov/edx-platform,chauhanhardik/populo_2,jazkarta/edx-platform-for-isc,benpatterson/edx-platform,nttks/jenkins-test,edry/edx-platform,Semi-global/edx-platform,playm2mboy/edx-platform,alexthered/kienhoc-platform,msegado/edx-platform,franosincic/edx-platform,IONISx/edx-platform,dkarakats/edx-platform,shubhdev/openedx,fly19890211/edx-platform,UOMx/edx-platform,olexiim/edx-platform,don-github/edx-platform,nttks/jenkins-test,Shrhawk/edx-platform,cyanna/edx-platform,jbassen/edx-platform,SivilTaram/edx-platform,IONISx/edx-platform,peterm-itr/edx-platform,wwj718/ANALYSE,cpennington/edx-platform,zhenzhai/edx-platform,mcgachey/edx-platform,a-parhom/edx-platform,Stanford-Online/edx-platform,SivilTaram/edx-platform,lduarte1991/edx-platform,mushtaqak/edx-platform,gsehub/edx-platform,jazztpt/edx-platform,xuxiao19910803/edx,shubhdev/edxOnBaadal,kursitet/edx-platform,beni55/edx-platform,mcgachey/edx-platform,chauhanhardik/populo,mitocw/edx-platform,devs1991/test_edx_docmode,eestay/edx-platform,jamiefolsom/edx-platform,romain-li/edx-platform,gymnasium/edx-platform,IndonesiaX/edx-platform,jolyonb/edx-platform,benpatterson/edx-platform,amir-qayyum-khan/edx-platform,Kalyzee/edx-platform,eestay/edx-platform,iivic/BoiseStateX,kamalx/edx-platform,marcore/edx-platform,motion2015/edx-platform,raccoongang/edx-platform,leansoft/edx-platform,wwj718/ANALYSE,jjmiranda/edx-platform,analyseuc3m/ANALYSE-v1,fintech-circle/edx-platform,xinjiguaike/edx-platform,vismartltd/edx-platform,appsembler/edx-platform,edry/edx-platform,jolyonb/edx-platform,jbassen/edx-platform,vikas1885/test1,kursitet/edx-platform,10clouds/edx-platform,OmarIthawi/edx-platform,shubhdev/edxOnBaadal,zhenzhai/edx-platform,fly19890211/edx-platform,Lektorium-LLC/edx-platform,jjmiranda/edx-platform,eduNEXT/edunext-platform,fly19890211/edx-platform,Ayub-Khan/edx-platform,synergeticsedx/deployment-wipro,antoviaque/edx-platform,zofuthan/edx-platform,solashirai/edx-platform,ZLLab-Mooc/edx-platform,procangroup/edx-platform,ubc/edx-platform,rhndg/openedx,wwj718/ANALYSE,louyihua/edx-platform,ahmedaljazzar/edx-platform,waheedahmed/edx-platform,stvstnfrd/edx-platform,playm2mboy/edx-platform,ubc/edx-platform,UOMx/edx-platform,naresh21/synergetics-edx-platform,bigdatauniversity/edx-platform,cyanna/edx-platform,cognitiveclass/edx-platform,andyzsf/edx,appliedx/edx-platform,Kalyzee/edx-platform,valtech-mooc/edx-platform,antoviaque/edx-platform,cpennington/edx-platform,adoosii/edx-platform,martynovp/edx-platform,4eek/edx-platform,jruiperezv/ANALYSE,zubair-arbi/edx-platform,inares/edx-platform,iivic/BoiseStateX,edry/edx-platform,sameetb-cuelogic/edx-platform-test,ampax/edx-platform,zubair-arbi/edx-platform,vasyarv/edx-platform,ahmadiga/min_edx,mushtaqak/edx-platform,jjmiranda/edx-platform,dkarakats/edx-platform,motion2015/edx-platform,shurihell/testasia,raccoongang/edx-platform,romain-li/edx-platform,jbassen/edx-platform,longmen21/edx-platform,kxliugang/edx-platform,eduNEXT/edx-platform,mahendra-r/edx-platform,ampax/edx-platform-backup,gsehub/edx-platform,eduNEXT/edunext-platform,MSOpenTech/edx-platform,fly19890211/edx-platform,xingyepei/edx-platform,ovnicraft/edx-platform,edx-solutions/edx-platform,CredoReference/edx-platform,dsajkl/reqiop,IONISx/edx-platform,etzhou/edx-platform,benpatterson/edx-platform,rismalrv/edx-platform,franosincic/edx-platform,jazkarta/edx-platform-for-isc,knehez/edx-platform,romain-li/edx-platform,shubhdev/openedx,arbrandes/edx-platform,synergeticsedx/deployment-wipro,arbrandes/edx-platform,devs1991/test_edx_docmode,jruiperezv/ANALYSE,fintech-circle/edx-platform,jonathan-beard/edx-platform,marcore/edx-platform,J861449197/edx-platform,RPI-OPENEDX/edx-platform,appliedx/edx-platform,hamzehd/edx-platform,solashirai/edx-platform,peterm-itr/edx-platform,antonve/s4-project-mooc,motion2015/a3,gymnasium/edx-platform,longmen21/edx-platform,rue89-tech/edx-platform,B-MOOC/edx-platform,pepeportela/edx-platform,chauhanhardik/populo_2,jzoldak/edx-platform,peterm-itr/edx-platform,mtlchun/edx,zadgroup/edx-platform,JCBarahona/edX,nanolearningllc/edx-platform-cypress-2,pepeportela/edx-platform,chauhanhardik/populo,motion2015/a3,franosincic/edx-platform,nttks/edx-platform,zerobatu/edx-platform,zubair-arbi/edx-platform,AkA84/edx-platform,deepsrijit1105/edx-platform,doganov/edx-platform,Shrhawk/edx-platform,atsolakid/edx-platform,proversity-org/edx-platform,nttks/jenkins-test,vismartltd/edx-platform,nttks/edx-platform,openfun/edx-platform,rismalrv/edx-platform,appsembler/edx-platform,kmoocdev/edx-platform,hamzehd/edx-platform,UXE/local-edx,wwj718/ANALYSE,msegado/edx-platform,Edraak/circleci-edx-platform,zadgroup/edx-platform,beacloudgenius/edx-platform,MakeHer/edx-platform,mcgachey/edx-platform,philanthropy-u/edx-platform,cselis86/edx-platform,SravanthiSinha/edx-platform,knehez/edx-platform,mjirayu/sit_academy,solashirai/edx-platform,stvstnfrd/edx-platform,J861449197/edx-platform,zofuthan/edx-platform,stvstnfrd/edx-platform,shurihell/testasia,eemirtekin/edx-platform,olexiim/edx-platform,jbzdak/edx-platform,bitifirefly/edx-platform,ak2703/edx-platform,waheedahmed/edx-platform,jonathan-beard/edx-platform,jbzdak/edx-platform,mahendra-r/edx-platform,UOMx/edx-platform,RPI-OPENEDX/edx-platform,benpatterson/edx-platform,procangroup/edx-platform,openfun/edx-platform,ovnicraft/edx-platform,zofuthan/edx-platform,motion2015/a3,andyzsf/edx,gsehub/edx-platform,xuxiao19910803/edx-platform,louyihua/edx-platform,arifsetiawan/edx-platform,bigdatauniversity/edx-platform,ahmadiga/min_edx,cecep-edu/edx-platform,xingyepei/edx-platform,shurihell/testasia,jazkarta/edx-platform,jelugbo/tundex,cyanna/edx-platform,lduarte1991/edx-platform,hastexo/edx-platform,synergeticsedx/deployment-wipro,ampax/edx-platform,beni55/edx-platform,synergeticsedx/deployment-wipro,y12uc231/edx-platform,DNFcode/edx-platform,lduarte1991/edx-platform,CourseTalk/edx-platform,adoosii/edx-platform,nikolas/edx-platform,nagyistoce/edx-platform,inares/edx-platform,alexthered/kienhoc-platform,antonve/s4-project-mooc,etzhou/edx-platform,xinjiguaike/edx-platform,dcosentino/edx-platform,xuxiao19910803/edx-platform,dkarakats/edx-platform,dsajkl/reqiop,a-parhom/edx-platform,chudaol/edx-platform,polimediaupv/edx-platform,msegado/edx-platform,nikolas/edx-platform,jazztpt/edx-platform,angelapper/edx-platform,kxliugang/edx-platform,nagyistoce/edx-platform,defance/edx-platform,vismartltd/edx-platform,ahmadio/edx-platform,TeachAtTUM/edx-platform,martynovp/edx-platform,kmoocdev/edx-platform,kxliugang/edx-platform,jamesblunt/edx-platform,inares/edx-platform,knehez/edx-platform,zadgroup/edx-platform,Shrhawk/edx-platform,zadgroup/edx-platform,jruiperezv/ANALYSE,ZLLab-Mooc/edx-platform,bitifirefly/edx-platform,chand3040/cloud_that,eduNEXT/edx-platform,valtech-mooc/edx-platform,Shrhawk/edx-platform,Softmotions/edx-platform,4eek/edx-platform,franosincic/edx-platform,amir-qayyum-khan/edx-platform,nanolearningllc/edx-platform-cypress,SravanthiSinha/edx-platform,bitifirefly/edx-platform,chand3040/cloud_that,adoosii/edx-platform,shashank971/edx-platform,Livit/Livit.Learn.EdX,vikas1885/test1,antonve/s4-project-mooc,miptliot/edx-platform,dsajkl/123,IndonesiaX/edx-platform,procangroup/edx-platform,edx/edx-platform,eduNEXT/edx-platform,cselis86/edx-platform,DefyVentures/edx-platform,beacloudgenius/edx-platform,ESOedX/edx-platform,analyseuc3m/ANALYSE-v1,bigdatauniversity/edx-platform,martynovp/edx-platform,hamzehd/edx-platform,eemirtekin/edx-platform,playm2mboy/edx-platform,ak2703/edx-platform,BehavioralInsightsTeam/edx-platform,don-github/edx-platform,longmen21/edx-platform,gsehub/edx-platform,dsajkl/reqiop,rhndg/openedx,devs1991/test_edx_docmode,marcore/edx-platform,jamiefolsom/edx-platform,longmen21/edx-platform,cognitiveclass/edx-platform,dcosentino/edx-platform,hamzehd/edx-platform,motion2015/a3,sameetb-cuelogic/edx-platform-test,miptliot/edx-platform,ZLLab-Mooc/edx-platform,EDUlib/edx-platform,SravanthiSinha/edx-platform,xuxiao19910803/edx-platform,nikolas/edx-platform,B-MOOC/edx-platform,kursitet/edx-platform,nttks/edx-platform,simbs/edx-platform,chrisndodge/edx-platform,zhenzhai/edx-platform,xingyepei/edx-platform,jazkarta/edx-platform,10clouds/edx-platform,jbzdak/edx-platform,JCBarahona/edX,utecuy/edx-platform,cpennington/edx-platform,unicri/edx-platform,msegado/edx-platform,prarthitm/edxplatform,SravanthiSinha/edx-platform,DNFcode/edx-platform,olexiim/edx-platform,Livit/Livit.Learn.EdX,MakeHer/edx-platform,chand3040/cloud_that,RPI-OPENEDX/edx-platform,rue89-tech/edx-platform,halvertoluke/edx-platform,vikas1885/test1,motion2015/edx-platform,xingyepei/edx-platform,B-MOOC/edx-platform,JCBarahona/edX,vismartltd/edx-platform,antonve/s4-project-mooc,ahmadio/edx-platform,shabab12/edx-platform,appliedx/edx-platform,louyihua/edx-platform,itsjeyd/edx-platform,xuxiao19910803/edx,playm2mboy/edx-platform,4eek/edx-platform,ahmadio/edx-platform,caesar2164/edx-platform,zerobatu/edx-platform,iivic/BoiseStateX,xuxiao19910803/edx,jswope00/griffinx,mushtaqak/edx-platform,ovnicraft/edx-platform,itsjeyd/edx-platform,raccoongang/edx-platform,IndonesiaX/edx-platform,polimediaupv/edx-platform,doganov/edx-platform,jolyonb/edx-platform,Livit/Livit.Learn.EdX,ampax/edx-platform-backup,AkA84/edx-platform,Ayub-Khan/edx-platform,amir-qayyum-khan/edx-platform,zerobatu/edx-platform,mjirayu/sit_academy,jazztpt/edx-platform,simbs/edx-platform,deepsrijit1105/edx-platform,ahmedaljazzar/edx-platform,ZLLab-Mooc/edx-platform,pomegranited/edx-platform,atsolakid/edx-platform,Edraak/edx-platform,pepeportela/edx-platform,EDUlib/edx-platform,caesar2164/edx-platform,ovnicraft/edx-platform,gymnasium/edx-platform,JioEducation/edx-platform,prarthitm/edxplatform,miptliot/edx-platform,msegado/edx-platform,mitocw/edx-platform,Kalyzee/edx-platform,edx-solutions/edx-platform,Edraak/circleci-edx-platform,shubhdev/edx-platform,caesar2164/edx-platform,dsajkl/123,mbareta/edx-platform-ft,shubhdev/edxOnBaadal,RPI-OPENEDX/edx-platform,shubhdev/openedx,Kalyzee/edx-platform,arifsetiawan/edx-platform,beacloudgenius/edx-platform,ubc/edx-platform,zerobatu/edx-platform,EDUlib/edx-platform,chrisndodge/edx-platform,beni55/edx-platform,ferabra/edx-platform,angelapper/edx-platform,romain-li/edx-platform,mtlchun/edx,knehez/edx-platform,Softmotions/edx-platform,ferabra/edx-platform,shashank971/edx-platform,ahmadiga/min_edx,JioEducation/edx-platform,cyanna/edx-platform,10clouds/edx-platform,proversity-org/edx-platform,stvstnfrd/edx-platform,xuxiao19910803/edx,shubhdev/openedx,arifsetiawan/edx-platform,utecuy/edx-platform,philanthropy-u/edx-platform,Shrhawk/edx-platform,AkA84/edx-platform,nagyistoce/edx-platform,marcore/edx-platform,mtlchun/edx,defance/edx-platform,halvertoluke/edx-platform,AkA84/edx-platform,tanmaykm/edx-platform,cecep-edu/edx-platform,Edraak/edraak-platform,devs1991/test_edx_docmode,Stanford-Online/edx-platform,atsolakid/edx-platform,Semi-global/edx-platform,nanolearningllc/edx-platform-cypress-2,martynovp/edx-platform,beacloudgenius/edx-platform,tiagochiavericosta/edx-platform,Edraak/circleci-edx-platform,tiagochiavericosta/edx-platform,nttks/edx-platform,eestay/edx-platform,prarthitm/edxplatform,mahendra-r/edx-platform,kmoocdev2/edx-platform,ahmadiga/min_edx,cognitiveclass/edx-platform,wwj718/edx-platform,mitocw/edx-platform,tanmaykm/edx-platform,solashirai/edx-platform,shabab12/edx-platform,jbzdak/edx-platform,nanolearningllc/edx-platform-cypress,dkarakats/edx-platform,eduNEXT/edx-platform,jazkarta/edx-platform-for-isc,ak2703/edx-platform,OmarIthawi/edx-platform,dcosentino/edx-platform,ampax/edx-platform-backup,nagyistoce/edx-platform,4eek/edx-platform,Softmotions/edx-platform,nanolearningllc/edx-platform-cypress-2,ahmadiga/min_edx,doismellburning/edx-platform,adoosii/edx-platform,shubhdev/edx-platform,J861449197/edx-platform,iivic/BoiseStateX,bitifirefly/edx-platform,iivic/BoiseStateX,Edraak/circleci-edx-platform,cecep-edu/edx-platform,motion2015/edx-platform,jamiefolsom/edx-platform,kmoocdev/edx-platform,jswope00/griffinx,xingyepei/edx-platform,vasyarv/edx-platform,CourseTalk/edx-platform,fintech-circle/edx-platform,Kalyzee/edx-platform,MakeHer/edx-platform,kmoocdev2/edx-platform,olexiim/edx-platform,edry/edx-platform,peterm-itr/edx-platform,ESOedX/edx-platform,dsajkl/123,prarthitm/edxplatform,shashank971/edx-platform,dsajkl/reqiop,UXE/local-edx,beacloudgenius/edx-platform,cecep-edu/edx-platform,IONISx/edx-platform,sameetb-cuelogic/edx-platform-test,mtlchun/edx,beni55/edx-platform,DefyVentures/edx-platform,y12uc231/edx-platform,edx/edx-platform,nanolearningllc/edx-platform-cypress,edx/edx-platform,rue89-tech/edx-platform,BehavioralInsightsTeam/edx-platform,arifsetiawan/edx-platform,nagyistoce/edx-platform,zubair-arbi/edx-platform,motion2015/edx-platform,jelugbo/tundex,appliedx/edx-platform,mbareta/edx-platform-ft,SravanthiSinha/edx-platform,ferabra/edx-platform,itsjeyd/edx-platform,nikolas/edx-platform,mjirayu/sit_academy,tiagochiavericosta/edx-platform,IndonesiaX/edx-platform,Semi-global/edx-platform,philanthropy-u/edx-platform,jonathan-beard/edx-platform,jamesblunt/edx-platform,edx/edx-platform,inares/edx-platform,rismalrv/edx-platform,Endika/edx-platform,defance/edx-platform,Ayub-Khan/edx-platform,jamiefolsom/edx-platform,arifsetiawan/edx-platform,RPI-OPENEDX/edx-platform,dcosentino/edx-platform,doismellburning/edx-platform,dcosentino/edx-platform,tiagochiavericosta/edx-platform,doganov/edx-platform,mbareta/edx-platform-ft,chudaol/edx-platform,devs1991/test_edx_docmode,jbassen/edx-platform,cyanna/edx-platform,alexthered/kienhoc-platform,adoosii/edx-platform,JioEducation/edx-platform,mcgachey/edx-platform,longmen21/edx-platform,tanmaykm/edx-platform,don-github/edx-platform,solashirai/edx-platform,nanolearningllc/edx-platform-cypress-2,rhndg/openedx,jzoldak/edx-platform,devs1991/test_edx_docmode,chrisndodge/edx-platform,cpennington/edx-platform,chauhanhardik/populo_2,y12uc231/edx-platform,DefyVentures/edx-platform,ferabra/edx-platform,tiagochiavericosta/edx-platform,vasyarv/edx-platform,unicri/edx-platform,eduNEXT/edunext-platform,shubhdev/edxOnBaadal,DefyVentures/edx-platform,kursitet/edx-platform,halvertoluke/edx-platform,shabab12/edx-platform,martynovp/edx-platform,Endika/edx-platform,MakeHer/edx-platform,bitifirefly/edx-platform,antonve/s4-project-mooc,arbrandes/edx-platform,caesar2164/edx-platform,SivilTaram/edx-platform,kmoocdev/edx-platform,Lektorium-LLC/edx-platform,hamzehd/edx-platform,xinjiguaike/edx-platform,devs1991/test_edx_docmode,vikas1885/test1,procangroup/edx-platform,fintech-circle/edx-platform,pomegranited/edx-platform,analyseuc3m/ANALYSE-v1,zhenzhai/edx-platform,CourseTalk/edx-platform,appsembler/edx-platform,mushtaqak/edx-platform,Edraak/edraak-platform,J861449197/edx-platform,ahmadio/edx-platform,dkarakats/edx-platform,simbs/edx-platform,jamesblunt/edx-platform,MakeHer/edx-platform,shashank971/edx-platform,bigdatauniversity/edx-platform,openfun/edx-platform,ZLLab-Mooc/edx-platform,valtech-mooc/edx-platform,rismalrv/edx-platform,deepsrijit1105/edx-platform,jamesblunt/edx-platform,chrisndodge/edx-platform,BehavioralInsightsTeam/edx-platform,waheedahmed/edx-platform,inares/edx-platform,leansoft/edx-platform,pomegranited/edx-platform,wwj718/edx-platform,zofuthan/edx-platform,mahendra-r/edx-platform,raccoongang/edx-platform,kamalx/edx-platform,appsembler/edx-platform,louyihua/edx-platform,appliedx/edx-platform,rhndg/openedx,angelapper/edx-platform,gymnasium/edx-platform,pabloborrego93/edx-platform,pepeportela/edx-platform,IONISx/edx-platform,mahendra-r/edx-platform,xuxiao19910803/edx,ahmedaljazzar/edx-platform,beni55/edx-platform,hastexo/edx-platform,rue89-tech/edx-platform,alexthered/kienhoc-platform,Stanford-Online/edx-platform,doismellburning/edx-platform,alu042/edx-platform,mjirayu/sit_academy,Edraak/edx-platform,doismellburning/edx-platform,alexthered/kienhoc-platform,alu042/edx-platform,leansoft/edx-platform,hastexo/edx-platform,zubair-arbi/edx-platform,miptliot/edx-platform,openfun/edx-platform,pomegranited/edx-platform,polimediaupv/edx-platform,motion2015/a3,etzhou/edx-platform,ubc/edx-platform,rue89-tech/edx-platform,BehavioralInsightsTeam/edx-platform,vasyarv/edx-platform,eestay/edx-platform,Edraak/edraak-platform,Lektorium-LLC/edx-platform,amir-qayyum-khan/edx-platform,a-parhom/edx-platform,wwj718/edx-platform,Lektorium-LLC/edx-platform,Softmotions/edx-platform,zofuthan/edx-platform,chand3040/cloud_that,ak2703/edx-platform,dsajkl/123,a-parhom/edx-platform,UOMx/edx-platform,nanolearningllc/edx-platform-cypress,don-github/edx-platform,chauhanhardik/populo_2,jazkarta/edx-platform-for-isc,jbzdak/edx-platform,simbs/edx-platform,rhndg/openedx,leansoft/edx-platform,pabloborrego93/edx-platform,cognitiveclass/edx-platform,ESOedX/edx-platform,shubhdev/edxOnBaadal,wwj718/edx-platform,halvertoluke/edx-platform,kmoocdev2/edx-platform,Edraak/edx-platform,nanolearningllc/edx-platform-cypress-2,kxliugang/edx-platform,xuxiao19910803/edx-platform,ferabra/edx-platform,Ayub-Khan/edx-platform,valtech-mooc/edx-platform,don-github/edx-platform,unicri/edx-platform,nttks/jenkins-test,proversity-org/edx-platform,franosincic/edx-platform,naresh21/synergetics-edx-platform,ampax/edx-platform,edx-solutions/edx-platform,mjirayu/sit_academy,CredoReference/edx-platform,SivilTaram/edx-platform,nttks/edx-platform,kmoocdev2/edx-platform,tanmaykm/edx-platform,AkA84/edx-platform,eduNEXT/edunext-platform,jonathan-beard/edx-platform,cselis86/edx-platform,Stanford-Online/edx-platform,MSOpenTech/edx-platform,cselis86/edx-platform,naresh21/synergetics-edx-platform,openfun/edx-platform,jazztpt/edx-platform,IndonesiaX/edx-platform,kxliugang/edx-platform,teltek/edx-platform
|
lms/djangoapps/mobile_api/course_info/views.py
|
lms/djangoapps/mobile_api/course_info/views.py
|
from rest_framework import generics, permissions
from rest_framework.authentication import OAuth2Authentication, SessionAuthentication
from rest_framework.response import Response
from rest_framework.views import APIView
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module
from courseware.courses import get_course_about_section, get_course_info_section_module
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from student.models import CourseEnrollment, User
class CourseUpdatesList(generics.ListAPIView):
"""Notes:
1. This only works for new-style course updates and is not the older freeform
format.
"""
authentication_classes = (OAuth2Authentication, SessionAuthentication)
permission_classes = (permissions.IsAuthenticated,)
def list(self, request, *args, **kwargs):
course_id = CourseKey.from_string(kwargs['course_id'])
course = modulestore().get_course(course_id)
course_updates_module = get_course_info_section_module(request, course, 'updates')
updates_to_show = [
update for update in reversed(course_updates_module.items)
if update.get("status") != "deleted"
]
return Response(updates_to_show)
class CourseHandoutsList(generics.ListAPIView):
"""Please just render this in an HTML view for now.
"""
authentication_classes = (OAuth2Authentication, SessionAuthentication)
permission_classes = (permissions.IsAuthenticated,)
def list(self, request, *args, **kwargs):
course_id = CourseKey.from_string(kwargs['course_id'])
course = modulestore().get_course(course_id)
course_handouts_module = get_course_info_section_module(request, course, 'handouts')
return Response({'handouts_html': course_handouts_module.data})
class CourseAboutDetail(generics.RetrieveAPIView):
authentication_classes = (OAuth2Authentication, SessionAuthentication)
permission_classes = (permissions.IsAuthenticated,)
def get(self, request, *args, **kwargs):
course_id = CourseKey.from_string(kwargs['course_id'])
course = modulestore().get_course(course_id)
# There are other fields, but they don't seem to be in use.
# see courses.py:get_course_about_section.
#
# This can also return None, so check for that before calling strip()
about_section_html = get_course_about_section(course, "overview")
return Response(
{"overview": about_section_html.strip() if about_section_html else ""}
)
|
from rest_framework import generics, permissions
from rest_framework.authentication import OAuth2Authentication, SessionAuthentication
from rest_framework.response import Response
from rest_framework.views import APIView
from courseware.model_data import FieldDataCache
from courseware.module_render import get_module
from courseware.courses import get_course_about_section, get_course_info_section_module
from opaque_keys.edx.keys import CourseKey
from xmodule.modulestore.django import modulestore
from student.models import CourseEnrollment, User
class CourseUpdatesList(generics.ListAPIView):
"""Notes:
1. This only works for new-style course updates and is not the older freeform
format.
"""
authentication_classes = (OAuth2Authentication, SessionAuthentication)
permission_classes = (permissions.IsAuthenticated,)
def list(self, request, *args, **kwargs):
course_id = CourseKey.from_string(kwargs['course_id'])
course = modulestore().get_course(course_id)
course_updates_module = get_course_info_section_module(request, course, 'updates')
return Response(reversed(course_updates_module.items))
class CourseHandoutsList(generics.ListAPIView):
"""Please just render this in an HTML view for now.
"""
authentication_classes = (OAuth2Authentication, SessionAuthentication)
permission_classes = (permissions.IsAuthenticated,)
def list(self, request, *args, **kwargs):
course_id = CourseKey.from_string(kwargs['course_id'])
course = modulestore().get_course(course_id)
course_handouts_module = get_course_info_section_module(request, course, 'handouts')
return Response({'handouts_html': course_handouts_module.data})
class CourseAboutDetail(generics.RetrieveAPIView):
authentication_classes = (OAuth2Authentication, SessionAuthentication)
permission_classes = (permissions.IsAuthenticated,)
def get(self, request, *args, **kwargs):
course_id = CourseKey.from_string(kwargs['course_id'])
course = modulestore().get_course(course_id)
# There are other fields, but they don't seem to be in use.
# see courses.py:get_course_about_section.
#
# This can also return None, so check for that before calling strip()
about_section_html = get_course_about_section(course, "overview")
return Response(
{"overview": about_section_html.strip() if about_section_html else ""}
)
|
agpl-3.0
|
Python
|
72a633793b30a87b6affa528459185d46fc37007
|
Update getJob signature
|
bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball
|
shared/api.py
|
shared/api.py
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
repo = btr3baseball.JobRepository(jobTable)
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
def submitJob(event, context):
# Put initial entry in dynamo db
jobId = repo.createJob(event)
# Put the job ID on the SQS queue
response = queue.send_message(MessageBody=jobId)
# Update the DB entry with sqs message ID for traceability
repo.updateWithMessageId(jobId, response.get('MessageId'))
def getJob(event, context):
repo.getJob(event['jobId'])
|
from __future__ import print_function
import boto3
import json
import os
import btr3baseball
jobTable = os.environ['JOB_TABLE']
jobQueue = os.environ['JOB_QUEUE']
repo = btr3baseball.JobRepository(jobTable)
queue = boto3.resource('sqs').get_queue_by_name(QueueName=jobQueue)
def submitJob(event, context):
# Put initial entry in dynamo db
jobId = repo.createJob(event)
# Put the job ID on the SQS queue
response = queue.send_message(MessageBody=jobId)
# Update the DB entry with sqs message ID for traceability
repo.updateWithMessageId(jobId, response.get('MessageId'))
def getJobInfo():
repo.getJob(event['jobId'])
|
apache-2.0
|
Python
|
06d0287a8fef0679b281296e6ed76e0b6c803acb
|
Improve management command to clear or clean kvstore
|
mariocesar/sorl-thumbnail,perdona/sorl-thumbnail,fdgogogo/sorl-thumbnail,Knotis/sorl-thumbnail,jcupitt/sorl-thumbnail,einvalentin/sorl-thumbnail,fladi/sorl-thumbnail,lampslave/sorl-thumbnail,seedinvest/sorl-thumbnail,guilouro/sorl-thumbnail,TriplePoint-Software/sorl-thumbnail,jazzband/sorl-thumbnail,jcupitt/sorl-thumbnail,lampslave/sorl-thumbnail,Knotis/sorl-thumbnail,mcenirm/sorl-thumbnail,JordanReiter/sorl-thumbnail,fdgogogo/sorl-thumbnail,Knotis/sorl-thumbnail,mariocesar/sorl-thumbnail,Resmin/sorl-thumbnail,leture/sorl-thumbnail,jazzband/sorl-thumbnail,einvalentin/sorl-thumbnail,TriplePoint-Software/sorl-thumbnail,lampslave/sorl-thumbnail,jannon/sorl-thumbnail,Resmin/sorl-thumbnail,MatthewWilkes/sorl-thumbnail,perdona/sorl-thumbnail,einvalentin/sorl-thumbnail,mariocesar/sorl-thumbnail,CGenie/sorl-thumbnail,jannon/sorl-thumbnail,leture/sorl-thumbnail,MatthewWilkes/sorl-thumbnail,guilouro/sorl-thumbnail,gregplaysguitar/sorl-thumbnail,jannon/sorl-thumbnail,mcenirm/sorl-thumbnail,MatthewWilkes/sorl-thumbnail,gregplaysguitar/sorl-thumbnail,guilouro/sorl-thumbnail,seedinvest/sorl-thumbnail,chriscauley/sorl-thumbnail,seedinvest/sorl-thumbnail,chriscauley/sorl-thumbnail,CGenie/sorl-thumbnail,TriplePoint-Software/sorl-thumbnail,perdona/sorl-thumbnail,jcupitt/sorl-thumbnail,jazzband/sorl-thumbnail,JordanReiter/sorl-thumbnail,mcenirm/sorl-thumbnail,leture/sorl-thumbnail,fdgogogo/sorl-thumbnail,chriscauley/sorl-thumbnail,fladi/sorl-thumbnail,JordanReiter/sorl-thumbnail,CGenie/sorl-thumbnail,gregplaysguitar/sorl-thumbnail,Resmin/sorl-thumbnail
|
sorl/thumbnail/management/commands/thumbnail.py
|
sorl/thumbnail/management/commands/thumbnail.py
|
import sys
from django.core.management.base import BaseCommand, CommandError
from sorl.thumbnail import default
class Command(BaseCommand):
help = (
u'Handles thumbnails and key value store'
)
args = '[cleanup, clear]'
option_list = BaseCommand.option_list
def handle(self, *labels, **options):
verbosity = int(options.get('verbosity'))
if not labels:
print self.print_help('thumbnail', '')
sys.exit(1)
if len(labels) != 1:
raise CommandError('`%s` is not a valid argument' % labels)
label = labels[0]
if label not in ['cleanup', 'clear']:
raise CommandError('`%s` unknown action' % label)
if label == 'cleanup':
if verbosity >= 1:
self.stdout.write("Cleanup thumbnails ... ", ending=' ... ')
default.kvstore.cleanup()
if verbosity >= 1:
self.stdout.write("[Done]")
elif label == 'clear':
if verbosity >= 1:
self.stdout.write("Clear the Key Value Store", ending=' ... ')
default.kvstore.clear()
if verbosity >= 1:
self.stdout.write("[Done]")
|
from django.core.management.base import BaseCommand, CommandError
from sorl.thumbnail.conf import settings
from sorl.thumbnail import default
class Command(BaseCommand):
help = (
u'Handles thumbnails and key value store'
)
args = '[cleanup, clear]'
option_list = BaseCommand.option_list
def handle(self, *labels, **options):
verbosity = int(options.get('verbosity'))
if len(labels) != 1:
raise CommandError('`%s` is not a valid argument' % labels)
label = labels[0]
if label not in ['cleanup', 'clear']:
raise CommandError('`%s` unknown action' % label)
if label == 'cleanup':
if verbosity >= 1:
self.stdout.write("Cleanup thumbnails ... ")
default.kvstore.cleanup()
if verbosity >= 1:
self.stdout.write("[Done]\n")
if label == 'clear':
if verbosity >= 1:
self.stdout.write("Clear the Key Value Store ... ")
default.kvstore.clear()
if verbosity >= 1:
self.stdout.write("[Done]\n")
|
bsd-3-clause
|
Python
|
a8805982ff5b92a59d25a28e2acd63af3c210f65
|
Add brute force sol
|
bowen0701/algorithms_data_structures
|
lc0945_minimum_increment_to_make_array_unique.py
|
lc0945_minimum_increment_to_make_array_unique.py
|
"""Leetcode 945. Minimum Increment to Make Array Unique
Medium
URL: https://leetcode.com/problems/minimum-increment-to-make-array-unique/
Given an array of integers A, a move consists of choosing any A[i], and
incrementing it by 1.
Return the least number of moves to make every value in A unique.
Example 1:
Input: [1,2,2]
Output: 1
Explanation: After 1 move, the array could be [1, 2, 3].
Example 2:
Input: [3,2,1,2,1,7]
Output: 6
Explanation: After 6 moves, the array could be [3, 4, 1, 2, 5, 7].
It can be shown with 5 or less moves that it is impossible for the array to
have all unique values.
Note:
- 0 <= A.length <= 40000
- 0 <= A[i] < 40000
"""
class SolutionBruteForce(object):
def minIncrementForUnique(self, A):
"""
:type A: List[int]
:rtype: int
Note: Time limit exceeded.
Time complexity: O(n^2), where n is A's length.
Space complexity: O(n).
"""
from collections import defaultdict
if not A:
return 0
# Create a dict:number->count.
num_count_d = defaultdict(int)
for num in A:
num_count_d[num] += 1
# While exists repeated numbers, move number by incrementing it.
moves = 0
repeated_nums = set([num for num, count in num_count_d.items()
if count > 1])
while repeated_nums:
num = repeated_nums.pop()
while num_count_d[num] > 1:
num_count_d[num] -= 1
num_count_d[num + 1] += 1
moves += 1
# If num's or num + 1's counts > 1, add back to set.
if num_count_d[num] > 1:
repeated_nums.add(num)
if num_count_d[num + 1] > 1:
repeated_nums.add(num + 1)
return moves
class SolutionSortPrevPlusOne(object):
def minIncrementForUnique(self, A):
"""
:type A: List[int]
:rtype: int
Time complexity: O(n*logn), where n is A's length.
Space complexity: O(1).
"""
if not A:
return 0
# Sort the input array, compare current number with previous one.
moves = need = 0
print sorted(A)
for num in sorted(A):
# Current number need to be at least previous + 1.
moves += max(need - num, 0)
need = max(num, need) + 1
return moves
def main():
# Output: 1
A = [1, 2, 2]
print SolutionBruteForce().minIncrementForUnique(A)
print SolutionSortPrevPlusOne().minIncrementForUnique(A)
# Output: 6
A = [3, 2, 1, 2, 1, 7]
print SolutionBruteForce().minIncrementForUnique(A)
print SolutionSortPrevPlusOne().minIncrementForUnique(A)
if __name__ == '__main__':
main()
|
"""Leetcode 945. Minimum Increment to Make Array Unique
Medium
URL: https://leetcode.com/problems/minimum-increment-to-make-array-unique/
Given an array of integers A, a move consists of choosing any A[i], and
incrementing it by 1.
Return the least number of moves to make every value in A unique.
Example 1:
Input: [1,2,2]
Output: 1
Explanation: After 1 move, the array could be [1, 2, 3].
Example 2:
Input: [3,2,1,2,1,7]
Output: 6
Explanation: After 6 moves, the array could be [3, 4, 1, 2, 5, 7].
It can be shown with 5 or less moves that it is impossible for the array to
have all unique values.
Note:
- 0 <= A.length <= 40000
- 0 <= A[i] < 40000
"""
class SolutionSortPrevPlusOne(object):
def minIncrementForUnique(self, A):
"""
:type A: List[int]
:rtype: int
Time complexity: O(n*logn), where n is A's length.
Space complexity: O(1).
"""
if not A:
return 0
# Sort the input array, compare current number with previous one.
moves = need = 0
for num in sorted(A):
# Current number need to be at least previous + 1.
moves += max(need - num, 0)
need = max(num, need) + 1
return moves
def main():
# Output: 1
A = [1, 2, 2]
print SolutionSortPrevPlusOne().minIncrementForUnique(A)
# Output: 6
A = [3, 2, 1, 2, 1, 7]
[1, 1, 2, 2, 3, 7]
print SolutionSortPrevPlusOne().minIncrementForUnique(A)
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
ca3b1c09705d65307851711dca71714915e4525a
|
Fix the formatting of log message
|
apophys/ipaqe-provision-hosts
|
ipaqe_provision_hosts/__main__.py
|
ipaqe_provision_hosts/__main__.py
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import logging
import sys
from ipaqe_provision_hosts.runner import create, delete
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
CONFIG_HELP_MSG = (
'Configuration file for the topology. Must contain core configuration as '
' well as configuration for backend. If not specified, the tool checks '
'the configuration from /etc/ipaqe-provision-hosts/config.yaml')
def main():
parser = argparse.ArgumentParser(description='FreeIPA provisioning')
parser.add_argument('-d', '--debug', dest='loglevel',
help='Set logging level. Default level is ERROR',
metavar='LEVEL')
subparsers = parser.add_subparsers(dest="command")
parser_create = subparsers.add_parser("create")
parser_create.add_argument("--topology", required=True, metavar='FILE',
help="The topology template file")
parser_create.add_argument("--output", required=True, metavar='FILE',
help="File to print final configuration into")
parser_create.add_argument("--config", required=False, metavar='FILE',
help=CONFIG_HELP_MSG)
parser_delete = subparsers.add_parser("delete")
parser_delete.add_argument("--config", required=False, metavar='FILE',
help=CONFIG_HELP_MSG)
args = parser.parse_args()
loglevel = None
if args.loglevel:
try:
loglevel = getattr(logging, args.loglevel.upper())
except AttributeError:
loglevel = logging.ERROR
finally:
logging.basicConfig(level=loglevel)
log = logging.getLogger(__name__)
log.debug('Setting log level to %s', logging.getLevelName(loglevel))
try:
if args.command == "create":
create(args.topology, args.config, args.output)
elif args.command == "delete":
delete(args.config)
except IPAQEProvisionerError:
# Backend exception should be handled by now
sys.exit(1)
except Exception as e:
log.error("Unhandled exception: %s", e)
sys.exit(1)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
from __future__ import print_function
import argparse
import logging
import sys
from ipaqe_provision_hosts.runner import create, delete
from ipaqe_provision_hosts.errors import IPAQEProvisionerError
CONFIG_HELP_MSG = (
'Configuration file for the topology. Must contain core configuration as '
' well as configuration for backend. If not specified, the tool checks '
'the configuration from /etc/ipaqe-provision-hosts/config.yaml')
def main():
parser = argparse.ArgumentParser(description='FreeIPA provisioning')
parser.add_argument('-d', '--debug', dest='loglevel',
help='Set logging level. Default level is ERROR',
metavar='LEVEL')
subparsers = parser.add_subparsers(dest="command")
parser_create = subparsers.add_parser("create")
parser_create.add_argument("--topology", required=True, metavar='FILE',
help="The topology template file")
parser_create.add_argument("--output", required=True, metavar='FILE',
help="File to print final configuration into")
parser_create.add_argument("--config", required=False, metavar='FILE',
help=CONFIG_HELP_MSG)
parser_delete = subparsers.add_parser("delete")
parser_delete.add_argument("--config", required=False, metavar='FILE',
help=CONFIG_HELP_MSG)
args = parser.parse_args()
loglevel = None
if args.loglevel:
try:
loglevel = getattr(logging, args.loglevel.upper())
except AttributeError:
loglevel = logging.ERROR
finally:
logging.basicConfig(level=loglevel)
log = logging.getLogger(__name__)
log.debug('Setting log level to {}'.format(logging.getLevelName(loglevel)))
try:
if args.command == "create":
create(args.topology, args.config, args.output)
elif args.command == "delete":
delete(args.config)
except IPAQEProvisionerError:
# Backend exception should be handled by now
sys.exit(1)
except Exception as e:
log.error("Unhandled exception: %s", e)
sys.exit(1)
if __name__ == "__main__":
main()
|
mit
|
Python
|
01afa5bdbdf1900b5d67ffb6b0bb880d257a1869
|
Update server.py
|
carloscadena/http-server,carloscadena/http-server
|
src/server.py
|
src/server.py
|
"""Server for http-server echo assignment."""
import socket # pragma: no cover
import sys # pragma: no cover
from email.utils import formatdate
def server(): # pragma: no cover
"""
Open the server, waits for input from client.
Closes connection on completed message.
Closes server with Ctrl-C
"""
server = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
address = ('127.0.0.1', 5000)
server.bind(address)
server.listen(1)
while True:
try:
connection, address = server.accept()
message = b''
buffer_length = 8
message_complete = False
while not message_complete:
part = connection.recv(buffer_length)
message += part
if b'\r\n\r\n' in message:
message_complete = True
print(message)
connection.sendall(response_ok())
connection.close()
except KeyboardInterrupt:
print('\nServer closed good bye.')
server.shutdown(socket.SHUT_WR)
server.close()
sys.exit(0)
def response_ok():
"""Send a 200 OK response."""
msg = b'HTTP/1.1 200 OK\r\n'
msg += u'Date: {}\r\n\r\n'.format(formatdate(usegmt=True)).encode('utf8')
return msg
def response_error():
"""Send a 500 server error response."""
return b'HTTP/1.1 500 Internal Server Error'
if __name__ == '__main__': # pragma: no cover
print('Server ready and waiting...\n')
server()
|
"""Server for http-server echo assignment."""
import socket # pragma: no cover
import sys # pragma: no cover
from email.utils import formatdate
def server(): # pragma: no cover
"""
Open the server, waits for input from client.
Closes connection on completed message.
Closes server with Ctrl-C
"""
server = socket.socket(socket.AF_INET,
socket.SOCK_STREAM,
socket.IPPROTO_TCP)
address = ('127.0.0.1', 5000)
server.bind(address)
server.listen(1)
while True:
try:
connection, address = server.accept()
message = b''
buffer_length = 8
message_complete = False
while not message_complete:
part = connection.recv(buffer_length)
message += part
if b'\r\n\r\n' in message:
message_complete = True
print(message)
connection.sendall(response_ok())
connection.close()
except KeyboardInterrupt:
print('\nServer closed good bye.')
server.shutdown(socket.SHUT_WR)
server.close()
sys.exit(0)
def response_ok():
"""Send a response OK."""
msg = b'HTTP/1.1 200 OK\r\nMessage recieved.\r\n'
msg += u'Date: {}\r\n\r\n'.format(formatdate(usegmt=True)).encode('utf8')
return msg
def response_error():
"""Send a response erorr."""
return b'HTTP/1.1 500 Internal Server Error\r\nError!'
if __name__ == '__main__': # pragma: no cover
print('Server ready and waiting...\n')
server()
|
mit
|
Python
|
5c97b9911a2dafde5fd1e4c40cda4e84974eb855
|
Allow keys to be set (in anticipation of write commands). Better object __repr__() for spaces and tickets.
|
markfinger/assembla
|
assembla/lib.py
|
assembla/lib.py
|
from functools import wraps
class AssemblaObject(object):
"""
Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`.
"""
def __init__(self, data):
self.data = data
def __getitem__(self, key):
return self.data[key]
def __setitem__(self, key, value):
self.data[key] = value
def keys(self):
return self.data.keys()
def values(self):
return self.data.values()
def get(self, *args, **kwargs):
return self.data.get(*args, **kwargs)
def __repr__(self):
if 'name' in self.data:
return "<%s: %s>" % (type(self).__name__, self.data['name'])
if ('number' in self.data) and ('summary' in self.data):
return "<%s: #%s - %s>" % (type(self).__name__, self.data['number'], self.data['summary'])
return super(AssemblaObject, self).__repr__()
def assembla_filter(func):
"""
Filters :data for the objects in it which possess attributes equal in
name/value to a key/value in kwargs.
Each key/value combination in kwargs is compared against the object, so
multiple keyword arguments can be passed in to constrain the filtering.
"""
@wraps(func)
def wrapper(class_instance, **kwargs):
results = func(class_instance)
if not kwargs:
return results
else:
return filter(
# Find the objects who have an equal number of matching attr/value
# combinations as `len(kwargs)`
lambda obj: len(kwargs) == len(
filter(
lambda boolean: boolean,
[obj.get(attr_name) == value
for attr_name, value in kwargs.iteritems()]
)
),
results
)
return wrapper
|
from functools import wraps
class AssemblaObject(object):
"""
Proxies getitem calls (eg: `instance['id']`) to a dictionary `instance.data['id']`.
"""
def __init__(self, data):
self.data = data
def __getitem__(self, key):
return self.data[key]
def keys(self):
return self.data.keys()
def values(self):
return self.data.values()
def get(self, *args, **kwargs):
return self.data.get(*args, **kwargs)
def assembla_filter(func):
"""
Filters :data for the objects in it which possess attributes equal in
name/value to a key/value in kwargs.
Each key/value combination in kwargs is compared against the object, so
multiple keyword arguments can be passed in to constrain the filtering.
"""
@wraps(func)
def wrapper(class_instance, **kwargs):
results = func(class_instance)
if not kwargs:
return results
else:
return filter(
# Find the objects who have an equal number of matching attr/value
# combinations as `len(kwargs)`
lambda obj: len(kwargs) == len(
filter(
lambda boolean: boolean,
[obj.get(attr_name) == value
for attr_name, value in kwargs.iteritems()]
)
),
results
)
return wrapper
|
mit
|
Python
|
d013f50b92e968258b14b67ebea9e70b4c35dcb0
|
Fix completion
|
iff/dotfiles-old,iff/dotfiles-old
|
pylibs/ropemode/environment.py
|
pylibs/ropemode/environment.py
|
class Environment(object):
def ask(self, prompt, default=None, starting=None):
pass
def ask_values(self, prompt, values, default=None, starting=None):
pass
def ask_directory(self, prompt, default=None, starting=None):
pass
def ask_completion(self, prompt, values, starting=None):
pass
def message(self, message):
pass
def yes_or_no(self, prompt):
pass
def y_or_n(self, prompt):
pass
def get(self, name, default=None):
pass
def get_offset(self):
pass
def get_text(self):
pass
def get_region(self):
pass
def filename(self):
pass
def is_modified(self):
pass
def goto_line(self, lineno):
pass
def insert_line(self, line, lineno):
pass
def insert(self, text):
pass
def delete(self, start, end):
pass
def filenames(self):
pass
def save_files(self, filenames):
pass
def reload_files(self, filenames, moves=None):
pass
def find_file(self, filename, readonly=False, other=False):
pass
def create_progress(self, name):
pass
def current_word(self):
pass
def push_mark(self):
pass
def pop_mark(self):
pass
def prefix_value(self, prefix):
pass
def show_occurrences(self, locations):
pass
def show_doc(self, docs, altview=False):
pass
def preview_changes(self, diffs):
pass
def local_command(self, name, callback, key=None, prefix=False):
pass
def global_command(self, name, callback, key=None, prefix=False):
pass
def add_hook(self, name, callback, hook):
pass
@staticmethod
def _completion_text(proposal):
return proposal.name.partition(':')[0]
def _completion_data(self, proposal):
return self._completion_text(proposal)
|
class Environment(object):
def ask(self, prompt, default=None, starting=None):
pass
def ask_values(self, prompt, values, default=None, starting=None):
pass
def ask_directory(self, prompt, default=None, starting=None):
pass
def ask_completion(self, prompt, values, starting=None):
pass
def message(self, message):
pass
def yes_or_no(self, prompt):
pass
def y_or_n(self, prompt):
pass
def get(self, name, default=None):
pass
def get_offset(self):
pass
def get_text(self):
pass
def get_region(self):
pass
def filename(self):
pass
def is_modified(self):
pass
def goto_line(self, lineno):
pass
def insert_line(self, line, lineno):
pass
def insert(self, text):
pass
def delete(self, start, end):
pass
def filenames(self):
pass
def save_files(self, filenames):
pass
def reload_files(self, filenames, moves=None):
pass
def find_file(self, filename, readonly=False, other=False):
pass
def create_progress(self, name):
pass
def current_word(self):
pass
def push_mark(self):
pass
def pop_mark(self):
pass
def prefix_value(self, prefix):
pass
def show_occurrences(self, locations):
pass
def show_doc(self, docs, altview=False):
pass
def preview_changes(self, diffs):
pass
def local_command(self, name, callback, key=None, prefix=False):
pass
def global_command(self, name, callback, key=None, prefix=False):
pass
def add_hook(self, name, callback, hook):
pass
@staticmethod
def _completion_text(proposal):
return proposal.name
def _completion_data(self, proposal):
return self._completion_text(proposal)
|
mit
|
Python
|
1563c35f10ac4419d6c732e0e25c3d2d62fcd3fd
|
send all available output to client if are multiple lines available
|
wilsaj/hey
|
hey/server.py
|
hey/server.py
|
from twisted.internet import protocol, reactor
from twisted.internet.endpoints import TCP4ServerEndpoint
try:
from Queue import Queue, Empty
except ImportError:
# python 3.x
from queue import Queue, Empty
class HeyQueueFactory(protocol.Factory, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
super(HeyQueueFactory, self).__init__(*args, **kwargs)
def buildProtocol(self, addr):
return HeyQueueProtocol(self.outQueue)
class HeyQueueProtocol(protocol.Protocol, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
super(HeyQueueProtocol, self).__init__(*args, **kwargs)
def dataReceived(self, data):
if data == 'whatsup':
self.whatsup()
if data == 'stopit':
self.stopit()
def stopit(self):
self.transport.write('stopping server')
reactor.callLater(1, reactor.stop)
def whatsup(self):
output = ""
while True:
try:
output += self.outQueue.get_nowait()
except Empty:
if output == "":
output = "nothing to report, sir"
break
self.transport.write(output)
class HeyProcessProtocol(protocol.ProcessProtocol, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
self.status = 'open'
super(HeyProcessProtocol, self).__init__(*args, **kwargs)
def outReceived(self, data):
self.outQueue.put(data)
def processExited(self, reason):
self.status = 'closed'
def processEnded(self, reason):
self.status = 'closed'
class HeyServer(object):
def __init__(self, command, port):
outQueue = Queue()
self.proc = HeyProcessProtocol(outQueue)
reactor.spawnProcess(self.proc, command[0], command, usePTY=True)
endpoint = TCP4ServerEndpoint(reactor, port)
endpoint.listen(HeyQueueFactory(outQueue))
def run(self):
reactor.run()
def start(command):
host, port = "localhost", 9999
server = HeyServer(command, port)
server.run()
|
from twisted.internet import protocol, reactor
from twisted.internet.endpoints import TCP4ServerEndpoint
try:
from Queue import Queue, Empty
except ImportError:
# python 3.x
from queue import Queue, Empty
class HeyQueueFactory(protocol.Factory, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
super(HeyQueueFactory, self).__init__(*args, **kwargs)
def buildProtocol(self, addr):
return HeyQueueProtocol(self.outQueue)
class HeyQueueProtocol(protocol.Protocol, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
super(HeyQueueProtocol, self).__init__(*args, **kwargs)
def dataReceived(self, data):
if data == 'whatsup':
self.whatsup()
if data == 'stopit':
self.stopit()
def stopit(self):
self.transport.write('stopping server')
reactor.callLater(1, reactor.stop)
def whatsup(self):
try:
output = self.outQueue.get_nowait()
except Empty:
output = "nothing to report, sir"
self.transport.write(output)
class HeyProcessProtocol(protocol.ProcessProtocol, object):
def __init__(self, outQueue, *args, **kwargs):
self.outQueue = outQueue
self.status = 'open'
super(HeyProcessProtocol, self).__init__(*args, **kwargs)
def outReceived(self, data):
self.outQueue.put(data)
def processExited(self, reason):
self.status = 'closed'
def processEnded(self, reason):
self.status = 'closed'
class HeyServer(object):
def __init__(self, command, port):
outQueue = Queue()
self.proc = HeyProcessProtocol(outQueue)
reactor.spawnProcess(self.proc, command[0], command, usePTY=True)
endpoint = TCP4ServerEndpoint(reactor, port)
endpoint.listen(HeyQueueFactory(outQueue))
def run(self):
reactor.run()
def start(command):
host, port = "localhost", 9999
server = HeyServer(command, port)
server.run()
|
bsd-2-clause
|
Python
|
0dce50c77963ef0d2cdb168f85c2588d62f43220
|
Remove duplicate import
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend
|
yunity/stores/models.py
|
yunity/stores/models.py
|
from config import settings
from yunity.base.base_models import BaseModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE)
name = models.TextField()
description = models.TextField(null=True)
|
from django.db import models
from config import settings
from yunity.base.base_models import BaseModel
from django.db import models
class PickupDate(BaseModel):
date = models.DateTimeField()
collectors = models.ManyToManyField(settings.AUTH_USER_MODEL)
store = models.ForeignKey('stores.store', related_name='pickupdates', on_delete=models.CASCADE)
max_collectors = models.IntegerField(null=True)
class Store(BaseModel):
group = models.ForeignKey('groups.Group', on_delete=models.CASCADE)
name = models.TextField()
description = models.TextField(null=True)
|
agpl-3.0
|
Python
|
bc63b8f19742277ad96c2427405f1430687430d1
|
expire jwt in 1 day
|
saks/hb,saks/hb,saks/hb,saks/hb
|
hbapi/settings/heroku.py
|
hbapi/settings/heroku.py
|
import dj_database_url
import os
from .base import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = [('znotdead', '[email protected]')]
DATABASES['default'] = dj_database_url.config()
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
ALLOWED_HOSTS = [".herokuapp.com", ]
REDIS_POOL = redis.ConnectionPool.from_url(os.environ.get('REDISCLOUD_URL'))
REDIS_CONN = redis.Redis(connection_pool=REDIS_POOL)
if not DEBUG:
SECURE_SSL_REDIRECT = True
JWT_AUTH = {
'JWT_EXPIRATION_DELTA': datetime.timedelta(days=1),
}
|
import dj_database_url
import os
from .base import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
ADMINS = [('znotdead', '[email protected]')]
DATABASES['default'] = dj_database_url.config()
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
ALLOWED_HOSTS = [".herokuapp.com", ]
REDIS_POOL = redis.ConnectionPool.from_url(os.environ.get('REDISCLOUD_URL'))
REDIS_CONN = redis.Redis(connection_pool=REDIS_POOL)
if not DEBUG:
SECURE_SSL_REDIRECT = True
|
mit
|
Python
|
cee2368dac250ef9655a3df9af3188b8abd095dc
|
Disable slow test. Not intended to run.
|
PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge
|
spec/puzzle/examples/gph/a_basic_puzzle_spec.py
|
spec/puzzle/examples/gph/a_basic_puzzle_spec.py
|
from data import warehouse
from puzzle.examples.gph import a_basic_puzzle
from puzzle.problems import number_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('a_basic_puzzle'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = a_basic_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('parses'):
problems = self.subject.problems()
expect(problems).to(have_len(len(a_basic_puzzle.SOURCE.split('\n')) - 2))
for problem in problems:
expect(problem).to(be_a(number_problem.NumberProblem))
with it('solves first problem'):
expect(self.subject.problem(0).solution).not_to(be_empty)
with it('gets some solutions right'):
solutions = self.subject.solutions()
matches = []
expect(solutions).to(equal([
'decimal +25',
'octal +12',
'sept e nary +1',
'binary +1',
None,
'qui nary +9',
None,
None,
'quaternary +12',
None
]))
|
from data import warehouse
from puzzle.examples.gph import a_basic_puzzle
from puzzle.problems import number_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with description('a_basic_puzzle'):
with before.all:
warehouse.save()
prod_config.init()
self.subject = a_basic_puzzle.get()
with after.all:
prod_config.reset()
warehouse.restore()
with it('parses'):
problems = self.subject.problems()
expect(problems).to(have_len(len(a_basic_puzzle.SOURCE.split('\n')) - 2))
for problem in problems:
expect(problem).to(be_a(number_problem.NumberProblem))
with it('solves first problem'):
expect(self.subject.problem(0).solution).not_to(be_empty)
with it('gets some solutions right'):
solutions = self.subject.solutions()
matches = []
expect(solutions).to(equal([
'decimal +25',
'octal +12',
'sept e nary +1',
'binary +1',
None,
'qui nary +9',
None,
None,
'quaternary +12',
None
]))
|
mit
|
Python
|
3066f8f64f185624fe95a696d7fcef102dc61921
|
add galery models
|
Samael500/helena,Samael500/helena,Samael500/helena
|
helena/content/models.py
|
helena/content/models.py
|
from django.db import models
from helpers.service import image_path
class ImgWithDescr(models.Model):
""" class with genres model """
directory = None
def get_image_path(instace, filename):
return image_path(instace, filename, directory=self.directory)
title = models.CharField(verbose_name='Заголовок', max_length=200, required=True)
description = models.TextField(verbose_name='Описание')
image = models.ImageField(verbose_name='Изображение', upload_to=get_image_path, required=True)
def __str__(self):
return self.title
class Meta:
abstract = True
class Genres(ImgWithDescr):
""" class with genres model """
directory = 'genres'
class Meta:
verbose_name = 'жанр'
verbose_name_plural = 'жанры'
class Gallery(ImgWithDescr):
""" class with gallery model """
directory = 'gallery'
external_img = models.URLField(verbose_name='Изображение во внешнем источнике')
def img_url(self):
""" return external img url or self file img """
return self.external_img or self.image.url
class Meta:
verbose_name = 'изображение в галлерее'
verbose_name_plural = 'изображения в галлерее'
|
from django.db import models
from helpers.service import image_path
class Genres(models.Model):
""" class with genres model """
def get_image_path(instace, filename):
return image_path(instace, filename, directory='genres')
title = models.CharField(verbose_name='Заголовок', max_length=200)
description = models.TextField(verbose_name='Описание')
image = models.ImageField(verbose_name='Изображение пример', upload_to=get_image_path)
def __str__(self):
return self.title
class Meta:
verbose_name = 'жанр'
verbose_name_plural = 'жанры'
|
unlicense
|
Python
|
a1d3304f993702460077d7f6c70607131aff874b
|
add fix keyword
|
IfengAutomation/uitester,IfengAutomation/uitester
|
libs/player.py
|
libs/player.py
|
# @Time : 2016/11/11 11:01
# @Author : lixintong
from keywords import keyword, var_cache
@keyword('current_activity')
def current_activity(acticity_desc):
"""
:param acticity_desc:video_player or topic_player or live or vr_live or pic_player or local_player
:return:
"""
return var_cache['proxy'].current_activity(acticity_desc)
@keyword('change_video_state')
def change_video_state(player_name, state):
"""
:param player_name: player_name or topic_player or live or pic_player or local_player
:param state: play or pause
:return:
"""
return var_cache['proxy'].change_video_state(player_name, state)
|
# @Time : 2016/11/11 11:01
# @Author : lixintong
from keywords import keyword, var_cache
@keyword('current_activity')
def current_activity(acticity_desc):
"""
:param acticity_desc:video_player or topic_player or live or vr_live or pic_player or local_player
:return:
"""
return var_cache['proxy'].current_activity(acticity_desc)
@keyword('change_video_state')
def change_video_state(player_name, state):
"""
:param player_name: player_name or topic_player or live or vr_live or pic_player or local_player
:param state: play or pause
:return:
"""
return var_cache['proxy'].change_video_state(player_name, state)
|
apache-2.0
|
Python
|
04d0bb5a32b3e1b66c6ac1e27df656aed607c3cb
|
Test suite: Fix re_util doctest on PyPy
|
gencer/python-phonenumbers,daviddrysdale/python-phonenumbers,titansgroup/python-phonenumbers,daviddrysdale/python-phonenumbers,daviddrysdale/python-phonenumbers
|
python/phonenumbers/re_util.py
|
python/phonenumbers/re_util.py
|
"""Additional regular expression utilities, to make it easier to sync up
with Java regular expression code.
>>> import re
>>> from .re_util import fullmatch
>>> from .util import u
>>> string = 'abcd'
>>> r1 = re.compile('abcd')
>>> r2 = re.compile('bc')
>>> r3 = re.compile('abc')
>>> fullmatch(r1, string) # doctest: +ELLIPSIS
<...SRE_Match object...>
>>> fullmatch(r2, string)
>>> fullmatch(r3, string)
>>> r = re.compile('\\d{8}|\\d{10,11}')
>>> m = fullmatch(r, '1234567890')
>>> m.end()
10
>>> r = re.compile(u('[+\uff0b\\d]'), re.UNICODE)
>>> m = fullmatch(r, u('\uff10'))
>>> m.end()
1
"""
import re
def fullmatch(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning a match
object if the whole string matches, or None if no match was found."""
# Build a version of the pattern with a non-capturing group around it.
# This is needed to get m.end() to correctly report the size of the
# matched expression (as per the final doctest above).
grouped_pattern = re.compile("^(?:%s)$" % pattern.pattern, pattern.flags)
m = grouped_pattern.match(string)
if m and m.end() < len(string):
# Incomplete match (which should never happen because of the $ at the
# end of the regexp), treat as failure.
m = None # pragma no cover
return m
if __name__ == '__main__': # pragma no cover
import doctest
doctest.testmod()
|
"""Additional regular expression utilities, to make it easier to sync up
with Java regular expression code.
>>> import re
>>> from .re_util import fullmatch
>>> from .util import u
>>> string = 'abcd'
>>> r1 = re.compile('abcd')
>>> r2 = re.compile('bc')
>>> r3 = re.compile('abc')
>>> fullmatch(r1, string) # doctest: +ELLIPSIS
<_sre.SRE_Match object...>
>>> fullmatch(r2, string)
>>> fullmatch(r3, string)
>>> r = re.compile('\\d{8}|\\d{10,11}')
>>> m = fullmatch(r, '1234567890')
>>> m.end()
10
>>> r = re.compile(u('[+\uff0b\\d]'), re.UNICODE)
>>> m = fullmatch(r, u('\uff10'))
>>> m.end()
1
"""
import re
def fullmatch(pattern, string, flags=0):
"""Try to apply the pattern at the start of the string, returning a match
object if the whole string matches, or None if no match was found."""
# Build a version of the pattern with a non-capturing group around it.
# This is needed to get m.end() to correctly report the size of the
# matched expression (as per the final doctest above).
grouped_pattern = re.compile("^(?:%s)$" % pattern.pattern, pattern.flags)
m = grouped_pattern.match(string)
if m and m.end() < len(string):
# Incomplete match (which should never happen because of the $ at the
# end of the regexp), treat as failure.
m = None # pragma no cover
return m
if __name__ == '__main__': # pragma no cover
import doctest
doctest.testmod()
|
apache-2.0
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.