commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---
7f0b530db953698e6e923366be6d0d98033e4afb | add description | seanpwilliams/pronto | prontopull.py | prontopull.py | # -*- coding: utf-8 -*-
'''
Pulls data from pronto cycle share. Combine with cron job to
get data over time
'''
from urllib2 import Request, urlopen
import json
from pandas.io.json import json_normalize
import time
url = "https://secure.prontocycleshare.com/data/stations.json"
request = Request(url)
response = urlopen(request)
data = json.loads(response.read())
df=json_normalize(data['stations'])
timestring = time.strftime("%Y%m%d-%H%M%S")
SAVE_PATH = "../Desktop/pronto/pronto%s.csv" %timestring
df.to_csv(SAVE_PATH, sep = ",")
| # -*- coding: utf-8 -*-
from urllib2 import Request, urlopen
import json
from pandas.io.json import json_normalize
import time
#from datetime import datetime
url = "https://secure.prontocycleshare.com/data/stations.json"
request = Request(url)
response = urlopen(request)
data = json.loads(response.read())
df=json_normalize(data['stations'])
timestring = time.strftime("%Y%m%d-%H%M%S")
SAVE_PATH = "../Desktop/pronto/pronto%s.csv" %timestring
df.to_csv(SAVE_PATH, sep = ",")
| mit | Python |
d016e9f2620688bc1059977a12df638393c3fff1 | Bump version | markstory/lint-review,markstory/lint-review,markstory/lint-review | lintreview/__init__.py | lintreview/__init__.py | __version__ = '2.1.2'
| __version__ = '2.1.1'
| mit | Python |
c86e7107d2f9d8079b0010ac100f627f1c34d127 | Update ipc_lista1.2.py | any1m1c/ipc20161 | lista1/ipc_lista1.2.py | lista1/ipc_lista1.2.py | #ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número].
number = input("Digite um número: ")
print "O número digitado foi ",number
| #ipc_lista1.2
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça um número e então mostre a mensagem O número informado foi [número].
number = input("Digite um número: ")
print "O número digitado foi ",number
| apache-2.0 | Python |
85432b9509744eadc47c73a21b49f9ea93172c78 | Update ipc_lista1.8.py | any1m1c/ipc20161 | lista1/ipc_lista1.8.py | lista1/ipc_lista1.8.py | #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 161531
| #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615
| apache-2.0 | Python |
9cad93eb5f04e9f455cec679089d8c8787ce3b04 | Enable appsembler reporting settings | gymnasium/edx-platform,gymnasium/edx-platform,gymnasium/edx-platform,gymnasium/edx-platform | lms/envs/appsembler.py | lms/envs/appsembler.py | import os
import json
from path import path
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
CONFIG_ROOT = path('/edx/app/edxapp/') #don't hardcode this in the future
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
with open(CONFIG_ROOT / CONFIG_PREFIX + 'env.json') as env_file:
ENV_TOKENS = json.load(env_file)
APPSEMBLER_FEATURES = ENV_TOKENS.get('APPSEMBLER_FEATURES', {})
# search APPSEMBLER_FEATURES first, env variables second, fallback to None
GOOGLE_TAG_MANAGER_ID = APPSEMBLER_FEATURES.get('GOOGLE_TAG_MANAGER_ID', os.environ.get('GOOGLE_TAG_MANAGER_ID', None))
INTERCOM_APP_ID = APPSEMBLER_FEATURES.get('INTERCOM_APP_ID', os.environ.get('INTERCOM_APP_ID', ''))
INTERCOM_API_KEY = APPSEMBLER_FEATURES.get('INTERCOM_API_KEY', os.environ.get('INTERCOM_API_KEY', ''))
INTERCOM_USER_EMAIL = APPSEMBLER_FEATURES.get('INTERCOM_USER_EMAIL', os.environ.get('INTERCOM_USER_EMAIL', ''))
if APPSEMBLER_FEATURES.get('ENABLE_APPSEMBLER_REPORTING', False):
from appsembler_reporting.settings import APPSEMBLER_REPORTING
APPSEMBLER_REPORTING.update(APPSEMBLER_FEATURES.get(
'APPSEMBLER_REPORTING', {} ))
| import os
import json
from path import path
SERVICE_VARIANT = os.environ.get('SERVICE_VARIANT', None)
CONFIG_ROOT = path('/edx/app/edxapp/') #don't hardcode this in the future
CONFIG_PREFIX = SERVICE_VARIANT + "." if SERVICE_VARIANT else ""
with open(CONFIG_ROOT / CONFIG_PREFIX + 'env.json') as env_file:
ENV_TOKENS = json.load(env_file)
APPSEMBLER_FEATURES = ENV_TOKENS.get('APPSEMBLER_FEATURES', {})
# search APPSEMBLER_FEATURES first, env variables second, fallback to None
GOOGLE_TAG_MANAGER_ID = APPSEMBLER_FEATURES.get('GOOGLE_TAG_MANAGER_ID', os.environ.get('GOOGLE_TAG_MANAGER_ID', None))
INTERCOM_APP_ID = APPSEMBLER_FEATURES.get('INTERCOM_APP_ID', os.environ.get('INTERCOM_APP_ID', ''))
INTERCOM_API_KEY = APPSEMBLER_FEATURES.get('INTERCOM_API_KEY', os.environ.get('INTERCOM_API_KEY', ''))
INTERCOM_USER_EMAIL = APPSEMBLER_FEATURES.get('INTERCOM_USER_EMAIL', os.environ.get('INTERCOM_USER_EMAIL', ''))
| agpl-3.0 | Python |
4315d028f114ae1005f57d33df964be05b2fb8a6 | use bin/penchy_test_job instead of running it directly | fhirschmann/penchy,fhirschmann/penchy | docs/commented_sample_job.py | docs/commented_sample_job.py | # A job description is two part: part 1 introduces the involved elements and
# part 2 joins them in a job
# part 1: introduce the elements
# setup job environment
from penchy.jobs import *
# import the configuration file (if needed)
import config
# define a node
node = NodeConfiguration(
# that is the localhost
'localhost',
# ssh port is 22
22,
# the executing user is the current one
os.environ['USER'],
# we execute in /tmp
'/tmp',
# all jvm are specified relative to /usr/bin
'/usr/bin')
# define a jvm with relative path java
jvm = jvms.JVM('java')
# you can also specify an absolute path:
# jvm = jvms.JVM('/usr/java')
# fuse jvm and node
jconfig = makeJVMNodeConfiguration(jvm, node,
# and give it a decorative name (optional)
name="Simple Example!")
# setup a workload
w = workloads.ScalaBench('dummy')
# and add it the the jvms that should execute it
jvm.workload = w
# setup filter, used in flows
f1 = filters.DacapoHarness()
f2 = filters.Print()
# part 2: form elements to a job
job = Job(
# setup the JVMNodeConfigurations that are included, can be a single one or
# a list of configurations
configurations=jconfig,
# specify the flow of data on clients
client_flow=[
# flow from Scalabench workload to DacapoHarness filter
Edge(w, f1,
# and match filter inputs to workload outputs (here with same name)
[('stderr', 'stderr'),
('exit_code', 'exit_code')]),
# flow from ScalaBench workload to Print filter
Edge(w, f2,
# and feed stderr and exit_code output prefix with 'workload_' to filter
[('stderr', 'workload_stderr'),
('exit_code', 'workload_exit_code')]),
# feed whole output of DacapoHarness filter to print filter (with the name of the output)
Edge(f1, f2)
],
# there is no flow on the server side
server_flow=[],
# jvms will be run twice
invocations = 2
)
| # A job description is two part: part 1 introduces the involved elements and
# part 2 joins them in a job
# part 1: introduce the elements
# setup job environment
from penchy.jobs import *
# define a node
node = NodeConfiguration(
# that is the localhost
'localhost',
# ssh port is 22
22,
# the executing user is the current one
os.environ['USER'],
# we execute in /tmp
'/tmp',
# all jvm are specified relative to /usr/bin
'/usr/bin')
# define a jvm with relative path java
jvm = jvms.JVM('java')
# you can also specify an absolute path:
# jvm = jvms.JVM('/usr/java')
# fuse jvm and node
jconfig = makeJVMNodeConfiguration(jvm, node,
# and give it a decorative name (optional)
name="Simple Example!")
# setup a workload
w = workloads.ScalaBench('dummy')
# and add it the the jvms that should execute it
jvm.workload = w
# setup filter, used in flows
f1 = filters.DacapoHarness()
f2 = filters.Print()
# part 2: form elements to a job
job = Job(
# setup the JVMNodeConfigurations that are included, can be a single one or
# a list of configurations
configurations=jconfig,
# specify the flow of data on clients
client_flow=[
# flow from Scalabench workload to DacapoHarness filter
Edge(w, f1,
# and match filter inputs to workload outputs (here with same name)
[('stderr', 'stderr'),
('exit_code', 'exit_code')]),
# flow from ScalaBench workload to Print filter
Edge(w, f2,
# and feed stderr and exit_code output prefix with 'workload_' to filter
[('stderr', 'workload_stderr'),
('exit_code', 'workload_exit_code')]),
# feed whole output of DacapoHarness filter to print filter (with the name of the output)
Edge(f1, f2)
],
# there is no flow on the server side
server_flow=[],
# jvms will be run twice
invocations = 2
)
# a nice trick: check the job for plausibility if run as ``python <jobname>``
if __name__ == '__main__':
job.check()
| mit | Python |
927de70d3212c5106846b6f6f6333b93eceacea5 | add python 脚本 | ohun/mpush,hongjun117/mpush,hongjun117/mpush,mpusher/mpush,mpusher/mpush,mpusher/mpush,hongjun117/mpush,ohun/mpush | pub-python.py | pub-python.py | # coding=utf8
import paramiko
import datetime
import telnetlib
HOSTS = [
{
'HOST':'hive1_host',
'PORT':9092,
'USER':'root'
},
{
'HOST':'hive2_host',
'PORT':9092,
'USER':'root'
}
]
BASEPATH = '/root/mpush'
class SSH():
def __init__(self):
self.client = None
def connect(self,host,port=22,username='root',password=None):
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(host, port, username=username, password=password, timeout=10)
return self
def exe(self,cmd,isprint=True):
if not cmd:
return
stdin, stdout, stderr = self.client.exec_command(cmd)
if isprint:
for std in stdout.readlines():
print std,
return stdin, stdout, stderr
def close(self):
if self.client:
self.client.close()
def showText(s, typ):
if typ == 'RED':
return redText(s)
elif typ == 'GREEN':
return greenText(s)
elif typ == 'YELLOW':
return yellowText(s)
else:
return s
def redText(s):
return "\033[1;31m%s\033[0m" % s
def greenText(s):
return "\033[1;32m%s\033[0m" % s
def yellowText(s):
return "\033[1;33m%s\033[0m" % s
def main():
for item in HOSTS:
ssh = SSH().connect(item['HOST'],item['PORT'],username=item['USER'])
##backup
base = BASEPATH+'/mpush-jar-with-dependency.tar.gz'
to = BASEPATH+'/back/mpush-jar-with-dependency.tar.gz.'+datetime.datetime.now().strftime('%Y%m%d%H%M%S')
ssh.exe('cp %s %s '%(base,to))
print greenText('backup mpush ok')
##telnet remove zk info
#ssh.exe('telent 127.0.0.1 4001')
#ssh.exe('')
## kill process
ssh.exe('ps aux|grep mpush-cs.jar')
## start process
# ssh.exe('')
ssh.close()
if __name__ == "__main__":
main()
| # coding=utf8
import paramiko
import datetime
HOSTS = [
{
'HOST':'hive1_host',
'PORT':9092,
'USER':'root'
},
{
'HOST':'hive2_host',
'PORT':9092,
'USER':'root'
}
]
class SSH():
def __init__(self):
self.client = None
def connect(self,host,port=22,username='root',password=None):
self.client = paramiko.SSHClient()
self.client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.client.connect(host, port, username=username, password=password, timeout=10)
return self
def exe(self,cmd,isprint=True):
if not cmd:
return
stdin, stdout, stderr = self.client.exec_command(cmd)
if isprint:
for std in stdout.readlines():
print std,
return stdin, stdout, stderr
def close(self):
if self.client:
self.client.close()
def showText(s, typ):
if typ == 'RED':
return redText(s)
elif typ == 'GREEN':
return greenText(s)
elif typ == 'YELLOW':
return yellowText(s)
else:
return s
def redText(s):
return "\033[1;31m%s\033[0m" % s
def greenText(s):
return "\033[1;32m%s\033[0m" % s
def yellowText(s):
return "\033[1;33m%s\033[0m" % s
def main():
for item in HOSTS:
ssh = SSH().connect(item['HOST'],item['PORT'],username=item['USER'])
##back
base = '/root/mpush/mpush-jar-with-dependency.tar.gz'
to = '/root/mpush/back/mpush-jar-with-dependency.tar.gz.'+datetime.datetime.now().strftime('%Y%m%d%H%M%S')
ssh.exe('cp %s %s '%(base,to))
ssh.close()
if __name__ == "__main__":
main()
| apache-2.0 | Python |
9f699f66c1ff14d884157cee358793d715b1e702 | delete print | LiGhT1EsS/cobra,40huo/cobra,braveghz/cobra,40huo/cobra,braveghz/cobra,40huo/cobra,wufeifei/cobra,LiGhT1EsS/cobra,LiGhT1EsS/cobra,LiGhT1EsS/cobra,40huo/cobra,40huo/cobra,wufeifei/cobra,wufeifei/cobra,LiGhT1EsS/cobra,braveghz/cobra,braveghz/cobra,wufeifei/cobra,wufeifei/cobra,braveghz/cobra,40huo/cobra,LiGhT1EsS/cobra,braveghz/cobra,wufeifei/cobra | tests/test_apiserver.py | tests/test_apiserver.py | # -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <[email protected]>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
# 测试完成需要手动关闭 API server 和扫描进程
# kill -9 $(ps aux|grep test_apiserver.py|awk '{print $2}')
# kill -9 $(ps aux|grep cobra.py|awk '{print $2}')
# 第一次启动 server 测试可能会卡住
import requests
from cobra.api import start
import json
start(host="127.0.0.1", port=5000, debug=True)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": "https://github.com/wufeifei/grw.git",
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "Add scan job successfully" in re.content
assert "scan_id" in re.content
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"scan_id": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "msg" in re.content
assert "scan_id" in re.content
assert "status" in re.content
assert "report" in re.content
| # -*- coding: utf-8 -*-
"""
tests.apiserver
~~~~~~~~~~~~
Tests cobra.api
:author: 40huo <[email protected]>
:homepage: https://github.com/wufeifei/cobra
:license: MIT, see LICENSE for more details.
:copyright: Copyright (c) 2017 Feei. All rights reserved
"""
# 测试完成需要手动关闭 API server 和扫描进程
# kill -9 $(ps aux|grep test_apiserver.py|awk '{print $2}')
# kill -9 $(ps aux|grep cobra.py|awk '{print $2}')
# 第一次启动 server 测试可能会卡住
import requests
from cobra.api import start
import json
start(host="127.0.0.1", port=5000, debug=True)
def test_add_job():
url = "http://127.0.0.1:5000/api/add"
post_data = {
"key": "your_secret_key",
"target": "https://github.com/wufeifei/grw.git",
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
assert "1001" in re.content
assert "Add scan job successfully" in re.content
assert "scan_id" in re.content
def test_job_status():
url = "http://127.0.0.1:5000/api/status"
post_data = {
"key": "your_secret_key",
"scan_id": 24,
}
headers = {
"Content-Type": "application/json",
}
re = requests.post(url=url, data=json.dumps(post_data), headers=headers)
print re.content
assert "1001" in re.content
assert "msg" in re.content
assert "scan_id" in re.content
assert "status" in re.content
assert "report" in re.content
| mit | Python |
17d79c5ec4584ea2f1f8b7fe52b157b3988bb7fc | test gap score | yjzhang/uncurl_python,yjzhang/uncurl_python | tests/test_gap_score.py | tests/test_gap_score.py | """
Using gap score to determine optimal cluster number
"""
import unittest
from unittest import TestCase
from flaky import flaky
import numpy as np
import scipy
from uncurl import gap_score
class GapScoreTest(TestCase):
def setUp(self):
pass
def test_gap_score(self):
data_mat = scipy.io.loadmat('data/10x_pooled_400.mat')
data = data_mat['data']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
# just test that the score is in a very broad range
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 20)
def test_gap_score_2(self):
data_mat = scipy.io.loadmat('data/GSE60361_dat.mat')
data = data_mat['Dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 30)
@flaky(max_runs=3)
def test_gap_score_3(self):
data_mat = scipy.io.loadmat('data/SCDE_test.mat')
data = data_mat['dat']
data_tsvd = gap_score.preproc_data(data, gene_subset=True)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k < 10)
if __name__ == '__main__':
unittest.main()
| """
Using gap score to determine optimal cluster number
"""
import unittest
from unittest import TestCase
from flaky import flaky
import numpy as np
import scipy
from uncurl import gap_score
class GapScoreTest(TestCase):
def setUp(self):
pass
def test_gap_score(self):
data_mat = scipy.io.loadmat('data/10x_pooled_400.mat')
data = data_mat['data']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
# just test that the score is in a very broad range
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 20)
def test_gap_score_2(self):
data_mat = scipy.io.loadmat('data/GSE60361_dat.mat')
data = data_mat['Dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k > 3)
self.assertTrue(max_k < 30)
@flaky(max_runs=3)
def test_gap_score_3(self):
data_mat = scipy.io.loadmat('data/SCDE_test.mat')
data = data_mat['dat']
data_tsvd = gap_score.preproc_data(data)
max_k, gap_vals, sk_vals = gap_score.run_gap_k_selection(data_tsvd,
k_min=1, k_max=50, skip=5, B=5)
self.assertTrue(max_k < 10)
if __name__ == '__main__':
unittest.main()
| mit | Python |
04f7b8aa85bf2bb2c16eb246ee7c9d7ae5fc8cff | check contents | deginner/bitjws | tests/test_roundtrip.py | tests/test_roundtrip.py | import json
import bitjws
def test_encode_decode():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key)
header, payload = bitjws.validate_deserialize(ser)
rawheader, rawpayload = ser.rsplit('.', 1)[0].split('.')
origheader = bitjws.base64url_decode(rawheader.encode('utf8'))
origpayload = bitjws.base64url_decode(rawpayload.encode('utf8'))
assert header['typ'] == 'JWT'
assert header['alg'] == 'CUSTOM-BITCOIN-SIGN'
assert header['kid'] == bitjws.pubkey_to_addr(key.pubkey.serialize())
assert len(header) == 3
assert header == json.loads(origheader.decode('utf8'))
assert isinstance(payload.get('exp', ''), float)
assert payload['aud'] is None
assert len(payload) == 2
assert payload == json.loads(origpayload.decode('utf8'))
def test_audience():
key = bitjws.PrivateKey()
audience = 'https://example.com/api/login'
ser = bitjws.sign_serialize(key, requrl=audience)
header, payload = bitjws.validate_deserialize(ser, requrl=audience)
assert header is not None
assert payload is not None
assert payload['aud'] == audience
| import json
import bitjws
def test_encode_decode():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key)
header, payload = bitjws.validate_deserialize(ser)
rawheader, rawpayload = ser.rsplit('.', 1)[0].split('.')
origheader = bitjws.base64url_decode(rawheader.encode('utf8'))
origpayload = bitjws.base64url_decode(rawpayload.encode('utf8'))
assert header == json.loads(origheader.decode('utf8'))
assert payload == json.loads(origpayload.decode('utf8'))
def test_audience():
key = bitjws.PrivateKey()
ser = bitjws.sign_serialize(key, requrl='https://example.com/api/login')
header, payload = bitjws.validate_deserialize(
ser, requrl='https://example.com/api/login')
assert header is not None
assert payload is not None
| mit | Python |
3059e2cf76e2e7bfb90c6c03afc5ee372294de94 | use with_setup instead of setUp/tearDown | chelmertz/spotifile,raoulh/spotifile,raoulh/spotifile,catharsis/spotifile,raoulh/spotifile,catharsis/spotifile,chelmertz/spotifile,chelmertz/spotifile,catharsis/spotifile | tests/test_spotifile.py | tests/test_spotifile.py | from nose import with_setup
import os
from os import path
from subprocess import check_call
from sh import ls, cat
mountpoint = '/tmp/spotifile_test_mount'
def fs_mount():
if not path.exists(mountpoint):
os.mkdir(mountpoint)
check_call(['./spotifile', mountpoint])
def fs_unmount():
check_call(['fusermount', '-u', mountpoint])
if path.exists(mountpoint):
os.rmdir(mountpoint)
@with_setup(fs_mount, fs_unmount)
def test_ls():
assert 'connection' in ls(mountpoint)
@with_setup(fs_mount, fs_unmount)
def test_cat_connection():
assert 'logged in' in cat(path.join(mountpoint, 'connection'))
| import unittest
import os
from subprocess import check_call
from sh import ls
mountpoint = '/tmp/spotifile_test_mount'
class SpotifileTestClass(unittest.TestCase):
@classmethod
def setUpClass(cls):
if not os.path.exists(mountpoint):
os.mkdir(mountpoint)
@classmethod
def tearDownClass(cls):
if os.path.exists(mountpoint):
os.rmdir(mountpoint)
def setUp(self):
check_call(['./spotifile', mountpoint])
def tearDown(self):
check_call(['fusermount', '-u', mountpoint])
def test_ls(self):
assert 'connection' in ls(mountpoint)
| bsd-3-clause | Python |
7fb5b04bb4054f60cefc79efabcef07979628285 | add directory encoding test in test_conf | zestyr/lbry,zestyr/lbry,lbryio/lbry,zestyr/lbry,lbryio/lbry,lbryio/lbry | tests/unit/test_conf.py | tests/unit/test_conf.py | import os
from twisted.trial import unittest
from lbrynet import conf
class SettingsTest(unittest.TestCase):
def setUp(self):
os.environ['LBRY_TEST'] = 'test_string'
def tearDown(self):
del os.environ['LBRY_TEST']
@staticmethod
def get_mock_config_instance():
settings = {'test': (str, '')}
env = conf.Env(**settings)
return conf.Config({}, settings, environment=env)
def test_envvar_is_read(self):
settings = self.get_mock_config_instance()
self.assertEqual('test_string', settings['test'])
def test_setting_can_be_overridden(self):
settings = self.get_mock_config_instance()
settings['test'] = 'my_override'
self.assertEqual('my_override', settings['test'])
def test_setting_can_be_updated(self):
settings = self.get_mock_config_instance()
settings.update({'test': 'my_update'})
self.assertEqual('my_update', settings['test'])
def test_setting_is_in_dict(self):
settings = self.get_mock_config_instance()
setting_dict = settings.get_current_settings_dict()
self.assertEqual({'test': 'test_string'}, setting_dict)
def test_invalid_setting_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertRaises(AssertionError, settings.set, 'invalid_name', 123)
def test_invalid_data_type_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertIsNone(settings.set('test', 123))
self.assertRaises(AssertionError, settings.set, 'test', 123, ('fake_data_type',))
def test_setting_precedence(self):
settings = self.get_mock_config_instance()
settings.set('test', 'cli_test_string', data_types=(conf.TYPE_CLI,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'this_should_not_take_precedence', data_types=(conf.TYPE_ENV,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
self.assertEqual('runtime_takes_precedence', settings['test'])
def test_data_dir(self):
# check if these directories are returned as string and not unicode
# otherwise there will be problems when calling os.path.join on
# unicode directory names with string file names
self.assertEqual(str, type(conf.default_download_directory))
self.assertEqual(str, type(conf.default_data_dir))
self.assertEqual(str, type(conf.default_lbryum_dir))
| import os
from twisted.trial import unittest
from lbrynet import conf
class SettingsTest(unittest.TestCase):
def setUp(self):
os.environ['LBRY_TEST'] = 'test_string'
def tearDown(self):
del os.environ['LBRY_TEST']
@staticmethod
def get_mock_config_instance():
settings = {'test': (str, '')}
env = conf.Env(**settings)
return conf.Config({}, settings, environment=env)
def test_envvar_is_read(self):
settings = self.get_mock_config_instance()
self.assertEqual('test_string', settings['test'])
def test_setting_can_be_overridden(self):
settings = self.get_mock_config_instance()
settings['test'] = 'my_override'
self.assertEqual('my_override', settings['test'])
def test_setting_can_be_updated(self):
settings = self.get_mock_config_instance()
settings.update({'test': 'my_update'})
self.assertEqual('my_update', settings['test'])
def test_setting_is_in_dict(self):
settings = self.get_mock_config_instance()
setting_dict = settings.get_current_settings_dict()
self.assertEqual({'test': 'test_string'}, setting_dict)
def test_invalid_setting_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertRaises(AssertionError, settings.set, 'invalid_name', 123)
def test_invalid_data_type_raises_exception(self):
settings = self.get_mock_config_instance()
self.assertIsNone(settings.set('test', 123))
self.assertRaises(AssertionError, settings.set, 'test', 123, ('fake_data_type',))
def test_setting_precedence(self):
settings = self.get_mock_config_instance()
settings.set('test', 'cli_test_string', data_types=(conf.TYPE_CLI,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'this_should_not_take_precedence', data_types=(conf.TYPE_ENV,))
self.assertEqual('cli_test_string', settings['test'])
settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
self.assertEqual('runtime_takes_precedence', settings['test'])
| mit | Python |
c81393a8de27595f61cffc09fa6fa8352bb54b9c | Return a random set of factors | CubicComet/exercism-python-solutions | palindrome-products/palindrome_products.py | palindrome-products/palindrome_products.py | import random
from collections import defaultdict
def largest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, max)
def smallest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, min)
def _palindromes(max_factor, min_factor, minmax):
pals = defaultdict(set)
for i in range(min_factor, max_factor+1):
for j in range(min_factor, max_factor+1):
p = i * j
if is_palindrome(p):
pals[p].add(tuple(sorted([i,j])))
value = minmax(pals)
factors = random.choice(list(pals[value]))
return (value, factors)
def is_palindrome(n):
return str(n) == str(n)[::-1]
| from collections import defaultdict
def largest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, max)
def smallest_palindrome(max_factor, min_factor=0):
return _palindromes(max_factor, min_factor, min)
def _palindromes(max_factor, min_factor, minmax):
pals = defaultdict(set)
for i in range(min_factor, max_factor+1):
for j in range(min_factor, max_factor+1):
p = i * j
if is_palindrome(p):
pals[p].add(tuple(sorted([i,j])))
value = minmax(pals)
factors = pals[value]
return (value, factors)
def is_palindrome(n):
return str(n) == str(n)[::-1]
| agpl-3.0 | Python |
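A side note on the double loop in the row above: since `i * j == j * i` and the factor pair is stored sorted, starting the inner loop at `i` instead of `min_factor` visits each unordered pair exactly once and roughly halves the work. A minimal sketch of that variant, for illustration only — it is not part of the committed exercise solution:

```python
from collections import defaultdict

def _palindromes_half(max_factor, min_factor, minmax):
    # Semantics match _palindromes above; the inner loop starts at i,
    # so each unordered factor pair (i, j) is generated exactly once.
    pals = defaultdict(set)
    for i in range(min_factor, max_factor + 1):
        for j in range(i, max_factor + 1):
            p = i * j
            if str(p) == str(p)[::-1]:
                pals[p].add((i, j))
    value = minmax(pals)
    return (value, pals[value])
```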
45c1446779cbce050573264101b1afe3d7fe42b4 | Update BaseSearchCommand | yunojuno/elasticsearch-django | elasticsearch_django/management/commands/__init__.py | elasticsearch_django/management/commands/__init__.py | # -*- coding: utf-8 -*-
"""Base command for search-related management commands."""
import logging
from django.core.management.base import BaseCommand
from elasticsearch.exceptions import TransportError
logger = logging.getLogger(__name__)
class BaseSearchCommand(BaseCommand):
"""Base class for commands that interact with the search index."""
description = "Base search command."
def add_arguments(self, parser):
"""Add default base options of --noinput and indexes."""
parser.add_argument(
'--noinput',
action='store_false',
dest='interactive',
default=True,
help='Do no display user prompts - may affect data.'
)
parser.add_argument(
'indexes',
nargs='*',
help="Names of indexes on which to run the command."
)
def do_index_command(self, index, interactive):
"""Run a command against a named index."""
raise NotImplementedError()
def handle(self, *args, **options):
"""Run do_index_command on each specified index and log the output."""
for index in options.pop('indexes'):
data = {}
try:
data = self.do_index_command(index, **options)
except TransportError as ex:
logger.warn("ElasticSearch threw an error: %s", ex)
data = {
"index": index,
"status": ex.status_code,
"reason": ex.info['error']['reason']
}
finally:
logger.info(data)
| # -*- coding: utf-8 -*-
"""Base command for search-related management commands."""
import logging
from django.core.management.base import BaseCommand
from elasticsearch.exceptions import TransportError
logger = logging.getLogger(__name__)
class BaseSearchCommand(BaseCommand):
"""Base class for commands that interact with the search index."""
description = "Base search command."
def add_arguments(self, parser):
"""Add default base options of --noinput and indexes."""
parser.add_argument(
'--noinput',
action='store_false',
dest='interactive',
default=True,
help='Do no display user prompts - may affect data.'
)
parser.add_argument(
'indexes',
nargs='*',
help="Names of indexes on which to run the command."
)
def do_index_command(self, index, interactive):
"""Run a command against a named index."""
raise NotImplementedError()
def handle(self, *args, **options):
"""Run do_index_command on each specified index and log the output."""
for index in options.pop('indexes'):
data = {}
try:
print "calling do_index_command", index, options
data = self.do_index_command(index, **options)
except TransportError as ex:
logger.warn("ElasticSearch threw an error: %s", ex)
data = {
"index": index,
"status": ex.status_code,
"reason": ex.info['error']['reason']
}
finally:
logger.info(data)
| mit | Python |
15a32b91b36c9deba5a4fc1d8c843a5e044b62c3 | remove unnecessary comments and print statements | datavisyn/tdp_core,datavisyn/tdp_core,datavisyn/tdp_core,datavisyn/tdp_core | tdp_core/mapping_table.py | tdp_core/mapping_table.py | import logging
from . import db
import itertools
_log = logging.getLogger(__name__)
class SQLMappingTable(object):
def __init__(self, mapping, engine):
self.from_idtype = mapping.from_idtype
self.to_idtype = mapping.to_idtype
self._engine = engine
self._query = mapping.query
self._integer_ids = mapping.integer_ids
def __call__(self, ids):
# ensure strings
if self._integer_ids: # convert to integer ids
ids = [int(i) for i in ids]
with db.session(self._engine) as session:
mapped = session.execute(self._query, ids=ids)
# handle multi mappings
data = sorted(mapped, key=lambda x: x['f'])
grouped = {k: [r['t'] for r in g] for k, g in itertools.groupby(data, lambda x: x['f'])}
return [grouped.get(id, []) for id in ids]
def _discover_mappings():
for k, connector in db.configs.connectors.items():
if not connector.mappings:
continue
engine = db.configs.engine(k)
for mapping in connector.mappings:
_log.info('registering %s to %s', mapping.from_idtype, mapping.to_idtype)
yield SQLMappingTable(mapping, engine)
class SQLMappingProvider(object):
def __init__(self):
self._mappings = list(_discover_mappings())
def __iter__(self):
return iter(((f.from_idtype, f.to_idtype, f) for f in self._mappings))
def create():
return SQLMappingProvider()
| import logging
from . import db
import itertools
_log = logging.getLogger(__name__)
class SQLMappingTable(object):
def __init__(self, mapping, engine):
self.from_idtype = mapping.from_idtype
self.to_idtype = mapping.to_idtype
self._engine = engine
self._query = mapping.query
self._integer_ids = mapping.integer_ids
def __call__(self, ids):
# ensure strings
print(type(ids))
# ids = [i.decode('utf-8') for i in ids if not isinstance(i, int)]
ids = [i for i in ids]
if self._integer_ids: # convert to integer ids
ids = [int(i) for i in ids]
with db.session(self._engine) as session:
mapped = session.execute(self._query, ids=ids)
# handle multi mappings
data = sorted(mapped, key=lambda x: x['f'])
grouped = {k: [r['t'] for r in g] for k, g in itertools.groupby(data, lambda x: x['f'])}
return [grouped.get(id, []) for id in ids]
def _discover_mappings():
for k, connector in db.configs.connectors.items():
if not connector.mappings:
continue
engine = db.configs.engine(k)
for mapping in connector.mappings:
_log.info('registering %s to %s', mapping.from_idtype, mapping.to_idtype)
yield SQLMappingTable(mapping, engine)
class SQLMappingProvider(object):
def __init__(self):
self._mappings = list(_discover_mappings())
def __iter__(self):
return iter(((f.from_idtype, f.to_idtype, f) for f in self._mappings))
def create():
return SQLMappingProvider()
| bsd-3-clause | Python |
f14c5c9e4a3c7d196421ce3d60ec64fdee4749dd | make arguments consistent | kuchenrolle/redditquery | src/redditquery/parse.py | src/redditquery/parse.py | #!/usr/bin/python3
import os
import argparse
def parser():
"""Parses arguments from comman line using argparse.
Parameters"""
# default directory for reddit files
default_directory = os.path.join(os.getcwd(), "data")
parser = argparse.ArgumentParser()
# obligatory
parser.add_argument("mode", type = int, help = "execution mode: 1 build index, 2: query using existing index, 3 build and query")
# conditionally obligatory
parser.add_argument("--first", "-f", type = str, help = "first year/month")
parser.add_argument("--last", "-l", type = str, help = "last year/month")
# optional with defaults
parser.add_argument("--dir", "-d", type = str, nargs = "?", default = default_directory, help = "directory for data storage")
parser.add_argument("--num", "-n", type = int, nargs = "?", default = 10, help = "number of results per query")
parser.add_argument("--cores", "-c", type = int, nargs = "?", default = 1, help = "number of cores to use")
parser.add_argument("--minfreq", "-m", type = int, nargs = "?", default = 5, help = "minimum term frequency")
parser.add_argument("--progress", "-p", action = "store_true", help = "report progress")
return parser | #!/usr/bin/python3
import os
import argparse
def parser():
"""Parses arguments from comman line using argparse.
Parameters"""
# default directory for reddit files
default_directory = os.path.join(os.getcwd(), "data")
parser = argparse.ArgumentParser()
# obligatory
parser.add_argument("mode", type = int, help = "execution mode: 1 build index, 2: query using existing index, 3 build and query")
# conditionally obligatory
parser.add_argument("-f", "--first", type = str, help = "first year/month")
parser.add_argument("-l", "--last", type = str, help = "last year/month")
# optional with defaults
parser.add_argument("--dir", "-d", type = str, nargs = "?", default = default_directory, help = "directory for data storage")
parser.add_argument("--num", "-n", type = int, nargs = "?", default = 10, help = "number of results per query")
parser.add_argument("--cores", "-c", type = int, nargs = "?", default = 1, help = "number of cores to use")
parser.add_argument("--minfreq", "-m", type = int, nargs = "?", default = 5, help = "minimum term frequency")
parser.add_argument("--progress", "-p", action = "store_true", help = "report progress")
return parser | mit | Python |
2797797497f4f5ad606764815b334321732bef3b | Rename fibonacci() to fibonacci_recur() | bowen0701/algorithms_data_structures | alg_fibonacci.py | alg_fibonacci.py | """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def fibonacci_recur(n):
"""Get nth number of Fibonacci series by recursion."""
if n <= 1:
return n
else:
return fibonacci_recur(n - 1) + fibonacci_recur(n - 2)
def main():
import time
n = 13
start_time = time.time()
print('{}th number of Fibonacci series by recursion: {}'
.format(n, fibonacci_recur(n)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
| """Fibonacci series:
0, 1, 1, 2, 3, 5, 8,...
- Fib(0) = 0
- Fib(1) = 1
- Fib(n) = Fib(n - 1) + Fib(n - 2)
"""
from __future__ import print_function
def fibonacci(n):
"""Get nth number of Fibonacci series by recursion."""
if n == 0:
return 0
elif n == 1 or n == 2:
return 1
else:
return fibonacci(n - 1) + fibonacci(n - 2)
def main():
import time
n = 13
print('{}th number of Fibonacci series: {}'
.format(n, fibonacci(n)))
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
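The docstring in the row above states the recurrence Fib(n) = Fib(n - 1) + Fib(n - 2), which the naive recursion re-evaluates exponentially many times. A memoized variant — a minimal sketch for illustration, not code from the repository above — computes the same series in linear time:

```python
from functools import lru_cache

@lru_cache(maxsize=None)
def fibonacci_memo(n):
    # Same recurrence as above, but each subproblem is computed only once.
    if n <= 1:
        return n
    return fibonacci_memo(n - 1) + fibonacci_memo(n - 2)

# First seven terms match the series in the docstring: 0, 1, 1, 2, 3, 5, 8
assert [fibonacci_memo(i) for i in range(7)] == [0, 1, 1, 2, 3, 5, 8]
```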
ebbcce590483a5970268db0c59bae0cec81648ad | Add example commands for the User Preferences api | ColdrickSotK/storyboard,ColdrickSotK/storyboard,ColdrickSotK/storyboard | storyboard/api/v1/user_preferences.py | storyboard/api/v1/user_preferences.py | # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from pecan import abort
from pecan import request
from pecan import rest
from pecan.secure import secure
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from storyboard.api.auth import authorization_checks as checks
from storyboard.api.v1 import validations
from storyboard.common import decorators
import storyboard.db.api.users as user_api
from storyboard.openstack.common.gettextutils import _ # noqa
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class UserPreferencesController(rest.RestController):
validation_post_schema = validations.USER_PREFERENCES_POST_SCHEMA
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int)
def get_all(self, user_id):
"""Return all preferences for the current user.
Example::
curl https://my.example.org/api/v1/users/21/preferences \\
-H 'Authorization: Bearer MY_ACCESS_TOKEN'
:param user_id: An ID of the user.
"""
if request.current_user_id != user_id:
abort(403, _("You can't read preferences of other users."))
return
return user_api.user_get_preferences(user_id)
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int,
body=wtypes.DictType(wtypes.text, wtypes.text))
def post(self, user_id, body):
"""Allow a user to update their preferences. Note that a user must
explicitly set a preference value to Null/None to have it deleted.
Example::
curl https://my.example.org/api/v1/users/21/preferences \\
-H 'Authorization: Bearer MY_ACCESS_TOKEN' \\
-H 'Content-Type: application/json;charset=UTF-8' \\
--data-binary '{"display_events_tags_added":"false"}'
:param user_id: The ID of the user whose preferences we're updating.
:param body: A dictionary of preference values.
"""
if request.current_user_id != user_id:
abort(403, _("You can't change preferences of other users."))
return user_api.user_update_preferences(user_id, body)
| # Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_config import cfg
from oslo_log import log
from pecan import abort
from pecan import request
from pecan import rest
from pecan.secure import secure
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from storyboard.api.auth import authorization_checks as checks
from storyboard.api.v1 import validations
from storyboard.common import decorators
import storyboard.db.api.users as user_api
from storyboard.openstack.common.gettextutils import _ # noqa
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class UserPreferencesController(rest.RestController):
validation_post_schema = validations.USER_PREFERENCES_POST_SCHEMA
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int)
def get_all(self, user_id):
"""Return all preferences for the current user.
:param user_id: An ID of the user.
"""
if request.current_user_id != user_id:
abort(403, _("You can't read preferences of other users."))
return
return user_api.user_get_preferences(user_id)
@decorators.db_exceptions
@secure(checks.authenticated)
@wsme_pecan.wsexpose(wtypes.DictType(wtypes.text, wtypes.text), int,
body=wtypes.DictType(wtypes.text, wtypes.text))
def post(self, user_id, body):
"""Allow a user to update their preferences. Note that a user must
explicitly set a preference value to Null/None to have it deleted.
:param user_id: The ID of the user whose preferences we're updating.
:param body: A dictionary of preference values.
"""
if request.current_user_id != user_id:
abort(403, _("You can't change preferences of other users."))
return user_api.user_update_preferences(user_id, body)
| apache-2.0 | Python |
f0ab4ecbc2e385dd69d644b6f8e4e41cdaa48423 | Add note. | christabor/MoAL,christabor/MoAL,christabor/MoAL,christabor/MoAL,christabor/MoAL | software_engineering/problem_solving/design_patterns/grasp/pattern_pure_fabrication.py | software_engineering/problem_solving/design_patterns/grasp/pattern_pure_fabrication.py | # -*- coding: utf-8 -*-
__author__ = """Chris Tabor ([email protected])"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from uuid import uuid1
from random import choice
from random import randrange as rr
DEBUG = True if __name__ == '__main__' else False
class MonitorDB:
def __init__(self):
self.store = {}
def __setitem__(self, id, data):
self.store[id] = data
class GridMonitorService:
"""This monitor service acts as an intermediary for handling db and object
related functionality, and can be used to continually add more utilities
that are related to the single entity, but that shouldn't be stored
directly on it.
It can be though of as a service-like layer of indirection:
entity <------> entity_service <------> data-store
"""
def __init__(self, grid):
self.data = MonitorDB()
self.grid = grid
def check_status(self):
for id, light in self.grid.lights.iteritems():
print('Light #{} is currently: {} @ x:{} y:{} z:{}'.format(
id, light.status(), *light.coords))
class Monitor:
def on(self):
self.on = True
def off(self):
self.off = False
def status(self):
return 'ON' if self.on else 'OFF'
class LightMonitor(Monitor):
def __init__(self, coords):
self.coords = coords
# For fun
self.on = choice([True, False])
class LightGrid:
def __init__(self):
self.lights = {}
def __setitem__(self, id, coords):
self.lights[id] = LightMonitor(coords)
if DEBUG:
with Section('GRASP pure fabrication pattern'):
grid = LightGrid()
gridmon = GridMonitorService(grid)
for _ in xrange(10):
grid[uuid1()] = (rr(0, 1000), rr(0, 1000), rr(0, 1000))
gridmon.check_status()
| # -*- coding: utf-8 -*-
__author__ = """Chris Tabor ([email protected])"""
if __name__ == '__main__':
from os import getcwd
from os import sys
sys.path.append(getcwd())
from helpers.display import Section
from uuid import uuid1
from random import choice
from random import randrange as rr
DEBUG = True if __name__ == '__main__' else False
class MonitorDB:
def __init__(self):
self.store = {}
def __setitem__(self, id, data):
self.store[id] = data
class GridMonitorService:
"""This monitor service acts as an intermediary for handling db and object
related functionality, and can be used to continually add more utilities
that are related to the single entity, but that shouldn't be stored
directly on it."""
def __init__(self, grid):
self.data = MonitorDB()
self.grid = grid
def check_status(self):
for id, light in self.grid.lights.iteritems():
print('Light #{} is currently: {} @ x:{} y:{} z:{}'.format(
id, light.status(), *light.coords))
class Monitor:
def on(self):
self.on = True
def off(self):
self.off = False
def status(self):
return 'ON' if self.on else 'OFF'
class LightMonitor(Monitor):
def __init__(self, coords):
self.coords = coords
# For fun
self.on = choice([True, False])
class LightGrid:
def __init__(self):
self.lights = {}
def __setitem__(self, id, coords):
self.lights[id] = LightMonitor(coords)
if DEBUG:
with Section('GRASP pure fabrication pattern'):
grid = LightGrid()
gridmon = GridMonitorService(grid)
for _ in xrange(10):
grid[uuid1()] = (rr(0, 1000), rr(0, 1000), rr(0, 1000))
gridmon.check_status()
| apache-2.0 | Python |
aa278487b4e65da413a217729b852a9c08a090cf | create function headers and change request structure | pagarme/pagarme-python | pagarme/resources/handler_request.py | pagarme/resources/handler_request.py | import requests
TEMPORARY_COMPANY = 'https://api.pagar.me/1/companies/temporary'
def validate_response(pagarme_response):
if pagarme_response.status_code == 200:
return pagarme_response.json()
else:
return error(pagarme_response.json())
def create_temporary_company():
company = requests.post(TEMPORARY_COMPANY)
valid_company = validate_response(company)
return valid_company
KEYS = {}
def authentication_key(api_key=None):
global KEYS
if api_key is None:
company = create_temporary_company()
api_key = company['api_key']['test']
encryption_key = company['encryption_key']['test']
KEYS['api_key'] = api_key
KEYS['encryption_key'] = encryption_key
return KEYS
else:
KEYS['api_key'] = api_key
return KEYS
def post(end_point, data={}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.post(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def get(end_point, data = {}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.get(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def put(end_point, data = {}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.put(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def delete(end_point, data = {}):
data['api_key'] = KEYS['api_key']
pagarme_response = requests.delete(end_point, json=data, headers=headers())
return validate_response(pagarme_response)
def error(data):
erros = data['errors']
return erros
def headers():
_headers = {'content-type': 'application/json'}
return _headers
| import requests
import json
TEMPORARY_COMPANY = 'https://api.pagar.me/1/companies/temporary'
def validate_response(pagarme_response):
if pagarme_response.status_code == 200:
return pagarme_response.json()
else:
return error(pagarme_response.json())
def create_temporary_company():
company = requests.post(TEMPORARY_COMPANY)
valid_company = validate_response(company)
return valid_company
KEYS = {}
def authentication_key(api_key=None):
global KEYS
if api_key is None:
company = create_temporary_company()
api_key = company['api_key']['test']
encryption_key = company['encryption_key']['test']
KEYS['api_key'] = api_key
KEYS['encryption_key'] = encryption_key
return KEYS
else:
KEYS['api_key'] = api_key
return KEYS
def post(end_point, data={}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.post(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def get(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.get(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def put(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.put(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def delete(end_point, data = {}):
data['api_key'] = KEYS['api_key']
headers = {'content-type': 'application/json'}
pagarme_response = requests.delete(end_point, data=json.dumps(data), headers=headers)
return validate_response(pagarme_response)
def error(data):
erros = data['errors']
return erros
| mit | Python |
c838bee36ac1e68afd5f00630b98f806289f89c8 | Update fetch_metrics.py | GoogleCloudPlatform/gcsfuse,GoogleCloudPlatform/gcsfuse,GoogleCloudPlatform/gcsfuse | perfmetrics/scripts/fetch_metrics.py | perfmetrics/scripts/fetch_metrics.py | """Executes fio_metrics.py and vm_metrics.py by passing appropriate arguments.
"""
import socket
import sys
import time
from fio import fio_metrics
from vm_metrics import vm_metrics
from gsheet import gsheet
INSTANCE = socket.gethostname()
PERIOD_SEC = 120
# Google sheet worksheets
FIO_WORKSHEET_NAME = 'fio_metrics'
VM_WORKSHEET_NAME = 'vm_metrics'
if __name__ == '__main__':
argv = sys.argv
if len(argv) != 2:
raise TypeError('Incorrect number of arguments.\n'
'Usage: '
'python3 fetch_metrics.py <fio output json filepath>')
fio_metrics_obj = fio_metrics.FioMetrics()
print('Getting fio metrics...')
temp = fio_metrics_obj.get_metrics(argv[1], FIO_WORKSHEET_NAME)
print('Waiting for 250 seconds for metrics to be updated on VM...')
# It takes up to 240 seconds for sampled data to be visible on the VM metrics graph
# So, waiting for 250 seconds to ensure the returned metrics are not empty
time.sleep(250)
vm_metrics_obj = vm_metrics.VmMetrics()
vm_metrics_data = []
# Getting VM metrics for every job
for ind, job in enumerate(temp):
start_time_sec = job[fio_metrics.consts.START_TIME]
end_time_sec = job[fio_metrics.consts.END_TIME]
rw = job[fio_metrics.consts.PARAMS][fio_metrics.consts.RW]
print(f'Getting VM metrics for job at index {ind+1}...')
metrics_data = vm_metrics_obj.fetch_metrics(start_time_sec, end_time_sec, INSTANCE, PERIOD_SEC, rw)
for row in metrics_data:
vm_metrics_data.append(row)
gsheet.write_to_google_sheet(VM_WORKSHEET_NAME, vm_metrics_data)
| """Executes fio_metrics.py and vm_metrics.py by passing appropriate arguments.
"""
import socket
import sys
import time
from fio import fio_metrics
from vm_metrics import vm_metrics
from gsheet import gsheet
INSTANCE = socket.gethostname()
PERIOD = 120
# Google sheet worksheets
FIO_WORKSHEET_NAME = 'fio_metrics'
VM_WORKSHEET_NAME = 'vm_metrics'
if __name__ == '__main__':
argv = sys.argv
if len(argv) != 2:
raise TypeError('Incorrect number of arguments.\n'
'Usage: '
'python3 fetch_metrics.py <fio output json filepath>')
fio_metrics_obj = fio_metrics.FioMetrics()
print('Getting fio metrics...')
temp = fio_metrics_obj.get_metrics(argv[1], FIO_WORKSHEET_NAME)
print('Waiting for 250 seconds for metrics to be updated on VM...')
# It takes up to 240 seconds for sampled data to be visible on the VM metrics graph
# So, waiting for 250 seconds to ensure the returned metrics are not empty
time.sleep(250)
vm_metrics_obj = vm_metrics.VmMetrics()
vm_metrics_data = []
# Getting VM metrics for every job
for ind, job in enumerate(temp):
start_time_sec = job[fio_metrics.consts.START_TIME]
end_time_sec = job[fio_metrics.consts.END_TIME]
rw = job[fio_metrics.consts.PARAMS][fio_metrics.consts.RW]
print(f'Getting VM metrics for job at index {ind+1}...')
metrics_data = vm_metrics_obj.fetch_metrics(start_time_sec, end_time_sec, INSTANCE, PERIOD, rw)
for row in metrics_data:
vm_metrics_data.append(row)
gsheet.write_to_google_sheet(VM_WORKSHEET_NAME, vm_metrics_data)
| apache-2.0 | Python |
597ea6bd20c9c1dbca46891d8c2aa12c625da555 | Fix unit tests | nettorta/yandex-tank,asekretenko/yandex-tank,yandex/yandex-tank,netortik/yandex-tank,direvius/yandex-tank,nnugumanov/yandex-tank,f2nd/yandex-tank,fomars/yandex-tank,direvius/yandex-tank,yandex/yandex-tank,nnugumanov/yandex-tank,asekretenko/yandex-tank,netortik/yandex-tank,nettorta/yandex-tank,ei-grad/yandex-tank,f2nd/yandex-tank,fomars/yandex-tank,ei-grad/yandex-tank | Tests/ConsoleWorkerTest.py | Tests/ConsoleWorkerTest.py | from Tank.ConsoleWorker import ConsoleTank
from Tank.Plugins.ConsoleOnline import ConsoleOnlinePlugin
from Tank.Plugins.DataUploader import DataUploaderPlugin
from Tests.ConsoleOnlinePluginTest import FakeConsoleMarkup
from Tests.DataUploaderTest import FakeAPICLient
from Tests.TankTests import FakeOptions
import TankTests
import logging
import time
import unittest
class ConsoleWorkerTestCase(TankTests.TankTestCase):
def setUp(self):
opts = FakeOptions()
opts.no_rc = False
self.foo = ConsoleTank(opts, None)
self.foo.set_baseconfigs_dir('full')
def tearDown(self):
del self.foo
self.foo = None
def test_perform(self):
self.foo.configure()
uploader = self.foo.core.get_plugin_of_type(DataUploaderPlugin)
uploader.api_client = FakeAPICLient()
uploader.api_client.get_results.append('[{"closed":"", "name": "test task"}]')
uploader.api_client.get_results.append('[{"success":1}]')
uploader.api_client.post_results.append('[{"job":' + str(time.time()) + '}]')
for n in range(1, 120):
uploader.api_client.post_results.append('[{"success":1}]')
console = self.foo.core.get_plugin_of_type(ConsoleOnlinePlugin)
console.console_markup = FakeConsoleMarkup()
if self.foo.perform_test() != 0:
raise RuntimeError()
def test_option_override(self):
options = FakeOptions()
options.config = ["config/old-style.conf"]
options.option = ["owner.address=overridden"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
res = self.foo.core.get_option("owner", "address")
logging.debug(res)
self.assertEquals("overridden", res)
def test_option_old_convert(self):
options = FakeOptions()
options.config = ["data/old_to_migrate.conf"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
if __name__ == '__main__':
unittest.main()
| import TankTests
import os
import unittest
from Tank.ConsoleWorker import ConsoleTank
from Tests.TankTests import FakeOptions
from Tank.Plugins.DataUploader import DataUploaderPlugin
from Tests.DataUploaderTest import FakeAPICLient
from Tank.Plugins.ConsoleOnline import ConsoleOnlinePlugin
from Tests.ConsoleOnlinePluginTest import FakeConsoleMarkup
import time
import logging
class ConsoleWorkerTestCase(TankTests.TankTestCase):
def setUp(self):
self.foo = ConsoleTank(FakeOptions(), None)
self.foo.set_baseconfigs_dir('full')
def tearDown(self):
del self.foo
self.foo = None
def test_perform(self):
self.foo.configure()
uploader = self.foo.core.get_plugin_of_type(DataUploaderPlugin)
uploader.api_client = FakeAPICLient()
uploader.api_client.get_results.append('[{"closed":"", "name": "test task"}]')
uploader.api_client.get_results.append('[{"success":1}]')
uploader.api_client.post_results.append('[{"job":' + str(time.time()) + '}]')
for n in range(1, 120):
uploader.api_client.post_results.append('[{"success":1}]')
console = self.foo.core.get_plugin_of_type(ConsoleOnlinePlugin)
console.console_markup = FakeConsoleMarkup()
if self.foo.perform_test() != 0:
raise RuntimeError()
def test_option_override(self):
options = FakeOptions()
options.config = ["config/old-style.conf"]
options.option = ["owner.address=overridden"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
res = self.foo.core.get_option("owner", "address")
logging.debug(res)
self.assertEquals("overridden", res)
def test_option_old_convert(self):
options = FakeOptions()
options.config = ["data/old_to_migrate.conf"]
self.foo = ConsoleTank(options, None)
self.foo.configure()
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
8a4d259df272a65f95bacf233dc8654c68f5f54f | add identity coordinate mapping to ToUint8 and ToFloat32 augmentors (#339) | ppwwyyxx/tensorpack,eyaler/tensorpack,eyaler/tensorpack,ppwwyyxx/tensorpack | tensorpack/dataflow/imgaug/convert.py | tensorpack/dataflow/imgaug/convert.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: convert.py

from .base import ImageAugmentor
from .meta import MapImage
import numpy as np
import cv2

__all__ = ['ColorSpace', 'Grayscale', 'ToUint8', 'ToFloat32']


class ColorSpace(ImageAugmentor):
    """ Convert into another colorspace. """

    def __init__(self, mode, keepdims=True):
        """
        Args:
            mode: opencv colorspace conversion code (e.g., `cv2.COLOR_BGR2HSV`)
            keepdims (bool): keep the dimension of image unchanged if opencv
                changes it.
        """
        self._init(locals())

    def _augment(self, img, _):
        transf = cv2.cvtColor(img, self.mode)
        if self.keepdims:
            if len(transf.shape) is not len(img.shape):
                transf = transf[..., None]
        return transf


class Grayscale(ColorSpace):
    """ Convert image to grayscale. """

    def __init__(self, keepdims=True, rgb=False):
        """
        Args:
            keepdims (bool): return image of shape [H, W, 1] instead of [H, W]
            rgb (bool): interpret input as RGB instead of the default BGR
        """
        mode = cv2.COLOR_RGB2GRAY if rgb else cv2.COLOR_BGR2GRAY
        super(Grayscale, self).__init__(mode, keepdims)


class ToUint8(MapImage):
    """ Convert image to uint8. Useful to reduce communication overhead. """
    def __init__(self):
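        # The second lambda passed to MapImage is the identity coordinate mapping
        # added by this commit: pixel values are converted, while point
        # coordinates pass through unchanged (same idea in ToFloat32 below).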
        super(ToUint8, self).__init__(lambda x: np.clip(x, 0, 255).astype(np.uint8), lambda x: x)


class ToFloat32(MapImage):
    """ Convert image to float32, may increase quality of the augmentor. """
    def __init__(self):
        super(ToFloat32, self).__init__(lambda x: x.astype(np.float32), lambda x: x)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: convert.py

from .base import ImageAugmentor
from .meta import MapImage
import numpy as np
import cv2

__all__ = ['ColorSpace', 'Grayscale', 'ToUint8', 'ToFloat32']


class ColorSpace(ImageAugmentor):
    """ Convert into another colorspace. """

    def __init__(self, mode, keepdims=True):
        """
        Args:
            mode: opencv colorspace conversion code (e.g., `cv2.COLOR_BGR2HSV`)
            keepdims (bool): keep the dimension of image unchanged if opencv
                changes it.
        """
        self._init(locals())

    def _augment(self, img, _):
        transf = cv2.cvtColor(img, self.mode)
        if self.keepdims:
            if len(transf.shape) is not len(img.shape):
                transf = transf[..., None]
        return transf


class Grayscale(ColorSpace):
    """ Convert image to grayscale. """

    def __init__(self, keepdims=True, rgb=False):
        """
        Args:
            keepdims (bool): return image of shape [H, W, 1] instead of [H, W]
            rgb (bool): interpret input as RGB instead of the default BGR
        """
        mode = cv2.COLOR_RGB2GRAY if rgb else cv2.COLOR_BGR2GRAY
        super(Grayscale, self).__init__(mode, keepdims)


class ToUint8(MapImage):
    """ Convert image to uint8. Useful to reduce communication overhead. """
    def __init__(self):
        super(ToUint8, self).__init__(lambda x: np.clip(x, 0, 255).astype(np.uint8))


class ToFloat32(MapImage):
    """ Convert image to float32, may increase quality of the augmentor. """
    def __init__(self):
        super(ToFloat32, self).__init__(lambda x: x.astype(np.float32))
| apache-2.0 | Python |
1e7a6b0fbbdb57053d3510b67c95c5d7e2fb6b81 | Enable displaying the accuracy graph | fukatani/CW_gui | floppy/report_widget.py | floppy/report_widget.py | from floppy.train_configuration import TrainParamServer
from PyQt5.QtWidgets import QWidget
from PyQt5.QtWidgets import QTabWidget
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QPainter
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QPoint


class ReportWidget(QTabWidget):
    def __init__(self, *args, **kwargs):
        super(ReportWidget, self).__init__(height=210, *args, **kwargs)
        self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
        ''')
        try:
            loss_image = TrainParamServer()['WorkDir'] + "result/loss.png"
        except KeyError:
            loss_image = "result/loss.png"
        self.addTab(GraphWidget(loss_image, parent=self), 'Loss')
        try:
            acc_image = TrainParamServer()['WorkDir'] + "result/accuracy.png"
        except KeyError:
            acc_image = "result/accuracy.png"
        self.addTab(GraphWidget(acc_image, parent=self), 'Accuracy')
        self.resize(200, 200)


class GraphWidget(QWidget):
    def __init__(self, image_file, *args, **kwargs):
        super(GraphWidget, self).__init__(height=200)
        self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
        ''')
        self.pixmap = None
        self.image_file = image_file

    def paintEvent(self, event):
        if 'Class' not in TrainParamServer()['TrainMode']:
            if 'accuracy' in self.image_file:
                return
        self.pixmap = QPixmap(self.image_file)
        #self.adjustSize()
        size = self.size()
        painter = QPainter(self)
        point = QPoint(0, 0)
        scaled_pix = self.pixmap.scaled(size, Qt.KeepAspectRatio,
                                        transformMode=Qt.SmoothTransformation)
        # start painting the label from left upper corner
        point.setX((size.width() - scaled_pix.width()) / 2)
        point.setY((size.height() - scaled_pix.height()) / 2)
        painter.drawPixmap(point, scaled_pix)
| from floppy.train_configuration import TrainParamServer

from PyQt5.QtWidgets import QWidget
from PyQt5.QtGui import QPixmap
from PyQt5.QtGui import QPainter
from PyQt5.QtCore import Qt
from PyQt5.QtCore import QPoint


class ReportWidget(QWidget):
    def __init__(self, *args, **kwargs):
        super(ReportWidget, self).__init__(height=200, *args, **kwargs)
        self.setStyleSheet('''ReportWidget{background: rgb(55,55,55)}
        ''')
        try:
            loss_image = TrainParamServer()['WorkDir'] + "result/loss.png"
        except KeyError:
            loss_image = "result/loss.png"
        self.pixmap = QPixmap(loss_image)
        self.resize(200, 200)

    def paintEvent(self, event):
        self.adjustSize()
        size = self.size()
        painter = QPainter(self)
        point = QPoint(0, 0)
        scaled_pix = self.pixmap.scaled(size, Qt.KeepAspectRatio,
                                        transformMode=Qt.SmoothTransformation)
        # start painting the label from left upper corner
        point.setX((size.width() - scaled_pix.width()) / 2)
        point.setY((size.height() - scaled_pix.height()) / 2)
        painter.drawPixmap(point, scaled_pix)
| bsd-3-clause | Python |
335abb7a4ddeabf9175b522d9336b94b7e32acc0 | Fix incorrect FAIL data. | zlargon/mosquitto,zlargon/mosquitto,zlargon/mosquitto,zlargon/mosquitto,zlargon/mosquitto | test/broker/01-connect-anon-denied.py | test/broker/01-connect-anon-denied.py | #!/usr/bin/python
# Test whether an anonymous connection is correctly denied.

import subprocess
import socket
import time
from struct import *

rc = 1
keepalive = 10
connect_packet = pack('!BBH6sBBHH17s', 16, 12+2+17,6,"MQIsdp",3,2,keepalive,17,"connect-anon-test")
connack_packet = pack('!BBBB', 32, 2, 0, 5);
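# CONNACK return code 5 is "connection refused: not authorised" -- the reply
# the broker is expected to send when anonymous connections are denied.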
broker = subprocess.Popen(['../../src/mosquitto', '-c', '01-connect-anon-denied.conf'], stderr=subprocess.PIPE)

try:
    time.sleep(0.1)

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", 1888))
    sock.send(connect_packet)
    connack_recvd = sock.recv(256)
    sock.close()

    if connack_recvd != connack_packet:
        (cmd, rl, resv, rc) = unpack('!BBBB', connack_recvd)
        print("FAIL: Expected 32,2,0,5 got " + str(cmd) + "," + str(rl) + "," + str(resv) + "," + str(rc))
    else:
        rc = 0
finally:
    broker.terminate()

exit(rc)
| #!/usr/bin/python

# Test whether an anonymous connection is correctly denied.

import subprocess
import socket
import time
from struct import *

rc = 1
keepalive = 10
connect_packet = pack('!BBH6sBBHH17s', 16, 12+2+17,6,"MQIsdp",3,2,keepalive,17,"connect-anon-test")
connack_packet = pack('!BBBB', 32, 2, 0, 5);

broker = subprocess.Popen(['../../src/mosquitto', '-c', '01-connect-anon-denied.conf'], stderr=subprocess.PIPE)

try:
    time.sleep(0.1)

    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect(("localhost", 1888))
    sock.send(connect_packet)
    connack_recvd = sock.recv(256)
    sock.close()

    if connack_recvd != connack_packet:
        (cmd, rl, resv, rc) = unpack('!BBBB', connack_recvd)
        print("FAIL: Expected 32,2,0,0 got " + str(cmd) + "," + str(rl) + "," + str(resv) + "," + str(rc))
    else:
        rc = 0
finally:
    broker.terminate()

exit(rc)
| bsd-3-clause | Python |
9de0a05d28c83742224c0e708e80b8add198a8a8 | Add user data export for comments | stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,fin/froide | froide/comments/apps.py | froide/comments/apps.py | import json
from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _


class CommentConfig(AppConfig):
    name = 'froide.comments'
    verbose_name = _('Comments')

    def ready(self):
        from froide.account import account_canceled
        from froide.account.export import registry

        account_canceled.connect(cancel_user)
        registry.register(export_user_data)


def cancel_user(sender, user=None, **kwargs):
    from .models import FroideComment

    if user is None:
        return
    FroideComment.objects.filter(user=user).update(
        user_name='',
        user_email='',
        user_url=''
    )


def export_user_data(user):
    from .models import FroideComment

    comments = FroideComment.objects.filter(user=user)
    if not comments:
        return
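    # Yield (filename, bytes) tuples describing the user's comment data export.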
    yield ('comments.json', json.dumps([
        {
            'submit_date': (
                c.submit_date.isoformat() if c.submit_date else None
            ),
            'comment': c.comment,
            'is_public': c.is_public,
            'is_removed': c.is_removed,
            'url': c.get_absolute_url(),
        }
        for c in comments]).encode('utf-8')
    )
| from django.apps import AppConfig
from django.utils.translation import ugettext_lazy as _


class CommentConfig(AppConfig):
    name = 'froide.comments'
    verbose_name = _('Comments')

    def ready(self):
        from froide.account import account_canceled

        account_canceled.connect(cancel_user)


def cancel_user(sender, user=None, **kwargs):
    from .models import FroideComment

    if user is None:
        return
    FroideComment.objects.filter(user=user).update(
        user_name='',
        user_email='',
        user_url=''
    )
| mit | Python |
4a201a37318d5eea1e50e0619580a23f37e2e7da | Fix path for boringssl | peeracle/libwebsockets,peeracle/libwebsockets,peeracle/libwebsockets,peeracle/libwebsockets,peeracle/libwebsockets | libwebsockets.gyp | libwebsockets.gyp | {
  'targets': [
    {
      'target_name': 'libwebsockets',
      'type': 'static_library',
      'standalone_static_library': 1,
      'sources': [
        'lib/base64-decode.c',
        'lib/handshake.c',
        'lib/libwebsockets.c',
        'lib/service.c',
        'lib/pollfd.c',
        'lib/output.c',
        'lib/parsers.c',
        'lib/context.c',
        'lib/sha-1.c',
        'lib/alloc.c',
        'lib/header.c',
        'lib/client.c',
        'lib/client-handshake.c',
        'lib/client-parser.c',
        'lib/ssl.c',
        'lib/server.c',
        'lib/server-handshake.c',
        'lib/extension.c',
        'lib/extension-deflate-frame.c',
        'lib/extension-deflate-stream.c',
      ],
      'dependencies': [
        '<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
        '<(peeracle_webrtc_root)/third_party/boringssl/boringssl.gyp:boringssl'
      ],
      'include_dirs': [
        'config/<(OS)/<(target_arch)',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          'config/<(OS)/<(target_arch)',
        ],
      },
      'conditions': [
        ['OS == "win"', {
          'sources': [
            'lib/lws-plat-win.c',
          ],
          'msvs_settings': {
            'VCCLCompilerTool': {
              'WarnAsError': 'true',
              'DisableSpecificWarnings': ['4018']
            }
          }
        }, {
          'sources': [
            'lib/lws-plat-unix.c',
          ],
        }],
      ],
    },
  ],
}
| {
  'targets': [
    {
      'target_name': 'libwebsockets',
      'type': 'static_library',
      'standalone_static_library': 1,
      'sources': [
        'lib/base64-decode.c',
        'lib/handshake.c',
        'lib/libwebsockets.c',
        'lib/service.c',
        'lib/pollfd.c',
        'lib/output.c',
        'lib/parsers.c',
        'lib/context.c',
        'lib/sha-1.c',
        'lib/alloc.c',
        'lib/header.c',
        'lib/client.c',
        'lib/client-handshake.c',
        'lib/client-parser.c',
        'lib/ssl.c',
        'lib/server.c',
        'lib/server-handshake.c',
        'lib/extension.c',
        'lib/extension-deflate-frame.c',
        'lib/extension-deflate-stream.c',
      ],
      'dependencies': [
        '<(DEPTH)/third_party/zlib/zlib.gyp:zlib',
        '<(peeracle_webrtc_root)/chromium/src/third_party/boringssl/boringssl.gyp:boringssl'
      ],
      'include_dirs': [
        'config/<(OS)/<(target_arch)',
      ],
      'direct_dependent_settings': {
        'include_dirs': [
          'config/<(OS)/<(target_arch)',
        ],
      },
      'conditions': [
        ['OS == "win"', {
          'sources': [
            'lib/lws-plat-win.c',
          ],
          'msvs_settings': {
            'VCCLCompilerTool': {
              'WarnAsError': 'true',
              'DisableSpecificWarnings': ['4018']
            }
          }
        }, {
          'sources': [
            'lib/lws-plat-unix.c',
          ],
        }],
      ],
    },
  ],
}
| lgpl-2.1 | Python |
eed4faf3bfe670421e7dc9c3065adbfceef0d2b6 | fix test for heapify | dnl-blkv/algorithms | linear_heapify.py | linear_heapify.py | # Building a heap in O(n) time and O(1) additional space. Inspired by https://www.youtube.com/watch?v=MiyLo8adrWw
def heapify(a):
    for i in range(len(a) // 2, -1, -1):
        parent = i
        while True:
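            # Sift down: find the largest of the node and its (existing) children;
            # stop when the node already dominates, otherwise swap and descend.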
            candidates = [parent, 2 * parent + 1, 2 * parent + 2]
            candidates = [e for e in candidates if e < len(a)]
            largest = max(candidates, key=lambda e: a[e])
            if largest == parent:
                break
            else:
                a[parent], a[largest], parent = a[largest], a[parent], largest


# Test
arr = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
heapify(arr)
print(arr)
| # Building a heap in O(n) time and O(1) additional space. Inspired by https://www.youtube.com/watch?v=MiyLo8adrWw
def heapify(a):
    for i in range(len(a) // 2, -1, -1):
        parent = i
        while True:
            candidates = [parent, 2 * parent + 1, 2 * parent + 2]
            candidates = [e for e in candidates if e < len(a)]
            largest = max(candidates, key=lambda e: a[e])
            if largest == parent:
                break
            else:
                a[parent], a[largest], parent = a[largest], a[parent], largest


# Test
arr = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]
print(heapify(arr))
| mit | Python |
b113cf82004b608b371d1a249801340f57195587 | add __str__. | ulule/django-linguist | linguist/cache.py | linguist/cache.py | # -*- coding: utf-8 -*-
from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class CachedTranslation(object):

    def __init__(self, **kwargs):
        from .models import Translation

        self.instances = ['instance', 'translation']
        self.fields = Translation._meta.get_all_field_names()
        self.fields.remove('id')

        attrs = self.fields + self.instances

        for attr in attrs:
            setattr(self, attr, None)

        self.__dict__.update(**kwargs)

        self.is_new = True

        if self.instance is not None:
            self.identifier = self.instance.linguist_identifier
            self.object_id = self.instance.pk

        if self.translation is not None:
            self.is_new = bool(self.translation.pk is None)
            for attr in ('language', 'field_name', 'field_value'):
                setattr(self, attr, getattr(self.translation, attr))

    @property
    def attrs(self):
        """
        Returns Translation attributes to pass as kwargs for creating or updating objects.
        """
        return dict((k, getattr(self, k)) for k in self.fields)

    @property
    def lookup(self):
        """
        Returns Translation lookup to use for filter method.
        """
        lookup = {'identifier': self.identifier,
                  'object_id': self.object_id}

        if self.language is not None:
            lookup['language'] = self.language

        return lookup

    @classmethod
    def from_object(cls, obj):
        """
        Updates values from the given object.
        """
        from .models import Translation

        fields = Translation._meta.get_all_field_names()
        fields.remove('id')

        return cls(**dict((field, getattr(obj, field)) for field in fields))

    def __str__(self):
        return '%s:%s:%s:%s' % (
            self.identifier,
            self.object_id,
            self.field_name,
            self.language)
| # -*- coding: utf-8 -*-


class CachedTranslation(object):

    def __init__(self, **kwargs):
        from .models import Translation

        self.instances = ['instance', 'translation']
        self.fields = Translation._meta.get_all_field_names()
        self.fields.remove('id')

        attrs = self.fields + self.instances

        for attr in attrs:
            setattr(self, attr, None)

        self.__dict__.update(**kwargs)

        self.is_new = True

        if self.instance is not None:
            self.identifier = self.instance.linguist_identifier
            self.object_id = self.instance.pk

        if self.translation is not None:
            self.is_new = bool(self.translation.pk is None)
            for attr in ('language', 'field_name', 'field_value'):
                setattr(self, attr, getattr(self.translation, attr))

    @property
    def attrs(self):
        """
        Returns Translation attributes to pass as kwargs for creating or updating objects.
        """
        return dict((k, getattr(self, k)) for k in self.fields)

    @property
    def lookup(self):
        """
        Returns Translation lookup to use for filter method.
        """
        lookup = {'identifier': self.identifier,
                  'object_id': self.object_id}

        if self.language is not None:
            lookup['language'] = self.language

        return lookup

    @classmethod
    def from_object(cls, obj):
        """
        Updates values from the given object.
        """
        from .models import Translation

        fields = Translation._meta.get_all_field_names()
        fields.remove('id')

        return cls(**dict((field, getattr(obj, field)) for field in fields))
| mit | Python |
b83576644d5aacd4ab841a03ff228796ff1b1b67 | change name | juschaef/purchase-workflow,juschaef/purchase-workflow,juschaef/purchase-workflow | coolTestly/__openerp__.py | coolTestly/__openerp__.py | {
"name": "coolTestly",
"author": "Ju",
"version": "1.0",
"category": "Generic Modules/Sales & Purchases",
"depends": [
"purchase",
],
"demo": [],
"data": [
],
"installable": True
}
| {
    "name": "testly",
    "author": "Ju",
    "version": "1.0",
    "category": "Generic Modules/Sales & Purchases",
    "depends": [
        "purchase",
    ],
    "demo": [],
    "data": [
    ],
    "installable": True
} | agpl-3.0 | Python
f844b95e7cb034985f1d284a41789d1e427b0f74 | Update env_detect.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | device/src/env_detect.py | device/src/env_detect.py | #!/usr/bin/env python
#Weather station.
#Detect environment information from several sensors:
#water level, air humidity, raining, air temperature, light intensity.
#Air temperature & humidity sensor: DHT11.
#Add dht.py in micropython/stmhal/modules, refer to esp8266
#Compile the DHT in firmware, then use DHT lib in application.
#Raining, same to soil moisture.
#Raining ? DO value: 0
from pyb import Pin
p_in = Pin('Y12', Pin.IN, Pin.PULL_UP)
p_in.value
adc = pyb.ADC(Pin('Y11')) # create an analog object from a pin
adc = pyb.ADC(pyb.Pin.board.Y11)
val = adc.read() # read an analog value
#-----------------------------------------#
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address ?)
from pyb import I2C
i2c = I2C(1) # create on bus 1
i2c = I2C(1, I2C.MASTER) # create and init as a master
i2c.init(I2C.MASTER, baudrate=20000) # init as a master
i2c.init(I2C.SLAVE, addr=0x23) # init as a slave with given address(GY-30 address is 0x23)
i2c.deinit() # turn off the peripheral
i2c.init(I2C.MASTER)
i2c.send('123', 0x23) # send 3 bytes to slave with address 0x23
i2c.send(b'456', addr=0x23) # keyword for address
| #!/usr/bin/env python
#Weather station.
#Detect environment information from several sensors:
#water level, air humidity, raining, air temperature, light intensity.
#Air temperature & humidity sensor: DHT11.
#Add dht.py in micropython/stmhal/modules, refer to esp8266
#Compile the DHT in firmware, then use DHT lib in application.
#Raining, same to soil moisture.
#Raining ? DO value: 0
from pyb import Pin
p_in = Pin('Y12', Pin.IN, Pin.PULL_UP)
p_in.value
adc = pyb.ADC(Pin('Y11')) # create an analog object from a pin
adc = pyb.ADC(pyb.Pin.board.Y11)
val = adc.read() # read an analog value
#-----------------------------------------#
#Light intensity sensor(GY-30) <--> I2C(1)
#SDA <--> X10
#SCL <--> X9
#VCC
#GND
#ADO(ADDR/address ?)
from pyb import I2C
i2c = I2C(1) # create on bus 1
i2c = I2C(1, I2C.MASTER) # create and init as a master
i2c.init(I2C.MASTER, baudrate=20000) # init as a master
i2c.init(I2C.SLAVE, addr=0x23) # init as a slave with given address(GY-30 address is 0x23)
i2c.deinit() # turn off the peripheral
i2c.init(I2C.MASTER)
| mit | Python |
ce948b49fbd4f3e8012b6d351bbf53db32172474 | fix repeated import of CascadeRPNHead (#8578) | open-mmlab/mmdetection,open-mmlab/mmdetection | mmdet/models/dense_heads/__init__.py | mmdet/models/dense_heads/__init__.py | # Copyright (c) OpenMMLab. All rights reserved.
from .anchor_free_head import AnchorFreeHead
from .anchor_head import AnchorHead
from .atss_head import ATSSHead
from .autoassign_head import AutoAssignHead
from .cascade_rpn_head import CascadeRPNHead, StageCascadeRPNHead
from .centernet_head import CenterNetHead
from .centripetal_head import CentripetalHead
from .corner_head import CornerHead
from .ddod_head import DDODHead
from .deformable_detr_head import DeformableDETRHead
from .detr_head import DETRHead
from .embedding_rpn_head import EmbeddingRPNHead
from .fcos_head import FCOSHead
from .fovea_head import FoveaHead
from .free_anchor_retina_head import FreeAnchorRetinaHead
from .fsaf_head import FSAFHead
from .ga_retina_head import GARetinaHead
from .ga_rpn_head import GARPNHead
from .gfl_head import GFLHead
from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead
from .lad_head import LADHead
from .ld_head import LDHead
from .mask2former_head import Mask2FormerHead
from .maskformer_head import MaskFormerHead
from .nasfcos_head import NASFCOSHead
from .paa_head import PAAHead
from .pisa_retinanet_head import PISARetinaHead
from .pisa_ssd_head import PISASSDHead
from .reppoints_head import RepPointsHead
from .retina_head import RetinaHead
from .retina_sepbn_head import RetinaSepBNHead
from .rpn_head import RPNHead
from .sabl_retina_head import SABLRetinaHead
from .solo_head import DecoupledSOLOHead, DecoupledSOLOLightHead, SOLOHead
from .solov2_head import SOLOV2Head
from .ssd_head import SSDHead
from .tood_head import TOODHead
from .vfnet_head import VFNetHead
from .yolact_head import YOLACTHead, YOLACTProtonet, YOLACTSegmHead
from .yolo_head import YOLOV3Head
from .yolof_head import YOLOFHead
from .yolox_head import YOLOXHead
__all__ = [
    'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption',
    'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead',
    'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead',
    'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead',
    'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead', 'YOLACTHead',
    'YOLACTSegmHead', 'YOLACTProtonet', 'YOLOV3Head', 'PAAHead',
    'SABLRetinaHead', 'CentripetalHead', 'VFNetHead', 'StageCascadeRPNHead',
    'CascadeRPNHead', 'EmbeddingRPNHead', 'LDHead', 'AutoAssignHead',
    'DETRHead', 'YOLOFHead', 'DeformableDETRHead', 'SOLOHead',
    'DecoupledSOLOHead', 'CenterNetHead', 'YOLOXHead',
    'DecoupledSOLOLightHead', 'LADHead', 'TOODHead', 'MaskFormerHead',
    'Mask2FormerHead', 'SOLOV2Head', 'DDODHead'
]
| # Copyright (c) OpenMMLab. All rights reserved.
from .anchor_free_head import AnchorFreeHead
from .anchor_head import AnchorHead
from .atss_head import ATSSHead
from .autoassign_head import AutoAssignHead
from .cascade_rpn_head import CascadeRPNHead, StageCascadeRPNHead
from .centernet_head import CenterNetHead
from .centripetal_head import CentripetalHead
from .corner_head import CornerHead
from .ddod_head import DDODHead
from .deformable_detr_head import DeformableDETRHead
from .detr_head import DETRHead
from .embedding_rpn_head import EmbeddingRPNHead
from .fcos_head import FCOSHead
from .fovea_head import FoveaHead
from .free_anchor_retina_head import FreeAnchorRetinaHead
from .fsaf_head import FSAFHead
from .ga_retina_head import GARetinaHead
from .ga_rpn_head import GARPNHead
from .gfl_head import GFLHead
from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead
from .lad_head import LADHead
from .ld_head import LDHead
from .mask2former_head import Mask2FormerHead
from .maskformer_head import MaskFormerHead
from .nasfcos_head import NASFCOSHead
from .paa_head import PAAHead
from .pisa_retinanet_head import PISARetinaHead
from .pisa_ssd_head import PISASSDHead
from .reppoints_head import RepPointsHead
from .retina_head import RetinaHead
from .retina_sepbn_head import RetinaSepBNHead
from .rpn_head import RPNHead
from .sabl_retina_head import SABLRetinaHead
from .solo_head import DecoupledSOLOHead, DecoupledSOLOLightHead, SOLOHead
from .solov2_head import SOLOV2Head
from .ssd_head import SSDHead
from .tood_head import TOODHead
from .vfnet_head import VFNetHead
from .yolact_head import YOLACTHead, YOLACTProtonet, YOLACTSegmHead
from .yolo_head import YOLOV3Head
from .yolof_head import YOLOFHead
from .yolox_head import YOLOXHead
__all__ = [
    'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption',
    'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead',
    'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead',
    'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead',
    'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead', 'YOLACTHead',
    'YOLACTSegmHead', 'YOLACTProtonet', 'YOLOV3Head', 'PAAHead',
    'SABLRetinaHead', 'CentripetalHead', 'VFNetHead', 'StageCascadeRPNHead',
    'CascadeRPNHead', 'EmbeddingRPNHead', 'LDHead', 'CascadeRPNHead',
    'AutoAssignHead', 'DETRHead', 'YOLOFHead', 'DeformableDETRHead',
    'SOLOHead', 'DecoupledSOLOHead', 'CenterNetHead', 'YOLOXHead',
    'DecoupledSOLOLightHead', 'LADHead', 'TOODHead', 'MaskFormerHead',
    'Mask2FormerHead', 'SOLOV2Head', 'DDODHead'
]
| apache-2.0 | Python |
5e7e60f8afb3ddeeec2714a9d11dd30a6ea3e52f | Add new calc_total_error tests | larrybradley/photutils,astropy/photutils | photutils/utils/tests/test_prepare_data.py | photutils/utils/tests/test_prepare_data.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import numpy as np
from numpy.testing import assert_allclose
from astropy.tests.helper import pytest
import astropy.units as u

from .. import calc_total_error

SHAPE = (5, 5)
DATAVAL = 2.
DATA = np.ones(SHAPE) * DATAVAL
MASK = np.zeros_like(DATA, dtype=bool)
MASK[2, 2] = True
BKG_ERROR = np.ones(SHAPE)
EFFGAIN = np.ones(SHAPE) * DATAVAL
BACKGROUND = np.ones(SHAPE)
WRONG_SHAPE = np.ones((2, 2))


class TestCalculateTotalError(object):
    def test_error_shape(self):
        with pytest.raises(ValueError):
            calc_total_error(DATA, WRONG_SHAPE, EFFGAIN)

    def test_gain_shape(self):
        with pytest.raises(ValueError):
            calc_total_error(DATA, BKG_ERROR, WRONG_SHAPE)

    @pytest.mark.parametrize('effective_gain', (0, -1))
    def test_gain_le_zero(self, effective_gain):
        with pytest.raises(ValueError):
            calc_total_error(DATA, BKG_ERROR, effective_gain)

    def test_gain_scalar(self):
        error_tot = calc_total_error(DATA, BKG_ERROR, 2.)
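        # With data = 2 and gain = 2 the Poisson variance term (data / gain) is 1,
        # which adds to the background variance of 1: sqrt(1 + 1) = sqrt(2).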
        assert_allclose(error_tot, np.sqrt(2.) * BKG_ERROR)

    def test_gain_array(self):
        error_tot = calc_total_error(DATA, BKG_ERROR, EFFGAIN)
        assert_allclose(error_tot, np.sqrt(2.) * BKG_ERROR)

    def test_units(self):
        units = u.electron / u.s
        error_tot1 = calc_total_error(DATA * units, BKG_ERROR * units,
                                      EFFGAIN * u.s)
        assert error_tot1.unit == units
        error_tot2 = calc_total_error(DATA, BKG_ERROR, EFFGAIN)
        assert_allclose(error_tot1.value, error_tot2)

    def test_error_units(self):
        units = u.electron / u.s
        with pytest.raises(ValueError):
            calc_total_error(DATA * units, BKG_ERROR * u.electron,
                             EFFGAIN * u.s)

    def test_effgain_units(self):
        units = u.electron / u.s
        with pytest.raises(u.UnitsError):
            calc_total_error(DATA * units, BKG_ERROR * units, EFFGAIN * u.km)

    def test_missing_bkgerror_units(self):
        units = u.electron / u.s
        with pytest.raises(ValueError):
            calc_total_error(DATA * units, BKG_ERROR, EFFGAIN * u.s)

    def test_missing_effgain_units(self):
        units = u.electron / u.s
        with pytest.raises(ValueError):
            calc_total_error(DATA * units, BKG_ERROR * units,
                             EFFGAIN)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from __future__ import (absolute_import, division, print_function,
                        unicode_literals)

import numpy as np
from numpy.testing import assert_allclose
from astropy.tests.helper import pytest

from .. import calc_total_error

SHAPE = (5, 5)
DATAVAL = 2.
DATA = np.ones(SHAPE) * DATAVAL
MASK = np.zeros_like(DATA, dtype=bool)
MASK[2, 2] = True
BKG_ERROR = np.ones(SHAPE)
EFFGAIN = np.ones(SHAPE) * DATAVAL
BACKGROUND = np.ones(SHAPE)
WRONG_SHAPE = np.ones((2, 2))


class TestCalculateTotalError(object):
    def test_error_shape(self):
        with pytest.raises(ValueError):
            calc_total_error(DATA, WRONG_SHAPE, EFFGAIN)

    def test_gain_shape(self):
        with pytest.raises(ValueError):
            calc_total_error(DATA, BKG_ERROR, WRONG_SHAPE)

    @pytest.mark.parametrize('effective_gain', (0, -1))
    def test_gain_le_zero(self, effective_gain):
        with pytest.raises(ValueError):
            calc_total_error(DATA, BKG_ERROR, effective_gain)

    def test_gain_scalar(self):
        error_tot = calc_total_error(DATA, BKG_ERROR, 2.)
        assert_allclose(error_tot, np.sqrt(2.) * BKG_ERROR)

    def test_gain_array(self):
        error_tot = calc_total_error(DATA, BKG_ERROR, EFFGAIN)
        assert_allclose(error_tot, np.sqrt(2.) * BKG_ERROR)
| bsd-3-clause | Python |
5dd6fa526eac5632f75d543342a7a5a45b866716 | Update plantcv/plantcv/visualize/obj_size_ecdf.py | stiphyMT/plantcv,stiphyMT/plantcv,danforthcenter/plantcv,danforthcenter/plantcv,stiphyMT/plantcv,danforthcenter/plantcv | plantcv/plantcv/visualize/obj_size_ecdf.py | plantcv/plantcv/visualize/obj_size_ecdf.py | # Plot Empirical Cumulative Distribution Function for Object Size
import os
import cv2
import pandas as pd
from plantcv.plantcv import params
from plantcv.plantcv._debug import _debug
from statsmodels.distributions.empirical_distribution import ECDF
from plotnine import ggplot, aes, geom_point, labels, scale_x_log10


def obj_size_ecdf(mask, title=None):
    """ Plot empirical cumulative distribution for object size based on binary
    mask

    Inputs:
    mask = binary mask
    title = a custom title for the plot (default=None)

    Returns:
    fig_ecdf = empirical cumulative distribution function plot

    :param mask: numpy.ndarray
    :param title: str
    :return fig_ecdf: plotnine.ggplot.ggplot
    """
    objects, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)[-2:]
    areas = [cv2.contourArea(cnt) for cnt in objects]

    ecdf = ECDF(areas, side='right')

    ecdf_df = pd.DataFrame({'object area': ecdf.x, 'cumulative probability': ecdf.y})
    # create ecdf plot and apply log-scale for x-axis (areas)
    fig_ecdf = (ggplot(data=ecdf_df, mapping=aes(x='object area', y='cumulative probability'))
                + geom_point(size=.1)
                + scale_x_log10())
    if title is not None:
        fig_ecdf = fig_ecdf + labels.ggtitle(title)

    # Plot or print the ecdf
    _debug(visual=fig_ecdf,
           filename=os.path.join(params.debug_outdir, str(params.device) + '_area_ecdf.png'))
    return fig_ecdf
| # Plot Empirical Cumulative Distribution Function for Object Size

import os
import cv2
import pandas as pd
from plantcv.plantcv import params
from plantcv.plantcv._debug import _debug
from statsmodels.distributions.empirical_distribution import ECDF
from plotnine import ggplot, aes, geom_point, labels, \
    scale_color_manual, scale_x_log10


def obj_size_ecdf(mask, title=None):
    """ Plot empirical cumulative distribution for object size based on binary
    mask

    Inputs:
    mask = binary mask
    title = a custom title for the plot (default=None)

    Returns:
    fig_ecdf = empirical cumulative distribution function plot

    :param mask: numpy.ndarray
    :param title: str
    :return fig_ecdf: plotnine.ggplot.ggplot
    """
    objects, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)[-2:]
    areas = [cv2.contourArea(cnt) for cnt in objects]

    ecdf = ECDF(areas, side='right')

    ecdf_df = pd.DataFrame({'object area': ecdf.x, 'cumulative probability': ecdf.y})
    # create ecdf plot and apply log-scale for x-axis (areas)
    fig_ecdf = (ggplot(data=ecdf_df, mapping=aes(x='object area', y='cumulative probability'))
                + geom_point(size=.1)
                + scale_x_log10())
    if title is not None:
        fig_ecdf = fig_ecdf + labels.ggtitle(title)

    # Plot or print the ecdf
    _debug(visual=fig_ecdf,
           filename=os.path.join(params.debug_outdir, str(params.device) + '_area_ecdf.png'))
    return fig_ecdf
| mit | Python |
e56df10f3e40b9287735f1295a0ed72e1525896f | change json to md RasaHQ/roadmap#280 | RasaHQ/rasa_core,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_core,RasaHQ/rasa_nlu,RasaHQ/rasa_core | examples/restaurantbot/bot.py | examples/restaurantbot/bot.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import argparse
import logging
import warnings

from policy import RestaurantPolicy
from rasa_core import utils
from rasa_core.agent import Agent
from rasa_core.policies.memoization import MemoizationPolicy

logger = logging.getLogger(__name__)


class RestaurantAPI(object):
    def search(self, info):
        return "papi's pizza place"


def train_dialogue(domain_file="restaurant_domain.yml",
                   model_path="models/dialogue",
                   training_data_file="data/babi_stories.md"):
    agent = Agent(domain_file,
                  policies=[MemoizationPolicy(max_history=3),
                            RestaurantPolicy()])

    training_data = agent.load_data(training_data_file)
    agent.train(
        training_data,
        epochs=400,
        batch_size=100,
        validation_split=0.2
    )

    agent.persist(model_path)
    return agent


def train_nlu():
    from rasa_nlu.training_data import load_data
    from rasa_nlu import config
    from rasa_nlu.model import Trainer

    training_data = load_data('data/nlu_data.md')
    trainer = Trainer(config.load("nlu_model_config.yml"))
    trainer.train(training_data)
    model_directory = trainer.persist('models/nlu/',
                                      fixed_model_name="current")

    return model_directory


if __name__ == '__main__':
    utils.configure_colored_logging(loglevel="INFO")

    parser = argparse.ArgumentParser(
        description='starts the bot')

    parser.add_argument(
        'task',
        choices=["train-nlu", "train-dialogue", "run"],
        help="what the bot should do - e.g. run or train?")
    task = parser.parse_args().task

    # decide what to do based on first parameter of the script
    if task == "train-nlu":
        train_nlu()
    elif task == "train-dialogue":
        train_dialogue()
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals

import argparse
import logging
import warnings

from policy import RestaurantPolicy
from rasa_core import utils
from rasa_core.agent import Agent
from rasa_core.policies.memoization import MemoizationPolicy

logger = logging.getLogger(__name__)


class RestaurantAPI(object):
    def search(self, info):
        return "papi's pizza place"


def train_dialogue(domain_file="restaurant_domain.yml",
                   model_path="models/dialogue",
                   training_data_file="data/babi_stories.md"):
    agent = Agent(domain_file,
                  policies=[MemoizationPolicy(max_history=3),
                            RestaurantPolicy()])

    training_data = agent.load_data(training_data_file)
    agent.train(
        training_data,
        epochs=400,
        batch_size=100,
        validation_split=0.2
    )

    agent.persist(model_path)
    return agent


def train_nlu():
    from rasa_nlu.training_data import load_data
    from rasa_nlu import config
    from rasa_nlu.model import Trainer

    training_data = load_data('data/nlu_data.json')
    trainer = Trainer(config.load("nlu_model_config.yml"))
    trainer.train(training_data)
    model_directory = trainer.persist('models/nlu/',
                                      fixed_model_name="current")

    return model_directory


if __name__ == '__main__':
    utils.configure_colored_logging(loglevel="INFO")

    parser = argparse.ArgumentParser(
        description='starts the bot')

    parser.add_argument(
        'task',
        choices=["train-nlu", "train-dialogue", "run"],
        help="what the bot should do - e.g. run or train?")
    task = parser.parse_args().task

    # decide what to do based on first parameter of the script
    if task == "train-nlu":
        train_nlu()
    elif task == "train-dialogue":
        train_dialogue()
| apache-2.0 | Python |
40316f4a2f1e973ba98a50592a54f21a2d5ac18e | Add the exceptions module to describe exceptions from external target | yehzhang/RapidTest,yehzhang/RapidTest | rapidtest/executors/exceptions.py | rapidtest/executors/exceptions.py | from ..utils import Dictable
MSG_CANNOT_GUESS_METHOD = '''cannot find the target method. You may specify operations as \
arguments to Case if there are multiple methods to be called, or prepend all names of private \
methods with underscores.'''


class ExternalError(Exception):
    pass


class ExternalEnvironmentError(ExternalError):
    pass


class ExternalRuntimeError(ExternalError):
    pass


class ExternalException(Dictable):
    def __init__(self, name, message=None, stack_trace=None, runtime=False):
        self.name = name
        self.message = message or ''
        self.stack_trace = (stack_trace or '').rstrip()
        self.runtime = runtime

    def to_exception(self):
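        # Rebuild the external failure as a dynamically created Exception subclass
        # named after the original, and pick the wrapper class by whether the
        # failure happened at runtime or while setting up the environment.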
        Exc = type(self.name, (Exception,), {})
        msg = '{}\n{}'.format(self.message, self.stack_trace)
        Wrapper = ExternalRuntimeError if self.runtime else ExternalEnvironmentError
        return Wrapper, Exc(msg)
| class ExternalExecutionTargetError(Exception):
    pass
| mit | Python |
07aca8e96d5e93edb684d0c4684ef8f837e8fc58 | Use comment builder for dirhtml too | sunnyzwh/readthedocs.org,hach-que/readthedocs.org,kenwang76/readthedocs.org,singingwolfboy/readthedocs.org,sid-kap/readthedocs.org,michaelmcandrew/readthedocs.org,dirn/readthedocs.org,sid-kap/readthedocs.org,espdev/readthedocs.org,tddv/readthedocs.org,kenwang76/readthedocs.org,sils1297/readthedocs.org,safwanrahman/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,Tazer/readthedocs.org,raven47git/readthedocs.org,gjtorikian/readthedocs.org,kenwang76/readthedocs.org,kenshinthebattosai/readthedocs.org,pombredanne/readthedocs.org,espdev/readthedocs.org,emawind84/readthedocs.org,emawind84/readthedocs.org,clarkperkins/readthedocs.org,SteveViss/readthedocs.org,sils1297/readthedocs.org,Tazer/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,kenshinthebattosai/readthedocs.org,mhils/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,clarkperkins/readthedocs.org,singingwolfboy/readthedocs.org,safwanrahman/readthedocs.org,mrshoki/readthedocs.org,Tazer/readthedocs.org,royalwang/readthedocs.org,SteveViss/readthedocs.org,kenshinthebattosai/readthedocs.org,Tazer/readthedocs.org,michaelmcandrew/readthedocs.org,royalwang/readthedocs.org,hach-que/readthedocs.org,mrshoki/readthedocs.org,istresearch/readthedocs.org,tddv/readthedocs.org,pombredanne/readthedocs.org,fujita-shintaro/readthedocs.org,titiushko/readthedocs.org,GovReady/readthedocs.org,wanghaven/readthedocs.org,agjohnson/readthedocs.org,mhils/readthedocs.org,dirn/readthedocs.org,Carreau/readthedocs.org,istresearch/readthedocs.org,istresearch/readthedocs.org,titiushko/readthedocs.org,davidfischer/readthedocs.org,stevepiercy/readthedocs.org,CedarLogic/readthedocs.org,clarkperkins/readthedocs.org,royalwang/readthedocs.org,safwanrahman/readthedocs.org,agjohnson/readthedocs.org,mhils/readthedocs.org,singingwolfboy/readthedocs.org,jerel/readthedocs.org,safwanrahman/readthedocs.org,nikolas/readthedocs.org,mrshoki/readthedocs.org,wanghaven/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,agjohnson/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,clarkperkins/readthedocs.org,dirn/readthedocs.org,gjtorikian/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,singingwolfboy/readthedocs.org,jerel/readthedocs.org,sid-kap/readthedocs.org,davidfischer/readthedocs.org,raven47git/readthedocs.org,dirn/readthedocs.org,gjtorikian/readthedocs.org,sunnyzwh/readthedocs.org,VishvajitP/readthedocs.org,titiushko/readthedocs.org,mrshoki/readthedocs.org,espdev/readthedocs.org,michaelmcandrew/readthedocs.org,agjohnson/readthedocs.org,kdkeyser/readthedocs.org,techtonik/readthedocs.org,SteveViss/readthedocs.org,raven47git/readthedocs.org,agjohnson/readthedocs.org,cgourlay/readthedocs.org,pombredanne/readthedocs.org,wanghaven/readthedocs.org,wanghaven/readthedocs.org,asampat3090/readthedocs.org,espdev/readthedocs.org,takluyver/readthedocs.org,GovReady/readthedocs.org,dirn/readthedocs.org,wijerasa/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,jerel/readthedocs.org,stevepiercy/readthedocs.org,fujita-shintaro/readthedocs.org,attakei/readthedocs-oauth,Carreau/readthedocs.org,d0ugal/readthedocs.org,nikolas/readthedocs.org,kenshinthebattosai/readthedocs.org,nikolas/readthedocs.org,sunnyzwh/readthedocs.org,KamranMackey/readthedocs.org,kdkeyser/readthedocs.org,kdkeyser/readthedocs.org,LukasBoersma/readthedocs.org,CedarLogic/readthedocs.org,mrshoki/readthedocs.org,tddv/readthedocs.org,mhils/readthedocs.org,cgourlay/readthedocs.org,laplaceliu/readthedocs.org,GovReady/readthedocs.org,davidfischer/readthedocs.org,soulshake/readthedocs.org | readthedocs/doc_builder/loader.py | readthedocs/doc_builder/loader.py | from django.utils.importlib import import_module
from django.conf import settings
# Managers
mkdocs = import_module(getattr(settings, 'MKDOCS_BACKEND', 'doc_builder.backends.mkdocs'))
sphinx = import_module(getattr(settings, 'SPHINX_BACKEND', 'doc_builder.backends.sphinx'))
loading = {
    # Possible HTML Builders
    'sphinx': sphinx.HtmlBuilderComments,
    'sphinx_htmldir': sphinx.HtmlDirBuilderComments,
    'sphinx_singlehtml': sphinx.SingleHtmlBuilder,
    # Other Sphinx Builders
    'sphinx_pdf': sphinx.PdfBuilder,
    'sphinx_epub': sphinx.EpubBuilder,
    'sphinx_search': sphinx.SearchBuilder,
    'sphinx_singlehtmllocalmedia': sphinx.LocalMediaBuilder,
    # Other markup
    'mkdocs': mkdocs.MkdocsHTML,
    'mkdocs_json': mkdocs.MkdocsJSON,
}
| from django.utils.importlib import import_module
from django.conf import settings
# Managers
mkdocs = import_module(getattr(settings, 'MKDOCS_BACKEND', 'doc_builder.backends.mkdocs'))
sphinx = import_module(getattr(settings, 'SPHINX_BACKEND', 'doc_builder.backends.sphinx'))
loading = {
    # Possible HTML Builders
    'sphinx': sphinx.HtmlBuilderComments,
    'sphinx_htmldir': sphinx.HtmlDirBuilder,
    'sphinx_singlehtml': sphinx.SingleHtmlBuilder,
    # Other Sphinx Builders
    'sphinx_pdf': sphinx.PdfBuilder,
    'sphinx_epub': sphinx.EpubBuilder,
    'sphinx_search': sphinx.SearchBuilder,
    'sphinx_singlehtmllocalmedia': sphinx.LocalMediaBuilder,
    # Other markup
    'mkdocs': mkdocs.MkdocsHTML,
    'mkdocs_json': mkdocs.MkdocsJSON,
}
| mit | Python |
adb9b262167beecb30edc281e4ab10fc05b3e6da | optimize imports and add common exceptions | joaolrpaulo/eletronic-voting-system,joaolrpaulo/eletronic-voting-system,joaolrpaulo/eletronic-voting-system | voting-server/app/errors.py | voting-server/app/errors.py | from flask import jsonify
from werkzeug.exceptions import HTTPException, default_exceptions

from app import app


def json_error(error):
    response = jsonify(message = str(error))
    response.status_code = error.code if isinstance(error, HTTPException) else 500
    return response


for code in default_exceptions.keys():
    app.register_error_handler(code, json_error)

app.register_error_handler(Exception, json_error)
| from flask import jsonify
from werkzeug.exceptions import HTTPException
from werkzeug.exceptions import default_exceptions

from app import app


def json_error(error):
    response = jsonify(message = str(error))
    response.status_code = error.code if isinstance(error, HTTPException) else 500
    return response


for code in default_exceptions.keys():
    app.register_error_handler(code, json_error)
| mit | Python |
7d650f3ee367a8eac710893c1818aa08cccf7598 | Add auth | Jim-Lin/like-japanavgirls,Jim-Lin/like-japanavgirls,Jim-Lin/like-japanavgirls,Jim-Lin/like-japanavgirls,Jim-Lin/like-japanavgirls | bot/dao.py | bot/dao.py | #!/usr/bin/python
# -*- coding: utf-8 -*-

from pymongo import MongoClient, IndexModel


class DAO:
    def __init__(self, *args, **kwargs):
        #args -- tuple of anonymous arguments
        #kwargs -- dictionary of named arguments
        if kwargs.get('mongo') is None:
            self.default_mongo_init()
        else:
            self.mongo = MongoClient(kwargs.get('mongo').get('host'), kwargs.get('mongo').get('port'))
            self.mongo_db = self.mongo[kwargs.get('mongo').get('db')]

    def default_mongo_init(self):
        self.mongo = MongoClient('mongodb://username:password@localhost:27017/dark')
        self.mongo_db = self.mongo['dark']
        self.mongo_db['actress'].create_index('id', unique=True)

    def update_one_feedback_by_id(self, id, ox, image):
        collection = self.mongo_db['actress']
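        # One atomic update: $inc bumps the feedback counter and $push appends the
        # image to the list named by `ox`; upsert=True creates the document if missing.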
        result = collection.update_one({"id": id}, {'$inc': {'count': 1}, '$push': {ox: image}}, upsert=True)

    def update_one_works_by_id(self, id, no):
        collection = self.mongo_db['actress']
        result = collection.update_one({"id": id}, {'$push': {"works": no}}, upsert=True)

    def find_one_works_by_id(self, id):
        collection = self.mongo_db['actress']
        return collection.find_one({"id": id}, {"works": True, "_id": False})

    def update_one_info_by_actress(self, actress):
        collection = self.mongo_db['actress']
        result = collection.update_one({"id": actress.get("id")}, {'$set': {"id": actress.get("id"), "name": actress.get("name"), "img": actress.get("img")}}, upsert=True)

    def find_one_actress_by_id(self, id):
        collection = self.mongo_db['actress']
        return collection.find_one({"id": id}, {"_id": False})
| #!/usr/bin/python
# -*- coding: utf-8 -*-

from pymongo import MongoClient, IndexModel


class DAO:
    def __init__(self, *args, **kwargs):
        #args -- tuple of anonymous arguments
        #kwargs -- dictionary of named arguments
        if kwargs.get('mongo') is None:
            self.default_mongo_init()
        else:
            self.mongo = MongoClient(kwargs.get('mongo').get('host'), kwargs.get('mongo').get('port'))
            self.mongo_db = self.mongo[kwargs.get('mongo').get('db')]

    def default_mongo_init(self):
        self.mongo = MongoClient('mongodb://localhost:27017/')
        self.mongo_db = self.mongo['dark']
        self.mongo_db['actress'].create_index('id', unique=True)

    def update_one_feedback_by_id(self, id, ox, image):
        collection = self.mongo_db['actress']
        result = collection.update_one({"id": id}, {'$inc': {'count': 1}, '$push': {ox: image}}, upsert=True)

    def update_one_works_by_id(self, id, no):
        collection = self.mongo_db['actress']
        result = collection.update_one({"id": id}, {'$push': {"works": no}}, upsert=True)

    def find_one_works_by_id(self, id):
        collection = self.mongo_db['actress']
        return collection.find_one({"id": id}, {"works": True, "_id": False})

    def update_one_info_by_actress(self, actress):
        collection = self.mongo_db['actress']
        result = collection.update_one({"id": actress.get("id")}, {'$set': {"id": actress.get("id"), "name": actress.get("name"), "img": actress.get("img")}}, upsert=True)

    def find_one_actress_by_id(self, id):
        collection = self.mongo_db['actress']
        return collection.find_one({"id": id}, {"_id": False})
| mit | Python |
4b8339b53f1b9dcd79f2a9060933713328a13b90 | Mark dask-distributed tests on Windows as xfail (#1747) | markelg/xray,shoyer/xarray,shoyer/xarray,pydata/xarray,chunweiyuan/xarray,jhamman/xarray,jcmgray/xarray,jhamman/xray,markelg/xray,pydata/xarray,pydata/xarray,markelg/xray,xray/xray,jhamman/xarray,jhamman/xarray,shoyer/xray | xarray/tests/test_distributed.py | xarray/tests/test_distributed.py | import sys
import pytest

import xarray as xr
from xarray.core.pycompat import suppress

distributed = pytest.importorskip('distributed')
da = pytest.importorskip('dask.array')
import dask
from distributed.utils_test import cluster, loop, gen_cluster
from distributed.client import futures_of, wait

from xarray.tests.test_backends import create_tmp_file, ON_WINDOWS
from xarray.tests.test_dataset import create_test_data
from . import assert_allclose, has_scipy, has_netCDF4, has_h5netcdf

ENGINES = []
if has_scipy:
    ENGINES.append('scipy')
if has_netCDF4:
    ENGINES.append('netcdf4')
if has_h5netcdf:
    ENGINES.append('h5netcdf')


@pytest.mark.xfail(sys.platform == 'win32',
                   reason='https://github.com/pydata/xarray/issues/1738')
@pytest.mark.parametrize('engine', ENGINES)
def test_dask_distributed_integration_test(loop, engine):
    with cluster() as (s, _):
        with distributed.Client(s['address'], loop=loop):
            original = create_test_data()
            with create_tmp_file(allow_cleanup_failure=ON_WINDOWS) as filename:
                original.to_netcdf(filename, engine=engine)
                with xr.open_dataset(filename, chunks=3, engine=engine) as restored:
                    assert isinstance(restored.var1.data, da.Array)
                    computed = restored.compute()
                    assert_allclose(original, computed)


@pytest.mark.skipif(distributed.__version__ <= '1.19.3',
                    reason='Need recent distributed version to clean up get')
@gen_cluster(client=True, timeout=None)
def test_async(c, s, a, b):
    x = create_test_data()
    assert not dask.is_dask_collection(x)
    y = x.chunk({'dim2': 4}) + 10
    assert dask.is_dask_collection(y)
    assert dask.is_dask_collection(y.var1)
    assert dask.is_dask_collection(y.var2)

    z = y.persist()
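    # persist() hands the task graph to the scheduler; the returned collection
    # is backed by futures rather than the full graph, hence the smaller graph below.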
    assert str(z)
    assert dask.is_dask_collection(z)
    assert dask.is_dask_collection(z.var1)
    assert dask.is_dask_collection(z.var2)
    assert len(y.__dask_graph__()) > len(z.__dask_graph__())

    assert not futures_of(y)
    assert futures_of(z)

    future = c.compute(z)
    w = yield future
    assert not dask.is_dask_collection(w)
    assert_allclose(x + 10, w)

    assert s.task_state
| import pytest

import xarray as xr
from xarray.core.pycompat import suppress

distributed = pytest.importorskip('distributed')
da = pytest.importorskip('dask.array')
import dask
from distributed.utils_test import cluster, loop, gen_cluster
from distributed.client import futures_of, wait

from xarray.tests.test_backends import create_tmp_file, ON_WINDOWS
from xarray.tests.test_dataset import create_test_data
from . import assert_allclose, has_scipy, has_netCDF4, has_h5netcdf

ENGINES = []
if has_scipy:
    ENGINES.append('scipy')
if has_netCDF4:
    ENGINES.append('netcdf4')
if has_h5netcdf:
    ENGINES.append('h5netcdf')


@pytest.mark.parametrize('engine', ENGINES)
def test_dask_distributed_integration_test(loop, engine):
    with cluster() as (s, _):
        with distributed.Client(s['address'], loop=loop):
            original = create_test_data()
            with create_tmp_file(allow_cleanup_failure=ON_WINDOWS) as filename:
                original.to_netcdf(filename, engine=engine)
                with xr.open_dataset(filename, chunks=3, engine=engine) as restored:
                    assert isinstance(restored.var1.data, da.Array)
                    computed = restored.compute()
                    assert_allclose(original, computed)


@pytest.mark.skipif(distributed.__version__ <= '1.19.3',
                    reason='Need recent distributed version to clean up get')
@gen_cluster(client=True, timeout=None)
def test_async(c, s, a, b):
    x = create_test_data()
    assert not dask.is_dask_collection(x)
    y = x.chunk({'dim2': 4}) + 10
    assert dask.is_dask_collection(y)
    assert dask.is_dask_collection(y.var1)
    assert dask.is_dask_collection(y.var2)

    z = y.persist()
    assert str(z)
    assert dask.is_dask_collection(z)
    assert dask.is_dask_collection(z.var1)
    assert dask.is_dask_collection(z.var2)
    assert len(y.__dask_graph__()) > len(z.__dask_graph__())

    assert not futures_of(y)
    assert futures_of(z)

    future = c.compute(z)
    w = yield future
    assert not dask.is_dask_collection(w)
    assert_allclose(x + 10, w)

    assert s.task_state
| apache-2.0 | Python |
6c3ff180c3bda17f1b6fdcf4500fdefdef9d713f | Fix copy & paste errors in setting I brought over from django-skel. | PrecisionMojo/pm-www,PrecisionMojo/pm-www | vsub/settings/production.py | vsub/settings/production.py | """Settings used in the production environment."""
from memcacheify import memcacheify
from postgresify import postgresify
from base import *
## Email configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = os.environ.get('EMAIL_HOST', 'smtp.gmail.com')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD', '')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER', '[email protected]')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = os.environ.get('EMAIL_PORT', 587)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls
EMAIL_USE_TLS = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = EMAIL_HOST_USER
## Database configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = postgresify()
## Cache configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = memcacheify()
## Secret key configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Use the value set in the Heroku configuration.
SECRET_KEY = os.environ.get('SECRET_KEY', SECRET_KEY)
| """Settings used in the production environment."""
from memcacheify import memcacheify
from postgresify import postgresify
from base import *
## Email configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host
EMAIL_HOST = os.environ.get('EMAIL_HOST', 'smtp.gmail.com')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-password
EMAIL_HOST_PASSWORD = environ.get('EMAIL_HOST_PASSWORD', '')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-host-user
EMAIL_HOST_USER = environ.get('EMAIL_HOST_USER', '[email protected]')
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-port
EMAIL_PORT = environ.get('EMAIL_PORT', 587)
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix
EMAIL_SUBJECT_PREFIX = '[%s] ' % SITE_NAME
# See: https://docs.djangoproject.com/en/dev/ref/settings/#email-use-tls
EMAIL_USE_TLS = True
# See: https://docs.djangoproject.com/en/dev/ref/settings/#server-email
SERVER_EMAIL = EMAIL_HOST_USER
## Database configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = postgresify()
## Cache configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#caches
CACHES = memcacheify()
## Secret key configuration
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Use the value set in the Heroku configuration.
SECRET_KEY = environ.get('SECRET_KEY', SECRET_KEY)
| mit | Python |
c3b0d4b05314dc9fd51c790a86d30659d09c5250 | Allow negative numbers in the GEOS string | Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget | wagtailgeowidget/helpers.py | wagtailgeowidget/helpers.py | import re
geos_ptrn = re.compile(
    "^SRID=([0-9]{1,});POINT\((-?[0-9\.]{1,})\s(-?[0-9\.]{1,})\)$"
)
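# The optional leading '-' in each coordinate group is what admits negative
# values, e.g. a string like 'SRID=4326;POINT(-73.98 40.74)' (illustrative input).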
def geosgeometry_str_to_struct(value):
    '''
    Parses a geosgeometry string into struct.

    Example:
        SRID=5432;POINT(12.0 13.0)

    Returns:
        >> [5432, 12.0, 13.0]
    '''
    result = geos_ptrn.match(value)
    if not result:
        return None

    return {
        'srid': result.group(1),
        'x': result.group(2),
        'y': result.group(3),
    }
| import re

geos_ptrn = re.compile(
    "^SRID=([0-9]{1,});POINT\(([0-9\.]{1,})\s([0-9\.]{1,})\)$"
)


def geosgeometry_str_to_struct(value):
    '''
    Parses a geosgeometry string into struct.

    Example:
        SRID=5432;POINT(12.0 13.0)

    Returns:
        >> [5432, 12.0, 13.0]
    '''
    result = geos_ptrn.match(value)
    if not result:
        return None

    return {
        'srid': result.group(1),
        'x': result.group(2),
        'y': result.group(3),
    }
| mit | Python |
7e45a26f86095ee2f6972e08697aa132e642636e | Test for types in __mul__ | bjodah/symengine.py,bjodah/symengine.py,symengine/symengine.py,bjodah/symengine.py,symengine/symengine.py,symengine/symengine.py | csympy/tests/test_arit.py | csympy/tests/test_arit.py | from nose.tools import raises
from csympy import Symbol, Integer


def test_arit1():
    x = Symbol("x")
    y = Symbol("y")
    e = x + y
    e = x * y
    e = Integer(2)*x
    e = 2*x


def test_arit2():
    x = Symbol("x")
    y = Symbol("y")
    assert x+x == Integer(2) * x
    assert x+x != Integer(3) * x
    assert x+x == 2 * x


@raises(TypeError)
def test_arit3():
    x = Symbol("x")
    y = Symbol("y")
e = "x"*x
| from csympy import Symbol, Integer


def test_arit1():
    x = Symbol("x")
    y = Symbol("y")
    e = x + y
    e = x * y
    e = Integer(2)*x
    e = 2*x


def test_arit2():
    x = Symbol("x")
    y = Symbol("y")
    assert x+x == Integer(2) * x
    assert x+x != Integer(3) * x
    assert x+x == 2 * x
| mit | Python |
852c62eef3d9beea43927f75b1a8aaa021ce25f9 | Add a docstring to test helper | VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core | test/selenium/src/lib/test_helpers.py | test/selenium/src/lib/test_helpers.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""
Utility classes for page objects used in tests.
Details:
Most of the tests require a sequence of primitive methods of the page
object. If the sequence repeats itself among tests, it should be shared in
this module.
"""
import uuid
from lib import base
from lib.constants.test import modal_create_new
from lib.constants.test import modal_custom_attribute
class HtmlParser(base.Test):
    """The HtmlParser class simulates what happens with (non-rich)text in HTML.
    """

    @staticmethod
    def parse_text(text):
        """Simulates text parsed by html

        Args:
            text (str)
        """
        return text.replace(" ", "").replace("\n", "")


class ModalNewProgramPage(base.Test):
    """Methods for simulating common user actions"""

    @staticmethod
    def enter_test_data(modal):
        """Fills out all fields in the lhn_modal

        Args:
            modal (lib.page.lhn_modal.new_program.EditProgramModal)
        """
        unique_id = str(uuid.uuid4())
        modal.enter_title(modal_create_new.Program.TITLE + unique_id)
        modal.enter_description(
            modal_create_new.Program.DESCRIPTION_SHORT + unique_id)
        modal.enter_notes(
            modal_create_new.Program.NOTES_SHORT + unique_id)
        modal.enter_code(modal_create_new.Program.CODE + unique_id)
        modal.filter_and_select_primary_contact("example")
        modal.filter_and_select_secondary_contact("example")
        modal.enter_program_url(
            unique_id + modal_create_new.Program.PROGRAM_URL)
        modal.enter_reference_url(
            unique_id + modal_create_new.Program.REFERENCE_URL)

    @staticmethod
    def set_start_end_dates(modal, day_start, day_end):
        """
        Sets the dates from the datepicker in the new program/edit modal.

        Args:
            modal (lib.page.lhn_modal.new_program.EditProgramModal)
            day_start (int): for more info see
                base.DatePicker.select_day_in_current_month
            day_end (int): for more info see
                base.DatePicker.select_day_in_current_month
        """
        modal.enter_effective_date_start_month(day_start)
        modal.enter_stop_date_end_month(day_end)


class ModalNewProgramCustomAttribute(base.Test):
    @staticmethod
    def enter_test_data(modal):
        """Fills out all fields in the lhn_modal

        Args:
            modal (lib.page.widget.custom_attribute.NewCustomAttributeModal)
        """
        modal.enter_title(modal_custom_attribute.Program.TITLE)
        modal.enter_inline_help(modal_custom_attribute.Program.INLINE_HELP)
        modal.enter_placeholder(modal_custom_attribute.Program.PLACEHOLDER)
| # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""
Utility classes for page objects used in tests.
Details:
Most of the tests require a sequence of primitive methods of the page
object. If the sequence repeats itself among tests, it should be shared in
this module.
"""
import uuid
from lib import base
from lib.constants.test import modal_create_new
from lib.constants.test import modal_custom_attribute
class HtmlParser(base.Test):
@staticmethod
def parse_text(text):
"""Simulates text parsed by html
Args:
text (str)
"""
return text.replace(" ", "").replace("\n", "")
class ModalNewProgramPage(base.Test):
"""Methods for simulating common user actions"""
@staticmethod
def enter_test_data(modal):
"""Fills out all fields in the lhn_modal
Args:
modal (lib.page.lhn_modal.new_program.EditProgramModal)
"""
unique_id = str(uuid.uuid4())
modal.enter_title(modal_create_new.Program.TITLE + unique_id)
modal.enter_description(
modal_create_new.Program.DESCRIPTION_SHORT + unique_id)
modal.enter_notes(
modal_create_new.Program.NOTES_SHORT + unique_id)
modal.enter_code(modal_create_new.Program.CODE + unique_id)
modal.filter_and_select_primary_contact("example")
modal.filter_and_select_secondary_contact("example")
modal.enter_program_url(
unique_id + modal_create_new.Program.PROGRAM_URL)
modal.enter_reference_url(
unique_id + modal_create_new.Program.REFERENCE_URL)
@staticmethod
def set_start_end_dates(modal, day_start, day_end):
"""
Sets the dates from the datepicker in the new program/edit modal.
Args:
modal (lib.page.lhn_modal.new_program.EditProgramModal)
day_start (int): for more info see
base.DatePicker.select_day_in_current_month
day_end (int): for more info see
base.DatePicker.select_day_in_current_month
"""
modal.enter_effective_date_start_month(day_start)
modal.enter_stop_date_end_month(day_end)
class ModalNewProgramCustomAttribute(base.Test):
@staticmethod
def enter_test_data(modal):
"""Fills out all fields in the lhn_modal
Args:
modal (lib.page.widget.custom_attribute.NewCustomAttributeModal)
"""
modal.enter_title(modal_custom_attribute.Program.TITLE)
modal.enter_inline_help(modal_custom_attribute.Program.INLINE_HELP)
modal.enter_placeholder(modal_custom_attribute.Program.PLACEHOLDER)
| apache-2.0 | Python |
665ece3f699d6a62be0d9c859532ae73e250d86f | Update __init__.py | inkenbrandt/WellApplication | wellapplication/__init__.py | wellapplication/__init__.py | # -*- coding: utf-8 -*-
__version__ = '0.2.24'
__author__ = 'Paul Inkenbrandt'
__name__ = 'wellapplication'
from transport import *
from usgs import *
from chem import WQP
from mesopy import *
from graphs import piper, fdc, gantt
import MannKendall
import avgMeths
| # -*- coding: utf-8 -*-
__version__ = '0.2.24'
__author__ = 'Paul Inkenbrandt'
__name__ = 'wellapplication'
from transport import *
from usgs import *
from chem import WQP
from graphs import piper, fdc, gantt
import MannKendall
import avgMeths
| mit | Python |
f5e459e369f1a00d3407f7993e427b676047da21 | bump version | cberzan/django-anger | django_anger/__init__.py | django_anger/__init__.py | __version__ = "0.1.1-20130516"
| __version__ = "0.1-20130513.try3"
| mit | Python |
ed37e4fc5968c7ab17fa55aca3958e72c963ea78 | replace with right code | nvoron23/owasp-pysec,blabla1337/owasp-pysec,cardoso010/owasp-pysec,cardoso010/owasp-pysec,ebranca/owasp-pysec,nvoron23/owasp-pysec,ebranca/owasp-pysec,blabla1337/owasp-pysec,blabla1337/owasp-pysec,ebranca/owasp-pysec,cardoso010/owasp-pysec,nvoron23/owasp-pysec | demo/sort/sort.py | demo/sort/sort.py | #!/usr/bin/python2.7 -OOBRtt
import errno
import os
import operator
import sys
from pysec import alg
from pysec.io import fd
from pysec.utils import ilen, xrange
from pysec import tb
from pysec.xsplit import xbounds
# tb.set_excepthook(tb.short_tb)
BUFSIZE = 4096
MAX_MEMORY_SORT = 10240
TMP_DIR = os.path.abspath('./tmp')
try:
os.mkdir(TMP_DIR)
except OSError, ex:
if ex.errno != errno.EEXIST:
raise
def sort_in_memory(fp, start, end):
    lines = [fp[s:e] for s, e in fp.xlines(start, end, keep_eol=1, size=BUFSIZE)]
lines.sort()
return lines
def _main():
path = os.path.abspath(sys.argv[1])
fno = 0
with fd.File.open(path, fd.FO_READEX) as txt:
# split and sort
prev_end = offset = 0
for lineno, (start, end) in enumerate(txt.xlines(keep_eol=1, size=BUFSIZE)):
if end - offset > MAX_MEMORY_SORT:
if end - prev_end > MAX_MEMORY_SORT:
print >> sys.stderr, "[ERROR]"
print >> sys.stderr, "Line %d bigger than MAX_MEMORY_SORT limit" % lineno
print >> sys.stderr, "Line's length: %d" % (end - prev_end)
print >> sys.stderr, "MAX_MEMORY_SORT limit: %d" % MAX_MEMORY_SORT
return 1
with fd.File.open(os.path.join(TMP_DIR, '%s.srt' % str(fno)), fd.FO_WRITE) as fout:
fout.truncate()
for line in sort_in_memory(txt, offset, prev_end):
fout.write(line)
fno += 1
offset = end
prev_end = end
else:
with fd.File.open(os.path.join(TMP_DIR, '%s.srt' % str(fno)), fd.FO_WRITE) as fout:
fout.truncate()
for line in sort_in_memory(txt, offset, prev_end):
fout.write(line)
fno += 1
splits = fno
# merge and sort
files = [fd.File.open(os.path.join(TMP_DIR, '%s.srt' % str(fno)), fd.FO_READ).lines()
for fno in xrange(0, splits)]
lines = [f.next() for f in files]
while files:
fno, line = min(enumerate(lines), key=operator.itemgetter(1))
print line
try:
lines[fno] = files[fno].next()
except StopIteration:
del lines[fno]
del files[fno]
for i in xrange(0, splits):
os.unlink(os.path.join(TMP_DIR, '%s.srt' % str(i)))
if __name__ == '__main__':
    ret = _main()
    os.rmdir(TMP_DIR)
    sys.exit(ret)
| #!/usr/bin/python2.7 -OOBRtt
import os
import sys
from pysec import alg
from pysec.io import fd
from pysec.xsplit import xbounds
def find_ck(fp, sub, chunk):
buf = fp[:chunk]
offset = len(buf)
sub_len = len(sub)
while buf:
pos = alg.find(sub)
if pos >= 0:
yield pos
buf = buf[pos+1:]
else:
offset = offset - sub_len
buf = buf[offset:offset+chunk-sub_len]
if __name__ == '__main__':
path = os.path.abspath(sys.argv[1])
with fd.File.open(path, fd.FO_READEX) as txt:
for lineno, (start, end) in enumerate(xbounds(txt, sep='\n', keep_sep=1, find=lambda t, s: find_ck(t, s, 4096))):
print lineno
| apache-2.0 | Python |
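The rewritten sort.py splits the input into bounded chunks, sorts each chunk in memory, and then k-way merges them with a min() loop. A hedged stdlib-only sketch of the same idea (pysec's fd/alg helpers are not assumed here); heapq.merge performs the merge step:
import heapq

def external_sort(lines, chunk_size=10240):
    # Sort an iterable of lines without holding all of them sorted at once.
    chunks, buf, size = [], [], 0
    for line in lines:
        buf.append(line)
        size += len(line)
        if size > chunk_size:
            chunks.append(sorted(buf))
            buf, size = [], 0
    if buf:
        chunks.append(sorted(buf))
    # heapq.merge does the k-way merge that the min() loop does by hand.
    return heapq.merge(*chunks)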
34db4460aa67fc9abfaaaf2c48a6ea7c5b801ff0 | Fix for libtest for cpython 2.6 / jython / pypy | spaceone/pyjs,minghuascode/pyj,pombredanne/pyjs,lancezlin/pyjs,Hasimir/pyjs,gpitel/pyjs,pyjs/pyjs,anandology/pyjamas,gpitel/pyjs,pombredanne/pyjs,minghuascode/pyj,minghuascode/pyj,lancezlin/pyjs,pyjs/pyjs,pyjs/pyjs,pyjs/pyjs,minghuascode/pyj,spaceone/pyjs,spaceone/pyjs,anandology/pyjamas,Hasimir/pyjs,pombredanne/pyjs,pombredanne/pyjs,gpitel/pyjs,anandology/pyjamas,Hasimir/pyjs,spaceone/pyjs,Hasimir/pyjs,lancezlin/pyjs,lancezlin/pyjs,anandology/pyjamas,gpitel/pyjs | examples/libtest/imports/__init__.py | examples/libtest/imports/__init__.py |
exec_order = []
class Imports(object):
exec_order = exec_order
def __init__(self):
self.v = 1
imports = Imports()
overrideme = "not overridden"
from . import cls as loccls
# This is not valid since Python 2.6!
try:
from .imports import cls as upcls
except ImportError:
upcls = loccls
def conditional_func():
return "not overridden"
if True:
def conditional_func():
return "overridden"
|
exec_order = []
class Imports(object):
exec_order = exec_order
def __init__(self):
self.v = 1
imports = Imports()
overrideme = "not overridden"
from . import cls as loccls
from .imports import cls as upcls
def conditional_func():
return "not overridden"
if True:
def conditional_func():
return "overridden"
| apache-2.0 | Python |
e354eba380c6df2f0a14e324da9cbe6467494ddc | Add ModelIORedis and build up the interface. | rfaulkner/easyML,rfaulkner/easyML,rfaulkner/easyML,rfaulkner/easyML | versus/tools/modelIO.py | versus/tools/modelIO.py | """
Class family for Model IO classes to handle read/write of learning models
"""
import redis
class ModelIO(object):
def __init__(self, **kwargs):
pass
def write(self, model):
raise NotImplementedError()
def validate(self, model):
""" Ensures that the model is valid. """
pass
def genkey(self, model):
""" Generates a key from the model. Presumes model is valid. """
return str(model)
def package(self, model):
""" Prepares the model for writing. """
return model
class ModelIORedis(ModelIO):
""" Performs IO to redis. """
def __init__(self, **kwargs):
super(ModelIORedis, self).__init__(**kwargs)
def write(self, model):
pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
r = redis.Redis(connection_pool=pool)
r.set(self.genkey(model), self.package(model)) | """
Class family for Model IO classes to handle read/write of learning models
"""
class ModelIO(object):
def __init__(self):
pass
def write(self):
raise NotImplementedError() | bsd-3-clause | Python |
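Hypothetical usage of the ModelIORedis class above (assumes a Redis server on localhost:6379 and a model whose str() form is a usable key; both names come from this row):
io = ModelIORedis()
io.write("linear-v1")  # key comes from genkey(), value from package()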
4b9da366b4169caf8802dcbbf20168512cc4e12e | Fix typo | mikalstill/ostrich,mikalstill/ostrich | ostrich/stages/stage_30_clone_osa.py | ostrich/stages/stage_30_clone_osa.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ostrich import steps
from ostrich import utils
def _ansible_debug(r):
if r.complete['ansible-debug'] == 'yes':
return 1
return 0
def get_steps(r):
"""Clone OSA."""
nextsteps = []
nextsteps.append(
steps.SimpleCommandStep(
'git-clone-osa',
('git clone %s/openstack/openstack-ansible '
'/opt/openstack-ansible'
% r.complete['git-mirror-openstack']),
**r.kwargs
)
)
nextsteps.append(
steps.KwargsStep(
'kwargs-osa',
r,
{
'cwd': '/opt/openstack-ansible',
'env': {
'ANSIBLE_ROLE_FETCH_MODE': 'git-clone',
'ANSIBLE_DEBUG': _ansible_debug(r),
'ANSIBLE_KEEP_REMOTE_FILES': '1'
}
},
**r.kwargs
)
)
if utils.is_ironic(r):
nextsteps.append(
steps.KwargsStep(
'kwargs-ironic',
r,
{
'env': {
'BOOTSTRAP_OPTS': 'nova_virt_type=ironic'
}
},
**r.kwargs
)
)
return nextsteps
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ostrich import steps
from ostrich import utils
def _ansible_debug(r):
if r.complete['ansbile-debug'] == 'yes':
return 1
return 0
def get_steps(r):
"""Clone OSA."""
nextsteps = []
nextsteps.append(
steps.SimpleCommandStep(
'git-clone-osa',
('git clone %s/openstack/openstack-ansible '
'/opt/openstack-ansible'
% r.complete['git-mirror-openstack']),
**r.kwargs
)
)
nextsteps.append(
steps.KwargsStep(
'kwargs-osa',
r,
{
'cwd': '/opt/openstack-ansible',
'env': {
'ANSIBLE_ROLE_FETCH_MODE': 'git-clone',
'ANSIBLE_DEBUG': _ansible_debug(r),
'ANSIBLE_KEEP_REMOTE_FILES': '1'
}
},
**r.kwargs
)
)
if utils.is_ironic(r):
nextsteps.append(
steps.KwargsStep(
'kwargs-ironic',
r,
{
'env': {
'BOOTSTRAP_OPTS': 'nova_virt_type=ironic'
}
},
**r.kwargs
)
)
return nextsteps
| apache-2.0 | Python |
62f137072aa26999ad30dda01fe2a736c3e00495 | exclude in admin | Samael500/django-typograf | django_typograf/admin.py | django_typograf/admin.py | from django.contrib import admin
from django_typograf.utils import get_typograf_field_name, get_typograf_hash_field_name
class TypografAdmin(admin.ModelAdmin):
""" Admin class for hide typograf fields from admin site """
def _exclude(self, obj=None):
""" Mark typograf fields as exclude """
exclude = ()
if obj:
exclude += tuple((get_typograf_field_name(field) for field in obj._meta.typografed_fields))
exclude += tuple((get_typograf_hash_field_name(field) for field in obj._meta.typografed_fields))
return exclude
def get_form(self, request, obj=None, **kwargs):
exclude = self.exclude or ()
exclude += self._exclude(obj)
kwargs.update(dict(exclude=exclude))
return super().get_form(request, obj, **kwargs)
| from django.contrib import admin
from django_typograf.utils import get_typograf_field_name, get_typograf_hash_field_name
class TypografAdmin(admin.ModelAdmin):
""" Admin class for hide typograf fields from admin site """
def _exclude(self, obj=None):
""" Mark typograf fields as exclude """
exclude = ()
if obj:
exclude += tuple((get_typograf_field_name(field) for field in obj._meta.typografed_fields))
exclude += tuple((get_typograf_hash_field_name(field) for field in obj._meta.typografed_fields))
return exclude
def get_form(self, request, obj=None, **kwargs):
self.exclude = self.exclude or ()
self.exclude += self._exclude(obj)
return super().get_form(request, obj, **kwargs)
| mit | Python |
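Why the get_form change in this row matters (illustrative sketch, not Django-specific): a ModelAdmin instance is shared across requests, so augmenting self.exclude in place accumulates entries on every call, while a per-call copy does not.
class Leaky(object):
    exclude = ()

    def get_form_bad(self, extra):
        self.exclude += extra  # instance state grows on every call
        return self.exclude

    def get_form_good(self, extra):
        return (self.exclude or ()) + extra  # local copy, no leak

admin = Leaky()
admin.get_form_bad(('a',))
admin.get_form_bad(('a',))
assert admin.exclude == ('a', 'a')  # the accumulation the fix avoids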
1ba74fc225d71bc071827291d9942738ef56dd1f | Correct login view | jason2506/flask-skeleton,jason2506/flask-skeleton | apps/__init__.py | apps/__init__.py | # -*- coding: utf-8 -*-
from flask import Flask
from flask.ext.login import LoginManager
from .models import db, User
from .views import module
__all__ = ('create_app',)
def _init_db(app):
db.app = app
db.init_app(app)
def _init_jinja(app):
pass
def _init_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(User.get)
login_manager.login_view = '/signin'
def create_app(name=None):
if name is None:
name = __name__
app = Flask(name)
app.config.from_object('config')
_init_db(app)
_init_jinja(app)
_init_login(app)
app.register_blueprint(module)
return app
| # -*- coding: utf-8 -*-
from flask import Flask
from flask.ext.login import LoginManager
from .models import db, User
from .views import module
__all__ = ('create_app',)
def _init_db(app):
db.app = app
db.init_app(app)
def _init_jinja(app):
pass
def _init_login(app):
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(User.get)
login_manager.login_view = '/login'
def create_app(name=None):
if name is None:
name = __name__
app = Flask(name)
app.config.from_object('config')
_init_db(app)
_init_jinja(app)
_init_login(app)
app.register_blueprint(module)
return app
| bsd-3-clause | Python |
586418860c0441eaebadd0fe79989d6d9f90fa28 | Fix for the component lookup error in vocabulary | espenmn/bda.plone.productshop,espenmn/bda.plone.productshop,espenmn/bda.plone.productshop | src/bda/plone/productshop/vocabularies.py | src/bda/plone/productshop/vocabularies.py | from zope.interface import directlyProvides
from zope.schema.interfaces import IVocabularyFactory
from zope.schema.vocabulary import (
SimpleVocabulary,
SimpleTerm,
)
from zope.i18nmessageid import MessageFactory
from .utils import (
dotted_name,
available_variant_aspects,
)
#added by espen
from zope.component import getUtility
from plone.dexterity.interfaces import IDexterityFTI
_ = MessageFactory('bda.plone.productshop')
def AvailableVariantAspectsVocabulary(context):
terms = list()
for definition in available_variant_aspects():
terms.append(SimpleTerm(value=dotted_name(definition.interface),
title=definition.title))
return SimpleVocabulary(terms)
directlyProvides(AvailableVariantAspectsVocabulary, IVocabularyFactory)
def RtfFieldsVocabulary(context):
try:
fields = getUtility(IDexterityFTI, name='bda.plone.productshop.product').lookupSchema()
except:
fields = ['Datasheet', ]
terms = [ SimpleTerm(value=pair, token=pair, title=pair) for pair in fields]
return SimpleVocabulary(terms)
directlyProvides(RtfFieldsVocabulary, IVocabularyFactory)
| from zope.interface import directlyProvides
from zope.schema.interfaces import IVocabularyFactory
from zope.schema.vocabulary import (
SimpleVocabulary,
SimpleTerm,
)
from zope.i18nmessageid import MessageFactory
from .utils import (
dotted_name,
available_variant_aspects,
)
#added by espen
from zope.component import getUtility
from plone.dexterity.interfaces import IDexterityFTI
from zope.component import ComponentLookupError
_ = MessageFactory('bda.plone.productshop')
def AvailableVariantAspectsVocabulary(context):
terms = list()
for definition in available_variant_aspects():
terms.append(SimpleTerm(value=dotted_name(definition.interface),
title=definition.title))
return SimpleVocabulary(terms)
directlyProvides(AvailableVariantAspectsVocabulary, IVocabularyFactory)
def RtfFieldsVocabulary(context):
try:
type = getUtility(IDexterityFTI, name='bda.plone.productshop.product')
fields = type.lookupSchema()
terms = [ SimpleTerm(value=pair, token=pair, title=pair) for pair in fields]
return SimpleVocabulary(terms)
except KeyError:
pass
finally:
pass
directlyProvides(RtfFieldsVocabulary, IVocabularyFactory)
| bsd-3-clause | Python |
3cade5788e55b124ce6c55350afb1beae4d3a5c3 | Update __init__.py | qpxu007/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,rpiotti/Flask-AppBuilder,qpxu007/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,rpiotti/Flask-AppBuilder,qpxu007/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,zhounanshu/Flask-AppBuilder,qpxu007/Flask-AppBuilder,rpiotti/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,dpgaspar/Flask-AppBuilder,rpiotti/Flask-AppBuilder | examples/quickhowto2/app/__init__.py | examples/quickhowto2/app/__init__.py | import logging
from flask import Flask
from flask.ext.appbuilder import SQLA, AppBuilder
from flask.ext.appbuilder.menu import Menu
#from sqlalchemy.engine import Engine
#from sqlalchemy import event
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
app = Flask(__name__)
app.config.from_object('config')
db = SQLA(app)
appbuilder = AppBuilder(app, db.session, menu=Menu(reverse=False))
"""
Only include this for SQLLite constraints
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
"""
from app import views
| import logging
from flask import Flask
from flask.ext.appbuilder import SQLA, AppBuilder
#from sqlalchemy.engine import Engine
#from sqlalchemy import event
logging.basicConfig(format='%(asctime)s:%(levelname)s:%(name)s:%(message)s')
logging.getLogger().setLevel(logging.DEBUG)
app = Flask(__name__)
app.config.from_object('config')
db = SQLA(app)
appbuilder = AppBuilder(app, db.session)
"""
Only include this for SQLLite constraints
@event.listens_for(Engine, "connect")
def set_sqlite_pragma(dbapi_connection, connection_record):
cursor = dbapi_connection.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
cursor.close()
"""
from app import views
| bsd-3-clause | Python |
14d12b1d6bbcf5784256b82f58974f02fe8d1503 | Remove unused imports. Gate tests so they do not run on Windows | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/states/test_cron.py | tests/integration/states/test_cron.py | # -*- coding: utf-8 -*-
'''
Tests for the cron state
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
# Import Salt libs
import salt.utils.platform
@skipIf(salt.utils.platform.is_windows(), 'minion is windows')
class CronTest(ModuleCase):
'''
Validate the file state
'''
def setUp(self):
'''
Setup
'''
self.run_state('user.present', name='test_cron_user')
def tearDown(self):
'''
Teardown
'''
# Remove cron file
self.run_function('cmd.run',
cmd='crontab -u test_cron_user -r')
# Delete user
self.run_state('user.absent', name='test_cron_user')
def test_managed(self):
'''
file.managed
'''
ret = self.run_state(
'cron.file',
name='salt://issue-46881/cron',
user='test_cron_user'
)
_expected = '--- \n+++ \n@@ -1 +1,2 @@\n-\n+# Lines below here are managed by Salt, do not edit\n+@hourly touch /tmp/test-file\n'
self.assertIn('changes', ret['cron_|-salt://issue-46881/cron_|-salt://issue-46881/cron_|-file'])
self.assertIn('diff', ret['cron_|-salt://issue-46881/cron_|-salt://issue-46881/cron_|-file']['changes'])
self.assertEqual(_expected, ret['cron_|-salt://issue-46881/cron_|-salt://issue-46881/cron_|-file']['changes']['diff'])
| # -*- coding: utf-8 -*-
'''
Tests for the cron state
'''
# Import Python libs
from __future__ import absolute_import, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
# Import Salt Testing libs
from tests.support.case import ModuleCase
from tests.support.unit import skipIf
from tests.support.paths import FILES, TMP, TMP_STATE_TREE
from tests.support.helpers import (
skip_if_not_root,
with_system_user_and_group,
with_tempfile,
Webserver,
)
class CronTest(ModuleCase):
'''
Validate the file state
'''
def setUp(self):
'''
Setup
'''
self.run_state('user.present', name='test_cron_user')
def tearDown(self):
'''
Teardown
'''
# Remove cron file
self.run_function('cmd.run',
cmd='crontab -u test_cron_user -r')
# Delete user
self.run_state('user.absent', name='test_cron_user')
def test_managed(self):
'''
file.managed
'''
ret = self.run_state(
'cron.file',
name='salt://issue-46881/cron',
user='test_cron_user'
)
_expected = '--- \n+++ \n@@ -1 +1,2 @@\n-\n+# Lines below here are managed by Salt, do not edit\n+@hourly touch /tmp/test-file\n'
self.assertIn('changes', ret['cron_|-salt://issue-46881/cron_|-salt://issue-46881/cron_|-file'])
self.assertIn('diff', ret['cron_|-salt://issue-46881/cron_|-salt://issue-46881/cron_|-file']['changes'])
self.assertEqual(_expected, ret['cron_|-salt://issue-46881/cron_|-salt://issue-46881/cron_|-file']['changes']['diff'])
| apache-2.0 | Python |
6493cc31acdc09df6c0ad952e95380ac31a0e504 | Update __init__.py | hronoses/vispy,sh4wn/vispy,julienr/vispy,jay3sh/vispy,sh4wn/vispy,ghisvail/vispy,jay3sh/vispy,drufat/vispy,hronoses/vispy,bollu/vispy,Eric89GXL/vispy,inclement/vispy,RebeccaWPerry/vispy,drufat/vispy,ghisvail/vispy,dchilds7/Deysha-Star-Formation,kkuunnddaannkk/vispy,srinathv/vispy,sh4wn/vispy,RebeccaWPerry/vispy,srinathv/vispy,dchilds7/Deysha-Star-Formation,sbtlaarzc/vispy,QuLogic/vispy,kkuunnddaannkk/vispy,julienr/vispy,bollu/vispy,srinathv/vispy,inclement/vispy,hronoses/vispy,bollu/vispy,inclement/vispy,kkuunnddaannkk/vispy,RebeccaWPerry/vispy,julienr/vispy,jdreaver/vispy,michaelaye/vispy,Eric89GXL/vispy,michaelaye/vispy,ghisvail/vispy,QuLogic/vispy,sbtlaarzc/vispy,Eric89GXL/vispy,sbtlaarzc/vispy,jay3sh/vispy,dchilds7/Deysha-Star-Formation,drufat/vispy,michaelaye/vispy,jdreaver/vispy,QuLogic/vispy,jdreaver/vispy | vispy/color/__init__.py | vispy/color/__init__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Convience interfaces to manipulate colors.
This module provides support for manipulating colors.
"""
__all__ = ['Color', 'ColorArray', 'Colormap',
'get_colormap', 'get_colormaps',
'get_color_names', 'get_color_dict']
from ._color_dict import get_color_names, get_color_dict # noqa
from .color_array import Color, ColorArray
from .colormap import (Colormap, # noqa
get_colormap, get_colormaps) # noqa
| # -*- coding: utf-8 -*-
# Copyright (c) 2014, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Convience interfaces to manipulate colors.
This module provides support for manipulating colors.
"""
<<<<<<< HEAD
__all__ = ['Color', 'ColorArray', 'Colormap',
'get_colormap', 'get_colormaps',
'get_color_names', 'get_color_dict']
from ._color_dict import get_color_names, get_color_dict # noqa
from .color_array import Color, ColorArray
from .colormap import (Colormap, # noqa
get_colormap, get_colormaps) # noqa
=======
__all__ = ['Color', 'ColorArray', 'LinearGradient', 'get_color_names',
'get_colormap_py']
from ._color_dict import get_color_names # noqa
from ._color import (Color, ColorArray, LinearGradient, # noqa
get_colormap, colormaps, get_colormap_py) # noqa
>>>>>>> new visuals/isocurve for tri mesh
| bsd-3-clause | Python |
a364196814c3b33e7fd51a42b4c3a48a3aaeaee8 | Update list of extraordinary gentlemen | RealDolos/volaparrot | volaparrot/constants.py | volaparrot/constants.py | ADMINFAG = ["RealDolos"]
PARROTFAG = "Parrot"
BLACKFAGS = [i.casefold() for i in (
"kalyx", "merc", "loliq", "annoying", "RootBeats", "JEW2FORU", "quag", "mire", "perici", "Voldemort", "briseis", "brisis", "GNUsuks", "rhooes", "n1sm4n", "honeyhole", "Printer", "yume1")]
OBAMAS = [i.casefold() for i in (
"counselor", "briseis", "apha", "bread", "ark3", "jizzbomb", "acid", "elkoalemos", "tarta", "counselor", "myon")]
BLACKROOMS = "e7u-CG", "jAzmc3", "f66jeG", "24_zFd", "BHfjGvT", "BHI0pxg",
WHITEROOMS = "BEEPi",
| ADMINFAG = ["RealDolos"]
PARROTFAG = "Parrot"
BLACKFAGS = [i.casefold() for i in (
"kalyx", "merc", "loliq", "annoying", "bot", "RootBeats", "JEW2FORU", "quag", "mire", "perici")]
OBAMAS = [i.casefold() for i in (
"counselor", "briseis", "apha", "bread", "ark3", "jizzbomb", "acid", "elkoalemos", "tarta")]
BLACKROOMS = "e7u-CG", "jAzmc3", "f66jeG", "24_zFd"
WHITEROOMS = "9pdLvy"
| mit | Python |
84965de8b53dcb5b10788808a2bf135df82cd4d9 | Update dev DB to sqlite3 | voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo,voer-platform/vp.repo | vpr/vpr/settings/dev.py | vpr/vpr/settings/dev.py | # Django settings for vpr project.
from base import *
DEBUG = True
DEVELOPMENT = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'vpr.sqlite3', # Or path to database file if using sqlite3.
#'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        #'NAME': 'vpr_dev',                     # Or path to database file if using sqlite3.
'USER': 'vpr', # Not used with sqlite3.
'PASSWORD': 'vpr', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
}
}
# Make this unique, and don't share it with anybody.
#SECRET_KEY = 'kw7#s$8t&6d9*7*$a$(gui0r1ze7f#u%(hua=^a3u66+vyj+9g'
ROOT_URLCONF = 'vpr.urls.dev'
INSTALLED_APPS += (
'django_extensions',
)
| # Django settings for vpr project.
from base import *
DEBUG = True
DEVELOPMENT = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
#'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
#'NAME': 'vpr.sqlite3', # Or path to database file if using sqlite3.
'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'vpr_dev', # Or path to database file if using sqlite3.
'USER': 'vpr', # Not used with sqlite3.
'PASSWORD': 'vpr', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '3306', # Set to empty string for default. Not used with sqlite3.
}
}
# Make this unique, and don't share it with anybody.
#SECRET_KEY = 'kw7#s$8t&6d9*7*$a$(gui0r1ze7f#u%(hua=^a3u66+vyj+9g'
ROOT_URLCONF = 'vpr.urls.dev'
INSTALLED_APPS += (
'django_extensions',
)
| agpl-3.0 | Python |
52f9d95582456754aa4e9f9cd5d59a308c9e256c | remove comments | curtiswest/pepi,curtiswest/pepi,curtiswest/pepi,curtiswest/pepi | server.py | server.py | import socket
import communication
import time
import cv2
import numpy as np
from picamera import PiCamera
import sys
import signal
run_condition = True
def signal_handler(signal, frame):
print('Exiting...')
global run_condition
run_condition = False
sys.exit(0)
def generateRandomImg():
z = np.random.random((500, 500)) # Test data
print z.dtype
return z
def getCameraStill():
with PiCamera() as camera:
camera.resolution=(500,500)
camera.capture('temp.bmp')
data = np.asarray(cv2.imread('temp.bmp'), dtype='uint16')
return data
def getData():
z = getCameraStill()
return z
def waitForClient(sock):
connection, address = sock.accept()
print "sending ", communication.SERVER_READY
communication.send_msg(connection, communication.SERVER_READY)
msg = communication.recv_msg(connection)
print "received ", msg
return connection
camera_id = sys.argv[1].zfill(2)
signal.signal(signal.SIGINT, signal_handler)
print 'starting server ', camera_id
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(("", 10006))
server_socket.listen(5)
while(run_condition):
try:
connection = waitForClient(server_socket)
print "sending ", camera_id
communication.send_msg(connection, camera_id)
print "received ", communication.recv_msg(connection)
data = getData()
print "sending image data"
communication.send_img(connection, data)
print "closing connection"
connection.close()
except:
print "Server failure, resetting connection"
server_socket.close()
| import socket
import communication
import time
import cv2
import numpy as np
from picamera import PiCamera
import sys
import signal
run_condition = True
def signal_handler(signal, frame):
print('Exiting...')
global run_condition
run_condition = False
sys.exit(0)
def generateRandomImg():
z = np.random.random((500, 500)) # Test data
print z.dtype
return z
def getCameraStill():
with PiCamera() as camera:
camera.resolution= (500,500)
camera.capture('temp.bmp')
data = np.asarray(cv2.imread('temp.bmp'), dtype='uint16')
return data
def getData():
z = getCameraStill()
return z
def waitForClient(sock):
connection, address = sock.accept()
#send SERVER_READY
print "sending ", communication.SERVER_READY
communication.send_msg(connection, communication.SERVER_READY)
#receive CLIENT_READY
msg = communication.recv_msg(connection)
print "received ", msg
return connection
camera_id = sys.argv[1].zfill(2)
signal.signal(signal.SIGINT, signal_handler)
print 'starting server ', camera_id
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(("", 10006))
server_socket.listen(5)
while(run_condition):
try:
connection = waitForClient(server_socket)
#send camera id
print "sending ", camera_id
communication.send_msg(connection, camera_id)
#receive CAMERA_ID_ACK
print "received ", communication.recv_msg(connection)
data = getData()
print "sending image data"
communication.send_img(connection, data)
print "closing connection"
connection.close()
except:
print "Server failure, resetting connection"
server_socket.close()
| apache-2.0 | Python |
95495c1b0ae1a696b95eb486c7a40d54f507dacb | Add cross origin requests. | Imvoo/MiniZinc-server,Imvoo/MiniZinc-server,Imvoo/MiniZinc-server | server.py | server.py | import pymzn
import os
from subprocess import Popen, PIPE
from flask import Flask, json, Response
app = Flask(__name__)
folder = 'models' #where the .mzn files are stored
models = []
for file in os.listdir(folder):
if file.endswith('.mzn'):
models.append(file)
@app.route('/')
def Allmodels():
return json.jsonify(result=models)
#inputs models mustn't 'output'
@app.route('/model/<string:model>.json')
def Model(model):
if (model+".mzn" in models):
def output_line():
with Popen(["minizinc", folder + '/' + model+".mzn", "-a"], stdout=PIPE, bufsize=1, universal_newlines=True) as p: #-a outputs all solutions
for line in p.stdout:
markup = ['----------','==========']
if line.rstrip() not in markup: #each new solution is a new JSON object
yield str(pymzn.parse_dzn(line)) #use pymzn to turn output into nice JSON objects
return Response(output_line(), mimetype='text/json')
else:
return json.jsonify(model="no model found")
# TODO: Unsure if this is safe security wise, have to look into it.
# aka. CORS request.
@app.after_request
def after_request(response):
response.headers.add('Access-Control-Allow-Origin', '*')
response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')
response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE')
return response
| import pymzn
import os
from subprocess import Popen, PIPE
from flask import Flask, json, Response
app = Flask(__name__)
folder = 'models' #where the .mzn files are stored
models = []
for file in os.listdir(folder):
if file.endswith('.mzn'):
models.append(file)
@app.route('/')
def Allmodels():
return json.jsonify(result=models)
#inputs models mustn't 'output'
@app.route('/model/<string:model>.json')
def Model(model):
if (model+".mzn" in models):
def output_line():
with Popen(["MiniZinc", folder + '/' + model+".mzn", "-a"], stdout=PIPE, bufsize=1, universal_newlines=True) as p: #-a outputs all solutions
for line in p.stdout:
markup = ['----------','==========']
if line.rstrip() not in markup: #each new solution is a new JSON object
yield str(pymzn.parse_dzn(line)) #use pymzn to turn output into nice JSON objects
return Response(output_line(), mimetype='text/json')
else:
return json.jsonify(model="no model found") | mit | Python |
eb889d08c6031ba6c2b55a883d09e22f5684f0ee | Add mode (training|arena) option to the command line | miguel89/vinidium | server.py | server.py | #!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
def get_new_game_state(server_host, key, mode='training', number_of_turns = '20'):
if(mode=='training'):
params = { 'key': key, 'turns': number_of_turns}
r = requests.post(server_host + '/api/training', params)
if(r.status_code == 200):
return r.json()
else:
print("Error when creating the game")
print(r.text)
else:
pass
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_host, key, mode, bot, number_of_games = 20):
def play(state, games_played = 0):
if (state['game']['finished']):
games_played += 1
print('Game finished: %d/%d' % (games_played, number_of_games))
if(games_played < number_of_games):
print('asking a new game')
                state = get_new_game_state(server_host, key, mode)
play(state, games_played)
else:
url = state['playUrl']
direction = bot.move(state)
new_state = move(url, direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(new_state, games_played)
state = get_new_game_state(server_host, key, mode)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 4):
start(sys.argv[1], sys.argv[2], sys.argv[3], RandomBot(), int(sys.argv[4]))
else:
print("Usage: %s <server> <key> <[training|arena]> <number-of-games-to-play>" % (sys.argv[0]))
print('Example: %s http://localhost:9000 mySecretKey training 20' % (sys.argv[0]))
| #!/usr/bin/env python
import os
import sys
import requests
import re
from bot import RandomBot
def get_new_game_state(server_host, key, number_of_turns = '20', mode='training'):
if(mode=='training'):
params = { 'key': key, 'turns': number_of_turns}
r = requests.post(server_host + '/api/training', params)
if(r.status_code == 200):
return r.json()
else:
print("Error when creating the game")
print(r.text)
else:
pass
def move(url, direction):
r = requests.post(url, {'dir': direction})
return r.json()
def start(server_host, key, bot, number_of_games = 20):
def play(state, games_played = 0):
if (state['game']['finished']):
games_played += 1
print('Game finished: %d/%d' % (games_played, number_of_games))
if(games_played < number_of_games):
print('asking a new game')
state = get_new_game_state(server_host, key)
play(state, games_played)
else:
url = state['playUrl']
direction = bot.move(state)
new_state = move(url, direction)
print("Playing turn %d with direction %s" % (state['game']['turn'], direction))
play(new_state, games_played)
state = get_new_game_state(server_host, key)
print("Start: " + state['viewUrl'])
play(state)
if __name__ == "__main__":
if (len(sys.argv) > 3):
start(sys.argv[1], sys.argv[2], RandomBot(), int(sys.argv[3]))
else:
print("Usage: %s <server> <key> <number-of-games-to-play>" % (sys.argv[0]))
print('Example: %s http://localhost:9000 mySecretKey 20' % (sys.argv[0]))
| apache-2.0 | Python |
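play() above recurses once per turn and per game, which can hit Python's recursion limit on long sessions. A hedged iterative rewrite with the same behavior, reusing get_new_game_state() and move() from this row:
def start_iterative(server_host, key, mode, bot, number_of_games=20):
    games_played = 0
    state = get_new_game_state(server_host, key, mode)
    while state is not None and games_played < number_of_games:
        if state['game']['finished']:
            games_played += 1
            if games_played < number_of_games:
                state = get_new_game_state(server_host, key, mode)
        else:
            # Advance one turn; move() returns the next game state.
            state = move(state['playUrl'], bot.move(state))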
7542d224d2ab15adec31a2d896a22cab6a7a2b37 | add log | yiplee/ltc-huobi,yiplee/ltc-huobi,yiplee/ltc-huobi | spider.py | spider.py | # -*- coding: utf-8 -*-
import http.client
from ltc.models import Record
import json
def get_current_price():
conn = http.client.HTTPSConnection('api.huobi.com')
conn.request('GET','/staticmarket/ticker_ltc_json.js')
    r = conn.getresponse()
if r.status == 200 :
data = r.read()
string = data.decode('utf8').replace("'", '"')
json_data = json.loads(string)
        pretty = json.dumps(json_data, indent=4, sort_keys=True)
        print(pretty)
        price = json_data['ticker']['last']
        date = json_data['time']
        record = Record.create(price, date)
        record.save()
if __name__ == '__main__':
get_current_price()
| # -*- coding: utf-8 -*-
import http.client
from ltc.models import Record
import json
def get_current_price():
conn = http.client.HTTPSConnection('api.huobi.com')
conn.request('GET','/staticmarket/ticker_ltc_json.js')
    r = conn.getresponse()
if r.status == 200 :
data = r.read()
string = data.decode('utf8').replace("'", '"')
json_data = json.loads(string)
        pretty = json.dumps(json_data, indent=4, sort_keys=True)
        price = json_data['ticker']['last']
        date = json_data['time']
        record = Record.create(price, date)
        record.save()
if __name__ == '__main__':
get_current_price()
| mit | Python |
195e5cde1d81d7f73b77ce1b1c52f0beea8a2595 | Fix tclist iterator slicing | uranusjr/ctypes-ejdb | ejdb/tc.py | ejdb/tc.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numbers
from . import c
from .utils import CObjectWrapper
class ListIterator(CObjectWrapper):
"""Python iterator wrapper for a `TCLIST *`.
"""
def __init__(self, wrapped, count=None):
super(ListIterator, self).__init__(
wrapped=wrapped, finalizer=c.tc.listdel,
)
if count is None:
count = c.tc.listnum(wrapped)
self._len = count
self._i = 0
def __iter__(self): # pragma: no cover
return self
def __len__(self):
return self._len
def __getitem__(self, key):
if isinstance(key, slice):
return [self[i] for i in range(*key.indices(len(self)))]
elif isinstance(key, numbers.Number):
if key >= len(self):
raise IndexError('Iterator index out of range.')
value_p = c.tc.listval2(self._wrapped, key)
return self.instantiate(value_p)
return NotImplemented
def __next__(self):
if self._i >= self._len:
raise StopIteration
value_p = c.tc.listval2(self._wrapped, self._i)
self._i += 1
return self.instantiate(value_p)
def next(self): # pragma: no cover
"""Python 2 compatibility.
"""
return self.__next__()
def instantiate(self, value_p):
"""Subclasses should override this method to instantiate an item during
iteration.
:param value_p: Points to the current TCList iterator value of type
`c_void_p`.
"""
raise NotImplementedError
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numbers
from . import c
from .utils import CObjectWrapper
class ListIterator(CObjectWrapper):
"""Python iterator wrapper for a `TCLIST *`.
"""
def __init__(self, wrapped, count=None):
super(ListIterator, self).__init__(
wrapped=wrapped, finalizer=c.tc.listdel,
)
if count is None:
count = c.tc.listnum(wrapped)
self._len = count
self._i = 0
def __iter__(self): # pragma: no cover
return self
def __len__(self):
return self._len
def __getitem__(self, key):
if isinstance(key, slice):
return [self[i] for i in range(key.indices(len(self)))]
elif isinstance(key, numbers.Number):
if key >= len(self):
raise IndexError('Iterator index out of range.')
value_p = c.tc.listval2(self._wrapped, key)
return self.instantiate(value_p)
return NotImplemented
def __next__(self):
if self._i >= self._len:
raise StopIteration
value_p = c.tc.listval2(self._wrapped, self._i)
self._i += 1
return self.instantiate(value_p)
def next(self): # pragma: no cover
"""Python 2 compatibility.
"""
return self.__next__()
def instantiate(self, value_p):
"""Subclasses should override this method to instantiate an item during
iteration.
:param value_p: Points to the current TCList iterator value of type
`c_void_p`.
"""
raise NotImplementedError
| bsd-2-clause | Python |
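The slicing fix in this row hinges on slice.indices(): it clamps a slice to a given length and returns a (start, stop, step) tuple, so range() needs the '*' unpacking that the old code was missing.
s = slice(1, 10, 2)
assert s.indices(4) == (1, 4, 2)
assert list(range(*s.indices(4))) == [1, 3]
# range(s.indices(4)) would raise TypeError: range() expects integers, not a tuple.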
93ed74d3c904c743f91f0c9e81f6d36bda731a10 | modify example.py | minoku/FixQueue | example.py | example.py | #! /usr/bin/python
import FixQueue
if __name__ == "__main__" :
    fq = FixQueue.FixQueue(2) # 2 means queue size
fq.append('a')
fq.append('b')
print (fq) # ['a', 'b']
fq.append('c')
print (fq) # ['b', 'c']
print (fq.pop()) # b
    print (fq) # ['c']
| #! /usr/bin/python
from FixQueue import FixQueue
if __name__ == "__main__" :
fq = FixQueue(2) # 2 means queue size
fq.append('a')
fq.append('b')
print (fq) # ['a', 'b']
fq.append('c')
print (fq) # ['b', 'c']
print (fq.pop()) # b
    print (fq) # ['c']
| mit | Python |
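The FixQueue class itself is not part of this row; a collections.deque with maxlen shows the same drop-oldest append behavior the example expects (hedged sketch, with popleft() as the FIFO pop):
from collections import deque

fq = deque(maxlen=2)
fq.append('a')
fq.append('b')
fq.append('c')  # 'a' is dropped: deque(['b', 'c'])
assert fq.popleft() == 'b'
assert list(fq) == ['c']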
80adb43a22f82a036d72bdd398d45ee4daa9a5ff | Update example.py | sskender/pornhub-api | example.py | example.py | import pornhub
search_keywords = []
#client = pornhub.PornHub("5.135.164.72", 3128, search_keywords)
#With proxy, given a Proxy IP and Port. For the countries with restricted access like Turkey, etc.
client = pornhub.PornHub(search_keywords)
for star in client.getStars(10):
print(star)
print(star["name"])
for video in client.getVideos(10,page=2):
print(video)
for photo_url in client.getPhotos(5):
print(photo_url)
video = client.getVideo("SOME VIDEO URL")
print(video)
print(video['accurate_views'])
| import pornhub
search_keywords = []
#client = pornhub.PornHub("5.135.164.72", 3128, search_keywords)
#With proxy, given a Proxy IP and Port. For the countries with restricted access like Turkey, etc.
client = pornhub.PornHub(search_keywords)
for star in client.getStars(10):
print(star)
print(star["name"])
for video in client.getVideos(10,page=2):
print(video)
for photo_url in client.getPhotos(5):
print(photo_url) | mit | Python |
4053baa698f1047f09540c9ff3d6e8a08d844335 | Comment example | hickford/MechanicalSoup,hemberger/MechanicalSoup,MechanicalSoup/MechanicalSoup | example.py | example.py | """Example app to login to GitHub"""
import argparse
import mechanicalsoup
parser = argparse.ArgumentParser(description='Login to GitHub.')
parser.add_argument("username")
parser.add_argument("password")
args = parser.parse_args()
browser = mechanicalsoup.Browser()
# request github login page
login_page = browser.get("https://github.com/login")
# find login form
login_form = login_page.soup.select("#login")[0].select("form")[0]
# specify username and password
login_form.select("#login_field")[0]['value'] = args.username
login_form.select("#password")[0]['value'] = args.password
# submit!
page2 = browser.submit(login_page.response, login_form)
# verify we are now logged in
assert page2.soup.select(".logout-form")
# verify we remain logged in (thanks to cookies) as we browse the rest of the site
page3 = browser.get("https://github.com/matt-hickford/MechanicalSoup")
assert page3.soup.select(".logout-form")
| """Example app to login to GitHub"""
import argparse
import mechanicalsoup
parser = argparse.ArgumentParser(description='Login to GitHub.')
parser.add_argument("username")
parser.add_argument("password")
args = parser.parse_args()
browser = mechanicalsoup.Browser()
# request github login page
login_page = browser.get("https://github.com/login")
# find login form
login_form = login_page.soup.select("#login")[0].select("form")[0]
# specify username and password
login_form.select("#login_field")[0]['value'] = args.username
login_form.select("#password")[0]['value'] = args.password
page2 = browser.submit(login_page.response, login_form)
assert page2.soup.select(".logout-form") | mit | Python |
72b3642953d0e14d4b4c9ec03560a96d259f7d16 | Remove monkey patching in favor of inheritance for SpatialReference | bkg/greenwich | contones/srs.py | contones/srs.py | """Spatial reference systems"""
from osgeo import osr
class BaseSpatialReference(osr.SpatialReference):
"""Base class for extending osr.SpatialReference."""
def __repr__(self):
return self.wkt
@property
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
@property
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
@property
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
class SpatialReference(object):
"""A spatial reference."""
def __new__(cls, sref):
"""Returns a new BaseSpatialReference instance
This allows for customized construction of osr.SpatialReference which
has no init method which precludes the use of super().
"""
sr = BaseSpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
| """Spatial reference systems"""
from osgeo import osr
# Monkey patch SpatialReference since inheriting from SWIG classes is a hack
def srid(self):
"""Returns the EPSG ID as int if it exists."""
epsg_id = (self.GetAuthorityCode('PROJCS') or
self.GetAuthorityCode('GEOGCS'))
try:
return int(epsg_id)
except TypeError:
return
osr.SpatialReference.srid = property(srid)
def wkt(self):
"""Returns this projection in WKT format."""
return self.ExportToWkt()
osr.SpatialReference.wkt = property(wkt)
def proj4(self):
"""Returns this projection as a proj4 string."""
return self.ExportToProj4()
osr.SpatialReference.proj4 = property(proj4)
def __repr__(self): return self.wkt
osr.SpatialReference.__repr__ = __repr__
class SpatialReference(object):
def __new__(cls, sref):
sr = osr.SpatialReference()
if isinstance(sref, int):
sr.ImportFromEPSG(sref)
elif isinstance(sref, str):
if sref.strip().startswith('+proj='):
sr.ImportFromProj4(sref)
else:
sr.ImportFromWkt(sref)
# Add EPSG authority if applicable
sr.AutoIdentifyEPSG()
else:
raise TypeError('Cannot create SpatialReference '
'from {}'.format(str(sref)))
return sr
| bsd-3-clause | Python |
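Hypothetical usage of the SpatialReference factory above (requires the GDAL/osgeo bindings): __new__ returns a BaseSpatialReference, so the srid/wkt/proj4 properties defined in this row are available on the result.
sr = SpatialReference(4326)
assert sr.srid == 4326
print(sr.proj4)  # e.g. '+proj=longlat +datum=WGS84 +no_defs '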
aef91895b31a615bb9af4cfae96aae97d91cad91 | Add Intercept transformer. | madrury/basis-expansions | dftransformers.py | dftransformers.py | import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
class ColumnSelector(BaseEstimator, TransformerMixin):
"""Transformer that selects a column in a numpy array or DataFrame
by index or name.
"""
def __init__(self, idxs=None, name=None):
        self.idxs = idxs
self.name = name
def fit(self, *args, **kwargs):
return self
def transform(self, X, **transform_params):
        # Need to treat pandas data frames and numpy arrays slightly differently.
if isinstance(X, pd.DataFrame) and self.idxs:
return X.iloc[:, self.idxs]
if isinstance(X, pd.DataFrame) and self.name:
return X[self.name]
return X[:, self.idxs]
class FeatureUnion(TransformerMixin):
def __init__(self, transformer_list):
self.transformer_list = transformer_list
def fit(self, X, y=None):
for _, t in self.transformer_list:
t.fit(X, y)
def transform(self, X, *args, **kwargs):
Xs = [t.transform(X) for _, t in self.transformer_list]
if isinstance(X, pd.DataFrame):
return pd.concat(Xs, axis=1)
return np.hstack(Xs)
class Intercept(TransformerMixin):
def fit(self, *args, **kwargs):
return self
def transform(self, X, *args, **kwargs):
if isinstance(X, pd.DataFrame) or isinstance(X, pd.Series):
return pd.Series(np.ones(X.shape[0]),
index=X.index, name="intercept")
return np.ones(X.shape[0])
| import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
class ColumnSelector(BaseEstimator, TransformerMixin):
"""Transformer that selects a column in a numpy array or DataFrame
by index or name.
"""
def __init__(self, idxs=None, name=None):
        self.idxs = idxs
self.name = name
def fit(self, *args, **kwargs):
return self
def transform(self, X, **transform_params):
        # Need to treat pandas data frames and numpy arrays slightly differently.
if isinstance(X, pd.DataFrame) and self.idxs:
return X.iloc[:, self.idxs]
if isinstance(X, pd.DataFrame) and self.name:
return X[self.name]
return X[:, self.idxs]
class FeatureUnion(TransformerMixin):
def __init__(self, transformer_list):
self.transformer_list = transformer_list
def fit(self, X, y=None):
for _, t in self.transformer_list:
t.fit(X, y)
def transform(self, X, *args, **kwargs):
Xs = [t.transform(X) for _, t in self.transformer_list]
if isinstance(X, pd.DataFrame):
return pd.concat(Xs, axis=1)
return np.hstack(Xs)
| bsd-3-clause | Python |
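A hedged usage sketch of the transformers in this row on a small DataFrame; FeatureUnion.fit returns None here, so fit and transform are called separately rather than chained:
import pandas as pd

df = pd.DataFrame({"x": [1.0, 2.0], "y": [3.0, 4.0]})
union = FeatureUnion([
    ("intercept", Intercept()),
    ("x", ColumnSelector(name="x")),
])
union.fit(df)
out = union.transform(df)  # two columns: intercept and x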
4a67891a1b8a96fcc666e12b2d4c27e3598d20a2 | add support email | it-projects-llc/website-addons,it-projects-llc/website-addons,it-projects-llc/website-addons | product_price_factor_online/__openerp__.py | product_price_factor_online/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': "Product price factor for web shop",
'summary': """Multiplies price depending on product attributes""",
'category': 'Website',
'license': 'GPL-3',
'author': "IT-Projects LLC, Ildar Nasyrov",
'price': 20.00,
'currency': 'EUR',
'images': ['images/1.png'],
"support": "[email protected]",
'website': "https://twitter.com/nasyrov_ildar",
'depends': ['website_sale', 'product_price_factor'],
'data': [
'views/templates.xml',
],
'auto_install': True,
}
| # -*- coding: utf-8 -*-
{
'name': "Product price factor for web shop",
'summary': """Multiplies price depending on product attributes""",
'category': 'Website',
'license': 'GPL-3',
'author': "IT-Projects LLC, Ildar Nasyrov",
'price': 20.00,
'currency': 'EUR',
'images': ['images/1.png'],
'website': "https://twitter.com/nasyrov_ildar",
'depends': ['website_sale', 'product_price_factor'],
'data': [
'views/templates.xml',
],
'auto_install': True,
}
| mit | Python |
6853fd1e45370a9db650b0983e9835c14ea9209e | Add diagnostics endpoint | c-w/gutenberg-http,c-w/gutenberg-http | gutenberg_http/views.py | gutenberg_http/views.py | from sanic.exceptions import RequestTimeout
from sanic.request import Request
from sanic.response import json
from gutenberg_http import app
from gutenberg_http.errors import InvalidUsage
from gutenberg_http.logic import body as _body
from gutenberg_http.logic import metadata as _metadata
from gutenberg_http.logic import search as _search
@app.route('/texts/<text_id:int>')
def metadata(request: Request, text_id: int):
include = _metadata(text_id, request.args.get('include'))
return json({'text_id': text_id, 'metadata': include})
# noinspection PyUnusedLocal
@app.route('/texts/<text_id:int>/body')
def body(request: Request, text_id: int):
fulltext = _body(text_id)
return json({'text_id': text_id, 'body': fulltext})
# noinspection PyUnusedLocal
@app.route('/search/<query>')
def search(request: Request, query: str):
results = _search(query, request.args.get('include'))
return json({'texts': results})
# noinspection PyUnusedLocal
@app.exception(InvalidUsage)
def bad_request(request: Request, exception: InvalidUsage):
error = {'error': 'invalid_usage', 'message': exception.message}
return json(error, exception.status_code)
# noinspection PyUnusedLocal
@app.exception(RequestTimeout)
def timeout(request: Request, exception: RequestTimeout):
error = {'error': 'timeout', 'message': 'The request timed out.'}
return json(error, exception.status_code)
# noinspection PyUnusedLocal
@app.exception(Exception)
def on_exception(request: Request, exception: Exception):
error = {'error': exception.__class__.__name__, 'message': str(exception)}
return json(error, getattr(exception, 'status_code', 500))
# noinspection PyUnusedLocal,PyProtectedMember
@app.route('/healthcheck')
def healthcheck(request: Request):
return json({
'caches': {
'metadata': _metadata.cache_info()._asdict(),
'body': _body.cache_info()._asdict(),
'search': _search.cache_info()._asdict(),
}
})
| from sanic.exceptions import RequestTimeout
from sanic.request import Request
from sanic.response import json
from gutenberg_http import app
from gutenberg_http.errors import InvalidUsage
from gutenberg_http.logic import body as _body
from gutenberg_http.logic import metadata as _metadata
from gutenberg_http.logic import search as _search
@app.route('/texts/<text_id:int>')
def metadata(request: Request, text_id: int):
include = _metadata(text_id, request.args.get('include'))
return json({'text_id': text_id, 'metadata': include})
# noinspection PyUnusedLocal
@app.route('/texts/<text_id:int>/body')
def body(request: Request, text_id: int):
fulltext = _body(text_id)
return json({'text_id': text_id, 'body': fulltext})
# noinspection PyUnusedLocal
@app.route('/search/<query>')
def search(request: Request, query: str):
results = _search(query, request.args.get('include'))
return json({'texts': results})
# noinspection PyUnusedLocal
@app.exception(InvalidUsage)
def bad_request(request: Request, exception: InvalidUsage):
error = {'error': 'invalid_usage', 'message': exception.message}
return json(error, exception.status_code)
# noinspection PyUnusedLocal
@app.exception(RequestTimeout)
def timeout(request: Request, exception: RequestTimeout):
error = {'error': 'timeout', 'message': 'The request timed out.'}
return json(error, exception.status_code)
# noinspection PyUnusedLocal
@app.exception(Exception)
def on_exception(request: Request, exception: Exception):
error = {'error': exception.__class__.__name__, 'message': str(exception)}
return json(error, getattr(exception, 'status_code', 500))
| apache-2.0 | Python |
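The healthcheck above serializes cache statistics with cache_info()._asdict(); that works because functools.lru_cache (which the logic helpers are presumably wrapped with) reports a CacheInfo namedtuple:
from functools import lru_cache

@lru_cache(maxsize=128)
def square(n):
    return n * n

square(2)
square(2)
info = square.cache_info()._asdict()
# {'hits': 1, 'misses': 1, 'maxsize': 128, 'currsize': 1}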
39f0e8ed68bea6318bcb58801b5368428582f549 | remove SF checks from user api endpoint | openstax/openstax-cms,Connexions/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,openstax/openstax-cms,Connexions/openstax-cms | api/views.py | api/views.py | from django.core.management import call_command
from django.utils.six import StringIO
from rest_framework import viewsets
from salesforce.models import Adopter
from salesforce.functions import check_if_faculty_pending
from social.apps.django_app.default.models import \
DjangoStorage as SocialAuthStorage
from wagtail.wagtailimages.models import Image
from .serializers import AdopterSerializer, ImageSerializer, UserSerializer
class AdopterViewSet(viewsets.ModelViewSet):
queryset = Adopter.objects.all()
serializer_class = AdopterSerializer
class ImageViewSet(viewsets.ModelViewSet):
queryset = Image.objects.all()
serializer_class = ImageSerializer
class UserView(viewsets.ModelViewSet):
serializer_class = UserSerializer
def get_queryset(self):
user = self.request.user
return [user]
| from django.core.management import call_command
from django.utils.six import StringIO
from rest_framework import viewsets
from salesforce.models import Adopter
from salesforce.functions import check_if_faculty_pending
from social.apps.django_app.default.models import \
DjangoStorage as SocialAuthStorage
from wagtail.wagtailimages.models import Image
from .serializers import AdopterSerializer, ImageSerializer, UserSerializer
class AdopterViewSet(viewsets.ModelViewSet):
queryset = Adopter.objects.all()
serializer_class = AdopterSerializer
class ImageViewSet(viewsets.ModelViewSet):
queryset = Image.objects.all()
serializer_class = ImageSerializer
class UserView(viewsets.ModelViewSet):
serializer_class = UserSerializer
def get_queryset(self):
user = self.request.user
try:
social_auth = SocialAuthStorage.user.get_social_auth_for_user(user)
user.accounts_id = social_auth[0].uid
except:
user.accounts_id = None
try:
out = StringIO()
call_command('update_faculty_status', str(user.pk), stdout=out)
except:
pass
# check if there is a record in salesforce for this user - if so, they are pending verification
user.pending_verification = check_if_faculty_pending(user.pk)
return [user]
| agpl-3.0 | Python |
d994337007eb9cfe41edef591cbd30765660a822 | Prepare for next development iteration | toidi/hadoop-yarn-api-python-client | yarn_api_client/__init__.py | yarn_api_client/__init__.py | # -*- coding: utf-8 -*-
__version__ = '0.3.8.dev'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
| # -*- coding: utf-8 -*-
__version__ = '0.3.7'
__all__ = ['ApplicationMaster', 'HistoryServer', 'NodeManager', 'ResourceManager']
from .application_master import ApplicationMaster
from .history_server import HistoryServer
from .node_manager import NodeManager
from .resource_manager import ResourceManager
| bsd-3-clause | Python |
e3ff630917d6bb0f744330abace7c41d2b48ba8f | Add "pip_install" task | Turbo87/skylines,Harry-R/skylines,skylines-project/skylines,Turbo87/skylines,RBE-Avionik/skylines,Turbo87/skylines,Turbo87/skylines,kerel-fs/skylines,skylines-project/skylines,shadowoneau/skylines,kerel-fs/skylines,Harry-R/skylines,skylines-project/skylines,kerel-fs/skylines,shadowoneau/skylines,shadowoneau/skylines,Harry-R/skylines,RBE-Avionik/skylines,Harry-R/skylines,shadowoneau/skylines,skylines-project/skylines,RBE-Avionik/skylines,RBE-Avionik/skylines | fabfile.py | fabfile.py | from fabric.api import env, task, local, cd, run, sudo, put
from tempfile import NamedTemporaryFile
env.use_ssh_config = True
env.hosts = ['skylines@skylines']
APP_DIR = '/home/skylines'
SRC_DIR = '%s/src' % APP_DIR
@task
def deploy(branch='master', force=False):
push(branch, force)
restart()
@task
def push(branch='master', force=False):
cmd = 'git push %s:%s %s:master' % (env.host_string, SRC_DIR, branch)
if force:
cmd += ' --force'
local(cmd)
@task
def restart():
with cd(SRC_DIR):
run('git reset --hard')
# compile i18n .mo files
manage('babel compile')
# generate JS/CSS assets
manage('assets build')
# do database migrations
manage('migrate upgrade')
# restart services
restart_service('skylines-api')
restart_service('skylines')
restart_service('mapserver')
restart_service('tracking')
restart_service('celery')
restart_service('mapproxy')
@task
def restart_service(service):
# Using the sudo() command somehow always provokes a password prompt,
# even if NOPASSWD is specified in the sudoers file...
run('sudo supervisorctl restart %s' % service)
@task
def manage(cmd, user=None):
with cd(SRC_DIR):
if user:
sudo('./manage.py %s' % cmd, user=user)
else:
run('./manage.py %s' % cmd)
@task
def update_mapproxy():
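    # Rewrite the sample mapproxy config so its cache paths live under APP_DIR, then upload it.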
with NamedTemporaryFile() as f:
content = open('mapserver/mapproxy/mapproxy.yaml').read()
content = content.replace(
'base_dir: \'/tmp/cache_data\'',
'base_dir: \'%s/cache/mapproxy\'' % APP_DIR,
)
content = content.replace(
'lock_dir: \'/tmp/cache_data/tile_locks\'',
'lock_dir: \'%s/cache/mapproxy/tile_locks\'' % APP_DIR,
)
f.write(content)
f.flush()
put(f.name, '%s/config/mapproxy.yaml' % APP_DIR)
@task
def pip_install():
with cd(SRC_DIR):
run('git reset --hard')
run('pip install -e .')
@task
def clean_mapproxy_cache():
with cd('/home/skylines/cache/mapproxy'):
run('rm -rv *')
| from fabric.api import env, task, local, cd, run, sudo, put
from tempfile import NamedTemporaryFile
env.use_ssh_config = True
env.hosts = ['skylines@skylines']
APP_DIR = '/home/skylines'
SRC_DIR = '%s/src' % APP_DIR
@task
def deploy(branch='master', force=False):
push(branch, force)
restart()
@task
def push(branch='master', force=False):
cmd = 'git push %s:%s %s:master' % (env.host_string, SRC_DIR, branch)
if force:
cmd += ' --force'
local(cmd)
@task
def restart():
with cd(SRC_DIR):
run('git reset --hard')
# compile i18n .mo files
manage('babel compile')
# generate JS/CSS assets
manage('assets build')
# do database migrations
manage('migrate upgrade')
# restart services
restart_service('skylines-api')
restart_service('skylines')
restart_service('mapserver')
restart_service('tracking')
restart_service('celery')
restart_service('mapproxy')
@task
def restart_service(service):
# Using the sudo() command somehow always provokes a password prompt,
# even if NOPASSWD is specified in the sudoers file...
run('sudo supervisorctl restart %s' % service)
@task
def manage(cmd, user=None):
with cd(SRC_DIR):
if user:
sudo('./manage.py %s' % cmd, user=user)
else:
run('./manage.py %s' % cmd)
@task
def update_mapproxy():
with NamedTemporaryFile() as f:
content = open('mapserver/mapproxy/mapproxy.yaml').read()
content = content.replace(
'base_dir: \'/tmp/cache_data\'',
'base_dir: \'%s/cache/mapproxy\'' % APP_DIR,
)
content = content.replace(
'lock_dir: \'/tmp/cache_data/tile_locks\'',
'lock_dir: \'%s/cache/mapproxy/tile_locks\'' % APP_DIR,
)
f.write(content)
f.flush()
put(f.name, '%s/config/mapproxy.yaml' % APP_DIR)
@task
def clean_mapproxy_cache():
with cd('/home/skylines/cache/mapproxy'):
run('rm -rv *')
| agpl-3.0 | Python |
6fae23c1d442880256ed2d4298844a50d6a7968e | Make sure "fab publish" cleans the dist folder | exhuma/puresnmp,exhuma/puresnmp | fabfile.py | fabfile.py | import fabric.api as fab
def generate_type_hierarchy():
"""
Generate a document containing the available variable types.
"""
fab.local('./env/bin/python -m puresnmp.types > docs/typetree.rst')
@fab.task
def doc():
generate_type_hierarchy()
fab.local('sphinx-apidoc '
'-o docs/developer_guide/api '
'-f '
'-e '
'puresnmp '
'puresnmp/test')
with fab.lcd('docs'):
fab.local('make html')
@fab.task
def publish():
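    # Wipe dist/ first so twine only uploads the artifacts built below.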
fab.local('rm -rf dist')
fab.local('python3 setup.py bdist_wheel --universal')
fab.local('python3 setup.py sdist')
fab.local('twine upload dist/*')
| import fabric.api as fab
def generate_type_hierarchy():
"""
Generate a document containing the available variable types.
"""
fab.local('./env/bin/python -m puresnmp.types > docs/typetree.rst')
@fab.task
def doc():
generate_type_hierarchy()
fab.local('sphinx-apidoc '
'-o docs/developer_guide/api '
'-f '
'-e '
'puresnmp '
'puresnmp/test')
with fab.lcd('docs'):
fab.local('make html')
@fab.task
def publish():
fab.local('python3 setup.py bdist_wheel --universal')
fab.local('python3 setup.py sdist')
fab.local('twine upload dist/*')
| mit | Python |
fcdb3de7465467e4ea4a50faec1c39ec9a26e15c | set main function | mehmetkose/python3-async-crawler | async_crawler.py | async_crawler.py | #!/usr/bin/env python
# python 3.5 async web crawler.
# https://github.com/mehmetkose/python3.5-async-crawler
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2016 Mehmet Kose [email protected]
import aiohttp
import asyncio
from urllib.parse import urljoin, urldefrag
root_url = "http://python.org"
crawled_urls, url_hub = [], [root_url, "%s/sitemap.xml" % (root_url)]
async def get_body(url):
response = await aiohttp.request('GET', url)
return await response.read()
def remove_fragment(url):
pure_url, frag = urldefrag(url)
return pure_url
def get_links(html):
new_urls = [link.split('"')[0] for link in str(html).replace("'",'"').split('href="')[1:]]
return [urljoin(root_url, remove_fragment(new_url)) for new_url in new_urls]
async def main():
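    # Drain the queue front to back; newly discovered same-site links are appended for later passes.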
    while url_hub:
        to_crawl = url_hub.pop(0)
raw_html = await get_body(to_crawl)
for link in get_links(raw_html):
if root_url in link and not link in crawled_urls:
url_hub.append(link)
crawled_urls.append(to_crawl)
print("url hub: %s | crawled: %s |url : %s" % (len(url_hub), len(crawled_urls), to_crawl))
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(main()) | #!/usr/bin/env python
# python 3.5 async web crawler.
# https://github.com/mehmetkose/python3.5-async-crawler
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2016 Mehmet Kose [email protected]
import aiohttp
import asyncio
from urllib.parse import urljoin, urldefrag
root_url = "http://python.org"
crawled_urls = []
url_hub = [root_url, "%s/robots.txt" % (root_url), "%s/sitemap.xml" % (root_url)]
async def get_body(url):
response = await aiohttp.request('GET', url)
return await response.read()
def remove_fragment(url):
pure_url, frag = urldefrag(url)
return pure_url
def get_links(html):
new_urls = [link.split('"')[0] for link in str(html).replace("'",'"').split('href="')[1:]]
return [urljoin(root_url, remove_fragment(new_url)) for new_url in new_urls]
if __name__ == '__main__':
loop = asyncio.get_event_loop()
client = aiohttp.ClientSession(loop=loop)
for to_crawl in url_hub:
raw_html = loop.run_until_complete(get_body(to_crawl))
for link in get_links(raw_html):
if root_url in link and not link in crawled_urls:
url_hub.append(link)
url_hub.remove(to_crawl)
crawled_urls.append(to_crawl)
print("url hub: %s | crawled: %s |url : %s" % (len(url_hub), len(crawled_urls), to_crawl))
client.close()
| mit | Python |
604d610d7d7e5d883c643daeba09f5b65db943ce | Make aws template use non-interactive apt-get (#1098) | Parsl/parsl,Parsl/parsl,Parsl/parsl,Parsl/parsl | parsl/providers/aws/template.py | parsl/providers/aws/template.py | template_string = """#!/bin/bash
#sed -i 's/us-east-2\.ec2\.//g' /etc/apt/sources.list
cd ~
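# run apt-get unattended; an interactive prompt would hang instance bootstrap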
export DEBIAN_FRONTEND=noninteractive
apt-get update -y
apt-get install -y python3 python3-pip libffi-dev g++ libssl-dev
pip3 install numpy scipy parsl
$worker_init
$user_script
# Shutdown the instance as soon as the worker scripts exits
# or times out to avoid EC2 costs.
if ! $linger
then
halt
fi
"""
| template_string = """#!/bin/bash
#sed -i 's/us-east-2\.ec2\.//g' /etc/apt/sources.list
cd ~
apt-get update -y
apt-get install -y python3 python3-pip libffi-dev g++ libssl-dev
pip3 install numpy scipy parsl
$worker_init
$user_script
# Shutdown the instance as soon as the worker scripts exits
# or times out to avoid EC2 costs.
if ! $linger
then
halt
fi
"""
| apache-2.0 | Python |
406987b860987894e297d5172a2a3eb0f4e082f8 | Fix a bug in the data resizer. | berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop | core/data/DataResizer.py | core/data/DataResizer.py | """
DataResizer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageResample
from vtk import vtkVersion
VTK_MAJOR_VERSION = vtkVersion.GetVTKMajorVersion()
class DataResizer(object):
"""
DataResizer is a tool that will resize a given image dataset.
You can specify a certain magnification factor or you can use a maximum
number of voxels that it should contain. If the image is larger than the
maximum amount of voxels, it will resize the volume to just below the
specified maximum.
It will never upscale a volume! So factor value that are higher than 1.0
will not have any result.
"""
def __init__(self):
super(DataResizer, self).__init__()
def ResizeData(self, imageData, factor=1.0, maximum=0):
self.imageResampler = vtkImageResample()
self.imageResampler.SetInterpolationModeToLinear()
if VTK_MAJOR_VERSION <= 5:
self.imageResampler.SetInput(imageData)
else:
self.imageResampler.SetInputData(imageData)
# If a maximum has been set: calculate the right factor
if maximum > 0:
factor = self.calculateFactor(imageData.GetDimensions(), maximum)
# Make sure that we are never upscaling the data
if factor > 1.0:
factor = 1.0
        # The factor is a ratio of total voxel counts; translate it to a
        # per-axis scale by taking the cube root: factor^(1/3)
axisMagnificationFactor = pow(factor, 1.0/3.0)
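        # e.g. keeping 1/8 of the voxels (factor 0.125) scales each axis by 0.5, since 0.5**3 == 0.125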
self.resampledImageData = None
if factor != 1.0:
self.imageResampler.SetAxisMagnificationFactor(0, axisMagnificationFactor)
self.imageResampler.SetAxisMagnificationFactor(1, axisMagnificationFactor)
self.imageResampler.SetAxisMagnificationFactor(2, axisMagnificationFactor)
self.imageResampler.Update()
self.resampledImageData = self.imageResampler.GetOutput()
else:
self.resampledImageData = imageData
return self.resampledImageData
# Private methods
def calculateFactor(self, dimensions, maximum):
voxels = dimensions[0] * dimensions[1] * dimensions[2]
factor = float(maximum) / float(voxels)
return factor
| """
DataResizer
:Authors:
Berend Klein Haneveld
"""
from vtk import vtkImageResample
from vtk import vtkVersion
VTK_MAJOR_VERSION = vtkVersion.GetVTKMajorVersion()
class DataResizer(object):
"""
DataResizer is a tool that will resize a given image dataset.
You can specify a certain magnification factor or you can use a maximum
number of voxels that it should contain. If the image is larger than the
maximum amount of voxels, it will resize the volume to just below the
specified maximum.
It will never upscale a volume! So factor value that are higher than 1.0
will not have any result.
"""
def __init__(self):
super(DataResizer, self).__init__()
def ResizeData(self, imageData, factor=1.0, maximum=0):
self.imageResampler = vtkImageResample()
self.imageResampler.SetInterpolationModeToLinear()
if VTK_MAJOR_VERSION <= 5:
self.imageResampler.SetInput(imageData)
else:
self.imageResampler.SetInputData(imageData)
# If a maximum has been set: calculate the right factor
if maximum > 0:
factor = self.calculateFactor(imageData.GetDimensions(), maximum)
# Make sure that we are never upscaling the data
if factor > 1.0:
factor = 1.0
self.resampledImageData = None
if factor != 1.0:
self.imageResampler.SetAxisMagnificationFactor(0, factor)
self.imageResampler.SetAxisMagnificationFactor(1, factor)
self.imageResampler.SetAxisMagnificationFactor(2, factor)
self.imageResampler.Update()
self.resampledImageData = self.imageResampler.GetOutput()
else:
self.resampledImageData = imageData
return self.resampledImageData
# Private methods
def calculateFactor(self, dimensions, maximum):
voxels = dimensions[0] * dimensions[1] * dimensions[2]
factor = float(maximum) / float(voxels)
return factor
| mit | Python |
31b0b97590ce496ba22a39c396ff868c6f511637 | install pre-commit | RockefellerArchiveCenter/DACSspace | dacsspace/client.py | dacsspace/client.py | #!/usr/bin/env python3
from configparser import ConfigParser
from asnake.aspace import ASpace
class ArchivesSpaceClient:
"""Handles communication with ArchivesSpace."""
def __init__(self):
config = ConfigParser()
config.read("local_settings.cfg")
self.aspace = ASpace(baseurl=config.get('ArchivesSpace', 'baseURL'),
username=config.get('ArchivesSpace', 'user'),
password=config.get('ArchivesSpace', 'password'))
self.repo = self.aspace.repositories(config.get('ArchivesSpace', 'repository'))
def get_resources(self, published_only):
"""Returns data about resource records from AS.
Args:
published_only (boolean): Fetch only published records from AS
Returns:
resources (list): Full JSON of AS resource records
"""
        resources = []
        if published_only is True:
            for resource in self.repo.search.with_params(q='publish:true AND primary_type:resource'):
                resources.append(resource.json())
        else:
            for resource in self.repo.search.with_params(q='primary_type:resource'):
                resources.append(resource.json())
        return resources
# ArchivesSpaceClient().get_resources(published_only)
| #!/usr/bin/env python3
import argparse
from configparser import ConfigParser
from asnake.aspace import ASpace
#published_only = False
class ArchivesSpaceClient:
"""Handles communication with ArchivesSpace."""
def __init__(self):
config = ConfigParser()
config.read("local_settings.cfg")
self.aspace = ASpace(baseurl=config.get('ArchivesSpace', 'baseURL'),
username=config.get('ArchivesSpace', 'user'),
password=config.get('ArchivesSpace', 'password'))
self.repo = self.aspace.repositories(config.get('ArchivesSpace', 'repository'))
def get_resources(self, published_only):
"""Returns data about resource records from AS.
Args:
published_only (boolean): Fetch only published records from AS
Returns:
resources (list): Full JSON of AS resource records
"""
if published_only is True:
for resource in self.repo.search.with_params(q='publish:true AND primary_type:resource'):
resource_json = resource.json()
return resource_json
else:
for resource in self.repo.search.with_params(q='primary_type:resource'):
resource_json = resource.json()
return resource_json
#return resource.publish
#return resource_json
#return resource.publish
#build in tests
#ArchivesSpaceClient().get_resources(published_only)
| mit | Python |
0ca727f0ce5877ba2ca3ef74c9309c752a51fbf6 | Fix enable action on plugins | looker/sentry,looker/sentry,jean/sentry,ifduyue/sentry,jean/sentry,BuildingLink/sentry,fotinakis/sentry,zenefits/sentry,BuildingLink/sentry,jean/sentry,fotinakis/sentry,daevaorn/sentry,looker/sentry,alexm92/sentry,fotinakis/sentry,beeftornado/sentry,JackDanger/sentry,nicholasserra/sentry,BuildingLink/sentry,mvaled/sentry,gencer/sentry,nicholasserra/sentry,BuildingLink/sentry,gencer/sentry,zenefits/sentry,fotinakis/sentry,daevaorn/sentry,mvaled/sentry,jean/sentry,zenefits/sentry,JackDanger/sentry,zenefits/sentry,JamesMura/sentry,jean/sentry,imankulov/sentry,imankulov/sentry,alexm92/sentry,JackDanger/sentry,gencer/sentry,gencer/sentry,mitsuhiko/sentry,beeftornado/sentry,JamesMura/sentry,gencer/sentry,beeftornado/sentry,daevaorn/sentry,ifduyue/sentry,nicholasserra/sentry,mvaled/sentry,mvaled/sentry,ifduyue/sentry,mvaled/sentry,alexm92/sentry,BuildingLink/sentry,ifduyue/sentry,looker/sentry,zenefits/sentry,JamesMura/sentry,mitsuhiko/sentry,JamesMura/sentry,mvaled/sentry,JamesMura/sentry,imankulov/sentry,looker/sentry,daevaorn/sentry,ifduyue/sentry | src/sentry/web/frontend/project_plugin_enable.py | src/sentry/web/frontend/project_plugin_enable.py | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginEnableView(ProjectView):
required_scope = 'project:write'
def post(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]))
if plugin.is_enabled(project):
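            # Already enabled: nothing to change, just send the user back to the plugin config page.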
return self.redirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]))
plugin.enable(project=project)
return self.redirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]))
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.plugins import plugins
from sentry.web.frontend.base import ProjectView
class ProjectPluginEnableView(ProjectView):
required_scope = 'project:write'
def post(self, request, organization, team, project, slug):
try:
plugin = plugins.get(slug)
except KeyError:
return self.redirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]))
if not plugin.is_enabled(project):
return self.redirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]))
plugin.enable(project=project)
return self.redirect(reverse('sentry-configure-project-plugin', args=[project.organization.slug, project.slug, slug]))
| bsd-3-clause | Python |
0599b259ed08121160196734f7212dc7fa33149f | Remove execute_auth_api_request method | devicehive/devicehive-python | devicehive/token.py | devicehive/token.py | from devicehive.api_request import ApiRequest
class Token(object):
"""Token class."""
AUTH_HEADER_NAME = 'Authorization'
AUTH_HEADER_VALUE_PREFIX = 'Bearer '
def __init__(self, transport, auth):
self._transport = transport
self._login = auth.get('login')
self._password = auth.get('password')
self._refresh_token = auth.get('refresh_token')
self._access_token = auth.get('access_token')
    def _request_login(self):
# TODO: implement token/login request.
# Set self._refresh_token and self._access_token after success login.
pass
def _auth(self):
api_request = ApiRequest(self._transport)
if not api_request.websocket_transport:
return
api_request.action('authenticate')
api_request.set('token', self._access_token)
api_request.execute('Authentication failure')
@property
def access_token(self):
return self._access_token
@property
def auth_header(self):
auth_header_name = self.AUTH_HEADER_NAME
auth_header_value = self.AUTH_HEADER_VALUE_PREFIX + self._access_token
return auth_header_name, auth_header_value
def refresh(self):
api_request = ApiRequest(self._transport)
api_request.method('POST')
api_request.url('token/refresh')
api_request.action('token/refresh')
api_request.set('refreshToken', self._refresh_token)
tokens = api_request.execute('Token refresh failure')
self._access_token = tokens['accessToken']
def auth(self):
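        # Refresh if we already hold a refresh token, otherwise log in; websocket transports then re-authenticate.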
if self._refresh_token:
self.refresh()
else:
            self._request_login()
self._auth()
| from devicehive.api_request import ApiRequest
from devicehive.api_response import ApiResponseError
class Token(object):
"""Token class."""
AUTH_HEADER_NAME = 'Authorization'
AUTH_HEADER_VALUE_PREFIX = 'Bearer '
def __init__(self, transport, auth):
self._transport = transport
self._login = auth.get('login')
self._password = auth.get('password')
self._refresh_token = auth.get('refresh_token')
self._access_token = auth.get('access_token')
def _login(self):
# TODO: implement token/login request.
# Set self._refresh_token and self._access_token after success login.
pass
def _auth(self):
api_request = ApiRequest(self._transport)
if not api_request.websocket_transport:
return
api_request.action('authenticate')
api_request.set('token', self._access_token)
api_request.execute('Authentication failure')
@property
def access_token(self):
return self._access_token
@property
def auth_header(self):
auth_header_name = self.AUTH_HEADER_NAME
auth_header_value = self.AUTH_HEADER_VALUE_PREFIX + self._access_token
return auth_header_name, auth_header_value
def execute_auth_api_request(self, api_request, error_message):
api_request.header(*self.auth_header)
try:
return api_request.execute(error_message)
except ApiResponseError as api_response_error:
if api_response_error.code != 401:
raise
self.auth()
api_request.header(*self.auth_header)
return api_request.execute(error_message)
def refresh(self):
api_request = ApiRequest(self._transport)
api_request.method('POST')
api_request.url('token/refresh')
api_request.action('token/refresh')
api_request.set('refreshToken', self._refresh_token)
tokens = api_request.execute('Token refresh failure')
self._access_token = tokens['accessToken']
def auth(self):
if self._refresh_token:
self.refresh()
else:
self._login()
self._auth()
| apache-2.0 | Python |
0237fb8114f5a8423d39f44b2882d5dbf10954d7 | make .seen replies for CTCP ACTIONs say "doing nick message"; leaves .seen replies for PRIVMSG to channel the same ("saying message"). | Uname-a/knife_scraper,Uname-a/knife_scraper,Uname-a/knife_scraper | willie/modules/seen.py | willie/modules/seen.py | # coding=utf8
"""
seen.py - Willie Seen Module
Copyright 2008, Sean B. Palmer, inamidst.com
Copyright © 2012, Elad Alfassa <[email protected]>
Licensed under the Eiffel Forum License 2.
http://willie.dftba.net
"""
from __future__ import unicode_literals
import time
import datetime
from willie.tools import Identifier
from willie.tools.time import get_timezone, format_time
from willie.module import commands, rule, priority, thread
@commands('seen')
def seen(bot, trigger):
"""Reports when and where the user was last seen."""
if not trigger.group(2):
bot.say(".seen <nick> - Reports when <nick> was last seen.")
return
nick = trigger.group(2).strip()
timestamp = bot.db.get_nick_value(nick, 'seen_timestamp')
if timestamp:
channel = bot.db.get_nick_value(nick, 'seen_channel')
message = bot.db.get_nick_value(nick, 'seen_message')
action = bot.db.get_nick_value(nick, 'seen_action')
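        # seen_action is truthy when the last line carried a CTCP intent tag (a /me action).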
tz = get_timezone(bot.db, bot.config, None, trigger.nick,
trigger.sender)
saw = datetime.datetime.utcfromtimestamp(timestamp)
timestamp = format_time(bot.db, bot.config, tz, trigger.nick,
trigger.sender, saw)
msg = "I last saw {} at {}".format(nick, timestamp)
if Identifier(channel) == trigger.sender:
if action:
msg = msg + " in here, doing " + nick + " " + message
else:
msg = msg + " in here, saying " + message
else:
msg += " in another channel."
bot.say(str(trigger.nick) + ': ' + msg)
else:
bot.say("Sorry, I haven't seen {} around.".format(nick))
@thread(False)
@rule('(.*)')
@priority('low')
def note(bot, trigger):
if not trigger.is_privmsg:
bot.db.set_nick_value(trigger.nick, 'seen_timestamp', time.time())
bot.db.set_nick_value(trigger.nick, 'seen_channel', trigger.sender)
bot.db.set_nick_value(trigger.nick, 'seen_message', trigger)
bot.db.set_nick_value(trigger.nick, 'seen_action', 'intent' in trigger.tags)
| # coding=utf8
"""
seen.py - Willie Seen Module
Copyright 2008, Sean B. Palmer, inamidst.com
Copyright © 2012, Elad Alfassa <[email protected]>
Licensed under the Eiffel Forum License 2.
http://willie.dftba.net
"""
from __future__ import unicode_literals
import time
import datetime
from willie.tools import Identifier
from willie.tools.time import get_timezone, format_time
from willie.module import commands, rule, priority, thread
@commands('seen')
def seen(bot, trigger):
"""Reports when and where the user was last seen."""
if not trigger.group(2):
bot.say(".seen <nick> - Reports when <nick> was last seen.")
return
nick = trigger.group(2).strip()
timestamp = bot.db.get_nick_value(nick, 'seen_timestamp')
if timestamp:
channel = bot.db.get_nick_value(nick, 'seen_channel')
message = bot.db.get_nick_value(nick, 'seen_message')
tz = get_timezone(bot.db, bot.config, None, trigger.nick,
trigger.sender)
saw = datetime.datetime.utcfromtimestamp(timestamp)
timestamp = format_time(bot.db, bot.config, tz, trigger.nick,
trigger.sender, saw)
msg = "I last saw {} at {}".format(nick, timestamp)
if Identifier(channel) == trigger.sender:
msg = msg + " in here, saying " + message
else:
msg += " in another channel."
bot.say(str(trigger.nick) + ': ' + msg)
else:
bot.say("Sorry, I haven't seen {} around.".format(nick))
@thread(False)
@rule('(.*)')
@priority('low')
def note(bot, trigger):
if not trigger.is_privmsg:
bot.db.set_nick_value(trigger.nick, 'seen_timestamp', time.time())
bot.db.set_nick_value(trigger.nick, 'seen_channel', trigger.sender)
bot.db.set_nick_value(trigger.nick, 'seen_message', trigger)
| mit | Python |
107b72da8629d97452dc6b7ee0f44eeb7d9e351c | remove x-based matplotlib rendering | mahnen/gamma_limits_sensitivity | gamma_limits_sensitivity/__init__.py | gamma_limits_sensitivity/__init__.py | '''
This is the hard-working code that calculates ULs, sensitivities,
and times to detection.
'''
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
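# Agg renders to in-memory buffers and files, so no X display is required.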
def upper_limit(N_on, N_off, alpha, l_lim, A_eff):
figures = [plt.figure()]
dictionary = {
'plots': figures
}
return dictionary
def sensitivity(s_bg, alpha, t_obs, A_eff):
figures = [plt.figure()]
dictionary = {
'plots': figures
}
return dictionary
def predict(s_bg, alpha, f_0, df_0, Gamma, dGamma, E_0, A_eff):
figures = [plt.figure()]
times = [1., 2., 3.]
dictionary = {
'times': times,
'plots': figures
}
return dictionary
| '''
This is the hard working code in order to calculate ULs, sensitivities,
and time to detections.
'''
import matplotlib.pyplot as plt
def upper_limit(N_on, N_off, alpha, l_lim, A_eff):
figures = [plt.figure()]
dictionary = {
'plots': figures
}
return dictionary
def sensitivity(s_bg, alpha, t_obs, A_eff):
figures = [plt.figure()]
dictionary = {
'plots': figures
}
return dictionary
def predict(s_bg, alpha, f_0, df_0, Gamma, dGamma, E_0, A_eff):
figures = [plt.figure()]
times = [1., 2., 3.]
dictionary = {
'times': times,
'plots': figures
}
return dictionary
| mit | Python |
2a67ef989fa79aefccb9bcefe543715346642f91 | Normalize time to UTC. | abusesa/abusehelper | abusehelper/contrib/autoshun/autoshunbot.py | abusehelper/contrib/autoshun/autoshunbot.py | import idiokit
import time as _time
import calendar
from abusehelper.core import utils, cymruwhois, bot, events
AUTOSHUN_CSV_URL = "http://www.autoshun.org/files/shunlist.csv"
class AutoshunBot(bot.PollingBot):
COLUMNS = ["ip", "time", "type"]
feed_url = bot.Param(default=AUTOSHUN_CSV_URL)
use_cymru_whois = bot.BoolParam(default=True)
def poll(self):
pipe = self._poll(url=self.feed_url)
if self.use_cymru_whois:
pipe = pipe | cymruwhois.augment("ip")
return pipe | self._normalize()
@idiokit.stream
def _poll(self, url):
self.log.info("Downloading %s" % url)
try:
info, fileobj = yield utils.fetch_url(url)
except utils.FetchUrlFailed, fuf:
self.log.error("Download failed: %r", fuf)
idiokit.stop()
self.log.info("Downloaded")
# Skip first line
fileobj.readline()
yield utils.csv_to_events(fileobj,
columns=self.COLUMNS,
charset=info.get_param("charset"))
@idiokit.stream
def _normalize(self):
while True:
event = yield idiokit.next()
event.add("feed", "autoshun")
event.add("source url", self.feed_url)
times = event.values("time")
event.clear("time")
for time in times:
event.add("time", self._normalize_time(time))
yield idiokit.send(event)
def _normalize_time(self, time):
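        # Feed timestamps arrive in UTC-5; shift them to UTC and render with an explicit zone suffix.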
parsed = _time.strptime(time, "%Y-%m-%d %H:%M:%S")
seconds = calendar.timegm(parsed)
seconds += 5 * 3600 # UTC-5 to UTC
time_tuple = _time.gmtime(seconds)
return _time.strftime("%Y-%m-%d %H:%M:%S UTC", time_tuple)
if __name__ == "__main__":
AutoshunBot.from_command_line().execute()
| import idiokit
from abusehelper.core import utils, cymruwhois, bot, events
AUTOSHUN_CSV_URL = "http://www.autoshun.org/files/shunlist.csv"
class AutoshunBot(bot.PollingBot):
COLUMNS = ["ip", "time", "type"]
feed_url = bot.Param(default=AUTOSHUN_CSV_URL)
use_cymru_whois = bot.BoolParam(default=True)
def poll(self):
pipe = self._poll(url=self.feed_url)
if self.use_cymru_whois:
pipe = pipe | cymruwhois.augment("ip")
return pipe | self._normalize()
@idiokit.stream
def _poll(self, url):
self.log.info("Downloading %s" % url)
try:
info, fileobj = yield utils.fetch_url(url)
except utils.FetchUrlFailed, fuf:
self.log.error("Download failed: %r", fuf)
idiokit.stop()
self.log.info("Downloaded")
# Skip first line
fileobj.readline()
yield utils.csv_to_events(fileobj,
columns=self.COLUMNS,
charset=info.get_param("charset"))
@idiokit.stream
def _normalize(self):
while True:
event = yield idiokit.next()
event.add("feed", "autoshun")
event.add("source url", self.feed_url)
yield idiokit.send(event)
if __name__ == "__main__":
AutoshunBot.from_command_line().execute()
| mit | Python |
19afe973bffe1bb90942757fcbf81f3630ffddda | Update code formatting. | ghostwords/chameleon-crawler,ghostwords/chameleon-crawler,ghostwords/chameleon-crawler | crawler/args.py | crawler/args.py | #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import argparse
def is_valid_file(f, parser):
if path.isfile(f):
return f
raise argparse.ArgumentTypeError("%s does not exist!" % f)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument(
"crx", metavar='CHAMELEON_CRX_FILE_PATH',
type=lambda x: is_valid_file(x, parser),
help="path to Chameleon CRX package"
)
group = parser.add_mutually_exclusive_group()
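    # --headless and --no-headless toggle the same dest, so argparse rejects passing both.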
group.add_argument(
"--headless", action="store_true", default=True,
help="use a virtual display (default)"
)
group.add_argument("--no-headless", dest='headless', action="store_false")
parser.add_argument(
"-n", dest='num_crawlers', type=int,
choices=range(1, 9), default=4,
help="how many browsers to use in parallel "
"(default: %(default)s)"
)
parser.add_argument(
"-q", "--quiet", action="store_true", default=False,
help="turn off standard output"
)
parser.add_argument(
"-t", "--timeout", metavar='SECONDS',
type=int, default=20,
help="how many seconds to wait for pages to finish "
"loading before timing out (default: %(default)s)"
)
parser.add_argument(
"--urls", metavar='URL_FILE_PATH',
type=argparse.FileType('r'), default='urls.txt',
help="path to URL list file (default: %(default)s)"
)
return parser.parse_args()
| #!/usr/bin/env python3
# chameleon-crawler
#
# Copyright 2015 ghostwords.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from os import path
import argparse
def is_valid_file(f, parser):
if path.isfile(f):
return f
raise argparse.ArgumentTypeError("%s does not exist!" % f)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("crx", metavar='CHAMELEON_CRX_FILE_PATH',
type=lambda x: is_valid_file(x, parser),
help="path to Chameleon CRX package")
group = parser.add_mutually_exclusive_group()
group.add_argument("--headless", action="store_true", default=True,
help="use a virtual display (default)")
group.add_argument("--no-headless", dest='headless', action="store_false")
parser.add_argument("-n", dest='num_crawlers', type=int,
choices=range(1, 9), default=4,
help="how many browsers to use in parallel "
"(default: %(default)s)")
parser.add_argument("-q", "--quiet", action="store_true", default=False,
help="turn off standard output")
parser.add_argument("-t", "--timeout", metavar='SECONDS',
type=int, default=20,
help="how many seconds to wait for pages to finish "
"loading before timing out (default: %(default)s)")
parser.add_argument("--urls", metavar='URL_FILE_PATH',
type=argparse.FileType('r'), default='urls.txt',
help="path to URL list file (default: %(default)s)")
return parser.parse_args()
| mpl-2.0 | Python |
696b4e093171e9d6f17502650f15c9299438b874 | Drop Py2 and six on tests/integration/modules/test_virtualenv_mod.py | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | tests/integration/modules/test_virtualenv_mod.py | tests/integration/modules/test_virtualenv_mod.py | import os
import tempfile
import salt.utils.path
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from tests.support.case import ModuleCase
from tests.support.helpers import slowTest
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import skipIf
@skipIf(
salt.utils.path.which_bin(KNOWN_BINARY_NAMES) is None, "virtualenv not installed"
)
class VirtualenvModuleTest(ModuleCase):
"""
Validate the virtualenv module
"""
def setUp(self):
super().setUp()
self.venv_test_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
self.venv_dir = os.path.join(self.venv_test_dir, "venv")
@slowTest
def test_create_defaults(self):
"""
virtualenv.managed
"""
self.run_function("virtualenv.create", [self.venv_dir])
pip_file = os.path.join(self.venv_dir, "bin", "pip")
self.assertTrue(os.path.exists(pip_file))
@slowTest
def test_site_packages(self):
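        # A venv sharing system site-packages should freeze a different package set than an isolated one.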
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
self.run_function(
"virtualenv.create", [self.venv_dir], system_site_packages=True
)
with_site = self.run_function("pip.freeze", bin_env=pip_bin)
self.run_function("file.remove", [self.venv_dir])
self.run_function("virtualenv.create", [self.venv_dir])
without_site = self.run_function("pip.freeze", bin_env=pip_bin)
self.assertFalse(with_site == without_site)
@slowTest
def test_clear(self):
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
self.run_function("virtualenv.create", [self.venv_dir])
self.run_function("pip.install", [], pkgs="pep8", bin_env=pip_bin)
self.run_function("virtualenv.create", [self.venv_dir], clear=True)
packages = self.run_function("pip.list", prefix="pep8", bin_env=pip_bin)
self.assertFalse("pep8" in packages)
def test_virtualenv_ver(self):
ret = self.run_function("virtualenv.virtualenv_ver", [self.venv_dir])
assert isinstance(ret, list)
assert all([isinstance(x, int) for x in ret])
def tearDown(self):
self.run_function("file.remove", [self.venv_test_dir])
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import os
import tempfile
import salt.utils.path
from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES
from tests.support.case import ModuleCase
from tests.support.helpers import slowTest
from tests.support.runtests import RUNTIME_VARS
from tests.support.unit import skipIf
@skipIf(
salt.utils.path.which_bin(KNOWN_BINARY_NAMES) is None, "virtualenv not installed"
)
class VirtualenvModuleTest(ModuleCase):
"""
Validate the virtualenv module
"""
def setUp(self):
super(VirtualenvModuleTest, self).setUp()
self.venv_test_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
self.venv_dir = os.path.join(self.venv_test_dir, "venv")
@slowTest
def test_create_defaults(self):
"""
virtualenv.managed
"""
self.run_function("virtualenv.create", [self.venv_dir])
pip_file = os.path.join(self.venv_dir, "bin", "pip")
self.assertTrue(os.path.exists(pip_file))
@slowTest
def test_site_packages(self):
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
self.run_function(
"virtualenv.create", [self.venv_dir], system_site_packages=True
)
with_site = self.run_function("pip.freeze", bin_env=pip_bin)
self.run_function("file.remove", [self.venv_dir])
self.run_function("virtualenv.create", [self.venv_dir])
without_site = self.run_function("pip.freeze", bin_env=pip_bin)
self.assertFalse(with_site == without_site)
@slowTest
def test_clear(self):
pip_bin = os.path.join(self.venv_dir, "bin", "pip")
self.run_function("virtualenv.create", [self.venv_dir])
self.run_function("pip.install", [], pkgs="pep8", bin_env=pip_bin)
self.run_function("virtualenv.create", [self.venv_dir], clear=True)
packages = self.run_function("pip.list", prefix="pep8", bin_env=pip_bin)
self.assertFalse("pep8" in packages)
def test_virtualenv_ver(self):
ret = self.run_function("virtualenv.virtualenv_ver", [self.venv_dir])
assert isinstance(ret, list)
assert all([isinstance(x, int) for x in ret])
def tearDown(self):
self.run_function("file.remove", [self.venv_test_dir])
| apache-2.0 | Python |
872320e02d5c922e177434f6b9fa70af8cf822b9 | Revert "RT-26" | unrealsolver/django-wkhtmltopdf,unrealsolver/django-wkhtmltopdf | wkhtmltopdf/__init__.py | wkhtmltopdf/__init__.py | import os
if 'DJANGO_SETTINGS_MODULE' in os.environ:
from .utils import *
__author__ = 'Incuna Ltd'
__version__ = '2.0.3'
| # Have to comment this import to perfrom pip install at the same time as django install
# import os
# if 'DJANGO_SETTINGS_MODULE' in os.environ:
# from .utils import *
__author__ = 'Incuna Ltd'
__version__ = '2.0.3'
| bsd-2-clause | Python |
563a82246180d949917bcd444411bbeb82604e97 | Add an assertion in search.py | luci/recipes-py,luci/recipes-py | recipe_modules/buildbucket/tests/search.py | recipe_modules/buildbucket/tests/search.py | # Copyright 2019 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
from recipe_engine import types
from google.protobuf import json_format
from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2
from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2
from PB.go.chromium.org.luci.buildbucket.proto import rpc as rpc_pb2
DEPS = [
'buildbucket',
'json',
'properties',
'runtime',
'step'
]
def RunSteps(api):
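    # Search for builds matching this build's Gerrit changes, optionally capped by the 'limit' property.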
limit = api.properties.get('limit')
builds = api.buildbucket.search(
rpc_pb2.BuildPredicate(
gerrit_changes=list(api.buildbucket.build.input.gerrit_changes),
),
limit=limit,
)
assert limit is None or len(builds) <= limit
pres = api.step.active_result.presentation
for b in builds:
pres.logs['build %s' % b.id] = json_format.MessageToJson(b).splitlines()
def GenTests(api):
def test(test_name, tags=None, **req):
return (
api.test(test_name) +
api.runtime(is_luci=True, is_experimental=False) +
api.buildbucket.try_build(
project='chromium',
builder='Builder',
git_repo='https://chromium.googlesource.com/chromium/src',
)
)
yield (
test('basic')
)
yield (
test('two builds') +
api.buildbucket.simulated_search_results([
build_pb2.Build(id=1, status=common_pb2.SUCCESS),
build_pb2.Build(id=2, status=common_pb2.FAILURE),
])
)
yield (
test('search failed') +
api.step_data(
'buildbucket.search',
api.json.output_stream(
json_format.MessageToDict(rpc_pb2.BatchResponse(
responses=[dict(error=dict(message='there was a problem'))],
)),
),
)
)
yield (
test('limit') +
api.properties(limit=5) +
api.buildbucket.simulated_search_results([
build_pb2.Build(id=i+1, status=common_pb2.SUCCESS)
# Returning more to test trimming of the returned list.
for i in xrange(10)
])
)
| # Copyright 2019 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
from recipe_engine import types
from google.protobuf import json_format
from PB.go.chromium.org.luci.buildbucket.proto import build as build_pb2
from PB.go.chromium.org.luci.buildbucket.proto import common as common_pb2
from PB.go.chromium.org.luci.buildbucket.proto import rpc as rpc_pb2
DEPS = [
'buildbucket',
'json',
'properties',
'runtime',
'step'
]
def RunSteps(api):
builds = api.buildbucket.search(
rpc_pb2.BuildPredicate(
gerrit_changes=list(api.buildbucket.build.input.gerrit_changes),
),
limit=api.properties.get('limit'),
)
pres = api.step.active_result.presentation
for b in builds:
pres.logs['build %s' % b.id] = json_format.MessageToJson(b).splitlines()
def GenTests(api):
def test(test_name, tags=None, **req):
return (
api.test(test_name) +
api.runtime(is_luci=True, is_experimental=False) +
api.buildbucket.try_build(
project='chromium',
builder='Builder',
git_repo='https://chromium.googlesource.com/chromium/src',
)
)
yield (
test('basic')
)
yield (
test('two builds') +
api.buildbucket.simulated_search_results([
build_pb2.Build(id=1, status=common_pb2.SUCCESS),
build_pb2.Build(id=2, status=common_pb2.FAILURE),
])
)
yield (
test('search failed') +
api.step_data(
'buildbucket.search',
api.json.output_stream(
json_format.MessageToDict(rpc_pb2.BatchResponse(
responses=[dict(error=dict(message='there was a problem'))],
)),
),
)
)
yield (
test('limit') +
api.properties(limit=5) +
api.buildbucket.simulated_search_results([
build_pb2.Build(id=i+1, status=common_pb2.SUCCESS)
for i in xrange(10)
])
)
| apache-2.0 | Python |
ebe5a4ce8c12489bceb8991f627fdea29329e854 | Enable discovery server to reply also when started w/o net connection | knupouls/extremefeedbacklamp,knupouls/extremefeedbacklamp,dvaske/extremefeedbacklamp,dvaske/extremefeedbacklamp,dvaske/extremefeedbacklamp,knupouls/extremefeedbacklamp,switchgears/extremefeedbacklamp,switchgears/extremefeedbacklamp,switchgears/extremefeedbacklamp | xfd_discovery_server.py | xfd_discovery_server.py | #!/usr/bin/env python
#
# Author Aske Olsson [email protected]
#
import socket
import struct
import time
#MCAST_GRP = '224.1.1.1'
#MCAST_PORT = 5007
MCAST_ADDR = "239.77.124.213"
MCAST_PORT = 19418
MCAST_ANS_PORT = 19419
def socket_setup():
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('', MCAST_PORT))
mreq = struct.pack("4sl", socket.inet_aton(MCAST_ADDR), socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
except socket.error, e:
sock = None
return sock
def listen():
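    # Retry socket setup inside the loop so the responder recovers once the network comes up.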
#ip = socket.gethostbyname(socket.gethostname())
myMAC = open('/sys/class/net/eth0/address').read()
print "listen loop"
sock = socket_setup()
while True:
        print "socket acquired", sock
try:
if sock:
data, sender_addr = sock.recvfrom(1024)
print data, sender_addr
# Answer back
ans_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
ans_sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
ans_sock.sendto("MAC=" + myMAC, (sender_addr[0], MCAST_ANS_PORT))
else:
print "setup socket"
sock = socket_setup()
time.sleep(1)
except socket.error, e:
sock = None
def main():
listen()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
#
# Author Aske Olsson [email protected]
#
import socket
import struct
#MCAST_GRP = '224.1.1.1'
#MCAST_PORT = 5007
MCAST_ADDR = "239.77.124.213"
MCAST_PORT = 19418
MCAST_ANS_PORT = 19419
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(('', MCAST_PORT))
mreq = struct.pack("4sl", socket.inet_aton(MCAST_ADDR), socket.INADDR_ANY)
sock.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
#ip = socket.gethostbyname(socket.gethostname())
myMAC = open('/sys/class/net/eth0/address').read()
while True:
try:
data, sender_addr = sock.recvfrom(1024)
# print data, sender_addr
# Answer back
ans_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
ans_sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 2)
ans_sock.sendto("MAC=" + myMAC, (sender_addr[0], MCAST_ANS_PORT))
except Exception:
pass
| mit | Python |
2cf4a0b93db423207798ffd93b2e91cdb73b6d2b | Add identifier for UT Brownsville | texastribune/tx_salaries,texastribune/tx_salaries | tx_salaries/utils/transformers/ut_brownsville.py | tx_salaries/utils/transformers/ut_brownsville.py | from . import base
from . import mixins
class TransformedRecord(mixins.GenericCompensationMixin,
mixins.GenericDepartmentMixin, mixins.GenericIdentifierMixin,
mixins.GenericJobTitleMixin, mixins.GenericPersonMixin,
mixins.MembershipMixin, mixins.OrganizationMixin, mixins.PostMixin,
mixins.RaceMixin, base.BaseTransformedRecord):
MAP = {
'last_name': 'Last Name',
'first_name': 'First Name',
'middle_name': 'Middle Name',
'department': 'Department',
'job_title': 'Title',
'hire_date': 'Hire Date',
'compensation': 'Annualized',
'race': 'Race',
'gender': 'Gender'
}
NAME_FIELDS = ('first_name', 'last_name', )
ORGANIZATION_NAME = 'University of Texas at Brownsville'
ORGANIZATION_CLASSIFICATION = 'University'
# TODO not given on spreadsheet, but they appear to give part time
compensation_type = 'Full Time'
@property
def is_valid(self):
# Adjust to return False on invalid fields. For example:
return self.last_name.strip() != ''
@property
def identifier(self):
"""
Identifier for UT Brownsville
"""
excluded = [self.department_key, self.job_title_key,
self.hire_date_key, self.compensation_key]
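        # These fields are left out of the hash so the identifier stays stable across updates.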
return {
'scheme': 'tx_salaries_hash',
'identifier': base.create_hash_for_record(self.data,
exclude=excluded)
}
transform = base.transform_factory(TransformedRecord)
| from . import base
from . import mixins
class TransformedRecord(mixins.GenericCompensationMixin,
mixins.GenericDepartmentMixin, mixins.GenericIdentifierMixin,
mixins.GenericJobTitleMixin, mixins.GenericPersonMixin,
mixins.MembershipMixin, mixins.OrganizationMixin, mixins.PostMixin,
mixins.RaceMixin, base.BaseTransformedRecord):
MAP = {
'last_name': 'Last Name',
'first_name': 'First Name',
'department': 'Department',
'job_title': 'Title',
'hire_date': 'Hire Date',
'status': 'LABEL FOR FT/PT STATUS',
'compensation': 'Annualized',
'race': 'Race',
'gender': 'Gender'
}
NAME_FIELDS = ('first_name', 'last_name', )
ORGANIZATION_NAME = 'University of Texas at Brownsville'
ORGANIZATION_CLASSIFICATION = 'University'
# TODO not given on spreadsheet, but they appear to give part time
compensation_type = 'Full Time'
@property
def is_valid(self):
# Adjust to return False on invalid fields. For example:
return self.last_name.strip() != ''
transform = base.transform_factory(TransformedRecord)
| apache-2.0 | Python |
c3df6a10d008441c79eb07b889f52fe0de22538b | Fix the default prefix | miquella/powerline-vaulted-segment | powerline_vaulted_segment/vaulted.py | powerline_vaulted_segment/vaulted.py | from __future__ import (unicode_literals, division, absolute_import, print_function)
from powerline.theme import requires_segment_info
@requires_segment_info
def vaulted(pl, segment_info, prefix=''):
'''Return the current vaulted vault
:param string prefix:
The prefix to use in front of the vault name
'''
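    # With '' as the default, format() never renders the literal string 'None'.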
vault = segment_info['environ'].get('VAULTED_ENV', None)
if vault:
return '{0}{1}'.format(prefix, vault)
| from __future__ import (unicode_literals, division, absolute_import, print_function)
from powerline.theme import requires_segment_info
@requires_segment_info
def vaulted(pl, segment_info, prefix=None):
'''Return the current vaulted vault
:param string prefix:
The prefix to use in front of the vault name
'''
vault = segment_info['environ'].get('VAULTED_ENV', None)
if vault:
return '{0}{1}'.format(prefix, vault)
| mit | Python |
e4cc4447bf9aca4f579eef34baccd3aaf73939c3 | Print statement verwijderd uit functie | RobinvdGriend/hamming-code | hamming-code/hamming.py | hamming-code/hamming.py | from matrix import Matrix
#The encoding matrix
encoding_matrix = Matrix([
[1, 1, 0, 1],
[1, 0, 1, 1],
[1, 0, 0, 0],
[0, 1, 1, 1],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
])
#The parity checking matrix
checking_matrix = Matrix([
[1, 0, 1, 0, 1, 0, 1],
[0, 1, 1, 0, 0, 1, 1],
[0, 0, 0, 1, 1, 1, 1]
])
#Takes binary vector of length 4 and adds the parity bits
#Returns result as vector
def encodemessage(message):
vector_with_paritybits = encoding_matrix*(message.transpose())
return Matrix(vector_with_paritybits.getbinary())
#repairs message, may not work, can't test it yet
#Takes a matrix
def repairmessage(message):
vector = checking_matrix*message
checker = True
#checks if the return vector is the zero vector. If this is the case
#checker = True, and there is no mistake
for element in vector.values[0]:
if element == 1:
checker = False
if checker == False:
#finds out at what position the mistake is and saves it as
#counter
counter = 0
for i, element in enumerate(vector.values[0]):
counter += element * 2 ** i
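            # Read LSB-first, the syndrome bits give the 1-based position of the flipped bit.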
else:
#in this case checker = True, so it returns the message
return message
new_message = message.values[0]
#fixes the message
if new_message[counter - 1] == 0:
new_message[counter - 1] = 1
else:
new_message[counter - 1] = 0
return Matrix(new_message)
#Example:
#boodschap = input('Enter your message here: ')
#testvector = Matrix([[1, 0, 1, 1]])
#print(repairmessage(encodemessage(testvector)))
| from matrix import Matrix
#The encoding matrix
encoding_matrix = Matrix([
[1, 1, 0, 1],
[1, 0, 1, 1],
[1, 0, 0, 0],
[0, 1, 1, 1],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
])
#The parity checking matrix
checking_matrix = Matrix([
[1, 0, 1, 0, 1, 0, 1],
[0, 1, 1, 0, 0, 1, 1],
[0, 0, 0, 1, 1, 1, 1]
])
#Takes binary vector of length 4 and adds the parity bits
#Returns result as vector
def encodemessage(message):
print(message.transpose())
vector_with_paritybits = encoding_matrix*(message.transpose())
return Matrix(vector_with_paritybits.getbinary())
#repairs message, may not work, can't test it yet
#Takes a matrix
def repairmessage(message):
vector = checking_matrix*message
checker = True
#checks if the return vector is the zero vector. If this is the case
#checker = True, and there is no mistake
for element in vector.values[0]:
if element == 1:
checker = False
if checker == False:
#finds out at what position the mistake is and saves it as
#counter
counter = 0
for i, element in enumerate(vector.values[0]):
counter += element * 2 ** i
else:
#in this case checker = True, so it returns the message
return message
new_message = message.values[0]
#fixes the message
if new_message[counter - 1] == 0:
new_message[counter - 1] = 1
else:
new_message[counter - 1] = 0
return Matrix(new_message)
#Example:
#boodschap = input('Enter your message here: ')
#testvector = Matrix([[1, 0, 1, 1]])
#print(repairmessage(encodemessage(testvector)))
| mit | Python |
825eb37e15e2fb08ac205b7495e93a91acb79c26 | Add function for flashing all form errors | hack4impact/clean-air-council,hack4impact/clean-air-council,hack4impact/clean-air-council | app/utils.py | app/utils.py | import re
from flask import url_for, flash
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.index)
def parse_phone_number(phone_number):
"""Make phone number conform to E.164 (https://en.wikipedia.org/wiki/E.164)
"""
stripped = re.sub(r'\D', '', phone_number)
if len(stripped) == 10:
stripped = '1' + stripped
stripped = '+' + stripped
return stripped
def flash_errors(form):
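    # Emit one flash message per WTForms field error, labelled with the field's label text.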
for field, errors in form.errors.items():
for error in errors:
flash(u"Error in the %s field - %s" % (
getattr(form, field).label.text,
error
))
| import re
from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.index)
def parse_phone_number(phone_number):
"""Make phone number conform to E.164 (https://en.wikipedia.org/wiki/E.164)
"""
stripped = re.sub(r'\D', '', phone_number)
if len(stripped) == 10:
stripped = '1' + stripped
stripped = '+' + stripped
return stripped
| mit | Python |
c0e1bed70bc331041622e0db06871d4f3e3277f3 | Update activate-devices.py | JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub | cron/activate-devices.py | cron/activate-devices.py | #!/usr/bin/env python
import MySQLdb
#import datetime
#import urllib2
#import os
import datetime
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO!")
servername = "localhost"
username = "pi"
password = "password"
dbname = "pi_heating_db"
GPIO.setmode(GPIO.BOARD)
cnx = MySQLdb.connect(host=servername, user=username, passwd=password, db=dbname)
cursorselect = cnx.cursor()
query = ("SELECT * FROM devices;")
cursorselect.execute(query)
results_devices =cursorselect.fetchall()
cursorselect.close()
for result in results_devices:
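    # result[2] is the BOARD pin number, result[3] the output level to drive.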
print("* * * * * *")
DEVICE_PIN = int( result[2] )
DEVICE_VALUE = int( result[3] )
GPIO.setup(DEVICE_PIN, GPIO.OUT, initial=GPIO.LOW)
GPIO.output(DEVICE_PIN, DEVICE_VALUE)
print( DEVICE_PIN, DEVICE_VALUE )
print("- - -")
cnx.close()
| #!/usr/bin/env python
import MySQLdb
#import datetime
#import urllib2
#import os
import datetime
import RPi.GPIO as GPIO
try:
import RPi.GPIO as GPIO
except RuntimeError:
print("Error importing RPi.GPIO!")
servername = "localhost"
username = "pi"
password = "password"
dbname = "pi_heating_db"
GPIO.setmode(GPIO.BOARD)
cnx = MySQLdb.connect(host=servername, user=username, passwd=password, db=dbname)
cursorselect = cnx.cursor()
query = ("SELECT * FROM devices;")
cursorselect.execute(query)
results_devices =cursorselect.fetchall()
cursorselect.close()
for result in results_devices:
print("* * * * * *")
DEVICE_PIN = result[2]
DEVICE_VALUE = result[3]
GPIO.setup(DEVICE_PIN, GPIO.OUT, initial=GPIO.LOW)
GPIO.output(DEVICE_PIN, DEVICE_VALUE)
print( DEVICE_PIN, DEVICE_VALUE )
print("- - -")
cnx.close()
| apache-2.0 | Python |
8701318037b9d425149f0689fa137be78a782aa7 | return the name of the face found | adamreis/DANIEL-server | app/views.py | app/views.py | from app import app
from flask import Flask, request, jsonify
import kairos
DEFAULT_GALLERY = 'default_gallery'
# App Logic
@app.route('/', methods=['GET'])
def index():
return 'yo'
@app.route('/upload/<name>', methods=['POST'])
def upload(name):
img_url = request.form['img_url']
success = kairos.add_face_url(img_url, name, DEFAULT_GALLERY)
return jsonify({'success': success})
@app.route('/verify', methods=['GET'])
def verify():
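    # Identify the face against the default gallery; a recognised name means entry is allowed.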
    img_url = request.args.get('img_url')
name = kairos.identify_face_url(img_url, DEFAULT_GALLERY)
allowed = name is not None
# TODO: open the door.
return jsonify({'allowed': allowed,
'name': name})
| from app import app
from flask import Flask, request, jsonify
import kairos
DEFAULT_GALLERY = 'default_gallery'
# App Logic
@app.route('/', methods=['GET'])
def index():
return 'yo'
@app.route('/upload/<name>', methods=['POST'])
def upload(name):
img_url = request.form['img_url']
success = kairos.add_face_url(img_url, name, DEFAULT_GALLERY)
return jsonify({'success': success})
@app.route('/verify', methods=['GET'])
def verify():
link = request.args.get('img_url')
allowed = kairos.check_face_url(img_url, DEFAULT_GALLERY)
return jsonify({'allowed': allowed})
| mit | Python |
23f87565a9074b7d6bd5e45b1ce8686ba49d3ce8 | Update nsoltSynthesis2dNetwork.py | shodimaggio/SaivDr,shodimaggio/SaivDr | appendix/pytorch/nsoltSynthesis2dNetwork.py | appendix/pytorch/nsoltSynthesis2dNetwork.py | import torch
import torch.nn as nn
from nsoltBlockIdct2dLayer import NsoltBlockIdct2dLayer
from nsoltFinalRotation2dLayer import NsoltFinalRotation2dLayer
from nsoltLayerExceptions import InvalidNumberOfChannels, InvalidPolyPhaseOrder, InvalidNumberOfVanishingMoments
class NsoltSynthesis2dNetwork(nn.Module):
def __init__(self,
number_of_channels=[],
decimation_factor=[],
polyphase_order=[0,0],
number_of_vanishing_moments=1):
super(NsoltSynthesis2dNetwork, self).__init__()
# Check and set parameters
# # of channels
if number_of_channels[0] != number_of_channels[1]:
raise InvalidNumberOfChannels(
                '[%d %d] : Currently, Type-I NSOLT is only supported, where the symmetric and antisymmetric channel numbers should be the same.'\
%(number_of_channels[0],number_of_channels[1]))
self.number_of_channels = number_of_channels
        # Decimation factor
self.decimation_factor = decimation_factor
# Polyphase order
if any(torch.tensor(polyphase_order)%2):
raise InvalidPolyPhaseOrder(
'%d + %d : Currently, even polyphase orders are only supported.'\
%(polyphase_order[0],polyphase_order[1]))
self.polyphase_order = polyphase_order
# # of vanishing moments
if number_of_vanishing_moments < 0 \
or number_of_vanishing_moments > 1:
raise InvalidNumberOfVanishingMoments(
                '%d : The number of vanishing moments must be either 0 or 1.'\
%(number_of_vanishing_moments))
self.number_of_vanishing_moments = number_of_vanishing_moments
# Instantiation of layers
self.layerV0T = NsoltFinalRotation2dLayer(
number_of_channels=number_of_channels,
decimation_factor=decimation_factor,
name='V0~'
)
self.layerE0T = NsoltBlockIdct2dLayer(
decimation_factor=decimation_factor,
name='E0~'
)
def forward(self,x):
u = self.layerV0T.forward(x)
y = self.layerE0T.forward(u)
return y
| import torch
import torch.nn as nn
from nsoltBlockIdct2dLayer import NsoltBlockIdct2dLayer
from nsoltFinalRotation2dLayer import NsoltFinalRotation2dLayer
class NsoltSynthesis2dNetwork(nn.Module):
def __init__(self,
number_of_channels=[],
decimation_factor=[]):
super(NsoltSynthesis2dNetwork, self).__init__()
self.number_of_channels = number_of_channels
self.decimation_factor = decimation_factor
# Instantiation of layers
self.layerV0 = NsoltFinalRotation2dLayer(
number_of_channels=number_of_channels,
decimation_factor=decimation_factor,
name='V0'
)
self.layerE0 = NsoltBlockIdct2dLayer(
decimation_factor=decimation_factor,
name='E0'
)
def forward(self,x):
u = self.layerV0.forward(x)
y = self.layerE0.forward(u)
return y
| bsd-2-clause | Python |
0d056fefa1896a1e4d17b56f0e84dae106c17c57 | fix bug | czgu/metaHack,czgu/metaHack,czgu/metaHack,czgu/metaHack | meta/api/views.py | meta/api/views.py | from django.conf.urls import patterns, url
from django.shortcuts import render
from django.http import Http404
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.contrib.sites.shortcuts import get_current_site
import requests
from .forms import UploadImageForm
from .models import Picture
# Create your views here.
@csrf_exempt
def image_handler(request):
    possible_food = set([
'apple',
'banana',
'carrot',
'broccoli',
'pear'
])
if request.method != 'POST':
raise Http404('wrong method')
else:
form = UploadImageForm(request.POST, request.FILES)
current_site = get_current_site(request)
print current_site
if form.is_valid():
newpic = Picture(image=request.FILES['image'])
newpic.save()
auth = ('acc_2569f28daa2ca36', '5f3d54692a4dcdeda460024d50505ecd')
image_path = \
'http://' + str(current_site) + '/media/' + str(newpic.image.name)
r_url = 'https://api.imagga.com/v1/tagging?url=' + image_path
r = requests.get(r_url, auth=auth)
if r.status_code < 400:
data = r.json()
print data
foods = data['results'][0]['tags']
for food in foods:
                if food['tag'] in possible_food:
return JsonResponse({'food': food['tag']})
return JsonResponse({'food': foods[0]['tag']})
else:
            raise Http404('Imagga error occurred')
    raise Http404('Unknown error occurred')
def recipe_handler(request):
if request.method != 'POST':
return Http404('wrong method')
| from django.conf.urls import patterns, url
from django.shortcuts import render
from django.http import Http404
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.contrib.sites.shortcuts import get_current_site
import requests
from .forms import UploadImageForm
from .models import Picture
# Create your views here.
@csrf_exempt
def image_handler(request):
    possible_food = set([
'apple',
'banana',
'carrot',
'broccoli'
])
if request.method != 'POST':
raise Http404('wrong method')
else:
form = UploadImageForm(request.POST, request.FILES)
current_site = get_current_site(request)
print current_site
if form.is_valid():
newpic = Picture(image=request.FILES['image'])
newpic.save()
auth = ('acc_2569f28daa2ca36', '5f3d54692a4dcdeda460024d50505ecd')
image_path = \
'http://' + str(current_site) + '/media/' + str(newpic.image.name)
r_url = 'https://api.imagga.com/v1/tagging?url=' + image_path
r = requests.get(r_url, auth=auth)
if r.status_code < 400:
data = r.json()
print data
foods = data['results'][0]['tags']
for food in foods:
                if food['tag'] in possible_food:
return JsonResponse({'food': food['tag']})
return JsonResponse({'food': foods[0]['tag']})
else:
            raise Http404('Imagga error occurred')
    raise Http404('Unknown error occurred')
def recipe_handler(request):
if request.method != 'POST':
return Http404('wrong method')
| apache-2.0 | Python |
3380d0fed1a8d24eba8627bd65dccc1fb2f772dd | Update version to next release | minio/minio-py,NitishT/minio-py,NitishT/minio-py,minio/minio-py | minio/__init__.py | minio/__init__.py | # -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio - MinIO Python Library for Amazon S3 Compatible Cloud Storage
~~~~~~~~~~~~~~~~~~~~~
>>> import minio
>>> minio = Minio('https://s3.amazonaws.com')
>>> for bucket in minio.list_buckets():
... print(bucket.name)
:copyright: (c) 2015, 2016, 2017 by MinIO, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'minio-py'
__author__ = 'MinIO, Inc.'
__version__ = '5.0.2'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015, 2016, 2017, 2018, 2019 MinIO, Inc.'
from .api import Minio
from .error import ResponseError
from .post_policy import PostPolicy
from .copy_conditions import CopyConditions
from .definitions import Bucket, Object
| # -*- coding: utf-8 -*-
# MinIO Python Library for Amazon S3 Compatible Cloud Storage,
# (C) 2015, 2016, 2017 MinIO, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
minio - MinIO Python Library for Amazon S3 Compatible Cloud Storage
~~~~~~~~~~~~~~~~~~~~~
>>> import minio
>>> minio = Minio('https://s3.amazonaws.com')
>>> for bucket in minio.list_buckets():
... print(bucket.name)
:copyright: (c) 2015, 2016, 2017 by MinIO, Inc.
:license: Apache 2.0, see LICENSE for more details.
"""
__title__ = 'minio-py'
__author__ = 'MinIO, Inc.'
__version__ = '5.0.1'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015, 2016, 2017, 2018, 2019 MinIO, Inc.'
from .api import Minio
from .error import ResponseError
from .post_policy import PostPolicy
from .copy_conditions import CopyConditions
from .definitions import Bucket, Object
| apache-2.0 | Python |
4815315b2200cb8061640f6333f2cb96b0707965 | update admin to enable list edit | diegobz/django-admin-sso,flatfox-ag/django-admin-sso,frog32/django-admin-sso,matthiask/django-admin-sso,diegobz/django-admin-sso,frog32/django-admin-sso,flatfox-ag/django-admin-sso,allink/django-admin-sso,matthiask/django-admin-sso,allink/django-admin-sso | django_sso/admin.py | django_sso/admin.py | from django.contrib import admin
from django_sso import settings
from django_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
list_display = ('__unicode__', 'username', 'username_mode', 'domain', 'user', 'weight')
list_editable = ('username', 'username_mode', 'domain', 'user', 'weight')
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'django_sso/login.html'
| from django.contrib import admin
from django_sso import settings
from django_sso.models import Assignment
class AssignmentAdmin(admin.ModelAdmin):
pass
admin.site.register(Assignment, AssignmentAdmin)
if settings.DJANGO_SSO_ADD_LOGIN_BUTTON:
admin.site.login_template = 'django_sso/login.html'
| bsd-3-clause | Python |
5022cefce20ecf20d7689fa6d2c280f8774ee677 | add forgot password link to admin login | DjangoGirls/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,patjouk/djangogirls,DjangoGirls/djangogirls,patjouk/djangogirls,patjouk/djangogirls | djangogirls/urls.py | djangogirls/urls.py | from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = patterns('',
# Redirect old links:
url(r'^pages/in-your-city/$', RedirectView.as_view(url='/organize/', permanent=True)),
url(r'^admin$', RedirectView.as_view(url='/admin/', permanent=True)),
# Admin link for password reset
# See: https://github.com/darklow/django-suit/blob/92a745d72935622220eca80edfce779419c30094/suit/templates/admin/login.html#L61
url(r'^admin/password_reset/$',
RedirectView.as_view(url='/account/password_reset', permanent=True),
name='admin_password_reset'),
# Regular links:
url(r'^community/', include('jobs.urls', namespace='jobs')),
url(r'^admin/', include(admin.site.urls)),
url(r'^pages/', include('django.contrib.flatpages.urls')),
url(r'^account/', include('django.contrib.auth.urls')),
url(r'', include('core.urls', namespace='core')),
url(r'', include('applications.urls', namespace='applications')),
url(r'^ckeditor/', include('ckeditor.urls')),
)
| from django.conf.urls import patterns, include, url
from django.contrib import admin
from django.views.generic.base import RedirectView
urlpatterns = patterns('',
# Redirect old links:
url(r'^pages/in-your-city/$', RedirectView.as_view(url='/organize/', permanent=True)),
url(r'^admin$', RedirectView.as_view(url='/admin/', permanent=True)),
# Regular links:
url(r'^community/', include('jobs.urls', namespace='jobs')),
url(r'^admin/', include(admin.site.urls)),
url(r'^pages/', include('django.contrib.flatpages.urls')),
url(r'^account/', include('django.contrib.auth.urls')),
url(r'', include('core.urls', namespace='core')),
url(r'', include('applications.urls', namespace='applications')),
url(r'^ckeditor/', include('ckeditor.urls')),
)
| bsd-3-clause | Python |
232d4c2c8876b05cec4a427fcdf141a036c97045 | Rename "Events" into "EventRegistry" and "trigger" into "notify" | pyofwave/PyOfWave,pyofwave/PyOfWave | pyofwave_server/pyofwave/core/operation.py | pyofwave_server/pyofwave/core/operation.py | """
Standard interface for connecting client protocols to the operation extensions.
"""
from delta import DeltaObserverPool as dop
import opdev, delta
# Perform operation
def _getChildren(tag):
rep = [tag.text, ]
for child in tag:
rep.append(child)
rep.append(child.tail)
return rep
def performOperation(event, operation):
    """ Execute an operation."""
rep = opdev._receive[operation.tag](event, *_getChildren(operation), **operation.attrib)
    EventRegistry.notify(operation)
return rep
# Events
def get(obj, prop, default = {}):
if not obj.get(prop):
obj[prop] = default
return obj[prop]
_handlers = {}
class EventRegistry(object):
"""Keeps track of all the events a user registers to."""
def __init__(self, user, callback):
self.user = user
self._callback = callback
def _handlers(self, url, operation):
# XXX : Why is it a list that is associated to an operation ?
# XXX : Is it possible to assign several callback to an operation ?
return get(get(_handlers, url), operation, [])
def register(self, url, operation):
        # XXX: All registered operations will have the same callback
self._handlers(url, operation).append(self._callback)
def unregister(self, url, operation="*"):
url_handlers = get(_handlers, url)
if operation == "*":
for operation in url_handlers.keys():
operation_callback = self._handlers(url, operation)
if self._callback in operation_callback:
operation_callback.remove(self._callback)
else:
self._handlers(url, operation).remove(self._callback)
@staticmethod
def notify(operation, src = None):
if src == None:
src = operation.get("href", operation.get("src", ""))
for handler in _handlers.get(src, {}).get(operation.tag, []):
            dop.apply_async(handler, (operation.tag,))
@delta.alphaDeltaObservable.addObserver
@staticmethod
def applyDelta(doc, delta):
""" Calculate and send events. """
| """
Standard interface for connecting client protocols to the operation extensions.
"""
from delta import DeltaObserverPool as dop
import opdev, delta
# Perform operation
def _getChildren(tag):
rep = [tag.text, ]
for child in tag:
rep.append(child)
rep.append(child.tail)
return rep
def performOperation(event, operation):
    """ Execute an operation."""
rep = opdev._receive[operation.tag](event, *_getChildren(operation), **operation.attrib)
Events.trigger(operation)
return rep
# Events
def get(obj, prop, default = {}):
if not obj.get(prop):
obj[prop] = default
return obj[prop]
_handlers = {}
class Events(object):
"""Keeps track of all the events a user registers to."""
def __init__(self, user, callback):
self.user = user
self._callback = callback
def _handlers(self, url, operation):
# XXX : Why is it a list that is associated to an operation ?
# XXX : Is it possible to assign several callback to an operation ?
return get(get(_handlers, url), operation, [])
def register(self, url, operation):
        # XXX: All registered operations will have the same callback
self._handlers(url, operation).append(self._callback)
def unregister(self, url, operation="*"):
url_handlers = get(_handlers, url)
if operation == "*":
for operation in url_handlers.keys():
operation_callback = self._handlers(url, operation)
if self._callback in operation_callback:
operation_callback.remove(self._callback)
else:
self._handlers(url, operation).remove(self._callback)
@staticmethod
def trigger(operation, src = None):
if src == None:
src = operation.get("href", operation.get("src", ""))
for handler in _handlers.get(src, {}).get(operation.tag, []):
            dop.apply_async(handler, (operation.tag,))
@delta.alphaDeltaObservable.addObserver
@staticmethod
def applyDelta(doc, delta):
""" Calculate and send events. """
| mpl-2.0 | Python |