commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
7cd3c0449b05e75ffbe5ba346bab3ff389f63b9d | clean up map_async_bench | hazelcast/hazelcast-python-client,hazelcast/hazelcast-python-client,cangencer/hazelcast-python-client | tests/benchmark/map_async_bench.py | tests/benchmark/map_async_bench.py | import threading
import random
import time
import logging
import sys
from os.path import dirname
sys.path.append(dirname(dirname(dirname(__file__))))
import hazelcast
# Benchmark parameters: total async requests, key space size, value payload
# size, and the operation mix (the remaining ~20% are removes).
REQ_COUNT = 50000
ENTRY_COUNT = 10 * 1000
VALUE_SIZE = 10000
GET_PERCENTAGE = 40
PUT_PERCENTAGE = 40

# BUG FIX: datefmt contained a stray '%' ("%H:%M%:%S,"); '%:' is not a valid
# strftime directive and corrupts the rendered timestamp.  The trailing comma
# is kept so %(msecs)03d in `format` is appended right after the seconds.
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")  # benchmark-local logger
# Connect to a single local Hazelcast member using the default "dev" cluster
# credentials.  NOTE: the client connects at import time.
config = hazelcast.ClientConfig()
config.group_config.name = "dev"
config.group_config.password = "dev-pass"
config.network_config.addresses.append("127.0.0.1:5701")
client = hazelcast.HazelcastClient(config)
class Test(object):
    """Fires REQ_COUNT async map operations and signals when all completed."""

    def __init__(self):
        self.ops = 0                    # completed-operation counter
        self.event = threading.Event()  # set once every request has finished

    def incr(self, _):
        """Completion callback shared by all async calls (result ignored)."""
        # NOTE(review): `+=` on an int is not atomic; concurrent callbacks
        # could in principle lose increments -- behaviour kept as-is.
        self.ops += 1
        if self.ops != REQ_COUNT:
            return
        self.event.set()

    def run(self):
        """Issue the randomized mix of async get/put/remove requests."""
        target_map = client.get_map("default")
        for _ in xrange(0, REQ_COUNT):
            entry_key = int(random.random() * ENTRY_COUNT)
            dice = int(random.random() * 100)
            if dice < GET_PERCENTAGE:
                target_map.get_async(entry_key, self.incr)
            elif dice < GET_PERCENTAGE + PUT_PERCENTAGE:
                target_map.put_async(entry_key, "x" * VALUE_SIZE, -1, self.incr)
            else:
                target_map.remove_async(entry_key, self.incr)
# Driver: start the workload, then block until the completion event fires.
t = Test()
start = time.time()
t.run()
t.event.wait()
time_taken = time.time() - start
print("Took %s seconds for %d requests" % (time_taken, REQ_COUNT))
print("ops per second: %s" % (t.ops/time_taken))
| import threading
import random
import time
import logging
import sys
from os.path import dirname
sys.path.append(dirname(dirname(dirname(__file__))))
import hazelcast
# Benchmark parameters: total async requests, key space size, value payload
# size, and the operation mix (the remaining ~20% are removes).
REQ_COUNT = 20000
ENTRY_COUNT = 10 * 1000
VALUE_SIZE = 10000
GET_PERCENTAGE = 40
PUT_PERCENTAGE = 40

# BUG FIX: datefmt contained a stray '%' ("%H:%M%:%S,"); '%:' is not a valid
# strftime directive and corrupts the rendered timestamp.
logging.basicConfig(format='%(asctime)s%(msecs)03d [%(name)s] %(levelname)s: %(message)s', datefmt="%H:%M:%S,")
logging.getLogger().setLevel(logging.INFO)
logger = logging.getLogger("main")  # benchmark-local logger
# Connect to a single local Hazelcast member using the default "dev" cluster
# credentials.  NOTE: the client connects at import time.
config = hazelcast.ClientConfig()
config.group_config.name = "dev"
config.group_config.password = "dev-pass"
config.network_config.addresses.append("127.0.0.1:5701")
client = hazelcast.HazelcastClient(config)
class Test(object):
    """Counts completed async map operations for the benchmark workload."""

    ops = 0  # class-level default; instances shadow it on the first callback

    def get_cb(self, _):
        """Completion callback for get_async (result ignored)."""
        self.ops += 1

    def put_cb(self, _):
        """Completion callback for put_async (result ignored)."""
        self.ops += 1

    def remove_cb(self, _):
        """Completion callback for remove_async (result ignored)."""
        self.ops += 1

    def run(self):
        """Issue the randomized mix of async get/put/remove requests."""
        target_map = client.get_map("default")
        for _ in xrange(0, REQ_COUNT):
            entry_key = int(random.random() * ENTRY_COUNT)
            dice = int(random.random() * 100)
            if dice < GET_PERCENTAGE:
                target_map.get_async(entry_key, self.get_cb)
            elif dice < GET_PERCENTAGE + PUT_PERCENTAGE:
                target_map.put_async(entry_key, "x" * VALUE_SIZE, -1, self.put_cb)
            else:
                target_map.remove_async(entry_key, self.remove_cb)
# Driver: start the workload, then poll until every callback has fired.
t = Test()
start = time.time()
t.run()
# NOTE(review): this loop only exits when ops lands exactly on REQ_COUNT;
# a lost or duplicated callback increment would hang it.
while t.ops != REQ_COUNT:
    time.sleep(0.01)
print("ops per second: %d" % (t.ops/(time.time()-start)))
| apache-2.0 | Python |
29315213a8503de018a76badc71da3737d2b54c7 | Fix spiffsgen example test | espressif/esp-idf,espressif/esp-idf,espressif/esp-idf,espressif/esp-idf | examples/storage/spiffsgen/example_test.py | examples/storage/spiffsgen/example_test.py | from __future__ import print_function
import os
import hashlib
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_spiffsgen(env, extra_data):
    """Flash the spiffsgen example and verify its console output."""
    # Build/flash with the default configuration and boot the app.
    dut = env.get_dut('spiffsgen', 'examples/storage/spiffsgen', dut_class=ttfw_idf.ESP32DUT)
    dut.start_app()

    image_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'spiffs_image')

    # The app must echo hello.txt; rstrip() drops the trailing newline so the
    # expectation matches the single logged line.
    with open(os.path.join(image_dir, 'hello.txt'), 'r') as hello_file:
        dut.expect('Read from hello.txt: ' + hello_file.read().rstrip())

    # The app must report the same MD5 that we compute from the source image.
    with open(os.path.join(image_dir, 'sub', 'alice.txt'), 'rb') as alice_file:
        expected_md5 = hashlib.md5(alice_file.read()).hexdigest()

    dut.expect('Computed MD5 hash of alice.txt: ' + expected_md5)


if __name__ == '__main__':
    test_examples_spiffsgen()
| from __future__ import print_function
import os
import hashlib
import ttfw_idf
@ttfw_idf.idf_example_test(env_tag='Example_GENERIC')
def test_examples_spiffsgen(env, extra_data):
    """Run the spiffsgen example on a DUT and check its console output."""
    # Test with default build configurations
    dut = env.get_dut('spiffsgen', 'examples/storage/spiffsgen', dut_class=ttfw_idf.ESP32DUT)
    dut.start_app()

    base_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'spiffs_image')

    # Expect hello.txt is read successfully
    # NOTE(review): read() keeps the file's trailing newline, so this
    # expectation may never match the single logged line -- confirm intended.
    with open(os.path.join(base_dir, 'hello.txt'), 'r') as hello_txt:
        dut.expect('Read from hello.txt: ' + hello_txt.read())

    # Expect alice.txt MD5 hash is computed accurately
    with open(os.path.join(base_dir, 'sub', 'alice.txt'), 'rb') as alice_txt:
        alice_md5 = hashlib.md5(alice_txt.read()).hexdigest()

    dut.expect('Computed MD5 hash of alice.txt: ' + alice_md5)


if __name__ == '__main__':
    test_examples_spiffsgen()
| apache-2.0 | Python |
7c762733311c6a52f0a7605a9495f8234c1d6ff2 | put portLo..Hi as arg | tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj,tttor/csipb-jamu-prj | predictor/server/server.py | predictor/server/server.py | #!/usr/bin/python
import sys
from datetime import datetime
from server_thread import ServerThread as Server
def main(argv):
if len(sys.argv)!=4:
print 'USAGE: phyton prediction_server.py [serverId] [portLo] [portHi]'
return
host = '127.0.0.1'
serverId = argv[1]
portLo,portHi = int(argv[2]),int(argv[3])
upAt = datetime.now().strftime("%Y:%m:%d %H:%M:%S")
print >> sys.stderr, '******************************************************'
print >> sys.stderr,"Ijah predictor server :)"
print >> sys.stderr,"[id= "+serverId+"]"
print >> sys.stderr,"[ports= "+str(portLo)+" to "+str(portHi)+"]"
print >> sys.stderr,"[upFrom= "+upAt+"]"
threadList = [Server(i,"serverThread_"+str(serverId)+"_"+str(i),host,port)
for i,port in enumerate(range(portLo, portHi+1))]
for t in threadList:
t.daemon=True
t.start()
while True:
pass
if __name__ == '__main__':
main(sys.argv)
| #!/usr/bin/python
import sys
from datetime import datetime
from server_thread import ServerThread as Server
from config import serverConfig as scfg
def main():
    """Start the predictor server threads using ports from the config file."""
    if len(sys.argv)!=2:
        # NOTE(review): 'phyton' typo in the usage text.
        print 'USAGE: phyton prediction_server.py [serverId]'
        return

    serverId = sys.argv[1]
    if serverId not in scfg['ports']:
        print 'FATAL: serverId unknown'
        return

    # Host and the per-server port range come from the shared configuration.
    host = scfg['host']
    portLo,portHi = scfg['ports'][serverId]

    upAt = datetime.now().strftime("%Y:%m:%d %H:%M:%S")
    print >> sys.stderr, '******************************************************'
    print >> sys.stderr,"Ijah predictor server :)"
    print >> sys.stderr,"[id= "+serverId+"]"
    print >> sys.stderr,"[ports= "+str(portLo)+" to "+str(portHi)+"]"
    print >> sys.stderr,"[upFrom= "+upAt+"]"

    # One daemon server thread per port in the configured range.
    threadList = [Server(i,"serverThread_"+str(serverId)+"_"+str(i),host,port)
                  for i,port in enumerate(range(portLo, portHi+1))]
    for t in threadList:
        t.daemon=True
        t.start()

    # NOTE(review): busy-wait keeps the process alive but burns a CPU core.
    while True:
        pass


if __name__ == '__main__':
    main()
| mit | Python |
87ca8475f58b057e8043f8b398bd76123a89a733 | Revert "parsing html" | karlcow/webcompat,karlcow/webcompat | moz/minutes/helpers.py | moz/minutes/helpers.py | #!/usr/bin/env python
# encoding: utf-8
"""
helpers.py
Some modules to help with this project
Created by Karl Dubost on 2016-02-24.
Copyright (c) 2016 La Grange. All rights reserved.
MIT License
"""
import requests
def fetch_content(uri):
    '''Download *uri* and return a (body text, encoding) tuple.'''
    response = requests.get(uri)
    return response.text, response.encoding
def main():
    '''core program (placeholder -- no standalone behaviour yet)'''
    pass


if __name__ == "__main__":
    # BUG FIX: the module no longer imports sys at the top (it was removed
    # together with the html-parsing code), so import it before using it.
    import sys
    sys.exit(main())
| #!/usr/bin/env python
# encoding: utf-8
"""
helpers.py
Some modules to help with this project
Created by Karl Dubost on 2016-02-24.
Copyright (c) 2016 La Grange. All rights reserved.
MIT License
"""
import io
import sys
import lxml.html
import requests
def fetch_content(uri):
    '''Fetch the URI and returns the raw content and its encoding'''
    content = requests.get(uri)
    return content.text, content.encoding


def html_parse(content):
    '''returns a parsed HTML content'''
    html = ''
    try:
        html = lxml.html.parse(io.StringIO(content))
    # Python 2 except syntax; NOTE(review): re-raising with `raise e`
    # resets the original traceback.
    except Exception, e:
        raise e
    return html


def main():
    '''core program (placeholder -- no standalone behaviour yet)'''
    pass


if __name__ == "__main__":
    sys.exit(main())
| mit | Python |
8ecc26cffabb5a4c80b9a5574b102cc5c63312d3 | Update accounts.py | uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw,uw-it-aca/myuw | myuw/views/accounts.py | myuw/views/accounts.py | from myuw.views.page import page
from myuw.util.page_view import page_view
@page_view
def accounts(request):
    """Render the accounts page via the shared page helper."""
    context = {}
    return page(request, context, template='accounts.html')
| from myuw.views.page import page
from myuw.util.page_view import page_view
@page_view
def accounts(request):
return page(request, {}, template='accounts.html') | apache-2.0 | Python |
9eebf1d43b93a6e1001186693d3a15ce2b5d568e | Add Bank and BankAccount models, add some fields to Supplier model | coyotevz/nobix-app | nbs/models/supplier.py | nbs/models/supplier.py | # -*- coding: utf-8 -*-
from nbs.models import db
from nbs.models.entity import Entity
from nbs.models.misc import FiscalDataMixin
class Supplier(Entity, FiscalDataMixin):
    """A goods supplier; an Entity specialisation carrying fiscal data."""
    __tablename__ = 'supplier'
    __mapper_args__ = {'polymorphic_identity': u'supplier'}

    # Who provides the freight for deliveries.
    FREIGHT_SUPPLIER = 'FREIGHT_SUPPLIER'
    FREIGHT_CUSTOMER = 'FREIGHT_CUSTOMER'

    # Machine value -> Spanish display label.
    _freight_types = {
        FREIGHT_SUPPLIER: 'Flete de proveedor',
        FREIGHT_CUSTOMER: 'Flete de cliente',
    }

    supplier_id = db.Column(db.Integer, db.ForeignKey('entity.id'),
                            primary_key=True)
    # Reuse the generic Entity name columns under supplier-specific names.
    name = Entity._name_1
    fancy_name = Entity._name_2

    payment_term = db.Column(db.Integer) # in days
    # NOTE(review): db.Enum(*_freight_types.keys()) depends on dict key
    # order, which is not guaranteed on older Pythons -- confirm the enum.
    freight_type = db.Column(db.Enum(*_freight_types.keys(),
                             name='freight_type'), default=FREIGHT_CUSTOMER)
    leap_time = db.Column(db.Integer) # in days

    @property
    def full_name(self):
        """Return "name (fancy_name)", omitting the suffix when unset."""
        fn = u" ({0})".format(self.fancy_name) if self.fancy_name else u""
        return u"{0}{1}".format(self.name, fn)


class Bank(db.Model):
    """A bank institution referenced by supplier bank accounts."""
    __tablename__ = 'bank'

    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.Unicode)
    # TODO: Add bank logo, to quickly identify


class BankAccount(db.Model):
    """A supplier's account held at a Bank."""
    __tablename__ = 'bank_account'

    # Account type constants (display labels in Spanish).
    TYPE_CC_PESOS = 'TYPE_CC_PESOS'
    TYPE_CC_USD = 'TYPE_CC_USD'
    TYPE_CA_PESOS = 'TYPE_CA_PESOS'
    TYPE_CA_USD = 'TYPE_CA_USD'
    TYPE_UNIQUE = 'TYPE_UNIQUE'

    # Machine value -> Spanish display label.
    _account_type = {
        TYPE_CC_PESOS: 'Cuenta Corriente en Pesos',
        TYPE_CC_USD: 'Cuenta Corriente en Dólares',
        TYPE_CA_PESOS: 'Caja de Ahorro en Pesos',
        TYPE_CA_USD: 'Caja de Ahorro en Dólares',
        TYPE_UNIQUE: 'Cuenta Única',
    }

    id = db.Column(db.Integer, primary_key=True)
    bank_branch = db.Column(db.Unicode)
    # NOTE(review): enum value order follows dict key order -- see Supplier.
    account_type = db.Column(db.Enum(*_account_type.keys(),
                             name='account_type'), default=TYPE_CC_PESOS)
    account_number = db.Column(db.Unicode)
    account_cbu = db.Column(db.Unicode)
    account_owner = db.Column(db.Unicode)

    bank_id = db.Column(db.Integer, db.ForeignKey('bank.id'))
    bank = db.relationship(Bank, backref="accounts")

    supplier_id = db.Column(db.Integer, db.ForeignKey('supplier.supplier_id'))
    supplier = db.relationship(Supplier, backref='bank_accounts')
from nbs.models import db
from nbs.models.entity import Entity
from nbs.models.misc import FiscalDataMixin
class Supplier(Entity, FiscalDataMixin):
    """A goods supplier; an Entity specialisation carrying fiscal data."""
    __tablename__ = 'supplier'
    __mapper_args__ = {'polymorphic_identity': u'supplier'}

    supplier_id = db.Column(db.Integer, db.ForeignKey('entity.id'),
                            primary_key=True)
    # Reuse the generic Entity name columns under supplier-specific names.
    name = Entity._name_1
    fancy_name = Entity._name_2

    payment_term = db.Column(db.Integer)

    @property
    def full_name(self):
        """Return "name (fancy_name)", omitting the suffix when unset."""
        fn = u" ({0})".format(self.fancy_name) if self.fancy_name else u""
        return u"{0}{1}".format(self.name, fn)
| mit | Python |
f57294c59e197c989536638776738b0ed0bcee1d | disable scheduler.tough_pepper_cases | Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Just-D/chromium-1,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,hgl888/chromium-crosswalk,fujunwei/chromium-crosswalk,ltilve/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,Chilledheart/chromium,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Jonekee/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,dednal/chromium.src,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,ltilve/chromium,PeterWangIntel/chromium-crosswalk,krieger-od/nwjs_chromium.src,Just-D/chromium-1,dushu1203/chromium.src,Chilledheart/chromium,fujunwei/chromium-crosswalk,jaruba/chromium.src,axinging/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,M4sse/chromium.src,jaruba/chromium.src,markYoungH/chromium.src,Just-D/chromium-1,markYoungH/chromium.src,ltilve/chromium,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,dednal/chromium.src,M4sse/chromium.src,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,chuan9/chromium-crosswalk,Ch
illedheart/chromium,Just-D/chromium-1,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,M4sse/chromium.src,TheTypoMaster/chromium-crosswalk,dednal/chromium.src,Fireblend/chromium-crosswalk,ltilve/chromium,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,Just-D/chromium-1,jaruba/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,dushu1203/chromium.src,chuan9/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,Chilledheart/chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,Jonekee/chromium.src,Just-D/chromium-1,krieger-od/nwjs_chromium.src,krieger-od/nwjs_chromium.src,ltilve/chromium,fujunwei/chromium-crosswalk,M4sse/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,Pluto-tv/chromium-crosswalk,fujunwei/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,dednal/chromium.src,markYoungH/chromium.src,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,dednal/chromium.src,mohamed--abdel-maksoud/chromium.src,Fireblend/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,jaruba/chromium.src,krieger-
od/nwjs_chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,hgl888/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,crosswalk-project/chromium-crosswalk-efl,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,dednal/chromium.src,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,ltilve/chromium,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,markYoungH/chromium.src,Pluto-tv/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,dushu1203/chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,fujunwei/chromium-crosswalk,M4sse/chromium.src | tools/perf/benchmarks/scheduler.py | tools/perf/benchmarks/scheduler.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import benchmark
from measurements import smoothness
import page_sets
class SchedulerToughSchedulingCases(benchmark.Benchmark):
  """Measures rendering statistics while interacting with pages that have
  challenging scheduling properties.

  https://docs.google.com/a/chromium.org/document/d/
    17yhE5Po9By0sCdM1yZT3LiUECaUr_94rQt9j-4tOQIM/view"""
  # Measurement to run and the workload it runs against.
  test = smoothness.Smoothness
  page_set = page_sets.ToughSchedulingCasesPageSet


# Pepper plugin is not supported on android.
@benchmark.Disabled('android', 'win', 'mac') # crbug.com/384733
class SchedulerToughPepperCases(benchmark.Benchmark):
  """Measures rendering statistics while interacting with pages that have
  pepper plugins"""
  # Measurement to run and the workload it runs against.
  test = smoothness.Smoothness
  page_set = page_sets.ToughPepperCasesPageSet

  def CustomizeBrowserOptions(self, options):
    # This is needed for testing pepper plugin.
    options.AppendExtraBrowserArgs('--enable-pepper-testing')
| # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from telemetry import benchmark
from measurements import smoothness
import page_sets
class SchedulerToughSchedulingCases(benchmark.Benchmark):
  """Measures rendering statistics while interacting with pages that have
  challenging scheduling properties.

  https://docs.google.com/a/chromium.org/document/d/
    17yhE5Po9By0sCdM1yZT3LiUECaUr_94rQt9j-4tOQIM/view"""
  # Measurement to run and the workload it runs against.
  test = smoothness.Smoothness
  page_set = page_sets.ToughSchedulingCasesPageSet


# Pepper plugin is not supported on android.
@benchmark.Disabled('android', 'win') # crbug.com/384733
class SchedulerToughPepperCases(benchmark.Benchmark):
  """Measures rendering statistics while interacting with pages that have
  pepper plugins"""
  # Measurement to run and the workload it runs against.
  test = smoothness.Smoothness
  page_set = page_sets.ToughPepperCasesPageSet

  def CustomizeBrowserOptions(self, options):
    # This is needed for testing pepper plugin.
    options.AppendExtraBrowserArgs('--enable-pepper-testing')
| bsd-3-clause | Python |
4fb1ad11add4436395f775a12f0d4e90b99d6594 | add ignore filtering | naver/hubblemon,naver/hubblemon,naver/hubblemon | psutil_mon/psutil_alarm.py | psutil_mon/psutil_alarm.py |
#
# Hubblemon - Yet another general purpose system monitor
#
# Copyright 2015 NAVER Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import socket, fnmatch, pickle, sys, os
import psutil_mon.settings
hubblemon_path = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(hubblemon_path)
import common.settings
class psutil_alarm:
    """Alarm-configuration lookup for psutil-based machine monitoring."""

    def __init__(self):
        self.name = 'psutil'
        self.sec_interval = 5  # polling interval in seconds

    def system_list_init(self):
        """Hook for initialising the monitored-system list (no-op here)."""
        pass

    def select_conf(self, client, item, map):
        """Return the alarm conf for "client:item" from *map*.

        An exact key wins; otherwise the first fnmatch-style wildcard entry
        applies, unless its optional 'IGNORE' pattern also matches the key.
        Returns {} when nothing applies.
        """
        key = '%s:%s' % (client, item)

        # Exact entry takes precedence (its IGNORE field is not consulted).
        if key in map:
            return map[key]

        # Otherwise scan wildcard entries such as "host*:net-*".
        for pattern, conf in map.items():
            if not fnmatch.fnmatch(key, pattern):
                continue
            # An entry may exclude certain keys via its own IGNORE pattern.
            if 'IGNORE' in conf and fnmatch.fnmatch(key, conf['IGNORE']):
                continue
            return conf

        return {}

    def get_conf(self, client, item):
        """Return (message head, absolute conf, lambda conf) for client/item.

        client: machine name; item: psutil item (e.g. cpu, net, disk...).
        """
        abs_conf = self.select_conf(client, item,
                                    psutil_mon.settings.alarm_conf_absolute)
        lambda_conf = self.select_conf(client, item,
                                       psutil_mon.settings.alarm_conf_lambda)
        return ('%s:%s' % (client, item), abs_conf, lambda_conf)
|
#
# Hubblemon - Yet another general purpose system monitor
#
# Copyright 2015 NAVER Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import socket, fnmatch, pickle, sys, os
import psutil_mon.settings
hubblemon_path = os.path.join(os.path.dirname(__file__), '..')
sys.path.append(hubblemon_path)
import common.settings
class psutil_alarm:
    """Alarm-configuration lookup for psutil-based machine monitoring."""

    def __init__(self):
        self.name = 'psutil'
        self.sec_interval = 5  # polling interval in seconds

    def system_list_init(self):
        """Hook for initialising the monitored-system list (no-op here)."""
        pass

    def select_conf(self, client, item, map):
        """Return the alarm conf for "client:item" from *map*.

        An exact key wins; otherwise the first matching fnmatch-style
        wildcard entry (e.g. "*:net-*") is returned, or {} when none match.
        """
        key = '%s:%s' % (client, item)

        # Exact entry takes precedence.
        if key in map:
            return map[key]

        # Otherwise the first matching wildcard entry wins.
        for pattern in map:
            if fnmatch.fnmatch(key, pattern):
                return map[pattern]

        return {}

    def get_conf(self, client, item):
        """Return (message head, absolute conf, lambda conf) for client/item.

        client: machine name; item: psutil item (e.g. cpu, net, disk...).
        """
        abs_conf = self.select_conf(client, item,
                                    psutil_mon.settings.alarm_conf_absolute)
        lambda_conf = self.select_conf(client, item,
                                       psutil_mon.settings.alarm_conf_lambda)
        return ('%s:%s' % (client, item), abs_conf, lambda_conf)
| apache-2.0 | Python |
5cd9499fcc0c1f9b48216aeca11a7adcd8995a47 | Fix for MRV failing to enter enable mode | ktbyers/netmiko,ktbyers/netmiko | netmiko/mrv/mrv_ssh.py | netmiko/mrv/mrv_ssh.py | """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
    """MRV Communications Driver (OptiSwitch)."""

    def session_preparation(self):
        """Prepare the session after the connection has been established."""
        self._test_channel_read(pattern=r'[>#]')
        self.set_base_prompt()
        # Enter enable mode before disabling paging; the base prompt is
        # determined again afterwards since the prompt can change.
        self.enable()
        self.disable_paging(command="no cli-paging")
        # Clear the read buffer
        time.sleep(.3 * self.global_delay_factor)
        self.set_base_prompt()
        self.clear_buffer()

    def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
        """Enter enable mode (MRV uses no enable password).

        Returns the output read while switching modes; raises ValueError
        when the device did not reach enable mode.
        """
        output = ""
        if not self.check_enable_mode():
            self.write_channel(self.normalize_cmd(cmd))
            output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
            if not self.check_enable_mode():
                msg = "Failed to enter enable mode. Please ensure you pass " \
                      "the 'secret' argument to ConnectHandler."
                raise ValueError(msg)
        return output

    def save_config(self, cmd='save config flash', confirm=False):
        """Saves configuration (writes the running config to flash)."""
        return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| """MRV Communications Driver (OptiSwitch)."""
from __future__ import unicode_literals
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class MrvOptiswitchSSH(CiscoSSHConnection):
    """MRV Communications Driver (OptiSwitch)."""

    def session_preparation(self):
        """Prepare the session after the connection has been established."""
        self._test_channel_read(pattern=r'[>#]')
        # NOTE(review): enable() runs before set_base_prompt(); if the
        # prompt is not established first this ordering may fail -- confirm.
        self.enable()
        self.set_base_prompt()
        self.disable_paging(command="no cli-paging")
        # Clear the read buffer
        time.sleep(.3 * self.global_delay_factor)
        self.clear_buffer()

    def enable(self, cmd='enable', pattern=r'#', re_flags=re.IGNORECASE):
        """Enter enable mode (MRV uses no enable password).

        Returns the output read while switching modes; raises ValueError
        when the device did not reach enable mode.
        """
        output = ""
        if not self.check_enable_mode():
            self.write_channel(self.normalize_cmd(cmd))
            output += self.read_until_prompt_or_pattern(pattern=pattern, re_flags=re_flags)
            if not self.check_enable_mode():
                msg = "Failed to enter enable mode. Please ensure you pass " \
                      "the 'secret' argument to ConnectHandler."
                raise ValueError(msg)
        return output

    def save_config(self, cmd='save config flash', confirm=False):
        """Saves configuration (writes the running config to flash)."""
        return super(MrvOptiswitchSSH, self).save_config(cmd=cmd, confirm=confirm)
| mit | Python |
d24a8db471cc9a415e3e2081e702199990bd6ac4 | Add option to configure plot's linewidth | kinverarity1/pyexperiment,duerrp/pyexperiment,DeercoderResearch/pyexperiment,kinverarity1/pyexperiment,DeercoderResearch/pyexperiment,shaunstanislaus/pyexperiment,duerrp/pyexperiment,shaunstanislaus/pyexperiment,DeercoderResearch/pyexperiment,duerrp/pyexperiment,shaunstanislaus/pyexperiment,shaunstanislaus/pyexperiment,kinverarity1/pyexperiment,kinverarity1/pyexperiment,DeercoderResearch/pyexperiment | pyexperiment/utils/plot.py | pyexperiment/utils/plot.py | """Provides setup for matplotlib figures
Written by Peter Duerr.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import matplotlib
from matplotlib import pyplot as plt
def setup_matplotlib(font_size=14,
                     label_size=14,
                     use_tex=True,
                     linewidth=2):
    """Setup basic style for matplotlib figures

    font_size -- base font size in points
    label_size -- tick-label size in points
    use_tex -- render text with LaTeX
    linewidth -- default line width for plotted lines
    """
    font_size = int(font_size)
    # NOTE(review): 'normal' is not an actual font *family* (families are
    # e.g. 'serif', 'sans-serif'); matplotlib may warn -- confirm intended.
    font = {'family': 'normal',
            'weight': 'normal',
            'size': font_size}

    # ** is elegant here
    matplotlib.rc('font', **font)  # pylint:disable=W0142
    matplotlib.rc('text', usetex=use_tex)
    matplotlib.rc('lines', linewidth=linewidth)

    label_size = int(label_size)
    matplotlib.rc('xtick', labelsize=label_size)
    matplotlib.rc('ytick', labelsize=label_size)
def quit_figure_on_key(key, figure=None):
    """Attach a handler to *figure* (default: current figure) that closes
    it when *key* is pressed.
    """
    def _close_handler(event):
        """Close the event's figure when the configured key was pressed."""
        if event.key == key:
            plt.close(event.canvas.figure)

    target = plt.gcf() if figure is None else figure
    target.canvas.mpl_connect('key_press_event', _close_handler)
def setup_figure(name):
    """Create a figure titled *name* that can be closed by pressing 'q'
    (saving via 's' is matplotlib's built-in key binding).

    Returns the created figure so callers can plot into it.
    """
    fig = plt.figure()
    fig.canvas.set_window_title(name)
    quit_figure_on_key('q', fig)
    # Improvement: hand the figure back instead of discarding it; callers
    # previously had to fall back on plt.gcf().
    return fig
| """Provides setup for matplotlib figures
Written by Peter Duerr.
"""
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
import matplotlib
from matplotlib import pyplot as plt
def setup_matplotlib(font_size=14,
                     label_size=14,
                     use_tex=True):
    """Setup basic style for matplotlib figures

    font_size -- base font size in points
    label_size -- tick-label size in points
    use_tex -- render text with LaTeX
    """
    font_size = int(font_size)
    # NOTE(review): 'normal' is not an actual font *family* (families are
    # e.g. 'serif', 'sans-serif'); matplotlib may warn -- confirm intended.
    font = {'family': 'normal',
            'weight': 'normal',
            'size': font_size}

    # ** is elegant here
    matplotlib.rc('font', **font)  # pylint:disable=W0142
    matplotlib.rc('text', usetex=use_tex)

    label_size = int(label_size)
    matplotlib.rc('xtick', labelsize=label_size)
    matplotlib.rc('ytick', labelsize=label_size)


def quit_figure_on_key(key, figure=None):
    """Add handler to figure (defaults to current figure) that closes it
    on a key press event.
    """
    def quit_on_keypress(event):
        """Quit the figure on key press
        """
        if event.key == key:
            plt.close(event.canvas.figure)

    if figure is None:
        figure = plt.gcf()

    figure.canvas.mpl_connect('key_press_event', quit_on_keypress)


def setup_figure(name):
    """Setup a figure that can be closed by pressing 'q' and saved by
    pressing 's'.
    """
    # NOTE(review): the figure is not returned; callers must use plt.gcf().
    fig = plt.figure()
    fig.canvas.set_window_title(name)
    quit_figure_on_key('q', fig)
| mit | Python |
9c40fa22c395b3d1dba800f0826606ecf314ddb2 | test update | miketheman/opencomparison,nanuxbe/djangopackages,benracine/opencomparison,QLGu/djangopackages,benracine/opencomparison,pydanny/djangopackages,pydanny/djangopackages,audreyr/opencomparison,pydanny/djangopackages,nanuxbe/djangopackages,QLGu/djangopackages,QLGu/djangopackages,miketheman/opencomparison,audreyr/opencomparison,nanuxbe/djangopackages | apps/pypi/tests/test_slurper.py | apps/pypi/tests/test_slurper.py | from django.template.defaultfilters import slugify
from django.test import TestCase
from package.models import Package, Version
from pypi.slurper import Slurper
TEST_PACKAGE_NAME = 'Django'
TEST_PACKAGE_VERSION = '1.3'
TEST_PACKAGE_REPO_NAME = 'django-uni-form'
class SlurpAllTests(TestCase):
    """Integration tests for pypi.slurper.Slurper (they hit the live PyPI)."""

    def test_get_latest_version_number(self):
        # NOTE(review): pinned to TEST_PACKAGE_VERSION; breaks whenever the
        # package publishes a newer release.
        slurper = Slurper(TEST_PACKAGE_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
        self.assertEquals(version, TEST_PACKAGE_VERSION)

    def test_get_or_create_package(self):
        # Creating a Package from the latest version must report a fresh
        # record with matching title and slug.
        slurper = Slurper(TEST_PACKAGE_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
        package, created = slurper.get_or_create_package(TEST_PACKAGE_NAME, version)
        self.assertTrue(created)
        self.assertTrue(isinstance(package, Package))
        self.assertEquals(package.title, TEST_PACKAGE_NAME)
        self.assertEquals(package.slug, slugify(TEST_PACKAGE_NAME))

    def test_get_or_create_with_repo(self):
        # Same flow for a package whose name matches its repository name.
        slurper = Slurper(TEST_PACKAGE_REPO_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
        package, created = slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
        self.assertTrue(created)
        self.assertTrue(isinstance(package, Package))
        self.assertEquals(package.title, TEST_PACKAGE_REPO_NAME)
        self.assertEquals(package.slug, slugify(TEST_PACKAGE_REPO_NAME))
def test_check_versions(self):
slurper = Slurper(TEST_PACKAGE_REPO_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
# make me a package (Actually, make me a billionare)
slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
# fetch the package for testing
package = Package.objects.get(title=TEST_PACKAGE_REPO_NAME)
self.assertTrue(package.pypi_downloads > 1000) | from django.template.defaultfilters import slugify
from django.test import TestCase
from package.models import Package, Version
from pypi.slurper import Slurper
TEST_PACKAGE_NAME = 'Django'
TEST_PACKAGE_VERSION = '1.2.5'
TEST_PACKAGE_REPO_NAME = 'django-uni-form'
class SlurpAllTests(TestCase):
    """Integration tests for pypi.slurper.Slurper (they hit the live PyPI)."""

    def test_get_latest_version_number(self):
        # NOTE(review): pinned to TEST_PACKAGE_VERSION; breaks whenever the
        # package publishes a newer release.
        slurper = Slurper(TEST_PACKAGE_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
        self.assertEquals(version, TEST_PACKAGE_VERSION)

    def test_get_or_create_package(self):
        # Creating a Package from the latest version must report a fresh
        # record with matching title and slug.
        slurper = Slurper(TEST_PACKAGE_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_NAME)
        package, created = slurper.get_or_create_package(TEST_PACKAGE_NAME, version)
        self.assertTrue(created)
        self.assertTrue(isinstance(package, Package))
        self.assertEquals(package.title, TEST_PACKAGE_NAME)
        self.assertEquals(package.slug, slugify(TEST_PACKAGE_NAME))

    def test_get_or_create_with_repo(self):
        # Same flow for a package whose name matches its repository name.
        slurper = Slurper(TEST_PACKAGE_REPO_NAME)
        version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
        package, created = slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
        self.assertTrue(created)
        self.assertTrue(isinstance(package, Package))
        self.assertEquals(package.title, TEST_PACKAGE_REPO_NAME)
        self.assertEquals(package.slug, slugify(TEST_PACKAGE_REPO_NAME))
def test_check_versions(self):
slurper = Slurper(TEST_PACKAGE_REPO_NAME)
version = slurper.get_latest_version_number(TEST_PACKAGE_REPO_NAME)
# make me a package (Actually, make me a billionare)
slurper.get_or_create_package(TEST_PACKAGE_REPO_NAME, version)
# fetch the package for testing
package = Package.objects.get(title=TEST_PACKAGE_REPO_NAME)
self.assertTrue(package.pypi_downloads > 1000) | mit | Python |
a961e11c5b3666f2504cf2a0d46028b5957cb9bf | Fix doctest | mabuchilab/QNET | qnet/misc/testing_tools.py | qnet/misc/testing_tools.py | """Collection of routines needed for testing. This includes proto-fixtures,
i.e. routines that should be imported and then turned into a fixture with the
pytest.fixture decorator.
See <https://pytest.org/latest/fixture.html>
"""
import os
from glob import glob
from collections import OrderedDict
from distutils import dir_util
from qnet.misc.trajectory_data import TrajectoryData
def datadir(tmpdir, request):
'''Proto-fixture responsible for searching a folder with the same name of
test module and, if available, moving all contents to a temporary directory
so tests can use them freely.
In any test, import the datadir routine and turn it into a fixture:
>>> import pytest
>>> import qnet.misc.testing_tools
>>> datadir = pytest.fixture(qnet.misc.testing_tools.datadir)
'''
# http://stackoverflow.com/questions/29627341/pytest-where-to-store-expected-data
filename = request.module.__file__
test_dir, _ = os.path.splitext(filename)
if os.path.isdir(test_dir):
dir_util.copy_tree(test_dir, str(tmpdir))
return str(tmpdir)
def qsd_traj(datadir, folder, seed):
"""Return a proto-fixture that returns a TrajectoryData instance based on
all the *.out file in the given folder (relative to the test datadir), and
with the given seed.
The returned function should be turned into a fixture:
>>> import pytest
>>> import qnet.misc.testing_tools
>>> from qnet.misc.testing_tools import qsd_traj
>>> datadir = pytest.fixture(qnet.misc.testing_tools.datadir)
>>> traj1 = pytest.fixture(qsd_traj(datadir, 'traj1', 102121))
"""
def proto_fixture(datadir):
operators = OrderedDict()
datafiles = sorted(glob(os.path.join(datadir, folder, '*.out')))
assert len(datafiles) >0, "No files *.out in %s"%folder
for file in datafiles:
op_name = os.path.splitext(os.path.split(file)[1])[0]
operators[op_name] = file
return TrajectoryData.from_qsd_data(operators, seed=seed)
import pytest # local import, so that qnet can be installed w/o pytest
return proto_fixture
| """Collection of routines needed for testing. This includes proto-fixtures,
i.e. routines that should be imported and then turned into a fixture with the
pytest.fixture decorator.
See <https://pytest.org/latest/fixture.html>
"""
import os
from glob import glob
from collections import OrderedDict
from distutils import dir_util
from qnet.misc.trajectory_data import TrajectoryData
def datadir(tmpdir, request):
'''Proto-fixture responsible for searching a folder with the same name of
test module and, if available, moving all contents to a temporary directory
so tests can use them freely.
In any test, import the datadir routine and turn it into a fixture:
>>> import pytest
>>> import qnet.misc.testing
>>> datadir = pytest.fixture(qnet.misc.testing.datadir)
'''
# http://stackoverflow.com/questions/29627341/pytest-where-to-store-expected-data
filename = request.module.__file__
test_dir, _ = os.path.splitext(filename)
if os.path.isdir(test_dir):
dir_util.copy_tree(test_dir, str(tmpdir))
return str(tmpdir)
def qsd_traj(datadir, folder, seed):
"""Return a proto-fixture that returns a TrajectoryData instance based on
all the *.out file in the given folder (relative to the test datadir), and
with the given seed.
The returned function should be turned into a fixture:
>>> import pytest
>>> import qnet.misc.testing
>>> datadir = pytest.fixture(qnet.misc.testing.datadir)
>>> traj1 = pytest.fixture(qsd_traj(datadir, 'traj1', 102121))
"""
def proto_fixture(datadir):
operators = OrderedDict()
datafiles = sorted(glob(os.path.join(datadir, folder, '*.out')))
assert len(datafiles) >0, "No files *.out in %s"%folder
for file in datafiles:
op_name = os.path.splitext(os.path.split(file)[1])[0]
operators[op_name] = file
return TrajectoryData.from_qsd_data(operators, seed=seed)
import pytest # local import, so that qnet can be installed w/o pytest
return proto_fixture
| mit | Python |
a6b49b92bd942655c0fe9a1c745e53ea19e070b5 | create a new django custom tag to replace a substring in a global string | muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous,muslih/alfanous | src/alfanous-django/wui/templatetags/custom_filters.py | src/alfanous-django/wui/templatetags/custom_filters.py | '''
Created on Dec 29, 2012
@author: assem
'''
from django.template import Library
register = Library()
@register.filter
def get_range( value ):
""" make a range from a number starting of 1 """
return range( 1, value + 1 )
@register.filter
def space_split( str ):
""" split a string counting on spaces """
return str.split()
@register.simple_tag
def string_replace( string, oldword, newword ):
""" replace all occurrences of oldword in string by newword """
return string.replace( oldword, newword )
@register.simple_tag
def build_search_link( params, query, page, filter ):
""" build a search link based on a new query
usage: {% build_search_link params query filter %}link</a>
"""
# create a mutuable params object
new_params = {}
for k,v in params.items():
new_params[k]=v
# update params
new_params["page"] = page
new_params["sortedby"] = "mushaf"
if filter == "True" and params["query"] != query:
new_params["query"] = "(" + params["query"] + ") + " + query;
else:
new_params["query"] = query;
return build_params( new_params )
def build_params(params):
""" Concatenate the params to build a url GET request """
get_request = ""
for k, v in params.items():
get_request = get_request + unicode( k ) + "=" + unicode( v ) + "&"
return get_request[:-1]
| '''
Created on Dec 29, 2012
@author: assem
'''
from django.template import Library
register = Library()
@register.filter
def get_range( value ):
""" make a range from a number starting of 1 """
return range( 1, value + 1 )
@register.filter
def space_split( str ):
""" split a string counting on spaces """
return str.split()
@register.simple_tag
def build_search_link( params, query, page, filter ):
""" build a search link based on a new query
usage: {% build_search_link params query filter %}link</a>
"""
# create a mutuable params object
new_params = {}
for k,v in params.items():
new_params[k]=v
# update params
new_params["page"] = page
new_params["sortedby"] = "mushaf"
if filter == "True" and params["query"] != query:
new_params["query"] = "(" + params["query"] + ") + " + query;
else:
new_params["query"] = query;
return build_params( new_params )
def build_params(params):
""" Concatenate the params to build a url GET request """
get_request = ""
for k, v in params.items():
get_request = get_request + unicode( k ) + "=" + unicode( v ) + "&"
return get_request[:-1]
| agpl-3.0 | Python |
728cfe8e3c40ecd4e0128030d1d66864816626c8 | use single pipe to avoid problems with Jenkins reading them concurrently (#552) | ros-infrastructure/ros_buildfarm,ros-infrastructure/ros_buildfarm,ros-infrastructure/ros_buildfarm | ros_buildfarm/catkin_workspace.py | ros_buildfarm/catkin_workspace.py | # Copyright 2014-2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import subprocess
def ensure_workspace_exists(workspace_root):
# ensure that workspace exists
assert os.path.exists(workspace_root), \
"Workspace root '%s' does not exist" % workspace_root
source_space = os.path.join(workspace_root, 'src')
assert os.path.exists(source_space), \
"Source space '%s' does not exist" % source_space
def clean_workspace(workspace_root):
# clean up build, devel and install spaces
build_space = os.path.join(workspace_root, 'build_isolated')
if os.path.exists(build_space):
shutil.rmtree(build_space)
devel_space = os.path.join(workspace_root, 'devel_isolated')
if os.path.exists(devel_space):
shutil.rmtree(devel_space)
install_space = os.path.join(workspace_root, 'install_isolated')
if os.path.exists(install_space):
shutil.rmtree(install_space)
test_results_dir = os.path.join(workspace_root, 'test_results')
if os.path.exists(test_results_dir):
shutil.rmtree(test_results_dir)
def call_catkin_make_isolated(
rosdistro_name, workspace_root, args, parent_result_spaces=None):
# command to run
script_name = 'catkin_make_isolated'
# use script from source space if available
source_space = os.path.join(workspace_root, 'src')
script_from_source = os.path.join(
source_space, 'catkin', 'bin', script_name)
if os.path.exists(script_from_source):
script_name = script_from_source
cmd = ' '.join(
['PYTHONIOENCODING=utf_8', 'PYTHONUNBUFFERED=1', script_name] + args)
# prepend setup files if available
if parent_result_spaces is None:
parent_result_spaces = ['/opt/ros/%s' % rosdistro_name]
for parent_result_space in reversed(parent_result_spaces):
setup_file = os.path.join(parent_result_space, 'setup.sh')
if os.path.exists(setup_file):
cmd = '. %s && %s' % (setup_file, cmd)
print("Invoking '%s' in '%s'" % (cmd, workspace_root))
return subprocess.call(
cmd, cwd=workspace_root, shell=True, stderr=subprocess.STDOUT)
| # Copyright 2014-2016 Open Source Robotics Foundation, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
import subprocess
def ensure_workspace_exists(workspace_root):
# ensure that workspace exists
assert os.path.exists(workspace_root), \
"Workspace root '%s' does not exist" % workspace_root
source_space = os.path.join(workspace_root, 'src')
assert os.path.exists(source_space), \
"Source space '%s' does not exist" % source_space
def clean_workspace(workspace_root):
# clean up build, devel and install spaces
build_space = os.path.join(workspace_root, 'build_isolated')
if os.path.exists(build_space):
shutil.rmtree(build_space)
devel_space = os.path.join(workspace_root, 'devel_isolated')
if os.path.exists(devel_space):
shutil.rmtree(devel_space)
install_space = os.path.join(workspace_root, 'install_isolated')
if os.path.exists(install_space):
shutil.rmtree(install_space)
test_results_dir = os.path.join(workspace_root, 'test_results')
if os.path.exists(test_results_dir):
shutil.rmtree(test_results_dir)
def call_catkin_make_isolated(
rosdistro_name, workspace_root, args, parent_result_spaces=None):
# command to run
script_name = 'catkin_make_isolated'
# use script from source space if available
source_space = os.path.join(workspace_root, 'src')
script_from_source = os.path.join(
source_space, 'catkin', 'bin', script_name)
if os.path.exists(script_from_source):
script_name = script_from_source
cmd = ' '.join(
['PYTHONIOENCODING=utf_8', 'PYTHONUNBUFFERED=1', script_name] + args)
# prepend setup files if available
if parent_result_spaces is None:
parent_result_spaces = ['/opt/ros/%s' % rosdistro_name]
for parent_result_space in reversed(parent_result_spaces):
setup_file = os.path.join(parent_result_space, 'setup.sh')
if os.path.exists(setup_file):
cmd = '. %s && %s' % (setup_file, cmd)
print("Invoking '%s' in '%s'" % (cmd, workspace_root))
return subprocess.call(cmd, cwd=workspace_root, shell=True)
| apache-2.0 | Python |
8369d189e822fa7496864cac4ddc906bf7c05fe3 | Convert gaphor/UML/classes/tests/test_interface.py to pytest | amolenaar/gaphor,amolenaar/gaphor | gaphor/UML/classes/tests/test_interface.py | gaphor/UML/classes/tests/test_interface.py | """Test classes."""
from gaphor import UML
from gaphor.UML.classes.interface import Folded, InterfaceItem
class TestInterface:
def test_interface_creation(self, case):
"""Test interface creation."""
iface = case.create(InterfaceItem, UML.Interface)
assert isinstance(iface.subject, UML.Interface)
def test_folded_interface_persistence(self, case):
"""Test folded interface saving/loading."""
iface = case.create(InterfaceItem, UML.Interface)
# note: assembly folded mode..
iface.folded = Folded.REQUIRED
data = case.save()
case.load(data)
interfaces = list(case.diagram.select(InterfaceItem))
assert len(interfaces) == 1
# ... gives provided folded mode on load;
# correct folded mode is determined by connections, which will be
# recreated later, i.e. required folded mode will be set when
# implementation connects to the interface and Folded.PROVIDED
# is equal to interfaces[0].folded
| """Test classes."""
from gaphor import UML
from gaphor.tests import TestCase
from gaphor.UML.classes.interface import Folded, InterfaceItem
class InterfaceTestCase(TestCase):
def test_interface_creation(self):
"""Test interface creation."""
iface = self.create(InterfaceItem, UML.Interface)
assert isinstance(iface.subject, UML.Interface)
def test_folded_interface_persistence(self):
"""Test folded interface saving/loading."""
iface = self.create(InterfaceItem, UML.Interface)
# note: assembly folded mode..
iface.folded = Folded.REQUIRED
data = self.save()
self.load(data)
interfaces = list(self.diagram.select(InterfaceItem))
assert len(interfaces) == 1
# ... gives provided folded mode on load;
# correct folded mode is determined by connections, which will be
# recreated later, i.e. required folded mode will be set when
# implementation connects to the interface and Folded.PROVIDED
# is equal to interfaces[0].folded
| lgpl-2.1 | Python |
a842439edb47524b64345d3a893199f3b92f2b14 | Fix top-level domain extraction from site name. | OSSystems/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server,Linaro/lava-server | google_analytics/templatetags/analytics.py | google_analytics/templatetags/analytics.py | from django import template
from django.db import models
from django.contrib.sites.models import Site
from django.template import Context, loader
register = template.Library()
Analytics = models.get_model('google_analytics', 'analytic')
def do_get_analytics(parser, token):
try:
# split_contents() knows not to split quoted strings.
tag_name, code = token.split_contents()
except ValueError:
code = None
if not code:
current_site = Site.objects.get_current()
else:
if not (code[0] == code[-1] and code[0] in ('"', "'")):
raise template.TemplateSyntaxError, "%r tag's argument should be in quotes" % tag_name
code = code[1:-1]
current_site = None
return AnalyticsNode(current_site, code)
class AnalyticsNode(template.Node):
def __init__(self, site=None, code=None):
self.site = site
self.code = code
def render(self, context):
content = ''
if self.site:
code_set = Analytics.objects.filter(site=self.site)
if code_set:
code = code_set[0].analytics_code
else:
return ''
elif self.code:
code = self.code
else:
return ''
if code.strip() != '':
t = loader.get_template('google_analytics/analytics_template.html')
c = Context({
'analytics_code': code,
'domain': '.'.join(self.site.domain.split('.')[-2:])
})
return t.render(c)
else:
return ''
register.tag('analytics', do_get_analytics)
| from django import template
from django.db import models
from django.contrib.sites.models import Site
from django.template import Context, loader
register = template.Library()
Analytics = models.get_model('google_analytics', 'analytic')
def do_get_analytics(parser, token):
try:
# split_contents() knows not to split quoted strings.
tag_name, code = token.split_contents()
except ValueError:
code = None
if not code:
current_site = Site.objects.get_current()
else:
if not (code[0] == code[-1] and code[0] in ('"', "'")):
raise template.TemplateSyntaxError, "%r tag's argument should be in quotes" % tag_name
code = code[1:-1]
current_site = None
return AnalyticsNode(current_site, code)
class AnalyticsNode(template.Node):
def __init__(self, site=None, code=None):
self.site = site
self.code = code
def render(self, context):
content = ''
if self.site:
code_set = Analytics.objects.filter(site=self.site)
if code_set:
code = code_set[0].analytics_code
else:
return ''
elif self.code:
code = self.code
else:
return ''
if code.strip() != '':
t = loader.get_template('google_analytics/analytics_template.html')
c = Context({
'analytics_code': code,
'domain': '.'.join(self.site.domain.split('.')[2:])
})
return t.render(c)
else:
return ''
register.tag('analytics', do_get_analytics)
| agpl-3.0 | Python |
0d2f35ddc27cf4c7155a4d1648c0bbfe0ff3a528 | Fix the bool name in the array API namespace | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | numpy/_array_api/dtypes.py | numpy/_array_api/dtypes.py | from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64
# Note: This name is changed
from .. import bool_ as bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| from .. import int8, int16, int32, int64, uint8, uint16, uint32, uint64, float32, float64, bool
__all__ = ['int8', 'int16', 'int32', 'int64', 'uint8', 'uint16', 'uint32', 'uint64', 'float32', 'float64', 'bool']
| mit | Python |
ff268941bfc588e21a2f460c034e3c0a99837d23 | Fix migration order (post-rebase) | OmeGak/indico,ThiefMaster/indico,DirkHoffmann/indico,indico/indico,mic4ael/indico,DirkHoffmann/indico,pferreir/indico,mvidalgarcia/indico,mvidalgarcia/indico,mic4ael/indico,DirkHoffmann/indico,OmeGak/indico,ThiefMaster/indico,pferreir/indico,OmeGak/indico,indico/indico,ThiefMaster/indico,indico/indico,pferreir/indico,OmeGak/indico,mvidalgarcia/indico,mvidalgarcia/indico,pferreir/indico,indico/indico,DirkHoffmann/indico,mic4ael/indico,mic4ael/indico,ThiefMaster/indico | migrations/versions/201502111317_233928da84b2_create_video_conference_rooms.py | migrations/versions/201502111317_233928da84b2_create_video_conference_rooms.py | """Create video conference rooms
Revision ID: 233928da84b2
Revises: 50c2b5ee2726
Create Date: 2015-02-11 13:17:44.365589
"""
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum
from indico.core.db.sqlalchemy import UTCDateTime
from indico.modules.vc.models.vc_rooms import VCRoomLinkType, VCRoomStatus
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '233928da84b2'
down_revision = '5583f647dff5'
def upgrade():
op.create_table('vc_rooms',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('type', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('status', PyIntEnum(VCRoomStatus), nullable=False),
sa.Column('created_by_id', sa.Integer(), nullable=False, index=True),
sa.Column('created_dt', UTCDateTime, nullable=False),
sa.Column('modified_dt', UTCDateTime, nullable=True),
sa.Column('data', postgresql.JSON(), nullable=False),
sa.PrimaryKeyConstraint('id'),
schema='events')
op.create_table('vc_room_events',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('event_id', sa.Integer(), autoincrement=False, nullable=False, index=True),
sa.Column('vc_room_id', sa.Integer(), nullable=False, index=True),
sa.Column('link_type', PyIntEnum(VCRoomLinkType), nullable=False),
sa.Column('link_id', sa.String(), nullable=True),
sa.Column('show', sa.Boolean(), nullable=False),
sa.Column('data', postgresql.JSON(), nullable=False),
sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id']),
sa.PrimaryKeyConstraint('id'),
schema='events')
def downgrade():
op.drop_table('vc_room_events', schema='events')
op.drop_table('vc_rooms', schema='events')
| """Create video conference rooms
Revision ID: 233928da84b2
Revises: 50c2b5ee2726
Create Date: 2015-02-11 13:17:44.365589
"""
import sqlalchemy as sa
from alembic import op
from indico.core.db.sqlalchemy import PyIntEnum
from indico.core.db.sqlalchemy import UTCDateTime
from indico.modules.vc.models.vc_rooms import VCRoomLinkType, VCRoomStatus
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = '233928da84b2'
down_revision = '50c2b5ee2726'
def upgrade():
op.create_table('vc_rooms',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('type', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('status', PyIntEnum(VCRoomStatus), nullable=False),
sa.Column('created_by_id', sa.Integer(), nullable=False, index=True),
sa.Column('created_dt', UTCDateTime, nullable=False),
sa.Column('modified_dt', UTCDateTime, nullable=True),
sa.Column('data', postgresql.JSON(), nullable=False),
sa.PrimaryKeyConstraint('id'),
schema='events')
op.create_table('vc_room_events',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('event_id', sa.Integer(), autoincrement=False, nullable=False, index=True),
sa.Column('vc_room_id', sa.Integer(), nullable=False, index=True),
sa.Column('link_type', PyIntEnum(VCRoomLinkType), nullable=False),
sa.Column('link_id', sa.String(), nullable=True),
sa.Column('show', sa.Boolean(), nullable=False),
sa.Column('data', postgresql.JSON(), nullable=False),
sa.ForeignKeyConstraint(['vc_room_id'], ['events.vc_rooms.id']),
sa.PrimaryKeyConstraint('id'),
schema='events')
def downgrade():
op.drop_table('vc_room_events', schema='events')
op.drop_table('vc_rooms', schema='events')
| mit | Python |
8d40cd3dab606d558806fa00b0ed5df73c457045 | Fix for issue #2. | marcioreyes/bgui,Moguri/bgui,Remwrath/bgui,Moguri/bgui | bgui/frame.py | bgui/frame.py | from .gl_utils import *
from .widget import Widget, BGUI_DEFAULT
class Frame(Widget):
"""Frame for storing other widgets"""
theme_section = 'Frame'
theme_options = {
'Color1': (0, 0, 0, 0),
'Color2': (0, 0, 0, 0),
'Color3': (0, 0, 0, 0),
'Color4': (0, 0, 0, 0),
'BorderSize': 0,
'BorderColor': (0, 0, 0, 1),
}
def __init__(self, parent, name=None, border=None, aspect=None, size=[1, 1], pos=[0, 0],
sub_theme='', options=BGUI_DEFAULT):
"""
:param parent: the widget's parent
:param name: the name of the widget
:param border: the size of the border around the frame (0 for no border)
:param aspect: constrain the widget size to a specified aspect ratio
:param size: a tuple containing the width and height
:param pos: a tuple containing the x and y position
:param sub_theme: name of a sub_theme defined in the theme file (similar to CSS classes)
:param options: various other options
"""
Widget.__init__(self, parent, name, aspect, size, pos, sub_theme, options)
#: The colors for the four corners of the frame.
self.colors = [
self.theme['Color1'],
self.theme['Color2'],
self.theme['Color3'],
self.theme['Color4']
]
#: The color of the border around the frame.
self.border_color = self.theme['BorderColor']
#: The size of the border around the frame.
if border is not None:
self.border = border
else:
self.border = self.theme['BorderSize']
def _draw(self):
"""Draw the frame"""
# Enable alpha blending
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
# Enable polygon offset
glEnable(GL_POLYGON_OFFSET_FILL)
glPolygonOffset(1.0, 1.0)
glBegin(GL_QUADS)
for i in range(4):
glColor4f(self.colors[i][0], self.colors[i][1], self.colors[i][2], self.colors[i][3])
glVertex2f(self.gl_position[i][0], self.gl_position[i][1])
glEnd()
glDisable(GL_POLYGON_OFFSET_FILL)
# Draw an outline
if self.border > 0:
# border = self.border/2
r, g, b, a = self.border_color
glColor4f(r, g, b, a)
glPolygonMode(GL_FRONT, GL_LINE)
glLineWidth(self.border)
glBegin(GL_QUADS)
for i in range(4):
glVertex2f(self.gl_position[i][0], self.gl_position[i][1])
glEnd()
glLineWidth(1.0)
glPolygonMode(GL_FRONT, GL_FILL)
Widget._draw(self)
| from .gl_utils import *
from .widget import Widget, BGUI_DEFAULT
class Frame(Widget):
"""Frame for storing other widgets"""
theme_section = 'Frame'
theme_options = {
'Color1': (0, 0, 0, 0),
'Color2': (0, 0, 0, 0),
'Color3': (0, 0, 0, 0),
'Color4': (0, 0, 0, 0),
'BorderSize': 0,
'BorderColor': (0, 0, 0, 1),
}
def __init__(self, parent, name=None, border=None, aspect=None, size=[1, 1], pos=[0, 0],
sub_theme='', options=BGUI_DEFAULT):
"""
:param parent: the widget's parent
:param name: the name of the widget
:param border: the size of the border around the frame (0 for no border)
:param aspect: constrain the widget size to a specified aspect ratio
:param size: a tuple containing the width and height
:param pos: a tuple containing the x and y position
:param sub_theme: name of a sub_theme defined in the theme file (similar to CSS classes)
:param options: various other options
"""
Widget.__init__(self, parent, name, aspect, size, pos, sub_theme, options)
#: The colors for the four corners of the frame.
self.colors = [
self.theme['Color1'],
self.theme['Color2'],
self.theme['Color3'],
self.theme['Color4']
]
#: The color of the border around the frame.
self.border_color = self.theme['BorderColor']
#: The size of the border around the frame.
self.border = border if border else self.theme['BorderSize']
def _draw(self):
"""Draw the frame"""
# Enable alpha blending
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
# Enable polygon offset
glEnable(GL_POLYGON_OFFSET_FILL)
glPolygonOffset(1.0, 1.0)
glBegin(GL_QUADS)
for i in range(4):
glColor4f(self.colors[i][0], self.colors[i][1], self.colors[i][2], self.colors[i][3])
glVertex2f(self.gl_position[i][0], self.gl_position[i][1])
glEnd()
glDisable(GL_POLYGON_OFFSET_FILL)
# Draw an outline
if self.border > 0:
# border = self.border/2
r, g, b, a = self.border_color
glColor4f(r, g, b, a)
glPolygonMode(GL_FRONT, GL_LINE)
glLineWidth(self.border)
glBegin(GL_QUADS)
for i in range(4):
glVertex2f(self.gl_position[i][0], self.gl_position[i][1])
glEnd()
glLineWidth(1.0)
glPolygonMode(GL_FRONT, GL_FILL)
Widget._draw(self)
| mit | Python |
7a0560d8bd9dcb421b54522df92618d439941e69 | Change bill detail page to use session and identifier | jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot | bills/urls.py | bills/urls.py | from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
| from . import views
from django.conf.urls import url
urlpatterns = [
url(r'^by_topic/', views.bill_list_by_topic),
url(r'^by_location', views.bill_list_by_location),
url(r'^latest_activity/', views.latest_bill_activity),
url(r'^latest/', views.latest_bill_actions),
url(r'^detail/(?P<bill_id>(.*))/$', views.bill_detail, name='bill_detail'),
]
| mit | Python |
14e2d2282b7c95a1bb6d475faa6d827d90609e16 | Define PostAdmin list_display. | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | blog/admin.py | blog/admin.py | from django.contrib import admin
from .models import Post
@admin.register(Post)
class PostAdmin(admin.ModelAdmin):
list_display = ('title', 'pub_date')
| from django.contrib import admin
from .models import Post
admin.site.register(Post)
| bsd-2-clause | Python |
0d389018353f03d79332a1b40d6dc1881df91cd0 | Fix sorting of items in RSS feed | thelabnyc/wagtail_blog,thelabnyc/wagtail_blog | blog/views.py | blog/views.py | from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from .models import BlogIndexPage, BlogPage, BlogCategory
from django.shortcuts import get_object_or_404
from django.conf import settings
def tag_view(request, tag):
index = BlogIndexPage.objects.first()
return index.serve(request, tag=tag)
def category_view(request, category):
index = BlogIndexPage.objects.first()
return index.serve(request, category=category)
def author_view(request, author):
index = BlogIndexPage.objects.first()
return index.serve(request, author=author)
class LatestEntriesFeed(Feed):
'''
If a URL ends with "rss" try to find a matching BlogIndexPage
and return its items.
'''
def get_object(self, request, blog_slug):
return get_object_or_404(BlogIndexPage, slug=blog_slug)
def title(self, blog):
if blog.seo_title:
return blog.seo_title
return blog.title
def link(self, blog):
return blog.full_url
def description(self, blog):
return blog.search_description
def items(self, blog):
num = getattr(settings, 'BLOG_PAGINATION_PER_PAGE', 10)
return blog.get_descendants().order_by('-first_published_at')[:num]
def item_title(self, item):
return item.title
def item_description(self, item):
return item.specific.body
def item_link(self, item):
return item.full_url
class LatestEntriesFeedAtom(LatestEntriesFeed):
feed_type = Atom1Feed
class LatestCategoryFeed(Feed):
description = "A Blog"
def title(self, category):
return "Blog: " + category.name
def link(self, category):
return "/blog/category/" + category.slug
def get_object(self, request, category):
return get_object_or_404(BlogCategory, slug=category)
def items(self, obj):
return BlogPage.objects.filter(
categories__category=obj).order_by('-date')[:5]
def item_title(self, item):
return item.title
def item_description(self, item):
return item.body
| from django.contrib.syndication.views import Feed
from django.utils.feedgenerator import Atom1Feed
from .models import BlogIndexPage, BlogPage, BlogCategory
from django.shortcuts import get_object_or_404
from django.conf import settings
def tag_view(request, tag):
index = BlogIndexPage.objects.first()
return index.serve(request, tag=tag)
def category_view(request, category):
index = BlogIndexPage.objects.first()
return index.serve(request, category=category)
def author_view(request, author):
index = BlogIndexPage.objects.first()
return index.serve(request, author=author)
class LatestEntriesFeed(Feed):
'''
If a URL ends with "rss" try to find a matching BlogIndexPage
and return its items.
'''
def get_object(self, request, blog_slug):
return get_object_or_404(BlogIndexPage, slug=blog_slug)
def title(self, blog):
if blog.seo_title:
return blog.seo_title
return blog.title
def link(self, blog):
return blog.full_url
def description(self, blog):
return blog.search_description
def items(self, blog):
num = getattr(settings, 'BLOG_PAGINATION_PER_PAGE', 10)
return blog.get_descendants()[:num]
def item_title(self, item):
return item.title
def item_description(self, item):
return item.specific.body
def item_link(self, item):
return item.full_url
class LatestEntriesFeedAtom(LatestEntriesFeed):
feed_type = Atom1Feed
class LatestCategoryFeed(Feed):
description = "A Blog"
def title(self, category):
return "Blog: " + category.name
def link(self, category):
return "/blog/category/" + category.slug
def get_object(self, request, category):
return get_object_or_404(BlogCategory, slug=category)
def items(self, obj):
return BlogPage.objects.filter(
categories__category=obj).order_by('-date')[:5]
def item_title(self, item):
return item.title
def item_description(self, item):
return item.body
| apache-2.0 | Python |
3e2cbd52a916b767473335427702ecf3bae5a51d | create dir if image_filter not exist | ttchin/FaceDetected | CapturePictures.py | CapturePictures.py | #!/usr/bin/env python
import cv2
import os
import argparse
def capturePicturesByCamera(num = 300, saveDir = "./image_filter/"):
"""
Capture pictures with faces detected.
Args:
num (int): The number of pictures to capture. Default: 300.
saveDir (str): The directory to save the captured pictures. Default: "./image_filter/". Note: Please make sure the directory has been created.
Returns:
void
Todo:
* Handling of file path construction.
* Disable logging of cv2.
"""
face_cascade = cv2.CascadeClassifier('opencv_config/haarcascade_frontalface_default.xml')
cap = cv2.VideoCapture(0)
count = 1
while True:
# Capture frame-by-frame
ret, frame = cap.read()
# Detect faces in the gray frame
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray_frame, 1.3, 5)
# The frame will be saved when the faces are detected
if len(faces) > 0:
# Save frame as JPEG file
frame_file_path = saveDir + ("frame%d.jpg" % count)
cv2.imwrite(frame_file_path, frame)
print("%d picture(s) captured & saved!" % count)
count += 1
# Draw rectangles which point out the faces
for (x, y, w, h) in faces:
cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
# Display the captured frame
cv2.imshow('Camera', frame)
# Wait for 'q' on the Camera window to quit before entire capturing job finished
if cv2.waitKey(1) & 0xFF == ord('q'):
cv2.destroyAllWindows()
break
if count > num:
cv2.destroyAllWindows()
break
cap.release()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Capture pictures with faces detected.')
parser.add_argument('-n', type=int, help='the number of pictures to capture. Default: 300')
parser.add_argument('-d', type=str, help='the directory to save the captured pictures. Default: "./image_filter/". Note: Please make sure the directory has been created')
parser.set_defaults(n = 300, d = "./image_filter/")
args = parser.parse_args()
if not os.path.exists("./image_filter"):
os.makedirs('image_filter/after')
# Start the capturing
capturePicturesByCamera(args.n, args.d)
| #!/usr/bin/env python
import cv2
import sys
import argparse
def capturePicturesByCamera(num = 300, saveDir = "./image_filter/"):
"""
Capture pictures with faces detected.
Args:
num (int): The number of pictures to capture. Default: 300.
saveDir (str): The directory to save the captured pictures. Default: "./image_filter/". Note: Please make sure the directory has been created.
Returns:
void
Todo:
* Handling of file path construction.
* Disable logging of cv2.
"""
face_cascade = cv2.CascadeClassifier('opencv_config/haarcascade_frontalface_default.xml')
cap = cv2.VideoCapture(0)
count = 1
while True:
# Capture frame-by-frame
ret, frame = cap.read()
# Detect faces in the gray frame
gray_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray_frame, 1.3, 5)
# The frame will be saved when the faces are detected
if len(faces) > 0:
# Save frame as JPEG file
frame_file_path = saveDir + ("frame%d.jpg" % count)
cv2.imwrite(frame_file_path, frame)
print("%d picture(s) captured & saved!" % count)
count += 1
# Draw rectangles which point out the faces
for (x, y, w, h) in faces:
cv2.rectangle(frame, (x, y), (x+w, y+h), (0, 255, 0), 2)
# Display the captured frame
cv2.imshow('Camera', frame)
# Wait for 'q' on the Camera window to quit before entire capturing job finished
if cv2.waitKey(1) & 0xFF == ord('q'):
cv2.destroyAllWindows()
break
if count > num:
cv2.destroyAllWindows()
break
cap.release()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Capture pictures with faces detected.')
parser.add_argument('-n', type=int, help='the number of pictures to capture. Default: 300')
parser.add_argument('-d', type=str, help='the directory to save the captured pictures. Default: "./image_filter/". Note: Please make sure the directory has been created')
parser.set_defaults(n = 300, d = "./image_filter/")
args = parser.parse_args()
# Start the capturing
capturePicturesByCamera(args.n, args.d)
| mit | Python |
d9f623baaa8e1d1075f9132108ed7bb11eea39b0 | Replace dask.get from core.get to async.get_sync | vikhyat/dask,cowlicks/dask,ContinuumIO/dask,blaze/dask,ContinuumIO/dask,mraspaud/dask,mrocklin/dask,cpcloud/dask,jakirkham/dask,chrisbarber/dask,jakirkham/dask,blaze/dask,mikegraham/dask,pombredanne/dask,gameduell/dask,pombredanne/dask,dask/dask,dask/dask,vikhyat/dask,mrocklin/dask,jcrist/dask,mraspaud/dask,jcrist/dask | dask/__init__.py | dask/__init__.py | from __future__ import absolute_import, division, print_function
from .core import istask
from .context import set_options
from .async import get_sync as get
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| from __future__ import absolute_import, division, print_function
from .core import istask, get
from .context import set_options
try:
from .imperative import do, value
except ImportError:
pass
__version__ = '0.7.3'
| bsd-3-clause | Python |
de4f02fff4b23a442abe3062c2da4c52d8823627 | Fix spurious deprecation warning for fatal_warnings (#6237) | benjyw/pants,twitter/pants,wisechengyi/pants,tdyas/pants,tdyas/pants,jsirois/pants,jsirois/pants,wisechengyi/pants,twitter/pants,pantsbuild/pants,twitter/pants,wisechengyi/pants,pantsbuild/pants,pantsbuild/pants,wisechengyi/pants,twitter/pants,tdyas/pants,wisechengyi/pants,tdyas/pants,wisechengyi/pants,tdyas/pants,benjyw/pants,pantsbuild/pants,twitter/pants,wisechengyi/pants,benjyw/pants,tdyas/pants,twitter/pants,tdyas/pants,benjyw/pants,benjyw/pants,wisechengyi/pants,twitter/pants,pantsbuild/pants,twitter/pants,benjyw/pants,benjyw/pants,pantsbuild/pants,jsirois/pants,tdyas/pants,pantsbuild/pants,tdyas/pants,twitter/pants,wisechengyi/pants | src/python/pants/backend/jvm/subsystems/zinc_language_mixin.py | src/python/pants/backend/jvm/subsystems/zinc_language_mixin.py | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import object
from pants.base.deprecated import deprecated
class ZincLanguageMixin(object):
"""A mixin for subsystems for languages compiled with Zinc."""
@classmethod
def register_options(cls, register):
super(ZincLanguageMixin, cls).register_options(register)
# NB: This option is fingerprinted because the default value is not included in a target's
# fingerprint. This also has the effect of invalidating only the relevant tasks: ZincCompile
# in this case.
register('--strict-deps', advanced=True, default=False, fingerprint=True, type=bool,
help='The default for the "strict_deps" argument for targets of this language.')
register('--fatal-warnings', advanced=True, type=bool,
fingerprint=True,
removal_version='1.11.0.dev0',
removal_hint='Use --compiler-option-sets=fatal_warnings instead of fatal_warnings',
help='The default for the "fatal_warnings" argument for targets of this language.')
register('--compiler-option-sets', advanced=True, default=[], type=list,
fingerprint=True,
help='The default for the "compiler_option_sets" argument '
'for targets of this language.')
register('--zinc-file-manager', advanced=True, default=True, type=bool,
fingerprint=True,
help='Use zinc provided file manager to ensure transactional rollback.')
@property
def strict_deps(self):
"""When True, limits compile time deps to those that are directly declared by a target.
:rtype: bool
"""
return self.get_options().strict_deps
@property
@deprecated('1.11.0.dev0', 'Consume fatal_warnings from compiler_option_sets instead.')
def fatal_warnings(self):
"""If true, make warnings fatal for targets that do not specify fatal_warnings.
:rtype: bool
"""
return self.get_options().fatal_warnings
@property
def compiler_option_sets(self):
"""For every element in this list, enable the corresponding flags on compilation
of targets.
:rtype: list
"""
option_sets = self.get_options().compiler_option_sets
if 'fatal_warnings' not in option_sets and self.get_options().fatal_warnings:
option_sets.append('fatal_warnings')
return option_sets
@property
def zinc_file_manager(self):
"""If false, the default file manager will be used instead of the zinc provided one.
:rtype: bool
"""
return self.get_options().zinc_file_manager
| # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
from builtins import object
from pants.base.deprecated import deprecated
class ZincLanguageMixin(object):
"""A mixin for subsystems for languages compiled with Zinc."""
@classmethod
def register_options(cls, register):
super(ZincLanguageMixin, cls).register_options(register)
# NB: This option is fingerprinted because the default value is not included in a target's
# fingerprint. This also has the effect of invalidating only the relevant tasks: ZincCompile
# in this case.
register('--strict-deps', advanced=True, default=False, fingerprint=True, type=bool,
help='The default for the "strict_deps" argument for targets of this language.')
register('--fatal-warnings', advanced=True, type=bool,
fingerprint=True,
removal_version='1.11.0.dev0',
removal_hint='Use --compiler-option-sets=fatal_warnings instead of fatal_warnings',
help='The default for the "fatal_warnings" argument for targets of this language.')
register('--compiler-option-sets', advanced=True, default=[], type=list,
fingerprint=True,
help='The default for the "compiler_option_sets" argument '
'for targets of this language.')
register('--zinc-file-manager', advanced=True, default=True, type=bool,
fingerprint=True,
help='Use zinc provided file manager to ensure transactional rollback.')
@property
def strict_deps(self):
"""When True, limits compile time deps to those that are directly declared by a target.
:rtype: bool
"""
return self.get_options().strict_deps
@property
@deprecated('1.11.0.dev0', 'Consume fatal_warnings from compiler_option_sets instead.')
def fatal_warnings(self):
"""If true, make warnings fatal for targets that do not specify fatal_warnings.
:rtype: bool
"""
return self.get_options().fatal_warnings
@property
def compiler_option_sets(self):
"""For every element in this list, enable the corresponding flags on compilation
of targets.
:rtype: list
"""
option_sets = self.get_options().compiler_option_sets
if 'fatal_warnings' not in option_sets and self.fatal_warnings:
option_sets.append('fatal_warnings')
return option_sets
@property
def zinc_file_manager(self):
"""If false, the default file manager will be used instead of the zinc provided one.
:rtype: bool
"""
return self.get_options().zinc_file_manager
| apache-2.0 | Python |
93b752a251b43c268a6becb53ab298e958a46aeb | add Category Field in template | manducku/awesomepose,manducku/awesomepose,manducku/awesomepose,manducku/awesomepose | awesomepose/posts/forms/post.py | awesomepose/posts/forms/post.py | from django import forms
from django.forms import ModelMultipleChoiceField
from django_summernote.widgets import SummernoteWidget, SummernoteInplaceWidget
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Field, Fieldset, Button, Div
from crispy_forms.bootstrap import (
PrependedText, PrependedAppendedText, FormActions)
from mptt.forms import TreeNodeChoiceField
from posts.models import Post
from categories.models import Category
class PostForm(forms.ModelForm):
category = TreeNodeChoiceField(queryset=Category.objects.all(), level_indicator='----',)
def __init__(self, *args, **kwargs):
super(PostForm, self).__init__(*args, **kwargs)
self.fields['category'].label = "카테고리"
self.fields['title'].label = "제목"
self.fields['content'].label = "상세 리뷰"
self.fields['product_url'].label = "구매 주소"
self.helper = FormHelper()
self.helper.form_method = 'POST'
self.helper.label_class = 'control-label'
self.helper.layout = Layout(
Field('category', css_class='form-control col-lg-8', placeholder="제목을 입력해 주세요"),
Field('title', css_class='form-control', placeholder="제목을 입력해 주세요"),
Field('content', css_class='form-control', ),
Field('product_url', css_class='form-control', placeholder="구매처의 주소를 붙여넣어 주세요"),
FormActions(Submit('save', '저장하기', css_class='btn btn-primary'),
Button('cancel', 'Cancel', css_class='btn btn-default')
),
)
class Meta:
model = Post
widgets = {
'title': forms.TextInput(),
'content': SummernoteInplaceWidget(
),
'product_url': forms.TextInput(),
}
fields = ['category', 'title', 'content', 'product_url']
field_classes = {
'category': TreeNodeChoiceField,
}
| from django import forms
from django_summernote.widgets import SummernoteWidget, SummernoteInplaceWidget
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Layout, Field, Fieldset, Button
from crispy_forms.bootstrap import (
PrependedText, PrependedAppendedText, FormActions)
from posts.models import Post
class PostForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super(PostForm, self).__init__(*args, **kwargs)
self.fields['title'].label = "제목"
self.fields['content'].label = "상세 리뷰"
self.fields['product_url'].label = "구매 주소"
self.helper = FormHelper()
self.helper.form_method = 'POST'
self.helper.label_class = 'control-label'
self.helper.layout = Layout(
Field('title', css_class='form-control', placeholder="제목을 입력해 주세요"),
Field('content', css_class='form-control', ),
Field('product_url', css_class='form-control', placeholder="구매처의 주소를 붙여넣어 주세요"),
FormActions(Submit('save', '저장하기', css_class='btn btn-primary'),
Button('cancel', 'Cancel', css_class='btn btn-default')
),
)
class Meta:
model = Post
widgets = {
'title': forms.TextInput(),
'content': SummernoteInplaceWidget(
),
'product_url': forms.TextInput(),
}
fields = ['title', 'content', 'product_url']
| mit | Python |
ad2944a49b357494ff09a729b468f2fb19934909 | remove vertically-aligned assignments, per PEP8 | anl-mcampos/GuestBook,anl-mcampos/GuestBook | guestbook/__init__.py | guestbook/__init__.py | # coding: utf-8
import shelve
from datetime import datetime
from flask import Flask, request, render_template, redirect, escape, Markup
application = Flask(__name__)
DATA_FILE = 'guestbook.dat'
def save_data(name, comment, create_at):
database = shelve.open(DATA_FILE)
if 'greeting_list' not in database:
greeting_list = []
else:
greeting_list = database['greeting_list']
greeting_list.insert(0, {
'name': name,
'comment': comment,
'create_at': create_at
})
database['greeting_list'] = greeting_list
database.close()
def load_data():
database = shelve.open(DATA_FILE)
greeting_list = database.get('greeting_list', [])
database.close()
return greeting_list
@application.route('/')
def index():
greeting_list = load_data()
return render_template('index.html', greeting_list=greeting_list)
@application.route('/post', methods=['POST'])
def post():
name = request.form.get('name')
comment = request.form.get('comment')
create_at = datetime.now()
save_data(name, comment, create_at)
return redirect('/')
@application.template_filter('nl2br')
def nl2br_filter(s):
return escape(s).replace('\n', Markup('<br />'))
@application.template_filter('datetime_fmt')
def datetime_fmt_filter(dt):
return dt.strftime('%Y%m%d %H:%M:%S')
def main():
application.run('127.0.0.1', 8000)
if __name__ == "__main__":
application.run('127.0.0.1', 8000, debug=True)
| # coding: utf-8
import shelve
from datetime import datetime
from flask import Flask, request, render_template, redirect, escape, Markup
application = Flask(__name__)
DATA_FILE = 'guestbook.dat'
def save_data(name, comment, create_at):
database = shelve.open(DATA_FILE)
if 'greeting_list' not in database:
greeting_list = []
else:
greeting_list = database['greeting_list']
greeting_list.insert(0, {
'name': name,
'comment': comment,
'create_at': create_at
})
database['greeting_list'] = greeting_list
database.close()
def load_data():
database = shelve.open(DATA_FILE)
greeting_list = database.get('greeting_list', [])
database.close()
return greeting_list
@application.route('/')
def index():
greeting_list = load_data()
return render_template('index.html', greeting_list=greeting_list)
@application.route('/post', methods=['POST'])
def post():
name = request.form.get('name')
comment = request.form.get('comment')
create_at = datetime.now()
save_data(name, comment, create_at)
return redirect('/')
@application.template_filter('nl2br')
def nl2br_filter(s):
return escape(s).replace('\n', Markup('<br />'))
@application.template_filter('datetime_fmt')
def datetime_fmt_filter(dt):
return dt.strftime('%Y%m%d %H:%M:%S')
def main():
application.run('127.0.0.1', 8000)
if __name__ == "__main__":
application.run('127.0.0.1', 8000, debug=True)
| mit | Python |
f6efb0ff31ae8d0db5682cd7ad5b0921e3a4e924 | Bump version for new release. | promptworks/django_openstack_auth,promptworks/django_openstack_auth,gabrielhurley/django_openstack_auth,redhat-openstack/django_openstack_auth,jamielennox/django_openstack_auth,dan1/django_openstack_auth_proto,gabrielhurley/django_openstack_auth,redhat-openstack/django_openstack_auth,onsoku/codereview,onsoku/codereview,ging/django_openstack_auth,ging/django_openstack_auth,ashokkumarprajapati/openstack_auth_remote,dan1/django_openstack_auth_proto | openstack_auth/__init__.py | openstack_auth/__init__.py | # following PEP 386
__version__ = "1.0.7"
| # following PEP 386
__version__ = "1.0.6"
| apache-2.0 | Python |
7a99ade694c5844727ca33461dd3ad5271b61f14 | Improve q_n tests. | Duke-QCD/hic,jbernhard/hic | hic/test/test_flow.py | hic/test/test_flow.py | # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
from .. import flow
def test_qn(seed=1248):
# q_n(0) = 1
q = flow.qn(2, 0)
assert q == 1+0j, \
'Incorrect single-particle q_n ({} != 1).'.format(q)
# q_3(uniform phi) = -1
q = flow.qn(3, np.arange(-np.pi, np.pi, 10))
assert abs(q+1) < 1e-12, \
'Incorrect isotropic q_n ({} != -1).'.format(q)
# specific example
np.random.seed(seed)
phi = 2*np.pi*(np.random.rand(10) - .5)
q = np.array([flow.qn(n, phi) for n in range(2, 5)])
correct_q = np.array((
-0.23701789876111995+1.9307467860155012j,
0.7294873796006498+0.4925428484240118j,
2.0248053489550459-0.23452484252744438j
))
assert np.allclose(q, correct_q), \
'Incorrect random q_n.\n{} != {}'.format(q, correct_q)
def test_flow_cumulant():
pass
| # -*- coding: utf-8 -*-
from __future__ import division
import numpy as np
from .. import flow
def test_qn():
assert flow.qn(2, 0) == 1+0j, \
'Single-particle q_n.'
assert np.allclose(flow.qn(3, np.arange(-np.pi, np.pi, 10)), -1+0j), \
'Isotropic q_n.'
def test_flow_cumulant():
pass
| mit | Python |
7c10feeed640f4d1a66bb3207ade980733409ad9 | improve unit test | bitsteller/witica,bitsteller/witica,bitsteller/witica | witica/test_source.py | witica/test_source.py | # coding=utf-8
import os
import unittest
import pkg_resources
from witica.source import Source, SourceItemList
from witica.log import *
from witica.metadata import extractor
class TestSourceItemList(unittest.TestCase):
def setUp(self):
Logger.start(verbose=False)
self.resource_path = pkg_resources.resource_filename("witica","test/files")
source_config = {}
source_config["version"] = 1
source_config["path"] = self.resource_path
self.source = FolderSource("test", source_config)
extractor.register_default_extractors()
def tearDown(self):
extractor.registered_extractors = []
pkg_resources.cleanup_resources()
Logger.stop()
def test_match(self):
self.assertTrue(SourceItemList.match("test/*", "test/abc"))
self.assertFalse(SourceItemList.match("test/*", "test/abc/def"))
self.assertTrue(SourceItemList.match("test/**", "test/abc/def"))
self.assertTrue(SourceItemList.match("test/*/def", "test/abc/def"))
self.assertTrue(SourceItemList.match("test/**/de?", "test/abc/def"))
self.assertFalse(SourceItemList.match("test/**/def", "test/abc/ghi"))
def test_count_items(self):
self.assertEqual(9, len(self.source.items))
def test_item_exists(self):
self.assertTrue(self.source.items["simple"].exists)
class FolderSource(Source):
def __init__(self, source_id, config, prefix = ""):
super(FolderSource, self).__init__(source_id, config, prefix)
self.source_dir = config["path"]
self.state = {"cursor" : ""}
if not(os.path.exists(self.source_dir)):
raise IOError("Source folder '" + self.source_dir + "' does not exist.")
def update_cache(self):
pass
def update_change_status(self):
pass
def fetch_changes(self):
pass
def get_abs_meta_filename(self, local_filename):
return self.get_absolute_path(os.path.join('meta' + os.sep + local_filename))
def get_absolute_path(self, localpath):
return os.path.abspath(os.path.join(self.source_dir, localpath))
| # coding=utf-8
import os
import unittest
import pkg_resources
from witica.source import Source, SourceItemList
from witica.log import *
from witica.metadata import extractor
class TestSourceItemList(unittest.TestCase):
def setUp(self):
Logger.start(verbose=False)
self.resource_path = pkg_resources.resource_filename("witica","test/files")
source_config = {}
source_config["version"] = 1
source_config["path"] = self.resource_path
self.source = FolderSource("test", source_config)
extractor.register_default_extractors()
def tearDown(self):
extractor.registered_extractors = []
Logger.stop()
def test_match(self):
self.assertTrue(SourceItemList.match("test/*", "test/abc"))
self.assertFalse(SourceItemList.match("test/*", "test/abc/def"))
self.assertTrue(SourceItemList.match("test/**", "test/abc/def"))
self.assertTrue(SourceItemList.match("test/*/def", "test/abc/def"))
self.assertTrue(SourceItemList.match("test/**/de?", "test/abc/def"))
self.assertFalse(SourceItemList.match("test/**/def", "test/abc/ghi"))
def test_count_items(self):
self.assertEqual(9, len(self.source.items))
class FolderSource(Source):
def __init__(self, source_id, config, prefix = ""):
super(FolderSource, self).__init__(source_id, config, prefix)
self.source_dir = config["path"]
self.state = {"cursor" : ""}
if not(os.path.exists(self.source_dir)):
raise IOError("Source folder '" + self.source_dir + "' does not exist.")
def update_cache(self):
pass
def update_change_status(self):
pass
def fetch_changes(self):
pass
def get_abs_meta_filename(self, local_filename):
return self.get_absolute_path(os.path.join('meta' + os.sep + local_filename))
def get_absolute_path(self, localpath):
return os.path.abspath(os.path.join(self.source_dir, localpath))
| mit | Python |
1b7e6d41a6832ef7a8f9dafe0cd8580356f8e9da | check regex match before access in flickr module | gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo | mygpo/data/flickr.py | mygpo/data/flickr.py | #
# This file is part of gpodder.net.
#
# my.gpodder.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# my.gpodder.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with my.gpodder.org. If not, see <http://www.gnu.org/licenses/>.
#
import re
import urllib
from django.conf import settings
from mygpo.core.json import json
def get_photo_sizes(photo_id):
api_key = settings.FLICKR_API_KEY
request = 'http://api.flickr.com/services/rest/?method=flickr.photos.getSizes&api_key=%s&photo_id=%s&format=json' % (api_key, photo_id)
resp = urllib.urlopen(request).read()
extract_re = '^jsonFlickrApi\((.*)\)$'
m = re.match(extract_re, resp)
if not m:
return []
resp_obj = json.loads(m.group(1))
try:
return resp_obj['sizes']['size']
except KeyError:
return []
def get_photo_id(url):
photo_id_re = 'http://.*flickr.com/[^/]+/([^_]+)_.*'
return re.match(photo_id_re, url).group(1)
def is_flickr_image(url):
return re.search('flickr\.com.*\.(jpg|jpeg|png|gif)', url)
def get_display_photo(url, label='Medium'):
photo_id = get_photo_id(url)
sizes = get_photo_sizes(photo_id)
for s in sizes:
if s['label'] == label:
return s['source']
return url
| #
# This file is part of gpodder.net.
#
# my.gpodder.org is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# my.gpodder.org is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
# License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with my.gpodder.org. If not, see <http://www.gnu.org/licenses/>.
#
import re
import urllib
from django.conf import settings
from mygpo.core.json import json
def get_photo_sizes(photo_id):
api_key = settings.FLICKR_API_KEY
request = 'http://api.flickr.com/services/rest/?method=flickr.photos.getSizes&api_key=%s&photo_id=%s&format=json' % (api_key, photo_id)
resp = urllib.urlopen(request).read()
extract_re = '^jsonFlickrApi\((.*)\)$'
resp_content = re.match(extract_re, resp).group(1)
resp_obj = json.loads(resp_content)
try:
return resp_obj['sizes']['size']
except KeyError:
return []
def get_photo_id(url):
photo_id_re = 'http://.*flickr.com/[^/]+/([^_]+)_.*'
return re.match(photo_id_re, url).group(1)
def is_flickr_image(url):
return re.search('flickr\.com.*\.(jpg|jpeg|png|gif)', url)
def get_display_photo(url, label='Medium'):
photo_id = get_photo_id(url)
sizes = get_photo_sizes(photo_id)
for s in sizes:
if s['label'] == label:
return s['source']
return url
| agpl-3.0 | Python |
81d01175a7403b3e627738056ef9436e8172e51e | Enforce python 3.6 | wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api | shared_infra/lambdas/common/setup.py | shared_infra/lambdas/common/setup.py | import os
from setuptools import find_packages, setup
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
SOURCE = local_file('src')
setup(
name='wellcome_lambda_utils',
packages=find_packages(SOURCE),
package_dir={'': SOURCE},
version='1.0.0',
install_requires=['boto'],
python_requires='>=3.6',
description='Common lib for lambdas',
author='Wellcome digital platform',
author_email='[email protected]',
url='https://github.com/wellcometrust/platform',
keywords=['lambda', 'utils'],
classifiers=[],
)
| import os
from setuptools import find_packages, setup
def local_file(name):
return os.path.relpath(os.path.join(os.path.dirname(__file__), name))
SOURCE = local_file('src')
setup(
name='wellcome_lambda_utils',
packages=find_packages(SOURCE),
package_dir={'': SOURCE},
version='1.0.0',
install_requires=['boto'],
python_requires='>=3',
description='Common lib for lambdas',
author='Wellcome digital platform',
author_email='[email protected]',
url='https://github.com/wellcometrust/platform',
keywords=['lambda', 'utils'],
classifiers=[],
)
| mit | Python |
3abe25d2272e2a0111511b68407da0ef3c53f59e | Use wizard settings during samba provision | exekias/droplet,exekias/droplet,exekias/droplet | nazs/samba/module.py | nazs/samba/module.py | from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
from .models import DomainSettings
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
domain_settings = DomainSettings.get()
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
if domain_settings.mode == 'ad':
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
elif domain_settings.mode == 'member':
# TODO
pass
| from nazs import module
from nazs.commands import run
from nazs.sudo import root
import os
import logging
logger = logging.getLogger(__name__)
class Samba(module.Module):
"""
Samba 4 module, it deploys samba AD and file server
"""
ETC_FILE = '/etc/samba/smb.conf'
install_wizard = 'samba:install'
def install(self):
"""
Installation procedure, it writes basic smb.conf and uses samba-tool to
provision the domain
"""
with root():
if os.path.exists(self.ETC_FILE):
os.remove(self.ETC_FILE)
run("samba-tool domain provision "
" --domain='zentyal' "
" --workgroup='zentyal' "
"--realm='zentyal.lan' "
"--use-xattrs=yes "
"--use-rfc2307 "
"--server-role='domain controller' "
"--use-ntvfs "
"--adminpass='foobar1!'")
| agpl-3.0 | Python |
2383497f25e400aa27c600d3a30526d118e2a6dc | fix scan, follow new scan chain | SiLab-Bonn/pyBAR | host/test_register.py | host/test_register.py | from scan.scan import ScanBase
class TestRegisters(ScanBase):
def __init__(self, config_file, definition_file = None, bit_file = None, device = None, scan_identifier = "test_register", scan_data_path = None):
super(TestRegisters, self).__init__(config_file = config_file, definition_file = definition_file, bit_file = bit_file, device = device, scan_identifier = scan_identifier, scan_data_path = scan_data_path)
def scan(self, **kwargs):
number_of_errors = self.register_utils.test_global_register()
print 'Global Register Test: Found', number_of_errors, "error(s)"
number_of_errors = self.register_utils.test_pixel_register()
print 'Pixel Register Test: Found', number_of_errors, "error(s)"
sn = self.register_utils.read_chip_sn()
print "Chip S/N:", sn
if __name__ == "__main__":
import configuration
scan = TestRegisters(config_file = configuration.config_file, bit_file = configuration.bit_file, scan_data_path = configuration.scan_data_path)
scan.start()
scan.stop() | from scan.scan import ScanBase
class TestRegisters(ScanBase):
def __init__(self, config_file, definition_file = None, bit_file = None, device = None, scan_identifier = "test_register", scan_data_path = None):
super(TestRegisters, self).__init__(config_file = config_file, definition_file = definition_file, bit_file = bit_file, device = device, scan_identifier = scan_identifier, scan_data_path = scan_data_path)
def start(self, configure = True):
super(TestRegisters, self).start(configure)
number_of_errors = scan.register_utils.test_global_register()
print 'Global Register Test: Found', number_of_errors, "error(s)"
number_of_errors = scan.register_utils.test_pixel_register()
print 'Pixel Register Test: Found', number_of_errors, "error(s)"
sn = scan.register_utils.read_chip_sn()
print "Chip S/N:", sn
print 'Reset SRAM FIFO...'
scan.readout_utils.reset_sram_fifo()
print 'Done!'
if __name__ == "__main__":
import configuration
scan = TestRegisters(config_file = configuration.config_file, bit_file = configuration.bit_file, scan_data_path = configuration.scan_data_path)
scan.start()
| bsd-3-clause | Python |
63ed4199e5cb3f8eb9a6b294ac8c6df12f9b5f56 | Add last_request function to httprequest module | Multiposting/HTTPretty,Multiposting/HTTPretty,letolab/HTTPretty,gabrielfalcao/HTTPretty,demianbrecht/HTTPretty,xgess/HTTPretty,pxg/HTTPretty,andresriancho/HTTPretty,paylogic/HTTPretty,BenjamenMeyer/HTTPretty,codepython/HTTPretty,xgess/HTTPretty,BenjamenMeyer/HTTPretty,pxg/HTTPretty,demianbrecht/HTTPretty,BenjamenMeyer/HTTPretty,letolab/HTTPretty,letolab/HTTPretty,pxg/HTTPretty,xgess/HTTPretty,andresriancho/HTTPretty,andresriancho/HTTPretty,paylogic/HTTPretty,gabrielfalcao/HTTPretty,demianbrecht/HTTPretty,paylogic/HTTPretty,codepython/HTTPretty,codepython/HTTPretty,andresriancho/HTTPretty,Multiposting/HTTPretty | httpretty/__init__.py | httpretty/__init__.py | # #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <HTTPretty - HTTP client mock for Python>
# Copyright (C) <2011-2013> Gabriel Falcão <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
__version__ = version = '0.6.0'
import sys
from .core import httpretty, httprettified
from .errors import HTTPrettyError
from .core import URIInfo
HTTPretty = httpretty
activate = httprettified
SELF = sys.modules[__name__]
for attr in [name.decode() for name in httpretty.METHODS] + ['register_uri', 'enable', 'disable', 'is_enabled', 'Response']:
setattr(SELF, attr, getattr(httpretty, attr))
def last_request():
return httpretty.last_request
| # #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <HTTPretty - HTTP client mock for Python>
# Copyright (C) <2011-2013> Gabriel Falcão <[email protected]>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from __future__ import unicode_literals
__version__ = version = '0.6.0'
import sys
from .core import httpretty, httprettified
from .errors import HTTPrettyError
from .core import URIInfo
HTTPretty = httpretty
activate = httprettified
SELF = sys.modules[__name__]
for attr in [name.decode() for name in httpretty.METHODS] + ['register_uri', 'enable', 'disable', 'is_enabled', 'Response']:
setattr(SELF, attr, getattr(httpretty, attr))
| mit | Python |
2e4a934203b4d736a4180a970cacca508400ea7e | Update runcrons Command() to timezone.now() | Tivix/django-cron,philippeowagner/django-cronium,mozillazg/django-cron,eriktelepovsky/django-cron | django_cron/management/commands/runcrons.py | django_cron/management/commands/runcrons.py | from django.core.management.base import BaseCommand
from django.conf import settings
from django.core.cache import cache
from django.utils import timezone
from django_cron import CronJobManager
from datetime import datetime
from optparse import make_option
DEFAULT_LOCK_TIME = 15*60
def get_class( kls ):
"""TODO: move to django-common app.
Converts a string to a class. Courtesy: http://stackoverflow.com/questions/452969/does-python-have-an-equivalent-to-java-class-forname/452981#452981"""
parts = kls.split('.')
module = ".".join(parts[:-1])
m = __import__( module )
for comp in parts[1:]:
m = getattr(m, comp)
return m
CRONS_TO_RUN = map(lambda x: get_class(x), settings.CRON_CLASSES)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--force', action='store_true', help='Force cron runs'),
)
def handle(self, *args, **options):
for cron_class in CRONS_TO_RUN:
if not cache.get(cron_class.__name__):
instance = cron_class()
timeout = DEFAULT_LOCK_TIME
try:
timeout = settings.DJANGO_CRON_LOCK_TIME
except:
pass
cache.set(cron_class.__name__, timezone.now(), timeout)
CronJobManager.run(instance, options['force'])
cache.delete(cron_class.__name__)
else:
print "%s failed: lock has been found. Other cron started at %s" % (cron_class.__name__, cache.get(cron_class.__name__)) | from django.core.management.base import BaseCommand
from django.conf import settings
from django.core.cache import cache
from django_cron import CronJobManager
from datetime import datetime
from optparse import make_option
DEFAULT_LOCK_TIME = 15*60
def get_class( kls ):
"""TODO: move to django-common app.
Converts a string to a class. Courtesy: http://stackoverflow.com/questions/452969/does-python-have-an-equivalent-to-java-class-forname/452981#452981"""
parts = kls.split('.')
module = ".".join(parts[:-1])
m = __import__( module )
for comp in parts[1:]:
m = getattr(m, comp)
return m
CRONS_TO_RUN = map(lambda x: get_class(x), settings.CRON_CLASSES)
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--force', action='store_true', help='Force cron runs'),
)
def handle(self, *args, **options):
for cron_class in CRONS_TO_RUN:
if not cache.get(cron_class.__name__):
instance = cron_class()
timeout = DEFAULT_LOCK_TIME
try:
timeout = settings.DJANGO_CRON_LOCK_TIME
except:
pass
cache.set(cron_class.__name__, datetime.now(), timeout)
CronJobManager.run(instance, options['force'])
cache.delete(cron_class.__name__)
else:
print "%s failed: lock has been found. Other cron started at %s" % (cron_class.__name__, cache.get(cron_class.__name__)) | mit | Python |
abbe9b391ed32a07c5e912e3683ff7668e12eeb5 | bump to new version | karec/oct-browser,karec/oct-browser | octbrowser/__init__.py | octbrowser/__init__.py | __version__ = '0.4.1'
| __version__ = '0.4'
| mit | Python |
1d2ea0c72d8700687761125e4eaf90ec52f419be | Fix ORM call and add progress check | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | custom/icds_reports/management/commands/update_aadhar_date.py | custom/icds_reports/management/commands/update_aadhar_date.py | from __future__ import absolute_import, print_function
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from django.db import connections
from corehq.apps.locations.models import SQLLocation
from corehq.sql_db.routers import db_for_read_write
from custom.icds_reports.models import ChildHealthMonthly
CHILD_TABLENAME = "config_report_icds-cas_static-child_health_cases_a46c129f"
PERSON_TABLENAME = "config_report_icds-cas_static-person_cases_v2_b4b5d57a"
UPDATE_QUERY = """
UPDATE "{child_tablename}" child SET
aadhar_date = person.aadhar_date
FROM "{person_tablename}" person
WHERE child.mother_id = person.doc_id AND child.supervisor_id = %(sup_id)s AND person.supervisor_id = %(sup_id)s
""".format(child_tablename=CHILD_TABLENAME, person_tablename=PERSON_TABLENAME)
def get_cursor(model):
db = db_for_read_write(model)
return connections[db].cursor()
class Command(BaseCommand):
def handle(self, *args, **options):
supervisor_ids = (
SQLLocation.objects
.filter(domain='icds-cas', location_type__name='supervisor')
.values_list('location_id', flat=True)
)
count = 0
num_ids = len(supervisor_ids)
for sup_id in supervisor_ids:
with get_cursor(ChildHealthMonthly) as cursor:
cursor.execute(UPDATE_QUERY, {"sup_id": sup_id})
count += 1
if count % 100 == 0:
print("{} / {}".format(count, num_ids))
| from __future__ import absolute_import, print_function
from __future__ import unicode_literals
from django.core.management.base import BaseCommand
from django.db import connections
from corehq.apps.locations.models import SQLLocation
from corehq.sql_db.routers import db_for_read_write
from custom.icds_reports.models import ChildHealthMonthly
CHILD_TABLENAME = "config_report_icds-cas_static-child_health_cases_a46c129f"
PERSON_TABLENAME = "config_report_icds-cas_static-person_cases_v2_b4b5d57a"
UPDATE_QUERY = """
UPDATE "{child_tablename}" child SET
aadhar_date = person.aadhar_date
FROM "{person_tablename}" person
WHERE child.mother_id = person.doc_id AND child.supervisor_id = %(sup_id)s AND person.supervisor_id = %(sup_id)s
""".format(child_tablename=CHILD_TABLENAME, person_tablename=PERSON_TABLENAME)
def get_cursor(model):
db = db_for_read_write(model)
return connections[db].cursor()
class Command(BaseCommand):
def handle(self, *args, **options):
supervisor_ids = (
SQLLocation.objects
.filter(domain='icds-cas', location_type__name='supervisor')
.values('location_id')
)
for sup_id in supervisor_ids:
with get_cursor(ChildHealthMonthly) as cursor:
cursor.execute(UPDATE_QUERY, {"sup_id": sup_id})
| bsd-3-clause | Python |
e2d8a32590c0865b2a8339d86af4eb9b34ea5d20 | Update __init__.py | r0h4n/node-agent,Tendrl/node_agent,Tendrl/node_agent,r0h4n/node-agent,Tendrl/node-agent,Tendrl/node-agent,r0h4n/node-agent,Tendrl/node-agent | tendrl/node_agent/objects/cluster_message/__init__.py | tendrl/node_agent/objects/cluster_message/__init__.py | from tendrl.commons import etcdobj
from tendrl.commons.message import Message as message
from tendrl.commons import objects
class ClusterMessage(objects.BaseObject, message):
internal = True
def __init__(self, **cluster_message):
self._defs = {}
message.__init__(self, **cluster_message)
objects.BaseObject.__init__(self)
self.value = 'clusters/%s/messages/%s'
self._etcd_cls = _ClusterMessageEtcd
def save(self):
super(ClusterMessage, self).save(update=False)
class _ClusterMessageEtcd(etcdobj.EtcdObj):
"""Cluster message object, lazily updated
"""
__name__ = 'clusters/%s/messages/%s'
_tendrl_cls = ClusterMessage
def render(self):
self.__name__ = self.__name__ % (
self.cluster_id, self.message_id
)
return super(_ClusterMessageEtcd, self).render()
| from tendrl.commons import etcdobj
from tendrl.commons.message import Message as message
from tendrl.commons import objects
class ClusterMessage(objects.BaseObject, message):
internal = True
def __init__(self, **cluster_message):
self._defs = {}
message.__init__(self, **cluster_message)
objects.BaseObject.__init__(self)
self.value = 'clusters/%s/messages/%s'
self._etcd_cls = _ClusterMessageEtcd
class _ClusterMessageEtcd(etcdobj.EtcdObj):
"""Cluster message object, lazily updated
"""
__name__ = 'clusters/%s/messages/%s'
_tendrl_cls = ClusterMessage
def render(self):
self.__name__ = self.__name__ % (
self.cluster_id, self.message_id
)
return super(_ClusterMessageEtcd, self).render()
| lgpl-2.1 | Python |
724b4c382015aa933659a24f7be3bd2cabbcb5eb | Add flag --exclusive setting whether to run as exclusive or not | RKBK/gpaw-customize-files,RKBK/gpaw-customize-files | sherlock.stanford.edu.run_gpaw.py | sherlock.stanford.edu.run_gpaw.py | #!/usr/bin/env python
from sys import argv
import os
job = argv[1]
nodes = argv[2]
time = argv[3] + ":00"
if '--exclusive' in argv:
is_exclusive = True
argv.remove('--exclusive')
else:
is_exclusive = False
if len(argv) > 4:
gpaw_options = ' '.join(argv[4:])
else:
gpaw_options = ' '
#options = '-l nodes=' + nodes +':ppn=2' + ' -l' +' walltime=' + time + ' -m abe'
#options = '-N ' + nodes +' -t ' + time + ' -J ' + job
options = ' -J ' + job
#dir = os.getcwd()
f = open('tmp.sh', 'w')
f.write("""\
#!/bin/bash\n""")
if is_exclusive:
f.write("""#SBATCH --exclusive\n""")
f.write("""\
#SBATCH -n %s
#SBATCH -t %s
#SBATCH -p iric,normal
# Add nodes that always fail
#SBATCH -x gpu-14-1,sh-20-35
# send email about job status changes
##SBATCH --mail-type=ALL
#Set an open-mpi parameter to suppress "fork()" warnings
# GPAW is written to use fork calls
export OMPI_MCA_mpi_warn_on_fork=0
#This next line decides which version of gpaw will be used
#source $HOME/environment_scripts/set_paths_gpaw_1.1.1b1_libxc-trunk.sh # gpaw version 1.1.1b
#source $HOME/environment_scripts/set_paths_gpaw_1.1.1b1_libxc-trunk_scalapack_libvdwxc.sh # gpaw version 1.1.1b with scalapack (does not work) and libvdwxc (works)
source $HOME/environment_scripts/set_paths_gpaw-trunk_scalapack_libvdwxc.sh # Gpaw trunk with mBEEF-vdW fixed for libvdwxc
srun `which gpaw-python` %s %s
""" % (nodes,time,job,gpaw_options))
f.close()
os.system('sbatch ' + options + ' tmp.sh')
| """This is the submission script for GPAW on Sherlock at Stanford"""
#!/usr/bin/env python
from sys import argv
import os
job = argv[1]
nodes = argv[2]
time = argv[3] + ":00"
if len(argv) > 4:
gpaw_options = ' '.join(argv[4:])
else:
gpaw_options = ' '
#options = '-l nodes=' + nodes +':ppn=2' + ' -l' +' walltime=' + time + ' -m abe'
#options = '-N ' + nodes +' -t ' + time + ' -J ' + job
options = ' -J ' + job
#dir = os.getcwd()
f = open('tmp.sh', 'w')
f.write("""\
#!/bin/bash
#SBATCH -n %s
#SBATCH -t %s
#SBATCH -p iric,normal
#SBATCH --exclusive
# Add nodes that always fail
#SBATCH -x gpu-14-1,sh-20-35
# send email about job status changes
##SBATCH --mail-type=ALL
#Set an open-mpi parameter to suppress "fork()" warnings
# GPAW is written to use fork calls
export OMPI_MCA_mpi_warn_on_fork=0
#This next line decides which version of gpaw will be used
source $HOME/environment_scripts/set_paths_gpaw-trunk_scalapack_libvdwxc.sh # Gpaw trunk with mBEEF-vdW fixed for libvdwxc
srun `which gpaw-python` %s %s
""" % (nodes,time,job,gpaw_options))
f.close()
os.system('sbatch ' + options + ' tmp.sh')
| mit | Python |
5952c372ae01672bfce450aec924628faecd3654 | bump version for release | GoodgameStudios/crossbar,RaitoBezarius/crossbar,w1z2g3/crossbar,w1z2g3/crossbar,erhuabushuo/crossbar,erhuabushuo/crossbar,erhuabushuo/crossbar,RaitoBezarius/crossbar,RaitoBezarius/crossbar,GoodgameStudios/crossbar,erhuabushuo/crossbar,RaitoBezarius/crossbar,NinjaMSP/crossbar,GoodgameStudios/crossbar,RaitoBezarius/crossbar,w1z2g3/crossbar,w1z2g3/crossbar,erhuabushuo/crossbar,w1z2g3/crossbar,GoodgameStudios/crossbar,GoodgameStudios/crossbar,NinjaMSP/crossbar,RaitoBezarius/crossbar,w1z2g3/crossbar,erhuabushuo/crossbar,NinjaMSP/crossbar,RaitoBezarius/crossbar,GoodgameStudios/crossbar,w1z2g3/crossbar,GoodgameStudios/crossbar,erhuabushuo/crossbar | crossbar/crossbar/__init__.py | crossbar/crossbar/__init__.py | ###############################################################################
##
## Copyright (C) 2011-2015 Tavendo GmbH
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License, version 3,
## as published by the Free Software Foundation.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
###############################################################################
__doc__ = """
Crossbar.io - Unified application router.
Crossbar.io is an open-source server software that allows developers to create
distributed systems, composed of application components which are loosely coupled,
communicate in (soft) real-time and can be implemented in different languages.
Crossbar.io features:
- application routing core (RPC+PubSub)
- full WAMP v2 AP implementation
- application component hosting
- multi-process architecture
- and more
For more information, please go to
* Homepage: http://crossbar.io/
* Documentation: https://github.com/crossbario/crossbar/wiki
* Source code: https://github.com/crossbario/crossbar
Open-source licensed under the GNU Affero General Public License version 3.
Created by Tavendo GmbH. Get in contact at http://tavendo.com
"""
__version__ = "0.10.1"
| ###############################################################################
##
## Copyright (C) 2011-2015 Tavendo GmbH
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU Affero General Public License, version 3,
## as published by the Free Software Foundation.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU Affero General Public License for more details.
##
## You should have received a copy of the GNU Affero General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
###############################################################################
__doc__ = """
Crossbar.io - Unified application router.
Crossbar.io is an open-source server software that allows developers to create
distributed systems, composed of application components which are loosely coupled,
communicate in (soft) real-time and can be implemented in different languages.
Crossbar.io features:
- application routing core (RPC+PubSub)
- full WAMP v2 AP implementation
- application component hosting
- multi-process architecture
- and more
For more information, please go to
* Homepage: http://crossbar.io/
* Documentation: https://github.com/crossbario/crossbar/wiki
* Source code: https://github.com/crossbario/crossbar
Open-source licensed under the GNU Affero General Public License version 3.
Created by Tavendo GmbH. Get in contact at http://tavendo.com
"""
__version__ = "0.10.0"
| agpl-3.0 | Python |
0b4fb3dd59ce0940026b1cf212adcf6d17bca7a0 | Refactor build_update_query (2) | AntoineToubhans/MongoTs | mongots/query.py | mongots/query.py | from datetime import datetime
AGGREGATION_KEYS = [
'',
'months.{month}.',
'months.{month}.days.{day}.',
'months.{month}.days.{day}.hours.{hour}.',
]
DATETIME_KEY = 'datetime'
def build_filter_query(timestamp, tags=None):
filters = tags or {}
filters[DATETIME_KEY] = datetime(timestamp.year, 1, 1)
return filters
def build_update_query(value, timestamp):
inc_values = {
'count': 1,
'sum': value,
}
datetime_args = {
'month': str(timestamp.month - 1), # Array index: range from 0 to 11
'day': str(timestamp.day - 1), # Array index: range from 0 to 27 / 28 / 29 or 30
'hour': str(timestamp.hour), # range from 0 to 23
}
inc_keys = [
key.format(**datetime_args)
for key in AGGREGATION_KEYS
]
inc_update = {
'%s%s' % (inc_key, aggregate_type): inc_values[aggregate_type]
for inc_key in inc_keys
for aggregate_type in inc_values
}
return {
'$inc': inc_update,
}
| from datetime import datetime
AGGREGATION_KEYS = [
'',
'months.{month}.',
'months.{month}.days.{day}.',
'months.{month}.days.{day}.hours.{hour}.',
]
DATETIME_KEY = 'datetime'
def build_filter_query(timestamp, tags=None):
filters = tags or {}
filters[DATETIME_KEY] = datetime(timestamp.year, 1, 1)
return filters
def build_update_query(value, timestamp):
datetime_args = {
'month': str(timestamp.month - 1), # Array index: range from 0 to 11
'day': str(timestamp.day - 1), # Array index: range from 0 to 27 / 28 / 29 or 30
'hour': str(timestamp.hour), # range from 0 to 23
}
inc_keys = [
key.format(**datetime_args)
for key in AGGREGATION_KEYS
]
inc_update = {
'%s%s' % (inc_key, aggregate_type): value if aggregate_type is "sum" else 1
for inc_key in inc_keys
for aggregate_type in ['count', 'sum']
}
return {
'$inc': inc_update,
}
| mit | Python |
51432aa92e233ba3c9db500e4e3d55b7067e906c | Add latest version of py-jinja2 (#13311) | iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/py-jinja2/package.py | var/spack/repos/builtin/packages/py-jinja2/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "https://palletsprojects.com/p/jinja/"
url = "https://pypi.io/packages/source/J/Jinja2/Jinja2-2.10.3.tar.gz"
import_modules = ['jinja2']
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run')) # optional, required for i18n
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "http://jinja.pocoo.org/"
url = "https://pypi.io/packages/source/J/Jinja2/Jinja2-2.9.6.tar.gz"
import_modules = ['jinja2']
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('py-markupsafe', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run')) # optional, required for i18n
| lgpl-2.1 | Python |
77f155fec48c808724eff1b2631035d2526c170f | add version 2.11.3 (#23698) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-jinja2/package.py | var/spack/repos/builtin/packages/py-jinja2/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "https://palletsprojects.com/p/jinja/"
pypi = "Jinja2/Jinja2-2.10.3.tar.gz"
version('2.11.3', sha256='a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6')
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10.1', sha256='065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013')
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('[email protected]:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run')) # optional, required for i18n
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJinja2(PythonPackage):
"""Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
and an optional sandboxed environment."""
homepage = "https://palletsprojects.com/p/jinja/"
pypi = "Jinja2/Jinja2-2.10.3.tar.gz"
version('2.10.3', sha256='9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de')
version('2.10.1', sha256='065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013')
version('2.10', sha256='f84be1bb0040caca4cea721fcbbbbd61f9be9464ca236387158b0feea01914a4')
version('2.9.6', sha256='ddaa01a212cd6d641401cb01b605f4a4d9f37bfc93043d7f760ec70fb99ff9ff')
version('2.8', sha256='bc1ff2ff88dbfacefde4ddde471d1417d3b304e8df103a7a9437d47269201bf4')
version('2.7.3', sha256='2e24ac5d004db5714976a04ac0e80c6df6e47e98c354cb2c0d82f8879d4f8fdb')
version('2.7.2', sha256='310a35fbccac3af13ebf927297f871ac656b9da1d248b1fe6765affa71b53235')
version('2.7.1', sha256='5cc0a087a81dca1c08368482fb7a92fe2bdd8cfbb22bc0fccfe6c85affb04c8b')
version('2.7', sha256='474f1518d189ae7e318b139fecc1d30b943f124448cfa0f09582ca23e069fa4d')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run')) # optional, required for i18n
| lgpl-2.1 | Python |
5b6445e519fa9c03d703144462004ac27b9079ba | Add latest version of joblib (#11495) | LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-joblib/package.py | var/spack/repos/builtin/packages/py-joblib/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJoblib(PythonPackage):
"""Python function as pipeline jobs"""
homepage = "http://packages.python.org/joblib/"
url = "https://pypi.io/packages/source/j/joblib/joblib-0.13.2.tar.gz"
import_modules = [
'joblib', 'joblib.externals', 'joblib.externals.cloudpickle',
'joblib.externals.loky', 'joblib.externals.loky.backend'
]
version('0.13.2', sha256='315d6b19643ec4afd4c41c671f9f2d65ea9d787da093487a81ead7b0bac94524')
version('0.10.3', '455401ccfaf399538d8e5333086df2d3')
version('0.10.2', 'ebb42af4342c2445b175f86bd478d869')
version('0.10.0', '61e40322c4fed5c22905f67d7d1aa557')
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyJoblib(PythonPackage):
"""Python function as pipeline jobs"""
homepage = "http://packages.python.org/joblib/"
url = "https://pypi.io/packages/source/j/joblib/joblib-0.10.3.tar.gz"
version('0.10.3', '455401ccfaf399538d8e5333086df2d3')
version('0.10.2', 'ebb42af4342c2445b175f86bd478d869')
version('0.10.0', '61e40322c4fed5c22905f67d7d1aa557')
# for testing
# depends_on('py-nose', type=('build', 'run'))
| lgpl-2.1 | Python |
350a5422ed1f874e7b2780348663f320a1af6676 | Update py-theano dependencies (#14015) | iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack,iulian787/spack | var/spack/repos/builtin/packages/py-theano/package.py | var/spack/repos/builtin/packages/py-theano/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTheano(PythonPackage):
"""Optimizing compiler for evaluating mathematical expressions on CPUs
and GPUs."""
homepage = "http://deeplearning.net/software/theano/"
url = "https://pypi.io/packages/source/T/Theano/Theano-0.8.2.tar.gz"
git = "https://github.com/Theano/Theano.git"
version('master', branch='master')
version('1.0.4', sha256='35c9bbef56b61ffa299265a42a4e8f8cb5a07b2997dabaef0f8830b397086913')
version('1.0.2', sha256='6768e003d328a17011e6fca9126fbb8a6ffd3bb13cb21c450f3e724cca29abde')
version('1.0.1', sha256='88d8aba1fe2b6b75eacf455d01bc7e31e838c5a0fb8c13dde2d9472495ff4662')
version('0.8.2', sha256='7463c8f7ed1a787bf881f36d38a38607150186697e7ce7e78bfb94b7c6af8930')
variant('gpu', default=False,
description='Builds with support for GPUs via CUDA and cuDNN')
depends_on('[email protected]:2.8,3.3:')
depends_on('py-setuptools', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('blas')
depends_on('cuda', when='+gpu')
depends_on('cudnn', when='+gpu')
depends_on('py-pygpu', when='+gpu', type=('build', 'run'))
depends_on('libgpuarray', when='+gpu')
depends_on('[email protected]:', type='test')
depends_on('py-parameterized', type='test')
depends_on('py-flake8', type='test')
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyTheano(PythonPackage):
"""Optimizing compiler for evaluating mathematical expressions on CPUs
and GPUs."""
homepage = "http://deeplearning.net/software/theano/"
url = "https://pypi.io/packages/source/T/Theano/Theano-0.8.2.tar.gz"
git = "https://github.com/Theano/Theano.git"
version('master', branch='master')
version('1.0.4', sha256='35c9bbef56b61ffa299265a42a4e8f8cb5a07b2997dabaef0f8830b397086913')
version('1.0.2', sha256='6768e003d328a17011e6fca9126fbb8a6ffd3bb13cb21c450f3e724cca29abde')
version('1.0.1', sha256='88d8aba1fe2b6b75eacf455d01bc7e31e838c5a0fb8c13dde2d9472495ff4662')
version('0.8.2', sha256='7463c8f7ed1a787bf881f36d38a38607150186697e7ce7e78bfb94b7c6af8930')
variant('gpu', default=False,
description='Builds with support for GPUs via CUDA and cuDNN')
depends_on('[email protected]:2.8,3.3:')
depends_on('py-setuptools', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('blas')
depends_on('cuda', when='+gpu')
depends_on('cudnn', when='+gpu')
depends_on('py-pygpu', when='+gpu', type=('build', 'run'))
depends_on('libgpuarray', when='+gpu')
depends_on('[email protected]:', type='test')
depends_on('[email protected]:', type='test')
| lgpl-2.1 | Python |
38199ce9cfb69b21e45e679d3a6604a72da7cc5b | add version 0.5.0 to r-forcats (#20972) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/r-forcats/package.py | var/spack/repos/builtin/packages/r-forcats/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RForcats(RPackage):
"""Tools for Working with Categorical Variables (Factors)
Helpers for reordering factor levels (including moving specified levels to
front, ordering by first appearance, reversing, and randomly shuffling),
and tools for modifying factor levels (including collapsing rare levels
into other, 'anonymising', and manually 'recoding')."""
homepage = "http://forcats.tidyverse.org/"
url = "https://cloud.r-project.org/src/contrib/forcats_0.2.0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/forcats"
version('0.5.0', sha256='8f960e789333ec597ddf2d653a64e330f03b86f465e9b71f6779f227355d90c4')
version('0.4.0', sha256='7c83cb576aa6fe1379d7506dcc332f7560068b2025f9e3ab5cd0a5f28780d2b2')
version('0.3.0', sha256='95814610ec18b8a8830eba63751954387f9d21400d6ab40394ed0ff22c0cb657')
version('0.2.0', sha256='b5bce370422d4c0ec9509249ae645373949bfbe9217cdf50dce2bfbdad9f7cd7')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', when='@0.5.0:', type=('build', 'run'))
depends_on('r-ellipsis', when='@0.4.0:', type=('build', 'run'))
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-rlang', when='@0.4.0:', type=('build', 'run'))
depends_on('r-tibble', type=('build', 'run'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RForcats(RPackage):
"""Helpers for reordering factor levels (including moving specified levels
to front, ordering by first appearance, reversing, and randomly
shuffling), and tools for modifying factor levels (including collapsing
rare levels into other, 'anonymising', and manually 'recoding')."""
homepage = "http://forcats.tidyverse.org/"
url = "https://cloud.r-project.org/src/contrib/forcats_0.2.0.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/forcats"
version('0.4.0', sha256='7c83cb576aa6fe1379d7506dcc332f7560068b2025f9e3ab5cd0a5f28780d2b2')
version('0.3.0', sha256='95814610ec18b8a8830eba63751954387f9d21400d6ab40394ed0ff22c0cb657')
version('0.2.0', sha256='b5bce370422d4c0ec9509249ae645373949bfbe9217cdf50dce2bfbdad9f7cd7')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('r-tibble', type=('build', 'run'))
depends_on('r-magrittr', type=('build', 'run'))
depends_on('r-ellipsis', when='@0.4.0:', type=('build', 'run'))
depends_on('r-rlang', when='@0.4.0:', type=('build', 'run'))
| lgpl-2.1 | Python |
c4d809a3b8ccb24d684c489925dd6c9634dbdf55 | Remove use of DesiredCapabilities object, use Options object instead (#981) | cobrateam/splinter,cobrateam/splinter,cobrateam/splinter | splinter/driver/webdriver/firefox.py | splinter/driver/webdriver/firefox.py | # -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from selenium.webdriver import Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import (
BaseWebDriver,
WebDriverElement as WebDriverElement,
)
from splinter.driver.webdriver.cookie_manager import CookieManager
from selenium.webdriver.firefox.options import Options
class WebDriver(BaseWebDriver):
driver_name = "Firefox"
def __init__(
self,
options=None,
profile=None,
extensions=None,
user_agent=None,
profile_preferences=None,
fullscreen=False,
wait_time=2,
capabilities=None,
headless=False,
incognito=False,
**kwargs
):
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference("extensions.logging.enabled", False)
firefox_profile.set_preference("network.dns.disableIPv6", False)
options = options or Options()
if capabilities:
for key, value in capabilities.items():
options.set_capability(key, value)
if user_agent is not None:
firefox_profile.set_preference("general.useragent.override", user_agent)
if profile_preferences:
for key, value in profile_preferences.items():
firefox_profile.set_preference(key, value)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
if headless:
options.add_argument("--headless")
if incognito:
options.add_argument("-private")
self.driver = Firefox(
firefox_profile,
options=options,
**kwargs,
)
if fullscreen:
self.driver.fullscreen_window()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__(wait_time)
| # -*- coding: utf-8 -*-
# Copyright 2012 splinter authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
from selenium.webdriver import DesiredCapabilities, Firefox
from selenium.webdriver.firefox.firefox_profile import FirefoxProfile
from splinter.driver.webdriver import (
BaseWebDriver,
WebDriverElement as WebDriverElement,
)
from splinter.driver.webdriver.cookie_manager import CookieManager
from selenium.webdriver.firefox.options import Options
class WebDriver(BaseWebDriver):
driver_name = "Firefox"
def __init__(
self,
options=None,
profile=None,
extensions=None,
user_agent=None,
profile_preferences=None,
fullscreen=False,
wait_time=2,
capabilities=None,
headless=False,
incognito=False,
**kwargs
):
firefox_profile = FirefoxProfile(profile)
firefox_profile.set_preference("extensions.logging.enabled", False)
firefox_profile.set_preference("network.dns.disableIPv6", False)
firefox_capabilities = DesiredCapabilities().FIREFOX
firefox_capabilities["marionette"] = True
options = options or Options()
if capabilities:
for key, value in capabilities.items():
# Selenium 3
firefox_capabilities[key] = value
# Selenium 4
options.set_capability(key, value)
if user_agent is not None:
firefox_profile.set_preference("general.useragent.override", user_agent)
if profile_preferences:
for key, value in profile_preferences.items():
firefox_profile.set_preference(key, value)
if extensions:
for extension in extensions:
firefox_profile.add_extension(extension)
if headless:
options.add_argument("--headless")
if incognito:
options.add_argument("-private")
self.driver = Firefox(
firefox_profile,
capabilities=firefox_capabilities,
options=options,
**kwargs
)
if fullscreen:
self.driver.fullscreen_window()
self.element_class = WebDriverElement
self._cookie_manager = CookieManager(self.driver)
super(WebDriver, self).__init__(wait_time)
| bsd-3-clause | Python |
546b55248457055c4803d7ea65c21b92276309bd | Reformat and update copyright. | uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server,uw-it-aca/spotseeker_server | spotseeker_server/views/add_image.py | spotseeker_server/views/add_image.py | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
""" Changes
=================================================================
[email protected]: adapt to a simplier RESTDispatch framework.
"""
from spotseeker_server.views.rest_dispatch import RESTDispatch, RESTException
from spotseeker_server.models import SpotImage, Spot
from django.http import HttpResponse
from spotseeker_server.require_auth import *
from PIL import Image
class AddImageView(RESTDispatch):
"""Saves a SpotImage for a particular Spot on POST to
/api/v1/spot/<spot id>/image.
"""
@user_auth_required
@admin_auth_required
def POST(self, request, spot_id):
spot = Spot.objects.get(pk=spot_id)
if "image" not in request.FILES:
raise RESTException("No image", 400)
args = {
"upload_application": request.META.get(
"SS_OAUTH_CONSUMER_NAME", ""
),
"upload_user": request.META.get("SS_OAUTH_USER", ""),
"description": request.POST.get("description", ""),
"display_index": request.POST.get("display_index"),
"image": request.FILES["image"],
}
if args["display_index"] is None:
# TODO: is there a better way?
# get display_indexes for all of the existing images
# and set the new one to the biggest + 1
indices = [img.display_index for img in spot.spotimage_set.all()]
if indices:
args["display_index"] = max(indices) + 1
else:
args["display_index"] = 0
image = spot.spotimage_set.create(**args)
response = HttpResponse(status=201)
response["Location"] = image.rest_url()
return response
| # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
""" Copyright 2012, 2013 UW Information Technology, University of Washington
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Changes
=================================================================
[email protected]: adapt to a simplier RESTDispatch framework.
"""
from spotseeker_server.views.rest_dispatch import RESTDispatch, RESTException
from spotseeker_server.models import SpotImage, Spot
from django.http import HttpResponse
from spotseeker_server.require_auth import *
from PIL import Image
class AddImageView(RESTDispatch):
""" Saves a SpotImage for a particular Spot on POST to
/api/v1/spot/<spot id>/image.
"""
@user_auth_required
@admin_auth_required
def POST(self, request, spot_id):
spot = Spot.objects.get(pk=spot_id)
if "image" not in request.FILES:
raise RESTException("No image", 400)
args = {
'upload_application': request.META.get('SS_OAUTH_CONSUMER_NAME',
''),
'upload_user': request.META.get('SS_OAUTH_USER', ''),
'description': request.POST.get('description', ''),
'display_index': request.POST.get('display_index'),
'image': request.FILES['image']
}
if args['display_index'] is None:
# TODO: is there a better way?
# get display_indexes for all of the existing images
# and set the new one to the biggest + 1
indices = [img.display_index for img in spot.spotimage_set.all()]
if indices:
args['display_index'] = max(indices) + 1
else:
args['display_index'] = 0
image = spot.spotimage_set.create(**args)
response = HttpResponse(status=201)
response["Location"] = image.rest_url()
return response
| apache-2.0 | Python |
99241ab49a0a76472bb6f107a078248782af9626 | fix string_types in _compat | jmgc/myhdl-numeric,jmgc/myhdl-numeric,jmgc/myhdl-numeric | myhdl/_compat.py | myhdl/_compat.py | import sys
PY2 = sys.version_info[0] == 2
if not PY2:
string_types = (str,)
integer_types = (int,)
long = int
import builtins
else:
string_types = (str, unicode)
integer_types = (int, long)
long = long
import __builtin__ as builtins
| import sys
PY2 = sys.version_info[0] == 2
if not PY2:
string_types = (str, unicode)
integer_types = (int,)
long = int
import builtins
else:
string_types = (str,)
integer_types = (int, long)
long = long
import __builtin__ as builtins
| lgpl-2.1 | Python |
aa9ce7092801e7ed8f3f86df0d1067279d13784d | Add armv7 support to create_ios_framework script (#4942) | jason-simmons/flutter_engine,Hixie/sky_engine,devoncarew/engine,chinmaygarde/sky_engine,Hixie/sky_engine,cdotstout/sky_engine,krisgiesing/sky_engine,mikejurka/engine,flutter/engine,cdotstout/sky_engine,jamesr/sky_engine,chinmaygarde/flutter_engine,jason-simmons/sky_engine,chinmaygarde/flutter_engine,jamesr/sky_engine,devoncarew/sky_engine,devoncarew/engine,rmacnak-google/engine,devoncarew/sky_engine,Hixie/sky_engine,jason-simmons/flutter_engine,cdotstout/sky_engine,jamesr/sky_engine,jamesr/flutter_engine,devoncarew/sky_engine,devoncarew/sky_engine,jamesr/flutter_engine,jamesr/sky_engine,jason-simmons/flutter_engine,rmacnak-google/engine,Hixie/sky_engine,aam/engine,jamesr/flutter_engine,tvolkert/engine,jamesr/flutter_engine,chinmaygarde/flutter_engine,mikejurka/engine,devoncarew/engine,cdotstout/sky_engine,krisgiesing/sky_engine,jason-simmons/sky_engine,chinmaygarde/sky_engine,chinmaygarde/sky_engine,chinmaygarde/sky_engine,flutter/engine,tvolkert/engine,mikejurka/engine,Hixie/sky_engine,chinmaygarde/flutter_engine,aam/engine,tvolkert/engine,flutter/engine,tvolkert/engine,rmacnak-google/engine,cdotstout/sky_engine,aam/engine,krisgiesing/sky_engine,rmacnak-google/engine,Hixie/sky_engine,jason-simmons/sky_engine,devoncarew/engine,jason-simmons/flutter_engine,aam/engine,tvolkert/engine,jason-simmons/sky_engine,tvolkert/engine,rmacnak-google/engine,devoncarew/engine,jason-simmons/sky_engine,aam/engine,devoncarew/engine,Hixie/sky_engine,krisgiesing/sky_engine,krisgiesing/sky_engine,chinmaygarde/flutter_engine,jason-simmons/flutter_engine,mikejurka/engine,jamesr/flutter_engine,chinmaygarde/sky_engine,mikejurka/engine,mikejurka/engine,chinmaygarde/sky_engine,devoncarew/engine,aam/engine,chinmaygarde/flutter_engine,mikejurka/engine,cdotstout/sky_engine,mikejurka/engine,Hixie/sky_engine,jamesr/sky_engine,krisgiesing/sky_engine,flutter/engine,flutter/engine,jamesr/flutter_engi
ne,jason-simmons/flutter_engine,devoncarew/sky_engine,jamesr/sky_engine,jason-simmons/flutter_engine,jamesr/sky_engine,chinmaygarde/flutter_engine,krisgiesing/sky_engine,aam/engine,aam/engine,jason-simmons/sky_engine,flutter/engine,mikejurka/engine,jamesr/flutter_engine,jamesr/flutter_engine,chinmaygarde/sky_engine,flutter/engine,jason-simmons/flutter_engine,cdotstout/sky_engine,devoncarew/sky_engine,rmacnak-google/engine,tvolkert/engine,jason-simmons/sky_engine,flutter/engine,rmacnak-google/engine,devoncarew/sky_engine,jamesr/flutter_engine | sky/tools/create_ios_framework.py | sky/tools/create_ios_framework.py | #!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import shutil
import sys
import os
def main():
parser = argparse.ArgumentParser(description='Creates Flutter.framework')
parser.add_argument('--dst', type=str, required=True)
# TODO(cbracken) eliminate --device-out-dir and make armv7-out-dir and
# arm64-out-dir required once bot recipe is updated.
parser.add_argument('--device-out-dir', type=str, required=False)
parser.add_argument('--arm64-out-dir', type=str, required=False)
parser.add_argument('--armv7-out-dir', type=str, required=False)
parser.add_argument('--simulator-out-dir', type=str, required=True)
args = parser.parse_args()
if not (args.arm64_out_dir or args.device_out_dir):
print 'One of --device-out-dir or --arm64-out-dir must be specified'
fat_framework = os.path.join(args.dst, 'Flutter.framework')
arm64_framework = os.path.join(args.arm64_out_dir if args.arm64_out_dir else args.device_out_dir, 'Flutter.framework')
armv7_framework = os.path.join(args.armv7_out_dir, 'Flutter.framework') if args.armv7_out_dir else None
simulator_framework = os.path.join(args.simulator_out_dir, 'Flutter.framework')
arm64_dylib = os.path.join(arm64_framework, 'Flutter')
armv7_dylib = os.path.join(armv7_framework, 'Flutter') if args.armv7_out_dir else None
simulator_dylib = os.path.join(simulator_framework, 'Flutter')
if not os.path.isdir(arm64_framework):
print 'Cannot find iOS arm64 Framework at', arm64_framework
return 1
# TODO(cbracken): require armv7 once bot recipe is updated.
if armv7_framework and not os.path.isdir(armv7_framework):
print 'Cannot find iOS armv7 Framework at', armv7_framework
return 1
if not os.path.isdir(simulator_framework):
print 'Cannot find iOS simulator Framework at', simulator_framework
return 1
if not os.path.isfile(arm64_dylib):
print 'Cannot find iOS arm64 dylib at', arm64_dylib
return 1
# TODO(cbracken): require armv7 once bot recipe is updated.
if armv7_dylib and not os.path.isfile(armv7_dylib):
print 'Cannot find iOS armv7 dylib at', armv7_dylib
return 1
if not os.path.isfile(simulator_dylib):
print 'Cannot find iOS simulator dylib at', simulator_dylib
return 1
shutil.rmtree(fat_framework, True)
shutil.copytree(arm64_framework, fat_framework)
# TODO(cbracken): require armv7 once bot recipe is updated.
dylibs = [arm64_dylib, simulator_dylib]
if armv7_dylib:
dylibs += [armv7_dylib]
subprocess.call(['lipo'] + dylibs + [
'-create',
'-output',
os.path.join(fat_framework, 'Flutter')
])
if __name__ == '__main__':
sys.exit(main())
| #!/usr/bin/env python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import subprocess
import shutil
import sys
import os
def main():
parser = argparse.ArgumentParser(description='Creates Flutter.framework')
parser.add_argument('--dst', type=str, required=True)
parser.add_argument('--device-out-dir', type=str, required=True)
parser.add_argument('--simulator-out-dir', type=str, required=True)
args = parser.parse_args()
fat_framework = os.path.join(args.dst, 'Flutter.framework')
device_framework = os.path.join(args.device_out_dir, 'Flutter.framework')
simulator_framework = os.path.join(args.simulator_out_dir, 'Flutter.framework')
device_dylib = os.path.join(device_framework, 'Flutter')
simulator_dylib = os.path.join(simulator_framework, 'Flutter')
if not os.path.isdir(device_framework):
print 'Cannot find iOS device Framework at', device_framework
return 1
if not os.path.isdir(simulator_framework):
print 'Cannot find iOS simulator Framework at', simulator_framework
return 1
if not os.path.isfile(device_dylib):
print 'Cannot find iOS device dylib at', device_dylib
return 1
if not os.path.isfile(simulator_dylib):
print 'Cannot find iOS simulator dylib at', simulator_dylib
return 1
shutil.rmtree(fat_framework, True)
shutil.copytree(device_framework, fat_framework)
subprocess.call([
'lipo',
device_dylib,
simulator_dylib,
'-create',
'-output',
os.path.join(fat_framework, 'Flutter')
])
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python |
a9cfd2bc842631431e20b6c13d3d98535b643b3b | Fix mispelling | infoxchange/ixdjango | ixdjango/management/commands/copystatic.py | ixdjango/management/commands/copystatic.py | """
Copy static files to nginx location
.. moduleauthor:: Infoxchange Development Team <[email protected]>
"""
import logging
import os
from shutil import copy2, copystat
from django.conf import settings
from django.core.management.base import NoArgsCommand
LOGGER = logging.getLogger(__name__)
def copytree(src, dst):
"""
A version of copytree I don't hate
"""
if not (os.path.exists(dst) and os.path.isdir(dst)):
LOGGER.info("Creating directory %s", dst)
os.makedirs(dst)
copystat(src, dst)
for name in os.listdir(src):
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
LOGGER.debug("Copying directory %s", name)
copytree(srcname, dstname)
else:
LOGGER.debug("Copying file %s", name)
copy2(srcname, dstname)
class Command(NoArgsCommand):
"""
Copy static files to nginx location
"""
def handle_noargs(self, **options):
try:
static_dir = settings.NGINX_STATIC_DIR
except AttributeError:
static_dir = None
if not static_dir:
LOGGER.info("static dir not defined, copy static content skipped")
return
LOGGER.info("Copying static content to %s", static_dir)
copytree(settings.STATIC_ROOT, static_dir)
| """
Copy static files to nginx location
.. moduleauthor:: Infoxchange Development Team <[email protected]>
"""
import logging
import os
from shutil import copy2, copystat
from django.conf import settings
from django.core.management.base import NoArgsCommand
LOGGER = logging.getLogger(__name__)
def copytree(src, dst):
"""
A version of copytree I don't hate
"""
if not (os.path.exists(dst) and os.path.isdir(dst)):
LOGGER.INFO("Creating directory %s", dst)
os.makedirs(dst)
copystat(src, dst)
for name in os.listdir(src):
srcname = os.path.join(src, name)
dstname = os.path.join(dst, name)
if os.path.isdir(srcname):
LOGGER.DEBUG("Copying directory %s", name)
copytree(srcname, dstname)
else:
LOGGER.DEBUG("Copying file %s", name)
copy2(srcname, dstname)
class Command(NoArgsCommand):
"""
Copy static files to nginx location
"""
def handle_noargs(self, **options):
try:
static_dir = settings.NGINX_STATIC_DIR
except AttributeError:
static_dir = None
if not static_dir:
LOGGER.info("static dir not defined, copy static content skipped")
return
LOGGER.info("Copying static content to %s", static_dir)
copytree(settings.STATIC_ROOT, static_dir)
| mit | Python |
df85906e8e2a872ca99002b26af6ea5d495b23ca | fix wrong document string | schubergphilis/data-migrator,iheitlager/data-migrator | data_migrator/emitters/__init__.py | data_migrator/emitters/__init__.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
"""
This module contains all classes for models, managers and fields
* :class:`BaseEmitter`
* :class:`MySQLEmitter`
* ...
"""
from .mysql import MySQLEmitter
from .csv import CSVEmitter
| #!/usr/bin/python
# -*- coding: UTF-8 -*-
from .mysql import MySQLEmitter
from .csv import CSVEmitter
"""
This module contains all classes for models, managers and fields
* :class:`BaseEmitter`
* :class:`MySQLEmitter`
* ...
"""
| mit | Python |
0a05e6479ee907c3702cc895c5a180cd816a5433 | Build interdependencies. | DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python | d1_common_python/src/setup.py | d1_common_python/src/setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:mod:`setup`
====================
:Synopsis: Create egg.
:Author: DataONE (Dahl)
"""
from setuptools import setup, find_packages
import d1_common
setup(
name='DataONE_Common',
version=d1_common.__version__,
author='DataONE Project',
author_email='[email protected]',
url='http://dataone.org',
description='Contains functionality common to projects that interact with the DataONE infrastructure via Python',
license='Apache License, Version 2.0',
packages=find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires=[
'iso8601 >= 0.1',
'pyxb >= 1.1.2',
],
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:mod:`setup`
====================
:Synopsis: Create egg.
:Author: DataONE (Dahl)
"""
from setuptools import setup, find_packages
setup(
name='Python DataONE Common',
#version=d1_client.__version__,
description='Contains functionality common to projects that interact with the DataONE infrastructure via Python',
author='DataONE Project',
url='http://dataone.org',
packages=find_packages(),
# Dependencies that are available through PYPI / easy_install.
install_requires=[
# iso860
'iso8601 >= 0.1',
# PyXB
'pyxb >= 1.1.2',
],
package_data={
# If any package contains *.txt or *.rst files, include them:
'': ['*.txt', '*.rst'],
}
)
| apache-2.0 | Python |
9664f6e6bf64e10fe0ce6fbfc3bbf20d4775cdb6 | Update MotorsControlFile.py | VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot,VitorHugoAguiar/ProBot | ProBot_BeagleBone/MotorsControlFile.py | ProBot_BeagleBone/MotorsControlFile.py | #!/usr/bin/python
# Python Standart Library Imports
import SabertoothFile
import PWMFile
import ProBotConstantsFile
# Initialization of classes from local files
Sabertooth = SabertoothFile.SabertoothClass()
PWM = PWMFile.PWMClass()
Pconst = ProBotConstantsFile.Constants()
class MotorsControlClass():
def MotorsControl(self,rightMotor, leftMotor, userChoice):
if userChoice=='1':
# Sending the values to the Sabertooth that is connected to the motors
Sabertooth.drive(Pconst.addr, 1, int(rightMotor))
Sabertooth.drive(Pconst.addr, 2, int(leftMotor))
if userChoice=='2':
# Sending the values to the pwm controller that is connected to the motors
PWM.PWM_Signals(int(rightMotor), int(leftMotor))
| #!/usr/bin/python
import SabertoothFile
import PWMFile
import ProBotConstantsFile
# Initialization of classes from local files
Sabertooth = SabertoothFile.SabertoothClass()
PWM = PWMFile.PWMClass()
Pconst = ProBotConstantsFile.Constants()
class MotorsControlClass():
def MotorsControl(self,rightMotor, leftMotor, userChoice):
if userChoice=='1':
# Sending the values to the Sabertooth that is connected to the motors
Sabertooth.drive(Pconst.addr, 1, int(rightMotor))
Sabertooth.drive(Pconst.addr, 2, int(leftMotor))
if userChoice=='2':
PWM.PWM_Signals(int(rightMotor), int(leftMotor))
| agpl-3.0 | Python |
50b19958b531cd94b537f3d911ce9b0c0b7f1ea2 | add ordereddictionary to store information about file .rooms loaded | develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms | trunk/editor/structdata/project.py | trunk/editor/structdata/project.py | #!/usr/bin/env python
try:
from collections import OrderedDict
except ImportError:
from misc.dict import OrderedDict
from subject import Subject
class Project(Subject):
def __init__(self):
super(Project, self).__init__()
self.data = OrderedDict()
self.data['world'] = None
self.data['images'] = {}
self.data['items'] = OrderedDict()
self.data['vars'] = {}
self.data['events'] = OrderedDict()
self.data['rooms'] = OrderedDict()
g_project = Project()
| #!/usr/bin/env python
try:
from collections import OrderedDict
except ImportError:
from misc.dict import OrderedDict
from subject import Subject
class Project(Subject):
def __init__(self):
super(Project, self).__init__()
self.informations = None
self.images = {}
self.items = OrderedDict()
self.vars = {}
self.events = OrderedDict()
self.rooms = OrderedDict()
self.selected_room = None
g_project = Project()
| mit | Python |
01e9fa344259faa6eeb7f0480975547d375e132f | add function to change and image. The function remove the image from the dictionary and add an image with the new key and new path to file | develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms,develersrl/rooms | trunk/editor/structdata/project.py | trunk/editor/structdata/project.py | #!/usr/bin/env python
from misc.odict import OrderedDict
from subject import Subject
class Project(Subject):
def __init__(self):
super(Project, self).__init__()
self.data = OrderedDict()
self.data['world'] = None
self.data['images'] = {}
self.data['items'] = OrderedDict()
self.data['vars'] = {}
self.data['events'] = OrderedDict()
self.data['rooms'] = OrderedDict()
def changeImage(self, old_image, new_image):
image = self.data['images'].pop(old_image)
image.file = new_image
self.data['images'][new_image] = image
self.notify()
def changeEventName(self, old_name, new_name):
event = self.data['events'].pop(old_name)
event.setName(new_name)
self.data['events'][event.id] = event
self.notify()
def changeStartRoom(self, new_start_room_name):
self.data['world'].start = new_start_room_name
self.notify()
def changeRoomName(self, old_room_name, new_room_name):
room = self.data['rooms'].pop(old_room_name)
self.data['rooms'][new_room_name] = room
if self.data['world'].start == old_room_name:
self.changeStartRoom(new_room_name)
room.setName(new_room_name)
self.notify()
def removeRoom(self, room_name):
self.data['rooms'].pop(room_name)
if self.data['world'].start == room_name:
if g_project.data['rooms']:
new_start_room_name = g_project.data['rooms'].keys()[0]
else:
new_start_room_name = ""
self.changeStartRoom(new_start_room_name)
self.notify()
g_project = Project()
| #!/usr/bin/env python
from misc.odict import OrderedDict
from subject import Subject
class Project(Subject):
def __init__(self):
super(Project, self).__init__()
self.data = OrderedDict()
self.data['world'] = None
self.data['images'] = {}
self.data['items'] = OrderedDict()
self.data['vars'] = {}
self.data['events'] = OrderedDict()
self.data['rooms'] = OrderedDict()
def changeEventName(self, old_name, new_name):
event = self.data['events'].pop(old_name)
event.setName(new_name)
self.data['events'][event.id] = event
self.notify()
def changeStartRoom(self, new_start_room_name):
self.data['world'].start = new_start_room_name
self.notify()
def changeRoomName(self, old_room_name, new_room_name):
room = self.data['rooms'].pop(old_room_name)
self.data['rooms'][new_room_name] = room
if self.data['world'].start == old_room_name:
self.changeStartRoom(new_room_name)
room.setName(new_room_name)
self.notify()
def removeRoom(self, room_name):
self.data['rooms'].pop(room_name)
if self.data['world'].start == room_name:
if g_project.data['rooms']:
new_start_room_name = g_project.data['rooms'].keys()[0]
else:
new_start_room_name = ""
self.changeStartRoom(new_start_room_name)
self.notify()
g_project = Project()
| mit | Python |
b04693387be08c1ead880d0e7472026ed76dad80 | Fix django.conf.urls.defaults imports | ging/django_openstack_auth,promptworks/django_openstack_auth,ashokkumarprajapati/openstack_auth_remote,jamielennox/django_openstack_auth,dan1/django_openstack_auth_proto,dan1/django_openstack_auth_proto,redhat-openstack/django_openstack_auth,ging/django_openstack_auth,promptworks/django_openstack_auth,redhat-openstack/django_openstack_auth | openstack_auth/urls.py | openstack_auth/urls.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls import patterns, url
from .utils import patch_middleware_get_user
patch_middleware_get_user()
urlpatterns = patterns('openstack_auth.views',
url(r"^login/$", "login", name='login'),
url(r"^logout/$", 'logout', name='logout'),
url(r'^switch/(?P<tenant_id>[^/]+)/$', 'switch', name='switch_tenants'),
url(r'^switch_services_region/(?P<region_name>[^/]+)/$', 'switch_region',
name='switch_services_region')
)
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls.defaults import patterns, url
from .utils import patch_middleware_get_user
patch_middleware_get_user()
urlpatterns = patterns('openstack_auth.views',
url(r"^login/$", "login", name='login'),
url(r"^logout/$", 'logout', name='logout'),
url(r'^switch/(?P<tenant_id>[^/]+)/$', 'switch', name='switch_tenants'),
url(r'^switch_services_region/(?P<region_name>[^/]+)/$', 'switch_region',
name='switch_services_region')
)
| apache-2.0 | Python |
78705f598e7e3325e871bd17ff353a31c71bc399 | Extend all admin form to Container Admin Form (json field) | opps/opps,YACOWS/opps,williamroot/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,opps/opps,jeanmask/opps,williamroot/opps | opps/articles/forms.py | opps/articles/forms.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from opps.core.widgets import OppsEditor
from opps.containers.forms import ContainerAdminForm
from .models import Post, Album, Link
class PostAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(ContainerAdminForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(ContainerAdminForm):
class Meta:
model = Link
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from .models import Post, Album, Link
from opps.core.widgets import OppsEditor
from opps.db.models.fields.jsonf import JSONFormField
from opps.fields.widgets import JSONField
from opps.fields.models import Field, FieldOption
class PostAdminForm(forms.ModelForm):
json = JSONFormField(widget=JSONField(attrs={'_model': 'Post'}), required=False)
multiupload_link = '/fileupload/image/'
def __init__(self, *args, **kwargs):
super(PostAdminForm, self).__init__(*args, **kwargs)
for field in Field.objects.filter(
application__contains=self._meta.model.__name__):
for fo in FieldOption.objects.filter(field=field):
self.fields[
'json_{}_{}'.format(
field.slug, fo.option.slug
)] = forms.CharField(required=False)
class Meta:
model = Post
widgets = {'content': OppsEditor()}
class AlbumAdminForm(forms.ModelForm):
multiupload_link = '/fileupload/image/'
class Meta:
model = Album
widgets = {
'headline': OppsEditor()
}
class LinkAdminForm(forms.ModelForm):
class Meta:
model = Link
| mit | Python |
67a7a3f5bc05265690a831dea7c4310af66870a8 | add channel obj on set_context_data * long_slug * level | jeanmask/opps,YACOWS/opps,opps/opps,opps/opps,williamroot/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,jeanmask/opps,YACOWS/opps | opps/articles/utils.py | opps/articles/utils.py | # -*- coding: utf-8 -*-
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel'] = {}
context['channel']['long_slug'] = self.long_slug
if self.channel:
context['channel']['level'] = self.channel.get_level()
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| # -*- coding: utf-8 -*-
from django.utils import timezone
from opps.articles.models import ArticleBox, Article
def set_context_data(self, SUPER, **kwargs):
context = super(SUPER, self).get_context_data(**kwargs)
article = Article.objects.filter(
site=self.site,
channel_long_slug__in=self.channel_long_slug,
date_available__lte=timezone.now(),
published=True)
context['posts'] = article.filter(child_class='Post')[:self.limit]
context['albums'] = article.filter(child_class='Album')[:self.limit]
context['channel_long_slug'] = self.long_slug
context['articleboxes'] = ArticleBox.objects.filter(
channel__long_slug=self.long_slug)
if self.slug:
context['articleboxes'] = context['articleboxes'].filter(
article__slug=self.slug)
return context
| mit | Python |
7b016e61f5f7218aaddbad12050d357357c5f3df | sort tests functions in bench_bgra2rgb.py | BoboTiG/python-mss | tests/bench_bgra2rgb.py | tests/bench_bgra2rgb.py | # coding: utf-8
"""
2018-03-19.
Maximum screenshots in 1 second by computing BGRA raw values to RGB.
GNU/Linux
pil_frombytes 139
mss_rgb 119
pil_frombytes_rgb 51
numpy_flip 31
numpy_slice 29
macOS
pil_frombytes 209
mss_rgb 174
pil_frombytes_rgb 113
numpy_flip 39
numpy_slice 36
Windows
pil_frombytes 81
mss_rgb 66
pil_frombytes_rgb 42
numpy_flip 25
numpy_slice 22
"""
from __future__ import print_function
import time
import numpy
from PIL import Image
import mss
def mss_rgb(im):
return im.rgb
def numpy_flip(im):
frame = numpy.array(im, dtype=numpy.uint8)
return numpy.flip(frame[:, :, :3], 2).tobytes()
def numpy_slice(im):
return numpy.array(im, dtype=numpy.uint8)[..., [2, 1, 0]].tobytes()
def pil_frombytes_rgb(im):
return Image.frombytes('RGB', im.size, im.rgb).tobytes()
def pil_frombytes(im):
return Image.frombytes('RGB', im.size, im.bgra, 'raw', 'BGRX').tobytes()
def benchmark():
with mss.mss() as sct:
im = sct.grab(sct.monitors[0])
for func in (pil_frombytes,
mss_rgb,
pil_frombytes_rgb,
numpy_flip,
numpy_slice):
count = 0
start = time.time()
while (time.time() - start) <= 1:
func(im)
im._ScreenShot__rgb = None
count += 1
print(func.__name__.ljust(17), count)
benchmark()
| # coding: utf-8
"""
2018-03-19.
Maximum screenshots in 1 second by computing BGRA raw values to RGB.
GNU/Linux
pil_frombytes_rgb 51
pil_frombytes 139
mss_rgb 119
numpy_flip 31
numpy_slice 29
macOS
pil_frombytes_rgb 113
pil_frombytes 209
mss_rgb 174
numpy_flip 39
numpy_slice 36
Windows
pil_frombytes_rgb 42
pil_frombytes 81
mss_rgb 66
numpy_flip 25
numpy_slice 22
"""
from __future__ import print_function
import time
import numpy
from PIL import Image
import mss
def mss_rgb(im):
return im.rgb
def numpy_flip(im):
frame = numpy.array(im, dtype=numpy.uint8)
return numpy.flip(frame[:, :, :3], 2).tobytes()
def numpy_slice(im):
return numpy.array(im, dtype=numpy.uint8)[..., [2, 1, 0]].tobytes()
def pil_frombytes_rgb(im):
return Image.frombytes('RGB', im.size, im.rgb).tobytes()
def pil_frombytes(im):
return Image.frombytes('RGB', im.size, im.bgra, 'raw', 'BGRX').tobytes()
def benchmark():
with mss.mss() as sct:
im = sct.grab(sct.monitors[0])
for func in (pil_frombytes_rgb,
pil_frombytes,
mss_rgb,
numpy_flip,
numpy_slice):
count = 0
start = time.time()
while (time.time() - start) <= 1:
func(im)
im._ScreenShot__rgb = None
count += 1
print(func.__name__.ljust(17), count)
benchmark()
| mit | Python |
9a83ec4c80bec0cec45904a8998cd82a99a9b1b2 | Save `resources` as extra data in its entirety | python-social-auth/social-core,python-social-auth/social-core | social_core/backends/atlassian.py | social_core/backends/atlassian.py | from social_core.backends.oauth import BaseOAuth2
class AtlassianOAuth2(BaseOAuth2):
name = 'atlassian'
AUTHORIZATION_URL = 'https://accounts.atlassian.com/authorize'
ACCESS_TOKEN_METHOD = 'POST'
ACCESS_TOKEN_URL = 'https://api.atlassian.com/oauth/token'
DEFAULT_SCOPE = ['read:jira-user', 'offline_access']
ID_KEY = 'accountId'
EXTRA_DATA = [
('resources', 'resources'),
('refresh_token', 'refresh_token'),
('expires_in', 'expires_in'),
]
def auth_params(self, state=None):
params = super(AtlassianOAuth2, self).auth_params(state)
params.update({'audience': 'api.atlassian.com',
'prompt': 'consent'})
return params
def get_user_details(self, response):
fullname, first_name, last_name = self.get_user_names(response['displayName'])
return {'username': response['name'],
'email': response['emailAddress'],
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
def user_data(self, access_token, *args, **kwargs):
resources = self.get_json('https://api.atlassian.com/oauth/token/accessible-resources',
headers={'Authorization': 'Bearer {}'.format(access_token)})
user_info = self.get_json('https://api.atlassian.com/ex/jira/{}/rest/api/2/myself'.format(resources[0]['id']),
headers={'Authorization': 'Bearer {}'.format(access_token)})
user_info['resources'] = resources
return user_info
| from social_core.backends.oauth import BaseOAuth2
class AtlassianOAuth2(BaseOAuth2):
name = 'atlassian'
AUTHORIZATION_URL = 'https://accounts.atlassian.com/authorize'
ACCESS_TOKEN_METHOD = 'POST'
ACCESS_TOKEN_URL = 'https://api.atlassian.com/oauth/token'
DEFAULT_SCOPE = ['read:jira-user', 'offline_access']
ID_KEY = 'accountId'
EXTRA_DATA = [
('resource_ids', 'resource_ids'),
('refresh_token', 'refresh_token'),
('expires_in', 'expires_in'),
]
def auth_params(self, state=None):
params = super(AtlassianOAuth2, self).auth_params(state)
params.update({'audience': 'api.atlassian.com',
'prompt': 'consent'})
return params
def get_user_details(self, response):
fullname, first_name, last_name = self.get_user_names(response['displayName'])
return {'username': response['name'],
'email': response['emailAddress'],
'fullname': fullname,
'first_name': first_name,
'last_name': last_name}
def user_data(self, access_token, *args, **kwargs):
resources = self.get_json('https://api.atlassian.com/oauth/token/accessible-resources',
headers={'Authorization': 'Bearer {}'.format(access_token)})
resource_ids = [resource['id'] for resource in resources]
user_info = self.get_json('https://api.atlassian.com/ex/jira/{}/rest/api/2/myself'.format(resource_ids[0]),
headers={'Authorization': 'Bearer {}'.format(access_token)})
user_info['resource_ids'] = resource_ids
return user_info
| bsd-3-clause | Python |
e66178cc0521426036d4c9166bf76e9379bc62ef | disable Run tests temporarily | cloudruninc/cloudrun-python | cloudrun/tests.py | cloudrun/tests.py | import pytest
import uuid
from .cloudrun import Cloudrun
from .run import Run
token = uuid.uuid4().hex
id = uuid.uuid4().hex
def test_cloudrun_init():
assert type(Cloudrun(token)) is Cloudrun
assert Cloudrun(token).token == token
#def test_run_init():
# assert type(Run(token,id)) is Run
# assert Run(token,id).token == token
# assert Run(token,id).id == id
#def test_cloudrun_get_run_returns_run():
# assert type(Cloudrun(token).get_run(id)) is Run
| import pytest
import uuid
from .cloudrun import Cloudrun
from .run import Run
token = uuid.uuid4().hex
id = uuid.uuid4().hex
def test_cloudrun_init():
assert type(Cloudrun(token)) is Cloudrun
assert Cloudrun(token).token == token
def test_run_init():
assert type(Run(token,id)) is Run
assert Run(token,id).token == token
assert Run(token,id).id == id
def test_cloudrun_get_run_returns_run():
assert type(Cloudrun(token).get_run(id)) is Run
| mit | Python |
5af29cfa071360265b1c31538f89e806ae4eabc4 | Fix #142: Testrunner and SOUTH_TESTS_MIGRATE broken on 1.1. | nimnull/django-south,RaD/django-south,RaD/django-south,nimnull/django-south,philipn/django-south,philipn/django-south,RaD/django-south | south/management/commands/test.py | south/management/commands/test.py | from django.core import management
from django.core.management.commands import test
from django.core.management.commands import syncdb
from django.conf import settings
from syncdb import Command as SyncDbCommand
class MigrateAndSyncCommand(SyncDbCommand):
option_list = SyncDbCommand.option_list
for opt in option_list:
if "--migrate" == opt.get_opt_string():
opt.default = True
break
class Command(test.Command):
def handle(self, *args, **kwargs):
management.get_commands()
if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE:
# point at the core syncdb command when creating tests
# tests should always be up to date with the most recent model structure
management._commands['syncdb'] = 'django.core'
else:
management._commands['syncdb'] = MigrateAndSyncCommand()
super(Command, self).handle(*args, **kwargs) | from django.core import management
from django.core.management.commands import test
from django.core.management.commands import syncdb
from django.conf import settings
class Command(test.Command):
def handle(self, *args, **kwargs):
if not hasattr(settings, "SOUTH_TESTS_MIGRATE") or not settings.SOUTH_TESTS_MIGRATE:
# point at the core syncdb command when creating tests
# tests should always be up to date with the most recent model structure
management.get_commands()
management._commands['syncdb'] = 'django.core'
super(Command, self).handle(*args, **kwargs) | apache-2.0 | Python |
39874a0ddb65582a04ea32fa2b05bacc968f56f3 | Update max-chunks-to-make-sorted-ii.py | kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode | Python/max-chunks-to-make-sorted-ii.py | Python/max-chunks-to-make-sorted-ii.py | # Time: O(nlogn)
# Space: O(n)
# This question is the same as "Max Chunks to Make Sorted"
# except the integers of the given array are not necessarily distinct,
# the input array could be up to length 2000, and the elements could be up to 10**8.
#
# Given an array arr of integers (not necessarily distinct),
# we split the array into some number of "chunks" (partitions),
# and individually sort each chunk.
# After concatenating them, the result equals the sorted array.
#
# What is the most number of chunks we could have made?
#
# Example 1:
#
# Input: arr = [5,4,3,2,1]
# Output: 1
# Explanation:
# Splitting into two or more chunks will not return the required result.
# For example, splitting into [5, 4], [3, 2, 1] will result in [4, 5, 1, 2, 3], which isn't sorted.
# Example 2:
#
# Input: arr = [2,1,3,4,4]
# Output: 4
# Explanation:
# We can split into two chunks, such as [2, 1], [3, 4, 4].
# However, splitting into [2, 1], [3], [4], [4] is the highest number of chunks possible.
#
# Note:
# - arr will have length in range [1, 2000].
# - arr[i] will be an integer in range [0, 10**8].
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
def compare(i1, i2):
return arr[i1]-arr[i2] if arr[i1] != arr[i2] else i1-i2
idxs = [i for i in xrange(len(arr))]
result, max_i = 0, 0
for i, v in enumerate(sorted(idxs, cmp=compare)):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
| # Time: O(nlogn)
# Space: O(n)
class Solution(object):
def maxChunksToSorted(self, arr):
"""
:type arr: List[int]
:rtype: int
"""
def compare(i1, i2):
return arr[i1]-arr[i2] if arr[i1] != arr[i2] else i1-i2
idxs = [i for i in xrange(len(arr))]
result, max_i = 0, 0
for i, v in enumerate(sorted(idxs, cmp=compare)):
max_i = max(max_i, v)
if max_i == i:
result += 1
return result
| mit | Python |
5c20418b8e5f6dc033d1a7c515d30d5e9b026db5 | Fix sampleproject view | JungDev/django-telegrambot,JungDev/django-telegrambot | sampleproject/bot/views.py | sampleproject/bot/views.py | from django.shortcuts import render
from django.conf import settings
from django_telegrambot.apps import DjangoTelegramBot
# Create your views here.
def index(request):
bot_list = DjangoTelegramBot.bots
context = {'bot_list': bot_list, 'update_mode':settings.DJANGO_TELEGRAMBOT['MODE']}
return render(request, 'bot/index.html', context)
| from django.shortcuts import render
from django.conf import settings
from django_telegrambot.apps import DjangoTelegramBot
# Create your views here.
def index(request):
bot_list = DjangoTelegramBot.bots
context = {'bot_list': bot_list, 'update_mode':settings.TELEGRAM_BOT_MODE}
return render(request, 'bot/index.html', context)
| bsd-3-clause | Python |
f0f31ea0a86620b77073b5da0dca386b337b98da | update prop2part tests | necozay/tulip-control,tulip-control/tulip-control,necozay/tulip-control,necozay/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control,tulip-control/tulip-control,necozay/tulip-control,necozay/tulip-control | tests/prop2part_test.py | tests/prop2part_test.py | #!/usr/bin/env python
"""
Tests for abstract.prop2partition
"""
from tulip.abstract import prop2part
import tulip.polytope as pc
import numpy as np
def prop2part_test():
state_space = pc.Polytope.from_box(np.array([[0., 2.],[0., 2.]]))
cont_props = []
A = []
b = []
A.append(np.array([[1., 0.],
[-1., 0.],
[0., 1.],
[0., -1.]]))
b.append(np.array([[.5, 0., .5, 0.]]).T)
cont_props.append(pc.Polytope(A[0], b[0]))
A.append(np.array([[1., 0.],
[-1., 0.],
[0., 1.],
[0., -1.]]))
b.append(np.array([[2., -1.5, 2., -1.5]]).T)
cont_props.append(pc.Polytope(A[1], b[1]))
cont_props_dict = {"C"+str(i) : pc.Polytope(A[i], b[i]) for i in range(2)}
mypartition = prop2part(state_space, cont_props_dict)
print(mypartition)
ref_adjacency = np.array([[1,0,1],[0,1,1],[1,1,1]])
assert np.all(mypartition.adj.todense() == ref_adjacency)
assert len(mypartition.regions) == 3
for reg in mypartition.regions[0:2]:
assert len(reg.props) == 1
assert len(reg.list_poly) == 1
assert cont_props_dict == mypartition.prop_regions
assert len(mypartition.regions[2].props) == 0
assert len(mypartition.regions[2].list_poly) == 3
dum = state_space.copy()
for reg in mypartition.regions[0:2]:
dum = dum.diff(reg)
assert pc.is_empty(dum.diff(mypartition.regions[2]) )
assert pc.is_empty(mypartition.regions[2].diff(dum) )
| #!/usr/bin/env python
"""
Tests for abstract.prop2partition
"""
from tulip.abstract import prop2part
import tulip.polytope as pc
import numpy as np
def prop2part_test():
state_space = pc.Polytope.from_box(np.array([[0., 2.],[0., 2.]]))
cont_props = []
A = []
b = []
A.append(np.array([[1., 0.],
[-1., 0.],
[0., 1.],
[0., -1.]]))
b.append(np.array([[.5, 0., .5, 0.]]).T)
cont_props.append(pc.Polytope(A[0], b[0]))
A.append(np.array([[1., 0.],
[-1., 0.],
[0., 1.],
[0., -1.]]))
b.append(np.array([[2., -1.5, 2., -1.5]]).T)
cont_props.append(pc.Polytope(A[1], b[1]))
cont_props_dict = dict([("C"+str(i), pc.Polytope(A[i], b[i])) for i in range(2)])
mypartition = prop2part(state_space, cont_props_dict)
ref_adjacency = np.array([[1,0,1],[0,1,1],[1,1,1]])
assert np.all(mypartition.adj.todense() == ref_adjacency)
assert len(mypartition.regions) == 3
for reg in mypartition.regions[0:2]:
assert len(reg.props) == 2
assert len(reg.list_poly) == 1
i = [i for i in range(len(reg.props)) if reg.props[i] == 1]
assert len(i) == 1
i = i[0]
assert cont_props_dict == mypartition.cont_props
ref_V = pc.extreme(mypartition.cont_props)
ref_V = set([(v[0],v[1]) for v in ref_V.tolist()])
actual_V = pc.extreme(reg.list_poly[0])
actual_V = set([(v[0],v[1]) for v in actual_V.tolist()])
assert ref_V == actual_V
assert len(mypartition.regions[2].props) == 2
assert sum(mypartition.regions[2].props) == 0
assert len(mypartition.regions[2].list_poly) == 3
dum = state_space.copy()
for reg in mypartition.regions[0:2]:
dum = dum.diff(reg)
assert pc.is_empty(dum.diff(mypartition.regions[2]) )
assert pc.is_empty(mypartition.regions[2].diff(dum) )
| bsd-3-clause | Python |
d32b2494c1a72d040a651bbb2f0abb7a94c1d2db | remove stray line | jplusplus/statscraper | tests/test-datatypes.py | tests/test-datatypes.py | """Test datatypes."""
from statscraper.datatypes import Datatype
from statscraper import Dimension, DimensionValue
def test_allowed_values():
"""Datatypes shuold have allowed values."""
dt = Datatype("region")
assert("Ale kommun" in dt.allowed_values)
def test_b():
"""Dimension values should be translatable."""
d = Dimension("municipality", datatype="region", domain="sweden/municipalities")
dv = DimensionValue("Ale kommun", d)
assert(dv.translate("numerical") == "1440")
| """Test datatypes."""
from statscraper.datatypes import Datatype
from statscraper import Dimension, DimensionValue
def test_allowed_values():
"""Datatypes shuold have allowed values."""
dt = Datatype("region")
assert("Ale kommun" in dt.allowed_values)
def test_b():
"""Dimension values should be translatable."""
d = Dimension("municipality", datatype="region", domain="sweden/municipalities")
dv = DimensionValue("Ale kommun", d)
assert(dv.translate("numerical") == "1440")
| mit | Python |
e8cffceecf79b42790ccab1c61a2da06ae6529cd | comment no longer relevant. dealt with 2FA already | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/sso/backends.py | corehq/apps/sso/backends.py | from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain
from corehq.apps.sso.utils.user_helpers import get_email_domain_from_username
class SsoBackend(ModelBackend):
"""
Authenticates against an IdentityProvider and SAML2 session data.
"""
def authenticate(self, request, username, idp_slug, is_handshake_successful):
if not (request and username and idp_slug and is_handshake_successful):
return None
try:
identity_provider = IdentityProvider.objects.get(slug=idp_slug)
except IdentityProvider.DoesNotExist:
# not sure how we would even get here, but just in case
request.sso_login_error = f"Identity Provider {idp_slug} does not exist."
return None
if not identity_provider.is_active:
request.sso_login_error = f"This Identity Provider {idp_slug} is not active."
return None
email_domain = get_email_domain_from_username(username)
if not email_domain:
# not a valid username
request.sso_login_error = f"Username {username} is not valid."
return None
if not AuthenticatedEmailDomain.objects.filter(
email_domain=email_domain, identity_provider=identity_provider
).exists():
# if this user's email domain is not authorized by this identity
# do not continue with authentication
request.sso_login_error = (
f"The Email Domain {email_domain} is not allowed to "
f"authenticate with this Identity Provider ({idp_slug})."
)
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# todo handle user creation based on information from request/session
# do this prior to handling the invite scenario and new user scenario
request.sso_login_error = f"User {username} does not exist."
return None
request.sso_login_error = None
return user
| from django.contrib.auth.backends import ModelBackend
from django.contrib.auth.models import User
from corehq.apps.sso.models import IdentityProvider, AuthenticatedEmailDomain
from corehq.apps.sso.utils.user_helpers import get_email_domain_from_username
class SsoBackend(ModelBackend):
"""
Authenticates against an IdentityProvider and SAML2 session data.
"""
def authenticate(self, request, username, idp_slug, is_handshake_successful):
if not (request and username and idp_slug and is_handshake_successful):
return None
try:
identity_provider = IdentityProvider.objects.get(slug=idp_slug)
except IdentityProvider.DoesNotExist:
# not sure how we would even get here, but just in case
request.sso_login_error = f"Identity Provider {idp_slug} does not exist."
return None
if not identity_provider.is_active:
request.sso_login_error = f"This Identity Provider {idp_slug} is not active."
return None
email_domain = get_email_domain_from_username(username)
if not email_domain:
# not a valid username
request.sso_login_error = f"Username {username} is not valid."
return None
if not AuthenticatedEmailDomain.objects.filter(
email_domain=email_domain, identity_provider=identity_provider
).exists():
# if this user's email domain is not authorized by this identity
# do not continue with authentication
request.sso_login_error = (
f"The Email Domain {email_domain} is not allowed to "
f"authenticate with this Identity Provider ({idp_slug})."
)
return None
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
# todo handle user creation based on information from request/session
# do this prior to handling the invite scenario and new user scenario
request.sso_login_error = f"User {username} does not exist."
return None
request.sso_login_error = None
# todo what happens with 2FA required here?
return user
| bsd-3-clause | Python |
52c3981b8880085d060f874eb8feace6ac125411 | Replace exact equality assert with isclose in bands cli | Z2PackDev/TBmodels,Z2PackDev/TBmodels | tests/test_cli_bands.py | tests/test_cli_bands.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <[email protected]>
import os
import pytest
import tempfile
import numpy as np
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
np.testing.assert_allclose(bs.compare.difference(res, reference), 0)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: Dominik Gresch <[email protected]>
import os
import pytest
import tempfile
import bandstructure_utils as bs
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
from parameters import SAMPLES_DIR
def test_cli_bands():
samples_dir = os.path.join(SAMPLES_DIR, 'cli_bands')
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli,
[
'bands',
'-o', out_file.name,
'-k', os.path.join(samples_dir, 'kpoints.hdf5'),
'-i', os.path.join(samples_dir, 'silicon_model.hdf5')
],
catch_exceptions=False
)
print(run.output)
res = bs.io.load(out_file.name)
reference = bs.io.load(os.path.join(samples_dir, 'silicon_bands.hdf5'))
assert bs.compare.difference(res, reference) == 0
| apache-2.0 | Python |
8b4b5eb2506feed164b69efa66b4cdae159182c3 | Fix pre-commit issues in the cli_parse tests. | Z2PackDev/TBmodels,Z2PackDev/TBmodels | tests/test_cli_parse.py | tests/test_cli_parse.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
"""Tests for the 'parse' CLI command."""
import tempfile
import pytest
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
"""Test the 'parse' command with different 'prefix' and 'pos_kind'."""
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <[email protected]>
import pytest
import tempfile
from click.testing import CliRunner
import tbmodels
from tbmodels._cli import cli
@pytest.mark.parametrize('pos_kind', ['wannier', 'nearest_atom'])
@pytest.mark.parametrize('prefix', ['silicon', 'bi'])
def test_cli_parse(models_equal, prefix, sample, pos_kind):
runner = CliRunner()
with tempfile.NamedTemporaryFile() as out_file:
run = runner.invoke(
cli, ['parse', '-o', out_file.name, '-f',
sample(''), '-p', prefix, '--pos-kind', pos_kind],
catch_exceptions=False
)
print(run.output)
model_res = tbmodels.Model.from_hdf5_file(out_file.name)
model_reference = tbmodels.Model.from_wannier_folder(folder=sample(''), prefix=prefix, pos_kind=pos_kind)
models_equal(model_res, model_reference)
| apache-2.0 | Python |
78434bafbcc60ba7207d63481d3179474ae939ed | change to using scontrol for getting job state by default | timothydmorton/lsst-utils,timothydmorton/lsst-utils | pipeline/pipeline/batch.py | pipeline/pipeline/batch.py | import os, re
import subprocess
import time
def write_slurm_script(filename, cmd, **batch_options):
with open(filename, 'w') as fout:
fout.write('#!/bin/bash\n')
for opts in batch_options.items():
fout.write('#SBATCH --{0}={1}\n'.format(*opts))
fout.write('\n')
fout.write('{0}\n'.format(cmd))
def get_job_status(jobid, wait=30):
"""Returns status of slurm job <jobid>
Currently parses output of `sacct`. Perhaps would
be a good idea to move this to pyslurm (though this would
add a dependency.)
"""
cmd = 'scontrol show job {0}'.format(jobid)
output = subprocess.check_output(cmd, shell=True)
m = re.search('JobState=(\w+)', output)
status = None
if m:
status = m.group(1)
else:
repeat = 0
while not m and repeat < wait:
cmd = 'sacct -b -j {0}'.format(jobid)
output = subprocess.check_output(cmd, shell=True)
m = re.search('{0}\s+([A-Z]+)'.format(jobid), output)
time.sleep(1)
repeat += 1
if m:
status = m.group(1)
if status is None:
raise ValueError('Job not found: {0}'.format(jobid))
else:
return status
| import os, re
import subprocess
import time
def write_slurm_script(filename, cmd, **batch_options):
with open(filename, 'w') as fout:
fout.write('#!/bin/bash\n')
for opts in batch_options.items():
fout.write('#SBATCH --{0}={1}\n'.format(*opts))
fout.write('\n')
fout.write('{0}\n'.format(cmd))
def get_job_status(jobid, wait=30):
"""Returns status of slurm job <jobid>
Currently parses output of `sacct`. Perhaps would
be a good idea to move this to pyslurm (though this would
add a dependency.)
"""
m = False
repeat = 0
while not m and repeat < wait:
cmd = 'sacct -b -j {0}'.format(jobid)
output = subprocess.check_output(cmd, shell=True)
m = re.search('{0}\s+([A-Z]+)'.format(jobid), output)
time.sleep(1)
repeat += 1
if not m:
raise ValueError('Job not found: {0}'.format(jobid))
return m.group(1)
| mit | Python |
8e5ffc7ed1db1d17e55cf538fc9858705ecc9dd2 | Bump version to 1.20.4 | platformio/platformio-api | platformio_api/__init__.py | platformio_api/__init__.py | # Copyright 2014-present Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging.config
import os
# Release version as a tuple; the dotted-string form is derived below.
VERSION = (1, 20, 4)
__version__ = ".".join([str(s) for s in VERSION])

# Standard package metadata.
__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"
__author__ = "Ivan Kravets"
__email__ = "[email protected]"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2017 Ivan Kravets"

# Built-in defaults; every value below may be overridden by the JSON file
# referenced by the PIOAPI_CONFIG_PATH environment variable (loaded below).
config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024 * 1024 * 150,  # 150 Mb
    # Fuzzy search will not be applied to words shorter than the value below
    SOLR_FUZZY_MIN_WORD_LENGTH=3,
    LOGGING=dict(version=1)
)

# The config file is mandatory: fail fast if the environment is not set up.
assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
try:
    from time import tzset
    tzset()
except ImportError:
    # time.tzset is unavailable on some platforms (e.g. Windows); skip there.
    pass
| # Copyright 2014-present Ivan Kravets <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging.config
import os
# Release version as a tuple; the dotted-string form is derived below.
VERSION = (1, 20, 3)
__version__ = ".".join([str(s) for s in VERSION])

# Standard package metadata.
__title__ = "platformio-api"
__description__ = ("An API for PlatformIO")
__url__ = "https://github.com/ivankravets/platformio-api"
__author__ = "Ivan Kravets"
__email__ = "[email protected]"
__license__ = "MIT License"
__copyright__ = "Copyright (C) 2014-2017 Ivan Kravets"

# Built-in defaults; overridden by the JSON config file referenced by
# the PIOAPI_CONFIG_PATH environment variable (loaded below).
config = dict(
    SQLALCHEMY_DATABASE_URI=None,
    GITHUB_LOGIN=None,
    GITHUB_PASSWORD=None,
    DL_PIO_DIR=None,
    DL_PIO_URL=None,
    MAX_DLFILE_SIZE=1024 * 1024 * 150,  # 150 Mb
    # Fuzzy search will not be applied to words shorter than the value below
    SOLR_FUZZY_MIN_WORD_LENGTH=3,
    LOGGING=dict(version=1)
)

# The config file is mandatory: fail fast if the environment is not set up.
assert "PIOAPI_CONFIG_PATH" in os.environ
with open(os.environ.get("PIOAPI_CONFIG_PATH")) as f:
    config.update(json.load(f))

# configure logging for packages
logging.basicConfig()
logging.config.dictConfig(config['LOGGING'])

# setup time zone to UTC globally
os.environ['TZ'] = "+00:00"
try:
    from time import tzset
    tzset()
except ImportError:
    # time.tzset is unavailable on some platforms (e.g. Windows); skip there.
    pass
| apache-2.0 | Python |
d81c6e4ce44b0ee63fa116cb69efce17b8bb2c3f | test getting message via POP | mbdevpl/maildaemon,mbdevpl/maildaemon | test/test_pop_connection.py | test/test_pop_connection.py | """Tests for POP connection handling."""
import os
import pathlib
import unittest
from maildaemon.config import load_config
from maildaemon.pop_connection import POPConnection
# Directory containing this test module.
_HERE = pathlib.Path(__file__).parent
# JSON config describing the test server connections used by the tests below.
_TEST_CONFIG_PATH = _HERE.joinpath('maildaemon_test_config.json')
@unittest.skipUnless(os.environ.get('TEST_COMM') or os.environ.get('CI'),
                     'skipping tests that require server connection')
class Tests(unittest.TestCase):
    """Integration tests for POPConnection against live test servers.

    Skipped unless TEST_COMM or CI is set, because each test opens a real
    POP / POP-over-SSL connection described in the shared JSON config.
    """

    # Connection settings, loaded once per test class.
    config = load_config(_TEST_CONFIG_PATH)

    def test_retrieve_message_ids(self):
        """Message ids can be listed over plain POP and POP-over-SSL."""
        for connection_name in ['test-pop', 'test-pop-ssl']:
            with self.subTest(msg=connection_name):
                connection = POPConnection.from_dict(self.config['connections'][connection_name])
                connection.connect()
                ids = connection.retrieve_message_ids()
                alive = connection.is_alive()
                connection.disconnect()
                self.assertIsInstance(ids, list, msg=connection)
                self.assertTrue(alive, msg=connection)

    def test_retrieve_message_lines(self):
        """The raw lines of message #1 can be fetched from each server."""
        for connection_name in ['test-pop', 'test-pop-ssl']:
            with self.subTest(msg=connection_name):
                connection = POPConnection.from_dict(self.config['connections'][connection_name])
                connection.connect()
                try:
                    lines = connection.retrieve_message_lines(1)
                    self.assertGreater(len(lines), 0, msg=connection)
                finally:
                    # Fix: the original never disconnected here (unlike
                    # test_retrieve_message_ids), leaking the server
                    # connection on every run and on assertion failure.
                    connection.disconnect()
| """Tests for POP connection handling."""
import os
import pathlib
import unittest
from maildaemon.config import load_config
from maildaemon.pop_connection import POPConnection
# Directory containing this test module.
_HERE = pathlib.Path(__file__).parent
# JSON config describing the test server connections used by the tests below.
_TEST_CONFIG_PATH = _HERE.joinpath('maildaemon_test_config.json')
@unittest.skipUnless(os.environ.get('TEST_COMM') or os.environ.get('CI'),
                     'skipping tests that require server connection')
class Tests(unittest.TestCase):
    """Integration tests for POPConnection against live test servers."""

    # Connection settings shared by every test in the class.
    config = load_config(_TEST_CONFIG_PATH)

    def test_retrieve_message_ids(self):
        """Message ids can be listed over plain POP and POP-over-SSL."""
        for name in ('test-pop', 'test-pop-ssl'):
            with self.subTest(msg=name):
                conn = POPConnection.from_dict(self.config['connections'][name])
                conn.connect()
                ids = conn.retrieve_message_ids()
                alive = conn.is_alive()
                conn.disconnect()
                self.assertIsInstance(ids, list, msg=conn)
                self.assertTrue(alive, msg=conn)
| apache-2.0 | Python |
39a1c6c8c3795775dc8811e8e195feaa4e973cd8 | remove comments | RockefellerArchiveCenter/DACSspace | tests/test_validator.py | tests/test_validator.py | # from unittest.mock import patch
import json
import os
import unittest

from dacsspace.validator import Validator
class TestValidator(unittest.TestCase):
    """Tests for Validator.validate_data."""

    def test_validator(self):
        """A known-good resource export validates as a dict."""
        # Fix: the original used a hard-coded absolute path
        # ("/Users/aberish/Documents/GitHub/DACSspace/fixtures/resource.json")
        # which only exists on one developer's machine. Resolve the fixture
        # relative to this test file instead (repo root / fixtures).
        json_file = os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            "fixtures", "resource.json")
        with open(json_file, 'r') as f:
            json_data = json.load(f)
        result = Validator().validate_data(json_data)
        self.assertTrue(isinstance(result, dict))
        # NOTE(review): validity is reported as the *string* "True", not a
        # boolean -- confirm against Validator.validate_data's contract.
        self.assertEqual(result["valid"], "True")
| # from unittest.mock import patch
import json
import unittest
from dacsspace.validator import Validator
class TestValidator(unittest.TestCase):
    """Tests for Validator.validate_data."""

    def test_validator(self):
        """A known-good resource export validates as a dict."""
        # NOTE(review): machine-specific absolute path -- this fixture only
        # exists on one developer's machine; should be made relative.
        json_file = "/Users/aberish/Documents/GitHub/DACSspace/fixtures/resource.json"
        with open(json_file, 'r') as f:
            json_data = json.load(f)
        result = Validator().validate_data(json_data)
        self.assertTrue(isinstance(result, dict))
        # NOTE(review): validity appears to be the string "True", not a
        # boolean -- confirm against Validator.validate_data.
        self.assertEqual(result["valid"], "True")
# Standard unittest entry point: running this module directly would execute
# its tests via unittest.main(). Left commented out because the suite is
# invoked through an external test runner instead.
# if __name__ == "__main__":
#     unittest.main()
| mit | Python |
c2b55844bff3de39ac9a0a4bd8860306da731662 | fix for testing 401 after redirection | inveniosoftware/invenio-communities,inveniosoftware/invenio-communities,inveniosoftware/invenio-communities,inveniosoftware/invenio-communities | testsuite/test_views.py | testsuite/test_views.py | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Tests for communities views."""
from flask import url_for
from invenio.testsuite import InvenioTestCase, make_test_suite, run_test_suite
class CommunitiesViewTest(InvenioTestCase):
    """Test communities view functions."""

    def test_home_communities_page_availability(self):
        """communities - availability of main page"""
        # The index page must be reachable without authentication.
        resp = self.client.get(url_for('communities.index'))
        self.assert200(resp)

    def test_new_community_page_availability(self):
        """communities - availability of new community page"""
        # An authenticated admin session can open the creation form.
        self.login('admin', '')
        resp = self.client.get(url_for('communities.new'))
        self.assert200(resp)
        self.logout()

    def test_new_community_page_unauthorized(self):
        """communities - new communities restricted to logged in users"""
        # Follow redirects so the final response carries the 401.
        resp = self.client.get(url_for('communities.new'), follow_redirects=True)
        self.assert401(resp)
# Suite object consumed by Invenio's test runner.
TEST_SUITE = make_test_suite(CommunitiesViewTest)

if __name__ == "__main__":
    run_test_suite(TEST_SUITE)
| # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2013, 2014 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Tests for communities views."""
from flask import url_for, current_app
from invenio.testsuite import InvenioTestCase, make_test_suite, \
run_test_suite
class CommunitiesViewTest(InvenioTestCase):
    """ Test communities view functions. """

    def test_home_communities_page_availability(self):
        """communities - availability of main page"""
        response = self.client.get(url_for('communities.index'))
        self.assert200(response)

    def test_new_community_page_availability(self):
        """communities - availability of new community page"""
        # Creating a community requires an authenticated session.
        self.login('admin', '')
        response = self.client.get(url_for('communities.new'))
        self.assert200(response)
        self.logout()

    def test_new_community_page_unauthorized(self):
        """communities - new communities restricted to logged in users"""
        # NOTE(review): no follow_redirects here -- this expects the 401 on
        # the very first response; confirm the app does not first redirect
        # anonymous users (in which case the redirect must be followed).
        response = self.client.get(url_for('communities.new'))
        self.assert401(response)
TEST_SUITE = make_test_suite(CommunitiesViewTest)
if __name__ == "__main__":
run_test_suite(TEST_SUITE) | mit | Python |
77199b8c6b06054c7741433ec2fadd654a636677 | add hour var | pjdufour/tilejet-logs,tilejet/tilejet-logs | tilejetlogs/tilelogs.py | tilejetlogs/tilelogs.py | def buildTileRequestDocument(tileorigin, tilesource, x, y, z, status, datetime, ip):
    # Build the per-request log document for one tile fetch.
    # NOTE(review): x, y, z are concatenated with '/' below, so they are
    # expected to already be strings; the 'datetime' parameter is a
    # datetime-like object (it shadows the stdlib module name).
    r = {
        'ip': ip,
        'origin': tileorigin if tileorigin else "",  # normalize falsy origin to ""
        'source': tilesource,
        'location': z+'/'+x+'/'+y,
        'z': z,
        'status': status,
        # Pre-bucketed time keys so requests can be aggregated per year,
        # month, day, and hour without re-parsing timestamps.
        'year': datetime.strftime('%Y'),
        'month': datetime.strftime('%Y-%m'),
        'date': datetime.strftime('%Y-%m-%d'),
        'hour': datetime.strftime('%Y-%m-%d-%H'),
        'date_iso': datetime.isoformat()
    }
    return r
| def buildTileRequestDocument(tileorigin, tilesource, x, y, z, status, datetime, ip):
    # Build the per-request log document for one tile fetch.
    # NOTE(review): x, y, z are concatenated with '/' below, so they are
    # expected to already be strings; 'datetime' shadows the stdlib module.
    r = {
        'ip': ip,
        'origin': tileorigin if tileorigin else "",  # normalize falsy origin to ""
        'source': tilesource,
        'location': z+'/'+x+'/'+y,
        'z': z,
        'status': status,
        # Pre-bucketed time keys for aggregation at several granularities.
        'year': datetime.strftime('%Y'),
        'month': datetime.strftime('%Y-%m'),
        'date': datetime.strftime('%Y-%m-%d'),
        'date_iso': datetime.isoformat()
    }
    return r
| mit | Python |
d8556707aa3ab0bc89878e0b5daaaeb7b54616ae | Disable images | grave-w-grave/zulip,andersk/zulip,eeshangarg/zulip,hj3938/zulip,zorojean/zulip,willingc/zulip,qq1012803704/zulip,ericzhou2008/zulip,dxq-git/zulip,RobotCaleb/zulip,ahmadassaf/zulip,kou/zulip,bitemyapp/zulip,dawran6/zulip,bowlofstew/zulip,jackrzhang/zulip,showell/zulip,ericzhou2008/zulip,Batterfii/zulip,Gabriel0402/zulip,kokoar/zulip,Gabriel0402/zulip,mansilladev/zulip,qq1012803704/zulip,kaiyuanheshang/zulip,verma-varsha/zulip,wavelets/zulip,calvinleenyc/zulip,bluesea/zulip,bowlofstew/zulip,Vallher/zulip,swinghu/zulip,KJin99/zulip,praveenaki/zulip,zulip/zulip,ipernet/zulip,JPJPJPOPOP/zulip,jrowan/zulip,LAndreas/zulip,developerfm/zulip,PhilSk/zulip,KingxBanana/zulip,jrowan/zulip,lfranchi/zulip,EasonYi/zulip,hengqujushi/zulip,dhcrzf/zulip,firstblade/zulip,umkay/zulip,Cheppers/zulip,KJin99/zulip,reyha/zulip,glovebx/zulip,sharmaeklavya2/zulip,littledogboy/zulip,joshisa/zulip,Juanvulcano/zulip,littledogboy/zulip,yocome/zulip,wavelets/zulip,krtkmj/zulip,jphilipsen05/zulip,Suninus/zulip,Frouk/zulip,ufosky-server/zulip,dotcool/zulip,rht/zulip,jackrzhang/zulip,yocome/zulip,xuanhan863/zulip,johnny9/zulip,Suninus/zulip,LAndreas/zulip,suxinde2009/zulip,noroot/zulip,stamhe/zulip,atomic-labs/zulip,babbage/zulip,jeffcao/zulip,vabs22/zulip,paxapy/zulip,AZtheAsian/zulip,Suninus/zulip,SmartPeople/zulip,jackrzhang/zulip,ufosky-server/zulip,vabs22/zulip,jphilipsen05/zulip,rht/zulip,jimmy54/zulip,aps-sids/zulip,samatdav/zulip,yocome/zulip,hustlzp/zulip,wavelets/zulip,udxxabp/zulip,MayB/zulip,hafeez3000/zulip,vikas-parashar/zulip,ipernet/zulip,sonali0901/zulip,wweiradio/zulip,KingxBanana/zulip,zwily/zulip,JanzTam/zulip,mohsenSy/zulip,punchagan/zulip,gigawhitlocks/zulip,suxinde2009/zulip,eastlhu/zulip,KJin99/zulip,tdr130/zulip,natanovia/zulip,schatt/zulip,jimmy54/zulip,natanovia/zulip,wweiradio/zulip,luyifan/zulip,ryansnowboarder/zulip,shrikrishnaholla/zulip,susansls/zulip,johnny9/zulip,johnny9/zulip,SmartPeople/zulip,schatt/zulip,
krtkmj/zulip,Drooids/zulip,ahmadassaf/zulip,SmartPeople/zulip,ashwinirudrappa/zulip,zofuthan/zulip,rht/zulip,umkay/zulip,ryansnowboarder/zulip,yocome/zulip,MayB/zulip,hafeez3000/zulip,jrowan/zulip,shaunstanislaus/zulip,he15his/zulip,rishig/zulip,paxapy/zulip,ryanbackman/zulip,stamhe/zulip,KingxBanana/zulip,vakila/zulip,itnihao/zulip,m1ssou/zulip,atomic-labs/zulip,xuxiao/zulip,showell/zulip,dnmfarrell/zulip,willingc/zulip,aps-sids/zulip,zofuthan/zulip,susansls/zulip,Diptanshu8/zulip,ryanbackman/zulip,hayderimran7/zulip,ufosky-server/zulip,itnihao/zulip,timabbott/zulip,hustlzp/zulip,alliejones/zulip,luyifan/zulip,wweiradio/zulip,rht/zulip,zacps/zulip,niftynei/zulip,ashwinirudrappa/zulip,praveenaki/zulip,Galexrt/zulip,jphilipsen05/zulip,ikasumiwt/zulip,shubhamdhama/zulip,vikas-parashar/zulip,hustlzp/zulip,dxq-git/zulip,SmartPeople/zulip,suxinde2009/zulip,itnihao/zulip,ryansnowboarder/zulip,shaunstanislaus/zulip,vikas-parashar/zulip,wavelets/zulip,andersk/zulip,wavelets/zulip,yuvipanda/zulip,lfranchi/zulip,Cheppers/zulip,RobotCaleb/zulip,ahmadassaf/zulip,wangdeshui/zulip,JanzTam/zulip,PaulPetring/zulip,proliming/zulip,eastlhu/zulip,RobotCaleb/zulip,DazWorrall/zulip,zorojean/zulip,Frouk/zulip,cosmicAsymmetry/zulip,moria/zulip,PhilSk/zulip,johnnygaddarr/zulip,vabs22/zulip,kaiyuanheshang/zulip,adnanh/zulip,zwily/zulip,saitodisse/zulip,LeeRisk/zulip,paxapy/zulip,aakash-cr7/zulip,noroot/zulip,moria/zulip,verma-varsha/zulip,huangkebo/zulip,shrikrishnaholla/zulip,jackrzhang/zulip,hafeez3000/zulip,guiquanz/zulip,levixie/zulip,joshisa/zulip,guiquanz/zulip,andersk/zulip,MariaFaBella85/zulip,tommyip/zulip,PhilSk/zulip,arpitpanwar/zulip,brockwhittaker/zulip,itnihao/zulip,praveenaki/zulip,tbutter/zulip,he15his/zulip,PhilSk/zulip,xuanhan863/zulip,mahim97/zulip,dotcool/zulip,tdr130/zulip,adnanh/zulip,peiwei/zulip,mansilladev/zulip,aps-sids/zulip,sharmaeklavya2/zulip,KJin99/zulip,synicalsyntax/zulip,dotcool/zulip,fw1121/zulip,mdavid/zulip,xuxiao/zulip,m1ssou/zulip,cosmicAsymmetry/zulip
,armooo/zulip,calvinleenyc/zulip,vakila/zulip,mansilladev/zulip,PaulPetring/zulip,gigawhitlocks/zulip,sharmaeklavya2/zulip,themass/zulip,dnmfarrell/zulip,ufosky-server/zulip,ApsOps/zulip,christi3k/zulip,eastlhu/zulip,huangkebo/zulip,grave-w-grave/zulip,he15his/zulip,synicalsyntax/zulip,dattatreya303/zulip,vabs22/zulip,praveenaki/zulip,AZtheAsian/zulip,tiansiyuan/zulip,tommyip/zulip,joshisa/zulip,bastianh/zulip,karamcnair/zulip,alliejones/zulip,verma-varsha/zulip,Diptanshu8/zulip,dawran6/zulip,xuxiao/zulip,TigorC/zulip,shubhamdhama/zulip,Suninus/zulip,deer-hope/zulip,bowlofstew/zulip,blaze225/zulip,mahim97/zulip,glovebx/zulip,bowlofstew/zulip,proliming/zulip,Drooids/zulip,rishig/zulip,Cheppers/zulip,dhcrzf/zulip,jrowan/zulip,peiwei/zulip,umkay/zulip,verma-varsha/zulip,littledogboy/zulip,EasonYi/zulip,Jianchun1/zulip,dwrpayne/zulip,themass/zulip,johnnygaddarr/zulip,atomic-labs/zulip,technicalpickles/zulip,LAndreas/zulip,tiansiyuan/zulip,tdr130/zulip,blaze225/zulip,hustlzp/zulip,schatt/zulip,jerryge/zulip,ashwinirudrappa/zulip,calvinleenyc/zulip,levixie/zulip,TigorC/zulip,aliceriot/zulip,hj3938/zulip,ericzhou2008/zulip,ikasumiwt/zulip,johnny9/zulip,willingc/zulip,hj3938/zulip,dotcool/zulip,peiwei/zulip,vaidap/zulip,samatdav/zulip,m1ssou/zulip,brockwhittaker/zulip,hackerkid/zulip,jerryge/zulip,Batterfii/zulip,ryansnowboarder/zulip,fw1121/zulip,rht/zulip,voidException/zulip,LAndreas/zulip,blaze225/zulip,mdavid/zulip,itnihao/zulip,wavelets/zulip,JPJPJPOPOP/zulip,Batterfii/zulip,akuseru/zulip,Drooids/zulip,babbage/zulip,luyifan/zulip,jainayush975/zulip,MayB/zulip,Batterfii/zulip,brainwane/zulip,guiquanz/zulip,jessedhillon/zulip,developerfm/zulip,aliceriot/zulip,zhaoweigg/zulip,kokoar/zulip,thomasboyt/zulip,tbutter/zulip,yuvipanda/zulip,aliceriot/zulip,sonali0901/zulip,qq1012803704/zulip,sup95/zulip,showell/zulip,natanovia/zulip,JPJPJPOPOP/zulip,udxxabp/zulip,brockwhittaker/zulip,technicalpickles/zulip,bitemyapp/zulip,developerfm/zulip,codeKonami/zulip,developerfm/zulip,Val
lher/zulip,timabbott/zulip,Diptanshu8/zulip,Batterfii/zulip,ufosky-server/zulip,saitodisse/zulip,Batterfii/zulip,samatdav/zulip,bitemyapp/zulip,wangdeshui/zulip,mansilladev/zulip,DazWorrall/zulip,mdavid/zulip,peiwei/zulip,stamhe/zulip,amallia/zulip,hengqujushi/zulip,dwrpayne/zulip,shaunstanislaus/zulip,dawran6/zulip,seapasulli/zulip,hackerkid/zulip,alliejones/zulip,ikasumiwt/zulip,guiquanz/zulip,alliejones/zulip,johnny9/zulip,ashwinirudrappa/zulip,babbage/zulip,ashwinirudrappa/zulip,m1ssou/zulip,alliejones/zulip,easyfmxu/zulip,Galexrt/zulip,lfranchi/zulip,seapasulli/zulip,blaze225/zulip,kou/zulip,hengqujushi/zulip,akuseru/zulip,dattatreya303/zulip,susansls/zulip,MayB/zulip,zachallaun/zulip,Qgap/zulip,rht/zulip,suxinde2009/zulip,tiansiyuan/zulip,codeKonami/zulip,schatt/zulip,Frouk/zulip,armooo/zulip,isht3/zulip,amyliu345/zulip,glovebx/zulip,krtkmj/zulip,voidException/zulip,jimmy54/zulip,esander91/zulip,timabbott/zulip,wdaher/zulip,hustlzp/zulip,showell/zulip,akuseru/zulip,proliming/zulip,itnihao/zulip,jimmy54/zulip,PaulPetring/zulip,dwrpayne/zulip,noroot/zulip,seapasulli/zulip,wdaher/zulip,umkay/zulip,ericzhou2008/zulip,codeKonami/zulip,tdr130/zulip,isht3/zulip,xuxiao/zulip,seapasulli/zulip,joyhchen/zulip,schatt/zulip,yuvipanda/zulip,m1ssou/zulip,mansilladev/zulip,jrowan/zulip,zwily/zulip,wweiradio/zulip,suxinde2009/zulip,guiquanz/zulip,thomasboyt/zulip,tommyip/zulip,hengqujushi/zulip,EasonYi/zulip,punchagan/zulip,punchagan/zulip,ryansnowboarder/zulip,mohsenSy/zulip,LeeRisk/zulip,moria/zulip,amallia/zulip,dhcrzf/zulip,seapasulli/zulip,niftynei/zulip,peguin40/zulip,aps-sids/zulip,ApsOps/zulip,moria/zulip,jerryge/zulip,bssrdf/zulip,firstblade/zulip,bitemyapp/zulip,showell/zulip,zulip/zulip,levixie/zulip,zhaoweigg/zulip,aps-sids/zulip,ipernet/zulip,LAndreas/zulip,bitemyapp/zulip,KingxBanana/zulip,niftynei/zulip,wdaher/zulip,ahmadassaf/zulip,JPJPJPOPOP/zulip,peguin40/zulip,jeffcao/zulip,zulip/zulip,karamcnair/zulip,technicalpickles/zulip,guiquanz/zulip,brainwane/zulip,pr
adiptad/zulip,vaidap/zulip,themass/zulip,tdr130/zulip,cosmicAsymmetry/zulip,shubhamdhama/zulip,dattatreya303/zulip,easyfmxu/zulip,PaulPetring/zulip,zhaoweigg/zulip,so0k/zulip,souravbadami/zulip,nicholasbs/zulip,stamhe/zulip,joyhchen/zulip,vikas-parashar/zulip,Diptanshu8/zulip,shaunstanislaus/zulip,tommyip/zulip,andersk/zulip,jessedhillon/zulip,bssrdf/zulip,gigawhitlocks/zulip,AZtheAsian/zulip,hayderimran7/zulip,ipernet/zulip,sharmaeklavya2/zulip,MayB/zulip,paxapy/zulip,PaulPetring/zulip,pradiptad/zulip,so0k/zulip,AZtheAsian/zulip,swinghu/zulip,DazWorrall/zulip,seapasulli/zulip,zhaoweigg/zulip,bluesea/zulip,reyha/zulip,armooo/zulip,sup95/zulip,peiwei/zulip,jainayush975/zulip,xuanhan863/zulip,ericzhou2008/zulip,armooo/zulip,wweiradio/zulip,bluesea/zulip,rht/zulip,natanovia/zulip,arpitpanwar/zulip,SmartPeople/zulip,so0k/zulip,amallia/zulip,brockwhittaker/zulip,mahim97/zulip,he15his/zulip,xuanhan863/zulip,adnanh/zulip,Juanvulcano/zulip,krtkmj/zulip,avastu/zulip,dwrpayne/zulip,kaiyuanheshang/zulip,swinghu/zulip,dxq-git/zulip,cosmicAsymmetry/zulip,RobotCaleb/zulip,nicholasbs/zulip,tommyip/zulip,niftynei/zulip,saitodisse/zulip,TigorC/zulip,so0k/zulip,joyhchen/zulip,dwrpayne/zulip,samatdav/zulip,hafeez3000/zulip,Qgap/zulip,mohsenSy/zulip,shrikrishnaholla/zulip,avastu/zulip,dnmfarrell/zulip,mansilladev/zulip,so0k/zulip,wweiradio/zulip,dotcool/zulip,LAndreas/zulip,udxxabp/zulip,isht3/zulip,hustlzp/zulip,jerryge/zulip,dnmfarrell/zulip,hayderimran7/zulip,SmartPeople/zulip,praveenaki/zulip,noroot/zulip,JanzTam/zulip,armooo/zulip,aakash-cr7/zulip,voidException/zulip,Drooids/zulip,dxq-git/zulip,gkotian/zulip,developerfm/zulip,gkotian/zulip,easyfmxu/zulip,Juanvulcano/zulip,luyifan/zulip,alliejones/zulip,dnmfarrell/zulip,jeffcao/zulip,codeKonami/zulip,Galexrt/zulip,natanovia/zulip,gigawhitlocks/zulip,udxxabp/zulip,sonali0901/zulip,timabbott/zulip,eeshangarg/zulip,eastlhu/zulip,glovebx/zulip,littledogboy/zulip,Qgap/zulip,shrikrishnaholla/zulip,LeeRisk/zulip,esander91/zulip,hustlzp/zu
lip,PhilSk/zulip,he15his/zulip,LAndreas/zulip,gigawhitlocks/zulip,aliceriot/zulip,fw1121/zulip,vabs22/zulip,kokoar/zulip,moria/zulip,aliceriot/zulip,Galexrt/zulip,j831/zulip,arpith/zulip,rishig/zulip,dhcrzf/zulip,kaiyuanheshang/zulip,Diptanshu8/zulip,andersk/zulip,suxinde2009/zulip,shrikrishnaholla/zulip,sonali0901/zulip,deer-hope/zulip,hengqujushi/zulip,esander91/zulip,codeKonami/zulip,andersk/zulip,zachallaun/zulip,amanharitsh123/zulip,jonesgithub/zulip,johnnygaddarr/zulip,vaidap/zulip,glovebx/zulip,Suninus/zulip,atomic-labs/zulip,brainwane/zulip,JanzTam/zulip,eeshangarg/zulip,m1ssou/zulip,deer-hope/zulip,he15his/zulip,swinghu/zulip,amallia/zulip,yuvipanda/zulip,suxinde2009/zulip,wangdeshui/zulip,vikas-parashar/zulip,jimmy54/zulip,j831/zulip,joshisa/zulip,j831/zulip,Qgap/zulip,tdr130/zulip,armooo/zulip,firstblade/zulip,ApsOps/zulip,Cheppers/zulip,Cheppers/zulip,firstblade/zulip,adnanh/zulip,Frouk/zulip,thomasboyt/zulip,wangdeshui/zulip,hayderimran7/zulip,tiansiyuan/zulip,cosmicAsymmetry/zulip,joshisa/zulip,jeffcao/zulip,jonesgithub/zulip,aakash-cr7/zulip,MariaFaBella85/zulip,dattatreya303/zulip,brainwane/zulip,shrikrishnaholla/zulip,grave-w-grave/zulip,DazWorrall/zulip,proliming/zulip,easyfmxu/zulip,isht3/zulip,eeshangarg/zulip,PaulPetring/zulip,lfranchi/zulip,Jianchun1/zulip,esander91/zulip,ericzhou2008/zulip,pradiptad/zulip,vabs22/zulip,proliming/zulip,thomasboyt/zulip,hj3938/zulip,jainayush975/zulip,bowlofstew/zulip,hayderimran7/zulip,souravbadami/zulip,themass/zulip,tommyip/zulip,atomic-labs/zulip,niftynei/zulip,MariaFaBella85/zulip,zwily/zulip,swinghu/zulip,noroot/zulip,MayB/zulip,jerryge/zulip,akuseru/zulip,yuvipanda/zulip,jackrzhang/zulip,arpitpanwar/zulip,technicalpickles/zulip,ryanbackman/zulip,krtkmj/zulip,amanharitsh123/zulip,eeshangarg/zulip,stamhe/zulip,levixie/zulip,shubhamdhama/zulip,mahim97/zulip,tbutter/zulip,kou/zulip,sonali0901/zulip,littledogboy/zulip,xuxiao/zulip,TigorC/zulip,zofuthan/zulip,bastianh/zulip,susansls/zulip,eastlhu/zulip,j831/zuli
p,shrikrishnaholla/zulip,j831/zulip,codeKonami/zulip,mohsenSy/zulip,vakila/zulip,willingc/zulip,vakila/zulip,jonesgithub/zulip,Vallher/zulip,firstblade/zulip,ahmadassaf/zulip,wweiradio/zulip,itnihao/zulip,Gabriel0402/zulip,bssrdf/zulip,dnmfarrell/zulip,kou/zulip,tbutter/zulip,so0k/zulip,cosmicAsymmetry/zulip,TigorC/zulip,arpitpanwar/zulip,mohsenSy/zulip,Qgap/zulip,nicholasbs/zulip,Drooids/zulip,levixie/zulip,Gabriel0402/zulip,grave-w-grave/zulip,voidException/zulip,vakila/zulip,Batterfii/zulip,bluesea/zulip,peguin40/zulip,rishig/zulip,akuseru/zulip,voidException/zulip,qq1012803704/zulip,xuanhan863/zulip,praveenaki/zulip,fw1121/zulip,mahim97/zulip,joyhchen/zulip,hengqujushi/zulip,bssrdf/zulip,LeeRisk/zulip,ahmadassaf/zulip,arpitpanwar/zulip,zhaoweigg/zulip,ufosky-server/zulip,zacps/zulip,saitodisse/zulip,tbutter/zulip,noroot/zulip,mdavid/zulip,eastlhu/zulip,Qgap/zulip,yuvipanda/zulip,shaunstanislaus/zulip,souravbadami/zulip,yocome/zulip,tdr130/zulip,zofuthan/zulip,shaunstanislaus/zulip,TigorC/zulip,sharmaeklavya2/zulip,avastu/zulip,pradiptad/zulip,shubhamdhama/zulip,yocome/zulip,udxxabp/zulip,peiwei/zulip,avastu/zulip,amyliu345/zulip,joyhchen/zulip,souravbadami/zulip,jimmy54/zulip,dnmfarrell/zulip,babbage/zulip,Juanvulcano/zulip,zulip/zulip,PhilSk/zulip,gigawhitlocks/zulip,DazWorrall/zulip,Jianchun1/zulip,MariaFaBella85/zulip,dattatreya303/zulip,natanovia/zulip,timabbott/zulip,Vallher/zulip,gigawhitlocks/zulip,dhcrzf/zulip,EasonYi/zulip,krtkmj/zulip,adnanh/zulip,jessedhillon/zulip,jimmy54/zulip,calvinleenyc/zulip,ikasumiwt/zulip,jessedhillon/zulip,mdavid/zulip,mdavid/zulip,aps-sids/zulip,tbutter/zulip,johnnygaddarr/zulip,bastianh/zulip,pradiptad/zulip,amallia/zulip,akuseru/zulip,JPJPJPOPOP/zulip,KingxBanana/zulip,kokoar/zulip,bluesea/zulip,JPJPJPOPOP/zulip,verma-varsha/zulip,jeffcao/zulip,xuxiao/zulip,MayB/zulip,karamcnair/zulip,jphilipsen05/zulip,KJin99/zulip,Suninus/zulip,bastianh/zulip,gkotian/zulip,wdaher/zulip,qq1012803704/zulip,glovebx/zulip,luyifan/zulip,hj393
8/zulip,jackrzhang/zulip,deer-hope/zulip,arpith/zulip,dotcool/zulip,akuseru/zulip,JanzTam/zulip,wangdeshui/zulip,dotcool/zulip,jeffcao/zulip,zulip/zulip,vikas-parashar/zulip,jainayush975/zulip,xuxiao/zulip,hackerkid/zulip,susansls/zulip,zorojean/zulip,Vallher/zulip,RobotCaleb/zulip,esander91/zulip,zachallaun/zulip,thomasboyt/zulip,synicalsyntax/zulip,amallia/zulip,nicholasbs/zulip,zorojean/zulip,synicalsyntax/zulip,levixie/zulip,kaiyuanheshang/zulip,thomasboyt/zulip,amanharitsh123/zulip,zwily/zulip,bssrdf/zulip,ipernet/zulip,sup95/zulip,sup95/zulip,Gabriel0402/zulip,vakila/zulip,aliceriot/zulip,rishig/zulip,joshisa/zulip,MariaFaBella85/zulip,nicholasbs/zulip,zulip/zulip,andersk/zulip,jerryge/zulip,hayderimran7/zulip,brainwane/zulip,rishig/zulip,bluesea/zulip,saitodisse/zulip,pradiptad/zulip,themass/zulip,Galexrt/zulip,vaidap/zulip,ericzhou2008/zulip,karamcnair/zulip,lfranchi/zulip,showell/zulip,jphilipsen05/zulip,natanovia/zulip,Diptanshu8/zulip,hackerkid/zulip,timabbott/zulip,jainayush975/zulip,AZtheAsian/zulip,saitodisse/zulip,Cheppers/zulip,hayderimran7/zulip,thomasboyt/zulip,proliming/zulip,eeshangarg/zulip,zorojean/zulip,ryanbackman/zulip,PaulPetring/zulip,Jianchun1/zulip,dhcrzf/zulip,udxxabp/zulip,gkotian/zulip,bssrdf/zulip,Qgap/zulip,johnny9/zulip,bitemyapp/zulip,jphilipsen05/zulip,atomic-labs/zulip,bluesea/zulip,ikasumiwt/zulip,showell/zulip,he15his/zulip,nicholasbs/zulip,jackrzhang/zulip,adnanh/zulip,zofuthan/zulip,ashwinirudrappa/zulip,moria/zulip,Cheppers/zulip,Vallher/zulip,zachallaun/zulip,alliejones/zulip,dwrpayne/zulip,lfranchi/zulip,hafeez3000/zulip,vaidap/zulip,jeffcao/zulip,zulip/zulip,jessedhillon/zulip,themass/zulip,blaze225/zulip,qq1012803704/zulip,codeKonami/zulip,arpith/zulip,jonesgithub/zulip,niftynei/zulip,huangkebo/zulip,luyifan/zulip,zachallaun/zulip,wangdeshui/zulip,kaiyuanheshang/zulip,zorojean/zulip,dhcrzf/zulip,stamhe/zulip,christi3k/zulip,vaidap/zulip,kou/zulip,Jianchun1/zulip,zacps/zulip,JanzTam/zulip,dawran6/zulip,umkay/zulip,Juanvu
lcano/zulip,swinghu/zulip,aakash-cr7/zulip,wavelets/zulip,so0k/zulip,ApsOps/zulip,Juanvulcano/zulip,karamcnair/zulip,huangkebo/zulip,jonesgithub/zulip,Vallher/zulip,sup95/zulip,sup95/zulip,zhaoweigg/zulip,jessedhillon/zulip,deer-hope/zulip,Frouk/zulip,zachallaun/zulip,souravbadami/zulip,reyha/zulip,mohsenSy/zulip,shaunstanislaus/zulip,christi3k/zulip,developerfm/zulip,bowlofstew/zulip,EasonYi/zulip,johnnygaddarr/zulip,peiwei/zulip,synicalsyntax/zulip,qq1012803704/zulip,paxapy/zulip,eastlhu/zulip,johnny9/zulip,avastu/zulip,AZtheAsian/zulip,jainayush975/zulip,aakash-cr7/zulip,umkay/zulip,timabbott/zulip,huangkebo/zulip,Drooids/zulip,peguin40/zulip,willingc/zulip,m1ssou/zulip,amyliu345/zulip,Gabriel0402/zulip,grave-w-grave/zulip,Frouk/zulip,schatt/zulip,dxq-git/zulip,tiansiyuan/zulip,ApsOps/zulip,praveenaki/zulip,shubhamdhama/zulip,amyliu345/zulip,reyha/zulip,christi3k/zulip,ufosky-server/zulip,gkotian/zulip,DazWorrall/zulip,babbage/zulip,swinghu/zulip,tiansiyuan/zulip,LeeRisk/zulip,amanharitsh123/zulip,ryanbackman/zulip,esander91/zulip,eeshangarg/zulip,noroot/zulip,christi3k/zulip,calvinleenyc/zulip,easyfmxu/zulip,arpith/zulip,hafeez3000/zulip,zachallaun/zulip,amyliu345/zulip,Gabriel0402/zulip,MariaFaBella85/zulip,EasonYi/zulip,ahmadassaf/zulip,technicalpickles/zulip,arpith/zulip,nicholasbs/zulip,Suninus/zulip,fw1121/zulip,blaze225/zulip,glovebx/zulip,brainwane/zulip,reyha/zulip,deer-hope/zulip,udxxabp/zulip,tbutter/zulip,ikasumiwt/zulip,samatdav/zulip,ashwinirudrappa/zulip,zorojean/zulip,mansilladev/zulip,zwily/zulip,kaiyuanheshang/zulip,reyha/zulip,firstblade/zulip,fw1121/zulip,hackerkid/zulip,voidException/zulip,johnnygaddarr/zulip,avastu/zulip,joshisa/zulip,yocome/zulip,MariaFaBella85/zulip,Galexrt/zulip,kokoar/zulip,EasonYi/zulip,souravbadami/zulip,peguin40/zulip,RobotCaleb/zulip,huangkebo/zulip,gkotian/zulip,jonesgithub/zulip,bastianh/zulip,proliming/zulip,ryansnowboarder/zulip,arpitpanwar/zulip,babbage/zulip,jonesgithub/zulip,hengqujushi/zulip,hafeez3000/zulip,
sharmaeklavya2/zulip,stamhe/zulip,deer-hope/zulip,krtkmj/zulip,hackerkid/zulip,KingxBanana/zulip,brockwhittaker/zulip,dattatreya303/zulip,ryanbackman/zulip,KJin99/zulip,punchagan/zulip,Drooids/zulip,peguin40/zulip,arpitpanwar/zulip,easyfmxu/zulip,synicalsyntax/zulip,pradiptad/zulip,amallia/zulip,willingc/zulip,easyfmxu/zulip,aps-sids/zulip,DazWorrall/zulip,dawran6/zulip,dxq-git/zulip,punchagan/zulip,bowlofstew/zulip,christi3k/zulip,Jianchun1/zulip,yuvipanda/zulip,Frouk/zulip,voidException/zulip,sonali0901/zulip,zofuthan/zulip,wangdeshui/zulip,avastu/zulip,RobotCaleb/zulip,willingc/zulip,esander91/zulip,aakash-cr7/zulip,punchagan/zulip,brockwhittaker/zulip,technicalpickles/zulip,isht3/zulip,ApsOps/zulip,susansls/zulip,zwily/zulip,adnanh/zulip,kokoar/zulip,wdaher/zulip,seapasulli/zulip,technicalpickles/zulip,JanzTam/zulip,aliceriot/zulip,jrowan/zulip,umkay/zulip,ryansnowboarder/zulip,amanharitsh123/zulip,babbage/zulip,vakila/zulip,schatt/zulip,calvinleenyc/zulip,LeeRisk/zulip,jessedhillon/zulip,luyifan/zulip,johnnygaddarr/zulip,xuanhan863/zulip,tommyip/zulip,dxq-git/zulip,KJin99/zulip,littledogboy/zulip,punchagan/zulip,fw1121/zulip,atomic-labs/zulip,developerfm/zulip,wdaher/zulip,firstblade/zulip,mdavid/zulip,bitemyapp/zulip,ikasumiwt/zulip,kou/zulip,kou/zulip,zofuthan/zulip,littledogboy/zulip,zacps/zulip,verma-varsha/zulip,dawran6/zulip,LeeRisk/zulip,paxapy/zulip,huangkebo/zulip,synicalsyntax/zulip,saitodisse/zulip,gkotian/zulip,tiansiyuan/zulip,dwrpayne/zulip,ipernet/zulip,karamcnair/zulip,amanharitsh123/zulip,zacps/zulip,Galexrt/zulip,joyhchen/zulip,jerryge/zulip,zacps/zulip,armooo/zulip,xuanhan863/zulip,hj3938/zulip,lfranchi/zulip,zhaoweigg/zulip,rishig/zulip,arpith/zulip,ipernet/zulip,themass/zulip,isht3/zulip,karamcnair/zulip,hj3938/zulip,ApsOps/zulip,kokoar/zulip,bastianh/zulip,bssrdf/zulip,wdaher/zulip,grave-w-grave/zulip,guiquanz/zulip,amyliu345/zulip,levixie/zulip,bastianh/zulip,moria/zulip,samatdav/zulip,hackerkid/zulip,brainwane/zulip,mahim97/zulip,shubham
dhama/zulip,j831/zulip | zephyr/lib/bugdown.py | zephyr/lib/bugdown.py | import re
import markdown
class Bugdown(markdown.Extension):
    """Humbug-specific tweaks to the python-markdown parser."""

    def extendMarkdown(self, md, md_globals):
        # Disable inline images by removing both image syntaxes from the
        # parser: ![alt](url) and the reference form ![alt][ref].
        del md.inlinePatterns['image_link']
        del md.inlinePatterns['image_reference']
# We need to re-initialize the markdown engine every 30 messages
# due to some sort of performance leak in the markdown library.
MAX_MD_ENGINE_USES = 30

# Lazily created in convert(); reset to None after MAX_MD_ENGINE_USES calls.
_md_engine = None
# Number of conversions performed by the current engine instance.
_use_count = 0

# A link starts after whitespace, and cannot contain spaces,
# end parentheses, or end brackets (which would confuse Markdown).
# FIXME: Use one of the actual linkification extensions.
_link_regex = re.compile(r'(\s|\A)(?P<url>https?://[^\s\])]+)')

# Pad heading markers to make Markdown ignore them
# FIXME: Write a real extension for the markdown library
_heading_regex = re.compile(r'^([#-=])', flags=re.MULTILINE)
def _linkify(match):
url = match.group('url')
return ' [%s](%s) ' % (url, url)
def convert(md):
    """Convert Markdown to HTML, with Humbug-specific settings and hacks.

    Maintains a cached module-level engine that is rebuilt after
    MAX_MD_ENGINE_USES conversions to work around a leak in the
    markdown library.
    """
    global _md_engine, _use_count

    # Lazily (re)build the engine with the Humbug extension set.
    if _md_engine is None:
        _md_engine = markdown.Markdown(
            extensions = ['fenced_code', 'codehilite', 'nl2br', Bugdown()],
            safe_mode = 'escape',
            output_format = 'xhtml')

    md = _heading_regex.sub(r' \1', md)
    md = _link_regex.sub(_linkify, md)
    try:
        html = _md_engine.convert(md)
    except Exception:
        # Fix: was a bare "except:", which would also swallow
        # KeyboardInterrupt and SystemExit.
        # FIXME: Do something more reasonable here!
        html = '<p>[Humbug note: Sorry, we could not understand the formatting of your message]</p>'

    # Drop the engine after enough uses so its leaked state is reclaimed.
    _use_count += 1
    if _use_count >= MAX_MD_ENGINE_USES:
        _md_engine = None
        _use_count = 0
    return html
| import re
import markdown
# We need to re-initialize the markdown engine every 30 messages
# due to some sort of performance leak in the markdown library.
MAX_MD_ENGINE_USES = 30

# Lazily created in convert(); reset to None after MAX_MD_ENGINE_USES calls.
_md_engine = None
# Number of conversions performed by the current engine instance.
_use_count = 0

# A link starts after whitespace, and cannot contain spaces,
# end parentheses, or end brackets (which would confuse Markdown).
# FIXME: Use one of the actual linkification extensions.
_link_regex = re.compile(r'(\s|\A)(?P<url>https?://[^\s\])]+)')

# Pad heading markers to make Markdown ignore them
# FIXME: Write a real extension for the markdown library
_heading_regex = re.compile(r'^([#-=])', flags=re.MULTILINE)
def _linkify(match):
url = match.group('url')
return ' [%s](%s) ' % (url, url)
def convert(md):
"""Convert Markdown to HTML, with Humbug-specific settings and hacks."""
global _md_engine, _use_count
if _md_engine is None:
_md_engine = markdown.Markdown(
extensions = ['fenced_code', 'codehilite', 'nl2br'],
safe_mode = 'escape',
output_format = 'xhtml')
md = _heading_regex.sub(r' \1', md)
md = _link_regex.sub(_linkify, md)
try:
html = _md_engine.convert(md)
except:
# FIXME: Do something more reasonable here!
html = '<p>[Humbug note: Sorry, we could not understand the formatting of your message]</p>'
_use_count += 1
if _use_count >= MAX_MD_ENGINE_USES:
_md_engine = None
_use_count = 0
return html
| apache-2.0 | Python |
c8a280d6466623b8d76fa01c12ebf295151d35d6 | remove primary key constraint | DarioValocchi/son-sp-infrabstract,skolome/son-sp-infrabstract,DarioValocchi/son-sp-infrabstract,DarioValocchi/son-sp-infrabstract,skolome/son-sp-infrabstract,skolome/son-sp-infrabstract | wim-adaptor/vtn-api/database/sqlalchemy_declaritive.py | wim-adaptor/vtn-api/database/sqlalchemy_declaritive.py | import os
import sys
from sqlalchemy import create_engine, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Connectivity(Base):
    # Declarative mapping for WIM connectivity records (segment <-> bridge
    # port/location).
    __tablename__ = 'connectivity'
    # define the columns for the table
    segment = Column(String(250),nullable=False)
    bridge_name = Column(String(250), nullable=False)
    # NOTE(review): no column here is marked primary_key; SQLAlchemy's
    # declarative mapper normally requires at least one primary-key column
    # to assemble the mapping -- confirm this class actually maps.
    port_id = Column(String(250))
    location = Column(String(250))
# Create engine that stores data in the local directory's
engine = create_engine('sqlite:///wim_info.db')
# Create the Table
Base.metadata.create_all(engine)
| import os
import sys
from sqlalchemy import create_engine, Column, ForeignKey, Integer, String
from sqlalchemy.orm import relationship
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class Connectivity(Base):
__tablename__ = 'connectivity'
# define the columns for the table
segment = Column(String(250),nullable=False)
bridge_name = Column(String(250), nullable=False)
port_id = Column(String(250),primary_key=True)
location = Column(String(250))
# Create engine that stores data in the local directory's
engine = create_engine('sqlite:///wim_info.db')
# Create the Table
Base.metadata.create_all(engine)
| apache-2.0 | Python |
f984db30c4d4cab1377d21a73ec0b802590f8a51 | Update sqlalchemy migrate scripts for postgres | changsimon/trove,redhat-openstack/trove,cp16net/trove,zhangg/trove,zhangg/trove,zhujzhuo/openstack-trove,cp16net/trove,zhujzhuo/openstack-trove,hplustree/trove,redhat-openstack/trove,redhat-openstack/trove,fabian4/trove,mmasaki/trove,changsimon/trove,changsimon/trove,fabian4/trove,mmasaki/trove,zhujzhuo/openstack-trove,cp16net/trove,hplustree/trove,openstack/trove,fabian4/trove,mmasaki/trove,openstack/trove | trove/db/sqlalchemy/migrate_repo/versions/014_update_instance_flavor_id.py | trove/db/sqlalchemy/migrate_repo/versions/014_update_instance_flavor_id.py | # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import Integer
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
def upgrade(migrate_engine):
    """Change instances.flavor_id from a string column to an integer."""
    meta = MetaData()
    meta.bind = migrate_engine

    # pgsql <= 8.3 was lax about char->other casting but this was tightened up
    # in 8.4+. We now have to specify the USING clause for the cast to succeed.
    # NB: The generated sqlalchemy query doesn't support this, so this override
    # is needed.
    if migrate_engine.name == 'postgresql':
        # Raw DDL because sqlalchemy-migrate's alter() cannot emit USING.
        migrate_engine.execute('ALTER TABLE instances ALTER COLUMN flavor_id '
                               'TYPE INTEGER USING flavor_id::integer')
    else:
        instances = Table('instances', meta, autoload=True)
        #modify column
        instances.c.flavor_id.alter(type=Integer())
def downgrade(migrate_engine):
    """Revert instances.flavor_id back to a 36-character string column."""
    meta = MetaData()
    meta.bind = migrate_engine

    # int->char casts in pgsql still work fine without any USING clause,
    # so downgrade is not affected.

    # modify column:
    instances = Table('instances', meta, autoload=True)
    instances.c.flavor_id.alter(type=String(36))
| # Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from sqlalchemy.schema import MetaData
from trove.db.sqlalchemy.migrate_repo.schema import Integer
from trove.db.sqlalchemy.migrate_repo.schema import String
from trove.db.sqlalchemy.migrate_repo.schema import Table
def upgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
instances = Table('instances', meta, autoload=True)
#modify column
instances.c.flavor_id.alter(type=Integer())
def downgrade(migrate_engine):
meta = MetaData()
meta.bind = migrate_engine
# modify column:
instances = Table('instances', meta, autoload=True)
instances.c.flavor_id.alter(type=String(36))
| apache-2.0 | Python |
dbe1ac7fda9188e59479ff4716141651d627f76c | Fix cheroot.test.test_errors doc spelling | cherrypy/cheroot | cheroot/test/test_errors.py | cheroot/test/test_errors.py | """Test suite for ``cheroot.errors``."""
import pytest
from cheroot import errors
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS
@pytest.mark.parametrize(
'err_names,err_nums',
(
(('', 'some-nonsense-name'), []),
(
(
'EPROTOTYPE', 'EAGAIN', 'EWOULDBLOCK',
'WSAEWOULDBLOCK', 'EPIPE',
),
(91, 11, 32) if IS_LINUX else
(32, 35, 41) if IS_MACOS else
(32, 10041, 11, 10035) if IS_WINDOWS else
(),
),
),
)
def test_plat_specific_errors(err_names, err_nums):
    """Test that ``plat_specific_errors`` gets correct error numbers list."""
    # Unknown names must be dropped; known names map to the current
    # platform's errno values (parametrized per-OS in the decorator above).
    actual_err_nums = errors.plat_specific_errors(*err_names)
    assert len(actual_err_nums) == len(err_nums)
    # Order is not part of the contract, so compare as sorted lists.
    assert sorted(actual_err_nums) == sorted(err_nums)
| """Test suite for ``cheroot.errors``."""
import pytest
from cheroot import errors
from .._compat import IS_LINUX, IS_MACOS, IS_WINDOWS
@pytest.mark.parametrize(
'err_names,err_nums',
(
(('', 'some-nonsense-name'), []),
(
(
'EPROTOTYPE', 'EAGAIN', 'EWOULDBLOCK',
'WSAEWOULDBLOCK', 'EPIPE',
),
(91, 11, 32) if IS_LINUX else
(32, 35, 41) if IS_MACOS else
(32, 10041, 11, 10035) if IS_WINDOWS else
(),
),
),
)
def test_plat_specific_errors(err_names, err_nums):
"""Test that plat_specific_errors retrieves correct err num list."""
actual_err_nums = errors.plat_specific_errors(*err_names)
assert len(actual_err_nums) == len(err_nums)
assert sorted(actual_err_nums) == sorted(err_nums)
| bsd-3-clause | Python |
423a15d7c8841b40bddbd129b2abfb1135f0b7c0 | fix date parsing in logsearch | hs-jenkins-bot/Singularity,HubSpot/Singularity,grepsr/Singularity,hs-jenkins-bot/Singularity,grepsr/Singularity,HubSpot/Singularity,grepsr/Singularity,HubSpot/Singularity,grepsr/Singularity,andrhamm/Singularity,andrhamm/Singularity,grepsr/Singularity,HubSpot/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,grepsr/Singularity,andrhamm/Singularity,hs-jenkins-bot/Singularity | scripts/logfetch/search.py | scripts/logfetch/search.py | import os
import re
import sys
import fnmatch
import logfetch_base
from termcolor import colored
def find_cached_logs(args):
    """Return paths under ``args.dest`` whose names match the request and
    whose embedded timestamp (if any) falls inside the requested date range.
    """
    matching_logs = []
    # Glob pattern derived from taskId / requestId / deployId / logtype.
    log_fn_match = get_matcher(args)
    for filename in os.listdir(args.dest):
        if fnmatch.fnmatch(filename, log_fn_match) and in_date_range(args, filename):
            if args.verbose:
                sys.stderr.write(colored('Including log {0}\n'.format(filename), 'magenta'))
            matching_logs.append('{0}/{1}'.format(args.dest, filename))
        else:
            if args.verbose:
                sys.stderr.write(colored('Excluding log {0}, not in date range\n'.format(filename), 'magenta'))
    return matching_logs
def in_date_range(args, filename):
    """True when the filename's dash-delimited 13-digit timestamp is in range.

    Filenames with no parsable timestamp are always considered in range.
    """
    # Log names embed a "-<epoch millis>-" token; use the last occurrence.
    timestamps = re.findall(r"-\d{13}-", filename)
    if timestamps:
        # Strip the dashes, then drop the final three digits (millis -> secs)
        # before delegating the range check.
        return logfetch_base.is_in_date_range(args, int(str(timestamps[-1]).replace("-", "")[0:-3]))
    else:
        return True
def get_matcher(args):
    """Build the fnmatch glob used to select cached log files for ``args``."""
    # The logtype is only folded into the glob when the naming scheme
    # actually embeds the filename.
    with_logtype = 'filename' in args.file_pattern and args.logtype
    if args.taskId:
        prefix = args.taskId
    elif args.deployId and args.requestId:
        prefix = '{0}-{1}'.format(args.requestId, args.deployId)
    else:
        prefix = args.requestId
    if with_logtype:
        return '{0}*{1}*'.format(prefix, args.logtype)
    return '{0}*'.format(prefix)
return '{0}*'.format(args.requestId) | import os
import re
import sys
import fnmatch
import logfetch_base
from termcolor import colored
def find_cached_logs(args):
matching_logs = []
log_fn_match = get_matcher(args)
for filename in os.listdir(args.dest):
if fnmatch.fnmatch(filename, log_fn_match) and in_date_range(args, filename):
if args.verbose:
sys.stderr.write(colored('Including log {0}\n'.format(filename), 'magenta'))
matching_logs.append('{0}/{1}'.format(args.dest, filename))
else:
if args.verbose:
sys.stderr.write(colored('Excluding log {0}, not in date range\n'.format(filename), 'magenta'))
return matching_logs
def in_date_range(args, filename):
timestamps = re.findall(r"\d{13}", filename)
if timestamps:
return logfetch_base.is_in_date_range(args, int(str(timestamps[-1])[0:-3]))
else:
return True
def get_matcher(args):
if args.taskId:
if 'filename' in args.file_pattern and args.logtype:
return '{0}*{1}*'.format(args.taskId, args.logtype)
else:
return '{0}*'.format(args.taskId)
elif args.deployId and args.requestId:
if 'filename' in args.file_pattern and args.logtype:
return '{0}-{1}*{2}*'.format(args.requestId, args.deployId, args.logtype)
else:
return '{0}-{1}*'.format(args.requestId, args.deployId)
else:
if 'filename' in args.file_pattern and args.logtype:
return '{0}*{1}*'.format(args.requestId, args.logtype)
else:
return '{0}*'.format(args.requestId) | apache-2.0 | Python |
fc1d468d6602022405d4959ea8d12c825a1916f0 | Add AuthToken model | Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/fbxnano,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters,Kromey/akwriters | passwordless/models.py | passwordless/models.py | from datetime import timedelta
import uuid
from django.db import models
from django.utils import timezone
# Create your models here.
class User(models.Model):
    """
    User model

    This User model eschews passwords, relying instead on emailed OTP tokens.
    """
    username = models.CharField(max_length=30, unique=True)
    # Email may be absent (null allowed) until the user supplies one.
    email = models.EmailField(null=True)
    # Accounts start inactive until activated.
    is_active = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    date_joined = models.DateTimeField(auto_now_add=True)

    @property
    def is_authenticated(self):
        # Used to tell authenticated Users from anonymous ones
        return True

    @property
    def is_anonymous(self):
        # This is not an anonymous user
        return False

    def __str__(self):
        return self.username
class AnonymousUser:
    """Stand-in user object for sessions that have not logged in.

    Mirrors the public attributes of the User model with inert defaults so
    callers can treat both uniformly.
    """

    username = ''
    email = None
    is_active = False
    is_superuser = False
    date_joined = None

    @property
    def is_authenticated(self):
        """An anonymous session is never authenticated."""
        return False

    @property
    def is_anonymous(self):
        """This object always represents an anonymous visitor."""
        return True

    def __str__(self):
        return "Anonymous User"
def make_token():
    """
    Generate a random token suitable for activation/confirmation via email

    A hex-encoded random UUID has plenty of entropy to be secure enough for
    our needs.
    """
    random_id = uuid.uuid4()
    return random_id.hex
class AuthToken(models.Model):
    """
    OTP Token for passwordless authentication
    """
    # One token per user; the user reference doubles as the primary key.
    user = models.OneToOneField(User, primary_key=True)
    token = models.CharField(max_length=40, default=make_token)
    date_sent = models.DateTimeField(default=timezone.now)

    # Tokens are honoured for this many hours after date_sent.
    _expiration_hours = 24

    @property
    def expiration_date(self):
        # Moment after which the token stops validating.
        return self.date_sent + timedelta(hours=self._expiration_hours)

    @property
    def is_valid(self):
        # Valid while the expiration moment has not yet passed.
        return self.expiration_date >= timezone.now()
| from django.db import models
# Create your models here.
class User(models.Model):
"""
User model
This User model eschews passwords, relying instead on emailed OTP tokens.
"""
username = models.CharField(max_length=30, unique=True)
email = models.EmailField(null=True)
is_active = models.BooleanField(default=False)
is_superuser = models.BooleanField(default=False)
date_joined = models.DateTimeField(auto_now_add=True)
@property
def is_authenticated(self):
#Used to tell authenticated Users from anonymous ones
return True
@property
def is_anonymous(self):
#This is not an anonymous user
return False
def __str__(self):
return self.username
class AnonymousUser:
"""
An object to represent an anonymous/unauthenticated user
"""
username = ''
email = None
is_active = False
is_superuser = False
date_joined = None
@property
def is_authenticated(self):
#Anonymous sessions are not authenticated
return False
@property
def is_anonymous(self):
return True
def __str__(self):
return "Anonymous User"
| mit | Python |
5114bf3960b944c193c37ef8ecbcac50ae098d02 | Add InvalidLengthError class | thombashi/pathvalidate | pathvalidate/_error.py | pathvalidate/_error.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
class NullNameError(ValueError):
    """Error signalling that the supplied name was empty."""
class InvalidCharError(ValueError):
    """Error signalling that a string contains one or more invalid characters."""
class InvalidLengthError(ValueError):
    """Error signalling that a string is too long or too short."""
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <[email protected]>
"""
from __future__ import absolute_import
from __future__ import unicode_literals
class NullNameError(ValueError):
"""
Raised when a name is empty.
"""
class InvalidCharError(ValueError):
"""
Raised when includes invalid character(s) within a string.
"""
| mit | Python |
042edce052d5307fff8dfbce8c08b72fb72af7f1 | Remove some noise | okfn/ckanext-groupadmin,okfn/ckanext-groupadmin,okfn/ckanext-groupadmin | ckanext/groupadmin/authz.py | ckanext/groupadmin/authz.py | '''This module monkey patches functions in ckan/authz.py and replaces the
default roles with custom roles and decorates
has_user_permission_for_group_org_org to allow a GroupAdmin to admin groups.
GroupAdmins can manage all organizations/groups, but have no other sysadmin
powers.
'''
from ckan import authz, model
from ckan.plugins import toolkit
from ckanext.groupadmin.model import GroupAdmin
authz.ROLE_PERMISSIONS.update({'group_admin': ['read', 'manage_group']})
def _trans_role_group_admin():
    # Translatable display label for the custom 'group_admin' role.
    return toolkit._('Group Admin')

# Monkey patch so CKAN can render the new role's label.
authz._trans_role_group_admin = _trans_role_group_admin
def is_group_admin_decorator(method):
    """Wrap has_user_permission_for_group_or_org so GroupAdmins always pass."""
    def decorate_has_user_permission_for_group_or_org(group_id, user_name,
                                                      permission):
        user_id = authz.get_user_id_for_username(user_name, allow_none=True)
        if not user_id:
            return False
        # Global group admins are granted every group/org permission.
        if GroupAdmin.is_user_group_admin(model.Session, user_id):
            return True
        # Otherwise fall back to CKAN's stock permission check.
        return method(group_id, user_name, permission)
    return decorate_has_user_permission_for_group_or_org

# Install the wrapper over CKAN's built-in check.
authz.has_user_permission_for_group_or_org = is_group_admin_decorator(
    authz.has_user_permission_for_group_or_org)
| '''This module monkey patches functions in ckan/authz.py and replaces the
default roles with custom roles and decorates
has_user_permission_for_group_org_org to allow a GroupAdmin to admin groups.
GroupAdmins can manage all organizations/groups, but have no other sysadmin
powers.
'''
from ckan import authz, model
from ckan.common import OrderedDict
from ckan.plugins import toolkit
from ckanext.groupadmin.model import GroupAdmin
import logging
log = logging.getLogger(__name__)
old_auth_roles = authz.ROLE_PERMISSIONS
authz.ROLE_PERMISSIONS.update({'group_admin': ['read', 'manage_group']})
log.info(authz.ROLE_PERMISSIONS)
def _trans_role_group_admin():
return toolkit._('Group Admin')
authz._trans_role_group_admin = _trans_role_group_admin
def is_group_admin_decorator(method):
def decorate_has_user_permission_for_group_or_org(group_id, user_name,
permission):
user_id = authz.get_user_id_for_username(user_name, allow_none=True)
if not user_id:
return False
if GroupAdmin.is_user_group_admin(model.Session, user_id):
return True
return method(group_id, user_name, permission)
return decorate_has_user_permission_for_group_or_org
authz.has_user_permission_for_group_or_org = is_group_admin_decorator(
authz.has_user_permission_for_group_or_org)
| agpl-3.0 | Python |
afb400e16c1335531f259218a8b9937de48644e9 | Update stream health health api url | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon/checks/streams.py | polyaxon/checks/streams.py | from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
    """Health check that probes the streams (websocket) service."""

    @classmethod
    def run(cls):
        # GET the service's /_health endpoint and translate the HTTP status
        # into a check Result.
        response = safe_request('{}/_health'.format(get_settings_ws_api_url()), 'GET')
        status_code = response.status_code
        if status_code == 200:
            result = Result()
        else:
            # Anything other than 200 is reported at error severity.
            result = Result(message='Service is not healthy, response {}'.format(status_code),
                            severity=Result.ERROR)
        return {'STREAMS': result}
| from checks.base import Check
from checks.results import Result
from libs.api import get_settings_ws_api_url
from libs.http import safe_request
class StreamsCheck(Check):
@classmethod
def run(cls):
response = safe_request(get_settings_ws_api_url(), 'GET')
status_code = response.status_code
if status_code == 200:
result = Result()
else:
result = Result(message='Service is not healthy, response {}'.format(status_code),
severity=Result.ERROR)
return {'STREAMS': result}
| apache-2.0 | Python |
35f9f3b3a1ca9174194975e5281682c2712b653f | add get_absolute_url to article categories too | GISAElkartea/bidasoamedia,GISAElkartea/bidasoamedia,GISAElkartea/bidasoamedia | project/articles/models.py | project/articles/models.py | from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
from markitup.fields import MarkupField
from autoslug import AutoSlugField
from sorl.thumbnail import ImageField
class Category(models.Model):
    """Article category; the slug is auto-generated from the name."""

    class Meta:
        verbose_name = _('Category')
        verbose_name_plural = _('Categories')

    name = models.CharField(max_length=25, verbose_name=_('name'))
    slug = AutoSlugField(populate_from='name', unique=True)

    def __unicode__(self):
        return self.name

    @models.permalink
    def get_absolute_url(self):
        # Reverses the 'articles:category' URL using this category's slug.
        return 'articles:category', (), {'slug': self.slug}
class ArticleManager(models.Manager):
    """Manager adding a queryset of already-published articles."""

    def published(self):
        # Articles whose publication date is not in the future.
        q = self.get_query_set()
        return q.filter(pub_date__lte=now())
class Article(models.Model):
    """Article with markup body, optional image and categories.

    Listed newest-first; visibility is governed by ArticleManager.published().
    """

    objects = ArticleManager()

    class Meta:
        ordering = ('-pub_date',)
        verbose_name = _('Article')
        verbose_name_plural = _('Articles')

    title = models.CharField(max_length=100, verbose_name=_('title'))
    description = MarkupField(blank=True, verbose_name=_('description'),
                              help_text=_('populated from body if not given'))
    body = MarkupField(verbose_name=_('body'))
    image = ImageField(blank=True, upload_to='images',
                       verbose_name=_('image'))
    # Defaults to now; a future date keeps the article out of published().
    pub_date = models.DateTimeField(default=now,
                                    verbose_name=_('publication date'))
    categories = models.ManyToManyField(Category, blank=True, null=True,
                                        verbose_name=_('categories'))
    slug = AutoSlugField(populate_from='title', unique=True)

    def __unicode__(self):
        return self.title

    @models.permalink
    def get_absolute_url(self):
        # Reverses 'articles:detail' with this article's slug.
        return 'articles:detail', (), {'slug': self.slug}
| from django.db import models
from django.utils.translation import ugettext as _
from django.utils.timezone import now
from markitup.fields import MarkupField
from autoslug import AutoSlugField
from sorl.thumbnail import ImageField
class Category(models.Model):
class Meta:
verbose_name = _('Category')
verbose_name_plural = _('Categories')
name = models.CharField(max_length=25, verbose_name=_('name'))
slug = AutoSlugField(populate_from='name', unique=True)
def __unicode__(self):
return self.name
class ArticleManager(models.Manager):
def published(self):
q = self.get_query_set()
return q.filter(pub_date__lte=now())
class Article(models.Model):
objects = ArticleManager()
class Meta:
ordering = ('-pub_date',)
verbose_name = _('Article')
verbose_name_plural = _('Articles')
title = models.CharField(max_length=100, verbose_name=_('title'))
description = MarkupField(blank=True, verbose_name=_('description'),
help_text=_('populated from body if not given'))
body = MarkupField(verbose_name=_('body'))
image = ImageField(blank=True, upload_to='images',
verbose_name=_('image'))
pub_date = models.DateTimeField(default=now,
verbose_name=_('publication date'))
categories = models.ManyToManyField(Category, blank=True, null=True,
verbose_name=_('categories'))
slug = AutoSlugField(populate_from='title', unique=True)
def __unicode__(self):
return self.title
@models.permalink
def get_absolute_url(self):
return 'articles:detail', (), {'slug': self.slug}
| agpl-3.0 | Python |
edb04d8e0ae03c9244b7d934fd713efbb94d5a58 | Add api url to album and link | williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,opps/opps,williamroot/opps,opps/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps | opps/api/urls.py | opps/api/urls.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post, Album, Link
from .conf import settings
# Single tastypie Api instance exposing the content resources under the
# configured API name.
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
_api.register(Album())
_api.register(Link())

urlpatterns = patterns(
    '',
    # Mount all registered resources at the URL root.
    url(r'^', include(_api.urls)),
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, url, include
from tastypie.api import Api
from opps.containers.api import Container
from opps.articles.api import Post
from .conf import settings
_api = Api(api_name=settings.OPPS_API_NAME)
_api.register(Container())
_api.register(Post())
urlpatterns = patterns(
'',
url(r'^', include(_api.urls)),
)
| mit | Python |
e82d477194393ff3142f6c25c5db4c7b7f2a98a5 | Call ConsoleViewer init | simpleai-team/simpleai,iamaziz/simpleai,printedheart/simpleai,emoron/simpleai,emoron/simpleai,printedheart/simpleai,matuu/simpleai,iamaziz/simpleai,printedheart/simpleai,simpleai-team/simpleai,simpleai-team/simpleai,emoron/simpleai,matuu/simpleai,iamaziz/simpleai,matuu/simpleai | simpleai/search/viewers.py | simpleai/search/viewers.py | # coding: utf-8
from os import path
from threading import Thread
from time import sleep
class DummyViewer(object):
    """No-op viewer: satisfies the viewer interface while reporting nothing.

    Used when a search should run without any progress output; subclasses
    override the hooks they care about.
    """

    def start(self):
        """Nothing to set up."""

    def new_iteration(self, fringe):
        """Ignore iteration events."""

    def chosen_node(self, node, is_goal):
        """Ignore node-selection events."""

    def expanded(self, node, successors):
        """Ignore expansion events."""
class ConsoleViewer(DummyViewer):
    """Viewer that prints search progress to stdout.

    With interactive=True (the default) it waits for Enter between events.
    NOTE: Python 2 code (print statement, raw_input).
    """
    def __init__(self, interactive=True):
        self.interactive = interactive

    def pause(self):
        # Block until the user presses Enter, but only in interactive mode.
        if self.interactive:
            raw_input('> press Enter ')

    def output(self, *args):
        # Stringify and space-join all arguments before printing.
        print ' '.join(map(str, args))

    def new_iteration(self, fringe):
        self.output(' **** New iteration ****')
        self.output(len(fringe), 'elements in fringe:', fringe)
        self.pause()

    def chosen_node(self, node, is_goal):
        self.output('Chosen node:', node)
        if is_goal:
            self.output('Is goal!')
        else:
            self.output('Not goal')
        self.pause()

    def expanded(self, node, successors):
        self.output('Expand:', node)
        self.output(len(successors), 'successors:', successors)
        self.pause()
class WebViewer(ConsoleViewer):
    """Viewer serving search progress over HTTP via bottle.

    '/' renders the accumulated events; '/next' resumes the paused search
    for one step and then redirects back to '/'.
    """
    def __init__(self, host='127.0.0.1', port=8000):
        super(WebViewer, self).__init__(interactive=True)
        self.host = host
        self.port = port
        self.paused = True
        self.events = []
        # HTML template shipped alongside this module.
        web_template_path = path.join(path.dirname(__file__), 'web_viewer.html')
        self.web_template = open(web_template_path).read()

    def start(self):
        # bottle is imported lazily so it is only required when this viewer
        # is actually used.
        from bottle import route, run
        route('/')(self.web_status)
        route('/next')(self.web_next)
        # Serve from a daemon thread so the search can run in the main one.
        t = Thread(target=run, kwargs=dict(host=self.host, port=self.port))
        t.daemon = True
        t.start()
        self.pause()

    def web_status(self):
        from bottle import template
        return template(self.web_template, events=self.events)

    def web_next(self):
        from bottle import redirect
        # Release the search loop, then wait until it pauses again.
        self.paused = False
        while not self.paused:
            sleep(0.1)
        redirect('/')

    def pause(self):
        # Busy-wait (0.1s polls) until a web request unpauses us.
        self.paused = True
        while self.paused:
            sleep(0.1)

    def output(self, *args):
        # Record instead of printing; web_status renders this list.
        self.events.append(' '.join(map(str, args)))
| # coding: utf-8
from os import path
from threading import Thread
from time import sleep
class DummyViewer(object):
def start(self):
pass
def new_iteration(self, fringe):
pass
def chosen_node(self, node, is_goal):
pass
def expanded(self, node, successors):
pass
class ConsoleViewer(DummyViewer):
def __init__(self, interactive=True):
self.interactive = interactive
def pause(self):
if self.interactive:
raw_input('> press Enter ')
def output(self, *args):
print ' '.join(map(str, args))
def new_iteration(self, fringe):
self.output(' **** New iteration ****')
self.output(len(fringe), 'elements in fringe:', fringe)
self.pause()
def chosen_node(self, node, is_goal):
self.output('Chosen node:', node)
if is_goal:
self.output('Is goal!')
else:
self.output('Not goal')
self.pause()
def expanded(self, node, successors):
self.output('Expand:', node)
self.output(len(successors), 'successors:', successors)
self.pause()
class WebViewer(ConsoleViewer):
def __init__(self, host='127.0.0.1', port=8000):
self.host = host
self.port = port
self.paused = True
self.events = []
web_template_path = path.join(path.dirname(__file__), 'web_viewer.html')
self.web_template = open(web_template_path).read()
def start(self):
from bottle import route, run
route('/')(self.web_status)
route('/next')(self.web_next)
t = Thread(target=run, kwargs=dict(host=self.host, port=self.port))
t.daemon = True
t.start()
self.pause()
def web_status(self):
from bottle import template
return template(self.web_template, events=self.events)
def web_next(self):
from bottle import redirect
self.paused = False
while not self.paused:
sleep(0.1)
redirect('/')
def pause(self):
self.paused = True
while self.paused:
sleep(0.1)
def output(self, *args):
self.events.append(' '.join(map(str, args)))
| mit | Python |
cfa8b88e3d86e560415260eb596dd3bbdab52736 | Fix test of auto_backup_download | VitalPet/addons-onestein,VitalPet/addons-onestein,VitalPet/addons-onestein | auto_backup_download/tests/test_auto_backup_download.py | auto_backup_download/tests/test_auto_backup_download.py | # -*- coding: utf-8 -*-
# Copyright 2017 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.tests import common
from odoo.exceptions import Warning
class TestAutoBackupDownload(common.TransactionCase):
def test_01_create_not_existing(self):
backup_dir = self.env.ref(
'auto_backup_download.default_backup_directory')
# test method get_dir()
with self.assertRaises(Warning):
backup_dir.get_dir()
# test reload list of directory
with self.assertRaises(Warning):
backup_dir.reload()
def test_02_create_existing(self):
backup_dir = self.env.ref(
'auto_backup_download.default_backup_directory')
self.env['db.backup'].create({
'name': 'Test Backup 1',
'folder': '/tmp'
})
# test method get_dir()
full_dir = backup_dir.get_dir()
self.assertEqual(full_dir[-1], '/')
# test computed field file_ids
self.assertGreaterEqual(len(backup_dir.file_ids), 0)
# test count list of directory
self.assertEqual(len(full_dir.file_ids), full_dir.file_count)
# test reload list of directory
full_dir.reload()
self.assertEqual(len(full_dir.file_ids), full_dir.file_count)
| # -*- coding: utf-8 -*-
# Copyright 2017 Onestein (<http://www.onestein.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo.tests import common
from odoo.exceptions import Warning
class TestAutoBackupDownload(common.TransactionCase):
def test_01_create_not_existing(self):
backup_dir = self.env.ref(
'auto_backup_download.default_backup_directory')
# test method get_dir()
with self.assertRaises(Warning):
backup_dir.get_dir()
# test computed field file_ids
self.assertEqual(len(backup_dir.file_ids), 0)
# test count list of directory
self.assertEqual(len(backup_dir.file_ids), backup_dir.file_count)
# test reload list of directory
with self.assertRaises(Warning):
backup_dir.reload()
self.assertEqual(len(backup_dir.file_ids), 0)
self.assertEqual(len(backup_dir.file_ids), backup_dir.file_count)
def test_02_create_existing(self):
backup_dir = self.env.ref(
'auto_backup_download.default_backup_directory')
self.env['db.backup'].create({
'name': 'Test Backup 1',
'folder': '/tmp'
})
# test method get_dir()
full_dir = backup_dir.get_dir()
self.assertEqual(full_dir[-1], '/')
# test computed field file_ids
self.assertGreaterEqual(len(backup_dir.file_ids), 0)
# test count list of directory
self.assertEqual(len(full_dir.file_ids), full_dir.file_count)
# test reload list of directory
full_dir.reload()
self.assertEqual(len(full_dir.file_ids), full_dir.file_count)
| agpl-3.0 | Python |
c3996af1f7b201355d1cbcd6ef4c8fe420c8b67e | Fix lint | deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground,deniscostadsc/playground | solutions/uri/1028/1028.py | solutions/uri/1028/1028.py | def gcd(a, b):
while b > 0:
a, b = b, a % b
return a
# Read the number of test cases, then one "a b" pair per line and print
# gcd(a, b) for each.
n = int(input())

for line in range(n):
    a, b = map(int, input().split())
    print(gcd(a, b))
| import sys
def gcd(a, b):
while b > 0:
a, b = b, a % b
return a
n = int(input())
for line in range(n):
a, b = map(int, input().split())
print(gcd(a, b))
| mit | Python |
5fd1f7cbe9534a47c4dc837773f22f6f177fdcf5 | Update affineHacker: fixed imports and typo | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/CrackingCodesWithPython/Chapter15/affineHacker.py | books/CrackingCodesWithPython/Chapter15/affineHacker.py | # Affine Cipher Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
from books.CrackingCodesWithPython.pyperclip import copy
from books.CrackingCodesWithPython.Chapter14.affineCipher import decryptMessage, SYMBOLS, getKeyParts
from books.CrackingCodesWithPython.Chapter13.cryptomath import gcd
from books.CrackingCodesWithPython.Chapter11.detectEnglish import isEnglish
SILENT_MODE = False
def main():
    """Brute-force the hard-coded ciphertext; copy any recovered plaintext."""
    # You might want to copy & paste this text from the source code at
    # https://www.nostarch.com/crackingcodes/.
    myMessage = """5QG9ol3La6QI93!xQxaia6faQL9QdaQG1!!axQARLa!!A
uaRLQADQALQG93!xQxaGaAfaQ1QX3o1RQARL9Qda!AafARuQLX1LQALQI1
iQX3o1RN"Q-5!1RQP36ARu"""

    hackedMessage = hackAffine(myMessage)

    if hackedMessage != None:
        # The plaintext is displayed on the screen. For the convenience of
        # the user, we copy the text of the code to the clipboard:
        print('Copying hacked message to clipboard:')
        print(hackedMessage)
        copy(hackedMessage)
    else:
        print('Failed to hack encryption.')
def hackAffine(message):
print('Hacking...')
# Python programs can be stopped at any time by pressing Ctrl-C (on
# Windows) or Ctrl-D (on macOS and Linux):
print('(Press Ctrl-C or Ctrl-D to quit at any time.)')
# Brute-force by looping through every possible key:
for key in range(len(SYMBOLS) ** 2):
keyA = getKeyParts(key)[0]
if gcd(keyA, len(SYMBOLS)) != 1:
continue
decryptedText = decryptMessage(key, message)
if not SILENT_MODE:
print('Tried Key %s... (%s)' % (key, decryptedText[:40]))
if isEnglish(decryptedText):
# Check with the user if the decrypted key has been found:
print()
print('Possible encryption hack:')
print('Key: %s' % (key))
print('Decrypted message: ' + decryptedText[:200])
print()
print('Enter D for done, or just press Enter to continue hacking:')
response = input('> ')
if response.strip().upper().startswith('D'):
return decryptedText
return None
# If affineHacker.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main() | # Affine Cipher Hacker
# https://www.nostarch.com/crackingcodes/ (BSD Licensed)
import pyperclip, affineCipher, detectEnglish, cryptomath
SILENT_MODE = False
def main():
# You might want to copy & paste this text from the source code at
# https://www.nostarch.com/crackingcodes/.
myMessage = """5QG9ol3La6QI93!xQxaia6faQL9QdaQG1!!axQARLa!!A
uaRLQADQALQG93!xQxaGaAfaQ1QX3o1RQARL9Qda!AafARuQLX1LQALQI1
iQX3o1RN"Q-5!1RQP36ARu"""
hackedMessage = hackAffine(myMessage)
if hackedMessage != None:
# The plaintext is displayed on the screen. For the convenience of
# the user, we copy the text of the code to the clipboard:
print('Copying hacked message to clipboard:')
print(hackedMessage)
pyperclip.copy(hackedMessage)
else:
print('Failed to hack encryption.')
def hackAffine(message):
print('Hacking...')
# Python programs can be stopped at any time by pressing Ctrl-C (on
# Windows) or Ctrl-D (on macOS and Linux):
print('(Press Ctrl-C or Ctrl-D to quit at any time.)')
# Brute-force by looping through every possible key:
for key in range(len(affineCipher.SYMBOLS) ** 2):
keyA = affineCipher.getKeyParts(key)[0]
if cryptomath.gcd(keyA, len(affineCipher.SYMBOLS)) ! = 1:
continue
decryptedText = affineCipher.decryptMessage(key, message)
if not SILENT_MODE:
print('Tried Key %s... (%s)' % (key, decryptedText[:40]))
if detectEnglish.isEnglish(decryptedText):
# Check with the user if the decrypted key has been found:
print()
print('Possible encryption hack:')
print('Key: %s' % (key))
print('Decrypted message: ' + decryptedText[:200])
print()
print('Enter D for done, or just press Enter to continue hacking:')
response = input('> ')
if response.strip().upper().startswith('D'):
return decryptedText
return None
# If affineHacker.py is run (instead of imported as a module), call
# the main() function:
if __name__ == '__main__':
main() | mit | Python |
d607de07ae3aaa2a245b8eb90cb42ca3e29f6e33 | add lambda sample | r569594043/PythonBeginner | 05.Function.py | 05.Function.py | #-*- encoding: utf-8 -*-
# Error
#def func():
def func():
pass
def func(num, num1=1, num2=2):
print(num, num1, num2)
func(1, 3, 4) # 1 3 4
func(5) # 5 1 2
# Error
#func()
def func(**args):
for k, v in args.items():
print('key: ' + k, 'value: ' + v)
for k in args.keys():
print('key: ' + k, 'value: ' + args[k])
func(name = "rxb", age = "24")
def func(name, age):
print('name: ' + name, 'age: ' + age)
people = {"name": "rxb", "age": "24"}
func(**people) # name: rxb age: 24
def func(num, *args):
print(num)
for a in args:
print(a)
func(1, 2, 3, 4, 5, 6)
def func(num, num1):
print(num, num1)
func(num1 = 2, num = 1) # 1 2
d = {
"num": 3,
"num1": 4
}
func(**d) # 3 4
t = (4, 5)
func(*t) # 4 5
def func():
'''
The documentation of the func
'''
print("func")
print(func.__doc__)
l = lambda num1, num2: num1 + num2
print(l(2, 3)) # 5
def func2(func, num1, num2):
return func(num1, num2)
def func(num1, num2):
return num1 + num2
print(func2(func, 3, 4)) # 7
print(func2(lambda a, b: a - b, 7, 4)) # 3 | #-*- encoding: utf-8 -*-
# Error
#def func():
def func():
pass
def func(num, num1=1, num2=2):
print(num, num1, num2)
func(1, 3, 4) # 1 3 4
func(5) # 5 1 2
# Error
#func()
def func(**args):
for k, v in args.items():
print('key: ' + k, 'value: ' + v)
for k in args.keys():
print('key: ' + k, 'value: ' + args[k])
func(name = "rxb", age = "24")
def func(name, age):
print('name: ' + name, 'age: ' + age)
people = {"name": "rxb", "age": "24"}
func(**people) # name: rxb age: 24
def func(num, *args):
print(num)
for a in args:
print(a)
func(1, 2, 3, 4, 5, 6)
def func(num, num1):
print(num, num1)
func(num1 = 2, num = 1) # 1 2
d = {
"num": 3,
"num1": 4
}
func(**d) # 3 4
t = (4, 5)
func(*t) # 4 5
def func():
'''
The documentation of the func
'''
print("func")
print(func.__doc__)
l = lambda num1, num2: num1 + num2
print(l(2, 3)) # 5
| apache-2.0 | Python |
b439017a21ac01ee7fda275753effaf5d103a120 | Change IP. | Scifabric/pybossa,geotagx/pybossa,geotagx/pybossa,Scifabric/pybossa,PyBossa/pybossa,PyBossa/pybossa | pybossa/signer/__init__.py | pybossa/signer/__init__.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2015 SciFabric LTD.
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from itsdangerous import URLSafeTimedSerializer
from werkzeug import generate_password_hash, check_password_hash
class Signer(object):
def __init__(self, app=None):
self.app = app
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
key = app.config['ITSDANGEROUSKEY']
self.signer = URLSafeTimedSerializer(key)
def loads(self, string, **kwargs):
return self.signer.loads(string, **kwargs)
def dumps(self, key, **kwargs):
return self.signer.dumps(key, **kwargs)
def generate_password_hash(self, password):
return generate_password_hash(password)
def check_password_hash(self, passwd_hash, password):
return check_password_hash(passwd_hash, password)
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from itsdangerous import URLSafeTimedSerializer
from werkzeug import generate_password_hash, check_password_hash
class Signer(object):
def __init__(self, app=None):
self.app = app
if app is not None: # pragma: no cover
self.init_app(app)
def init_app(self, app):
key = app.config['ITSDANGEROUSKEY']
self.signer = URLSafeTimedSerializer(key)
def loads(self, string, **kwargs):
return self.signer.loads(string, **kwargs)
def dumps(self, key, **kwargs):
return self.signer.dumps(key, **kwargs)
def generate_password_hash(self, password):
return generate_password_hash(password)
def check_password_hash(self, passwd_hash, password):
return check_password_hash(passwd_hash, password)
| agpl-3.0 | Python |
54d39aaf8c31a5827ae7338fefe7a1d6a19d52cf | Add missing docstring. | 40323230/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5 | pyslvs_ui/info/__init__.py | pyslvs_ui/info/__init__.py | # -*- coding: utf-8 -*-
"""'info' module contains Pyslvs program information."""
__all__ = [
'KERNELS',
'SYS_INFO',
'ARGUMENTS',
'HAS_SLVS',
'Kernel',
'check_update',
'PyslvsAbout',
'html',
'logger',
'XStream',
'size_format',
]
__author__ = "Yuan Chang"
__copyright__ = "Copyright (C) 2016-2020"
__license__ = "AGPL"
__email__ = "[email protected]"
from .info import KERNELS, SYS_INFO, ARGUMENTS, HAS_SLVS, Kernel
from .about import PyslvsAbout, html, check_update
from .logging_handler import logger, XStream
def size_format(num: float) -> str:
"""Calculate file size."""
if num <= 0:
return "0 B"
for u in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
if abs(num) < 1024:
return f"{num:3.1f} {u}B"
num /= 1024
return f"{num:.1f} YB"
| # -*- coding: utf-8 -*-
"""'info' module contains Pyslvs program information."""
__all__ = [
'KERNELS',
'SYS_INFO',
'ARGUMENTS',
'HAS_SLVS',
'Kernel',
'check_update',
'PyslvsAbout',
'html',
'logger',
'XStream',
'size_format',
]
__author__ = "Yuan Chang"
__copyright__ = "Copyright (C) 2016-2020"
__license__ = "AGPL"
__email__ = "[email protected]"
from .info import KERNELS, SYS_INFO, ARGUMENTS, HAS_SLVS, Kernel
from .about import PyslvsAbout, html, check_update
from .logging_handler import logger, XStream
def size_format(num: float) -> str:
if num <= 0:
return "0 B"
for u in ('', 'K', 'M', 'G', 'T', 'P', 'E', 'Z'):
if abs(num) < 1024:
return f"{num:3.1f} {u}B"
num /= 1024
return f"{num:.1f} YB"
| agpl-3.0 | Python |
7b19611d30dfc9091823ae3d960ab2790dfe9cfc | Apply a blur filter automatically for each detected face | symisc/pixlab,symisc/pixlab,symisc/pixlab | python/blur_human_faces.py | python/blur_human_faces.py | import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the facial coordinates for each detected face untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| import requests
import json
imgUrl = 'https://pixlab.io/images/m3.jpg' # Target picture we want to blur any face on
# Detect all human faces in a given image via /facedetect first and blur all of them later via /mogrify.
# https://pixlab.io/cmd?id=facedetect and https://pixlab.io/cmd?id=mogrify for additional information.
req = requests.get('https://api.pixlab.io/facedetect',params={
'img': imgUrl,
'key':'PIXLAB_API_KEY',
})
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
exit();
total = len(reply['faces']) # Total detected faces
print(str(total)+" faces were detected")
if total < 1:
# No faces were detected, exit immediately
exit()
# Pass the detected faces coordinates untouched to mogrify
coordinates = reply['faces']
# Call mogrify & blur the face(s)
req = requests.post('https://api.pixlab.io/mogrify',headers={'Content-Type':'application/json'},data=json.dumps({
'img': imgUrl,
'key':'PIXLAB_API_KEY',
'cord': coordinates # The field of interest
}))
reply = req.json()
if reply['status'] != 200:
print (reply['error'])
else:
print ("Blurred Picture URL: "+ reply['ssl_link'])
| bsd-2-clause | Python |
8a950dbfb1281216ed270bf6363c7a71d857133f | Make datetime and time +00:00 handling behavior consistent. Fix #3. | pydanny/webhooks | webhooks/encoders.py | webhooks/encoders.py | """
Serialize data to/from JSON
Inspired by https://github.com/django/django/blob/master/django/core/serializers/json.py
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import decimal
import json
class WebHooksJSONEncoder(json.JSONEncoder):
"""
A JSONEncoder that can encode date/time and decimal types.
"""
def default(self, o):
# See "Date Time String Format" in the ECMA-262 specification.
if isinstance(o, datetime.datetime):
r = o.isoformat()
if o.microsecond:
r = r[:23] + r[26:]
if r.endswith('+00:00'):
r = r[:-6] + 'Z'
return r
elif isinstance(o, datetime.date):
return o.isoformat()
elif isinstance(o, datetime.time):
r = o.isoformat()
if o.microsecond:
r = r[:12]
if r.endswith('+00:00'):
r = r[:-6] + 'Z'
return r
elif isinstance(o, decimal.Decimal):
return str(o)
else:
return super(WebHooksJSONEncoder, self).default(o)
| """
Serialize data to/from JSON
Inspired by https://github.com/django/django/blob/master/django/core/serializers/json.py
"""
# Avoid shadowing the standard library json module
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import decimal
import json
class WebHooksJSONEncoder(json.JSONEncoder):
"""
A JSONEncoder that can encode date/time and decimal types.
"""
def default(self, o):
# See "Date Time String Format" in the ECMA-262 specification.
if isinstance(o, datetime.datetime):
r = o.isoformat()
if o.microsecond:
r = r[:23] + r[26:]
if r.endswith('+00:00'):
r = r[:-6] + 'Z'
return r
elif isinstance(o, datetime.date):
return o.isoformat()
elif isinstance(o, datetime.time):
r = o.isoformat()
if o.microsecond:
r = r[:12]
return r
elif isinstance(o, decimal.Decimal):
return str(o)
else:
return super(WebHooksJSONEncoder, self).default(o)
| bsd-3-clause | Python |
8120b641ccb66b088fa70c028e5be542bf561dfd | Update lex_attrs.py (#5608) | spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,explosion/spaCy,honnibal/spaCy | spacy/lang/hy/lex_attrs.py | spacy/lang/hy/lex_attrs.py | # coding: utf8
from __future__ import unicode_literals
from ...attrs import LIKE_NUM
_num_words = [
"զրո",
"մեկ",
"երկու",
"երեք",
"չորս",
"հինգ",
"վեց",
"յոթ",
"ութ",
"ինը",
"տասը",
"տասնմեկ",
"տասներկու",
"տասներեք",
"տասնչորս",
"տասնհինգ",
"տասնվեց",
"տասնյոթ",
"տասնութ",
"տասնինը",
"քսան" "երեսուն",
"քառասուն",
"հիսուն",
"վաթսուն",
"յոթանասուն",
"ութսուն",
"իննսուն",
"հարյուր",
"հազար",
"միլիոն",
"միլիարդ",
"տրիլիոն",
"քվինտիլիոն",
]
def like_num(text):
if text.startswith(("+", "-", "±", "~")):
text = text[1:]
text = text.replace(",", "").replace(".", "")
if text.isdigit():
return True
if text.count("/") == 1:
num, denom = text.split("/")
if num.isdigit() and denom.isdigit():
return True
if text.lower() in _num_words:
return True
return False
LEX_ATTRS = {LIKE_NUM: like_num}
| # coding: utf8
from __future__ import unicode_literals
from ...attrs import LIKE_NUM
_num_words = [
"զրօ",
"մէկ",
"երկու",
"երեք",
"չորս",
"հինգ",
"վեց",
"յոթ",
"ութ",
"ինը",
"տասը",
"տասնմեկ",
"տասներկու",
"տասներեք",
"տասնչորս",
"տասնհինգ",
"տասնվեց",
"տասնյոթ",
"տասնութ",
"տասնինը",
"քսան" "երեսուն",
"քառասուն",
"հիսուն",
"վաթցսուն",
"յոթանասուն",
"ութսուն",
"ինիսուն",
"հարյուր",
"հազար",
"միլիոն",
"միլիարդ",
"տրիլիոն",
"քվինտիլիոն",
]
def like_num(text):
if text.startswith(("+", "-", "±", "~")):
text = text[1:]
text = text.replace(",", "").replace(".", "")
if text.isdigit():
return True
if text.count("/") == 1:
num, denom = text.split("/")
if num.isdigit() and denom.isdigit():
return True
if text.lower() in _num_words:
return True
return False
LEX_ATTRS = {LIKE_NUM: like_num}
| mit | Python |
cd8024c762bf5bae8caf210b9224548bee55ee04 | Bump version to 6.1.5a3 | platformio/platformio-core,platformio/platformio-core | platformio/__init__.py | platformio/__init__.py | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 1, "5a3")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40400.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
| # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 1, "5a2")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "[email protected]"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40400.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
| apache-2.0 | Python |
12c22ebdf3c7e84f5f9c6b32329f343c8317f11b | Correct comments | owainkenwayucl/stats-plus-plus,owainkenwayucl/stats-plus-plus,owainkenwayucl/stats-plus-plus,owainkenwayucl/stats-plus-plus | python/dbtools/__init__.py | python/dbtools/__init__.py | '''
This library provides database access routines.
It's based on the re-usable parts of tailoredstats.
Owain Kenway
'''
'''
Generally abstract away DB queries, such that all complexity is replaced with:
dbtools.dbquery(db, query)
'''
def dbquery(db, query, mysqlhost="mysql.external.legion.ucl.ac.uk", mysqlport = 3306 ):
from auth.secrets import Secrets
import MySQLdb # Note need mysqlclient package from pypi
# Set up our authentication.
s = Secrets()
# Connect to database.
conn = MySQLdb.Connect(host=mysqlhost,
port=mysqlport,
user=s.dbuser,
passwd=s.dbpasswd,
db=db)
# Set up cursor.
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
print(">>> DEBUG SQL query: " + query)
# Run query.
cursor.execute(query)
# Dump output.
output = cursor.fetchall()
# Tidy up.
cursor.close()
conn.close()
return output
# Generate a valid SQL list from a python one.
def sqllist(pylist):
sqlstr="("
if type(pylist) == str:
sqlstr = sqlstr + "'" + pylist + "')"
else:
for a in pylist:
if sqlstr!= "(":
sqlstr = sqlstr + ", "
sqlstr = sqlstr + "'" + a + "'"
sqlstr = sqlstr + ")"
return sqlstr
# Build owner limit string for queries.
def onlimits(users="*"):
query = ""
# if users != * then construct a node list.
if users != "*":
userlist = sqllist(users)
query = query + " and owner in " + userlist
return query
| '''
This library provides database access routines.
It's based on the re-usable parts of tailoredstats.
Owain Kenway
'''
'''
Generally abstract away DB queries, such that all complexity is replaced with:
dbtools.dbquery(db, query)
'''
def dbquery(db, query, mysqlhost="mysql.external.legion.ucl.ac.uk", mysqlport = 3306 ):
from auth.secrets import Secrets
import MySQLdb # Note need mysqlclient package from pypi
# Set up our authentication.
s = Secrets()
# Connect to database.
conn = MySQLdb.Connect(host=mysqlhost,
port=mysqlport,
user=s.dbuser,
passwd=s.dbpasswd,
db=db)
# Set up cursor.
cursor = conn.cursor(MySQLdb.cursors.DictCursor)
print(">>> DEBUG SQL query: " + query)
# Run query.
cursor.execute(query)
# Dump output.
output = cursor.fetchall()
# Tidy up.
cursor.close()
conn.close()
return output
# Generate a valid SQL list from a python one.
def sqllist(pylist):
sqlstr="("
if type(pylist) == str:
sqlstr = sqlstr + "'" + pylist + "')"
else:
for a in pylist:
if sqlstr!= "(":
sqlstr = sqlstr + ", "
sqlstr = sqlstr + "'" + a + "'"
sqlstr = sqlstr + ")"
return sqlstr
# Build owner/node limit string for queries.
def onlimits(users="*"):
query = ""
# if users != * then construct a node list.
if users != "*":
userlist = sqllist(users)
query = query + " and owner in " + userlist
return query
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.