commit
stringlengths 40
40
| subject
stringlengths 4
1.73k
| repos
stringlengths 5
127k
| old_file
stringlengths 2
751
| new_file
stringlengths 2
751
| new_contents
stringlengths 1
8.98k
| old_contents
stringlengths 0
6.59k
| license
stringclasses 13
values | lang
stringclasses 23
values |
---|---|---|---|---|---|---|---|---|
d134e9c461af2c9b67673aa97fc15a302dcbc58c
|
Add comments
|
PierreRust/beets,sadatay/beets,madmouser1/beets,krig/beets,mosesfistos1/beetbox,diego-plan9/beets,LordSputnik/beets,sampsyo/beets,ibmibmibm/beets,YetAnotherNerd/beets,mathstuf/beets,shamangeorge/beets,moodboom/beets,pdf/beets,dfc/beets,kareemallen/beets,LordSputnik/beets,shamangeorge/beets,tima/beets,pkess/beets,mathstuf/beets,bj-yinyan/beets,sampsyo/beets,lengtche/beets,LordSputnik/beets,sampsyo/beets,imsparsh/beets,Wen777/beets,moodboom/beets,drm00/beets,Andypsamp/CODjunit,parapente/beets,Dishwishy/beets,asteven/beets,Kraymer/beets,beetbox/beets,Andypsamp/CODfinalJUNIT,randybias/beets,Andypsamp/CODfinalJUNIT,randybias/beets,jcoady9/beets,swt30/beets,mathstuf/beets,SusannaMaria/beets,SusannaMaria/beets,artemutin/beets,bj-yinyan/beets,mried/beets,madmouser1/beets,m-urban/beets,jayme-github/beets,Freso/beets,sadatay/beets,kareemallen/beets,madmouser1/beets,ibmibmibm/beets,lengtche/beets,beetbox/beets,MyTunesFreeMusic/privacy-policy,bj-yinyan/beets,arabenjamin/beets,parapente/beets,ibmibmibm/beets,jayme-github/beets,kelvinhammond/beets,multikatt/beets,gabrielaraujof/beets,diego-plan9/beets,PierreRust/beets,pkess/beets,lightwang1/beets,drm00/beets,parapente/beets,mathstuf/beets,tima/beets,Dishwishy/beets,krig/beets,kelvinhammond/beets,ttsda/beets,marcuskrahl/beets,mosesfistos1/beetbox,mried/beets,Andypsamp/CODfinalJUNIT,artemutin/beets,pdf/beets,swt30/beets,jmwatte/beets,Wen777/beets,ttsda/beets,pkess/beets,gabrielaraujof/beets,untitaker/beets,shanemikel/beets,gabrielaraujof/beets,untitaker/beets,jackwilsdon/beets,jbaiter/beets,artemutin/beets,sadatay/beets,ruippeixotog/beets,randybias/beets,marcuskrahl/beets,arabenjamin/beets,arabenjamin/beets,m-urban/beets,asteven/beets,shamangeorge/beets,MyTunesFreeMusic/privacy-policy,Dishwishy/beets,jackwilsdon/beets,jcoady9/beets,jackwilsdon/beets,Kraymer/beets,asteven/beets,andremiller/beets,MyTunesFreeMusic/privacy-policy,jmwatte/beets,andremiller/beets,dfc/beets,SusannaMaria/beets,ruippeixotog/beets,jcoady9/beets,PierreRust/beets,tima/beets,ibmibmibm/beets,marcuskrahl/beets,Andypsamp/CODjunit,lightwang1/beets,pkess/beets,YetAnotherNerd/beets,xsteadfastx/beets,kelvinhammond/beets,kareemallen/beets,Freso/beets,sampsyo/beets,ttsda/beets,moodboom/beets,diego-plan9/beets,xsteadfastx/beets,Freso/beets,multikatt/beets,randybias/beets,swt30/beets,shamangeorge/beets,MyTunesFreeMusic/privacy-policy,m-urban/beets,mried/beets,ruippeixotog/beets,lightwang1/beets,gabrielaraujof/beets,shanemikel/beets,arabenjamin/beets,drm00/beets,Andypsamp/CODfinalJUNIT,drm00/beets,YetAnotherNerd/beets,swt30/beets,bj-yinyan/beets,jackwilsdon/beets,xsteadfastx/beets,Dishwishy/beets,ruippeixotog/beets,Wen777/beets,Kraymer/beets,imsparsh/beets,jmwatte/beets,diego-plan9/beets,moodboom/beets,lengtche/beets,marcuskrahl/beets,PierreRust/beets,andremiller/beets,imsparsh/beets,dfc/beets,multikatt/beets,multikatt/beets,tima/beets,Freso/beets,beetbox/beets,jbaiter/beets,SusannaMaria/beets,ttsda/beets,m-urban/beets,beetbox/beets,Andypsamp/CODjunit,artemutin/beets,sadatay/beets,Andypsamp/CODjunit,LordSputnik/beets,Andypsamp/CODjunit,madmouser1/beets,Kraymer/beets,kelvinhammond/beets,mosesfistos1/beetbox,asteven/beets,shanemikel/beets,krig/beets,untitaker/beets,dfc/beets,untitaker/beets,shanemikel/beets,jmwatte/beets,mosesfistos1/beetbox,Andypsamp/CODfinalJUNIT,lightwang1/beets,imsparsh/beets,kareemallen/beets,lengtche/beets,xsteadfastx/beets,parapente/beets,YetAnotherNerd/beets,mried/beets,jcoady9/beets
|
beetsplug/smartplaylist.py
|
beetsplug/smartplaylist.py
|
# This file is part of beets.
# Copyright 2013, Dang Mai <[email protected]>.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Generates smart playlists based on beets queries.
"""
from __future__ import print_function
from beets.plugins import BeetsPlugin
from beets import config, ui
from beets.util import normpath, syspath
import os
# Global variables so that smartplaylist can detect database changes and run
# only once before beets exits.
database_changed = False
library = None
def update_playlists(lib):
print("Updating smart playlists...")
playlists = config['smartplaylist']['playlists'].get(list)
playlist_dir = config['smartplaylist']['playlist_dir'].get(unicode)
relative_to = config['smartplaylist']['relative_to'].get()
if relative_to:
relative_to = normpath(relative_to)
for playlist in playlists:
items = lib.items(playlist['query'])
if relative_to:
paths = [os.path.relpath(item.path, relative_to) for item in items]
else:
paths = [item.path for item in items]
basename = playlist['name'].encode('utf8')
m3u_path = os.path.join(playlist_dir, basename)
with open(syspath(m3u_path), 'w') as f:
for path in paths:
f.write(path + '\n')
print("... Done")
class SmartPlaylistPlugin(BeetsPlugin):
def __init__(self):
super(SmartPlaylistPlugin, self).__init__()
self.config.add({
'relative_to': None,
'playlists': []
})
def commands(self):
def update(lib, opts, args):
update_playlists(lib)
spl_update = ui.Subcommand('spl_update',
help='update the smart playlists')
spl_update.func = update
return [spl_update]
@SmartPlaylistPlugin.listen('database_change')
def handle_change(lib):
global library
global database_changed
library = lib
database_changed = True
@SmartPlaylistPlugin.listen('cli_exit')
def update():
if database_changed:
update_playlists(library)
|
from __future__ import print_function
from beets.plugins import BeetsPlugin
from beets import config, ui
from beets.util import normpath, syspath
import os
database_changed = False
library = None
def update_playlists(lib):
print("Updating smart playlists...")
playlists = config['smartplaylist']['playlists'].get(list)
playlist_dir = config['smartplaylist']['playlist_dir'].get(unicode)
relative_to = config['smartplaylist']['relative_to'].get()
if relative_to:
relative_to = normpath(relative_to)
for playlist in playlists:
items = lib.items(playlist['query'])
if relative_to:
paths = [os.path.relpath(item.path, relative_to) for item in items]
else:
paths = [item.path for item in items]
basename = playlist['name'].encode('utf8')
m3u_path = os.path.join(playlist_dir, basename)
with open(syspath(m3u_path), 'w') as f:
for path in paths:
f.write(path + '\n')
print("... Done")
class SmartPlaylistPlugin(BeetsPlugin):
def __init__(self):
super(SmartPlaylistPlugin, self).__init__()
self.config.add({
'relative_to': None,
'playlists': []
})
def commands(self):
def update(lib, opts, args):
update_playlists(lib)
spl_update = ui.Subcommand('spl_update',
help='update the smart playlists')
spl_update.func = update
return [spl_update]
@SmartPlaylistPlugin.listen('database_change')
def handle_change(lib):
global library
global database_changed
library = lib
database_changed = True
@SmartPlaylistPlugin.listen('cli_exit')
def update():
if database_changed:
update_playlists(library)
|
mit
|
Python
|
d043eef098be68690b9d6cd5790b667cdb2d825b
|
Add comments about security issue
|
stuffy-the-dragon/wKRApp,bafana5/wKRApp,stuffy-the-dragon/wKRApp,stuffy-the-dragon/wKRApp,bafana5/wKRApp,bafana5/wKRApp
|
runserver.py
|
runserver.py
|
from wKRApp import app
# 2 security flaws, need to sort out
# 1. the key should be randomy generated
# 2. the key should be set in a config file that is then imported in.
app.secret_key = "my precious"
app.run(debug=True)
|
from wKRApp import app
app.secret_key = "my precious" # 2 security flaws, need to sort out
app.run(debug=True)
|
mit
|
Python
|
db4b77ee5be099cf0ac751956d010777e1ff6640
|
Add DefinitionNotFoundError
|
onitake/Uranium,onitake/Uranium
|
UM/Settings/SettingsError.py
|
UM/Settings/SettingsError.py
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SettingsError(Exception):
pass
class InvalidFileError(SettingsError):
def __init__(self, path):
super().__init__("File {0} is an invalid settings file".format(path))
class InvalidVersionError(SettingsError):
def __init__(self, path):
super().__init__("Invalid version for file {0}".format(path))
class DefinitionNotFoundError(SettingsError):
def __init__(self, type_id):
super().__init__("Could not find machine definition {0}".format(type_id))
|
# Copyright (c) 2015 Ultimaker B.V.
# Uranium is released under the terms of the AGPLv3 or higher.
class SettingsError(Exception):
pass
class InvalidFileError(SettingsError):
def __init__(self, path):
super().__init__("File {0} is an invalid settings file".format(path))
class InvalidVersionError(SettingsError):
def __init__(self, path):
super().__init__("Invalid version for file {0}".format(path))
|
agpl-3.0
|
Python
|
f5fcebed51923422aa82ac35576350889129e377
|
Sort interstitial_sites by their voronoi radii.
|
henniggroup/MPInterfaces,joshgabriel/MPInterfaces,joshgabriel/MPInterfaces,henniggroup/MPInterfaces
|
twod_materials/ion_intercalation/startup.py
|
twod_materials/ion_intercalation/startup.py
|
from pymatgen.core.structure import Structure
from pymatgen.core.periodic_table import Element
from pymatgen.analysis.defects.point_defects import (
Interstitial, ValenceIonicRadiusEvaluator
)
import operator
def inject_ions(ion, atomic_fraction):
"""
Adds ions to a percentage of interstitial sites into the POSCAR
that results in an at% less than or equal to the specified
atomic_fraction. Starts by filling interstitial sites with the
largest voronoi radius, and then works downward.
args:
specie (str): name of ion to intercalate
atomic_fraction (int): < 1.0
"""
specie = Element(ion)
structure = Structure.from_file('POSCAR')
# If the structure isn't big enough to accomodate such a small
# atomic fraction, multiply it in the x direction.
n_ions = 1.
while not n_ions / structure.num_sites <= atomic_fraction:
structure.make_supercell([2, 1, 1])
evaluator = ValenceIonicRadiusEvaluator(structure)
interstitial = Interstitial(structure, radii=evaluator.radii,
valences=evaluator.valences)
interstitial_sites = [
(site._fcoords, site.properties.get('voronoi_radius', None))
for site in interstitial._defect_sites
]
# Sort the interstitial sites by their voronoi radii.
interstitial_sites.sort(key=operator.itemgetter(1))
interstitial_sites.reverse()
while n_ions / (structure.num_sites + 1) <= atomic_fraction:
try:
structure.append(species=specie,
coords=interstitial_sites[int(n_ions) - 1][0],
validate_proximity=True)
except IndexError:
raise ValueError('The atomic_fraction specified exceeds the '
'number of available interstitial sites in this '
'structure. Please choose a smaller '
'atomic_fraction.')
n_ions += 1
return structure
|
from pymatgen.core.structure import Structure
from pymatgen.core.periodic_table import Element
from pymatgen.analysis.defects.point_defects import (
Interstitial, ValenceIonicRadiusEvaluator
)
def inject_ions(ion, atomic_fraction):
"""
Adds ions to a percentage of interstitial sites into the POSCAR
that results in an at% less than or equal to the specified
atomic_fraction.
args:
specie (str): name of ion to intercalate
atomic_fraction (int): < 1.0
"""
specie = Element(ion)
structure = Structure.from_file('POSCAR')
n_ions = 1.
while not n_ions / structure.num_sites <= atomic_fraction:
structure.make_supercell([2, 1, 1])
evaluator = ValenceIonicRadiusEvaluator(structure)
interstitial = Interstitial(structure, radii=evaluator.radii,
valences=evaluator.valences)
interstitial_sites = [site._fcoords for site in interstitial._defect_sites]
while n_ions / (structure.num_sites + 1) <= atomic_fraction:
try:
structure.append(species=specie,
coords=interstitial_sites[int(n_ions) - 1],
validate_proximity=True)
except IndexError:
raise ValueError('The atomic_fraction specified exceeds the '
'number of available interstitial sites in this '
'structure. Please choose a smaller '
'atomic_fraction.')
n_ions += 1
return structure
|
mit
|
Python
|
cf439125f2350597f66796c89a088cd443ed77cd
|
Fix search results disappearing after they are viewed
|
yrro/igd-exporter,yrro/temper-exporter,yrro/temper-exporter,yrro/igd-exporter
|
igd_exporter/exporter.py
|
igd_exporter/exporter.py
|
import cgi
import socket
import urllib
import wsgiref.util
import prometheus_client
from . import igd
def wsgi_app(environ, start_response):
'''
Base WSGI application that routes requests to other applications.
'''
name = wsgiref.util.shift_path_info(environ)
if name == '':
return front(environ, start_response)
if name == 'probe':
return probe(environ, start_response)
elif name == 'metrics':
return prometheus_app(environ, start_response)
return not_found(environ, start_response)
def front(environ, start_response):
'''
Front page, containing links to the expoter's own metrics, as well as links
to probe discovered devices.
'''
global targets
start_response('200 OK', [('Content-Type', 'text/html')])
if environ['REQUEST_METHOD'] == 'POST':
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, strict_parsing=1, encoding='latin1')
if form.getfirst('search') == '1':
targets = list(igd.search(5))
return [
b'<html>'
b'<head><title>WSG exporter</title></head>'
b'<body>'
b'<h1>IGD Exporter</h1>',
b'<form method="post"><p><input type="hidden" name="search" value="1"><button type="submit">Search</button> for devices on local network (5 second timeout)</input></form>',
*[b'<p><a href="/probe?target=%s">Probe %s</a>' % (urllib.parse.quote_plus(target).encode('latin1'), target.encode('latin1')) for target in targets],
b'<p><a href="/metrics">Metrics</a>'
b'</body>'
b'</html>'
]
# Discovered devices are kept in this list.
targets = []
def probe(environ, start_response):
'''
Performs a probe using the given root device URL.
'''
qs = urllib.parse.parse_qs(environ['QUERY_STRING'])
body = igd.probe(qs['target'][0])
start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8; version=0.0.4')])
return body
prometheus_app = prometheus_client.make_wsgi_app()
def not_found(environ, start_response):
'''
How did we get here?
'''
start_response('404 Not Found', [('Content-Type', 'text/plain')])
return [b'Not Found\r\n']
|
import cgi
import socket
import urllib
import wsgiref.util
import prometheus_client
from . import igd
def wsgi_app(environ, start_response):
'''
Base WSGI application that routes requests to other applications.
'''
name = wsgiref.util.shift_path_info(environ)
if name == '':
return front(environ, start_response)
if name == 'probe':
return probe(environ, start_response)
elif name == 'metrics':
return prometheus_app(environ, start_response)
return not_found(environ, start_response)
def front(environ, start_response):
'''
Front page, containing links to the expoter's own metrics, as well as links
to probe discovered devices.
'''
global targets
start_response('200 OK', [('Content-Type', 'text/html')])
if environ['REQUEST_METHOD'] == 'POST':
form = cgi.FieldStorage(fp=environ['wsgi.input'], environ=environ, strict_parsing=1, encoding='latin1')
if form.getfirst('search') == '1':
targets = igd.search(5)
return [
b'<html>'
b'<head><title>WSG exporter</title></head>'
b'<body>'
b'<h1>IGD Exporter</h1>',
b'<form method="post"><p><input type="hidden" name="search" value="1"><button type="submit">Search</button> for devices on local network (5 second timeout)</input></form>',
*[b'<p><a href="/probe?target=%s">Probe %s</a>' % (urllib.parse.quote_plus(target).encode('latin1'), target.encode('latin1')) for target in targets],
b'<p><a href="/metrics">Metrics</a>'
b'</body>'
b'</html>'
]
# Discovered devices are kept in this list.
targets = []
def probe(environ, start_response):
'''
Performs a probe using the given root device URL.
'''
qs = urllib.parse.parse_qs(environ['QUERY_STRING'])
body = igd.probe(qs['target'][0])
start_response('200 OK', [('Content-Type', 'text/plain; charset=utf-8; version=0.0.4')])
return body
prometheus_app = prometheus_client.make_wsgi_app()
def not_found(environ, start_response):
'''
How did we get here?
'''
start_response('404 Not Found', [('Content-Type', 'text/plain')])
return [b'Not Found\r\n']
|
mit
|
Python
|
50d2bb0908ea3b45d1ab6da494953a88362a67d4
|
add value summary
|
polltooh/CNN_LSTM
|
save_func.py
|
save_func.py
|
import tensorflow as tf
import time
def add_train_var():
""" add all trainable variable to summary"""
for var in tf.trainable_variables():
tf.histogram_summary(var.op.name, var)
def add_loss(loss_scope = 'losses'):
""" add all losses to summary """
for l in tf.get_collection(loss_scope):
tf.scalar_summary(l.op.name, l)
def restore_model(sess, saver, model_dir, model_name = None):
""" restore model:
if model_name is None, restore the last one
"""
if model_name is None:
ckpt = tf.train.get_checkpoint_state(FLAGS.model_dir)
if ckpt and ckpt.all_model_checkpoint_paths[-1]:
print("restore " + ckpt.all_model_checkpoint_paths[-1])
saver.restore(sess, ckpt.all_model_checkpoint_paths[-1])
else:
print('no check point')
else:
print("restore " + model_name)
saver.restore(sess, model_dir + '/' + model_name)
def save_model(sess, saver, model_dir, iteration):
""" save the current model"""
curr_time = time.strftime("%Y%m%d_%H%M")
model_name = model_dir + '/' + curr_time + \
'_iter_' + str(iteration) + '_model.ckpt'
saver.save(sess, model_name)
def add_value_sum(summary_writer, value, name):
""" add python value to tensorboard """
return tf.Summary(value = [tf.Summary.Value(tag = name, simple_value = value)])
|
import tensorflow as tf
import time
def add_train_var():
""" add all trainable variable to summary"""
for var in tf.trainable_variables():
tf.histogram_summary(var.op.name, var)
def add_loss(loss_scope = 'losses'):
""" add all losses to summary """
for l in tf.get_collection(loss_scope):
tf.scalar_summary(l.op.name, l)
def restore_model(sess, saver, model_dir, model_name = None):
""" restore model:
if model_name is None, restore the last one
"""
if model_name is None:
ckpt = tf.train.get_checkpoint_state(FLAGS.model_dir)
if ckpt and ckpt.all_model_checkpoint_paths[-1]:
print("restore " + ckpt.all_model_checkpoint_paths[-1])
saver.restore(sess, ckpt.all_model_checkpoint_paths[-1])
else:
print('no check point')
else:
print("restore " + model_name)
saver.restore(sess, model_dir + '/' + model_name)
def save_model(sess, saver, model_dir, iteration):
""" save the current model"""
curr_time = time.strftime("%Y%m%d_%H%M")
model_name = model_dir + '/' + curr_time + \
'_iter_' + str(iteration) + '_model.ckpt'
saver.save(sess, model_name)
|
mit
|
Python
|
d67249d0efb074ebcf5e5b20c5f21d92ffe81dae
|
Bump version to 0.1.7
|
gams/openkongqi,gams/openkongqi
|
openkongqi/__init__.py
|
openkongqi/__init__.py
|
# -*- coding: utf-8 -*-
__version__ = '0.1.7'
__author__ = "Stefan Berder"
__contact__ = '[email protected]'
|
# -*- coding: utf-8 -*-
__version__ = '0.1.6'
__author__ = "Stefan Berder"
__contact__ = '[email protected]'
|
apache-2.0
|
Python
|
e8cae50b026f9136b9b5afc35a92a74f33937205
|
Update 1482-maritime_boundary-buffered_land.py
|
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
|
integration-test/1482-maritime_boundary-buffered_land.py
|
integration-test/1482-maritime_boundary-buffered_land.py
|
from . import FixtureTest
# Adds tests for OSM features (but not NE features)
class MaritimeBoundary(FixtureTest):
def test_usa_canada_country_boundary(self):
# country boundary of USA and Canada
self.load_fixtures([
'https://www.openstreetmap.org/relation/148838',
'https://www.openstreetmap.org/relation/1428125',
'file://integration-test/fixtures/buffered_land/'
'1482-buffered_land-usa-can-wash-idaho.shp',
], clip=self.tile_bbox(8, 44, 87, padding=0.1))
self.assert_has_feature(
8, 44, 87, "boundaries",
{"kind": "country"})
def test_usa_canada_country_boundary_not_maritime_boundary(self):
# country boundary of USA and Canada
self.load_fixtures([
'https://www.openstreetmap.org/relation/148838',
'https://www.openstreetmap.org/relation/1428125',
'file://integration-test/fixtures/buffered_land/'
'1482-buffered_land-usa-can-wash-idaho.shp',
], clip=self.tile_bbox(8, 44, 87, padding=0.1))
self.assert_no_matching_feature(
8, 44, 87, "boundaries",
{"kind": "country", "maritime_boundary": 1})
def test_washington_idaho_region_boundary(self):
# region boundary between Washington - Idaho
self.load_fixtures([
'https://www.openstreetmap.org/relation/165479',
'https://www.openstreetmap.org/relation/162116',
'file://integration-test/fixtures/buffered_land/'
'1482-buffered_land-usa-can-wash-idaho.shp',
], clip=self.tile_bbox(8, 44, 88, padding=0.1))
self.assert_has_feature(
8, 44, 88, "boundaries",
{"kind": "region"})
def test_washington_idaho_region_boundary_not_maritime_boundary(self):
# region boundary between Washington - Idaho
self.load_fixtures([
'https://www.openstreetmap.org/relation/165479',
'https://www.openstreetmap.org/relation/162116',
'file://integration-test/fixtures/buffered_land/'
'1482-buffered_land-usa-can-wash-idaho.shp',
], clip=self.tile_bbox(8, 44, 88, padding=0.1))
self.assert_no_matching_feature(
8, 44, 88, "boundaries",
{"kind": "region", "maritime_boundary": 1})
|
from . import FixtureTest
# Adds tests for OSM features (but not NE features)
class MaritimeBoundary(FixtureTest):
def test_usa_canada_country_boundary(self):
# country boundary of USA and Canada
self.load_fixtures([
'https://www.openstreetmap.org/relation/148838',
'https://www.openstreetmap.org/relation/1428125',
], clip=self.tile_bbox(8, 44, 87, padding=0.1))
self.assert_has_feature(
8, 44, 87, "boundaries",
{"kind": "country", "maritime_boundary": false})
def test_washington_idaho_region_boundary(self):
# region boundary between Washington - Idaho
self.load_fixtures([
'https://www.openstreetmap.org/relation/165479',
'https://www.openstreetmap.org/relation/162116',
], clip=self.tile_bbox(8, 44, 88, padding=0.1))
self.assert_has_feature(
8, 44, 88, "boundaries",
{"kind": "region", "maritime_boundary": false})
|
mit
|
Python
|
5cf2e157da810dfe9e0436f9e76f5f339444031a
|
Add doxygen to the linux bootstrap
|
EricssonResearch/cerbero,ylatuya/cerbero,BigBrother-International/gst-cerbero,multipath-rtp/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,nirbheek/cerbero-old,EricssonResearch/cerbero,shoreflyer/cerbero,ikonst/cerbero,shoreflyer/cerbero,jackjansen/cerbero-2013,freedesktop-unofficial-mirror/gstreamer__cerbero,cee1/cerbero-mac,freedesktop-unofficial-mirror/gstreamer__cerbero,atsushieno/cerbero,sdroege/cerbero,OptoFidelity/cerbero,jackjansen/cerbero,ikonst/cerbero,ikonst/cerbero,brion/cerbero,freedesktop-unofficial-mirror/gstreamer__cerbero,nzjrs/cerbero,flexVDI/cerbero,justinjoy/cerbero,lubosz/cerbero,sdroege/cerbero,brion/cerbero,jackjansen/cerbero,cee1/cerbero-mac,lubosz/cerbero,jackjansen/cerbero-2013,superdump/cerbero,justinjoy/cerbero,cee1/cerbero-mac,centricular/cerbero,ramaxlo/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,brion/cerbero,ramaxlo/cerbero,atsushieno/cerbero,BigBrother-International/gst-cerbero,OptoFidelity/cerbero,jackjansen/cerbero-2013,GStreamer/cerbero,jackjansen/cerbero,davibe/cerbero,EricssonResearch/cerbero,superdump/cerbero,brion/cerbero,davibe/cerbero,superdump/cerbero,BigBrother-International/gst-cerbero,nicolewu/cerbero,sdroege/cerbero,nicolewu/cerbero,ylatuya/cerbero,ford-prefect/cerbero,GStreamer/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,nirbheek/cerbero,shoreflyer/cerbero,nirbheek/cerbero,flexVDI/cerbero,nirbheek/cerbero,multipath-rtp/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,flexVDI/cerbero,ramaxlo/cerbero,centricular/cerbero,EricssonResearch/cerbero,GStreamer/cerbero,sdroege/cerbero,ford-prefect/cerbero,atsushieno/cerbero,jackjansen/cerbero-2013,nirbheek/cerbero-old,AlertMe/cerbero,nirbheek/cerbero-old,nirbheek/cerbero-old,shoreflyer/cerbero,justinjoy/cerbero,OptoFidelity/cerbero,justinjoy/cerbero,multipath-rtp/cerbero,jackjansen/cerbero,flexVDI/cerbero,fluendo/cerbero,nzjrs/cerbero,centricular/cerbero,ikonst/cerbero,brion/cerbero,fluendo/cerbero,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,centricular/cerbero,ramaxlo/cerbero,nzjrs/cerbero,centricular/cerbero,fluendo/cerbero,GStreamer/cerbero,ikonst/cerbero,davibe/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,AlertMe/cerbero,nicolewu/cerbero,EricssonResearch/cerbero,multipath-rtp/cerbero,OptoFidelity/cerbero,flexVDI/cerbero,nzjrs/cerbero,BigBrother-International/gst-cerbero,GStreamer/cerbero,freedesktop-unofficial-mirror/gstreamer__sdk__cerbero,fluendo/cerbero,multipath-rtp/cerbero,davibe/cerbero,freedesktop-unofficial-mirror/gstreamer__cerbero,fluendo/cerbero,ford-prefect/cerbero,superdump/cerbero,ylatuya/cerbero,ramaxlo/cerbero,AlertMe/cerbero,nirbheek/cerbero,shoreflyer/cerbero,ylatuya/cerbero,nzjrs/cerbero,ford-prefect/cerbero,sdroege/cerbero,lubosz/cerbero,AlertMe/cerbero,lubosz/cerbero,atsushieno/cerbero,BigBrother-International/gst-cerbero,cee1/cerbero-mac,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero,atsushieno/cerbero,AlertMe/cerbero,jackjansen/cerbero-2013,freedesktop-unofficial-mirror/gstreamer-sdk__cerbero
|
cerbero/bootstrap/linux.py
|
cerbero/bootstrap/linux.py
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.bootstrap import BootstraperBase
from cerbero.bootstrap.bootstraper import register_bootstraper
from cerbero.config import Distro
from cerbero.errors import FatalError
from cerbero.utils import shell, _, user_is_root
class UnixBootstraper (BootstraperBase):
tool = ''
packages = []
def start(self):
shell.call('%s %s' % (self.tool, ' '.join(self.packages)))
class DebianBootstraper (UnixBootstraper):
tool = 'sudo apt-get install'
packages = ['autotools-dev', 'automake', 'autoconf', 'libtool', 'g++',
'autopoint', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkg-config', 'gtk-doc-tools', 'libxv-dev', 'libx11-dev',
'libpulse-dev', 'python2.7-dev', 'texinfo', 'gettext',
'build-essential', 'pkg-config', 'doxygen']
class RedHatBootstraper (UnixBootstraper):
tool = 'yum install'
packages = ['']
def register_all():
register_bootstraper(Distro.DEBIAN, DebianBootstraper)
register_bootstraper(Distro.REDHAT, RedHatBootstraper)
|
# cerbero - a multi-platform build system for Open Source software
# Copyright (C) 2012 Andoni Morales Alastruey <[email protected]>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Library General Public
# License as published by the Free Software Foundation; either
# version 2 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Library General Public License for more details.
#
# You should have received a copy of the GNU Library General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from cerbero.bootstrap import BootstraperBase
from cerbero.bootstrap.bootstraper import register_bootstraper
from cerbero.config import Distro
from cerbero.errors import FatalError
from cerbero.utils import shell, _, user_is_root
class UnixBootstraper (BootstraperBase):
tool = ''
packages = []
def start(self):
shell.call('%s %s' % (self.tool, ' '.join(self.packages)))
class DebianBootstraper (UnixBootstraper):
tool = 'sudo apt-get install'
packages = ['autotools-dev', 'automake', 'autoconf', 'libtool', 'g++',
'autopoint', 'make', 'cmake', 'bison', 'flex', 'yasm',
'pkg-config', 'gtk-doc-tools', 'libxv-dev', 'libx11-dev',
'libpulse-dev', 'python2.7-dev', 'texinfo', 'gettext',
'build-essential', 'pkg-config']
class RedHatBootstraper (UnixBootstraper):
tool = 'yum install'
packages = ['']
def register_all():
register_bootstraper(Distro.DEBIAN, DebianBootstraper)
register_bootstraper(Distro.REDHAT, RedHatBootstraper)
|
lgpl-2.1
|
Python
|
0543774cffde0ad6eafe4bebc77df04c03027cf7
|
Remove container specific import from __init__.py (#14)
|
SergeyPirogov/testcontainers-python
|
testcontainers/__init__.py
|
testcontainers/__init__.py
|
from testcontainers.core.generic import GenericContainer
from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for
|
from testcontainers.selenium import BrowserWebDriverContainer
from testcontainers.mysql import MySqlContainer
from testcontainers.postgres import PostgresContainer
from testcontainers.oracle import OracleDbContainer
from testcontainers.core.generic import GenericContainer
from testcontainers.core.waiting_utils import wait_container_is_ready, wait_for
|
apache-2.0
|
Python
|
3c54116f4c31a7e7392bdd7fd8fdc6773621d9f6
|
fix bad fix of sync contrib-base branch :-P (#9088)
|
demisto/content,demisto/content,demisto/content,VirusTotal/content,demisto/content,VirusTotal/content,VirusTotal/content,VirusTotal/content
|
Utils/contribution_sync/sync_contrib_base.py
|
Utils/contribution_sync/sync_contrib_base.py
|
#!/usr/bin/env python3
import os
import sys
import urllib3
from github import Github, enable_console_debug_logging
from github.Repository import Repository
from typing import List
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def get_master_commit_sha(repo: Repository) -> str: # noqa: E999
'''Return the sha commit of the master branch
Args:
repo (Repository): The repository whose master branch will be queried
Returns:
(str): The commit sha of the master branch's HEAD
'''
branch_data = repo.get_branch('master')
commit_sha = branch_data.commit.sha
return commit_sha
def get_branch_names_with_contrib(repo: Repository) -> List[str]: # noqa: E999
'''Return the list of branches that have the prefix of "contrib/" and that are base branches of open PRs
Args:
repo (Repository): The repository whose branches will be searched and listed
Returns:
(List[str]): List of branch names that have the "contrib/" prefix and are base branches of open PRs
'''
branch_names = []
open_prs_head_refs = {open_pr.head.ref for open_pr in repo.get_pulls(state='OPEN')}
for branch in repo.get_branches():
if branch.name.startswith('contrib/'):
prs_with_branch_as_base = repo.get_pulls(state='OPEN', base=branch.name)
if prs_with_branch_as_base.totalCount >= 1 and branch.name not in open_prs_head_refs:
branch_names.append(branch.name)
return branch_names
def main():
debug_mode = len(sys.argv) >= 2 and 'debug' in sys.argv[1].casefold()
if debug_mode:
enable_console_debug_logging()
gh = Github(os.getenv('CONTENTBOT_GH_ADMIN_TOKEN'), verify=False)
organization = 'demisto'
repo = 'content'
content_repo = gh.get_repo(f'{organization}/{repo}')
master_sha = get_master_commit_sha(content_repo)
contrib_base_branches = get_branch_names_with_contrib(content_repo)
for branch_name in contrib_base_branches:
git_ref = content_repo.get_git_ref(f'heads/{branch_name}')
print(f'Updating branch "{branch_name}" to sha "{master_sha}"')
git_ref.edit(master_sha, force=True)
if debug_mode:
print(f'{contrib_base_branches=}')
if __name__ == "__main__":
main()
|
#!/usr/bin/env python3
import os
import sys
import urllib3
from github import Github, enable_console_debug_logging
from github.Repository import Repository
from typing import List
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
def get_master_commit_sha(repo: Repository) -> str: # noqa: E999
'''Return the sha commit of the master branch
Args:
repo (Repository): The repository whose master branch will be queried
Returns:
(str): The commit sha of the master branch's HEAD
'''
branch_data = repo.get_branch('master')
commit_sha = branch_data.commit.sha
return commit_sha
def get_branch_names_with_contrib(repo: Repository) -> List[str]: # noqa: E999
'''Return the list of branches that have the prefix of "contrib/" and that are base branches of open PRs
Args:
repo (Repository): The repository whose branches will be searched and listed
Returns:
(List[str]): List of branch names that have the "contrib/" prefix and are base branches of open PRs
'''
branch_names = []
for branch in repo.get_branches():
if branch.name.startswith('contrib/'):
prs_with_branch_as_base = repo.get_pulls(state='OPEN', base=branch.name)
if prs_with_branch_as_base.totalCount >= 1:
prs_with_branch_as_head = repo.get_pulls(state='OPEN', head=branch.name)
if prs_with_branch_as_head.totalCount == 0:
branch_names.append(branch.name)
return branch_names
def main():
debug_mode = len(sys.argv) >= 2 and 'debug' in sys.argv[1].casefold
if debug_mode:
enable_console_debug_logging()
gh = Github(os.getenv('CONTENTBOT_GH_ADMIN_TOKEN'), verify=False)
organization = 'demisto'
repo = 'content'
content_repo = gh.get_repo(f'{organization}/{repo}')
master_sha = get_master_commit_sha(content_repo)
contrib_base_branches = get_branch_names_with_contrib(content_repo)
for branch_name in contrib_base_branches:
git_ref = content_repo.get_git_ref(f'heads/{branch_name}')
print(f'Updating branch "{branch_name}" to sha "{master_sha}"')
git_ref.edit(master_sha, force=True)
if debug_mode:
print(f'{contrib_base_branches=}')
if __name__ == "__main__":
main()
|
mit
|
Python
|
315c3bc1e2961dc9de5d77fed5d232f106e59968
|
improve a couple of comments
|
alces/gitlab-rest-client
|
users.py
|
users.py
|
'''
working with gitlab's users
'''
from crud import Crud
from utils import filter_dict
import random
class Users (Crud):
def __init__(self):
Crud.__init__(self, 'users', lambda x: x['username'])
# generate random password
rand_pass = lambda s, l = 10: ''.join(chr(random.randint(64, 122)) for x in xrange(l))
'''
create a new user
'''
def add(self, sysNam, login, fullName, email, **opts):
return Crud.add(self, sysNam, dict(
[('name', fullName), ('username', login), ('email', email)]
+ ('password' in opts and [] or [('password', self.rand_pass())])
+ opts.items()))
# for reusing users' cache between the calls of get_user()
_usrs = Users()
'''
if a user represented by usrDict is found in the system, then return its id
else - create a new one and return its id
'''
def get_user(sysNam, usrDict):
try:
usr = _usrs.by_name(sysNam, usrDict['username'])
except KeyError:
# add the 1st identity to a top level of the users' dict
# ('cause POST API call to /users works with only one extern_uuid)
dictWithUuid = filter_dict(dict(usrDict.items() + (usrDict['identities'] and usrDict['identities'][0].items() or [])),
'admin',
'bio',
'can_create_group',
'extern_uid',
'linkedin',
'password',
'projects_limit',
'provider',
'skype',
'twitter',
'website_url')
usr = _usrs.add(sysNam, usrDict['username'], usrDict['name'], usrDict['email'], confirm = False, **dictWithUuid)
# rebuild the cache after adding a new user
_usrs.clr_cache(sysNam)
return usr['id']
|
'''
working with gitlab's users
'''
from crud import Crud
from utils import filter_dict
import random
class Users (Crud):
def __init__(self):
Crud.__init__(self, 'users', lambda x: x['username'])
# generate random password
rand_pass = lambda s, l = 10: ''.join(chr(random.randint(64, 122)) for x in xrange(l))
'''
create a new user
'''
def add(self, sysNam, login, fullName, email, **opts):
return Crud.add(self, sysNam, dict(
[('name', fullName), ('username', login), ('email', email)]
+ ('password' in opts and [] or [('password', self.rand_pass())])
+ opts.items()))
# for saving users' cache between the calls of get_user()
_usrs = Users()
'''
if a user represented by usrDict is found in the system, then return its id
else - create a new one and return its id
'''
def get_user(sysNam, usrDict):
try:
usr = _usrs.by_name(sysNam, usrDict['username'])
except KeyError:
# add the 1st identity to a users' dict
dictWithUuid = filter_dict(dict(usrDict.items() + (usrDict['identities'] and usrDict['identities'][0].items() or [])),
'admin',
'bio',
'can_create_group',
'extern_uid',
'linkedin',
'password',
'projects_limit',
'provider',
'skype',
'twitter',
'website_url')
usr = _usrs.add(sysNam, usrDict['username'], usrDict['name'], usrDict['email'], confirm = False, **dictWithUuid)
# rebuilding of cache after adding a new user is needed
_usrs.clr_cache(sysNam)
return usr['id']
|
bsd-2-clause
|
Python
|
c63681e6fc3d5e336a13698a473f4aaf61fdb9b1
|
Remove width and x arguments until fixed positioning.
|
RyanMcG/center_dmenu
|
center_dmenu.py
|
center_dmenu.py
|
#!/usr/bin/env python2
# Copyright 2013 Ryan McGowan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Xlib import display
import sys
from os import system
def get_dimensions():
current_display = display.Display()
current_screen = current_display.screen()
return (current_screen['width_in_pixels'],
current_screen['height_in_pixels'],
current_screen['width_in_mms'],
current_screen['height_in_mms'])
def parse_dmenu_args(args):
x_width, x_height, mms_width, mms_height = get_dimensions()
num_args = len(args)
# Do some math to determine a multiplier to go from points to pixels.
pixels_per_point = x_height / (mms_height / 25.4) / 72
# 20% padding means only 80% of the screen is used by dmenu with 10%
# padding on each side.
padding = .24
typeface = 'Inconsolata'
# Font size and lineheight are in points
font_size = 10
line_height = 24
# Get arguments from the command line.
if num_args > 1:
padding = float(args[1])
if num_args > 2:
line_height = int(args[2])
if num_args > 3:
font_size = int(args[3])
if num_args > 4:
typeface = args[4]
# Set some default values for dmenu args
dmenu_run_args = {
'x': int(round(padding * x_width / 2.0, 0)),
'height': int(round(line_height * pixels_per_point, 0)),
'extra_args': "-fn '{0}:size={1}'".format(typeface, font_size)
}
# Determine propper height and width for input into dmenu
dmenu_run_args['width'] = x_width - (2 * dmenu_run_args['x'])
dmenu_run_args['y'] = (x_height - dmenu_run_args['height']) / 2
return dmenu_run_args
def main(args):
dmenu_run_args = parse_dmenu_args(args)
return system(("dmenu_run {extra_args} -y {y}"
" -h {height}").format(**dmenu_run_args))
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
#!/usr/bin/env python2
# Copyright 2013 Ryan McGowan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Xlib import display
import sys
from os import system
def get_dimensions():
current_display = display.Display()
current_screen = current_display.screen()
return (current_screen['width_in_pixels'],
current_screen['height_in_pixels'],
current_screen['width_in_mms'],
current_screen['height_in_mms'])
def parse_dmenu_args(args):
x_width, x_height, mms_width, mms_height = get_dimensions()
num_args = len(args)
# Do some math to determine a multiplier to go from points to pixels.
pixels_per_point = x_height / (mms_height / 25.4) / 72
# 20% padding means only 80% of the screen is used by dmenu with 10%
# padding on each side.
padding = .24
typeface = 'Inconsolata'
# Font size and lineheight are in points
font_size = 10
line_height = 24
# Get arguments from the command line.
if num_args > 1:
padding = float(args[1])
if num_args > 2:
line_height = int(args[2])
if num_args > 3:
font_size = int(args[3])
if num_args > 4:
typeface = args[4]
# Set some default values for dmenu args
dmenu_run_args = {
'x': int(round(padding * x_width / 2.0, 0)),
'height': int(round(line_height * pixels_per_point, 0)),
'extra_args': "-fn '{0}:size={1}'".format(typeface, font_size)
}
# Determine propper height and width for input into dmenu
dmenu_run_args['width'] = x_width - (2 * dmenu_run_args['x'])
dmenu_run_args['y'] = (x_height - dmenu_run_args['height']) / 2
return dmenu_run_args
def main(args):
dmenu_run_args = parse_dmenu_args(args)
return system(("dmenu_run {extra_args} -w {width} -x {x} -y {y}"
" -h {height}").format(**dmenu_run_args))
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
apache-2.0
|
Python
|
999ae0c40a7a101672c735a552a7e79fcc6e9f96
|
Add desk_update url to admin urls
|
caneruguz/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,aaxelb/osf.io,pattisdr/osf.io,baylee-d/osf.io,mattclark/osf.io,acshi/osf.io,alexschiller/osf.io,adlius/osf.io,mfraezz/osf.io,TomBaxter/osf.io,mluo613/osf.io,TomBaxter/osf.io,acshi/osf.io,felliott/osf.io,chrisseto/osf.io,sloria/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,felliott/osf.io,alexschiller/osf.io,binoculars/osf.io,chennan47/osf.io,alexschiller/osf.io,chennan47/osf.io,caseyrollins/osf.io,mluo613/osf.io,mluo613/osf.io,cslzchen/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,hmoco/osf.io,chennan47/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,monikagrabowska/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,hmoco/osf.io,acshi/osf.io,aaxelb/osf.io,mattclark/osf.io,acshi/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,brianjgeiger/osf.io,sloria/osf.io,baylee-d/osf.io,cwisecarver/osf.io,leb2dg/osf.io,mfraezz/osf.io,saradbowman/osf.io,mfraezz/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,Johnetordoff/osf.io,felliott/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,aaxelb/osf.io,pattisdr/osf.io,erinspace/osf.io,icereval/osf.io,mluo613/osf.io,crcresearch/osf.io,icereval/osf.io,brianjgeiger/osf.io,hmoco/osf.io,alexschiller/osf.io,Nesiehr/osf.io,laurenrevere/osf.io,erinspace/osf.io,TomBaxter/osf.io,adlius/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,sloria/osf.io,caneruguz/osf.io,caneruguz/osf.io,binoculars/osf.io,mattclark/osf.io,erinspace/osf.io,caseyrollins/osf.io,leb2dg/osf.io,hmoco/osf.io,adlius/osf.io,felliott/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,chrisseto/osf.io,binoculars/osf.io,leb2dg/osf.io,leb2dg/osf.io,laurenrevere/osf.io,caneruguz/osf.io,pattisdr/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,adlius/osf.io,monikagrabowska/osf.io,cslzchen/osf.io,acshi/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,icereval/osf.io,mluo613/osf.io,cslzchen/osf.io,alexschiller/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,crcresearch/osf.io
|
admin/common_auth/urls.py
|
admin/common_auth/urls.py
|
from __future__ import absolute_import
from django.conf.urls import url
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.views import password_change, password_change_done
from admin.common_auth import views
urlpatterns = [
url(r'^login/?$', views.LoginView.as_view(), name='login'),
url(r'^logout/$', views.logout_user, name='logout'),
url(r'^register/$', views.RegisterUser.as_view(), name='register'),
url(r'^password_change/$', password_change,
{'post_change_redirect': reverse_lazy('auth:password_change_done')},
name='password_change'),
url(r'^password_change/done/$', password_change_done,
{'template_name': 'password_change_done.html'},
name='password_change_done'),
url(r'^settings/desk/$', views.DeskUserCreateFormView.as_view(), name='desk'),
url(r'^settings/desk/update/$', views.DeskUserUpdateFormView.as_view(), name='desk_update'),
]
|
from __future__ import absolute_import
from django.conf.urls import url
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.views import password_change, password_change_done
from admin.common_auth import views
urlpatterns = [
url(r'^login/?$', views.LoginView.as_view(), name='login'),
url(r'^logout/$', views.logout_user, name='logout'),
url(r'^register/$', views.RegisterUser.as_view(), name='register'),
url(r'^password_change/$', password_change,
{'post_change_redirect': reverse_lazy('auth:password_change_done')},
name='password_change'),
url(r'^password_change/done/$', password_change_done,
{'template_name': 'password_change_done.html'},
name='password_change_done'),
url(r'^settings/desk/$', views.DeskUserFormView.as_view(), name='desk'),
]
|
apache-2.0
|
Python
|
bce0c2853e6c7901280c9989893e6230ec29dbe4
|
Allow util.retry to return result of task coroutine
|
aiorchestra/aiorchestra
|
aiorchestra/core/utils.py
|
aiorchestra/core/utils.py
|
# Author: Denys Makogon
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import asyncio
class Singleton(type):
_instance = None
def __call__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instance
async def retry(fn, args=None, kwargs=None, exceptions=None,
task_retries=1, task_retry_interval=10):
args = args or []
kwargs = kwargs or {}
while task_retries > 0:
try:
result = await fn(*args, **kwargs)
if result:
return result
except Exception as e:
if not exceptions or not isinstance(e, exceptions):
raise e
if task_retry_interval:
await asyncio.sleep(task_retry_interval)
task_retries -= 1
raise Exception("exiting retry loop")
def operation(action):
async def wraps(*args, **kwargs):
source = list(args)[0]
source.context.logger.debug(
'[{0}] - staring task "{1}" execution.'
.format(source.name, action.__name__))
try:
await action(*args, **kwargs)
source.context.logger.debug(
'[{0}] - ending task "{1}" execution'
.format(source.name, action.__name__))
except Exception as ex:
source.context.logger.error(
'[{0}] - error during task "{1}" execution. '
'Reason: {2}.'
.format(source.name, action.__name__, str(ex)))
raise ex
return wraps
|
# Author: Denys Makogon
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import asyncio
class Singleton(type):
_instance = None
def __call__(cls, *args, **kwargs):
if not cls._instance:
cls._instance = super(Singleton, cls).__call__(*args, **kwargs)
return cls._instance
async def retry(fn, args=None, kwargs=None, exceptions=None,
task_retries=1, task_retry_interval=10):
args = args or []
kwargs = kwargs or {}
while task_retries > 0:
try:
result = await fn(*args, **kwargs)
if result:
return
except Exception as e:
if not exceptions or not isinstance(e, exceptions):
raise e
if task_retry_interval:
await asyncio.sleep(task_retry_interval)
task_retries -= 1
raise Exception("exiting retry loop")
def operation(action):
async def wraps(*args, **kwargs):
source = list(args)[0]
source.context.logger.debug(
'[{0}] - staring task "{1}" execution.'
.format(source.name, action.__name__))
try:
await action(*args, **kwargs)
source.context.logger.debug(
'[{0}] - ending task "{1}" execution'
.format(source.name, action.__name__))
except Exception as ex:
source.context.logger.error(
'[{0}] - error during task "{1}" execution. '
'Reason: {2}.'
.format(source.name, action.__name__, str(ex)))
raise ex
return wraps
|
apache-2.0
|
Python
|
54c18ae0c6372922fbf108e4ad659fa81ec66284
|
Fix test on Win
|
platformio/platformio,platformio/platformio-core,platformio/platformio-core
|
tests/commands/pkg/test_exec.py
|
tests/commands/pkg/test_exec.py
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import pytest
from platformio.package.commands.exec import package_exec_cmd
def test_pkg_not_installed(clirunner, validate_cliresult, isolated_pio_core):
result = clirunner.invoke(
package_exec_cmd,
["--", "openocd"],
)
with pytest.raises(
AssertionError,
match=("Could not find a package with 'openocd' executable file"),
):
validate_cliresult(result)
def test_pkg_specified(clirunner, validate_cliresult, isolated_pio_core, strip_ansi):
# with install
result = clirunner.invoke(
package_exec_cmd,
["-p", "platformio/tool-openocd", "--", "openocd", "--version"],
obj=dict(force_click_stream=True),
)
validate_cliresult(result)
output = strip_ansi(result.output)
assert "Tool Manager: Installing platformio/tool-openocd" in output
assert "Open On-Chip Debugger" in output
def test_unrecognized_options(
clirunner, validate_cliresult, isolated_pio_core, strip_ansi
):
# unrecognized option
result = clirunner.invoke(
package_exec_cmd,
["--", "openocd", "--test-unrecognized"],
obj=dict(force_click_stream=True),
)
with pytest.raises(
AssertionError,
match=(r"openocd: (unrecognized|unknown) option"),
):
validate_cliresult(result)
|
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import pytest
from platformio.package.commands.exec import package_exec_cmd
def test_pkg_not_installed(clirunner, validate_cliresult, isolated_pio_core):
result = clirunner.invoke(
package_exec_cmd,
["--", "openocd"],
)
with pytest.raises(
AssertionError,
match=("Could not find a package with 'openocd' executable file"),
):
validate_cliresult(result)
def test_pkg_specified(clirunner, validate_cliresult, isolated_pio_core, strip_ansi):
# with install
result = clirunner.invoke(
package_exec_cmd,
["-p", "platformio/tool-openocd", "--", "openocd", "--version"],
obj=dict(force_click_stream=True),
)
validate_cliresult(result)
output = strip_ansi(result.output)
assert "Tool Manager: Installing platformio/tool-openocd" in output
assert "Open On-Chip Debugger" in output
def test_unrecognized_options(
clirunner, validate_cliresult, isolated_pio_core, strip_ansi
):
# unrecognized option
result = clirunner.invoke(
package_exec_cmd,
["--", "openocd", "--test-unrecognized"],
obj=dict(force_click_stream=True),
)
with pytest.raises(
AssertionError,
match=("openocd: unrecognized option"),
):
validate_cliresult(result)
|
apache-2.0
|
Python
|
ab13d8157090e452c1cffec2b917800cd9c5ed56
|
Fix whitespace problem and always replace window['title'] None, with empty string
|
alexoneill/py3status,ultrabug/py3status,vvoland/py3status,valdur55/py3status,guiniol/py3status,tobes/py3status,Andrwe/py3status,ultrabug/py3status,Andrwe/py3status,valdur55/py3status,docwalter/py3status,valdur55/py3status,ultrabug/py3status,guiniol/py3status,tobes/py3status
|
py3status/modules/window_title.py
|
py3status/modules/window_title.py
|
# -*- coding: utf-8 -*-
"""
Display the current window title.
Configuration parameters:
cache_timeout: How often we refresh this module in seconds (default 0.5)
format: display format for window_title (default '{title}')
max_width: If width of title is greater, shrink it and add '...'
(default 120)
Requires:
i3-py: (https://github.com/ziberna/i3-py)
`pip install i3-py`
If payload from server contains wierd utf-8
(for example one window have something bad in title) - the plugin will
give empty output UNTIL this window is closed.
I can't fix or workaround that in PLUGIN, problem is in i3-py library.
@author shadowprince
@license Eclipse Public License
"""
import i3
def find_focused(tree):
if type(tree) == list:
for el in tree:
res = find_focused(el)
if res:
return res
elif type(tree) == dict:
if tree['focused']:
return tree
else:
return find_focused(tree['nodes'] + tree['floating_nodes'])
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 0.5
format = '{title}'
max_width = 120
def __init__(self):
self.title = ''
def window_title(self):
window = find_focused(i3.get_tree())
if window['name'] is None:
window['name'] = ''
transformed = False
if window and 'name' in window and window['name'] != self.title:
self.title = (len(window['name']) > self.max_width and
u"...{}".format(window['name'][-(self.max_width - 3):]) or
window['name'])
transformed = True
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': self.py3.safe_format(self.format, {'title': self.title}),
'transformed': transformed
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
# -*- coding: utf-8 -*-
"""
Display the current window title.
Configuration parameters:
cache_timeout: How often we refresh this module in seconds (default 0.5)
format: display format for window_title (default '{title}')
max_width: If width of title is greater, shrink it and add '...'
(default 120)
Requires:
i3-py: (https://github.com/ziberna/i3-py)
`pip install i3-py`
If payload from server contains wierd utf-8
(for example one window have something bad in title) - the plugin will
give empty output UNTIL this window is closed.
I can't fix or workaround that in PLUGIN, problem is in i3-py library.
@author shadowprince
@license Eclipse Public License
"""
import i3
def find_focused(tree):
if type(tree) == list:
for el in tree:
res = find_focused(el)
if res:
return res
elif type(tree) == dict:
if tree['focused']:
return tree
else:
return find_focused(tree['nodes'] + tree['floating_nodes'])
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 0.5
format = '{title}'
max_width = 120
def __init__(self):
self.title = ''
def window_title(self):
window = find_focused(i3.get_tree())
transformed = False
if window and 'name' in window and window['name'] != self.title:
if window['name'] is None:
window['name'] = ''
self.title = (len(window['name']) > self.max_width and
u"...{}".format(window['name'][-(self.max_width - 3):]) or
window['name'])
transformed = True
return {
'cached_until': self.py3.time_in(self.cache_timeout),
'full_text': self.py3.safe_format(self.format, {'title': self.title}),
'transformed': transformed
}
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
|
bsd-3-clause
|
Python
|
39824268150fb5b9cb86c599dad81b18f7ef7142
|
Add the Award and AwardGrant models to the init set
|
leotrubach/sourceforge-allura,apache/incubator-allura,lym/allura-git,apache/allura,Bitergia/allura,heiths/allura,Bitergia/allura,leotrubach/sourceforge-allura,heiths/allura,heiths/allura,apache/allura,leotrubach/sourceforge-allura,apache/incubator-allura,lym/allura-git,Bitergia/allura,apache/incubator-allura,leotrubach/sourceforge-allura,apache/allura,Bitergia/allura,apache/allura,heiths/allura,Bitergia/allura,lym/allura-git,apache/incubator-allura,lym/allura-git,lym/allura-git,apache/allura,heiths/allura
|
pyforge/pyforge/model/__init__.py
|
pyforge/pyforge/model/__init__.py
|
# -*- coding: utf-8 -*-
"""The application's model objects"""
from .session import ProjectSession
from .project import Theme, Neighborhood, NeighborhoodFile, Project, ProjectFile, AppConfig, SearchConfig, ScheduledMessage
from .discuss import Discussion, Thread, PostHistory, Post, Attachment
from .artifact import Artifact, Message, VersionedArtifact, Snapshot, ArtifactLink, nonce, Feed, Award, AwardGrant
from .auth import User, ProjectRole, OpenId, EmailAddress
from .openid_model import OpenIdStore, OpenIdAssociation, OpenIdNonce
from .filesystem import File
from .tag import TagEvent, Tag, UserTags
from .types import ArtifactReference, ArtifactReferenceType
from .session import main_doc_session, main_orm_session
from .session import project_doc_session, project_orm_session
from .session import artifact_orm_session
from ming.orm.mapped_class import MappedClass
MappedClass.compile_all()
|
# -*- coding: utf-8 -*-
"""The application's model objects"""
from .session import ProjectSession
from .project import Theme, Neighborhood, NeighborhoodFile, Project, ProjectFile, AppConfig, SearchConfig, ScheduledMessage
from .discuss import Discussion, Thread, PostHistory, Post, Attachment
from .artifact import Artifact, Message, VersionedArtifact, Snapshot, ArtifactLink, nonce, Feed
from .auth import User, ProjectRole, OpenId, EmailAddress
from .openid_model import OpenIdStore, OpenIdAssociation, OpenIdNonce
from .filesystem import File
from .tag import TagEvent, Tag, UserTags
from .types import ArtifactReference, ArtifactReferenceType
from .session import main_doc_session, main_orm_session
from .session import project_doc_session, project_orm_session
from .session import artifact_orm_session
from ming.orm.mapped_class import MappedClass
MappedClass.compile_all()
|
apache-2.0
|
Python
|
803368f1741a9558ea84092dc975c1a10f51fa79
|
Change url in dashboard administrador
|
erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online
|
administracion/urls.py
|
administracion/urls.py
|
from django.conf.urls import url
from .views import admin_main_dashboard, admin_users_dashboard, \
admin_users_create, admin_users_edit, admin_users_edit_form, \
admin_users_delete_modal, admin_users_delete, list_studies
app_name = 'administracion'
# Urls en espanol
urlpatterns = [
url(r'^principal/$', admin_main_dashboard, name='main'),
url(r'^usuarios/nuevo/', admin_users_create, name='users_add'),
url(r'^usuarios/editar/(\d+)/', admin_users_edit_form, name='users_edit_form'),
url(r'^usuarios/editar/guardar/', admin_users_edit, name='users_edit'),
url(r'^usuarios/borrar/(\d+)/', admin_users_delete_modal, name='users_delete_modal'),
url(r'^usuarios/borrar/confirmar/', admin_users_delete, name='users_delete'),
url(r'^usuarios/', admin_users_dashboard, name='users'),
url(r'^principal/(?P<status_study>[\w\-]+)/$', list_studies, name='main_estudios'),
]
|
from django.conf.urls import url
from .views import admin_main_dashboard, admin_users_dashboard, \
admin_users_create, admin_users_edit, admin_users_edit_form, \
admin_users_delete_modal, admin_users_delete, list_studies
app_name = 'administracion'
# Urls en espanol
urlpatterns = [
url(r'^principal/', admin_main_dashboard, name='main'),
url(r'^usuarios/nuevo/', admin_users_create, name='users_add'),
url(r'^usuarios/editar/(\d+)/', admin_users_edit_form, name='users_edit_form'),
url(r'^usuarios/editar/guardar/', admin_users_edit, name='users_edit'),
url(r'^usuarios/borrar/(\d+)/', admin_users_delete_modal, name='users_delete_modal'),
url(r'^usuarios/borrar/confirmar/', admin_users_delete, name='users_delete'),
url(r'^usuarios/', admin_users_dashboard, name='users'),
url(r'^principal/(?P<status_study>[\w\-]+)/$', list_studies, name='main_estudios'),
]
|
mit
|
Python
|
b488b884cf802546a795b0084003a9200e0d646b
|
Implement update_cmd in cli
|
hackebrot/cibopath
|
cibopath/cli.py
|
cibopath/cli.py
|
# -*- coding: utf-8 -*-
import logging
import click
from cibopath import __version__
from cibopath.user_config import UserConfig
from cibopath.log import create_logger
from cibopath.scraper import load_templates
from cibopath.templates import dump
@click.group()
@click.pass_context
@click.option(
'-v', '--verbose',
is_flag=True, help='Print debug information'
)
@click.option(
'-c', '--config-file',
type=click.Path(), default='~/.cibopathrc',
help='Config file to hold settings'
)
@click.version_option(__version__, u'-V', u'--version', prog_name='cibopath')
def cli(ctx, verbose, config_file):
"""Cibopath - Search Cookiecutters on GitHub."""
ctx.obj = UserConfig(config_file)
logger = create_logger()
if verbose:
logger.setLevel(logging.DEBUG)
logger.debug('Logger initialized')
else:
logger.setLevel(logging.INFO)
@click.pass_obj
def _default_username(config):
try:
return config.get_value('github', 'username')
except KeyError:
return None
@click.pass_obj
def _default_token(config):
try:
return config.get_value('github', 'token')
except KeyError:
return None
@cli.command('update')
@click.option('-u', '--username', required=True, default=_default_username)
@click.option('-t', '--token', required=True, default=_default_token)
def update_cmd(username, token):
logger = logging.getLogger('cibopath')
logger.debug(
'username:{username} token:{token}'
''.format(username=username, token=token)
)
templates = load_templates(username, token)
logger.debug('Found {} templates'.format(len(templates)))
dump(templates)
logger.debug('Successfully updated templates')
def _show_user_config(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo(ctx.obj.text)
ctx.exit()
def _validate_variable(ctx, param, value):
try:
section, key = value.split('.')
return section, key
except ValueError:
raise click.BadParameter('variable needs to be in format section.key')
@cli.command('config')
@click.pass_obj
@click.option(
'--list', 'show_config', is_flag=True, default=False,
is_eager=True, expose_value=False, callback=_show_user_config
)
@click.argument('variable', callback=_validate_variable)
@click.argument('value')
def config_cmd(config, variable, value):
config.set_value(*variable, value)
main = cli
|
# -*- coding: utf-8 -*-
import logging
import click
from cibopath import __version__
from cibopath.user_config import UserConfig
from cibopath.log import create_logger
@click.group()
@click.pass_context
@click.option(
'-v', '--verbose',
is_flag=True, help='Print debug information'
)
@click.option(
'-c', '--config-file',
type=click.Path(), default='~/.cibopathrc',
help='Config file to hold settings'
)
@click.version_option(__version__, u'-V', u'--version', prog_name='cibopath')
def cli(ctx, verbose, config_file):
"""Cibopath - Search Cookiecutters on GitHub."""
ctx.obj = UserConfig(config_file)
logger = create_logger()
if verbose:
logger.setLevel(logging.DEBUG)
logger.debug('Logger initialized')
else:
logger.setLevel(logging.INFO)
@click.pass_obj
def _default_username(config):
try:
return config.get_value('github', 'username')
except KeyError:
return None
@click.pass_obj
def _default_token(config):
try:
return config.get_value('github', 'token')
except KeyError:
return None
@cli.command('update')
@click.option('-u', '--username', required=True, default=_default_username)
@click.option('-t', '--token', required=True, default=_default_token)
def update_cmd(username, token):
logger = logging.getLogger('cibopath')
logger.debug(
'username:{username} token:{token}'
''.format(username=username, token=token)
)
def _show_user_config(ctx, param, value):
if not value or ctx.resilient_parsing:
return
click.echo(ctx.obj.text)
ctx.exit()
def _validate_variable(ctx, param, value):
try:
section, key = value.split('.')
return section, key
except ValueError:
raise click.BadParameter('variable needs to be in format section.key')
@cli.command('config')
@click.pass_obj
@click.option(
'--list', 'show_config', is_flag=True, default=False,
is_eager=True, expose_value=False, callback=_show_user_config
)
@click.argument('variable', callback=_validate_variable)
@click.argument('value')
def config_cmd(config, variable, value):
config.set_value(*variable, value)
main = cli
|
bsd-3-clause
|
Python
|
30230a29ea8ee40121a83bb5a3203f80a296cf0e
|
Bump to v0.5.0
|
bradwright/flask-mustachejs,bradleywright/flask-mustachejs,bradleywright/flask-mustachejs,bradwright/flask-mustachejs
|
setup.py
|
setup.py
|
"""
----------------
Flask-Mustache
----------------
`Mustache`__ integration for Flask.
__ http://mustache.github.com/
Flask-Mustache adds template helpers and context processors to assist
Flask developers with integrating the Mustache library into their
development process.
"""
from setuptools import setup
setup(
name='Flask-MustacheJS',
version='0.5.0',
url='https://github.com/bradleywright/flask-mustachejs',
license='BSD',
author='Bradley Wright',
author_email='[email protected]',
description='Mustache integration in Flask, with Jinja and client-side libraries.',
long_description=__doc__,
packages=['flask_mustache'],
zip_safe=False,
include_package_data=True,
# include static assets
package_data = {
'': ['*.jinja', '*.js']
},
platforms='any',
install_requires=[
'Flask',
'pystache'
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
"""
----------------
Flask-Mustache
----------------
`Mustache`__ integration for Flask.
__ http://mustache.github.com/
Flask-Mustache adds template helpers and context processors to assist
Flask developers with integrating the Mustache library into their
development process.
"""
from setuptools import setup
setup(
name='Flask-MustacheJS',
version='0.4.9',
url='https://github.com/bradleywright/flask-mustachejs',
license='BSD',
author='Bradley Wright',
author_email='[email protected]',
description='Mustache integration in Flask, with Jinja and client-side libraries.',
long_description=__doc__,
packages=['flask_mustache'],
zip_safe=False,
include_package_data=True,
# include static assets
package_data = {
'': ['*.jinja', '*.js']
},
platforms='any',
install_requires=[
'Flask',
'pystache'
],
classifiers=[
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
bsd-3-clause
|
Python
|
b617b778d2e442d8e2ab6a2098f7799bfb83bf85
|
Bump version to 0.8.1
|
skwashd/python-acquia-cloud
|
setup.py
|
setup.py
|
#!/usr/bin/env python
"""Setup ACAPI package."""
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__name__), "README.md")) as f:
long_description = f.read()
setup(
name="acapi",
version="0.8.1",
description="Acquia Cloud API client.",
long_description=long_description,
author="Dave Hall",
author_email="[email protected]",
url="http://github.com/skwashd/python-acquia-cloud",
install_requires=["requests==2.22.0", "requests-cache==0.5.2"],
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Internet",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["acapi", "acapi.resources"],
)
|
#!/usr/bin/env python
"""Setup ACAPI package."""
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__name__), "README.md")) as f:
long_description = f.read()
setup(
name="acapi",
version="0.8.0",
description="Acquia Cloud API client.",
long_description=long_description,
author="Dave Hall",
author_email="[email protected]",
url="http://github.com/skwashd/python-acquia-cloud",
install_requires=["requests==2.22.0", "requests-cache==0.5.2"],
license="MIT",
classifiers=[
"Development Status :: 4 - Beta",
"Topic :: Internet",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
],
packages=["acapi", "acapi.resources"],
)
|
mit
|
Python
|
c7db3806a57935921c733609a265e505654ab601
|
Add Korean language
|
alephobjects/Cura,alephobjects/Cura,alephobjects/Cura
|
Cura/util/resources.py
|
Cura/util/resources.py
|
#coding:utf8
"""
Helper module to get easy access to the path where resources are stored.
This is because the resource location is depended on the packaging method and OS
"""
__copyright__ = "Copyright (C) 2013 David Braam - Released under terms of the AGPLv3 License"
import os
import sys
import glob
import gettext
if sys.platform.startswith('darwin'):
try:
#Foundation import can crash on some MacOS installs
from Foundation import *
except:
pass
if sys.platform.startswith('darwin'):
if hasattr(sys, 'frozen'):
try:
resourceBasePath = NSBundle.mainBundle().resourcePath()
except:
resourceBasePath = os.path.join(os.path.dirname(__file__), "../../../../../")
else:
resourceBasePath = os.path.join(os.path.dirname(__file__), "../../resources")
else:
resourceBasePath = os.path.join(os.path.dirname(__file__), "../../resources")
def getPathForResource(dir, subdir, resource_name):
assert os.path.isdir(dir), "{p} is not a directory".format(p=dir)
path = os.path.normpath(os.path.join(dir, subdir, resource_name))
if not os.path.isfile(path):
return None
return path
def getPathForImage(name):
return getPathForResource(resourceBasePath, 'images', name)
def getPathForMesh(name):
return getPathForResource(resourceBasePath, 'meshes', name)
def getPathForFirmware(name):
return getPathForResource(resourceBasePath, 'firmware', name)
def getDefaultMachineProfiles():
path = os.path.normpath(os.path.join(resourceBasePath, 'machine_profiles', '*.ini'))
return glob.glob(path)
def setupLocalization(selectedLanguage = None):
#Default to english
languages = ['en']
if selectedLanguage is not None:
for item in getLanguageOptions():
if item[1] == selectedLanguage and item[0] is not None:
languages = [item[0]]
locale_path = os.path.normpath(os.path.join(resourceBasePath, 'locale'))
translation = gettext.translation('Cura', locale_path, languages, fallback=True)
#translation.ugettext = lambda message: u'#' + message
translation.install(unicode=True)
def getLanguageOptions():
return [
['en', 'English'],
['de', 'Deutsch'],
['fr', 'French'],
['ko', 'Korean'],
# ['zh', 'Chinese'],
# ['nl', 'Nederlands'],
# ['es', 'Spanish'],
# ['po', 'Polish']
]
|
#coding:utf8
"""
Helper module to get easy access to the path where resources are stored.
This is because the resource location is depended on the packaging method and OS
"""
__copyright__ = "Copyright (C) 2013 David Braam - Released under terms of the AGPLv3 License"
import os
import sys
import glob
import gettext
if sys.platform.startswith('darwin'):
try:
#Foundation import can crash on some MacOS installs
from Foundation import *
except:
pass
if sys.platform.startswith('darwin'):
if hasattr(sys, 'frozen'):
try:
resourceBasePath = NSBundle.mainBundle().resourcePath()
except:
resourceBasePath = os.path.join(os.path.dirname(__file__), "../../../../../")
else:
resourceBasePath = os.path.join(os.path.dirname(__file__), "../../resources")
else:
resourceBasePath = os.path.join(os.path.dirname(__file__), "../../resources")
def getPathForResource(dir, subdir, resource_name):
assert os.path.isdir(dir), "{p} is not a directory".format(p=dir)
path = os.path.normpath(os.path.join(dir, subdir, resource_name))
if not os.path.isfile(path):
return None
return path
def getPathForImage(name):
return getPathForResource(resourceBasePath, 'images', name)
def getPathForMesh(name):
return getPathForResource(resourceBasePath, 'meshes', name)
def getPathForFirmware(name):
return getPathForResource(resourceBasePath, 'firmware', name)
def getDefaultMachineProfiles():
path = os.path.normpath(os.path.join(resourceBasePath, 'machine_profiles', '*.ini'))
return glob.glob(path)
def setupLocalization(selectedLanguage = None):
#Default to english
languages = ['en']
if selectedLanguage is not None:
for item in getLanguageOptions():
if item[1] == selectedLanguage and item[0] is not None:
languages = [item[0]]
locale_path = os.path.normpath(os.path.join(resourceBasePath, 'locale'))
translation = gettext.translation('Cura', locale_path, languages, fallback=True)
#translation.ugettext = lambda message: u'#' + message
translation.install(unicode=True)
def getLanguageOptions():
return [
['en', 'English'],
['de', 'Deutsch'],
['fr', 'French'],
# ['zh', 'Chinese'],
# ['nl', 'Nederlands'],
# ['es', 'Spanish'],
# ['po', 'Polish']
]
|
agpl-3.0
|
Python
|
ff8b7ddbea7980c8464957880a1ae07afed49c64
|
Update antibody_lot.py
|
ENCODE-DCC/snovault,4dn-dcic/fourfront,T2DREAM/t2dream-portal,hms-dbmi/fourfront,philiptzou/clincoded,hms-dbmi/fourfront,ENCODE-DCC/encoded,kidaa/encoded,T2DREAM/t2dream-portal,kidaa/encoded,philiptzou/clincoded,hms-dbmi/fourfront,philiptzou/clincoded,philiptzou/clincoded,4dn-dcic/fourfront,ClinGen/clincoded,4dn-dcic/fourfront,kidaa/encoded,ClinGen/clincoded,4dn-dcic/fourfront,ENCODE-DCC/snovault,kidaa/encoded,ENCODE-DCC/encoded,ENCODE-DCC/encoded,ENCODE-DCC/encoded,philiptzou/clincoded,ClinGen/clincoded,T2DREAM/t2dream-portal,ENCODE-DCC/snovault,ClinGen/clincoded,ClinGen/clincoded,ENCODE-DCC/snovault,hms-dbmi/fourfront,hms-dbmi/fourfront,T2DREAM/t2dream-portal,ENCODE-DCC/snovault,kidaa/encoded
|
src/encoded/audit/antibody_lot.py
|
src/encoded/audit/antibody_lot.py
|
from ..auditor import (
AuditFailure,
audit_checker,
)
@audit_checker('antibody_lot')
def audit_antibody_lot_target(value, system):
'''
Antibody lots should not have associated characterizations
for different target labels
'''
if value['status'] in ['not pursued', 'deleted']:
return
if not value['characterizations']:
return
for char in value['characterizations']:
if char['target']['@id'] not in value['targets']:
detail = 'The antibody_lot {} has a characterization {} with target {}, which is not in the targets list'.format(
value['accession'],
char['uuid'],
char['target']['label']
)
yield AuditFailure('target mismatch', detail, level='ERROR')
|
from ..auditor import (
AuditFailure,
audit_checker,
)
@audit_checker('antibody_lot')
def audit_antibody_lot_target(value, system):
'''
Antibody lots should not have associated characterizations
for different target labels
'''
if value['status'] in ['not pursued', 'deleted']:
return
if not value['characterizations']:
return
for char in value['characterizations']:
if char['target']['@id'] not in value['targets']:
detail = 'The antibody_lot {} has a characterization {} with target {}, which is not in the targets list'.format(
value['accession'],
char['target']['label'],
char['uuid'])
yield AuditFailure('target mismatch', detail, level='ERROR')
|
mit
|
Python
|
bff7cf3d3cc2b636fb46a37b1edb60f7935e646e
|
Update consultants model
|
renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar
|
radar/radar/models/consultants.py
|
radar/radar/models/consultants.py
|
from sqlalchemy import Integer, Column, String
from sqlalchemy.orm import relationship
from radar.database import db
class Consultant(db.Model):
__tablename__ = 'consultants'
id = Column(Integer, primary_key=True)
title = Column(String, nullable=False)
first_name = Column(String, nullable=False)
last_name = Column(String, nullable=False)
email = Column(String)
telephone_number = Column(String)
gmc_number = Column(Integer)
organisation_consultants = relationship('OrganisationConsultant')
@property
def organisations(self):
return [x.organisation for x in self.organisation_consultants]
|
from sqlalchemy import Integer, Column, String
from sqlalchemy.orm import relationship
from radar.database import db
class Consultant(db.Model):
__tablename__ = 'consultants'
id = Column(Integer, primary_key=True)
first_name = Column(String, nullable=False)
last_name = Column(String, nullable=False)
organisation_consultants = relationship('OrganisationConsultant')
@property
def organisations(self):
return [x.organisation for x in self.organisation_consultants]
|
agpl-3.0
|
Python
|
c26dc22512e9b36a497930df544891cd0d96310b
|
Remove useless function
|
james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF,james9909/IntroCTF
|
utils.py
|
utils.py
|
import string
import random
from werkzeug.security import generate_password_hash, check_password_hash
from flask import current_app as app, url_for, redirect, session
from functools import wraps
def hash_password(s):
return generate_password_hash(s)
def check_password(hashed_password, try_password):
return check_password_hash(hashed_password, try_password)
def generate_string(length):
return "".join([random.choice(string.letters + string.digits) for x in range(length)])
def admins_only(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if "admin" not in session or not session['admin']:
session.clear()
return redirect(url_for("index"))
return f(*args, **kwargs)
return decorated_function
def redirect_if_not_logged_in(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if "tid" not in session or "logged_in" not in session or not session["logged_in"]:
return redirect(url_for("index"))
return f(*args, **kwargs)
return decorated_function
|
import string
import random
from werkzeug.security import generate_password_hash, check_password_hash
from flask import current_app as app, url_for, redirect, session
from functools import wraps
def hash_password(s):
return generate_password_hash(s)
def check_password(hashed_password, try_password):
return check_password_hash(hashed_password, try_password)
def generate_string(length):
return "".join([random.choice(string.letters + string.digits) for x in range(length)])
def admins_only(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if "admin" not in session or not session['admin']:
session.clear()
return redirect(url_for("index"))
return f(*args, **kwargs)
return decorated_function
def redirect_if_not_logged_in(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if "tid" not in session or "logged_in" not in session or not session["logged_in"]:
return redirect(url_for("index"))
return f(*args, **kwargs)
return decorated_function
def convert(dictionary):
"""Recursively converts dictionary keys to strings."""
if not isinstance(dictionary, dict):
return dictionary
return dict((str(k), convert(v))
for k, v in dictionary.items())
|
mit
|
Python
|
3450712ec629c1720b6a6af28835d95a91b8fce7
|
Use classifiers to specify the license.
|
jongracecox/anybadge,jongracecox/anybadge
|
setup.py
|
setup.py
|
#!/usr/bin/python
import os
import re
from setuptools import setup
from m2r import parse_from_file
import restructuredtext_lint
# Parser README.md into reStructuredText format
rst_readme = parse_from_file('README.md')
# Validate the README, checking for errors
errors = restructuredtext_lint.lint(rst_readme)
# Raise an exception for any errors found
if errors:
print(rst_readme)
raise ValueError('README.md contains errors: ',
', '.join([e.message for e in errors]))
# Attempt to get version number from TravisCI environment variable
version = os.environ.get('TRAVIS_TAG', default='0.0.0')
# Remove leading 'v'
version = re.sub('^v', '', version)
setup(
name='anybadge',
description='Simple, flexible badge generator for project badges.',
long_description=rst_readme,
version=version,
author='Jon Grace-Cox',
author_email='[email protected]',
py_modules=['anybadge', 'anybadge_server'],
setup_requires=['setuptools', 'wheel'],
tests_require=['unittest'],
install_requires=[],
data_files=[],
options={
'bdist_wheel': {'universal': True}
},
url='https://github.com/jongracecox/anybadge',
entry_points={
'console_scripts': ['anybadge=anybadge:main',
'anybadge-server=anybadge_server:main'],
},
classifiers=[
'License :: OSI Approved :: MIT License'
]
)
|
#!/usr/bin/python
import os
import re
from setuptools import setup
from m2r import parse_from_file
import restructuredtext_lint
# Parser README.md into reStructuredText format
rst_readme = parse_from_file('README.md')
# Validate the README, checking for errors
errors = restructuredtext_lint.lint(rst_readme)
# Raise an exception for any errors found
if errors:
print(rst_readme)
raise ValueError('README.md contains errors: ',
', '.join([e.message for e in errors]))
# Attempt to get version number from TravisCI environment variable
version = os.environ.get('TRAVIS_TAG', default='0.0.0')
# Remove leading 'v'
version = re.sub('^v', '', version)
setup(
name='anybadge',
description='Simple, flexible badge generator for project badges.',
long_description=rst_readme,
version=version,
author='Jon Grace-Cox',
author_email='[email protected]',
py_modules=['anybadge', 'anybadge_server'],
setup_requires=['setuptools', 'wheel'],
tests_require=['unittest'],
install_requires=[],
data_files=[],
options={
'bdist_wheel': {'universal': True}
},
url='https://github.com/jongracecox/anybadge',
entry_points={
'console_scripts': ['anybadge=anybadge:main',
'anybadge-server=anybadge_server:main'],
}
)
|
mit
|
Python
|
22cb94902f5bbe32d636009c2599eae7aa66282c
|
fix extraction(closes #4319)
|
ozburo/youtube-dl,Tatsh/youtube-dl,remitamine/youtube-dl,vinegret/youtube-dl,nyuszika7h/youtube-dl,Orochimarufan/youtube-dl,Orochimarufan/youtube-dl,rg3/youtube-dl,remitamine/youtube-dl,yan12125/youtube-dl,yan12125/youtube-dl,spvkgn/youtube-dl,vinegret/youtube-dl,spvkgn/youtube-dl,rg3/youtube-dl,nyuszika7h/youtube-dl,ozburo/youtube-dl,Tatsh/youtube-dl
|
youtube_dl/extractor/stretchinternet.py
|
youtube_dl/extractor/stretchinternet.py
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import int_or_none
class StretchInternetIE(InfoExtractor):
_VALID_URL = r'https?://portal\.stretchinternet\.com/[^/]+/(?:portal|full)\.htm\?.*?\beventId=(?P<id>\d+)'
_TEST = {
'url': 'https://portal.stretchinternet.com/umary/portal.htm?eventId=573272&streamType=video',
'info_dict': {
'id': '573272',
'ext': 'mp4',
'title': 'University of Mary Wrestling vs. Upper Iowa',
'timestamp': 1575668361,
'upload_date': '20191206',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
event = self._download_json(
'https://api.stretchinternet.com/trinity/event/tcg/' + video_id,
video_id)[0]
return {
'id': video_id,
'title': event['title'],
'timestamp': int_or_none(event.get('dateCreated'), 1000),
'url': 'https://' + event['media'][0]['url'],
}
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import int_or_none
class StretchInternetIE(InfoExtractor):
_VALID_URL = r'https?://portal\.stretchinternet\.com/[^/]+/portal\.htm\?.*?\beventId=(?P<id>\d+)'
_TEST = {
'url': 'https://portal.stretchinternet.com/umary/portal.htm?eventId=313900&streamType=video',
'info_dict': {
'id': '313900',
'ext': 'mp4',
'title': 'Augustana (S.D.) Baseball vs University of Mary',
'description': 'md5:7578478614aae3bdd4a90f578f787438',
'timestamp': 1490468400,
'upload_date': '20170325',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
stream = self._download_json(
'https://neo-client.stretchinternet.com/streamservice/v1/media/stream/v%s'
% video_id, video_id)
video_url = 'https://%s' % stream['source']
event = self._download_json(
'https://neo-client.stretchinternet.com/portal-ws/getEvent.json',
video_id, query={
'clientID': 99997,
'eventID': video_id,
'token': 'asdf',
})['event']
title = event.get('title') or event['mobileTitle']
description = event.get('customText')
timestamp = int_or_none(event.get('longtime'))
return {
'id': video_id,
'title': title,
'description': description,
'timestamp': timestamp,
'url': video_url,
}
|
unlicense
|
Python
|
2b8535c34d92089fe84203f1f06e82472397eaea
|
Update version number
|
fgaudin/aemanager,fgaudin/aemanager,fgaudin/aemanager
|
core/context_processors.py
|
core/context_processors.py
|
from django.conf import settings
def common(request=None):
return {'logo_url': settings.LOGO_URL,
'parent_site_url': settings.PARENT_SITE_URL,
'version': '1.4',
'GOOGLE_API_KEY': settings.GOOGLE_API_KEY,
'demo_mode': settings.DEMO}
|
from django.conf import settings
def common(request=None):
return {'logo_url': settings.LOGO_URL,
'parent_site_url': settings.PARENT_SITE_URL,
'version': '1.3',
'GOOGLE_API_KEY': settings.GOOGLE_API_KEY,
'demo_mode': settings.DEMO}
|
agpl-3.0
|
Python
|
62ccee03efd3fb5d53139f89ae974708d3a82e32
|
Add switches for cProfiling and verbosity output
|
OceanPARCELS/parcels,OceanPARCELS/parcels
|
tests/example_peninsula.py
|
tests/example_peninsula.py
|
from parcels import NEMOGrid, Particle, ParticleSet
from argparse import ArgumentParser
def pensinsula_example(filename, npart, degree=3, verbose=False):
"""Example configuration of particle flow around an idealised Peninsula
:arg filename: Basename of the input grid file set
:arg npart: Number of particles to intialise"""
# Open grid file set
grid = NEMOGrid(filename, degree=degree)
# Initialise particles
pset = ParticleSet(npart, grid)
for p in range(npart):
lat = p * grid.lat_u.valid_max / npart + 0.45 / 1.852 / 60.
pset.add_particle(Particle(lon=3 / 1.852 / 60., lat=lat))
if verbose:
print "Initial particle positions:"
for p in pset._particles:
print p
# Advect the particles for 24h
time = 86400.
dt = 36.
timesteps = int(time / dt)
pset.advect(timesteps=timesteps, dt=dt)
if verbose:
print "Final particle positions:"
for p in pset._particles:
print p
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection around an idealised peninsula""")
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-d', '--degree', type=int, default=3,
help='Degree of spatial interpolation')
p.add_argument('-v', '--verbose', action='store_true', default=False,
help='Print particle information before and after execution')
p.add_argument('--profiling', action='store_true', default=False,
help='Print profiling information after run')
args = p.parse_args()
if args.profiling:
from cProfile import runctx
from pstats import Stats
runctx("pensinsula_example('peninsula', args.particles, degree=args.degree, verbose=args.verbose)",
globals(), locals(), "Profile.prof")
Stats("Profile.prof").strip_dirs().sort_stats("time").print_stats(10)
else:
pensinsula_example('peninsula', args.particles, degree=args.degree,
verbose=args.verbose)
|
from parcels import NEMOGrid, Particle, ParticleSet
from argparse import ArgumentParser
def pensinsula_example(filename, npart, degree=3):
"""Example configuration of particle flow around an idealised Peninsula
:arg filename: Basename of the input grid file set
:arg npart: Number of particles to intialise"""
# Open grid file set
grid = NEMOGrid(filename, degree=degree)
# Initialise particles
pset = ParticleSet(npart, grid)
for p in range(npart):
lat = p * grid.lat_u.valid_max / npart + 0.45 / 1.852 / 60.
pset.add_particle(Particle(lon=3 / 1.852 / 60., lat=lat))
print "Initial particle positions:"
for p in pset._particles:
print p
# Advect the particles for 24h
time = 86400.
dt = 36.
timesteps = int(time / dt)
pset.advect(timesteps=timesteps, dt=dt)
print "Final particle positions:"
for p in pset._particles:
print p
if __name__ == "__main__":
p = ArgumentParser(description="""
Example of particle advection around an idealised peninsula""")
p.add_argument('-p', '--particles', type=int, default=20,
help='Number of particles to advect')
p.add_argument('-d', '--degree', type=int, default=3,
help='Degree of spatial interpolation')
args = p.parse_args()
pensinsula_example('peninsula', args.particles, degree=args.degree)
|
mit
|
Python
|
d5ed783c7dc691d7d0b847aa243989b626d90e9b
|
Add return None
|
bowen0701/algorithms_data_structures
|
alg_decimal_to_base.py
|
alg_decimal_to_base.py
|
from __future__ import print_function
from ds_stack import Stack
def convert_decimal_to_base2(dec_num):
"""Convert decimal number to binary number."""
rem_stack = Stack()
while dec_num > 0:
rem = dec_num % 2
rem_stack.push(rem)
dec_num = dec_num // 2
bin_str = ''
while not rem_stack.is_empty():
bin_str = bin_str + str(rem_stack.pop())
return bin_str
def convert_decimal_to_base(dec_num, base):
"""Convert decimal number to any base."""
rem_stack = Stack()
digits = '0123456789ABCDEF'
while dec_num > 0:
rem = dec_num % base
rem_stack.push(rem)
dec_num = dec_num // base
bin_str = ''
while not rem_stack.is_empty():
bin_str = bin_str + digits[rem_stack.pop()]
return bin_str
def _recur_decimal_to_base(dec_num, base, rem_stack):
digits = '0123456789ABCDEF'
if dec_num < base:
rem_stack.push(digits[dec_num])
else:
rem_stack.push(digits[dec_num % base])
_recur_decimal_to_base(
dec_num // base, base, rem_stack)
return None
def convert_decimal_to_base_by_recur(dec_num, base):
"""Convert decimal number to any base by recussion with Stack."""
rem_stack = Stack()
_recur_decimal_to_base(dec_num, base, rem_stack)
bin_str = ''
while not rem_stack.is_empty():
bin_str = bin_str + rem_stack.pop()
return bin_str
def main():
dec_num = 1024
print('Convert {} to base 2: {}'
.format(dec_num, convert_decimal_to_base2(dec_num)))
dec_num = 233
print('Convert {} to base 2: {}'
.format(dec_num, convert_decimal_to_base2(dec_num)))
print('Convert {} to base 8: {}'
.format(dec_num, convert_decimal_to_base(dec_num, 8)))
print('Convert {} to base 16: {}'
.format(dec_num, convert_decimal_to_base(dec_num, 16)))
print('Convert {} to base 16: {}'
.format(dec_num, convert_decimal_to_base_by_recur(dec_num, 16)))
if __name__ == '__main__':
main()
|
from __future__ import print_function
from ds_stack import Stack
def convert_decimal_to_base2(dec_num):
"""Convert decimal number to binary number."""
rem_stack = Stack()
while dec_num > 0:
rem = dec_num % 2
rem_stack.push(rem)
dec_num = dec_num // 2
bin_str = ''
while not rem_stack.is_empty():
bin_str = bin_str + str(rem_stack.pop())
return bin_str
def convert_decimal_to_base(dec_num, base):
"""Convert decimal number to any base."""
rem_stack = Stack()
digits = '0123456789ABCDEF'
while dec_num > 0:
rem = dec_num % base
rem_stack.push(rem)
dec_num = dec_num // base
bin_str = ''
while not rem_stack.is_empty():
bin_str = bin_str + digits[rem_stack.pop()]
return bin_str
def _recur_decimal_to_base(dec_num, base, rem_stack):
digits = '0123456789ABCDEF'
if dec_num < base:
rem_stack.push(digits[dec_num])
else:
rem_stack.push(digits[dec_num % base])
_recur_decimal_to_base(
dec_num // base, base, rem_stack)
def convert_decimal_to_base_by_recur(dec_num, base):
"""Convert decimal number to any base by recussion with Stack."""
rem_stack = Stack()
_recur_decimal_to_base(dec_num, base, rem_stack)
bin_str = ''
while not rem_stack.is_empty():
bin_str = bin_str + rem_stack.pop()
return bin_str
def main():
dec_num = 1024
print('Convert {} to base 2: {}'
.format(dec_num, convert_decimal_to_base2(dec_num)))
dec_num = 233
print('Convert {} to base 2: {}'
.format(dec_num, convert_decimal_to_base2(dec_num)))
print('Convert {} to base 8: {}'
.format(dec_num, convert_decimal_to_base(dec_num, 8)))
print('Convert {} to base 16: {}'
.format(dec_num, convert_decimal_to_base(dec_num, 16)))
print('Convert {} to base 16: {}'
.format(dec_num, convert_decimal_to_base_by_recur(dec_num, 16)))
if __name__ == '__main__':
main()
|
bsd-2-clause
|
Python
|
0205e519c2662bf33b59e20668f90a17a50c29e1
|
Add github URL to setup.py
|
google/ml_collections,google/ml_collections
|
setup.py
|
setup.py
|
# Copyright 2020 The ML Collections Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python 3
"""Setup for pip package."""
from setuptools import find_namespace_packages
from setuptools import setup
def _parse_requirements(requirements_txt_path):
with open(requirements_txt_path) as fp:
return fp.read().splitlines()
_VERSION = '0.1.0'
setup(
name='ml_collections',
version=_VERSION,
author='ML Collections Authors',
author_email='[email protected]',
description='ML Collections is a library of Python collections designed for ML usecases.',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
url='https://github.com/google/ml_collections',
license='Apache 2.0',
# Contained modules and scripts.
packages=find_namespace_packages(exclude=['*_test.py']),
install_requires=_parse_requirements('requirements.txt'),
tests_require=_parse_requirements('requirements-test.txt'),
python_requires='>=2.6',
include_package_data=True,
zip_safe=False,
# PyPI package information.
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python'
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
# Copyright 2020 The ML Collections Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python 3
"""Setup for pip package."""
from setuptools import find_namespace_packages
from setuptools import setup
def _parse_requirements(requirements_txt_path):
with open(requirements_txt_path) as fp:
return fp.read().splitlines()
_VERSION = '0.1.0'
setup(
name='ml_collections',
version=_VERSION,
author='ML Collections Authors',
author_email='[email protected]',
description='ML Collections is a library of Python collections designed for ML usecases.',
long_description=open('README.md').read(),
long_description_content_type='text/markdown',
# TODO(mohitreddy): Uncomment once private repo is created.
# url='https://github.com/google/ml_collections',
license='Apache 2.0',
# Contained modules and scripts.
packages=find_namespace_packages(exclude=['*_test.py']),
install_requires=_parse_requirements('requirements.txt'),
tests_require=_parse_requirements('requirements-test.txt'),
# TODO(mohitreddy): Double check python versions supported.
python_requires='>=2.6',
include_package_data=True,
zip_safe=False,
# PyPI package information.
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python'
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
|
apache-2.0
|
Python
|
f2b796b94ea1cd9c71500521404ef39d10ca091d
|
improve to_big_endian_binary function
|
ethereum/pyethereum,ethermarket/pyethereum,holiman/pyethereum,ethereum/pyethereum,shahankhatch/pyethereum,ebuchman/daoist_protocol,shahankhatch/pyethereum,harlantwood/pyethereum,jnnk/pyethereum,ddworken/pyethereum,karlfloersch/pyethereum,ethers/pyethereum,pipermerriam/pyethereum,inzem77/pyethereum,vaporry/pyethereum,ckeenan/pyethereum,pipermerriam/pyethereum,ebuchman/daoist_protocol,karlfloersch/pyethereum
|
utils.py
|
utils.py
|
def to_big_endian_binary(val):
s = '%x' % val
if len(s) & 1:
s = '0' + s
return s.decode('hex')
|
from binascii import unhexlify
def to_big_endian_binary(val):
# one (1) hex digit per four (4) bits
width = val.bit_length()
# unhexlify wants an even multiple of eight (8) bits, but we don't
# want more digits than we need (hence the ternary-ish 'or')
width += 8 - ((width % 8) or 8)
# format width specifier: four (4) bits per hex digit
fmt = '%%0%dx' % (width // 4)
# prepend zero (0) to the width, to zero-pad the output
return unhexlify(fmt % val)
|
mit
|
Python
|
4d85b334298bcfc58c9bfd2bdfae123302caa48e
|
Bump coveralls from 2.1.0 to 2.1.1 (#18)
|
glidernet/python-ogn-client
|
setup.py
|
setup.py
|
#!/usr/bin/env python3
from os import path
from setuptools import setup, find_packages
from ogn.client.settings import PACKAGE_VERSION
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ogn-client',
version=PACKAGE_VERSION,
description='A python module for the Open Glider Network',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/glidernet/python-ogn-client',
author='Konstantin Gründger aka Meisterschueler, Fabian P. Schmidt aka kerel',
author_email='[email protected]',
license='AGPLv3',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9'
],
keywords='gliding ogn',
packages=['ogn.{}'.format(package) for package in find_packages(where='ogn')],
python_requires='>=3',
install_requires=[],
extras_require={
'dev': [
'nose==1.3.7',
'coveralls==2.1.1',
'flake8==3.8.3'
]
},
zip_safe=False
)
|
#!/usr/bin/env python3
from os import path
from setuptools import setup, find_packages
from ogn.client.settings import PACKAGE_VERSION
here = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='ogn-client',
version=PACKAGE_VERSION,
description='A python module for the Open Glider Network',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/glidernet/python-ogn-client',
author='Konstantin Gründger aka Meisterschueler, Fabian P. Schmidt aka kerel',
author_email='[email protected]',
license='AGPLv3',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: GIS',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9'
],
keywords='gliding ogn',
packages=['ogn.{}'.format(package) for package in find_packages(where='ogn')],
python_requires='>=3',
install_requires=[],
extras_require={
'dev': [
'nose==1.3.7',
'coveralls==2.1.0',
'flake8==3.8.3'
]
},
zip_safe=False
)
|
agpl-3.0
|
Python
|
8fc2e0ebf9fe8f753f7e9cdc6ad67ed22604e022
|
support img src attr too
|
xsteadfastx/pelican-plugins,olgabot/pelican-plugins,farseerfc/pelican-plugins,ziaa/pelican-plugins,pestrickland/pelican-plugins,andreas-h/pelican-plugins,Neurita/pelican-plugins,joachimneu/pelican-plugins,cmacmackin/pelican-plugins,florianjacob/pelican-plugins,jfosorio/pelican-plugins,yuanboshe/pelican-plugins,lazycoder-ru/pelican-plugins,cctags/pelican-plugins,shireenrao/pelican-plugins,jantman/pelican-plugins,phrawzty/pelican-plugins,farseerfc/pelican-plugins,karya0/pelican-plugins,mwcz/pelican-plugins,mortada/pelican-plugins,lindzey/pelican-plugins,UHBiocomputation/pelican-plugins,clokep/pelican-plugins,proteansec/pelican-plugins,talha131/pelican-plugins,joachimneu/pelican-plugins,phrawzty/pelican-plugins,proteansec/pelican-plugins,danmackinlay/pelican-plugins,howthebodyworks/pelican-plugins,proteansec/pelican-plugins,gjreda/pelican-plugins,pxquim/pelican-plugins,prisae/pelican-plugins,yuanboshe/pelican-plugins,ziaa/pelican-plugins,prisae/pelican-plugins,MarkusH/pelican-plugins,florianjacob/pelican-plugins,jantman/pelican-plugins,ziaa/pelican-plugins,wilsonfreitas/pelican-plugins,andreas-h/pelican-plugins,cmacmackin/pelican-plugins,talha131/pelican-plugins,benjaminabel/pelican-plugins,publicus/pelican-plugins,cctags/pelican-plugins,howthebodyworks/pelican-plugins,MarkusH/pelican-plugins,phrawzty/pelican-plugins,ingwinlu/pelican-plugins,pestrickland/pelican-plugins,ingwinlu/pelican-plugins,xsteadfastx/pelican-plugins,andreas-h/pelican-plugins,pelson/pelican-plugins,pestrickland/pelican-plugins,M157q/pelican-plugins,publicus/pelican-plugins,Samael500/pelican-plugins,seandavi/pelican-plugins,if1live/pelican-plugins,farseerfc/pelican-plugins,mwcz/pelican-plugins,frickp/pelican-plugins,mortada/pelican-plugins,Xion/pelican-plugins,amitsaha/pelican-plugins,doctorwidget/pelican-plugins,amitsaha/pelican-plugins,MarkusH/pelican-plugins,makefu/pelican-plugins,farseerfc/pelican-plugins,xsteadfastx/pelican-plugins,Neurita/pelican-plugins,mwcz/pelican-plugins,lindzey/pelican-plugins,howthebodyworks/pelican-plugins,danmackinlay/pelican-plugins,rlaboiss/pelican-plugins,M157q/pelican-plugins,seandavi/pelican-plugins,jakevdp/pelican-plugins,Xion/pelican-plugins,doctorwidget/pelican-plugins,UHBiocomputation/pelican-plugins,lele1122/pelican-plugins,karya0/pelican-plugins,davidmarquis/pelican-plugins,shireenrao/pelican-plugins,howthebodyworks/pelican-plugins,lindzey/pelican-plugins,ingwinlu/pelican-plugins,MarkusH/pelican-plugins,proteansec/pelican-plugins,lele1122/pelican-plugins,lazycoder-ru/pelican-plugins,kdheepak89/pelican-plugins,mortada/pelican-plugins,jakevdp/pelican-plugins,joachimneu/pelican-plugins,andreas-h/pelican-plugins,olgabot/pelican-plugins,danmackinlay/pelican-plugins,UHBiocomputation/pelican-plugins,cctags/pelican-plugins,kdheepak89/pelican-plugins,talha131/pelican-plugins,davidmarquis/pelican-plugins,wilsonfreitas/pelican-plugins,if1live/pelican-plugins,lele1122/pelican-plugins,mortada/pelican-plugins,barrysteyn/pelican-plugins,cmacmackin/pelican-plugins,gjreda/pelican-plugins,goerz/pelican-plugins,farseerfc/pelican-plugins,ingwinlu/pelican-plugins,Neurita/pelican-plugins,florianjacob/pelican-plugins,gjreda/pelican-plugins,jfosorio/pelican-plugins,karya0/pelican-plugins,publicus/pelican-plugins,jfosorio/pelican-plugins,mitchins/pelican-plugins,davidmarquis/pelican-plugins,mortada/pelican-plugins,seandavi/pelican-plugins,M157q/pelican-plugins,amitsaha/pelican-plugins,shireenrao/pelican-plugins,jantman/pelican-plugins,cctags/pelican-plugins,xsteadfastx/pelican-plugins,ziaa/pelican-plugins,makefu/pelican-plugins,benjaminabel/pelican-plugins,prisae/pelican-plugins,barrysteyn/pelican-plugins,Samael500/pelican-plugins,rlaboiss/pelican-plugins,jakevdp/pelican-plugins,pxquim/pelican-plugins,seandavi/pelican-plugins,joachimneu/pelican-plugins,clokep/pelican-plugins,mikitex70/pelican-plugins,mwcz/pelican-plugins,Neurita/pelican-plugins,talha131/pelican-plugins,yuanboshe/pelican-plugins,jantman/pelican-plugins,wilsonfreitas/pelican-plugins,pxquim/pelican-plugins,Samael500/pelican-plugins,davidmarquis/pelican-plugins,barrysteyn/pelican-plugins,mitchins/pelican-plugins,karya0/pelican-plugins,pelson/pelican-plugins,pelson/pelican-plugins,clokep/pelican-plugins,doctorwidget/pelican-plugins,publicus/pelican-plugins,makefu/pelican-plugins,mitchins/pelican-plugins,amitsaha/pelican-plugins,kdheepak89/pelican-plugins,M157q/pelican-plugins,shireenrao/pelican-plugins,benjaminabel/pelican-plugins,florianjacob/pelican-plugins,if1live/pelican-plugins,pestrickland/pelican-plugins,jakevdp/pelican-plugins,if1live/pelican-plugins,Xion/pelican-plugins,kdheepak89/pelican-plugins,rlaboiss/pelican-plugins,gjreda/pelican-plugins,goerz/pelican-plugins,goerz/pelican-plugins,lindzey/pelican-plugins,UHBiocomputation/pelican-plugins,Samael500/pelican-plugins,jfosorio/pelican-plugins,frickp/pelican-plugins,prisae/pelican-plugins,danmackinlay/pelican-plugins,barrysteyn/pelican-plugins,lazycoder-ru/pelican-plugins,benjaminabel/pelican-plugins,lele1122/pelican-plugins,wilsonfreitas/pelican-plugins,cmacmackin/pelican-plugins,frickp/pelican-plugins,MarkusH/pelican-plugins,frickp/pelican-plugins,goerz/pelican-plugins,clokep/pelican-plugins,mikitex70/pelican-plugins,olgabot/pelican-plugins,mitchins/pelican-plugins,mikitex70/pelican-plugins,pelson/pelican-plugins,Xion/pelican-plugins,lazycoder-ru/pelican-plugins,phrawzty/pelican-plugins,doctorwidget/pelican-plugins,talha131/pelican-plugins,rlaboiss/pelican-plugins,yuanboshe/pelican-plugins,mikitex70/pelican-plugins,makefu/pelican-plugins,olgabot/pelican-plugins,pxquim/pelican-plugins
|
interlinks/interlinks.py
|
interlinks/interlinks.py
|
# -*- coding: utf-8 -*-
"""
Interlinks
=========================
This plugin allows you to include "interwiki" or shortcuts links into the blog, as keyword>rest_of_url
"""
from bs4 import BeautifulSoup
from pelican import signals
import re
interlinks = {}
def getSettings (generator):
global interlinks
interlinks = {'this': generator.settings['SITEURL']+"/"}
if 'INTERLINKS' in generator.settings:
for key, value in generator.settings['INTERLINKS'].items():
interlinks[key] = value
def content_object_init(instance):
if instance._content is not None:
content = instance._content
# use Python's built-in parser so no duplicated html & body tags appear, or use tag.unwrap()
text = BeautifulSoup(content, "html.parser")
if 'a' in content:
for link in text.find_all(href=re.compile("(.+?)>")):
url = link.get('href')
m = re.search(r"(.+?)>", url).groups()
name = m[0]
if name in interlinks:
hi = url.replace(name+">",interlinks[name])
link['href'] = hi
if 'img' in content:
for img in text.find_all('img', src=re.compile("(.+?)>")):
url = img.get('src')
m = re.search(r"(.+?)>", url).groups()
name = m[0]
if name in interlinks:
hi = url.replace(name+">",interlinks[name])
img['src'] = hi
instance._content = text.decode()
def register():
signals.generator_init.connect(getSettings)
signals.content_object_init.connect(content_object_init)
|
# -*- coding: utf-8 -*-
"""
Interlinks
=========================
This plugin allows you to include "interwiki" or shortcuts links into the blog, as keyword>rest_of_url
"""
from bs4 import BeautifulSoup
from pelican import signals
import re
interlinks = {}
def getSettings (generator):
global interlinks
interlinks = {'this': generator.settings['SITEURL']+"/"}
if 'INTERLINKS' in generator.settings:
for key, value in generator.settings['INTERLINKS'].items():
interlinks[key] = value
def content_object_init(instance):
if instance._content is not None:
content = instance._content
# use Python's built-in parser so no duplicated html & body tags appear, or use tag.unwrap()
text = BeautifulSoup(content, "html.parser")
if 'a' in content:
for link in text.find_all(href=re.compile("(.+?)>")):
url = link.get('href')
m = re.search(r"(.+?)>", url).groups()
name = m[0]
if name in interlinks:
hi = url.replace(name+">",interlinks[name])
link['href'] = hi
instance._content = text.decode()
def register():
signals.generator_init.connect(getSettings)
signals.content_object_init.connect(content_object_init)
|
agpl-3.0
|
Python
|
24f93c560c2fa19c512d2d88b8e1219690e2db68
|
Bump the version up to 0.8 for release
|
cognitect/transit-python,dand-oss/transit-python,cognitect/transit-python,dand-oss/transit-python
|
setup.py
|
setup.py
|
#!/usr/bin/env python
## Copyright 2014 Cognitect. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS-IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from setuptools import setup, find_packages
import subprocess
revision = subprocess.check_output("./bin/revision")
setup(name="transit-python",
version="0.8."+revision,
description="Transit marshalling for Python",
author="Cognitect",
url="https://github.com/cognitect/transit-python",
packages=find_packages(),
install_requires=["python-dateutil", "msgpack-python"])
|
#!/usr/bin/env python
## Copyright 2014 Cognitect. All Rights Reserved.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS-IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
from setuptools import setup, find_packages
import subprocess
revision = subprocess.check_output("./bin/revision")
setup(name="transit-python",
version="0.1."+revision,
description="Transit marshalling for Python",
author="Cognitect",
url="https://github.com/cognitect/transit-python",
packages=find_packages(),
install_requires=["python-dateutil", "msgpack-python"])
|
apache-2.0
|
Python
|
f6a2a21d91e06d417da8cd93fb2a28f28385ed19
|
fix test
|
piotrmaslanka/satella,piotrmaslanka/satella
|
tests/test_coding/test_algos.py
|
tests/test_coding/test_algos.py
|
# coding=UTF-8
from __future__ import print_function, absolute_import, division
import six
import unittest
from satella.coding import merge_dicts
class TestMergeDicts(unittest.TestCase):
def test_merge_dicts(self):
tak = merge_dicts({'kupujemy': 'tak'}, {'kupujemy': 'nie'})
nie = merge_dicts({'kupujemy': 'nie'}, {'kupujemy': 'tak'})
self.assertEquals(tak['kupujemy'], 'nie')
self.assertEquals(nie['kupujemy'], 'tak')
def test_merge_lists(self):
tak = merge_dicts({'kupujemy': ['tak']}, {'kupujemy': ['nie']})
self.assertEqual(set(tak['kupujemy']), set(['tak', 'nie']))
|
# coding=UTF-8
from __future__ import print_function, absolute_import, division
import six
import unittest
from satella.coding import merge_dicts
class TestMergeDicts(unittest.TestCase):
def test_merge_dicts(self):
tak = merge_dicts({'kupujemy': 'tak'}, {'kupujemy': 'nie'})
nie = merge_dicts({'kupujemy': 'nie'}, {'kupujemy': 'tak'})
self.assertEquals(tak['kupujemy'], 'nie')
self.assertEquals(nie['kupujemy'], 'tak')
def test_merge_lists(self):
tak = merge_dicts({'kupujemy': ['tak']}, {'kupujemy': ['nie']})
self.assertEqual(set(tak), set(['tak', 'nie']))
|
mit
|
Python
|
b09197a38ebbf32abe45a787c475ae6706beaa53
|
set ignore property
|
huahbo/pyamg,kidaa/pyamg,huahbo/pyamg,pyamg/pyamg,huahbo/pyamg,pyamg/pyamg,huahbo/pyamg,kidaa/pyamg,kidaa/pyamg,pyamg/pyamg,kidaa/pyamg
|
pyamg/relaxation/info.py
|
pyamg/relaxation/info.py
|
"""
Relaxation methods
------------------
The multigrid cycle is formed by two complementary procedures: relaxation and
coarse-grid correction. The role of relaxation is to rapidly damp oscillatory
(high-frequency) errors out of the approximate solution. When the error is
smooth, it can then be accurately represented on the coarser grid, where a
solution, or approximate solution, can be computed.
Iterative methods for linear systems that have an error smoothing property
are valid relaxation methods. Since the purpose of a relaxation method is
to smooth oscillatory errors, its effectiveness on non-oscillatory errors
is not important. This point explains why simple iterative methods like
Gauss-Seidel iteration are effective relaxation methods while being very
slow to converge to the solution of Ax=b.
PyAMG implements relaxation methods of the following varieties:
1. Jacobi iteration
2. Gauss-Seidel iteration
3. Successive Over-Relaxation
4. Polynomial smoothing (e.g. Chebyshev)
5. Kaczmarz iteration
Refer to the docstrings of the individual methods for additional information.
"""
#TODO: explain separation of basic methods from interface methods.
#TODO: explain why each class of methods exist (parallel vs. serial, SPD vs. indefinite)
postpone_import = 1
|
"""
Relaxation methods
------------------
The multigrid cycle is formed by two complementary procedures: relaxation and
coarse-grid correction. The role of relaxation is to rapidly damp oscillatory
(high-frequency) errors out of the approximate solution. When the error is
smooth, it can then be accurately represented on the coarser grid, where a
solution, or approximate solution, can be computed.
Iterative methods for linear systems that have an error smoothing property
are valid relaxation methods. Since the purpose of a relaxation method is
to smooth oscillatory errors, its effectiveness on non-oscillatory errors
is not important. This point explains why simple iterative methods like
Gauss-Seidel iteration are effective relaxation methods while being very
slow to converge to the solution of Ax=b.
PyAMG implements relaxation methods of the following varieties:
1. Jacobi iteration
2. Gauss-Seidel iteration
3. Successive Over-Relaxation
4. Polynomial smoothing (e.g. Chebyshev)
5. Kaczmarz iteration
Refer to the docstrings of the individual methods for additional information.
"""
#TODO: explain separation of basic methods from interface methods.
postpone_import = 1
|
mit
|
Python
|
be8625d983f147385956079c1c1b4bbc2b3ccb17
|
fix flake8
|
pnuckowski/aioresponses
|
aioresponses/compat.py
|
aioresponses/compat.py
|
# -*- coding: utf-8 -*-
import asyncio # noqa: F401
import sys
from typing import Dict, Optional, Tuple, Union # noqa
from urllib.parse import parse_qsl, urlencode
from aiohttp import __version__ as aiohttp_version, StreamReader
from multidict import MultiDict
from pkg_resources import parse_version
from yarl import URL
if sys.version_info < (3, 7):
from re import _pattern_type as Pattern
else:
from re import Pattern
AIOHTTP_VERSION = parse_version(aiohttp_version)
if AIOHTTP_VERSION >= parse_version('3.0.0'):
from aiohttp.client_proto import ResponseHandler
def stream_reader_factory( # noqa
loop: 'Optional[asyncio.AbstractEventLoop]' = None
):
protocol = ResponseHandler(loop=loop)
return StreamReader(protocol, limit=2 ** 16, loop=loop)
else:
def stream_reader_factory(loop=None):
return StreamReader()
def merge_params(url: 'Union[URL, str]', params: 'Dict' = None) -> 'URL':
url = URL(url)
if params:
query_params = MultiDict(url.query)
query_params.extend(url.with_query(params).query)
return url.with_query(query_params)
return url
def normalize_url(url: 'Union[URL, str]') -> 'URL':
"""Normalize url to make comparisons."""
url = URL(url)
return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
try:
from aiohttp import RequestInfo
except ImportError:
class RequestInfo(object):
__slots__ = ('url', 'method', 'headers', 'real_url')
def __init__(
self, url: URL, method: str, headers: Dict, real_url: str
):
self.url = url
self.method = method
self.headers = headers
self.real_url = real_url
__all__ = [
'URL',
'Pattern',
'RequestInfo',
'AIOHTTP_VERSION',
'merge_params',
'stream_reader_factory',
'normalize_url',
]
|
# -*- coding: utf-8 -*-
import asyncio # noqa: F401
import sys
from typing import Dict, Optional, Tuple, Union # noqa
from urllib.parse import parse_qsl, urlencode
from aiohttp import __version__ as aiohttp_version, StreamReader
from multidict import MultiDict
from pkg_resources import parse_version
from yarl import URL
if sys.version_info < (3, 7):
from re import _pattern_type as Pattern
else:
from re import Pattern
AIOHTTP_VERSION = parse_version(aiohttp_version)
if AIOHTTP_VERSION >= parse_version('3.0.0'):
from aiohttp.client_proto import ResponseHandler
def stream_reader_factory( # noqa
loop: 'Optional[asyncio.AbstractEventLoop]' = None
):
protocol = ResponseHandler(loop=loop)
return StreamReader(protocol, limit=2 ** 16, loop=loop)
else:
def stream_reader_factory(loop=None):
return StreamReader()
def merge_params(url: 'Union[URL, str]', params: 'Dict' = None) -> 'URL':
url = URL(url)
if params:
query_params = MultiDict(url.query)
query_params.extend(url.with_query(params).query)
return url.with_query(query_params)
return url
def normalize_url(url: 'Union[URL, str]') -> 'URL':
"""Normalize url to make comparisons."""
url = URL(url)
return url.with_query(urlencode(sorted(parse_qsl(url.query_string))))
try:
from aiohttp import RequestInfo
except ImportError:
class RequestInfo(object):
__slots__ = ('url', 'method', 'headers', 'real_url')
def __init__(self, url: URL, method: str, headers: Dict, real_url: str):
self.url = url
self.method = method
self.headers = headers
self.real_url = real_url
__all__ = [
'URL',
'Pattern',
'RequestInfo',
'AIOHTTP_VERSION',
'merge_params',
'stream_reader_factory',
'normalize_url',
]
|
mit
|
Python
|
5eabe658d3c20f25fa78d1fc4fe2d2d692390e75
|
Make requests.get(...) a bit more robust
|
ericvrp/PowerToThePeople,ericvrp/PowerToThePeople
|
PowerToThePeople.py
|
PowerToThePeople.py
|
#!/usr/bin/env python
import serial
from requests import get
from requests.exceptions import Timeout, ConnectionError
from time import time, strftime, asctime
from sys import stdout
from subprocess import check_output
try:
from config import *
except ImportError:
from defaults import *
print 'Warning! copy defaults.py to config.py and edit that file!'
PVOUTPUT_INTERVAL = 300 #5 minutes between sending updates
def main():
usbDevice = check_output('ls /dev/ttyACM*', shell=True).strip()
ser = serial.Serial(usbDevice, 115200)
ser.flushInput()
ser.readline() #Skip first led flash to get a proper duration after this
lastPvOutputTime = lastLedFlashTime = time() #first impression duration will be inaccurate
nLedFlashes = 0
while True:
s = ser.readline()
#print 'Arduino: ', s,
now = time()
watt = 3600 / (now - lastLedFlashTime)
lastLedFlashTime = now
nLedFlashes += 1
print '%s : %4d Watt' % (asctime(), watt)
try:
r = get('http://127.0.0.1:8083/watt/%d Watt' % watt, timeout=1.0) #update webcache
except Timeout:
print 'Warning: webcache update failed'
if now >= lastPvOutputTime + PVOUTPUT_INTERVAL: #XXX should post average power consumption
watt_average = nLedFlashes * 3600 / (now - lastPvOutputTime)
#print 'Watt Average %d' % watt_average
payload = {
'key' : pvoutput_key,
'sid' : pvoutput_sid,
'd' : strftime('%Y%m%d'),
't' : strftime('%H:%M'),
'v4' : watt_average
}
try:
r = get('http://pvoutput.org/service/r2/addstatus.jsp', params=payload, timeout=5.0)
except ConnectionError:
print 'Warning: pvoutput update failed'
lastPvOutputTime = now
nLedFlashes = 0
stdout.flush()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import serial
from requests import get
from time import time, strftime, asctime
from sys import stdout
from subprocess import check_output
try:
from config import *
except ImportError:
from defaults import *
print 'Warning! copy defaults.py to config.py and edit that file!'
PVOUTPUT_INTERVAL = 300 #5 minutes between sending updates
def main():
usbDevice = check_output('ls /dev/ttyACM*', shell=True).strip()
ser = serial.Serial(usbDevice, 115200)
ser.flushInput()
ser.readline() #Skip first led flash to get a proper duration after this
lastPvOutputTime = lastLedFlashTime = time() #first impression duration will be inaccurate
nLedFlashes = 0
while True:
s = ser.readline()
#print 'Arduino: ', s,
now = time()
watt = 3600 / (now - lastLedFlashTime)
lastLedFlashTime = now
nLedFlashes += 1
print '%s : %4d Watt' % (asctime(), watt)
r = get('http://localhost:8083/watt/%d Watt' % watt) #update webcache
if now >= lastPvOutputTime + PVOUTPUT_INTERVAL: #XXX should post average power consumption
watt_average = nLedFlashes * 3600 / (now - lastPvOutputTime)
#print 'Watt Average %d' % watt_average
payload = {
'key' : pvoutput_key,
'sid' : pvoutput_sid,
'd' : strftime('%Y%m%d'),
't' : strftime('%H:%M'),
'v4' : watt_average
}
r = get('http://pvoutput.org/service/r2/addstatus.jsp', params=payload)
lastPvOutputTime = now
nLedFlashes = 0
stdout.flush()
if __name__ == '__main__':
main()
|
mit
|
Python
|
20db5eb25162665e817bef993ea84bbd1b9e3a45
|
Update setup.py
|
oemof/feedinlib
|
setup.py
|
setup.py
|
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developer group',
author_email='[email protected]',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib[optional] >= 0.6.0',
'windpowerlib >= 0.2.0',
'scipy',
'shapely'])
|
# -*- coding: utf-8 -*-
"""
@author: uwe
"""
import sys
import os
from setuptools import setup
setup(name='feedinlib',
version='0.0.12',
description='Creating time series from pv or wind power plants.',
url='http://github.com/oemof/feedinlib',
author='oemof developer group',
author_email='[email protected]',
license='GPL3',
packages=['feedinlib'],
zip_safe=False,
install_requires=['numpy >= 1.7.0',
'pandas >= 0.13.1',
'pvlib[optional] >= 0.5.0',
'windpowerlib >= 0.0.6',
'scipy',
'shapely'])
|
mit
|
Python
|
888f2ee4c423e18a40cbcaec3eb9f4f29f993e44
|
add mock payment as default for OrderPaymentFactory
|
onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle
|
bluebottle/test/factory_models/payments.py
|
bluebottle/test/factory_models/payments.py
|
import factory
from bluebottle.payments.models import Payment, OrderPayment
from bluebottle.payments_logger.models import PaymentLogEntry
from .orders import OrderFactory
class OrderPaymentFactory(factory.DjangoModelFactory):
FACTORY_FOR = OrderPayment
payment_method = 'mock'
amount = 100
order = factory.SubFactory(OrderFactory)
class PaymentFactory(factory.DjangoModelFactory):
FACTORY_FOR = Payment
order_payment = factory.SubFactory(OrderPaymentFactory)
|
import factory
from bluebottle.payments.models import Payment, OrderPayment
from bluebottle.payments_logger.models import PaymentLogEntry
from .orders import OrderFactory
class OrderPaymentFactory(factory.DjangoModelFactory):
FACTORY_FOR = OrderPayment
amount = 100
order = factory.SubFactory(OrderFactory)
class PaymentFactory(factory.DjangoModelFactory):
FACTORY_FOR = Payment
order_payment = factory.SubFactory(OrderPaymentFactory)
|
bsd-3-clause
|
Python
|
ed13a4d6ea21842568d1ef63797d50169b6dd040
|
Add rpath
|
patricksnape/staged-recipes,petrushy/staged-recipes,mariusvniekerk/staged-recipes,mariusvniekerk/staged-recipes,birdsarah/staged-recipes,Juanlu001/staged-recipes,kwilcox/staged-recipes,conda-forge/staged-recipes,conda-forge/staged-recipes,ocefpaf/staged-recipes,Juanlu001/staged-recipes,ocefpaf/staged-recipes,asmeurer/staged-recipes,ReimarBauer/staged-recipes,petrushy/staged-recipes,patricksnape/staged-recipes,kwilcox/staged-recipes,jakirkham/staged-recipes,hadim/staged-recipes,johanneskoester/staged-recipes,jakirkham/staged-recipes,johanneskoester/staged-recipes,ReimarBauer/staged-recipes,scopatz/staged-recipes,goanpeca/staged-recipes,asmeurer/staged-recipes,SylvainCorlay/staged-recipes,jochym/staged-recipes,chrisburr/staged-recipes,stuertz/staged-recipes,goanpeca/staged-recipes,jochym/staged-recipes,birdsarah/staged-recipes,hadim/staged-recipes,chrisburr/staged-recipes,dschreij/staged-recipes,igortg/staged-recipes,scopatz/staged-recipes,dschreij/staged-recipes,stuertz/staged-recipes,SylvainCorlay/staged-recipes,igortg/staged-recipes
|
recipes/py2app/fix_macos_rpath.py
|
recipes/py2app/fix_macos_rpath.py
|
"""
Tool for initial rpath fix for prebuilt binaries
"""
from __future__ import absolute_import, division, print_function
import os
import glob
from subprocess import CalledProcessError, check_output
# =============================================================================
if __name__ == '__main__':
main_files = glob.glob('py2app/apptemplate/prebuilt/main*')
secondary_files = glob.glob('py2app/apptemplate/prebuilt/secondary*')
for bin_file in main_files + secondary_files:
if os.path.isfile(bin_file):
print(bin_file)
libraries = list()
try:
libraries = check_output(['otool', '-L', bin_file]).decode('utf8').split('\n')
except CalledProcessError:
pass
for line in libraries[1:]:
lib = line.strip().split()
if len(lib) > 0:
lib = lib[0]
new_lib = None
if 'libgcc_s' in lib:
new_lib = os.path.join('@rpath', 'lib', lib.split('/')[-1])
if new_lib is not None:
print('Changing {lib} to {new_lib}'.format(lib=lib, new_lib=new_lib))
cmd = ['install_name_tool', '-change', lib, new_lib, bin_file]
print(' '.join(cmd))
output = check_output(cmd)
cmd = ['install_name_tool', '-add_rpath', os.getenv('PREFIX'), bin_file]
print(' '.join(cmd))
output = check_output(cmd)
|
"""
Tool for initial rpath fix for prebuilt binaries
"""
from __future__ import absolute_import, division, print_function
import os
import glob
from subprocess import CalledProcessError, check_output
# =============================================================================
if __name__ == '__main__':
main_files = glob.glob('py2app/apptemplate/prebuilt/main*')
secondary_files = glob.glob('py2app/apptemplate/prebuilt/secondary*')
for bin_file in main_files + secondary_files:
if os.path.isfile(bin_file):
print(bin_file)
libraries = list()
try:
libraries = check_output(['otool', '-L', bin_file]).decode('utf8').split('\n')
except CalledProcessError:
pass
for line in libraries[1:]:
lib = line.strip().split()
if len(lib) > 0:
lib = lib[0]
new_lib = None
if 'libgcc_s' in lib:
new_lib = os.path.join('@rpath', lib.split('/')[-1])
if new_lib is not None:
print('Changing {lib} to {new_lib}'.format(lib=lib, new_lib=new_lib))
cmd = ['install_name_tool', '-change', lib, new_lib, bin_file]
print(' '.join(cmd))
output = check_output(cmd)
|
bsd-3-clause
|
Python
|
cfabd36edd10819151caa25e8a30ef2938a55905
|
add django-compat as requirement
|
arteria/django-ar-organizations,arteria/django-ar-organizations
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os.path import join, dirname
from setuptools import setup, find_packages
import organizations as app
def long_description():
try:
return open(join(dirname(__file__), 'README.rst')).read()
except IOError:
return "LONG_DESCRIPTION Error"
setup(
author="Ben Lopatin + arteria GmbH",
author_email="[email protected]",
name='django-ar-organizations',
version=app.__version__,
description='Group accounts for Django',
long_description=long_description(),
url='https://github.com/wellfire/django-organizations/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
install_requires=[
'Django>=1.4',
'django-extensions>=0.9',
'django-compat>=1.0.2',
],
# test_suite='tests.runtests.runtests',
include_package_data=True,
packages=find_packages(exclude=["tests.tests", "tests.test_app", "tests"]),
zip_safe=False
)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from os.path import join, dirname
from setuptools import setup, find_packages
import organizations as app
def long_description():
try:
return open(join(dirname(__file__), 'README.rst')).read()
except IOError:
return "LONG_DESCRIPTION Error"
setup(
author="Ben Lopatin + arteria GmbH",
author_email="[email protected]",
name='django-ar-organizations',
version=app.__version__,
description='Group accounts for Django',
long_description=long_description(),
url='https://github.com/wellfire/django-organizations/',
license='BSD License',
platforms=['OS Independent'],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
],
install_requires=[
'Django>=1.4',
'django-extensions>=0.9',
],
#test_suite='tests.runtests.runtests',
include_package_data=True,
packages=find_packages(exclude=["tests.tests", "tests.test_app", "tests"]),
zip_safe=False
)
|
bsd-2-clause
|
Python
|
333df12d64b7d0724a90c155858e3a8421967aa0
|
Add test for copy_reads_file()
|
igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool
|
tests/samples/test_fake.py
|
tests/samples/test_fake.py
|
import os
import pytest
from virtool.samples.fake import create_fake_sample, create_fake_samples, copy_reads_file, READ_FILES_PATH
from virtool.fake.wrapper import FakerWrapper
from virtool.samples.db import LIST_PROJECTION
@pytest.fixture
def app(dbi, pg, run_in_thread, tmp_path):
return {
"db": dbi,
"fake": FakerWrapper(),
"pg": pg,
"run_in_thread": run_in_thread,
"settings": {
"data_path": tmp_path
},
}
@pytest.mark.parametrize("paired", [True, False])
@pytest.mark.parametrize("finalized", [True, False])
async def test_create_fake_unpaired(paired, finalized, app, snapshot,
static_time):
fake_sample = await create_fake_sample(app,
"sample_1",
"bob",
paired=paired,
finalized=finalized)
for key in LIST_PROJECTION:
assert key in fake_sample
if finalized is True:
assert len(fake_sample["reads"]) == (2 if paired else 1)
assert fake_sample["ready"] is True
snapshot.assert_match(fake_sample)
async def test_create_fake_samples(app, snapshot, dbi, static_time):
samples = await create_fake_samples(app)
assert len(samples) == 3
for sample in samples:
snapshot.assert_match(sample)
assert os.listdir(app["settings"]["data_path"] / "samples" / "LB1U6zCj") == ["reads_1.fq.gz"]
assert set(os.listdir(app["settings"]["data_path"] / "samples" / "2x6YnyMt")) == {"reads_1.fq.gz", "reads_2.fq.gz"}
async def test_copy_reads_file(app):
file_path = READ_FILES_PATH / "paired_1.fq.gz"
await copy_reads_file(app, file_path, "reads_1.fq.gz", "sample_1")
assert os.listdir(app["settings"]["data_path"] / "samples" / "sample_1") == ["reads_1.fq.gz"]
|
import os
import pytest
from virtool.samples.fake import create_fake_sample, create_fake_samples
from virtool.fake.wrapper import FakerWrapper
from virtool.samples.db import LIST_PROJECTION
@pytest.fixture
def app(dbi, pg, run_in_thread, tmp_path):
return {
"db": dbi,
"fake": FakerWrapper(),
"pg": pg,
"run_in_thread": run_in_thread,
"settings": {
"data_path": tmp_path
},
}
@pytest.mark.parametrize("paired", [True, False])
@pytest.mark.parametrize("finalized", [True, False])
async def test_create_fake_unpaired(paired, finalized, app, snapshot,
static_time):
fake_sample = await create_fake_sample(app,
"sample_1",
"bob",
paired=paired,
finalized=finalized)
for key in LIST_PROJECTION:
assert key in fake_sample
if finalized is True:
assert len(fake_sample["reads"]) == (2 if paired else 1)
assert fake_sample["ready"] is True
snapshot.assert_match(fake_sample)
async def test_create_fake_samples(app, snapshot, dbi, static_time):
samples = await create_fake_samples(app)
assert len(samples) == 3
for sample in samples:
snapshot.assert_match(sample)
assert os.listdir(app["settings"]["data_path"] / "samples" / "LB1U6zCj") == ["reads_1.fq.gz"]
assert set(os.listdir(app["settings"]["data_path"] / "samples" / "2x6YnyMt")) == {"reads_1.fq.gz", "reads_2.fq.gz"}
|
mit
|
Python
|
e822a1c863d5ff2b37f1123f2a5fae63061f7d44
|
fix heartbeat origin
|
alerta/alerta-contrib,msupino/alerta-contrib,msupino/alerta-contrib,alerta/alerta-contrib,alerta/alerta-contrib
|
alert-sqs/alert-sqs.py
|
alert-sqs/alert-sqs.py
|
#!/usr/bin/env python
import os
import settings
from alert import Alert, Heartbeat, ApiClient
from kombu import BrokerConnection
from Queue import Empty
__version__ = '3.0.0'
from kombu.utils.debug import setup_logging
# setup_logging(loglevel='DEBUG', loggers=[''])
def main():
broker_url = getattr(settings, 'broker_url', 'sqs://')
transport_options = getattr(settings, 'transport_options', {'region': 'eu-west-1'})
sqs_queue = getattr(settings, 'sqs_queue', 'alerta')
connection = BrokerConnection(broker_url, transport_options=transport_options)
queue = connection.SimpleQueue(sqs_queue)
api = ApiClient()
while True:
try:
message = queue.get(block=True, timeout=20)
print message.payload
api.send_alert(Alert(**message.payload))
message.ack()
except Empty:
pass
except (KeyboardInterrupt, SystemExit):
break
api.send_heartbeat(Heartbeat(origin='alert-sqs/%s' % os.uname()[1], tags=[__version__]))
queue.close()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import settings
from alert import Alert, Heartbeat, ApiClient
from kombu import BrokerConnection
from Queue import Empty
__version__ = '3.0.0'
from kombu.utils.debug import setup_logging
# setup_logging(loglevel='DEBUG', loggers=[''])
def main():
broker_url = getattr(settings, 'broker_url', 'sqs://')
transport_options = getattr(settings, 'transport_options', {'region': 'eu-west-1'})
sqs_queue = getattr(settings, 'sqs_queue', 'alerta')
connection = BrokerConnection(broker_url, transport_options=transport_options)
queue = connection.SimpleQueue(sqs_queue)
api = ApiClient()
while True:
try:
message = queue.get(block=True, timeout=20)
api.send_alert(Alert(**message.payload))
message.ack()
except Empty:
pass
except (KeyboardInterrupt, SystemExit):
break
api.send_heartbeat(Heartbeat(origin='alert-sqs', tags=[__version__]))
queue.close()
if __name__ == '__main__':
main()
|
mit
|
Python
|
da006dee5771313c5e67f0ce8150bb3a216a0697
|
Bump the minor version number to reflect the relatively large scale removal of functionality.
|
pwcazenave/PyFVCOM
|
PyFVCOM/__init__.py
|
PyFVCOM/__init__.py
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.4.0'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
|
"""
The FVCOM Python toolbox (PyFVCOM)
"""
__version__ = '1.3.4'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = '[email protected]'
import inspect
from warnings import warn
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
|
mit
|
Python
|
7c66a0b34806af9cf1ac6722318534643dea3865
|
Add classifiers
|
thombashi/sqlitebiter,thombashi/sqlitebiter
|
setup.py
|
setup.py
|
from __future__ import with_statement
import os.path
import setuptools
import sqlitebiter
REQUIREMENT_DIR = "requirements"
with open("README.rst") as fp:
long_description = fp.read()
with open(os.path.join("docs", "pages", "introduction", "summary.txt")) as f:
summary = f.read()
with open(os.path.join(REQUIREMENT_DIR, "requirements.txt")) as f:
install_requires = [line.strip() for line in f if line.strip()]
with open(os.path.join(REQUIREMENT_DIR, "test_requirements.txt")) as f:
tests_require = [line.strip() for line in f if line.strip()]
setuptools.setup(
name="sqlitebiter",
version=sqlitebiter.VERSION,
url="https://github.com/thombashi/sqlitebiter",
bugtrack_url="https://github.com/thombashi/sqlitebiter/issues",
author="Tsuyoshi Hombashi",
author_email="[email protected]",
description=summary,
include_package_data=True,
install_requires=install_requires,
keywords=["SQLite", "converter", "CSV", "JSON", "Excel", "Google Sheets"],
license="MIT License",
long_description=long_description,
packages=setuptools.find_packages(exclude=['test*']),
setup_requires=["pytest-runner"],
tests_require=tests_require,
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Database",
],
entry_points={
"console_scripts": [
"sqlitebiter=sqlitebiter.sqlitebiter:cmd",
],
}
)
|
from __future__ import with_statement
import os.path
import setuptools
import sqlitebiter
REQUIREMENT_DIR = "requirements"
with open("README.rst") as fp:
long_description = fp.read()
with open(os.path.join("docs", "pages", "introduction", "summary.txt")) as f:
summary = f.read()
with open(os.path.join(REQUIREMENT_DIR, "requirements.txt")) as f:
install_requires = [line.strip() for line in f if line.strip()]
with open(os.path.join(REQUIREMENT_DIR, "test_requirements.txt")) as f:
tests_require = [line.strip() for line in f if line.strip()]
setuptools.setup(
name="sqlitebiter",
version=sqlitebiter.VERSION,
url="https://github.com/thombashi/sqlitebiter",
bugtrack_url="https://github.com/thombashi/sqlitebiter/issues",
author="Tsuyoshi Hombashi",
author_email="[email protected]",
description=summary,
include_package_data=True,
install_requires=install_requires,
keywords=["SQLite", "converter", "CSV", "JSON", "Excel", "Google Sheets"],
license="MIT License",
long_description=long_description,
packages=setuptools.find_packages(exclude=['test*']),
setup_requires=["pytest-runner"],
tests_require=tests_require,
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Information Technology",
"License :: OSI Approved :: MIT License",
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Database",
],
entry_points={
"console_scripts": [
"sqlitebiter=sqlitebiter.sqlitebiter:cmd",
],
}
)
|
mit
|
Python
|
cddf9b83383adfc41e80c441b4f8f3219893cc86
|
Bump version for release
|
andrewgross/pyrelic
|
setup.py
|
setup.py
|
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import os
from setuptools import setup, find_packages
def parse_requirements():
"""
Rudimentary parser for the `requirements.txt` file
We just want to separate regular packages from links to pass them to the
`install_requires` and `dependency_links` params of the `setup()`
function properly.
"""
try:
requirements = \
map(str.strip, local_file('requirements.txt'))
except IOError:
raise RuntimeError("Couldn't find the `requirements.txt' file :(")
links = []
pkgs = []
for req in requirements:
if not req:
continue
if 'http:' in req or 'https:' in req:
links.append(req)
name, version = re.findall("\#egg=([^\-]+)-(.+$)", req)[0]
pkgs.append('{0}=={1}'.format(name, version))
else:
pkgs.append(req)
return pkgs, links
local_file = lambda f: \
open(os.path.join(os.path.dirname(__file__), f)).readlines()
#install_requires, dependency_links = parse_requirements()
if __name__ == '__main__':
packages = find_packages(exclude=['*tests*'])
print packages
setup(
name="pyrelic",
license="GPL",
version='0.6.1',
description=u'Python API Wrapper for NewRelic API',
author=u'Andrew Gross',
author_email=u'[email protected]',
package_data={ "pyrelic.packages.requests": ["*.pem"] },
include_package_data=True,
url='https://github.com/andrewgross/pyrelic',
packages=packages,
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Natural Language :: English',
'Operating System :: Microsoft',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
)
)
|
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import os
from setuptools import setup, find_packages
def parse_requirements():
"""
Rudimentary parser for the `requirements.txt` file
We just want to separate regular packages from links to pass them to the
`install_requires` and `dependency_links` params of the `setup()`
function properly.
"""
try:
requirements = \
map(str.strip, local_file('requirements.txt'))
except IOError:
raise RuntimeError("Couldn't find the `requirements.txt' file :(")
links = []
pkgs = []
for req in requirements:
if not req:
continue
if 'http:' in req or 'https:' in req:
links.append(req)
name, version = re.findall("\#egg=([^\-]+)-(.+$)", req)[0]
pkgs.append('{0}=={1}'.format(name, version))
else:
pkgs.append(req)
return pkgs, links
local_file = lambda f: \
open(os.path.join(os.path.dirname(__file__), f)).readlines()
#install_requires, dependency_links = parse_requirements()
if __name__ == '__main__':
packages = find_packages(exclude=['*tests*'])
print packages
setup(
name="pyrelic",
license="GPL",
version='0.6.0',
description=u'Python API Wrapper for NewRelic API',
author=u'Andrew Gross',
author_email=u'[email protected]',
package_data={ "pyrelic.packages.requests": ["*.pem"] },
include_package_data=True,
url='https://github.com/andrewgross/pyrelic',
packages=packages,
classifiers=(
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)',
'Natural Language :: English',
'Operating System :: Microsoft',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
)
)
|
mit
|
Python
|
55d9ed499d842246c74bc72ff0e141fa22fde9d8
|
add numpexpr dependency
|
davidkuep/pyiso,emunsing/pyiso,emunsing/pyiso
|
setup.py
|
setup.py
|
from setuptools import setup
import codecs
import os
import re
# to release:
# python setup.py register sdist bdist_egg upload
here = os.path.abspath(os.path.dirname(__file__))
# Read the version number from a source file.
# Why read it, and not import?
# see https://groups.google.com/d/topic/pypa-dev/0PkjVpcxTzQ/discussion
# https://github.com/pypa/sampleproject/blob/master/setup.py
def find_version(*file_paths):
# Open in Latin-1 so that we avoid encoding errors.
# Use codecs.open for Python 2 compatibility
with codecs.open(os.path.join(here, *file_paths), 'r', 'latin1') as f:
version_file = f.read()
# The version line must have the form
# __version__ = 'ver'
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
# Get the long description from the relevant file
with codecs.open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pyiso',
packages=['pyiso'],
version=find_version('pyiso', '__init__.py'),
description='Python client libraries for ISO and other power grid data sources.',
long_description=long_description,
author='Anna Schneider',
author_email='[email protected]',
url='https://github.com/WattTime/pyiso',
license='Apache',
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
test_suite = 'nose.collector',
install_requires=[
'beautifulsoup4',
'pandas==0.14.1',
'python-dateutil',
'pytz',
'requests',
'xlrd',
'selenium',
'celery>=3.1',
'numexpr',
],
)
|
from setuptools import setup
import codecs
import os
import re
# to release:
# python setup.py register sdist bdist_egg upload
here = os.path.abspath(os.path.dirname(__file__))
# Read the version number from a source file.
# Why read it, and not import?
# see https://groups.google.com/d/topic/pypa-dev/0PkjVpcxTzQ/discussion
# https://github.com/pypa/sampleproject/blob/master/setup.py
def find_version(*file_paths):
# Open in Latin-1 so that we avoid encoding errors.
# Use codecs.open for Python 2 compatibility
with codecs.open(os.path.join(here, *file_paths), 'r', 'latin1') as f:
version_file = f.read()
# The version line must have the form
# __version__ = 'ver'
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
# Get the long description from the relevant file
with codecs.open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(
name='pyiso',
packages=['pyiso'],
version=find_version('pyiso', '__init__.py'),
description='Python client libraries for ISO and other power grid data sources.',
long_description=long_description,
author='Anna Schneider',
author_email='[email protected]',
url='https://github.com/WattTime/pyiso',
license='Apache',
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Information Analysis',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
test_suite = 'nose.collector',
install_requires=[
'beautifulsoup4',
'pandas==0.14.1',
'python-dateutil',
'pytz',
'requests',
'xlrd',
'selenium',
'celery>=3.1',
],
)
|
apache-2.0
|
Python
|
f096dee1623936ed06340df1ee081a1f77eb8b77
|
Simplify plugin info declaration
|
chfw/pyexcel-xlsx,chfw/pyexcel-xlsx
|
pyexcel_xlsx/__init__.py
|
pyexcel_xlsx/__init__.py
|
"""
pyexcel_xlsx
~~~~~~~~~~~~~~~~~~~
The lower level xlsx file format handler using openpyxl
:copyright: (c) 2015-2017 by Onni Software Ltd & its contributors
:license: New BSD License
"""
# flake8: noqa
# this line has to be place above all else
# because of dynamic import
from pyexcel_io.plugins import IORegistry
from pyexcel_io.io import get_data as read_data, isstream, store_data as write_data
__FILE_TYPE__ = 'xlsx'
__pyexcel_io_plugins__ = IORegistry(__name__).add_a_reader(
submodule='xlsxr',
file_types=[__FILE_TYPE__, 'xlsm'],
stream_type='binary'
).add_a_writer(
submodule='xlsxw',
file_types=[__FILE_TYPE__, 'xlsm'],
stream_type='binary'
)
def save_data(afile, data, file_type=None, **keywords):
"""standalone module function for writing module supported file type"""
if isstream(afile) and file_type is None:
file_type = __FILE_TYPE__
write_data(afile, data, file_type=file_type, **keywords)
def get_data(afile, file_type=None, **keywords):
"""standalone module function for reading module supported file type"""
if isstream(afile) and file_type is None:
file_type = __FILE_TYPE__
return read_data(afile, file_type=file_type, **keywords)
|
"""
pyexcel_xlsx
~~~~~~~~~~~~~~~~~~~
The lower level xlsx file format handler using openpyxl
:copyright: (c) 2015-2017 by Onni Software Ltd & its contributors
:license: New BSD License
"""
# flake8: noqa
# this line has to be place above all else
# because of dynamic import
__FILE_TYPE__ = 'xlsx'
__pyexcel_io_plugins__ = [
{
'plugin_type': 'pyexcel-io reader',
'submodule': 'xlsxr',
'file_types': [__FILE_TYPE__, 'xlsm'],
'stream_type': 'binary'
},
{
'plugin_type': 'pyexcel-io writer',
'submodule': 'xlsxw',
'file_types': [__FILE_TYPE__, 'xlsm'],
'stream_type': 'binary'
},
]
from pyexcel_io.io import get_data as read_data, isstream, store_data as write_data
def save_data(afile, data, file_type=None, **keywords):
"""standalone module function for writing module supported file type"""
if isstream(afile) and file_type is None:
file_type = __FILE_TYPE__
write_data(afile, data, file_type=file_type, **keywords)
def get_data(afile, file_type=None, **keywords):
"""standalone module function for reading module supported file type"""
if isstream(afile) and file_type is None:
file_type = __FILE_TYPE__
return read_data(afile, file_type=file_type, **keywords)
|
bsd-3-clause
|
Python
|
f1ab27dcb52212c3c818c3ef6d9be9410610c2d6
|
make these tests pass, please :)
|
jplusplus/statscraper
|
tests/test_base_scraper.py
|
tests/test_base_scraper.py
|
from unittest import TestCase
from statscraper import BaseScraper, Dataset, Dimension, ROOT
class Scraper(BaseScraper):
def _fetch_itemslist(self, item):
yield Dataset("Dataset_1")
yield Dataset("Dataset_2")
yield Dataset("Dataset_3")
def _fetch_dimensions(self, dataset):
yield Dimension(u"date")
yield Dimension(u"municipality")
def _fetch_data(self, dataset, query=None):
yield {
"date": "2017-08-10",
"municipality": "Robertsfors",
"value": 127
}
class TestBaseScraper(TestCase):
def test_init(self):
""" Extending the basescraper """
scraper = Scraper()
self.assertTrue(scraper.current_item.id == ROOT)
def test_inspect_item(self):
""" Fecthing items from an itemlist """
scraper = Scraper()
self.assertTrue(scraper.items[0] == scraper.items.get("Dataset_1"))
def test_select_item(self):
scraper = Scraper()
scraper.select("Dataset_1")
self.assertTrue(isinstance(scraper.current_item, Dataset))
def test_select_missing_item(self):
# Should throw something like a KeyError?
scraper = Scraper()
scraper.select("non_existing_item")
def test_fetch_dataset(self):
scraper = Scraper()
dataset = scraper.items[0]
self.assertTrue(dataset.data[0]["municipality"] == "Robertsfors")
def test_select_dimension(self):
# I want to be able to select a
scraper = Scraper()
scraper.select("Dataset_1")
dataset = scraper.current_item
dim = dataset.dimension("date")
self.assertTrue(isinstance(dim, Dimension))
# Or is "select" a better method name?
dim = dataset.get("date")
self.assertTrue(isinstance(dim, Dimension))
|
from unittest import TestCase
from statscraper import BaseScraper, Dataset, Dimension, ROOT
class Scraper(BaseScraper):
def _fetch_itemslist(self, item):
yield Dataset("Dataset_1")
yield Dataset("Dataset_2")
yield Dataset("Dataset_3")
def _fetch_dimensions(self, dataset):
yield Dimension(u"date")
yield Dimension(u"municipality")
def _fetch_data(self, dataset, query=None):
yield {
"date": "2017-08-10",
"municipality": "Robertsfors",
"value": 127
}
class TestBaseScraper(TestCase):
def test_init(self):
""" Extending the basescraper """
scraper = Scraper()
self.assertTrue(scraper.current_item.id == ROOT)
def test_inspect_item(self):
""" Fecthing items from an itemlist """
scraper = Scraper()
self.assertTrue(scraper.items[0] == scraper.items.get("Dataset_1"))
def test_select_item(self):
scraper = Scraper()
scraper.select("Dataset_1")
self.assertTrue(isinstance(scraper.current_item, Dataset))
def test_fetch_dataset(self):
scraper = Scraper()
dataset = scraper.items[0]
self.assertTrue(dataset.data[0]["municipality"] == "Robertsfors")
|
mit
|
Python
|
408ef23f0227650c77dbaf3efae0dd569fb076dd
|
update version for release
|
marianoguerra/rst2html5,emi80/rst2html5,emi80/rst2html5,marianoguerra/rst2html5,wdv4758h/rst2html5tools,marianoguerra/rst2html5,keith-gray-powereng/rst2html5,wdv4758h/rst2html5tools,keith-gray-powereng/rst2html5
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup
setup(
name='rst2html5-tools',
version='0.2.6',
author='Mariano Guerra',
description="Transform reStructuredText documents to html5 + twitter's bootstrap css, deck.js or reveal.js",
author_email='[email protected]',
url='https://github.com/marianoguerra/rst2html5',
long_description=open('README.rst').read(),
packages=['html5css3'],
package_data={'html5css3': ['thirdparty/*/*.*']},
include_package_data=True,
install_requires=['docutils'],
entry_points={
'console_scripts': [
'rst2html5 = html5css3.main:main',
]},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Documentation',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing',
'Topic :: Text Processing :: Markup',
'Topic :: Utilities',
],
)
|
#!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
import distribute_setup
distribute_setup.use_setuptools()
from setuptools import setup
setup(
name='rst2html5-tools',
version='0.2.5',
author='Mariano Guerra',
description="Transform reStructuredText documents to html5 + twitter's bootstrap css, deck.js or reveal.js",
author_email='[email protected]',
url='https://github.com/marianoguerra/rst2html5',
long_description=open('README.rst').read(),
packages=['html5css3'],
package_data={'html5css3': ['thirdparty/*/*.*']},
include_package_data=True,
install_requires=['docutils'],
entry_points={
'console_scripts': [
'rst2html5 = html5css3.main:main',
]},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Documentation',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing',
'Topic :: Text Processing :: Markup',
'Topic :: Utilities',
],
)
|
mit
|
Python
|
77dc6134be66bf16e346d6120c361ca2b11899f3
|
Add events
|
kalaytan/findatapy,cuemacro/findatapy
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
setup(name='findatapy',
version='0.05',
description='Market data library',
author='Saeed Amen',
author_email='[email protected]',
license='Apache 2.0',
keywords = ['pandas', 'data', 'Bloomberg', 'tick', 'stocks', 'equities'],
url = 'https://github.com/cuemacro/findatapy',
packages = find_packages(),
include_package_data = True,
install_requires = ['pandas',
'twython',
'pytz',
'requests',
'numpy',
'pandas_datareader'],
zip_safe=False)
|
from setuptools import setup, find_packages
setup(name='findatapy',
version='0.05',
description='Market data library',
author='Saeed Amen',
author_email='[email protected]',
license='Apache 2.0',
keywords = ['pandas', 'data', 'Bloomberg', 'tick', 'stocks', 'equities'],
url = 'https://github.com/cuemacro/findatapy',
packages = find_packages(),
include_package_data = True,
install_requires = ['pandas',
'twython',
'pytz',
'requests',
'numpy'],
zip_safe=False)
|
apache-2.0
|
Python
|
e385a57804329356a2f4e7c44532cfa052441555
|
Fix test data broken due to updated behavior of PyFile#getImportBlock()
|
apixandru/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,da1z/intellij-community,allotria/intellij-community,xfournet/intellij-community,allotria/intellij-community,apixandru/intellij-community,hurricup/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,da1z/intellij-community,youdonghai/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,da1z/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,youdonghai/intellij-community,semonte/intellij-community,xfournet/intellij-community,FHannes/intellij-community,allotria/intellij-community,fitermay/intellij-community,ibinti/intellij-community,semonte/intellij-community,fitermay/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,asedunov/intellij-community,FHannes/intellij-community,signed/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,hurricup/intellij-community,hurricup/intellij-community,xfournet/intellij-community,da1z/intellij-community,FHannes/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,apixandru/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,semonte/intellij-community,hurricup/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,fitermay/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,signed/intellij-community,asedunov/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,da1z/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,semonte/intellij-community,xfournet/intellij-community,allotria/intellij-community,xfournet/intellij-community,FHannes/intellij-community,allotria/intellij-community,semonte/intellij-community,signed/intellij-community,allotria/intellij-community,ibinti/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,fitermay/intellij-community,apixandru/intellij-community,hurricup/intellij-community,semonte/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,fitermay/intellij-community,youdonghai/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,hurricup/intellij-community,xfournet/intellij-community,allotria/intellij-community,ibinti/intellij-community,ibinti/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,ibinti/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,fitermay/intellij-community,FHannes/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,da1z/intellij-community,FHannes/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,FHannes/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,ibinti/intellij-community,semonte/intellij-community,da1z/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,hurricup/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,signed/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,signed/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,xfournet/intellij-community,xfournet/intellij-community,asedunov/intellij-community,hurricup/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,apixandru/intellij-community,signed/intellij-community,ibinti/intellij-community,allotria/intellij-community,apixandru/intellij-community,da1z/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,youdonghai/intellij-community,signed/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,allotria/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community
|
python/testData/refactoring/move/relativeImportsInsideMovedModule/after/src/subpkg1/mod1.py
|
python/testData/refactoring/move/relativeImportsInsideMovedModule/after/src/subpkg1/mod1.py
|
import
from
from
import pkg1.subpkg2 as foo
from pkg1 import subpkg2
from pkg1 import subpkg2 as bar
from pkg1.subpkg2 import
from pkg1.subpkg2 import mod2
from pkg1.subpkg2.mod2 import VAR
from . import mod3
print(subpkg2, mod3, mod2, foo, bar, VAR)
|
from pkg1 import subpkg2
from pkg1.subpkg2 import mod2
from pkg1.subpkg2.mod2 import VAR
from . import mod3
# malformed imports
from
from import
from pkg1.subpkg2 import
# absolute imports
import pkg1.subpkg2 as foo
from pkg1 import subpkg2 as bar
print(subpkg2, mod3, mod2, foo, bar, VAR)
|
apache-2.0
|
Python
|
72c669d71b797268870f00e2aa1c00018bcd638b
|
add local_asn test
|
BRCDcomm/pynos,brocade/pynos,SivagnanamCiena/pynos
|
tests/versions/base/test_bgp.py
|
tests/versions/base/test_bgp.py
|
#!/usr/bin/env python
"""
Copyright 2015 Brocade Communications Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import xml.etree.ElementTree as ET
import pynos.versions.base.bgp
import pynos.utilities
class TestBGP(unittest.TestCase):
"""
BGP unit tests. Compare expected XML to generated XML.
"""
def setUp(self):
self.bgp = pynos.versions.base.bgp.BGP(pynos.utilities.return_xml)
def test_local_asn(self):
expected = '<config>'\
'<rbridge-id xmlns="urn:brocade.com:mgmt:brocade-rbridge">'\
'<rbridge-id>2</rbridge-id><router>'\
'<bgp xmlns="urn:brocade.com:mgmt:brocade-bgp">'\
'<vrf-name>x</vrf-name>'\
'<router-bgp-cmds-holder><router-bgp-attributes>'\
'<local-as>65535</local-as></router-bgp-attributes>'\
'</router-bgp-cmds-holder></bgp></router></rbridge-id>'\
'</config>'
result = self.bgp.local_asn(local_as='65535', rbridge_id='2', vrf='x')
result = ET.tostring(result)
self.assertEquals(expected, result)
def test_local_asn_exception(self):
with self.assertRaises(KeyError):
self.bgp.local_asn(rbridge='2', vrf='x')
|
apache-2.0
|
Python
|
|
f8da511cb61072b566ebd0113edd125395b8d422
|
Fix connection
|
dimitri-yatsenko/datajoint-python,datajoint/datajoint-python,eywalker/datajoint-python
|
tests/test_reconnection.py
|
tests/test_reconnection.py
|
"""
Collection of test cases to test connection module.
"""
from nose.tools import assert_true, assert_false, assert_equal, raises
import datajoint as dj
import numpy as np
from datajoint import DataJointError
from . import CONN_INFO, PREFIX
class TestReconnect:
"""
test reconnection
"""
def setup(self):
self.conn = dj.conn(reset=True, **CONN_INFO)
def test_close(self):
assert_true(self.conn.is_connected, "Connection should be alive")
self.conn.close()
assert_false(self.conn.is_connected, "Connection should now be closed")
def test_reconnect(self):
assert_true(self.conn.is_connected, "Connection should be alive")
self.conn.close()
self.conn.query('SHOW DATABASES;', reconnect=True).fetchall()
assert_true(self.conn.is_connected, "Connection should be alive")
@raises(DataJointError)
def reconnect_throws_error_in_transaction(self):
assert_true(self.conn.is_connected, "Connection should be alive")
self.conn.close()
with self.conn.transaction:
self.conn.query('SHOW DATABASES;', reconnect=True).fetchall()
|
"""
Collection of test cases to test connection module.
"""
from nose.tools import assert_true, assert_false, assert_equal, raises
import datajoint as dj
import numpy as np
from datajoint import DataJointError
from . import CONN_INFO, PREFIX
class TestReconnect:
"""
test reconnection
"""
@classmethod
def setup_class(cls):
cls.conn = dj.conn(reset=True, **CONN_INFO)
def test_close(self):
assert_true(self.conn.is_connected, "Connection should be alive")
self.conn.close()
assert_false(self.conn.is_connected, "Connection should now be closed")
def test_reconnect(self):
assert_true(self.conn.is_connected, "Connection should be alive")
self.conn.close()
self.conn.query('SHOW DATABASES;', reconnect=True).fetchall()
assert_true(self.conn.is_connected, "Connection should be alive")
@raises(DataJointError)
def reconnect_throws_error_in_transaction(self):
assert_true(self.conn.is_connected, "Connection should be alive")
self.conn.close()
with self.conn.transaction:
self.conn.query('SHOW DATABASES;', reconnect=True).fetchall()
|
lgpl-2.1
|
Python
|
be56cb9f15e7ea0348937c9c86518786e138e023
|
update setup.py
|
scheunemann/KASPAR,scheunemann/KASPAR,scheunemann/KASPAR
|
setup.py
|
setup.py
|
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='kaspar_gui',
version='0.1',
description='Internet based Front-End for the KASPAR Robot',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
url='http://github.com/uh-nmb/KASPAR',
author='Nathan Burke',
author_email='[email protected]',
license='MIT',
packages=['kasparGUI'],
install_requires=[
'python-dateutil',
'gevent',
'flask',
'werkzeug',
'flask-restless',
'flask-sqlalchemy',
'flask-socketio',
'sqlalchemy',
'robotActionController'
],
dependancy_links=['git+ssh://[email protected]/uh-nmb/robotActionController'],
data_files=[('/etc/init.d', ['systemConfigs/etc/init.d/kasparweb', ]),
('/etc/rsyslog.d', ['systemConfigs/etc/rsyslog.d/kaspar_log.conf', ]),
('/udev/rules.d', ['systemConfigs/udev/rules.d/98-keyPad.rules', 'systemConfigs/udev/rules.d/98-serial.rules', 'systemConfigs/udev/rules.d/99-input.rules'])],
include_package_data=True,
zip_safe=False)
|
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(name='kaspar_gui',
version='0.1',
description='Internet based Front-End for the KASPAR Robot',
long_description=readme(),
classifiers=[
'Development Status :: 1 - Beta',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
],
url='http://github.com/uh-nmb/KASPAR',
author='Nathan Burke',
author_email='[email protected]',
license='MIT',
packages=['kasparGUI'],
install_requires=[
'dateutil',
'gevent',
'flask',
'werkzeug'
'flask-restless',
'flask-sqlalchemy',
'flask-socketio',
'sqlalchemy',
'robotActionController'
],
dependancy_links=['git+ssh://[email protected]/uh-nmb/robotActionController'],
data_files=[('/etc/init.d', ['systemConfigs/etc/init.d/kasparweb', ]),
('/etc/rsyslog.d', ['systemConfigs/etc/rsyslog.d/kaspar_log.conf', ]),
('/udev/rules.d', ['systemConfigs/udev/rules.d/98-keyPad.rules', 'systemConfigs/udev/rules.d/98-serial.rules', 'systemConfigs/udev/rules.d/99-input.rules'])],
include_package_data=True,
zip_safe=False)
|
mit
|
Python
|
e2e6cdac88ee03f78713ac4a50d0003a471a0027
|
Add Python 3.9 to the list of supported versions.
|
sibson/redbeat
|
setup.py
|
setup.py
|
from setuptools import setup
long_description = open('README.rst').read()
setup(
name="celery-redbeat",
description="A Celery Beat Scheduler using Redis for persistent storage",
long_description=long_description,
version="2.0.0",
url="https://github.com/sibson/redbeat",
license="Apache License, Version 2.0",
author="Marc Sibson",
author_email="[email protected]",
keywords="python celery beat redis".split(),
packages=["redbeat"],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Distributed Computing',
'Topic :: Software Development :: Object Brokering',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
],
install_requires=['redis>=3.2', 'celery>=4.2', 'python-dateutil', 'tenacity'],
tests_require=['pytest'],
)
|
from setuptools import setup
long_description = open('README.rst').read()
setup(
name="celery-redbeat",
description="A Celery Beat Scheduler using Redis for persistent storage",
long_description=long_description,
version="2.0.0",
url="https://github.com/sibson/redbeat",
license="Apache License, Version 2.0",
author="Marc Sibson",
author_email="[email protected]",
keywords="python celery beat redis".split(),
packages=["redbeat"],
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: Apache Software License',
'Topic :: System :: Distributed Computing',
'Topic :: Software Development :: Object Brokering',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: Implementation :: CPython',
'Operating System :: OS Independent',
],
install_requires=['redis>=3.2', 'celery>=4.2', 'python-dateutil', 'tenacity'],
tests_require=['pytest'],
)
|
apache-2.0
|
Python
|
29c40e1e5048c5f8d76486020be6464de0e2adc7
|
add more dependency
|
pipehappy1/super-engine,aissehust/sesame-paste-noodle
|
setup.py
|
setup.py
|
from setuptools import find_packages
from setuptools import setup
install_requires = [
'numpy',
'theano',
'pyyaml',
'h5py',
]
setup(
name="TheFramework",
version="0.0.1",
description="A nn lib",
packages=find_packages(),
include_package_data=False,
zip_safe=False,
install_requires=install_requires,
)
|
from setuptools import find_packages
from setuptools import setup
install_requires = [
'numpy',
'theano',
]
setup(
name="TheFramework",
version="0.0.1",
description="A nn lib",
packages=find_packages(),
include_package_data=False,
zip_safe=False,
install_requires=install_requires,
)
|
bsd-3-clause
|
Python
|
67e6036c564f4e2eb9acf650acf5c33813af3003
|
make serve_image return an image
|
agnethesoraa/placepuppy,agnethesoraa/placepuppy
|
views.py
|
views.py
|
from flask import Flask, render_template, make_response
from PIL import Image
import StringIO
app = Flask(__name__)
@app.route('/')
def hello_world():
return render_template('index.html')
@app.route('/<width>x<height>')
@app.route('/<width>X<height>')
def serve_image(width, height):
stringfile = StringIO.StringIO()
im = Image.open("static/images/annie.jpg")
im.save(stringfile, 'JPEG')
response = make_response(stringfile.getvalue())
response.headers["Content-Type"] = "image/jpeg"
return response
if __name__ == '__main__':
app.run(debug=True, host='0.0.0.0')
|
from flask import Flask, render_template
app = Flask(__name__)
@app.route('/')
def hello_world():
return render_template('index.html')
"""@app.route('/<username>')
def hello_world2(username):
return 'Hello %s' % username"""
if __name__ == '__main__':
app.run(debug=True)
|
mit
|
Python
|
b14f520fbb15c9f455339631ed90b0d926befb27
|
Bump version
|
thombashi/DataProperty
|
setup.py
|
setup.py
|
# encoding: utf-8
import io
import sys
import os.path
import setuptools
MISC_DIR = "misc"
REQUIREMENT_DIR = "requirements"
with io.open("README.rst", encoding="utf8") as f:
long_description = f.read()
with io.open(os.path.join(MISC_DIR, "summary.txt"), encoding="utf8") as f:
summary = f.read()
with open(os.path.join(REQUIREMENT_DIR, "requirements.txt")) as f:
install_requires = [line.strip() for line in f if line.strip()]
with open(os.path.join(REQUIREMENT_DIR, "test_requirements.txt")) as f:
tests_require = [line.strip() for line in f if line.strip()]
needs_pytest = set(["pytest", "test", "ptr"]).intersection(sys.argv)
pytest_runner = ["pytest-runner"] if needs_pytest else []
author = "Tsuyoshi Hombashi"
email = "[email protected]"
project_name = "DataProperty"
setuptools.setup(
name=project_name,
version="0.13.1",
url="https://github.com/thombashi/" + project_name,
bugtrack_url="https://github.com/thombashi/{:s}/issues".format(
project_name),
author=author,
author_email=email,
description=summary,
include_package_data=True,
install_requires=install_requires,
keywords=["data", "property"],
license="MIT License",
long_description=long_description,
maintainer=author,
maintainer_email=email,
packages=setuptools.find_packages(exclude=["test*"]),
setup_requires=[] + pytest_runner,
tests_require=tests_require,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
# encoding: utf-8
import io
import sys
import os.path
import setuptools
MISC_DIR = "misc"
REQUIREMENT_DIR = "requirements"
with io.open("README.rst", encoding="utf8") as f:
long_description = f.read()
with io.open(os.path.join(MISC_DIR, "summary.txt"), encoding="utf8") as f:
summary = f.read()
with open(os.path.join(REQUIREMENT_DIR, "requirements.txt")) as f:
install_requires = [line.strip() for line in f if line.strip()]
with open(os.path.join(REQUIREMENT_DIR, "test_requirements.txt")) as f:
tests_require = [line.strip() for line in f if line.strip()]
needs_pytest = set(["pytest", "test", "ptr"]).intersection(sys.argv)
pytest_runner = ["pytest-runner"] if needs_pytest else []
author = "Tsuyoshi Hombashi"
email = "[email protected]"
project_name = "DataProperty"
setuptools.setup(
name=project_name,
version="0.13.0",
url="https://github.com/thombashi/" + project_name,
bugtrack_url="https://github.com/thombashi/{:s}/issues".format(
project_name),
author=author,
author_email=email,
description=summary,
include_package_data=True,
install_requires=install_requires,
keywords=["data", "property"],
license="MIT License",
long_description=long_description,
maintainer=author,
maintainer_email=email,
packages=setuptools.find_packages(exclude=["test*"]),
setup_requires=[] + pytest_runner,
tests_require=tests_require,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Libraries :: Python Modules",
],
)
|
mit
|
Python
|
9c07d26072c15147e47c15edd5c4d356686b14d7
|
Upgrade these.
|
CodeForAfrica/grano-client,granoproject/grano-client
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
VERSION = os.path.join(os.path.dirname(__file__), 'VERSION')
VERSION = open(VERSION, 'r').read().strip()
README = os.path.join(os.path.dirname(__file__), 'README.rst')
README = open(README, 'r').read().strip()
setup(
name='grano-client',
version=VERSION,
description="Client library for grano, a social network analysis tool.",
long_description=README,
classifiers=[
],
keywords='data client rest grano sna ddj journalism',
author='Code for Africa',
author_email='[email protected]',
url='https://github.com/CodeForAfrica/grano-client',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
zip_safe=False,
install_requires=[
"requests>=2.17.3",
"PyYAML>=3.12"
],
tests_require=[],
entry_points=\
""" """,
)
|
import os
from setuptools import setup, find_packages
VERSION = os.path.join(os.path.dirname(__file__), 'VERSION')
VERSION = open(VERSION, 'r').read().strip()
README = os.path.join(os.path.dirname(__file__), 'README.rst')
README = open(README, 'r').read().strip()
setup(
name='grano-client',
version=VERSION,
description="Client library for grano, a social network analysis tool.",
long_description=README,
classifiers=[
],
keywords='data client rest grano sna ddj journalism',
author='Code for Africa',
author_email='[email protected]',
url='https://github.com/granoproject/grano-client',
license='MIT',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
zip_safe=False,
install_requires=[
"requests>=2.2.0",
"PyYAML==3.10"
],
tests_require=[],
entry_points=\
""" """,
)
|
mit
|
Python
|
7689719e0ba8f577acbe5d919828a1abc5437be4
|
update version
|
lisunshiny/lunchboy
|
setup.py
|
setup.py
|
from setuptools import setup
setup(name='lunchboy',
version='0.2',
description='Lunch without #lunch',
url='http://github.com/lisunshiny/lunchboy',
author='Liann Sun',
author_email='[email protected]',
license='MIT',
packages=['lunchboy'],
install_requires=['Scrapy'],
scripts=['bin/lunch', 'bin/menu'],
zip_safe=False)
|
from setuptools import setup
setup(name='lunchboy',
version='0.1',
description='Lunch without #lunch',
url='http://github.com/lisunshiny/lunchboy',
author='Liann Sun',
author_email='[email protected]',
license='MIT',
packages=['lunchboy'],
install_requires=['Scrapy'],
scripts=['bin/lunch', 'bin/menu'],
zip_safe=False)
|
mit
|
Python
|
f3f2408370e76ec8338bfc1f816ca875c75acf5c
|
remove ez_setup
|
nnugumanov/yandex-tank,yandex/yandex-tank,f2nd/yandex-tank,nnugumanov/yandex-tank,fomars/yandex-tank,fomars/yandex-tank,ei-grad/yandex-tank,direvius/yandex-tank,nettorta/yandex-tank,yandex/yandex-tank,netortik/yandex-tank,ei-grad/yandex-tank,netortik/yandex-tank,asekretenko/yandex-tank,f2nd/yandex-tank,asekretenko/yandex-tank,nettorta/yandex-tank,direvius/yandex-tank
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='yandextank',
version='1.7.8',
description='a performance measurement tool',
longer_description='''
Yandex.Tank is a performance measurement and load testing automatization tool.
It uses other load generators such as JMeter, ab or phantom inside of it for
load generation and provides a common configuration system for them and
analytic tools for the results they produce.
''',
maintainer='Alexey Lavrenuke (load testing)',
maintainer_email='[email protected]',
url='http://yandex.github.io/yandex-tank/',
packages=find_packages(exclude=["tests"]),
install_requires=[
'psutil',
'ipaddr',
'progressbar',
'importlib',
],
license='LGPLv2',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)',
'Operating System :: POSIX',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Traffic Generation',
],
entry_points={
'console_scripts': [
'yandex-tank = yandextank.core.cli:main',
],
},
package_data={
'yandextank.core': ['config/*'],
'yandextank.plugins.GraphiteUploader': ['config/*'],
'yandextank.plugins.JMeter': ['config/*'],
'yandextank.plugins.Monitoring': ['config/*'],
'yandextank.plugins.Phantom': ['config/*'],
'yandextank.plugins.TipsAndTricks': ['config/*'],
},
)
|
#!/usr/bin/env python
import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name='yandextank',
version='1.7.7',
description='a performance measurement tool',
longer_description='''
Yandex.Tank is a performance measurement and load testing automatization tool.
It uses other load generators such as JMeter, ab or phantom inside of it for
load generation and provides a common configuration system for them and
analytic tools for the results they produce.
''',
maintainer='Alexey Lavrenuke (load testing)',
maintainer_email='[email protected]',
url='http://yandex.github.io/yandex-tank/',
packages=find_packages(exclude=["tests"]),
install_requires=[
'psutil',
'ipaddr',
'progressbar',
'importlib',
],
license='LGPLv2',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: Web Environment',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)',
'Operating System :: POSIX',
'Topic :: Software Development :: Quality Assurance',
'Topic :: Software Development :: Testing',
'Topic :: Software Development :: Testing :: Traffic Generation',
],
entry_points={
'console_scripts': [
'yandex-tank = yandextank.core.cli:main',
],
},
package_data={
'yandextank.core': ['config/*'],
'yandextank.plugins.GraphiteUploader': ['config/*'],
'yandextank.plugins.JMeter': ['config/*'],
'yandextank.plugins.Monitoring': ['config/*'],
'yandextank.plugins.Phantom': ['config/*'],
'yandextank.plugins.TipsAndTricks': ['config/*'],
},
)
|
lgpl-2.1
|
Python
|
60b310d8fbd6b6130b4e8f23d20fc374eee65c74
|
Bump version
|
SUNET/eduid-common
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
version = '0.3.1b2'
requires = [
'setuptools >= 2.2',
'eduid-userdb >= 0.0.5',
]
# Flavours
webapp_requires = [
'Flask>=0.12,<0.13',
'pysaml2 >= 4.0.3rc1', # version sync with dashboard to avoid pip catastrophes
'redis >= 2.10.5',
'pwgen == 0.4',
'vccs_client >= 0.4.1',
'PyNaCl >= 1.0.1',
'python-etcd >= 0.4.3',
'PyYAML >= 3.11',
'bleach>=2.0.0',
'marshmallow>=2.10,<2.11',
'Flask-Mail == 0.9.1',
'eduid_msg >= 0.10.2',
'eduid-am >= 0.6.2b2',
'statsd==3.2.1',
]
webapp_extras = webapp_requires + []
idp_requires = [
'pysaml2 >= 1.2.0beta2',
'redis >= 2.10.5',
'vccs_client >= 0.4.2',
'PyNaCl >= 1.0.1',
'statsd==3.2.1',
]
idp_extras = idp_requires + []
# No dependecies flavor, let the importing application handle dependencies
nodeps_requires = requires
test_requires = [
'mock == 1.0.1',
]
testing_extras = test_requires + webapp_extras + [
'nose',
'coverage',
'nosexcover',
]
long_description = open('README.txt').read()
setup(name='eduid-common',
version=version,
description="Common code for eduID applications",
long_description=long_description,
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='SUNET',
author_email='',
url='https://github.com/SUNET/',
license='bsd',
packages=find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['eduid_common'],
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=test_requires,
extras_require={
'testing': testing_extras,
'webapp': webapp_extras,
'idp': idp_extras,
'nodeps': []
},
entry_points="""
""",
)
|
from setuptools import setup, find_packages
version = '0.3.1b1'
requires = [
'setuptools >= 2.2',
'eduid-userdb >= 0.0.5',
]
# Flavours
webapp_requires = [
'Flask>=0.12,<0.13',
'pysaml2 >= 4.0.3rc1', # version sync with dashboard to avoid pip catastrophes
'redis >= 2.10.5',
'pwgen == 0.4',
'vccs_client >= 0.4.1',
'PyNaCl >= 1.0.1',
'python-etcd >= 0.4.3',
'PyYAML >= 3.11',
'bleach>=2.0.0',
'marshmallow>=2.10,<2.11',
'Flask-Mail == 0.9.1',
'eduid_msg >= 0.10.2',
'eduid-am >= 0.6.2b2',
'statsd==3.2.1',
]
webapp_extras = webapp_requires + []
idp_requires = [
'pysaml2 >= 1.2.0beta2',
'redis >= 2.10.5',
'vccs_client >= 0.4.2',
'PyNaCl >= 1.0.1',
'statsd==3.2.1',
]
idp_extras = idp_requires + []
# No dependecies flavor, let the importing application handle dependencies
nodeps_requires = requires
test_requires = [
'mock == 1.0.1',
]
testing_extras = test_requires + webapp_extras + [
'nose',
'coverage',
'nosexcover',
]
long_description = open('README.txt').read()
setup(name='eduid-common',
version=version,
description="Common code for eduID applications",
long_description=long_description,
classifiers=[
"Programming Language :: Python",
],
keywords='',
author='SUNET',
author_email='',
url='https://github.com/SUNET/',
license='bsd',
packages=find_packages('src'),
package_dir={'': 'src'},
namespace_packages=['eduid_common'],
include_package_data=True,
zip_safe=False,
install_requires=requires,
tests_require=test_requires,
extras_require={
'testing': testing_extras,
'webapp': webapp_extras,
'idp': idp_extras,
'nodeps': []
},
entry_points="""
""",
)
|
bsd-3-clause
|
Python
|
83b51969d55a81c34cae483d11901fe90e1c2fa9
|
fix importlib for rtfd
|
toinsson/pyrealsense,toinsson/pyrealsense,toinsson/pyrealsense
|
pyrealsense/importlib.py
|
pyrealsense/importlib.py
|
# -*- coding: utf-8 -*-
# Licensed under the Apache-2.0 License, see LICENSE for details.
"""This module loads rsutilwrapper and librealsense library."""
import ctypes
import sys
import os
import warnings
os_name = sys.platform
lrs_prefix_mapping = {'darwin': 'lib', 'linux': 'lib', 'linux2': 'lib', 'win32': ''}
lrs_suffix_mapping = {'darwin': '.dylib', 'linux': '.so', 'linux2': '.so', 'win32': '.dll'}
try:
lrs_prefix = lrs_prefix_mapping[os_name]
lrs_suffix = lrs_suffix_mapping[os_name]
except KeyError:
raise OSError('OS not supported.')
## import C lib
try:
lrs = ctypes.CDLL(lrs_prefix+'realsense'+lrs_suffix)
except OSError:
warnings.warn("librealsense not found.")
lrs = None
## try import since docs will crash here
try:
import rsutilwrapper
except ImportError:
warnings.warn("rsutilwrapper not found.")
rsutilwrapper = None
|
# -*- coding: utf-8 -*-
# Licensed under the Apache-2.0 License, see LICENSE for details.
"""This module loads rsutilwrapper and librealsense library."""
import ctypes
import sys
import os
os_name = sys.platform
lrs_prefix_mapping = {'darwin': 'lib', 'linux': 'lib', 'linux2': 'lib', 'win32': ''}
lrs_suffix_mapping = {'darwin': '.dylib', 'linux': '.so', 'linux2': '.so', 'win32': '.dll'}
try:
lrs_prefix = lrs_prefix_mapping[os_name]
lrs_suffix = lrs_suffix_mapping[os_name]
except KeyError:
raise OSError('OS not supported.')
## import C lib
try:
lrs = ctypes.CDLL(lrs_prefix+'realsense'+lrs_suffix)
except OSError:
import warnings
warnings.warn("librealsense not found.")
lrs = None
## try import since docs will crash here
try:
from . import rsutilwrapper
except ImportError:
warnings.warn("rsutilwrapper not found.")
rsutilwrapper = None
|
apache-2.0
|
Python
|
238dd56b20418178ac8b4357ac70491b73b52dda
|
Add new interface.
|
Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server
|
pykeg/core/Interfaces.py
|
pykeg/core/Interfaces.py
|
"""
This library defines a set of interfaces used by parts of the kegbot.
In general, the interfaces defined here are nothing more than a well-known
class name and one or more function prototypes, which define the interface.
Modules wishing to advertise implementation of one or more of these interfaces
may do so by subclassing that interface. An implementation of a particular
interface must interface all functions defined by that interface.
"""
class AbstractInterfaceError(Exception):
pass
class IRelay:
""" Relay interface """
STATUS_ENABLED = 1
STATUS_DISABLED = 0
STATUS_UNKNOWN = -1
def Enable(self):
raise AbstractInterfaceError
def Disable(self):
raise AbstractInterfaceError
def Status(self):
raise AbstractInterfaceError
class ITemperatureSensor:
def SensorName(self):
""" Return a descriptive string name """
raise AbstractInterfaceError
def GetTemperature(self):
"""
Get the last recorded temperature.
Returns a tuple of (float temp_in_c, float last_reading_timestamp). If
last_reading_timestamp is not none, then it is the approximate timestamp
of the last temperature reading.
"""
raise AbstractInterfaceError
class IFlowmeter:
def GetTicks(self):
"""
Get monotonically increasing tick value. Returns integer.
"""
raise AbstractInterfaceError
class IAuthDevice:
""" Interface for an access control device """
def AuthorizedUsers(self):
""" Return a list of all newly authorized users """
raise AbstractInterfaceError
class IDisplayDevice:
""" A device that can handle alerts """
def Activity(self):
""" Register that some activity has occured at this instant in time """
raise AbstractInterfaceError
def Alert(self, message):
""" A string message to raise """
raise AbstractInterfaceError
class IFlowListener:
""" Something that can listen to flow events """
def FlowStart(self, flow):
""" Called when a flow is started """
raise AbstractInterfaceError
def FlowUpdate(self, flow):
""" Called periodically during the life of a flow """
raise AbstractInterfaceError
def FlowEnd(self, flow, drink):
""" Called at the end of a flow """
raise AbstractInterfaceError
class IThermoListener:
""" Something interested in periodic temperature events """
def ThermoUpdate(self, sensor, temperature):
raise AbstractInterfaceError
class IEventListener:
def PostEvent(self, ev):
raise AbstractInterfaceError
|
"""
This library defines a set of interfaces used by parts of the kegbot.
In general, the interfaces defined here are nothing more than a well-known
class name and one or more function prototypes, which define the interface.
Modules wishing to advertise implementation of one or more of these interfaces
may do so by subclassing that interface. An implementation of a particular
interface must interface all functions defined by that interface.
"""
class AbstractInterfaceError(Exception):
pass
class IRelay:
""" Relay interface """
STATUS_ENABLED = 1
STATUS_DISABLED = 0
STATUS_UNKNOWN = -1
def Enable(self):
raise AbstractInterfaceError
def Disable(self):
raise AbstractInterfaceError
def Status(self):
raise AbstractInterfaceError
class ITemperatureSensor:
def SensorName(self):
""" Return a descriptive string name """
raise AbstractInterfaceError
def GetTemperature(self):
"""
Get the last recorded temperature.
Returns a tuple of (float temp_in_c, float last_reading_timestamp). If
last_reading_timestamp is not none, then it is the approximate timestamp
of the last temperature reading.
"""
raise AbstractInterfaceError
class IFlowmeter:
def GetTicks(self):
"""
Get monotonically increasing tick value. Returns integer.
"""
raise AbstractInterfaceError
class IAuthDevice:
""" Interface for an access control device """
def AuthorizedUsers(self):
""" Return a list of all newly authorized users """
raise AbstractInterfaceError
class IDisplayDevice:
""" A device that can handle alerts """
def Activity(self):
""" Register that some activity has occured at this instant in time """
raise AbstractInterfaceError
def Alert(self, message):
""" A string message to raise """
raise AbstractInterfaceError
class IFlowListener:
""" Something that can listen to flow events """
def FlowStart(self, flow):
""" Called when a flow is started """
raise AbstractInterfaceError
def FlowUpdate(self, flow):
""" Called periodically during the life of a flow """
raise AbstractInterfaceError
def FlowEnd(self, flow, drink):
""" Called at the end of a flow """
raise AbstractInterfaceError
class IThermoListener:
""" Something interested in periodic temperature events """
def ThermoUpdate(self, sensor, temperature):
raise AbstractInterfaceError
|
mit
|
Python
|
71b7faf519a45de7fc349930cf2d4268e27ae36c
|
Bump version to 0.8.0
|
okfn/datapackage-py,okfn/datapackage-py,okfn/datapackage-model-py,datapackages/datapackage-py,datapackages/datapackage-py,okfn/datapackage-model-py
|
setup.py
|
setup.py
|
import os
import fnmatch
from setuptools import setup, find_packages
from codecs import open
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
def schema_files():
'''Return all CSV and JSON files paths in datapackage/schemas
The paths are relative to ./datapackage
'''
def recursive_glob(path, patterns):
results = []
for base, dirs, files in os.walk(path):
matching_files = []
for pattern in patterns:
matching_files.extend(fnmatch.filter(files, pattern))
results.extend(os.path.join(base, f) for f in matching_files)
return results
base_folder = 'datapackage'
remove_base_folder = lambda path: path[len(base_folder) + 1:]
path = os.path.join(base_folder, 'schemas')
files_paths = recursive_glob(path, ['*.csv', '*.json'])
return [remove_base_folder(f) for f in files_paths]
setup(
name='datapackage',
version='0.8.0',
description=(
'Utilities to work with Data Packages as defined on dataprotocols.org'
),
long_description=long_description,
url='https://github.com/frictionlessdata/datapackage-py',
author='Open Knowledge Foundation',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='data dataprotocols jsontableschema frictionlessdata datascience',
packages=find_packages(exclude=['tests']),
package_data={'datapackage': schema_files()},
install_requires=[
'six >= 1.10.0',
'requests >= 2.8.0',
'jsonschema >= 2.5.1',
'tabulator >= 0.4.0',
'jsontableschema >= 0.5.1',
'unicodecsv>=0.14',
],
)
|
import os
import fnmatch
from setuptools import setup, find_packages
from codecs import open
here = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
def schema_files():
'''Return all CSV and JSON files paths in datapackage/schemas
The paths are relative to ./datapackage
'''
def recursive_glob(path, patterns):
results = []
for base, dirs, files in os.walk(path):
matching_files = []
for pattern in patterns:
matching_files.extend(fnmatch.filter(files, pattern))
results.extend(os.path.join(base, f) for f in matching_files)
return results
base_folder = 'datapackage'
remove_base_folder = lambda path: path[len(base_folder) + 1:]
path = os.path.join(base_folder, 'schemas')
files_paths = recursive_glob(path, ['*.csv', '*.json'])
return [remove_base_folder(f) for f in files_paths]
setup(
name='datapackage',
version='0.7.0',
description=(
'Utilities to work with Data Packages as defined on dataprotocols.org'
),
long_description=long_description,
url='https://github.com/frictionlessdata/datapackage-py',
author='Open Knowledge Foundation',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Topic :: Utilities',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='data dataprotocols jsontableschema frictionlessdata datascience',
packages=find_packages(exclude=['tests']),
package_data={'datapackage': schema_files()},
install_requires=[
'six >= 1.10.0',
'requests >= 2.8.0',
'jsonschema >= 2.5.1',
'tabulator >= 0.4.0',
'jsontableschema >= 0.5.1',
'unicodecsv>=0.14',
],
)
|
mit
|
Python
|
b0878122e5ef212592a678f61698d726a7f8d768
|
Fix query string order
|
otknoy/michishiki_api_server
|
post.py
|
post.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cgi
import sqlite3
import time
import config
def fs2dict(fs):
'''Field strage to dict'''
params = {}
for k in fs.keys():
params[k] = fs[k].value
return params
def valid(qs):
required_keys = ['title', 'comment', 'posted_by', 'latitude', 'longitude']
return all([qs.has_key(k) for k in required_keys])
def post(title, comment, posted_by, latitude, longitude):
rate = 0
created_at = int(time.time())
updated_at = created_at
sql = u'insert into posts (id, title, comment, posted_by, rate, latitude, longitude, created_at, updated_at) values (null,?,?,?,?,?,?,?,?);'
con = sqlite3.connect(config.db_path, isolation_level=None)
con.execute(sql, (title, comment, posted_by, rate, latitude, longitude, created_at, updated_at))
con.close()
if __name__ == '__main__':
qs = fs2dict(cgi.FieldStorage())
keys = ['title', 'comment', 'posted_by', 'latitude', 'longitude']
if valid(qs):
query_string = [qs[k].decode('utf-8') for k in keys]
post(*query_string)
result = '{"message": "Successfully posted!"}'
else:
result = '{"message": "Invalid query string"}'
import utils
utils.cgi_header()
print result
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cgi
import sqlite3
import time
import config
def fs2dict(fs):
'''Field strage to dict'''
params = {}
for k in fs.keys():
params[k] = fs[k].value
return params
def valid(qs):
required_keys = ['title', 'comment', 'posted_by', 'latitude', 'longitude']
return all([qs.has_key(k) for k in required_keys])
def post(title, comment, posted_by, latitude, longitude):
rate = 0
created_at = int(time.time())
updated_at = created_at
sql = u'insert into posts (id, title, comment, posted_by, rate, latitude, longitude, created_at, updated_at) values (null,?,?,?,?,?,?,?,?);'
con = sqlite3.connect(config.db_path, isolation_level=None)
con.execute(sql, (title, comment, posted_by, rate, latitude, longitude, created_at, updated_at))
con.close()
if __name__ == '__main__':
qs = fs2dict(cgi.FieldStorage())
if valid(qs):
query_string = [qs[k].decode('utf-8') for k in ['latitude', 'longitude', 'title', 'comment', 'posted_by']]
post(*query_string)
result = '{"message": "Successfully posted!"}'
else:
result = '{"message": "Invalid query string"}'
import utils
utils.cgi_header()
print result
|
mit
|
Python
|
084893374cf5a1585f8b7c18747ec8b11e0c0ce4
|
Update 02-02_cleanse.py
|
mrkowalski/kaggle_santander
|
scikit/src/nosql/02-02_cleanse.py
|
scikit/src/nosql/02-02_cleanse.py
|
import commons, sys, os
import logging as log
import pandas as pd
import xgboost as xgb
import numpy as np
from sklearn.externals import joblib
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score, classification_report, confusion_matrix
def add_activations(df):
for ind in commons.indicators:
log.info("Adding activations for {}".format(ind))
ind_prev = ind + "_1"
res = df[ind].sub(df[ind_prev])
res[res < 0] = 0
df["act_" + ind] = res.fillna(0)
return df
|
import commons, sys, os
import logging as log
import pandas as pd
import xgboost as xgb
import numpy as np
from sklearn.externals import joblib
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score, f1_score, precision_score, recall_score, classification_report, confusion_matrix
def add_activations(df):
for ind in commons.indicators:
log.info("Adding activations for {}".format(ind))
ind_prev = ind + "_1"
res = df[ind].sub(df[ind_prev])
res[res < 0] = 0
df["act_" + ind] = res.fillna(0)
return df
|
mit
|
Python
|
d7a8192c5f1bbb8fc076ceef3a6b835cd37050d8
|
update classifiers
|
WebSQL/sdk
|
setup.py
|
setup.py
|
#!/usr/bin/env python
#from setuptools import setup
from setuptools.command.bdist_rpm import bdist_rpm
from distutils.core import setup
import os
try:
from sphinx.setup_command import BuildDoc as _BuildDoc
class BuildDoc(_BuildDoc):
def finalize_options(self):
super().finalize_options()
if not self.project:
self.project = self.distribution.name
if not self.version:
self.version = self.distribution.version
except ImportError:
BuildDoc = None
def readme():
with open(os.path.join('README')) as r:
return r.read()
setup(
name="sqltoolchain",
version="0.0.1",
description='The toolkit to make work with SQL easier',
packages=["sqltoolchain", "sqltoolchain.syntax"],
requires=["pyparsing"],
author="@bg",
author_email='[email protected]',
maintainer='@bg',
maintainer_email='[email protected]',
url='https://github.com/WebSQL/toolkit',
license='MIT',
long_description=readme(),
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Other Environment",
"License :: OSI Approved :: MIT License",
"Operating System :: MacOS :: MacOS X",
"Operating System :: OS Independent",
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Operating System :: Unix",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Database",
"Topic :: Database :: Database Engines/Servers",
],
entry_points={
'console_scripts': [
'sql-pygen=sqltoolchain.pygen:main',
'sql-preprocessor=sqltoolchain.preprocessor:main',
],
}
)
|
#!/usr/bin/env python
#from setuptools import setup
from setuptools.command.bdist_rpm import bdist_rpm
from distutils.core import setup
import os
try:
from sphinx.setup_command import BuildDoc as _BuildDoc
class BuildDoc(_BuildDoc):
def finalize_options(self):
super().finalize_options()
if not self.project:
self.project = self.distribution.name
if not self.version:
self.version = self.distribution.version
except ImportError:
BuildDoc = None
def readme():
with open(os.path.join('README')) as r:
return r.read()
setup(
name="sqltoolchain",
version="0.0.1",
description='The toolkit to make work with SQL easier',
packages=["sqltoolchain", "sqltoolchain.syntax"],
requires=["pyparsing"],
author="@bg",
author_email='[email protected]',
maintainer='@bg',
maintainer_email='[email protected]',
url='https://github.com/WebSQL/toolkit',
license='MIT',
long_description=readme(),
classifiers=[
"Development Status :: 5 - Beta",
"Environment :: Other Environment",
"License :: OSI Approved :: GNU General Public License (GPL)",
"Operating System :: MacOS :: MacOS X",
"Operating System :: OS Independent",
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Operating System :: Unix",
"Programming Language :: C",
"Programming Language :: Python3",
"Topic :: Database",
"Topic :: Database :: Database Engines/Servers",
],
entry_points={
'console_scripts': [
'sql-pygen=sqltoolchain.pygen:main',
'sql-preprocessor=sqltoolchain.preprocessor:main',
],
}
)
|
mit
|
Python
|
b3066ad8e5af59d12a8b28f0e6b69e0305535094
|
edit doc
|
ymizushi/nicosearch,ymizushi/nicosearch
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(
name = "nicosearch",
py_modules=['nicosearch'],
version = "0.0.4",
license = 'MIT License',
download_url = "http://backloglib.googlecode.com/files/backloglib-0.1.1.tar.g://github.com/ymizushi/nicosearch/archive/master.zip",
platforms = ['POSIX'],
description = "https://github.com/ymizushi/nicosearch",
author = "ymizushi",
author_email = "[email protected]",
url = "https://github.com/ymizushi/nicosearch",
keywords = ["search", "niconico"],
classifiers = [
'License :: OSI Approved :: MIT License',
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"Topic :: Utilities",
"Topic :: Software Development",
],
long_description = open('README.md').read()
)
|
from distutils.core import setup
setup(
name = "nicosearch",
py_modules=['nicosearch'],
version = "0.0.3",
license = open('./LICENSE').read(),
download_url = "http://backloglib.googlecode.com/files/backloglib-0.1.1.tar.g://github.com/ymizushi/nicosearch/archive/master.zip",
platforms = ['POSIX'],
description = "https://github.com/ymizushi/nicosearch",
author = "ymizushi",
author_email = "[email protected]",
url = "https://github.com/ymizushi/nicosearch",
keywords = ["search", "niconico"],
classifiers = [
'License :: OSI Approved :: MIT License',
"Programming Language :: Python",
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Developers",
"Topic :: Utilities",
"Topic :: Software Development",
],
long_description = open('README.md').read()
)
|
mit
|
Python
|
1102293fd73c4091fd21b011d4e790da6df23031
|
remove README deps
|
googlesamples/assistant-sdk-python
|
setup.py
|
setup.py
|
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages
from setuptools import setup
DEPENDENCIES = [
'google-auth==0.8.0',
'googleapis-common-protos==1.5.0',
'grpcio==1.1.0',
'requests==2.13.0',
'requests-oauthlib==0.8.0',
'six==1.10.0',
'urllib3[secure]==1.20',
]
def load_test_suite():
import unittest
test_loader = unittest.TestLoader()
test_suite = test_loader.discover('tests', pattern='test_*.py')
return test_suite
setup(
name='google-assistant',
version='0.0.1',
author='Google Embedded Assistant team',
author_email='[email protected]',
description='Google Embedded Assistant Sample client',
long_description='Google Embedded Assistant Sample client',
url='TODO(proppy) add external repo url',
packages=find_packages(exclude=('tests')),
namespace_packages=('googlesamples',),
install_requires=DEPENDENCIES,
extras_require={
'MAIN': ['tqdm==4.11.2', 'PyAudio==0.2.10']
},
setup_requires=['flake8'],
tests_require=['flake8'],
test_suite='setup.load_test_suite',
entry_points={
'console_scripts': [
'googlesamples-assistant'
'=googlesamples.assistant.__main__:main [MAIN]'
],
},
license='Apache 2.0',
keywords='google assistant client sample',
classifiers=(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
),
)
|
# Copyright 2014 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import find_packages
from setuptools import setup
DEPENDENCIES = [
'google-auth==0.8.0',
'googleapis-common-protos==1.5.0',
'grpcio==1.1.0',
'requests==2.13.0',
'requests-oauthlib==0.8.0',
'six==1.10.0',
'urllib3[secure]==1.20',
]
with open('README.md', 'r') as f:
long_description = f.read()
def load_test_suite():
import unittest
test_loader = unittest.TestLoader()
test_suite = test_loader.discover('tests', pattern='test_*.py')
return test_suite
setup(
name='google-assistant',
version='0.0.1',
author='Google Embedded Assistant team',
author_email='[email protected]',
description='Google Embedded Assistant Sample client',
long_description=long_description,
url='TODO(proppy) add external repo url',
packages=find_packages(exclude=('tests')),
namespace_packages=('googlesamples',),
install_requires=DEPENDENCIES,
extras_require={
'MAIN': ['tqdm==4.11.2', 'PyAudio==0.2.10']
},
setup_requires=['flake8'],
tests_require=['flake8'],
test_suite='setup.load_test_suite',
entry_points={
'console_scripts': [
'googlesamples-assistant'
'=googlesamples.assistant.__main__:main [MAIN]'
],
},
license='Apache 2.0',
keywords='google assistant client sample',
classifiers=(
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Operating System :: OS Independent',
'Topic :: Internet :: WWW/HTTP',
),
)
|
apache-2.0
|
Python
|
81c5d5eea267cd35517bae1ed50d4bdeb8b3a62c
|
clean up interface class
|
morefigs/pymba
|
pymba/vimba_interface.py
|
pymba/vimba_interface.py
|
from ctypes import byref
from .vimba_object import VimbaObject
from .vimba_exception import VimbaException
from . import vimba_c
class VimbaInterface(VimbaObject):
"""
A Vimba interface object. This class provides the minimal access
to Vimba functions required to control the interface.
"""
def __init__(self, id_string: str):
self._id_string = id_string
super().__init__()
@property
def id_string(self):
return self._id_string
def open(self):
"""
Open the interface.
"""
error = vimba_c.vmb_interface_open(self._id_string,
byref(self._handle))
if error:
raise VimbaException(error)
def close(self):
"""
Close the interface.
"""
error = vimba_c.vmb_interface_close(self._handle)
if error:
raise VimbaException(error)
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from . import vimba_structure as structs
from .vimba_object import VimbaObject
from .vimba_exception import VimbaException
from .vimba_dll import VimbaDLL
from ctypes import *
# interface features are automatically readable as object attributes.
class VimbaInterface(VimbaObject):
"""
A Vimba interface object. This class provides the minimal access
to Vimba functions required to control the interface.
"""
@property
def interfaceIdString(self):
return self._interfaceIdString
# own handle is inherited as self._handle
def __init__(self, interfaceIdString):
# call super constructor
super(VimbaInterface, self).__init__()
# set ID
self._interfaceIdString = interfaceIdString
def openInterface(self):
"""
Open the interface.
"""
errorCode = VimbaDLL.interfaceOpen(self._interfaceIdString,
byref(self._handle))
if errorCode != 0:
raise VimbaException(errorCode)
def closeInterface(self):
"""
Close the interface.
"""
errorCode = VimbaDLL.interfaceClose(self._handle)
if errorCode != 0:
raise VimbaException(errorCode)
|
mit
|
Python
|
440c8e679b5939da0f5e32342440f7151c11bb61
|
Add checking value of "XWALK_OS_ANDROID" during parsing xwalk deps
|
hgl888/crosswalk-efl,minggangw/crosswalk,lincsoon/crosswalk,tomatell/crosswalk,alex-zhang/crosswalk,crosswalk-project/crosswalk,RafuCater/crosswalk,weiyirong/crosswalk-1,stonegithubs/crosswalk,stonegithubs/crosswalk,mrunalk/crosswalk,xzhan96/crosswalk,darktears/crosswalk,jondong/crosswalk,chuan9/crosswalk,crosswalk-project/crosswalk-efl,leonhsl/crosswalk,axinging/crosswalk,xzhan96/crosswalk,crosswalk-project/crosswalk,leonhsl/crosswalk,axinging/crosswalk,mrunalk/crosswalk,Bysmyyr/crosswalk,alex-zhang/crosswalk,leonhsl/crosswalk,heke123/crosswalk,lincsoon/crosswalk,marcuspridham/crosswalk,weiyirong/crosswalk-1,chuan9/crosswalk,hgl888/crosswalk-efl,siovene/crosswalk,lincsoon/crosswalk,bestwpw/crosswalk,jpike88/crosswalk,Bysmyyr/crosswalk,baleboy/crosswalk,xzhan96/crosswalk,jondong/crosswalk,weiyirong/crosswalk-1,baleboy/crosswalk,tedshroyer/crosswalk,darktears/crosswalk,zeropool/crosswalk,PeterWangIntel/crosswalk,crosswalk-project/crosswalk-efl,bestwpw/crosswalk,siovene/crosswalk,crosswalk-project/crosswalk,ZhengXinCN/crosswalk,hgl888/crosswalk,chuan9/crosswalk,marcuspridham/crosswalk,PeterWangIntel/crosswalk,hgl888/crosswalk,crosswalk-project/crosswalk,Bysmyyr/crosswalk,minggangw/crosswalk,dreamsxin/crosswalk,zeropool/crosswalk,heke123/crosswalk,marcuspridham/crosswalk,rakuco/crosswalk,axinging/crosswalk,pk-sam/crosswalk,DonnaWuDongxia/crosswalk,amaniak/crosswalk,Bysmyyr/crosswalk,DonnaWuDongxia/crosswalk,zliang7/crosswalk,baleboy/crosswalk,darktears/crosswalk,amaniak/crosswalk,Pluto-tv/crosswalk,Pluto-tv/crosswalk,jondong/crosswalk,dreamsxin/crosswalk,tedshroyer/crosswalk,Bysmyyr/crosswalk,crosswalk-project/crosswalk,pk-sam/crosswalk,lincsoon/crosswalk,jondong/crosswalk,RafuCater/crosswalk,hgl888/crosswalk,baleboy/crosswalk,hgl888/crosswalk,heke123/crosswalk,tomatell/crosswalk,crosswalk-project/crosswalk-efl,zeropool/crosswalk,ZhengXinCN/crosswalk,heke123/crosswalk,darktears/crosswalk,marcuspridham/crosswalk,alex-zhang/crosswalk,xzhan96/crosswalk,RafuCater/crosswalk,jondwillis/crosswalk,XiaosongWei/crosswalk,crosswalk-project/crosswalk,zliang7/crosswalk,minggangw/crosswalk,leonhsl/crosswalk,weiyirong/crosswalk-1,xzhan96/crosswalk,rakuco/crosswalk,tedshroyer/crosswalk,DonnaWuDongxia/crosswalk,lincsoon/crosswalk,tomatell/crosswalk,zeropool/crosswalk,bestwpw/crosswalk,crosswalk-project/crosswalk-efl,siovene/crosswalk,ZhengXinCN/crosswalk,lincsoon/crosswalk,pk-sam/crosswalk,hgl888/crosswalk-efl,zeropool/crosswalk,pk-sam/crosswalk,rakuco/crosswalk,Pluto-tv/crosswalk,marcuspridham/crosswalk,RafuCater/crosswalk,rakuco/crosswalk,pk-sam/crosswalk,tedshroyer/crosswalk,mrunalk/crosswalk,hgl888/crosswalk,jondwillis/crosswalk,XiaosongWei/crosswalk,marcuspridham/crosswalk,ZhengXinCN/crosswalk,jpike88/crosswalk,crosswalk-project/crosswalk-efl,dreamsxin/crosswalk,crosswalk-project/crosswalk,chuan9/crosswalk,axinging/crosswalk,jpike88/crosswalk,dreamsxin/crosswalk,weiyirong/crosswalk-1,ZhengXinCN/crosswalk,amaniak/crosswalk,DonnaWuDongxia/crosswalk,xzhan96/crosswalk,stonegithubs/crosswalk,lincsoon/crosswalk,rakuco/crosswalk,RafuCater/crosswalk,minggangw/crosswalk,marcuspridham/crosswalk,rakuco/crosswalk,Bysmyyr/crosswalk,Pluto-tv/crosswalk,leonhsl/crosswalk,xzhan96/crosswalk,ZhengXinCN/crosswalk,lincsoon/crosswalk,hgl888/crosswalk,stonegithubs/crosswalk,rakuco/crosswalk,PeterWangIntel/crosswalk,amaniak/crosswalk,minggangw/crosswalk,zeropool/crosswalk,heke123/crosswalk,baleboy/crosswalk,hgl888/crosswalk,PeterWangIntel/crosswalk,darktears/crosswalk,siovene/crosswalk,chuan9/crosswalk,dreamsxin/crosswalk,Pluto-tv/crosswalk,Pluto-tv/crosswalk,axinging/crosswalk,jondong/crosswalk,DonnaWuDongxia/crosswalk,PeterWangIntel/crosswalk,amaniak/crosswalk,tedshroyer/crosswalk,pk-sam/crosswalk,XiaosongWei/crosswalk,zliang7/crosswalk,crosswalk-project/crosswalk,PeterWangIntel/crosswalk,PeterWangIntel/crosswalk,RafuCater/crosswalk,crosswalk-project/crosswalk-efl,leonhsl/crosswalk,zliang7/crosswalk,tomatell/crosswalk,crosswalk-project/crosswalk-efl,heke123/crosswalk,mrunalk/crosswalk,mrunalk/crosswalk,alex-zhang/crosswalk,baleboy/crosswalk,stonegithubs/crosswalk,minggangw/crosswalk,DonnaWuDongxia/crosswalk,dreamsxin/crosswalk,stonegithubs/crosswalk,hgl888/crosswalk,jondong/crosswalk,bestwpw/crosswalk,baleboy/crosswalk,jondwillis/crosswalk,zliang7/crosswalk,heke123/crosswalk,zliang7/crosswalk,Pluto-tv/crosswalk,jpike88/crosswalk,mrunalk/crosswalk,amaniak/crosswalk,weiyirong/crosswalk-1,chuan9/crosswalk,ZhengXinCN/crosswalk,alex-zhang/crosswalk,jpike88/crosswalk,darktears/crosswalk,axinging/crosswalk,marcuspridham/crosswalk,jondong/crosswalk,weiyirong/crosswalk-1,zliang7/crosswalk,jondwillis/crosswalk,pk-sam/crosswalk,XiaosongWei/crosswalk,dreamsxin/crosswalk,bestwpw/crosswalk,Bysmyyr/crosswalk,jondwillis/crosswalk,DonnaWuDongxia/crosswalk,amaniak/crosswalk,heke123/crosswalk,alex-zhang/crosswalk,baleboy/crosswalk,XiaosongWei/crosswalk,axinging/crosswalk,darktears/crosswalk,jondwillis/crosswalk,siovene/crosswalk,bestwpw/crosswalk,XiaosongWei/crosswalk,jpike88/crosswalk,zeropool/crosswalk,tomatell/crosswalk,tedshroyer/crosswalk,zliang7/crosswalk,tomatell/crosswalk,RafuCater/crosswalk,XiaosongWei/crosswalk,siovene/crosswalk,alex-zhang/crosswalk,minggangw/crosswalk,jondwillis/crosswalk,siovene/crosswalk,rakuco/crosswalk,xzhan96/crosswalk,hgl888/crosswalk-efl,hgl888/crosswalk-efl,hgl888/crosswalk-efl,darktears/crosswalk,tedshroyer/crosswalk,tomatell/crosswalk,bestwpw/crosswalk,Bysmyyr/crosswalk,leonhsl/crosswalk,chuan9/crosswalk,jpike88/crosswalk,minggangw/crosswalk,hgl888/crosswalk-efl,jondong/crosswalk,stonegithubs/crosswalk
|
tools/generate_gclient-xwalk.py
|
tools/generate_gclient-xwalk.py
|
#!/usr/bin/env python
# Copyright (c) 2013 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script is responsible for generating .gclient-xwalk in the top-level
source directory from DEPS.xwalk.
User-configurable values such as |cache_dir| are fetched from .gclient instead.
"""
import logging
import optparse
import os
import pprint
CROSSWALK_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
GCLIENT_ROOT = os.path.dirname(os.path.dirname(CROSSWALK_ROOT))
def ParseGClientConfig():
"""
Parses the top-level .gclient file (NOT .gclient-xwalk) and returns the
values set there as a dictionary.
"""
with open(os.path.join(GCLIENT_ROOT, '.gclient')) as dot_gclient:
config = {}
exec(dot_gclient, config)
return config
def GenerateGClientXWalk(options):
with open(os.path.join(CROSSWALK_ROOT, 'DEPS.xwalk')) as deps_file:
deps_contents = deps_file.read()
if os.environ.get('XWALK_OS_ANDROID') == '1':
deps_contents += 'target_os = [\'android\']\n'
gclient_config = ParseGClientConfig()
if options.cache_dir:
logging.warning('--cache_dir is deprecated and will be removed in '
'Crosswalk 8. You should set cache_dir in .gclient '
'instead.')
cache_dir = options.cache_dir
else:
cache_dir = gclient_config.get('cache_dir')
deps_contents += 'cache_dir = %s\n' % pprint.pformat(cache_dir)
with open(os.path.join(GCLIENT_ROOT, '.gclient-xwalk'), 'w') as gclient_file:
gclient_file.write(deps_contents)
def main():
option_parser = optparse.OptionParser()
# TODO(rakuco): Remove in Crosswalk 8.
option_parser.add_option('--cache-dir',
help='DEPRECATED Set "cache_dir" in .gclient-xwalk '
'to this directory, so that all git '
'repositories are cached there.')
options, _ = option_parser.parse_args()
GenerateGClientXWalk(options)
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
# Copyright (c) 2013 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script is responsible for generating .gclient-xwalk in the top-level
source directory from DEPS.xwalk.
User-configurable values such as |cache_dir| are fetched from .gclient instead.
"""
import logging
import optparse
import os
import pprint
CROSSWALK_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
GCLIENT_ROOT = os.path.dirname(os.path.dirname(CROSSWALK_ROOT))
def ParseGClientConfig():
"""
Parses the top-level .gclient file (NOT .gclient-xwalk) and returns the
values set there as a dictionary.
"""
with open(os.path.join(GCLIENT_ROOT, '.gclient')) as dot_gclient:
config = {}
exec(dot_gclient, config)
return config
def GenerateGClientXWalk(options):
with open(os.path.join(CROSSWALK_ROOT, 'DEPS.xwalk')) as deps_file:
deps_contents = deps_file.read()
if 'XWALK_OS_ANDROID' in os.environ:
deps_contents += 'target_os = [\'android\']\n'
gclient_config = ParseGClientConfig()
if options.cache_dir:
logging.warning('--cache_dir is deprecated and will be removed in '
'Crosswalk 8. You should set cache_dir in .gclient '
'instead.')
cache_dir = options.cache_dir
else:
cache_dir = gclient_config.get('cache_dir')
deps_contents += 'cache_dir = %s\n' % pprint.pformat(cache_dir)
with open(os.path.join(GCLIENT_ROOT, '.gclient-xwalk'), 'w') as gclient_file:
gclient_file.write(deps_contents)
def main():
option_parser = optparse.OptionParser()
# TODO(rakuco): Remove in Crosswalk 8.
option_parser.add_option('--cache-dir',
help='DEPRECATED Set "cache_dir" in .gclient-xwalk '
'to this directory, so that all git '
'repositories are cached there.')
options, _ = option_parser.parse_args()
GenerateGClientXWalk(options)
if __name__ == '__main__':
main()
|
bsd-3-clause
|
Python
|
3c7758ce4f4ee844212e0dc86e3e35a5ea34d13f
|
Update setup.py
|
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
|
setup.py
|
setup.py
|
from setuptools import setup
classifiers=[
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
]
setup(
name="virtool",
classifiers=classifiers,
install_requires=[
"aiofiles",
"aiohttp",
"aiojobs",
"aionotify",
"aioredis",
"arrow",
"bcrypt",
"biopython",
"Cerberus",
"cchardet",
"click",
"coloredlogs",
"coverage",
"dictdiffer",
"Mako",
"motor",
"psutil",
"semver",
"sentry-sdk",
"uvloop",
"visvalingamwyatt"
],
py_modules=["virtool"],
entry_points='''
[console_scripts]
virtool=virtool.config:entry
'''
)
|
from cx_Freeze import setup, Executable
build_exe_options = {
"bin_includes": [
"libssl.so",
"libz.so"
],
"bin_path_includes": [
"/usr/lib/x86_64-linux-gnu"
],
"include_files": [
("client/dist", "client"),
"LICENSE",
"templates",
"readme.md"
],
"includes": [
"asyncio.base_events"
],
"packages": [
"asyncio",
"idna",
"gzip",
"motor",
"numpy",
"uvloop",
"sentry_sdk",
"ssl"
]
}
options = {
"build_exe": build_exe_options
}
executables = [
Executable('run.py', base="Console")
]
classifiers=[
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7"
]
setup(name="virtool", executables=executables, options=options, classifiers=classifiers)
|
mit
|
Python
|
7d97f7e6d7c467fda4b2aea4d028ee376f9c71d3
|
Bump version.
|
sarenji/pyrc
|
setup.py
|
setup.py
|
from distutils.core import setup
setup(name='pyrc',
version='0.6.1',
description='Simple, clean Python IRC library',
author='David Peter',
author_email='[email protected]',
url='http://github.com/sarenji/pyrc',
packages=['pyrc', 'pyrc/utils'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
from distutils.core import setup
setup(name='pyrc',
version='0.6.0',
description='Simple, clean Python IRC library',
author='David Peter',
author_email='[email protected]',
url='http://github.com/sarenji/pyrc',
packages=['pyrc', 'pyrc/utils'],
classifiers=[
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Environment :: Console',
'Development Status :: 3 - Alpha',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
|
mit
|
Python
|
4ed7c876e825b6fa28d31ed257ecbd0023cff605
|
handle missing db
|
davidszotten/pytest-cagoule
|
pytest_cagoule/select.py
|
pytest_cagoule/select.py
|
from itertools import chain
import os
import re
import sqlite3
import six
from . import DB_FILE
spec_re = re.compile(
r'(?P<filename>[^:]+)(:(?P<start_line>\d+))?(-(?P<end_line>\d+))?'
)
def parse_spec(spec):
match = spec_re.match(spec)
if match is None:
return []
matches = match.groupdict()
filename = matches['filename']
start_line = matches.get('start_line')
if start_line is not None:
start_line = int(start_line)
end_line = matches.get('end_line')
if end_line is not None:
end_line = int(end_line)
return filename, start_line, end_line
def get_query(specs):
query_list = []
params_list = []
for spec in specs:
query, params = get_spec_filter(spec)
query_list.append(query)
params_list.append(params)
if query_list:
clauses = '\n OR '.join(map("({})".format, query_list))
filters = """
WHERE
{}
""".format(clauses)
else:
return None, None
full_params = tuple(chain(*params_list))
full_query = """
SELECT DISTINCT(node_id) FROM coverage
{}
ORDER BY node_id
""".format(filters)
return full_query, full_params
def get_spec_filter(spec):
# TODO: find where to best do this
if isinstance(spec, six.string_types):
spec = parse_spec(spec)
filename, start_line, end_line = spec
filename = os.path.abspath(filename)
lines_query, line_params = get_line_number_filter(start_line, end_line)
query = 'filename = ? ' + lines_query
params = (filename,) + line_params
return query, params
def get_line_number_filter(start_line, end_line):
if start_line is None:
return '', ()
if end_line is None:
end_line = start_line
lines = tuple(range(start_line, end_line + 1))
query = 'AND ({})'.format(
' OR '.join('line = ?' for line in lines)
)
return query, lines
def get_nodes_from_db(specs):
query, params = get_query(specs)
if query is None:
return []
if not os.path.exists(DB_FILE):
return []
connection = sqlite3.connect(DB_FILE)
cursor = connection.cursor()
cursor.execute(query, params)
return list(node_id for (node_id,) in cursor.fetchall())
def get_node_ids(specs):
return get_nodes_from_db(specs)
|
from itertools import chain
import os
import re
import sqlite3
import six
from . import DB_FILE
spec_re = re.compile(
r'(?P<filename>[^:]+)(:(?P<start_line>\d+))?(-(?P<end_line>\d+))?'
)
def parse_spec(spec):
match = spec_re.match(spec)
if match is None:
return []
matches = match.groupdict()
filename = matches['filename']
start_line = matches.get('start_line')
if start_line is not None:
start_line = int(start_line)
end_line = matches.get('end_line')
if end_line is not None:
end_line = int(end_line)
return filename, start_line, end_line
def get_query(specs):
query_list = []
params_list = []
for spec in specs:
query, params = get_spec_filter(spec)
query_list.append(query)
params_list.append(params)
if query_list:
clauses = '\n OR '.join(map("({})".format, query_list))
filters = """
WHERE
{}
""".format(clauses)
else:
return None, None
full_params = tuple(chain(*params_list))
full_query = """
SELECT DISTINCT(node_id) FROM coverage
{}
ORDER BY node_id
""".format(filters)
return full_query, full_params
def get_spec_filter(spec):
# TODO: find where to best do this
if isinstance(spec, six.string_types):
spec = parse_spec(spec)
filename, start_line, end_line = spec
filename = os.path.abspath(filename)
lines_query, line_params = get_line_number_filter(start_line, end_line)
query = 'filename = ? ' + lines_query
params = (filename,) + line_params
return query, params
def get_line_number_filter(start_line, end_line):
if start_line is None:
return '', ()
if end_line is None:
end_line = start_line
lines = tuple(range(start_line, end_line + 1))
query = 'AND ({})'.format(
' OR '.join('line = ?' for line in lines)
)
return query, lines
def get_nodes_from_db(specs):
query, params = get_query(specs)
if query is None:
return []
connection = sqlite3.connect(DB_FILE)
cursor = connection.cursor()
cursor.execute(query, params)
return list(node_id for (node_id,) in cursor.fetchall())
def get_node_ids(specs):
return get_nodes_from_db(specs)
|
mit
|
Python
|
ec4bbc6c6b766ac1c530cf3f1b4ebab40c60fe01
|
Update instrument.py
|
JamesKBowler/fxcmminer
|
fx_collect/instrument.py
|
fx_collect/instrument.py
|
class InstrumentAttributes(object):
def __init__(
self, broker, instrument, time_frames,
market_status, last_update, utc_now, wk_str, wk_end
):
# Start of Trading Week
self.utc_now = utc_now
self.wk_str = wk_str
self.wk_end = wk_end
self.str_hour = wk_str.hour
self.td = wk_str.hour - 22
# Passport
self.instrument = instrument
self.market_status = market_status
self.last_update = last_update
self.time_frames = time_frames
# Time frame storage dict
self.attrib = {}
for time_frame in time_frames:
self.attrib[time_frame] = {
'db_min' : None,
'db_max' : None,
'finbar' : None
}
def update_instrument_status(
self, lastupdate, market_status, utc_now
):
self.utc_now = utc_now
self.last_update = lastupdate
self.market_status = market_status
def update_database_datetime(
self, time_frame, pdfm, pdto
):
if pdfm < self.attrib[time_frame]['db_min']:
self.attrib[time_frame]['db_min'] = pdfm
if pdto >= self.attrib[time_frame]['db_max']:
self.attrib[time_frame]['db_max'] = pdto
|
class InstrumentAttributes(object):
def __init__(
self, broker, instrument, time_frames,
market_status, last_update, utc_now, wk_str, wk_end
):
# Start of Trading Week
self.utc_now = utc_now
self.wk_str = wk_str
self.wk_end = wk_end
self.str_hour = wk_str.hour
self.td = wk_str.hour - 22
# Passport
self.instrument = instrument
self.market_status = market_status
self.last_update = last_update
self.time_frames = time_frames
# Time frame storage dict
self.attrib = {}
for time_frame in time_frames:
self.attrib[time_frame] = {
'db_min' : None,
'db_max' : None,
'finbar' : None
}
def update_instrument_status(
self, lastupdate, market_status, utc_now
):
self.utc_now = utc_now
self.last_update = lastupdate
self.market_status = market_status
def _update_database_datetime(
self, time_frame, pdfm, pdto
):
if pdfm < self.attrib[time_frame]['db_min']:
self.attrib[time_frame]['db_min'] = pdfm
if pdto >= self.attrib[time_frame]['db_max']:
self.attrib[time_frame]['db_max'] = pdto
|
mit
|
Python
|
5cbc61943b3488719c3e0de2596ce64458935538
|
add include_package_data to setup.py
|
eliasdorneles/scrapyd,2014fgq/scrapyd,masterPPP/scrapyd,masterPPP/scrapyd,eliasdorneles/scrapyd,dfockler/scrapyd,aivarsk/scrapyd,dfockler/scrapyd,aivarsk/scrapyd,wujuguang/scrapyd,2014fgq/scrapyd,scrapy/scrapyd,CENDARI/scrapyd,sigma-random/scrapyd,nyov/scrapyd,sigma-random/scrapyd,nyov/scrapyd,wujuguang/scrapyd,CENDARI/scrapyd
|
setup.py
|
setup.py
|
from os.path import join, dirname
with open(join(dirname(__file__), 'scrapyd/VERSION')) as f:
version = f.read().strip()
setup_args = {
'name': 'Scrapyd',
'version': version,
'url': 'https://github.com/scrapy/scrapyd',
'description': 'A service for running Scrapy spiders, with an HTTP API',
'long_description': open('README.rst').read(),
'author': 'Scrapy developers',
'maintainer': 'Scrapy developers',
'maintainer_email': '[email protected]',
'license': 'BSD',
'packages': ['scrapyd'],
'scripts': ['bin/scrapyd'],
'include_package_data': True,
'classifiers': [
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: No Input/Output (Daemon)',
'Topic :: Internet :: WWW/HTTP',
],
}
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
else:
setup_args['install_requires'] = ['Twisted>=8.0', 'Scrapy>=0.17']
setup(**setup_args)
|
from os.path import join, dirname
with open(join(dirname(__file__), 'scrapyd/VERSION')) as f:
version = f.read().strip()
setup_args = {
'name': 'Scrapyd',
'version': version,
'url': 'https://github.com/scrapy/scrapyd',
'description': 'A service for running Scrapy spiders, with an HTTP API',
'long_description': open('README.rst').read(),
'author': 'Scrapy developers',
'maintainer': 'Scrapy developers',
'maintainer_email': '[email protected]',
'license': 'BSD',
'packages': ['scrapyd'],
'scripts': ['bin/scrapyd'],
'classifiers': [
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'Environment :: No Input/Output (Daemon)',
'Topic :: Internet :: WWW/HTTP',
],
}
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
else:
setup_args['install_requires'] = ['Twisted>=8.0', 'Scrapy>=0.17']
setup(**setup_args)
|
bsd-3-clause
|
Python
|
a6effe7080fb66f7bd4e930727ed5d1ecff21523
|
Fix setup requirements to not contain transitional dependencies and exact versions
|
martinsmid/pytest-ui
|
setup.py
|
setup.py
|
from setuptools import setup
from pytui.settings import VERSION
setup(
name='pytest-ui',
description='Text User Interface for running python tests',
version=VERSION,
license='MIT',
platforms=['linux', 'osx', 'win32'],
packages=['pytui'],
url='https://github.com/martinsmid/pytest-ui',
author_email='[email protected]',
author='Martin Smid',
entry_points={
'console_scripts': [
'pytui = pytui.ui:main',
]
},
install_requires=[
'future',
'pytest',
'tblib',
'urwid',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
from setuptools import setup
from pytui.settings import VERSION
setup(
name='pytest-ui',
description='Text User Interface for running python tests',
version=VERSION,
license='MIT',
platforms=['linux', 'osx', 'win32'],
packages=['pytui'],
url='https://github.com/martinsmid/pytest-ui',
author_email='[email protected]',
author='Martin Smid',
entry_points={
'console_scripts': [
'pytui = pytui.ui:main',
]
},
install_requires=[
'attrs==17.4.0',
'future==0.16.0',
'pluggy==0.6.0',
'py==1.5.2',
'pytest==3.4.1',
'six==1.11.0',
'tblib==1.3.2',
'urwid==2.0.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Operating System :: POSIX',
'Operating System :: Microsoft :: Windows',
'Operating System :: MacOS :: MacOS X',
'Topic :: Software Development :: Testing',
'Topic :: Utilities',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
],
)
|
mit
|
Python
|
131cead153dd29cacf03fbf841f26fc85482b57c
|
Set version redactor 0.2 on setup file
|
jeanmask/opps,jeanmask/opps,opps/opps,williamroot/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,jeanmask/opps,williamroot/opps,YACOWS/opps,opps/opps,YACOWS/opps,williamroot/opps,opps/opps,YACOWS/opps
|
setup.py
|
setup.py
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django==1.5",
"south>=0.7",
"Pillow==1.7.8",
"thumbor==3.7.1",
"django-tagging==0.3.1",
"django-redis",
"django-redactor==0.2"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from setuptools import setup, find_packages
import opps
install_requires = ["Django==1.5",
"south>=0.7",
"Pillow==1.7.8",
"thumbor==3.7.1",
"django-tagging==0.3.1",
"django-redis",
"django-redactor"]
classifiers = ["Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Framework :: Django",
'Programming Language :: Python',
"Programming Language :: Python :: 2.7",
"Operating System :: OS Independent",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
'Topic :: Software Development :: Libraries :: Python Modules',]
try:
long_description = open('README.md').read()
except:
long_description = opps.__description__
setup(name='opps',
version = opps.__version__,
description = opps.__description__,
long_description = long_description,
classifiers = classifiers,
keywords = 'opps cms django apps magazines websites',
author = opps.__author__,
author_email = opps.__email__,
url = 'http://oppsproject.org',
download_url = "https://github.com/avelino/opps/tarball/master",
license = opps.__license__,
packages = find_packages(exclude=('doc',)),
package_dir = {'opps': 'opps'},
install_requires = install_requires,
include_package_data = True,
)
|
mit
|
Python
|
72e4efe764dfcb85b633e59fbebd3aa82a95f6de
|
Use setuptools.
|
eddieantonio/sentinel
|
setup.py
|
setup.py
|
from setuptools import setup
from sentinel import __version__ as VERSION
from codecs import open
with open('README.rst', encoding='UTF-8') as readme:
long_description = readme.read()
setup(
name='sentinel',
version=VERSION,
url='https://github.com/eddieantonio/sentinel',
license='MIT',
author='Eddie Antonio Santos',
author_email='[email protected]',
description='Create sentinel and singleton objects',
long_description=long_description,
py_modules=['sentinel'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python',
],
download_url = 'https://github.com/eddieantonio/sentinel/tarball/v' + VERSION,
)
|
from distutils.core import setup
from sentinel import __version__ as VERSION
from codecs import open
with open('README.rst', encoding='UTF-8') as readme:
long_description = readme.read()
setup(
name='sentinel',
version=VERSION,
url='https://github.com/eddieantonio/sentinel',
license='MIT',
author='Eddie Antonio Santos',
author_email='[email protected]',
description='Create sentinel and singleton objects',
long_description=long_description,
py_modules=['sentinel'],
platforms='any',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
'Programming Language :: Python',
],
download_url = 'https://github.com/eddieantonio/sentinel/tarball/v' + VERSION,
)
|
mit
|
Python
|
1bfb63c704ae9d947310c8f0f8250ef43aae6217
|
Update setup.py
|
williamjacksn/python-rainwave-client
|
setup.py
|
setup.py
|
from setuptools import setup
import rainwaveclient
setup(
name='python-rainwave-client',
version=rainwaveclient.__version__,
author=rainwaveclient.__author__,
author_email='[email protected]',
url='https://github.com/williamjacksn/python-rainwave-client',
description='Python client library for Rainwave',
packages=['rainwaveclient'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries'
],
license=open('LICENSE').read()
)
|
from setuptools import setup
import rainwaveclient
setup(
name='python-rainwave-client',
version=rainwaveclient.__version__,
author=rainwaveclient.__author__,
author_email='[email protected]',
url='https://gutter.readthedocs.org/',
description='Python Rainwave client library',
packages=['rainwaveclient'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries'
],
license=open('LICENSE').read()
)
|
mit
|
Python
|
32c4ac486ded1ef4d4e37f182072bb1a3350db0c
|
Update 1.1 -> 1.2
|
desolat/python-junit-xml,kyrus/python-junit-xml
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='junit-xml',
author='Brian Beyer',
author_email='[email protected]',
url='https://github.com/kyrus/python-junit-xml',
license='MIT',
packages=find_packages(),
description='Creates JUnit XML test result documents that can be read by tools such as Jenkins',
long_description=read('README.rst'),
version = "1.2",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Testing',
],
)
|
#!/usr/bin/env python
from setuptools import setup, find_packages
import os
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(name='junit-xml',
author='Brian Beyer',
author_email='[email protected]',
url='https://github.com/kyrus/python-junit-xml',
license='MIT',
packages=find_packages(),
description='Creates JUnit XML test result documents that can be read by tools such as Jenkins',
long_description=read('README.rst'),
version = "1.1",
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: Freely Distributable',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Testing',
],
)
|
mit
|
Python
|
81a0ffba0a744df61da246be5a1729981c2a32b4
|
Bump version (1.0.2 → 1.1.0).
|
renstrom/webassets-browserify,dudymas/webassets-browserify
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
"""
Browserify filter for webassets
-------------------------------
Filter for for compiling assets using `Browserify <http://browserify.org>`_ and
`webassets <http://webassets.readthedocs.org>`_.
Basic usage
```````````
.. code:: python
from webassets.filter import register_filter
from webassets_browserify import Browserify
register_filter(Browserify)
Usage with Django
`````````````````
This requires `django-assets <http://django-assets.readthedocs.org>`_.
.. code:: python
from django_assets import Bundle, register
from webassets.filter import register_filter
from webassets_browserify import Browserify
register_filter(Browserify)
js = Bundle('js/main.js', filters='browserify', output='bundle.js',
depends='js/**/*.js')
register('js_all', js)
"""
from setuptools import setup, find_packages
setup(name='webassets-browserify',
version='1.1.0',
description='Browserify filter for webassets',
long_description=__doc__,
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/webassets-browserify',
packages=find_packages(),
keywords=['browserify', 'webassets', 'django assets'],
install_requires=['webassets'],
test_suite='webassets_browserify.tests',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
|
# -*- coding: utf-8 -*-
"""
Browserify filter for webassets
-------------------------------
Filter for for compiling assets using `Browserify <http://browserify.org>`_ and
`webassets <http://webassets.readthedocs.org>`_.
Basic usage
```````````
.. code:: python
from webassets.filter import register_filter
from webassets_browserify import Browserify
register_filter(Browserify)
Usage with Django
`````````````````
This requires `django-assets <http://django-assets.readthedocs.org>`_.
.. code:: python
from django_assets import Bundle, register
from webassets.filter import register_filter
from webassets_browserify import Browserify
register_filter(Browserify)
js = Bundle('js/main.js', filters='browserify', output='bundle.js',
depends='js/**/*.js')
register('js_all', js)
"""
from setuptools import setup, find_packages
setup(name='webassets-browserify',
version='1.0.2',
description='Browserify filter for webassets',
long_description=__doc__,
author='Peter Renström',
license='MIT',
url='https://github.com/renstrom/webassets-browserify',
packages=find_packages(),
keywords=['browserify', 'webassets', 'django assets'],
install_requires=['webassets'],
test_suite='webassets_browserify.tests',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
])
|
mit
|
Python
|
a0e56119990f8d0e25cd8835e050d354e4a3a4d7
|
update author and email
|
FallenWarrior2k/cardinal.py,FallenWarrior2k/cardinal.py
|
setup.py
|
setup.py
|
import sys
from pathlib import Path
from setuptools import find_namespace_packages, setup
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import tox
errcode = tox.cmdline(self.test_args)
sys.exit(errcode)
# Check if Git is present before enabling setuptools_scm
version_kwargs = {}
git_root = Path(__file__).resolve().parent / '.git'
if git_root.exists():
version_kwargs.update({
'use_scm_version': True,
'setup_requires': ['setuptools_scm']
})
setup(
name='cardinal.py',
**version_kwargs,
description='A growing bot for managing a Discord server',
author='FallenWarrior2k',
author_email='[email protected]',
url='https://github.com/FallenWarrior2k/cardinal.py',
platforms='any',
packages=find_namespace_packages(where='src'),
package_dir={'': 'src'},
package_data={
'cardinal': [
'db/migrations/alembic.ini',
'db/migrations/env.py',
'db/migrations/versions/*.py'
]
},
install_requires=[
'discord.py>=1.5',
'SQLAlchemy>=1.3',
'aioitertools>=0.7',
'alembic',
'dependency_injector',
'markdownify'
],
tests_require=['tox'],
extras_require={
'tests': ['tox']
},
cmdclass={'test': Tox}
)
|
import sys
from pathlib import Path
from setuptools import find_namespace_packages, setup
from setuptools.command.test import test as TestCommand
class Tox(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import tox
errcode = tox.cmdline(self.test_args)
sys.exit(errcode)
# Check if Git is present before enabling setuptools_scm
version_kwargs = {}
git_root = Path(__file__).resolve().parent / '.git'
if git_root.exists():
version_kwargs.update({
'use_scm_version': True,
'setup_requires': ['setuptools_scm']
})
setup(
name='cardinal.py',
**version_kwargs,
description='A growing bot for managing a Discord server',
author='Simon Engmann',
author_email='[email protected]',
url='https://github.com/FallenWarrior2k/cardinal.py',
platforms='any',
packages=find_namespace_packages(where='src'),
package_dir={'': 'src'},
package_data={
'cardinal': [
'db/migrations/alembic.ini',
'db/migrations/env.py',
'db/migrations/versions/*.py'
]
},
install_requires=[
'discord.py>=1.5',
'SQLAlchemy>=1.3',
'aioitertools>=0.7',
'alembic',
'dependency_injector',
'markdownify'
],
tests_require=['tox'],
extras_require={
'tests': ['tox']
},
cmdclass={'test': Tox}
)
|
mit
|
Python
|
ff6b42693e71f36882a1f56c3ffb310812efb043
|
Update the setup.py to register and upload
|
educreations/django-ormcache
|
setup.py
|
setup.py
|
#!/usr/bin/env python
import os
import sys
from setuptools import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py register sdist upload')
os.system('python setup.py register bdist_wheel upload')
sys.exit()
readme_text = open("README.rst", "r").read()
setup(
name="django-ormcache",
version="0.2",
description="ORM cache for Django",
license="MIT",
keywords="cache django",
author="Corey Farwell",
author_email="[email protected]",
maintainer="Corey Farwell",
maintainer_email="[email protected]",
url="https://github.com/educreations/django-ormcache",
long_description=readme_text,
packages=["ormcache"],
package_dir={"ormcache": "ormcache"},
install_requires=['Django'],
classifiers=[
'Framework :: Django',
"Intended Audience :: Developers",
'License :: OSI Approved :: MIT License',
"Operating System :: OS Independent",
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
]
)
|
#!/usr/bin/env python
import os
import sys
from setuptools import setup
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
os.system('python setup.py bdist_wheel upload')
sys.exit()
readme_text = open("README.rst", "rb").read()
setup(
name="django-ormcache",
version="0.2",
description="ORM cache for Django",
license="MIT",
keywords="cache django",
author="Corey Farwell",
author_email="[email protected]",
maintainer="Corey Farwell",
maintainer_email="[email protected]",
url="https://github.com/educreations/django-ormcache",
long_description=readme_text,
packages=["ormcache"],
package_dir={"ormcache": "ormcache"},
install_requires=['Django'],
classifiers=[
'Framework :: Django',
"Intended Audience :: Developers",
'License :: OSI Approved :: MIT License',
"Operating System :: OS Independent",
"Programming Language :: Python",
'Programming Language :: Python :: 2.7',
'Topic :: Software Development',
]
)
|
mit
|
Python
|
23d275d0f9e4ba1a4ab57918615467867988446d
|
Fix setup.py typo
|
Almad/django-sane-testing
|
setup.py
|
setup.py
|
#!/usr/bin/env python
try:
import ez_setup
ez_setup.use_setuptools()
except ImportError:
pass
from setuptools import setup
project_dir = 'djangosanetesting'
name = 'djangosanetesting'
version = '0.5.6'
setup(
name = name,
version = version,
url = 'http://devel.almad.net/trac/django-sane-testing/',
author = 'Lukas Linhart',
author_email = '[email protected]',
description = u'''
Django: Sane testing
======================
django-sane-testing integrates Django with Nose testing framework. Goal is to provide nose goodies to Django testing and to support feasible integration or functional testing of Django applications, for example by providing more control over transaction/database handling.
Thus, there is a way to start HTTP server for non-WSGI testing - like using Selenium or Windmill.
Selenium has also been made super easy - just start --with-selenium, inherit from SeleniumTestCase and use self.selenium.
Package is documented - see docs/ or http://getthedocs.org/Almad/djangosanetesting.
''',
packages = ['djangosanetesting', 'djangosanetesting.selenium'],
scripts = [],
requires = ['Django (>=1.0)', 'nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules",
],
entry_points = {
'nose.plugins.0.10': [
'djangoliveserver = %s.noseplugins:DjangoLiveServerPlugin' % name,
'cherrypyliveserver = %s.noseplugins:CherryPyLiveServerPlugin' % name,
'django = %s.noseplugins:DjangoPlugin' % name,
'selenium = %s.noseplugins:SeleniumPlugin' % name,
'sanetestselection = %s.noseplugins:SaneTestSelectionPlugin' % name,
'djangotranslations = %s.noseplugins:DjangoTranslationPlugin' % name,
]
}
)
|
#!/usr/bin/env python
try:
import ez_setup
ez_setup.use_setuptools()
except ImportError:
pass
from setuptools import setup
project_dir = 'djangosanetesting'
name = 'djangosanetesting'
version = '0.5.6'
setup(
name = name,
version = version,
url = 'http://devel.almad.net/trac/django-sane-testing/',
author = 'Lukas Linhart',
author_email = '[email protected]',
description = u'''
Django: Sane testing
======================
django-sane-testing integrates Django with Nose testing framework. Goal is to provide nose goodies to Django testing and to support feasible integration or functional testing of Django applications, for example by providing more control over transaction/database handling.
Thus, there is a way to start HTTP server for non-WSGI testing - like using Selenium or Windmill.
Selenium has also been made super easy - just start --with-selenium, inherit from SeleniumTestCase and use self.selenium.
Package is documented - see docs/ or http://getthedocs.org/Almad/djangosanetesting.
'''
packages = ['djangosanetesting', 'djangosanetesting.selenium'],
scripts = [],
requires = ['Django (>=1.0)', 'nose (>=0.10)'],
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Libraries :: Python Modules",
],
entry_points = {
'nose.plugins.0.10': [
'djangoliveserver = %s.noseplugins:DjangoLiveServerPlugin' % name,
'cherrypyliveserver = %s.noseplugins:CherryPyLiveServerPlugin' % name,
'django = %s.noseplugins:DjangoPlugin' % name,
'selenium = %s.noseplugins:SeleniumPlugin' % name,
'sanetestselection = %s.noseplugins:SaneTestSelectionPlugin' % name,
'djangotranslations = %s.noseplugins:DjangoTranslationPlugin' % name,
]
}
)
|
bsd-3-clause
|
Python
|
355a264a3b82d378d77a47916b217be8d573ad25
|
Add a module docstring to `jacquard.storage.base`
|
prophile/jacquard,prophile/jacquard
|
jacquard/storage/base.py
|
jacquard/storage/base.py
|
"""Base class for storage engine implementations."""
import abc
import contextlib
from .utils import TransactionMap
class KVStore(metaclass=abc.ABCMeta):
@abc.abstractmethod
def __init__(self, connection_string):
pass
@abc.abstractmethod
def begin(self):
pass
@abc.abstractmethod
def commit(self, changes, deletions):
pass
@abc.abstractmethod
def rollback(self):
pass
@abc.abstractmethod
def keys(self):
pass
@abc.abstractmethod
def get(self, key):
pass
def encode_key(self, key):
return key
def decode_key(self, key):
return key
@contextlib.contextmanager
def transaction(self):
self.begin()
transaction_map = TransactionMap(self)
try:
yield transaction_map
except Exception:
self.rollback()
raise
if (
not transaction_map.changes and
not transaction_map.deletions
):
# Don't bother running a commit if nothing actually changed
self.rollback()
else:
self.commit(
transaction_map.changes,
transaction_map.deletions,
)
|
import abc
import contextlib
from .utils import TransactionMap
class KVStore(metaclass=abc.ABCMeta):
@abc.abstractmethod
def __init__(self, connection_string):
pass
@abc.abstractmethod
def begin(self):
pass
@abc.abstractmethod
def commit(self, changes, deletions):
pass
@abc.abstractmethod
def rollback(self):
pass
@abc.abstractmethod
def keys(self):
pass
@abc.abstractmethod
def get(self, key):
pass
def encode_key(self, key):
return key
def decode_key(self, key):
return key
@contextlib.contextmanager
def transaction(self):
self.begin()
transaction_map = TransactionMap(self)
try:
yield transaction_map
except Exception:
self.rollback()
raise
if (
not transaction_map.changes and
not transaction_map.deletions
):
# Don't bother running a commit if nothing actually changed
self.rollback()
else:
self.commit(
transaction_map.changes,
transaction_map.deletions,
)
|
mit
|
Python
|
acf63adc560a693145856bc800f1d4afb79a2dcd
|
Remove specified pypi build
|
thouska/spotpy,thouska/spotpy,thouska/spotpy
|
setup.py
|
setup.py
|
# Copyright (c) 2015, Tobias Houska
from setuptools import setup, find_packages
import os
setup(
name = 'spotpy',
version = '1.5.11',
description = 'A Statistical Parameter Optimization Tool',
long_description=open(os.path.join(os.path.dirname(__file__),
"README.rst")).read(),
author = 'Tobias Houska, Philipp Kraft, Alejandro Chamorro-Chavez and Lutz Breuer',
author_email = '[email protected]',
url = 'https://spotpy.readthedocs.io/en/latest/',
license = 'MIT',
install_requires=[
'scipy', 'numpy', 'logging'],
packages=find_packages(exclude=["tests*", "docs*"]),
use_2to3 = True,
keywords = 'Monte Carlo, MCMC, MLE, SCE-UA, Simulated Annealing, DE-MCz, DREAM, ROPE, Artifical Bee Colony, DDS, PA-DDS, Uncertainty, Calibration, Model, Signatures',
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules'],
)
|
# Copyright (c) 2015, Tobias Houska
from setuptools import setup, find_packages
import os
# Type of python distribution
[bdist_wheel]
universal=0
setup(
name = 'spotpy',
version = '1.5.11',
description = 'A Statistical Parameter Optimization Tool',
long_description=open(os.path.join(os.path.dirname(__file__),
"README.rst")).read(),
author = 'Tobias Houska, Philipp Kraft, Alejandro Chamorro-Chavez and Lutz Breuer',
author_email = '[email protected]',
url = 'https://spotpy.readthedocs.io/en/latest/',
license = 'MIT',
install_requires=[
'scipy', 'numpy', 'logging'],
packages=find_packages(exclude=["tests*", "docs*"]),
use_2to3 = True,
keywords = 'Monte Carlo, MCMC, MLE, SCE-UA, Simulated Annealing, DE-MCz, DREAM, ROPE, Artifical Bee Colony, DDS, PA-DDS, Uncertainty, Calibration, Model, Signatures',
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Software Development :: Libraries :: Python Modules'],
)
|
mit
|
Python
|
b2e4882c8a58af7f2c8b207d0941b759471b20a1
|
add scipy.stats.expon.pdf
|
tensorflow/probability,google/jax,tensorflow/probability,google/jax,google/jax,google/jax
|
jax/scipy/stats/expon.py
|
jax/scipy/stats/expon.py
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
import scipy.stats as osp_stats
from ... import lax
from ...numpy.lax_numpy import _promote_args_like, _wraps, where, inf
@_wraps(osp_stats.expon.logpdf)
def logpdf(x, loc=0, scale=1):
x, loc, scale = _promote_args_like(osp_stats.expon.logpdf, x, loc, scale)
log_scale = lax.log(scale)
linear_term = lax.div(lax.sub(x, loc), scale)
log_probs = lax.neg(lax.add(linear_term, log_scale))
return where(lax.le(x, loc), -inf, log_probs)
@_wraps(osp_stats.expon.pdf)
def pdf(x, loc=0, scale=1):
return lax.exp(logpdf(x, loc, scale))
|
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as onp
import scipy.stats as osp_stats
from ... import lax
from ...numpy.lax_numpy import _promote_args_like, _wraps, where, inf
@_wraps(osp_stats.expon.logpdf)
def logpdf(x, loc=0, scale=1):
x, loc, scale = _promote_args_like(osp_stats.expon.logpdf, x, loc, scale)
log_scale = lax.log(scale)
linear_term = lax.div(lax.sub(x, loc), scale)
log_probs = lax.neg(lax.add(linear_term, log_scale))
return where(lax.le(x, loc), -inf, log_probs)
|
apache-2.0
|
Python
|
24d35c62409cb37fe5a8c0d0646d3e393fec5928
|
Bump patch
|
egtaonline/GameAnalysis
|
gameanalysis/__init__.py
|
gameanalysis/__init__.py
|
__version__ = '4.1.1'
|
__version__ = '4.1.0'
|
apache-2.0
|
Python
|
38fb1ef71f827ff8483984ed9b7844dbdd945643
|
Add dependency link to daploader from pypi to overide Openshift's cache
|
devassistant/dapi,devassistant/dapi,devassistant/dapi
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Dapi',
version='1.0',
description='DevAssistant Package Index',
author='Miro Hroncok',
author_email='[email protected]',
url='https://github.com/hroncok/dapi',
license='AGPLv3',
install_requires=[
'Django==1.6',
'psycopg2',
'South',
'daploader>=0.0.5',
'PyYAML',
'python-social-auth',
'django-taggit',
'django-simple-captcha',
'django-haystack',
'whoosh',
'djangorestframework',
'django-gravatar2',
'markdown2',
'Markdown',
],
dependency_links = [
'git+git://github.com/omab/python-social-auth.git@c5dd3339#egg=python-social-auth',
'git+git://github.com/tomchristie/django-rest-framework.git@03b4c60b#egg=djangorestframework',
'https://pypi.python.org/packages/source/d/daploader/daploader-0.0.5.tar.gz'
]
)
|
#!/usr/bin/env python
from setuptools import setup
setup(
name='Dapi',
version='1.0',
description='DevAssistant Package Index',
author='Miro Hroncok',
author_email='[email protected]',
url='https://github.com/hroncok/dapi',
license='AGPLv3',
install_requires=[
'Django==1.6',
'psycopg2',
'South',
'daploader>=0.0.5',
'PyYAML',
'python-social-auth',
'django-taggit',
'django-simple-captcha',
'django-haystack',
'whoosh',
'djangorestframework',
'django-gravatar2',
'markdown2',
'Markdown',
],
dependency_links = [
'git+git://github.com/omab/python-social-auth.git@c5dd3339#egg=python-social-auth',
'git+git://github.com/tomchristie/django-rest-framework.git@03b4c60b#egg=djangorestframework',
]
)
|
agpl-3.0
|
Python
|
42151375b9c8bc25f12e8aebd01e63998a1aae82
|
Set the slug for runner factory
|
lutris/website,lutris/website,Turupawn/website,Turupawn/website,Turupawn/website,lutris/website,lutris/website,Turupawn/website
|
games/tests/factories.py
|
games/tests/factories.py
|
import factory
from django.utils.text import slugify
from django.db.models.signals import post_save
from games import models
from accounts.models import User
from accounts.signals import create_library
class PlatformFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Platform
name = 'Amiga'
class GameFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Game
name = factory.Iterator(['Quake', 'Unreal', 'Serious Sam',
'Duke 3D', 'Deus Ex'])
year = 1999
is_public = True
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = User
first_name = "Tester"
last_name = "Testing"
username = factory.Sequence(lambda n: 'user%d' % n)
email = "[email protected]"
is_active = True
class UserNoLibraryFactory(UserFactory):
@classmethod
def _create(cls, target_class, *args, **kwargs):
post_save.disconnect(create_library, User)
user = super(UserFactory, cls)._create(target_class, *args, **kwargs)
user.set_password('password')
user.save()
post_save.connect(create_library, User)
return user
class GameLibraryFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.GameLibrary
user = factory.SubFactory(UserNoLibraryFactory)
@factory.post_generation
def games(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for game in extracted:
self.games.add(game)
class RunnerFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Runner
name = factory.Sequence(lambda n: 'runner%s' % n)
@factory.post_generation
def set_slug(self, create, extracted, **kwargs):
if not create:
return
self.slug = slugify(unicode(self.name))
class InstallerFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Installer
runner = factory.SubFactory(RunnerFactory)
version = 'test'
published = True
user = factory.SubFactory(UserNoLibraryFactory)
|
import factory
from django.db.models.signals import post_save
from games import models
from accounts.models import User
from accounts.signals import create_library
class PlatformFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Platform
name = 'Amiga'
class GameFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Game
name = factory.Iterator(['Quake', 'Unreal', 'Serious Sam',
'Duke 3D', 'Deus Ex'])
year = 1999
is_public = True
class UserFactory(factory.DjangoModelFactory):
FACTORY_FOR = User
first_name = "Tester"
last_name = "Testing"
username = factory.Sequence(lambda n: 'user%d' % n)
email = "[email protected]"
is_active = True
class UserNoLibraryFactory(UserFactory):
@classmethod
def _create(cls, target_class, *args, **kwargs):
post_save.disconnect(create_library, User)
user = super(UserFactory, cls)._create(target_class, *args, **kwargs)
user.set_password('password')
user.save()
post_save.connect(create_library, User)
return user
class GameLibraryFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.GameLibrary
user = factory.SubFactory(UserNoLibraryFactory)
@factory.post_generation
def games(self, create, extracted, **kwargs):
if not create:
return
if extracted:
for game in extracted:
self.games.add(game)
class RunnerFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Runner
name = factory.Sequence(lambda n: 'runner%s' % n)
class InstallerFactory(factory.DjangoModelFactory):
FACTORY_FOR = models.Installer
runner = factory.SubFactory(RunnerFactory)
version = 'test'
published = True
user = factory.SubFactory(UserNoLibraryFactory)
|
agpl-3.0
|
Python
|
f4685ae393a7cbaeea972b85d4e43c0a623722e9
|
Bump version to 0.1.4 in setup.py
|
refinery29/chassis,refinery29/chassis
|
setup.py
|
setup.py
|
"""Chassis: Opinionated REST Framework."""
from setuptools import find_packages, setup
setup(
name='chassis',
version='0.1.4',
packages=find_packages(),
description="Opinionated REST Framework",
author="Refinery 29",
author_email="[email protected]",
url="https://github.com/refinery29/chassis",
download_url="https://github.com/refinery29/chassis/archive/v0.1.4.tar.gz",
keywords=['Tornado', 'RESTful', 'REST', 'API', 'JSON', 'framework'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Software Development :: Libraries :: Application Frameworks'
],
install_requires=[
'tornado',
'six'
],
long_description="""\
Chassis is Refinery29's framework layer on top of Tornado for rapidly
building performant, self-documenting JSON-based REST APIs.
"""
)
# TODO: Add validate-email==1.3 dependency
|
"""Chassis: Opinionated REST Framework."""
from setuptools import find_packages, setup
setup(
name='chassis',
version='0.1.3',
packages=find_packages(),
description="Opinionated REST Framework",
author="Refinery 29",
author_email="[email protected]",
url="https://github.com/refinery29/chassis",
download_url="https://github.com/refinery29/chassis/archive/v0.1.3.tar.gz",
keywords=['Tornado', 'RESTful', 'REST', 'API', 'JSON', 'framework'],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Software Development :: Libraries :: Application Frameworks'
],
install_requires=[
'tornado',
'six'
],
long_description="""\
Chassis is Refinery29's framework layer on top of Tornado for rapidly
building performant, self-documenting JSON-based REST APIs.
"""
)
# TODO: Add validate-email==1.3 dependency
|
mit
|
Python
|
6dca6694619a04b21b723adaf20551376ab99acd
|
Change the name of the project to oslo.config
|
metacloud/oslo.config,citrix-openstack-build/oslo.config,varunarya10/oslo.config,tianshan/oslo.config,magic0704/oslo.config,JioCloud/oslo.config,shad7/oslo.config,citrix-openstack/build-oslo.config
|
setup.py
|
setup.py
|
#!/usr/bin/python
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
from magic.openstack.common import setup
package = 'oslo.config'
version = '1.1.0'
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
tests_require = setup.parse_requirements(['tools/test-requires'])
setuptools.setup(
name=package,
version=setup.get_version(package, version),
description='Oslo configuration API',
long_description='The Oslo configuration API supports parsing command '
'line arguments and .ini style configuration files.',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6', ],
author='OpenStack',
author_email='[email protected]',
url='http://www.openstack.org/',
license='Apache Software License',
packages=['oslo', 'oslo.config'],
namespace_packages=['oslo'],
cmdclass=setup.get_cmdclass(),
install_requires=requires,
tests_require=tests_require,
dependency_links=depend_links,
)
|
#!/usr/bin/python
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
from magic.openstack.common import setup
package = 'oslo-config'
version = '1.1.0'
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
tests_require = setup.parse_requirements(['tools/test-requires'])
setuptools.setup(
name=package,
version=setup.get_version(package, version),
description='Oslo configuration API',
long_description='The Oslo configuration API supports parsing command '
'line arguments and .ini style configuration files.',
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6', ],
author='OpenStack',
author_email='[email protected]',
url='http://www.openstack.org/',
license='Apache Software License',
packages=['oslo', 'oslo.config'],
namespace_packages=['oslo'],
cmdclass=setup.get_cmdclass(),
install_requires=requires,
tests_require=tests_require,
dependency_links=depend_links,
)
|
apache-2.0
|
Python
|
13d6f562102decb402e840b8e48c7c5a7b4e1497
|
Update version
|
AnyBody-Research-Group/AnyPyTools
|
anypytools/__init__.py
|
anypytools/__init__.py
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import os
import sys
import platform
import logging
if "FOR_DISABLE_CONSOLE_CTRL_HANDLER" not in os.environ:
os.environ["FOR_DISABLE_CONSOLE_CTRL_HANDLER"] = "1"
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
from anypytools.tools import (
ABOVE_NORMAL_PRIORITY_CLASS,
BELOW_NORMAL_PRIORITY_CLASS,
IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS,
)
logger = logging.getLogger("abt.anypytools")
logger.addHandler(logging.NullHandler())
__all__ = [
"datautils",
"h5py_wrapper",
"AnyPyProcess",
"AnyMacro",
"macro_commands",
"print_versions",
"execute_anybodycon",
"ABOVE_NORMAL_PRIORITY_CLASS",
"BELOW_NORMAL_PRIORITY_CLASS",
"IDLE_PRIORITY_CLASS",
"NORMAL_PRIORITY_CLASS",
]
__version__ = "1.3.0"
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, _, release, version, machine, processor) = platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
# -*- coding: utf-8 -*-
"""AnyPyTools library."""
import os
import sys
import platform
import logging
if "FOR_DISABLE_CONSOLE_CTRL_HANDLER" not in os.environ:
os.environ["FOR_DISABLE_CONSOLE_CTRL_HANDLER"] = "1"
from anypytools.abcutils import AnyPyProcess, execute_anybodycon
from anypytools.macroutils import AnyMacro
from anypytools import macro_commands
from anypytools.tools import (
ABOVE_NORMAL_PRIORITY_CLASS,
BELOW_NORMAL_PRIORITY_CLASS,
IDLE_PRIORITY_CLASS,
NORMAL_PRIORITY_CLASS,
)
logger = logging.getLogger("abt.anypytools")
logger.addHandler(logging.NullHandler())
__all__ = [
"datautils",
"h5py_wrapper",
"AnyPyProcess",
"AnyMacro",
"macro_commands",
"print_versions",
"execute_anybodycon",
"ABOVE_NORMAL_PRIORITY_CLASS",
"BELOW_NORMAL_PRIORITY_CLASS",
"IDLE_PRIORITY_CLASS",
"NORMAL_PRIORITY_CLASS",
]
__version__ = "1.2.2"
def print_versions():
"""Print all the versions of software that AnyPyTools relies on."""
import numpy as np
import scipy as sp
print("-=" * 38)
print("AnyPyTools version: %s" % __version__)
print("NumPy version: %s" % np.__version__)
print("SciPy version: %s" % sp.__version__)
print("Python version: %s" % sys.version)
(sysname, _, release, version, machine, processor) = platform.uname()
print("Platform: %s-%s-%s (%s)" % (sysname, release, machine, version))
if not processor:
processor = "not recognized"
print("Processor: %s" % processor)
print("Byte-ordering: %s" % sys.byteorder)
print("-=" * 38)
|
mit
|
Python
|
02ed373ec7818d51ba881c973125bf4d995e04c7
|
bump to 0.0.10
|
botstory/botstory,hyzhak/bot-story,hyzhak/bot-story,botstory/botstory
|
setup.py
|
setup.py
|
import os
from setuptools import setup, find_packages
longDesc = ""
if os.path.exists("README.md"):
longDesc = open("README.md").read().strip()
setup(
name='botstory',
packages=find_packages(),
version='0.0.10',
description='Async framework for bots',
license='MIT',
long_description=longDesc,
author='Eugene Krevenets',
author_email='[email protected]',
url='https://github.com/hyzhak/bot-story',
download_url='https://github.com/hyzhak/bot-story/tarball/0.0.1',
keywords=['bot', 'ai', 'nlp', 'asyncio'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Topic :: Communications :: Chat',
# Not early because of async/await
'Programming Language :: Python :: 3.5',
],
install_requires=[
'aiohttp==1.0.5',
'motor==0.7b0',
'yarl==0.4.3',
],
)
|
import os
from setuptools import setup, find_packages
longDesc = ""
if os.path.exists("README.md"):
longDesc = open("README.md").read().strip()
setup(
name='botstory',
packages=find_packages(),
version='0.0.9',
description='Async framework for bots',
license='MIT',
long_description=longDesc,
author='Eugene Krevenets',
author_email='[email protected]',
url='https://github.com/hyzhak/bot-story',
download_url='https://github.com/hyzhak/bot-story/tarball/0.0.1',
keywords=['bot', 'ai', 'nlp', 'asyncio'],
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Topic :: Communications :: Chat',
# Not early because of async/await
'Programming Language :: Python :: 3.5',
],
install_requires=[
'aiohttp==1.0.5',
'motor==0.7b0',
'yarl==0.4.3',
],
)
|
mit
|
Python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.