commit | subject | repos | old_file | new_file | new_contents | old_contents | license | lang
---|---|---|---|---|---|---|---|---|
edbc9f2c31f98e1447c352058aa05e6884a0927b | Create fold_eigenvalues.py | mlaloux/QGIS_structural_geology | fold_eigenvalues.py | fold_eigenvalues.py | #Definition of inputs and outputs
#==================================
##[Mes scripts GEOL]=group
##entree=vector
##dip_dir=field entree
##dip=field entree
#Algorithm body
#==================================
from qgis.core import *
from apsg import *
layer = processing.getObject(entree)
dipdir = layer.fieldNameIndex(dip_dir)
dip = layer.fieldNameIndex(dip)
if layer.selectedFeatureCount():
    g = Group([Vec3(Fol(elem.attributes()[dipdir],elem.attributes()[dip])) for elem in layer.selectedFeatures()],name='plis')
else:
    g = Group([Vec3(Fol(elem.attributes()[dipdir],elem.attributes()[dip])) for elem in layer.getFeatures()],name='plis')
resultat = "fold plunge: " + str(int(round(Ortensor(g).eigenlins.data[2].dd[1]))) + " -> " + str(int(round(Ortensor(g).eigenlins.data[2].dd[0])))
s = StereoNet()
a = s.ax
s.line(g.aslin, 'b.',markersize=18)
s.line(Ortensor(g).eigenlins.data[0],'g.',markersize=18)
s.plane(Ortensor(g).eigenfols.data[0],'g')
s.line(Ortensor(g).eigenlins.data[1],'c.',markersize=18)
s.plane(Ortensor(g).eigenfols.data[1],'c')
s.line(Ortensor(g).eigenlins.data[2],'r.',markersize=18)
s.plane(Ortensor(g).eigenfols.data[2],'r')
a.set_title(resultat, y=1.06, size=14, color='red')
s.show()
| apache-2.0 | Python |
|
f55771da6a617c71f2eb620c11fb54e033c64338 | Migrate upload-orange-metadata process type | genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio,genialis/resolwe-bio | resolwe_bio/migrations/0002_metadata_table_type.py | resolwe_bio/migrations/0002_metadata_table_type.py | from django.db import migrations
from resolwe.flow.migration_ops import ResolweProcessChangeType
class Migration(migrations.Migration):
"""
Change the ``upload-orange-metadata`` process type.
"""
dependencies = [
("resolwe_bio", "0001_squashed_0015_sample_indices"),
]
operations = [
ResolweProcessChangeType(
process="upload-orange-metadata",
new_type="data:metadata:unique:",
),
]
| apache-2.0 | Python |
|
4170807e4a1c70eef6416fe3f1661c9c1c99a9da | Add pysal test | Kaggle/docker-python,Kaggle/docker-python | tests/test_pysal.py | tests/test_pysal.py | import unittest
from pysal.weights import lat2W
class TestPysal(unittest.TestCase):
    def test_distance_band(self):
        w = lat2W(4,4)
        self.assertEqual(16, w.n) | apache-2.0 | Python |
|
484a2bf0c28aa2bbc910ca20849840bf518d4329 | Add utils.banners test case | gogoair/foremast,gogoair/foremast | tests/test_utils.py | tests/test_utils.py | # Foremast - Pipeline Tooling
#
# Copyright 2016 Gogo, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test utils."""
from unittest import mock
from foremast.utils.banners import banner
@mock.patch('foremast.utils.banners.LOG')
def test_utils_banner(mock_log):
    banner('test', border='+', width=10)
    mock_log.info.assert_called_with('+' * 10)
| apache-2.0 | Python |
|
45efbbdfd62cd0f9f8232bfd7ebd1aae0ac6cd17 | Create humidity.py | ariegg/webiopi-drivers,ariegg/webiopi-drivers | abstractions/sensor/humidity/humidity.py | abstractions/sensor/humidity/humidity.py | # This code has to be added to __init__.py in folder .../devices/sensor
class Humidity():
    def __family__(self):
        return "Humidity"

    def __getHumidity__(self):
        raise NotImplementedError

    @api("Humidity", 0)
    @request("GET", "sensor/humidity/*")
    @response(contentType=M_JSON)
    def humidityWildcard(self):
        values = {}
        humidity = self.__getHumidity__()
        values["float"] = "%f" % humidity
        values["percent"] = "%d" % (humidity * 100)
        return values

    @api("Humidity")
    @request("GET", "sensor/humidity/float")
    @response("%f")
    def getHumidity(self):
        return self.__getHumidity__()

    @api("Humidity")
    @request("GET", "sensor/humidity/percent")
    @response("%d")
    def getHumidityPercent(self):
        return self.__getHumidity__() * 100
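# Illustrative sketch (not part of the original file): a concrete driver would
# mix Humidity in and implement __getHumidity__, returning a ratio in [0, 1].
# The class name and reading below are hypothetical.
#
# class FakeHumiditySensor(Humidity):
#     def __getHumidity__(self):
#         return 0.42  # served as 0.420000 via /float and 42 via /percent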
| apache-2.0 | Python |
|
c9bd5ba167284d79ae0cbe7aaaf9ec8536bef918 | add hiprec.py | ultimate-pa/benchexec,martin-neuhaeusser/benchexec,martin-neuhaeusser/benchexec,ultimate-pa/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,IljaZakharov/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec,IljaZakharov/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,martin-neuhaeusser/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,IljaZakharov/benchexec | benchexec/tools/hiprec.py | benchexec/tools/hiprec.py | #!/usr/bin/env python
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# prepare for Python 3
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import subprocess
import sys
import os
import re
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
sys.dont_write_bytecode = True # prevent creation of .pyc files
if __name__ == "__main__":
sys.path.append(os.path.join(os.path.dirname(__file__), os.path.pardir, os.path.pardir))
import benchexec.result as result
import benchexec.util as util
import benchexec.tools.template
from benchexec.model import SOFTTIMELIMIT
REQUIRED_PATHS = [
"hiprec",
]
class Tool(benchexec.tools.template.BaseTool):
    """
    Tool wrapper for HIPrec.
    """

    def executable(self):
        executable = util.find_executable('hiprec')
        return executable

    def working_directory(self, executable):
        return os.curdir

    def name(self):
        return 'hiprec'

    def cmdline(self, executable, options, tasks, propertyfile=None, rlimits={}):
        return [executable] + options + tasks

    def determine_result(self, returncode, returnsignal, output, isTimeout):
        """
        @param returncode: code returned by hiprec
        @param returnsignal: signal, which terminated hiprec
        @param output: the output of hiprec
        @return: status of hiprec after executing a run
        """
        status = None
        for line in output:
            if line.startswith('Verification result: '):
                line = line[22:].strip()
                if line.startswith('TRUE'):
                    newStatus = result.RESULT_TRUE_PROP
                elif line.startswith('FALSE'):
                    newStatus = result.RESULT_FALSE_REACH
                else:
                    newStatus = result.RESULT_UNKNOWN
                if not status:
                    status = newStatus
                elif newStatus != result.RESULT_UNKNOWN:
                    status = "{0} ({1})".format(status, newStatus)
        if not status:
            status = result.RESULT_UNKNOWN
        return status
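# Illustrative mapping (derived from determine_result above): an output line
# "Verification result: TRUE" yields result.RESULT_TRUE_PROP, "FALSE" yields
# result.RESULT_FALSE_REACH, and a missing or unrecognized result line yields
# result.RESULT_UNKNOWN.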
| apache-2.0 | Python |
|
d726fd9b05b846097ee877ad0897f8416dbceaf7 | Add missing __init__ | olgabot/pelican-plugins,talha131/pelican-plugins,UHBiocomputation/pelican-plugins,MarkusH/pelican-plugins,kdheepak89/pelican-plugins,rlaboiss/pelican-plugins,olgabot/pelican-plugins,clokep/pelican-plugins,rlaboiss/pelican-plugins,howthebodyworks/pelican-plugins,FuzzJunket/pelican-plugins,yuanboshe/pelican-plugins,pestrickland/pelican-plugins,olgabot/pelican-plugins,if1live/pelican-plugins,gjreda/pelican-plugins,pxquim/pelican-plugins,benjaminabel/pelican-plugins,mortada/pelican-plugins,yuanboshe/pelican-plugins,if1live/pelican-plugins,karya0/pelican-plugins,prisae/pelican-plugins,pestrickland/pelican-plugins,mwcz/pelican-plugins,phrawzty/pelican-plugins,phrawzty/pelican-plugins,pestrickland/pelican-plugins,lazycoder-ru/pelican-plugins,clokep/pelican-plugins,wilsonfreitas/pelican-plugins,joachimneu/pelican-plugins,mitchins/pelican-plugins,ziaa/pelican-plugins,jfosorio/pelican-plugins,mikitex70/pelican-plugins,pelson/pelican-plugins,barrysteyn/pelican-plugins,mortada/pelican-plugins,proteansec/pelican-plugins,howthebodyworks/pelican-plugins,amitsaha/pelican-plugins,samueljohn/pelican-plugins,rlaboiss/pelican-plugins,gjreda/pelican-plugins,lindzey/pelican-plugins,ingwinlu/pelican-plugins,mwcz/pelican-plugins,prisae/pelican-plugins,davidmarquis/pelican-plugins,Neurita/pelican-plugins,cctags/pelican-plugins,cmacmackin/pelican-plugins,samueljohn/pelican-plugins,benjaminabel/pelican-plugins,barrysteyn/pelican-plugins,goerz/pelican-plugins,samueljohn/pelican-plugins,davidmarquis/pelican-plugins,makefu/pelican-plugins,pelson/pelican-plugins,andreas-h/pelican-plugins,yuanboshe/pelican-plugins,jantman/pelican-plugins,jcdubacq/pelican-plugins,mwcz/pelican-plugins,Neurita/pelican-plugins,davidmarquis/pelican-plugins,seandavi/pelican-plugins,Samael500/pelican-plugins,danmackinlay/pelican-plugins,proteansec/pelican-plugins,pelson/pelican-plugins,makefu/pelican-plugins,MarkusH/pelican-plugins,ingwinlu/pelican-plugins,M157q/pelican-plugins,amitsaha/pelican-plugins,wilsonfreitas/pelican-plugins,danmackinlay/pelican-plugins,andreas-h/pelican-plugins,Neurita/pelican-plugins,andreas-h/pelican-plugins,rlaboiss/pelican-plugins,jfosorio/pelican-plugins,publicus/pelican-plugins,howthebodyworks/pelican-plugins,barrysteyn/pelican-plugins,lindzey/pelican-plugins,joachimneu/pelican-plugins,lele1122/pelican-plugins,shireenrao/pelican-plugins,M157q/pelican-plugins,phrawzty/pelican-plugins,cmacmackin/pelican-plugins,doctorwidget/pelican-plugins,benjaminabel/pelican-plugins,znegva/pelican-plugins,cmacmackin/pelican-plugins,cctags/pelican-plugins,clokep/pelican-plugins,frickp/pelican-plugins,MarkusH/pelican-plugins,phrawzty/pelican-plugins,proteansec/pelican-plugins,mikitex70/pelican-plugins,frickp/pelican-plugins,publicus/pelican-plugins,danmackinlay/pelican-plugins,publicus/pelican-plugins,mortada/pelican-plugins,MarkusH/pelican-plugins,samueljohn/pelican-plugins,if1live/pelican-plugins,jfosorio/pelican-plugins,kdheepak89/pelican-plugins,karya0/pelican-plugins,cmacmackin/pelican-plugins,clokep/pelican-plugins,farseerfc/pelican-plugins,FuzzJunket/pelican-plugins,wilsonfreitas/pelican-plugins,M157q/pelican-plugins,shireenrao/pelican-plugins,seandavi/pelican-plugins,farseerfc/pelican-plugins,prisae/pelican-plugins,karya0/pelican-plugins,Neurita/pelican-plugins,jprine/pelican-plugins,barrysteyn/pelican-plugins,olgabot/pelican-plugins,mortada/pelican-plugins,mikitex70/pelican-plugins,makefu/pelican-plugins,jantman/pelican-plugins,talha131/pelican-plugins,florian
jacob/pelican-plugins,ingwinlu/pelican-plugins,shireenrao/pelican-plugins,FuzzJunket/pelican-plugins,danmackinlay/pelican-plugins,Xion/pelican-plugins,davidmarquis/pelican-plugins,proteansec/pelican-plugins,jantman/pelican-plugins,florianjacob/pelican-plugins,howthebodyworks/pelican-plugins,jakevdp/pelican-plugins,karya0/pelican-plugins,mitchins/pelican-plugins,zonca/pelican-plugins,amitsaha/pelican-plugins,znegva/pelican-plugins,gw0/pelican-plugins,xsteadfastx/pelican-plugins,jantman/pelican-plugins,lazycoder-ru/pelican-plugins,UHBiocomputation/pelican-plugins,goerz/pelican-plugins,mitchins/pelican-plugins,goerz/pelican-plugins,benjaminabel/pelican-plugins,talha131/pelican-plugins,pestrickland/pelican-plugins,M157q/pelican-plugins,ziaa/pelican-plugins,lazycoder-ru/pelican-plugins,joachimneu/pelican-plugins,UHBiocomputation/pelican-plugins,kdheepak89/pelican-plugins,frickp/pelican-plugins,florianjacob/pelican-plugins,florianjacob/pelican-plugins,mwcz/pelican-plugins,gjreda/pelican-plugins,pxquim/pelican-plugins,xsteadfastx/pelican-plugins,ingwinlu/pelican-plugins,Samael500/pelican-plugins,makefu/pelican-plugins,Samael500/pelican-plugins,MarkusH/pelican-plugins,gw0/pelican-plugins,lindzey/pelican-plugins,lindzey/pelican-plugins,pxquim/pelican-plugins,amitsaha/pelican-plugins,ziaa/pelican-plugins,lele1122/pelican-plugins,wilsonfreitas/pelican-plugins,doctorwidget/pelican-plugins,jcdubacq/pelican-plugins,lazycoder-ru/pelican-plugins,mitchins/pelican-plugins,ziaa/pelican-plugins,pelson/pelican-plugins,mortada/pelican-plugins,talha131/pelican-plugins,lele1122/pelican-plugins,farseerfc/pelican-plugins,Samael500/pelican-plugins,doctorwidget/pelican-plugins,kdheepak89/pelican-plugins,jakevdp/pelican-plugins,shireenrao/pelican-plugins,FuzzJunket/pelican-plugins,goerz/pelican-plugins,cctags/pelican-plugins,UHBiocomputation/pelican-plugins,yuanboshe/pelican-plugins,jakevdp/pelican-plugins,Xion/pelican-plugins,seandavi/pelican-plugins,farseerfc/pelican-plugins,frickp/pelican-plugins,publicus/pelican-plugins,xsteadfastx/pelican-plugins,xsteadfastx/pelican-plugins,if1live/pelican-plugins,znegva/pelican-plugins,Xion/pelican-plugins,jfosorio/pelican-plugins,Xion/pelican-plugins,jakevdp/pelican-plugins,jprine/pelican-plugins,prisae/pelican-plugins,talha131/pelican-plugins,joachimneu/pelican-plugins,mikitex70/pelican-plugins,cctags/pelican-plugins,lele1122/pelican-plugins,doctorwidget/pelican-plugins,seandavi/pelican-plugins,pxquim/pelican-plugins,gjreda/pelican-plugins,farseerfc/pelican-plugins,andreas-h/pelican-plugins | gallery/__init__.py | gallery/__init__.py | from .gallery import *
| agpl-3.0 | Python |
|
76ea7119e075cf6eb86d64768e90cfda124cedf9 | Add benchmarking script | drobilla/serd,drobilla/serd | serd_bench.py | serd_bench.py | #!/usr/bin/env python
import optparse
import os
import subprocess
import sys
class WorkingDirectory:
    "Scoped context for changing working directory"

    def __init__(self, working_dir):
        self.original_dir = os.getcwd()
        self.working_dir = working_dir

    def __enter__(self):
        os.chdir(self.working_dir)
        return self

    def __exit__(self, type, value, traceback):
        os.chdir(self.original_dir)


def filename(n):
    "Filename for a generated file with n statements"
    return 'gen%d.ttl' % n


def gen(sp2b_dir, n_min, n_max, step):
    "Generate files with n_min ... n_max statements if they are not present"
    with WorkingDirectory(sp2b_dir) as dir:
        for n in range(n_min, n_max + step, step):
            out_path = os.path.join(dir.original_dir, 'build', filename(n))
            if not os.path.exists(out_path):
                subprocess.call(['sp2b_gen', '-t', str(n), out_path])


def write_header(results, progs):
    "Write the header line for TSV output"
    results.write('n')
    for prog in progs:
        results.write('\t' + os.path.basename(prog.split()[0]))
    results.write('\n')


def parse_time(report):
    "Return user time and max RSS from a /usr/bin/time -v report"
    time = memory = None
    for line in report.split('\n'):
        if line.startswith('\tUser time'):
            time = float(line[line.find(':') + 1:])
        elif line.startswith('\tMaximum resident set'):
            memory = float(line[line.find(':') + 1:]) * 1024
    return (time, memory)
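# Illustrative example (derived from parse_time above): a report containing
#   "\tUser time (seconds): 1.23"
#   "\tMaximum resident set size (kbytes): 2048"
# parses to (1.23, 2097152.0) -- the resident set size is converted to bytes.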
def run(progs, n_min, n_max, step):
    "Benchmark each program with n_min ... n_max statements"
    with WorkingDirectory('build'):
        results = {'time': open('serdi-time.txt', 'w'),
                   'throughput': open('serdi-throughput.txt', 'w'),
                   'memory': open('serdi-memory.txt', 'w')}

        # Write TSV header for all output files
        for name, f in results.iteritems():
            write_header(f, progs)

        for n in range(n_min, n_max + step, step):
            # Add first column (n) to rows
            rows = {}
            for name, _ in results.iteritems():
                rows[name] = [str(n)]

            # Run each program and fill rows with measurements
            for prog in progs:
                cmd = '/usr/bin/time -v ' + prog + ' ' + filename(n)
                with open(filename(n) + '.out', 'w') as out:
                    sys.stderr.write(cmd + '\n')
                    proc = subprocess.Popen(
                        cmd.split(), stdout=out, stderr=subprocess.PIPE)
                    time, memory = parse_time(proc.communicate()[1])
                    rows['time'] += ['%.07f' % time]
                    rows['throughput'] += ['%d' % (n / time)]
                    rows['memory'] += [str(memory)]

            # Write rows to output files
            for name, f in results.iteritems():
                f.write('\t'.join(rows[name]) + '\n')

        for name, _ in results.iteritems():
            sys.stderr.write('wrote build/serdi-%s.txt\n' % name)


if __name__ == "__main__":
    class OptParser(optparse.OptionParser):
        def format_epilog(self, formatter):
            return self.expand_prog_name(self.epilog)

    opt = OptParser(
        usage='%prog [OPTION]... SP2B_DIR',
        description='Benchmark RDF reading and writing commands\n',
        epilog='''
Example:
  %prog --max 100000 \\
        --run 'rapper -i turtle -o turtle' \\
        --run 'riot --output=ttl' \\
        --run 'rdfpipe -i turtle -o turtle' /path/to/sp2b/src/
''')

    opt.add_option('--max', type='int', default=1000000,
                   help='maximum triple count')
    opt.add_option('--run', type='string', action='append', default=[],
                   help='additional command to run (input file is appended)')

    (options, args) = opt.parse_args()
    if len(args) != 1:
        opt.print_usage()
        sys.exit(1)

    progs = ['serdi -b -f -i turtle -o turtle'] + options.run
    min_n = options.max / 10
    max_n = options.max
    step = min_n

    gen(str(args[0]), min_n, max_n, step)
    run(progs, min_n, max_n, step)
| isc | Python |
|
c206969facfc0e46d7ec4d3f60ce2e6a07956dbd | Use filfinder to get the average radial width of features in the moment 0 | e-koch/VLA_Lband,e-koch/VLA_Lband | 14B-088/HI/analysis/run_filfinder.py | 14B-088/HI/analysis/run_filfinder.py |
from fil_finder import fil_finder_2D
from basics import BubbleFinder2D
from spectral_cube.lower_dimensional_structures import Projection
from astropy.io import fits
from radio_beam import Beam
from astropy.wcs import WCS
import astropy.units as u
import matplotlib.pyplot as p
'''
Filaments in M33? Why not?
'''
mom0_fits = fits.open("/home/eric/MyRAID/M33/14B-088/HI/full_imaging/M33_14B-088_HI.clean.image.pbcov_gt_0.3_masked.mom0.fits")[0]
mom0 = Projection(mom0_fits.data, wcs=WCS(mom0_fits.header))
mom0.meta['beam'] = Beam.from_fits_header(mom0_fits.header)
# Create the bubble mask instead of letting FilFinder to do it.
bub = BubbleFinder2D(mom0, sigma=80.)
fils = fil_finder_2D(mom0.value, mom0.header, 10, distance=0.84e6)
fils.mask = ~(bub.mask.copy())
fils.medskel()
fils.analyze_skeletons()
# So at least on of the radial profiles fails. BUT the second fit is to a
# skeleton that is essentially the entire disk, so plot without interactivity
# and save the plot and the parameters shown in verbose mode.
p.ioff()
fils.find_widths(verbose=True, max_distance=500, auto_cut=False, try_nonparam=False)
# Fit Parameters: [ 541.31726502 129.85351117 180.0710914 304.01262168
# Fit Errors: [ 0.89151974 0.48394493 0.27313627 1.1462345 ]
| mit | Python |
|
da2de3d9d4b36bf2068dbe5b80d785748f532292 | Add __init__.py for the schedule package | PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,djds23/pygotham-1,pathunstrom/pygotham,djds23/pygotham-1,pathunstrom/pygotham,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,PyGotham/pygotham,pathunstrom/pygotham,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham | pygotham/schedule/__init__.py | pygotham/schedule/__init__.py | """Schedule package."""
| bsd-3-clause | Python |
|
da5fed886d519b271a120820668d21518872f52c | Remove Duplicates from Sorted Array problem | zsmountain/leetcode,zsmountain/leetcode,zsmountain/leetcode | remove_duplicates_from_sorted_array.py | remove_duplicates_from_sorted_array.py | '''
Given a sorted array, remove the duplicates in place such that each element appear only once and return the new length.
Do not allocate extra space for another array, you must do this in place with constant memory.
For example,
Given input array A = [1,1,2],
Your function should return length = 2, and A is now [1,2].
'''
'''
Use two pointers. Quite straightforward.
'''
class Solution:
    # @param a list of integers
    # @return an integer
    def removeDuplicates(self, A):
        if len(A) < 2:
            return len(A)
        p1 = 0
        p2 = 1
        while p2 < len(A):
            while p2 < len(A) and A[p1] == A[p2]:
                p2 += 1
            p1 += 1
            if p2 < len(A):
                A[p1] = A[p2]
        return p1
if __name__ == '__main__':
    s = Solution()
    A = [1, 1, 2, 2, 3]
    print s.removeDuplicates(A)
    print A
    A = [1, 1]
    print s.removeDuplicates(A)
    print A
| apache-2.0 | Python |
|
14302f83d755d2319a00db123dab14b300c8c93f | Add python patch script | rephormat/ripme,sleaze/ripme,sleaze/ripme,rephormat/ripme,sleaze/ripme,rephormat/ripme,metaprime/ripme,metaprime/ripme,metaprime/ripme | patch.py | patch.py | import json
import subprocess
# This script will:
# - read current version
# - increment patch version
# - update version in a few places
# - insert new line in ripme.json with message
message = raw_input('message: ')
with open('ripme.json') as dataFile:
    ripmeJson = json.load(dataFile)
currentVersion = ripmeJson["latestVersion"]
print 'Current version ' + currentVersion
versionFields = currentVersion.split('.')
patchCur = int(versionFields[2])
patchNext = patchCur + 1
majorMinor = versionFields[:2]
majorMinor.append(str(patchNext))
nextVersion = '.'.join(majorMinor)
print 'Updating to ' + nextVersion
substrExpr = 's/' + currentVersion + '/' + nextVersion + '/'
subprocess.call(['sed', '-i', '-e', substrExpr, 'src/main/java/com/rarchives/ripme/ui/UpdateUtils.java'])
subprocess.call(['git', 'grep', 'DEFAULT_VERSION.*' + nextVersion,
                 'src/main/java/com/rarchives/ripme/ui/UpdateUtils.java'])
substrExpr = 's/\\\"latestVersion\\\": \\\"' + currentVersion + '\\\"/\\\"latestVersion\\\": \\\"' +\
    nextVersion + '\\\"/'
subprocess.call(['sed', '-i', '-e', substrExpr, 'ripme.json'])
subprocess.call(['git', 'grep', 'latestVersion', 'ripme.json'])
substrExpr = 's/<version>' + currentVersion + '/<version>' + nextVersion + '/'
subprocess.call(['sed', '-i', '-e', substrExpr, 'pom.xml'])
subprocess.call(['git', 'grep', '<version>' + nextVersion + '</version>', 'pom.xml'])
commitMessage = nextVersion + ': ' + message
changeLogLine = ' \"' + commitMessage + '\",\n'
dataFile = open("ripme.json", "r")
ripmeJsonLines = dataFile.readlines()
ripmeJsonLines.insert(3, changeLogLine)
outputContent = ''.join(ripmeJsonLines)
dataFile.close()
dataFile = open("ripme.json", "w")
dataFile.write(outputContent)
dataFile.close()
subprocess.call(['git', 'add', '-u'])
subprocess.call(['git', 'commit', '-m', commitMessage])
subprocess.call(['git', 'tag', nextVersion])
| mit | Python |
|
048e6960d9e6408ef5dbfad2e32d2d1768ead1da | set P(A) | Adamssss/projectEuler | pb151.py | pb151.py | import math
import time
import random
t1 = time.time()
# A1:16
# A2:8
# A3:4
# A4:2
# A5:1
'''
def getRandom(n):
    return random.randint(1,n)

def getbatch(env,l):
    i = getRandom(l)-1
    t = env[i]
    env.pop(i)
    if t == 1:
        return env
    if t == 2:
        return env+[1]
    if t == 4:
        return env+[1,2]
    if t == 8:
        return env+[1,2,4]

def testweek():
    env = [1,2,4,8]
    el = 4
    count = 0
    for i in range(14):
        env = getbatch(env,el)
        el = len(env)
        if el == 1:
            count += 1
    return count

N = 600000000
total = 0
for i in range(N):
    total += testweek()

avg = total/N
k = math.pow(10,6)
print(round(avg*k)/k)
'''
def atone(s):
    if s == [1,0,0,0]:
        return 0
    po = 0
    pb = 0
    for i in range(4):
        if s[i] == 0:
            continue
        pb += s[i]
        t = s[:]
        t[i] -= 1
        for j in range(i):
            t[j] += 1
        pt = atone(t)
        if sum(t) == 1 and t[0] != 1:
            pt += 1
        po += s[i]*pt
    return po/pb
avg = atone([1,1,1,1])
k = math.pow(10,6)
print(round(avg*k)/k)
print("time:",time.time()-t1)
| mit | Python |
|
f0684a5bb5860c2b9caffefb47dc55781092819e | Add eTools engine | asciimoo/searx,dalf/searx,asciimoo/searx,dalf/searx,asciimoo/searx,asciimoo/searx,dalf/searx,dalf/searx | searx/engines/etools.py | searx/engines/etools.py | """
eTools (Web)
@website https://www.etools.ch
@provide-api no
@using-api no
@results HTML
@stable no (HTML can change)
@parse url, title, content
"""
from lxml import html
from searx.engines.xpath import extract_text
from searx.url_utils import quote
from searx.utils import eval_xpath
categories = ['general']
paging = False
language_support = False
safesearch = True
base_url = 'https://www.etools.ch'
search_path = '/searchAdvancedSubmit.do'\
    '?query={search_term}'\
    '&pageResults=20'\
    '&safeSearch={safesearch}'


def request(query, params):
    if params['safesearch']:
        safesearch = 'true'
    else:
        safesearch = 'false'

    params['url'] = base_url + search_path.format(search_term=quote(query), safesearch=safesearch)

    return params


def response(resp):
    results = []

    dom = html.fromstring(resp.text)

    for result in eval_xpath(dom, '//table[@class="result"]//td[@class="record"]'):
        url = eval_xpath(result, './a/@href')[0]
        title = extract_text(eval_xpath(result, './a//text()'))
        content = extract_text(eval_xpath(result, './/div[@class="text"]//text()'))

        results.append({'url': url,
                        'title': title,
                        'content': content})

    return results
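# Illustrative example (derived from the code above): for query "piano" with
# safe search enabled, request() sets params['url'] to
#   https://www.etools.ch/searchAdvancedSubmit.do?query=piano&pageResults=20&safeSearch=true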
| agpl-3.0 | Python |
|
4523621d2dd8913cb9c4156bf20e800652318a9d | add whileloop | BhaskarNaidu/python | whileloop.py | whileloop.py | a = 1
while a < 10:
    print (a)
    a = a+1
| apache-2.0 | Python |
|
bd7a84353b298ad14634e5c9a7b442146e9bfeeb | Create __init__.py | sopython/kesh,sopython/kesh | kesh/__init__.py | kesh/__init__.py | # Empty __init__.py
| bsd-3-clause | Python |
|
66d7ebe5210669284a335f83e2b8af7392285baa | add holistic video-to-pose | AmitMY/pose-format,AmitMY/pose-format,AmitMY/pose-format | pose_format/utils/holistic.py | pose_format/utils/holistic.py | import mediapipe as mp
import numpy as np
from tqdm import tqdm
from .openpose import hand_colors
from ..numpy.pose_body import NumPyPoseBody
from ..pose import Pose
from ..pose_header import PoseHeaderComponent, PoseHeaderDimensions, PoseHeader
mp_holistic = mp.solutions.holistic
BODY_POINTS = mp_holistic.PoseLandmark._member_names_
BODY_LIMBS = [(int(a), int(b)) for a, b in mp_holistic.POSE_CONNECTIONS]
HAND_POINTS = mp_holistic.HandLandmark._member_names_
HAND_LIMBS = [(int(a), int(b)) for a, b in mp_holistic.HAND_CONNECTIONS]
FACE_POINTS = [str(i) for i in range(468)]
FACE_LIMBS = [(int(a), int(b)) for a, b in mp_holistic.FACE_CONNECTIONS]
def component_points(component, width: int, height: int, num: int):
    if component is not None:
        lm = component.landmark
        return np.array([[p.x * width, p.y * height, p.z] for p in lm]), np.ones(num)

    return np.zeros((num, 3)), np.zeros(num)
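# Illustrative note (derived from the functions above and below): for a
# detected hand (num=21), component_points returns a (21, 3) array of
# pixel-scaled (x, y, z) points plus a (21,) confidence vector of ones; when
# the component is missing, both are all zeros.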
def body_points(component, width: int, height: int, num: int):
    if component is not None:
        lm = component.landmark
        return np.array([[p.x * width, p.y * height, p.z] for p in lm]), np.array([p.visibility for p in lm])

    return np.zeros((num, 3)), np.zeros(num)


def process_holistic(frames: list, fps: float, w: int, h: int, kinect=None, progress=False):
    holistic = mp_holistic.Holistic(static_image_mode=False)

    datas = []
    confs = []

    for i, frame in enumerate(tqdm(frames, disable=not progress)):
        results = holistic.process(frame)

        body_data, body_confidence = body_points(results.pose_landmarks, w, h, 33)
        face_data, face_confidence = component_points(results.face_landmarks, w, h, 468)
        lh_data, lh_confidence = component_points(results.left_hand_landmarks, w, h, 21)
        rh_data, rh_confidence = component_points(results.right_hand_landmarks, w, h, 21)

        data = np.concatenate([body_data, face_data, lh_data, rh_data])
        conf = np.concatenate([body_confidence, face_confidence, lh_confidence, rh_confidence])

        if kinect is not None:
            kinect_depth = []
            for x, y, z in np.array(data, dtype="int32"):
                if 0 < x < w and 0 < y < h:
                    kinect_depth.append(kinect[i, y, x, 0])
                else:
                    kinect_depth.append(0)

            kinect_vec = np.expand_dims(np.array(kinect_depth), axis=-1)
            data = np.concatenate([data, kinect_vec], axis=-1)

        datas.append(data)
        confs.append(conf)

    pose_body_data = np.expand_dims(np.stack(datas), axis=1)
    pose_body_conf = np.expand_dims(np.stack(confs), axis=1)

    return NumPyPoseBody(data=pose_body_data, confidence=pose_body_conf, fps=fps)
def load_holistic(frames: list, fps: float = 24, width=1000, height=1000, depth=0, kinect=None):
    pf = "XYZC" if kinect is None else "XYZKC"

    Holistic_Hand_Component = lambda name: PoseHeaderComponent(name=name, points=HAND_POINTS,
                                                               limbs=HAND_LIMBS, colors=hand_colors, point_format=pf)

    Holistic_Components = [
        PoseHeaderComponent(name="POSE_LANDMARKS", points=BODY_POINTS, limbs=BODY_LIMBS,
                            colors=[(255, 0, 0)], point_format=pf),
        PoseHeaderComponent(name="FACE_LANDMARKS", points=FACE_POINTS, limbs=FACE_LIMBS,
                            colors=[(128, 0, 0)], point_format=pf),
        Holistic_Hand_Component("LEFT_HAND_LANDMARKS"),
        Holistic_Hand_Component("RIGHT_HAND_LANDMARKS"),
    ]

    dimensions = PoseHeaderDimensions(width=width, height=height, depth=depth)
    header: PoseHeader = PoseHeader(version=0.1, dimensions=dimensions, components=Holistic_Components)
    body: NumPyPoseBody = process_holistic(frames, fps, width, height, kinect)

    return Pose(header, body) | mit | Python |
|
9b169bf42bfb2c674460fc317cfb96f929ba0953 | Add tests suite for text modifier tags. | TamiaLab/PySkCode | tests/tests_tags/tests_textmodifiers.py | tests/tests_tags/tests_textmodifiers.py | """
SkCode text modifier tags test code.
"""
import unittest
from skcode.etree import TreeNode
from skcode.tags.textmodifiers import TextModifierBaseTagOptions
from skcode.tags import (LowerCaseTextTagOptions,
                         UpperCaseTextTagOptions,
                         CapitalizeTextTagOptions,
                         DEFAULT_RECOGNIZED_TAGS)
class TextModifierTagsTestCase(unittest.TestCase):
""" Tests suite for text modifier tags module. """
def test_tag_and_aliases_in_default_recognized_tags_dict(self):
""" Test the presence of the tag and aliases in the dictionary of default recognized tags. """
self.assertIn('lowercase', DEFAULT_RECOGNIZED_TAGS)
self.assertIsInstance(DEFAULT_RECOGNIZED_TAGS['lowercase'], LowerCaseTextTagOptions)
self.assertIn('uppercase', DEFAULT_RECOGNIZED_TAGS)
self.assertIsInstance(DEFAULT_RECOGNIZED_TAGS['uppercase'], UpperCaseTextTagOptions)
self.assertIn('capitalize', DEFAULT_RECOGNIZED_TAGS)
self.assertIsInstance(DEFAULT_RECOGNIZED_TAGS['capitalize'], CapitalizeTextTagOptions)
def test_tag_constant_values(self):
""" Test tag constants. """
opts = TextModifierBaseTagOptions('lowercase')
self.assertFalse(opts.newline_closes)
self.assertFalse(opts.same_tag_closes)
self.assertFalse(opts.standalone)
self.assertTrue(opts.parse_embedded)
self.assertFalse(opts.swallow_trailing_newline)
self.assertTrue(opts.inline)
self.assertFalse(opts.close_inlines)
self.assertFalse(opts.make_paragraphs_here)
def test_render_html_lowercase(self):
""" Test the ``render_html`` method. """
opts = LowerCaseTextTagOptions()
tree_node = TreeNode(None, 'lowercase', opts)
self.assertEqual('<p class="text-lowercase">test</p>\n', opts.render_html(tree_node, 'test'))
def test_render_html_uppercase(self):
""" Test the ``render_html`` method. """
opts = UpperCaseTextTagOptions()
tree_node = TreeNode(None, 'uppercase', opts)
self.assertEqual('<p class="text-uppercase">test</p>\n', opts.render_html(tree_node, 'test'))
def test_render_html_capitilize(self):
""" Test the ``render_html`` method. """
opts = CapitalizeTextTagOptions()
tree_node = TreeNode(None, 'capitalize', opts)
self.assertEqual('<p class="text-capitalize">test</p>\n', opts.render_html(tree_node, 'test'))
def test_render_text_lowercase(self):
""" Test the ``render_text`` method. """
opts = LowerCaseTextTagOptions()
tree_node = TreeNode(None, 'lowercase', opts)
self.assertEqual('test', opts.render_text(tree_node, 'teST'))
def test_render_text_uppercase(self):
""" Test the ``render_text`` method. """
opts = UpperCaseTextTagOptions()
tree_node = TreeNode(None, 'uppercase', opts)
self.assertEqual('TEST', opts.render_text(tree_node, 'teST'))
def test_render_text_capitilize(self):
""" Test the ``render_text`` method. """
opts = CapitalizeTextTagOptions()
tree_node = TreeNode(None, 'capitalize', opts)
self.assertEqual('Test', opts.render_text(tree_node, 'test'))
def test_render_skcode_lowercase(self):
""" Test the ``render_skcode`` method. """
opts = LowerCaseTextTagOptions()
tree_node = TreeNode(None, 'lowercase', opts)
self.assertEqual('[lowercase]test[/lowercase]', opts.render_skcode(tree_node, 'test'))
def test_render_skcode_uppercase(self):
""" Test the ``render_skcode`` method. """
opts = UpperCaseTextTagOptions()
tree_node = TreeNode(None, 'uppercase', opts)
self.assertEqual('[uppercase]test[/uppercase]', opts.render_skcode(tree_node, 'test'))
def test_render_skcode_capitilize(self):
""" Test the ``render_skcode`` method. """
opts = CapitalizeTextTagOptions()
tree_node = TreeNode(None, 'capitalize', opts)
self.assertEqual('[capitalize]test[/capitalize]', opts.render_skcode(tree_node, 'test'))
| agpl-3.0 | Python |
|
11f47fcad839b198d134f34b4489537360703a07 | Add helpers.py | ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards,ViderumGlobal/ckanext-orgdashboards | ckanext/orgdashboards/tests/helpers.py | ckanext/orgdashboards/tests/helpers.py | from ckan.tests import factories
def create_mock_data(**kwargs):
    mock_data = {}

    mock_data['organization'] = factories.Organization()
    mock_data['organization_name'] = mock_data['organization']['name']
    mock_data['organization_id'] = mock_data['organization']['id']

    mock_data['dataset'] = factories.Dataset(owner_org=mock_data['organization_id'])
    mock_data['dataset_name'] = mock_data['dataset']['name']
    mock_data['package_id'] = mock_data['dataset']['id']

    mock_data['resource'] = factories.Resource(package_id=mock_data['package_id'])
    mock_data['resource_name'] = mock_data['resource']['name']
    mock_data['resource_id'] = mock_data['resource']['id']

    mock_data['resource_view'] = factories.ResourceView(
        resource_id=mock_data['resource_id'])
    mock_data['resource_view_title'] = mock_data['resource_view']['title']

    mock_data['context'] = {
        'user': factories._get_action_user_name(kwargs)
    }

    return mock_data | agpl-3.0 | Python |
|
46e1afd7faae8bd8c62f6b4f5c01322804e68163 | add script to visualize simulation coefficient (us, g, us') | fmilano/mitk,fmilano/mitk,fmilano/mitk,fmilano/mitk,iwegner/MITK,RabadanLab/MITKats,iwegner/MITK,fmilano/mitk,fmilano/mitk,iwegner/MITK,RabadanLab/MITKats,MITK/MITK,MITK/MITK,RabadanLab/MITKats,iwegner/MITK,RabadanLab/MITKats,MITK/MITK,RabadanLab/MITKats,MITK/MITK,RabadanLab/MITKats,MITK/MITK,fmilano/mitk,iwegner/MITK,MITK/MITK,iwegner/MITK | Modules/Biophotonics/python/iMC/script_visualize_simulation_coefficients.py | Modules/Biophotonics/python/iMC/script_visualize_simulation_coefficients.py | '''
Created on Sep 22, 2015
@author: wirkert
'''
import math
import numpy as np
import matplotlib.pyplot as plt
from mc.usuag import UsG
if __name__ == '__main__':
    # set up plots
    f, axarr = plt.subplots(1, 4)
    usplt = axarr[0]
    usplt.grid()
    usplt.set_xlabel("wavelengths [nm]")
    usplt.set_ylabel("us [cm-1]")
    usplt.set_title("scattering coefficient")
    gplt = axarr[1]
    gplt.grid()
    gplt.set_xlabel("wavelengths [nm]")
    gplt.set_ylabel("g")
    gplt.set_title("anisotropy factor")
    usrplt = axarr[2]
    usrplt.grid()
    usrplt.set_xlabel("wavelengths [nm]")
    usrplt.set_ylabel("us' [cm-1]")
    usrplt.set_title("reduced scattering coefficient")
    aniplt = axarr[3]
    aniplt.grid()
    aniplt.set_xlabel("x = ka = size parameter")
    aniplt.set_ylabel("g")
    aniplt.set_xscale('log')
    aniplt.set_title("anisotropy")

    # set up simulation
    usg = UsG()
    usg.dsp = 0.04
    # usg.n_medium = 1.33
    # usg.n_particle = 1.40
    wavelengths = np.arange(400, 700, 10) * 10 ** -9
    plt_range = np.array([0.4 / 2. * 10 ** -6])
    # np.linspace(2., 3., 10) * 10 ** -6
    # np.array([579. / 2. * 10 ** -9])
    # np.linspace(0.1, 0.74, 10) * 10 ** -6

    for i, d in enumerate(plt_range):
        # set and calculate values
        usg.r = d / 2.
        us = [usg(w)[0] for w in wavelengths]
        g = [usg(w)[1] for w in wavelengths]
        g = np.array(g) / np.array(g) * 0.92

        # plot stuff
        # from blue to red: the color of the plotted curves
        plt_color = (1. / float(len(plt_range)) * i,
                     0.,
                     1. - (1. / float(len(plt_range)) * i))
        # plot scattering coefficient
        usplt.plot(wavelengths * 10 ** 9, np.array(us) / 100., color=plt_color)
        # plot anisotropy factor
        gplt.plot(wavelengths * 10 ** 9, g, color=plt_color)
        # plot reduced scattering coefficient
        usrplt.plot(wavelengths * 10 ** 9, np.array(us) * (1.0 - np.array(g)) / 100.,
                    color=plt_color)
        aniplt.plot(2. * math.pi * usg.r / wavelengths * usg.n_medium, g)

    plt.show()
| bsd-3-clause | Python |
|
b9c9a1f5cfea61050803ecc442232f2f8b4d7011 | Create yaml2json.py | cristicalin/tools | yaml2json.py | yaml2json.py | #!/usr/bin/python
import sys
import yaml
import json
if __name__ == '__main__':
    content = yaml.load(sys.stdin)
    print json.dumps(content, indent=2)
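# Illustrative shell usage (the input document is hypothetical):
#   echo 'a: 1' | ./yaml2json.py
# prints the equivalent JSON object, pretty-printed with a two-space indent.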
| mit | Python |
|
7714b3c640a3d6d7fae9dba3496adfddd9354e0e | Add CFFI binding generator | thread/wide-product,thread/wide-product | build_wide.py | build_wide.py | import cffi
ffibuilder = cffi.FFI()
ffibuilder.set_source(
    '_wide',
    r"""
    #include "wide.c"
    """,
    extra_compile_args=['-Werror', '-fno-unwind-tables', '-fomit-frame-pointer'],
)

ffibuilder.cdef(
    r"""
    typedef uint32_t wp_index;
    typedef double wp_number;

    wp_index wide_product(
        wp_index height,
        const wp_number* a_data,
        const wp_index* a_indices,
        const wp_index* a_indptr,
        wp_index a_width,
        wp_index a_nnz,
        const wp_number* b_data,
        const wp_index* b_indices,
        const wp_index* b_indptr,
        wp_index b_width,
        wp_index b_nnz,
        wp_number* out_data,
        wp_index* out_indices,
        wp_index* out_indptr
    );

    wp_index wide_product_max_nnz(
        const wp_index* a_indptr,
        const wp_index* b_indptr,
        wp_index height
    );
    """,
)

if __name__ == '__main__':
    ffibuilder.compile(verbose=True)
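# Illustrative usage sketch (assumptions: the module above has been compiled,
# and `a` and `b` are scipy.sparse CSR matrices with float64 data and uint32
# index arrays, matching wp_number/wp_index; none of this is in the original):
#
#   from _wide import ffi, lib
#
#   def ptr(ctype, arr):
#       return ffi.cast(ctype, ffi.from_buffer(arr))
#
#   max_nnz = lib.wide_product_max_nnz(ptr('wp_index *', a.indptr),
#                                      ptr('wp_index *', b.indptr),
#                                      a.shape[0])
#   # allocate out_data (wp_number) plus out_indices/out_indptr (wp_index) of
#   # the reported size, then call lib.wide_product(...) with the CSR buffers
#   # of a and b and the output buffers.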
| mit | Python |
|
6610483e55f5371d5dcfe06e984f791c3f051e4a | fix InMoov launching button | MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab,MyRobotLab/myrobotlab | src/main/resources/resource/Intro/InMoov01_start.py | src/main/resources/resource/Intro/InMoov01_start.py | #########################################
# InMoov01_start.py
# categories: inmoov
# more info @: http://myrobotlab.org/service/InMoov
#########################################
# uncomment for virtual hardware
# Platform.setVirtual(True)
i01 = Runtime.start('i01', 'InMoov2') | apache-2.0 | Python |
|
9ba00cc698a5ce38d8cfb8eb6e921df0e24525cc | Create netstew.py | jpegleg/netstew | netstew.py | netstew.py | #!/opt/anaconda/bin/python2.7
# Print the links to standard out.
from bs4 import BeautifulSoup
soup = BeautifulSoup(open("index.html"))
for link in soup.find_all('a'):
    print(link.get('href'))
| mit | Python |
|
2e3af241d989bf2b62bba5e344240246e8ff516b | add leave module | sammdot/circa | modules/leave.py | modules/leave.py | class LeaveModule:
    def __init__(self, circa):
        self.circa = circa

    def onload(self):
        self.circa.add_listener("cmd.leave", self.leave)
        self.circa.add_listener("cmd.goaway", self.leave)
        self.circa.add_listener("cmd.quit", self.quit)

    def onunload(self):
        self.circa.remove_listener("cmd.leave", self.leave)
        self.circa.remove_listener("cmd.goaway", self.leave)
        self.circa.remove_listener("cmd.quit", self.quit)

    def leave(self, fr, to, text):
        if self.circa.is_admin(fr) and fr != to:
            self.circa.part(to)

    def quit(self, fr, to, text):
        if self.circa.is_admin(fr):
            self.circa.close()


module = LeaveModule
| bsd-3-clause | Python |
|
3411020a0445afcb626e7079ae2f4d17a02d27a0 | Add simple YTid2AmaraID mapper. | danielhollas/AmaraUpload,danielhollas/AmaraUpload | map_ytid2amaraid.py | map_ytid2amaraid.py | #!/usr/bin/env python3
import argparse, sys
from pprint import pprint
from amara_api import *
from utils import answer_me
def read_cmd():
    """Function for reading command line options."""
    desc = "Program for mapping YouTube IDs to Amara IDs. If given video is not on Amara, it is created."
    parser = argparse.ArgumentParser(description=desc)
    parser.add_argument('input_file',metavar='INPUT_FILE', help='Text file containing YouTube IDs and possibly filenames.')
    parser.add_argument('-l','--lang',dest='lang',required = True, help='Which language do we copy?')
    parser.add_argument('-c','--credentials',dest='apifile',default='myapi.txt', help='Text file containing your API key and username on the first line.')
    return parser.parse_args()

opts = read_cmd()
infile = opts.input_file
apifile = opts.apifile
lang = opts.lang

# We suppose that the original language is English
if lang == "en":
    is_original = True  # is lang the original language of the video?
else:
    is_original = False

# List ytids may also contain filenames
ytids = []
# Reading file with YT id's
with open(infile, "r") as f:
    for line in f:
        ytids.append(line.split())

# File 'apifile' should contain only one line with your Amara API key and Amara username.
# Amara API can be found in Settings->Account-> API Access (bottom-right corner)
file = open(apifile, "r")
API_KEY, USERNAME = file.read().split()[0:]
print('Using Amara username: '+USERNAME)
#print('Using Amara API key: '+API_KEY)

amara_headers = {
    'Content-Type': 'application/json',
    'X-api-username': USERNAME,
    'X-api-key': API_KEY,
    'format': 'json'
}
if len(ytids) < 20:  # Do not print for large inputs
    print("This is what I got from the input file:")
    print(ytids)

answer = answer_me("Should I proceed?")
if not answer:
    sys.exit(1)

# Main loop
for i in range(len(ytids)):
    ytid_from = ytids[i][0]
    sys.stdout.flush()
    sys.stderr.flush()

    video_url = 'https://www.youtube.com/watch?v='+ytid_from

    # Now check whether the video is already on Amara
    # If not, create it.
    amara_response = check_video( video_url, amara_headers)
    if amara_response['meta']['total_count'] == 0:
        amara_response = add_video(video_url, lang, amara_headers)
        amara_id = amara_response['id']
        amara_title = amara_response['title']
        print(ytid_from, AMARA_BASE_URL+'cs/subtitles/editor/'+amara_id+'/'+lang)
    else:
        amara_id = amara_response['objects'][0]['id']
        amara_title = amara_response['objects'][0]['title']
        print(ytid_from, AMARA_BASE_URL+'cs/subtitles/editor/'+amara_id+'/'+lang)
| mit | Python |
|
9fef390248387e02498d18ab7bba5b23e3632c7b | Add missing file | AudioCommons/ac-mediator,AudioCommons/ac-mediator,AudioCommons/ac-mediator | api/constants.py | api/constants.py | QUERY_PARAM_QUERY = 'q'
QUERY_PARAM_SORT = 's'
QUERY_PARAM_SIZE = 'size'
QUERY_PARAM_PAGE = 'page'
QUERY_PARAM_FIELDS = 'fields'
QUERY_PARAM_OFFSET = 'offset'
QUERY_PARAM_INCLUDE = 'include'
QUERY_PARAM_EXCLUDE = 'exclude'
QUERY_PARAM_WAIT_UNTIL_COMPLETE = 'wuc'
| apache-2.0 | Python |
|
8f9c979fc2936d53321a377c67cbf2e3b4667f95 | Create status_light.py | grvrulz/BookPlayer,nerk/BookPlayer | status_light.py | status_light.py | import time
class StatusLight(object):
"""available patterns for the status light"""
patterns = {
'blink_fast' : (.1, [False, True]),
'blink' : (.5, [False, True]),
}
"""placeholder for pattern to tenmporarily interrupt
status light with different pattern"""
interrupt_pattern = [0, []]
"""continue flashing, controlled by the stop"""
cont = True
def interrupt(self, action, repeat = 1):
"""Interupt the current status of the light with a names action
parameters: action the name of the action
repeat: the number of times to repeatthe interruption"""
self.interrupt_pattern[0] = self.patterns[action][0]
for i in range(0, repeat):
self.interrupt_pattern[1].extend(list(self.patterns[action][1][:]))
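    # Illustrative example (derived from the code above):
    # interrupt('blink_fast', 3) sets the interrupt delay to 0.1 s and queues
    # [False, True] three times, so the next six do() steps drain the queue
    # before the regular pattern resumes.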
    def do(self, action):
        """Perform a status light action
        parameters: action: the name of the action"""
        if(len(self.interrupt_pattern[1])):
            # if the interrupt_pattern is not empty, prioritize it
            time.sleep(self.interrupt_pattern[0])
            self.set_state(self.interrupt_pattern[1].pop(0))
            return self.do(action)

        for state in self.patterns[action][1]:
            # perform the regular action when not interrupted
            time.sleep(self.patterns[action][0])
            self.set_state(state)

        if self.cont:
            # continue if not stopped
            self.do(action)

    def off(self, state):
        """Turn off status light"""
        self.cont = False
        self.set_state(state)

    def set_state(self, state):
        """Turn the light on or off"""
        print 'set state to %s' % state


if __name__ == '__main__':
    light = StatusLight()
    light.interrupt('blink_fast', 3)
    light.do('blink')
| mit | Python |
|
a4ad0ffbda8beb4c2ea4ef0d181ec9ef0de3d1e1 | add the md5 by python | MiracleWong/aming_python,MiracleWong/aming_python | SystemInfo/1_hashlib.py | SystemInfo/1_hashlib.py | #!/usr/bin/python
#-*- coding:utf-8 -*-
import hashlib
import sys
def md5sum(f):
    m = hashlib.md5()
    with open(f) as fd:
        while True:
            data = fd.read(4096)
            if data:
                m.update(data)
            else:
                break
    return m.hexdigest()

if __name__ == '__main__':
    print md5sum(sys.argv[1])
| mit | Python |
|
0abb8f6d266408f20c751726460ae2d87f307583 | solve 1 problem | Shuailong/Leetcode | solutions/factorial-trailing-zeroes.py | solutions/factorial-trailing-zeroes.py | #!/usr/bin/env python
# encoding: utf-8
"""
factorial-trailing-zeroes.py
Created by Shuailong on 2016-02-21.
https://leetcode.com/problems/factorial-trailing-zeroes/.
"""
class Solution(object):
    def trailingZeroes(self, n):
        """
        :type n: int
        :rtype: int
        """
        count = 0
        max_iter = 15  # 5**14 > max_int
        for fact in range(1, max_iter):
            count += n / 5 ** fact
        return count
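# Worked example: 25! ends in 25 // 5 + 25 // 25 = 5 + 1 = 6 trailing zeros;
# every multiple of 5 contributes one factor of five, and 25 contributes two.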
def main():
    solution = Solution()
    n = 25
    for n in range(1, 100):
        print solution.trailingZeroes(n)

if __name__ == '__main__':
    main()
| mit | Python |
|
c8fa91104d712bf2743b07b5edd5f38a040d6507 | Add unit tests for invoke_post_run | nzlosh/st2,tonybaloney/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2,Plexxi/st2,tonybaloney/st2,tonybaloney/st2,Plexxi/st2,nzlosh/st2,StackStorm/st2,StackStorm/st2,Plexxi/st2,StackStorm/st2,nzlosh/st2 | st2common/tests/unit/test_runners_utils.py | st2common/tests/unit/test_runners_utils.py | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from st2common.runners import utils
from st2common.services import executions as exe_svc
from st2common.util import action_db as action_db_utils
from st2tests import base
from st2tests import fixturesloader
from st2tests import config as tests_config
tests_config.parse_args()
FIXTURES_PACK = 'generic'
TEST_FIXTURES = {
    'liveactions': ['liveaction1.yaml'],
    'actions': ['local.yaml'],
    'executions': ['execution1.yaml'],
    'runners': ['run-local.yaml']
}


class RunnersUtilityTests(base.CleanDbTestCase):

    def __init__(self, *args, **kwargs):
        super(RunnersUtilityTests, self).__init__(*args, **kwargs)
        self.models = None

    def setUp(self):
        super(RunnersUtilityTests, self).setUp()

        loader = fixturesloader.FixturesLoader()

        self.models = loader.save_fixtures_to_db(
            fixtures_pack=FIXTURES_PACK,
            fixtures_dict=TEST_FIXTURES
        )

        self.liveaction_db = self.models['liveactions']['liveaction1.yaml']
        exe_svc.create_execution_object(self.liveaction_db)

        self.action_db = action_db_utils.get_action_by_ref(self.liveaction_db.action)

    @mock.patch.object(action_db_utils, 'get_action_by_ref', mock.MagicMock(return_value=None))
    def test_invoke_post_run_action_provided(self):
        utils.invoke_post_run(self.liveaction_db, action_db=self.action_db)
        action_db_utils.get_action_by_ref.assert_not_called()

    def test_invoke_post_run_action_exists(self):
        utils.invoke_post_run(self.liveaction_db)

    @mock.patch.object(action_db_utils, 'get_action_by_ref', mock.MagicMock(return_value=None))
    @mock.patch.object(action_db_utils, 'get_runnertype_by_name', mock.MagicMock(return_value=None))
    def test_invoke_post_run_action_does_not_exist(self):
        utils.invoke_post_run(self.liveaction_db)
        action_db_utils.get_action_by_ref.assert_called_once()
        action_db_utils.get_runnertype_by_name.assert_not_called()
| apache-2.0 | Python |
|
8977f320979998c9f18cfa7629c1811c7082dddf | Add setup.py (sigh) | shiplu/webxpath | setup.py | setup.py | import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="webxpath", # Replace with your own username
version="0.0.2",
author="Shiplu Mokaddim",
author_email="[email protected]",
description="Run XPath query and expressions against websites",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/shiplu/webxpath",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache License 2.0",
"Operating System :: OS Independent",
],
python_requires=">=3.6",
)
| apache-2.0 | Python |
|
9af2c53af417295842f8ae329a8bb8abc99f693d | add setup.py file | sigurdga/samklang-blog,sigurdga/samklang-blog | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(
    name = 's7n-blog',
    version = "1a1",
    packages = ['s7n', 's7n.blog'],
)
| agpl-3.0 | Python |
|
e1be390ab7a90d1efdb75a0b2e04c6414645a23c | Create setup.py | planetarypy/pdsspect | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

with open('README.rst') as readme_file:
    readme = readme_file.read()

with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')

requirements = [
    # TODO: put package requirements here
]

setup(
    name='pdsspect',
    version='0.1.0',
    description="PDS Image Viewer",
    long_description=readme + '\n\n' + history,
    author="PlanetaryPy Developers",
    author_email='[email protected]',
    url='https://github.com/planetarypy/pdsspect',
    packages=[
        'pdsspect',
    ],
    package_dir={'pdsspect': 'pdsspect'},
    include_package_data=True,
    install_requires=[
        'ginga==2.6.0',
        'planetaryimage>=0.5.0',
        'matplotlib>=1.5.1',
        'QtPy>=1.2.1'
    ],
    license="BSD",
    zip_safe=False,
    keywords='pdsspect',
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Natural Language :: English',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
    ],
    entry_points={
        'console_scripts': [
            'pdsspect = pdsspect.pdsspect:cli'
        ],
    }
)
| bsd-3-clause | Python |
|
18c0682306ee383d0eaad467d8fd7c9f74bb6e4f | add setup.py | BYUFLOWLab/pyoptsparsewrapper | setup.py | setup.py | #!/usr/bin/env python
# encoding: utf-8
from setuptools import setup # , find_packages
setup(
    name='pyoptwrapper',
    version='1.0',
    description='wrapper to pyopt',
    author='Andrew Ning',
    author_email='[email protected]',
    py_modules=['pyoptwrapper'],
    license='Apache License, Version 2.0',
    zip_safe=False
) | mit | Python |
|
a03fa3d725f296d3fa3fda323171924671ec65c0 | add setup.py for setuptools support | devkev/mtools,yaowenqiang/mtools,ces0712/mtools,ces0712/mtools,devkev/mtools,parkhyeonghwa/mtools,devkev/mtools,yaowenqiang/mtools,gormanb/mtools,ces0712/mtools,aerrami/mtools,parkhyeonghwa/mtools,parkhyeonghwa/mtools,tomzhang/mtools,gormanb/mtools,rueckstiess/mtools,yaowenqiang/mtools,gormanb/mtools,corymintz/mtools,aerrami/mtools,aerrami/mtools,tomzhang/mtools,corymintz/mtools,rueckstiess/mtools,corymintz/mtools,tomzhang/mtools | setup.py | setup.py | from setuptools import setup, find_packages
setup(
    name='mtools',
    version='1.0.0',
    packages=find_packages(),
    scripts=['scripts/mlaunch','scripts/mlog2json','scripts/mlogdistinct',
             'scripts/mlogfilter','scripts/mlogmerge','scripts/mlogversion',
             'scripts/mlogvis','scripts/mplotqueries'],
    include_package_data=True,
    author='Thomas Rueckstiess',
    author_email='[email protected]',
    url='https://github.com/rueckstiess/mtools',
    description='Useful scripts to parse and visualize MongoDB log files.',
) | apache-2.0 | Python |
|
b106d4fdaf1667061879dd170ddeec1bde2042aa | Add setup.py. | praekelt/twitty-twister,dustin/twitty-twister | setup.py | setup.py | from distutils.core import setup
setup(name='twittytwister',
      version='0.1',
      description='Twitter client for Twisted Python',
      author='Dustin Sallings',
      author_email='[email protected]',
      url='http://github.com/dustin/twitty-twister/',
      license='MIT',
      platforms='any',
      packages=['twittytwister'],
      )
| mit | Python |
|
e0efdff7380101437c75ce6a50dd93302a3315e2 | Increase version dependency. | binarydud/pyres,guaijiao/pyres,TylerLubeck/pyres,Affectiva/pyres | setup.py | setup.py | from setuptools import setup, find_packages
version='0.9'
setup(
    name='pyres',
    version=version,
    description='Python resque clone',
    author='Matt George',
    author_email='[email protected]',
    maintainer='Matt George',
    license='MIT',
    url='http://github.com/binarydud/pyres',
    packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
    download_url='http://cloud.github.com/downloads/binarydud/pyres/pyres-%s.tar.gz' % version,
    include_package_data=True,
    package_data={'resweb': ['templates/*.mustache','media/*']},
    scripts=[
        'scripts/pyres_worker',
        'scripts/pyres_web',
        'scripts/pyres_scheduler',
        'scripts/pyres_manager'],
    install_requires=[
        'simplejson>=2.0.9',
        'itty>=0.6.2',
        'redis>=1.34.1',
        'pystache>=0.1.0',
        'setproctitle==1.0'
    ],
    classifiers = [
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python'],
)
| from setuptools import setup, find_packages
version='0.9'
setup(
    name='pyres',
    version=version,
    description='Python resque clone',
    author='Matt George',
    author_email='[email protected]',
    maintainer='Matt George',
    license='MIT',
    url='http://github.com/binarydud/pyres',
    packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
    download_url='http://cloud.github.com/downloads/binarydud/pyres/pyres-%s.tar.gz' % version,
    include_package_data=True,
    package_data={'resweb': ['templates/*.mustache','media/*']},
    scripts=[
        'scripts/pyres_worker',
        'scripts/pyres_web',
        'scripts/pyres_scheduler',
        'scripts/pyres_manager'],
    install_requires=[
        'simplejson>=2.0.9',
        'itty>=0.6.2',
        'redis==1.34.1',
        'pystache>=0.1.0',
        'setproctitle==1.0'
    ],
    classifiers = [
        'Development Status :: 4 - Beta',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: OS Independent',
        'Programming Language :: Python'],
) | mit | Python |
8781799d2511dbafa7b11f2f8fb45356031a619b | Bump the sqlalchemy-citext version requirement | robhudson/warehouse,mattrobenolt/warehouse,mattrobenolt/warehouse,robhudson/warehouse,techtonik/warehouse,mattrobenolt/warehouse,techtonik/warehouse | setup.py | setup.py | #!/usr/bin/env python
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from setuptools import setup, find_packages
about = {}
with open("warehouse/__about__.py") as fp:
exec(fp.read(), about)
setup(
name=about["__title__"],
version=about["__version__"],
description=about["__summary__"],
long_description=open("README.rst").read(),
license=about["__license__"],
url=about["__uri__"],
author=about["__author__"],
author_email=about["__email__"],
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
packages=find_packages(),
package_data={
"warehouse": ["*.yml"],
"warehouse.legacy": ["templates/*.html"],
"warehouse.migrations": ["*.mako", "versions/*.py"],
},
install_requires=[
"alembic",
"Jinja2",
"psycopg2cffi-compat>=1.1",
"PyYAML",
"six",
"SQLAlchemy",
"sqlalchemy-citext>=1.2.0",
"Werkzeug",
],
entry_points={
"console_scripts": [
"warehouse = warehouse.__main__:main",
],
},
zip_safe=False,
)
| #!/usr/bin/env python
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from setuptools import setup, find_packages
about = {}
with open("warehouse/__about__.py") as fp:
exec(fp.read(), about)
setup(
name=about["__title__"],
version=about["__version__"],
description=about["__summary__"],
long_description=open("README.rst").read(),
license=about["__license__"],
url=about["__uri__"],
author=about["__author__"],
author_email=about["__email__"],
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
],
packages=find_packages(),
package_data={
"warehouse": ["*.yml"],
"warehouse.legacy": ["templates/*.html"],
"warehouse.migrations": ["*.mako", "versions/*.py"],
},
install_requires=[
"alembic",
"Jinja2",
"psycopg2cffi-compat>=1.1",
"PyYAML",
"six",
"SQLAlchemy",
"sqlalchemy-citext>=1.1.0",
"Werkzeug",
],
entry_points={
"console_scripts": [
"warehouse = warehouse.__main__:main",
],
},
zip_safe=False,
)
| apache-2.0 | Python |
18d899f36a140e677637118039e245127b0d138a | remove the long description | wintersandroid/tvrenamr,ghickman/tvrenamr | setup.py | setup.py | from setuptools import setup, find_packages
from tvrenamr import get_version
setup(
name = 'tvrenamr',
version = get_version(),
description = 'Rename tv show files using online databases',
author = 'George Hickman',
author_email = '[email protected]',
url = 'http://github.com/ghickman/tvrenamr',
license = 'MIT',
packages = find_packages(exclude=['tests']),
entry_points = {'console_scripts': ['tvr = tvrenamr.tvrenamr:run',],},
classifiers = [
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Topic :: Multimedia',
'Topic :: Utilities',
'Natural Language :: English'],
install_requires = ('lxml', 'pyyaml',)
)
| from os.path import dirname, join
from setuptools import setup, find_packages
from tvrenamr import get_version
def fread(fname):
return open(join(dirname(__file__), fname)).read()
setup(
name = 'tvrenamr',
version = get_version(),
description = 'Rename tv show files using online databases',
long_description = fread('README.markdown'),
author = 'George Hickman',
author_email = '[email protected]',
url = 'http://github.com/ghickman/tvrenamr',
license = 'MIT',
packages = find_packages(exclude=['tests']),
entry_points = {'console_scripts': ['tvr = tvrenamr.tvrenamr:run',],},
classifiers = [
'Environment :: Console',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Topic :: Multimedia',
'Topic :: Utilities',
'Natural Language :: English'],
install_requires = ('lxml', 'pyyaml',)
)
| mit | Python |
934e73247156b28d919957d738d8a5b03e403160 | Add setup.py. | peteflorence/simple_img_gallery,peteflorence/simple_img_gallery | setup.py | setup.py | """
setup.py for simple_img_gallery.
"""
from distutils.core import setup
setup(name="simple_img_gallery",
version="0.0.1",
description="Simple image gallery generation.",
author="Pete Florence",
author_email="",
url="https://github.com/peteflorence/simple_img_gallery",
scripts=['generate_gallery.py'])
| mit | Python |
|
ff5c68ccd566ba388f919bb663c5055685be3070 | Add initial setup.py | speechkey/mdx_picture | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(
name='mdx_picture',
version='1.0',
author='Artem Grebenkin',
author_email='[email protected]',
description='Python-Markdown extension supports the <picture> tag.',
url='http://www.artemgrebenkin.com/',
py_modules=['mdx_picture'],
install_requires=['Markdown>=2.0'],
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Intended Audience :: Developers',
'Environment :: Web Environment',
'Programming Language :: Python',
'Topic :: Text Processing :: Filters',
'Topic :: Text Processing :: Markup :: HTML'
]
)
| mit | Python |
|
6972c0a6fc0431c7e41b110ea8c41dd9a4ed076c | Add distutils setup script | HearthSim/python-fsb5 | setup.py | setup.py | #!/usr/bin/env python3
from distutils.core import setup
setup(
name='python-fsb5',
version='1.0',
author='Simon Pinfold',
author_email='[email protected]',
    description='Library and tool to extract audio from FSB5 (FMOD Sample Bank) files',
download_url='https://github.com/synap5e/python-fsb5/tarball/master',
license='MIT',
url='https://github.com/synap5e/python-fsb5',
)
| mit | Python |
|
0bf30432084a5b6e71ea2ac36af165f7c4cee133 | Add setup.py | skwashd/python-acquia-cloud | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
setup(name='acapi',
version='0.1',
description='Acquia Cloud API client.',
author='Dave Hall',
author_email='[email protected]',
url='http://github.com/skwashd/python-acquia-cloud',
install_requires=['httplib2==0.9', 'simplejson==3.5.3', 'six==1.7.3'],
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Topic :: Internet',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
packages=[
'acapi',
'acapi.compat',
'acapi.resources',
],
)
| mit | Python |
|
28970e7d54186e1bf360cb91389b9ba6b3df4679 | Add script to validate mvn repositories | avikurapati/elasticsearch,weipinghe/elasticsearch,dongjoon-hyun/elasticsearch,JSCooke/elasticsearch,sneivandt/elasticsearch,socialrank/elasticsearch,hafkensite/elasticsearch,mortonsykes/elasticsearch,caengcjd/elasticsearch,wimvds/elasticsearch,yanjunh/elasticsearch,obourgain/elasticsearch,socialrank/elasticsearch,kalimatas/elasticsearch,HonzaKral/elasticsearch,achow/elasticsearch,lks21c/elasticsearch,LewayneNaidoo/elasticsearch,bawse/elasticsearch,dongjoon-hyun/elasticsearch,girirajsharma/elasticsearch,bestwpw/elasticsearch,mmaracic/elasticsearch,lydonchandra/elasticsearch,robin13/elasticsearch,ricardocerq/elasticsearch,qwerty4030/elasticsearch,franklanganke/elasticsearch,vroyer/elasticassandra,clintongormley/elasticsearch,fernandozhu/elasticsearch,palecur/elasticsearch,mnylen/elasticsearch,fforbeck/elasticsearch,pritishppai/elasticsearch,wangtuo/elasticsearch,franklanganke/elasticsearch,PhaedrusTheGreek/elasticsearch,zkidkid/elasticsearch,xuzha/elasticsearch,bawse/elasticsearch,a2lin/elasticsearch,HonzaKral/elasticsearch,naveenhooda2000/elasticsearch,mnylen/elasticsearch,MaineC/elasticsearch,AndreKR/elasticsearch,kingaj/elasticsearch,btiernay/elasticsearch,scottsom/elasticsearch,karthikjaps/elasticsearch,kalimatas/elasticsearch,geidies/elasticsearch,MichaelLiZhou/elasticsearch,nazarewk/elasticsearch,qwerty4030/elasticsearch,andrejserafim/elasticsearch,AndreKR/elasticsearch,adrianbk/elasticsearch,strapdata/elassandra,jango2015/elasticsearch,tebriel/elasticsearch,MaineC/elasticsearch,dpursehouse/elasticsearch,nellicus/elasticsearch,mortonsykes/elasticsearch,mikemccand/elasticsearch,JervyShi/elasticsearch,gmarz/elasticsearch,wuranbo/elasticsearch,ESamir/elasticsearch,sneivandt/elasticsearch,schonfeld/elasticsearch,fernandozhu/elasticsearch,jpountz/elasticsearch,mjason3/elasticsearch,vietlq/elasticsearch,wenpos/elasticsearch,mbrukman/elasticsearch,elancom/elasticsearch,mm0/elasticsearch,martinstuga/elasticsearch,achow/elasticsearch,myelin/elasticsearch,MetSystem/elasticsearch,18098924759/elasticsearch,mgalushka/elasticsearch,maddin2016/elasticsearch,jchampion/elasticsearch,ZTE-PaaS/elasticsearch,kingaj/elasticsearch,knight1128/elasticsearch,fred84/elasticsearch,awislowski/elasticsearch,mapr/elasticsearch,schonfeld/elasticsearch,glefloch/elasticsearch,schonfeld/elasticsearch,avikurapati/elasticsearch,GlenRSmith/elasticsearch,pablocastro/elasticsearch,robin13/elasticsearch,cnfire/elasticsearch-1,snikch/elasticsearch,cnfire/elasticsearch-1,Uiho/elasticsearch,nomoa/elasticsearch,iacdingping/elasticsearch,socialrank/elasticsearch,ulkas/elasticsearch,a2lin/elasticsearch,gingerwizard/elasticsearch,areek/elasticsearch,andrejserafim/elasticsearch,caengcjd/elasticsearch,jimczi/elasticsearch,Brijeshrpatel9/elasticsearch,lydonchandra/elasticsearch,djschny/elasticsearch,MaineC/elasticsearch,himanshuag/elasticsearch,fred84/elasticsearch,pablocastro/elasticsearch,nrkkalyan/elasticsearch,elancom/elasticsearch,henakamaMSFT/elasticsearch,apepper/elasticsearch,YosuaMichael/elasticsearch,ivansun1010/elasticsearch,ricardocerq/elasticsearch,mjason3/elasticsearch,kunallimaye/elasticsearch,springning/elasticsearch,kingaj/elasticsearch,strapdata/elassandra,camilojd/elasticsearch,mapr/elasticsearch,apepper/elasticsearch,nazarewk/elasticsearch,geidies/elasticsearch,jpountz/elasticsearch,camilojd/elasticsearch,s1monw/elasticsearch,girirajsharma/elasticsearch,nazarewk/elasticsearch,nilabhsagar/elasticsearch,LewayneNaidoo/elasticsearch,Bri
jeshrpatel9/elasticsearch,spiegela/elasticsearch,rajanm/elasticsearch,JackyMai/elasticsearch,schonfeld/elasticsearch,rento19962/elasticsearch,martinstuga/elasticsearch,shreejay/elasticsearch,F0lha/elasticsearch,uschindler/elasticsearch,MaineC/elasticsearch,ZTE-PaaS/elasticsearch,IanvsPoplicola/elasticsearch,zkidkid/elasticsearch,mgalushka/elasticsearch,Helen-Zhao/elasticsearch,artnowo/elasticsearch,zhiqinghuang/elasticsearch,zhiqinghuang/elasticsearch,pranavraman/elasticsearch,cwurm/elasticsearch,vietlq/elasticsearch,MisterAndersen/elasticsearch,maddin2016/elasticsearch,MisterAndersen/elasticsearch,C-Bish/elasticsearch,vietlq/elasticsearch,shreejay/elasticsearch,18098924759/elasticsearch,rmuir/elasticsearch,Brijeshrpatel9/elasticsearch,mgalushka/elasticsearch,rmuir/elasticsearch,mohit/elasticsearch,lydonchandra/elasticsearch,winstonewert/elasticsearch,ckclark/elasticsearch,avikurapati/elasticsearch,knight1128/elasticsearch,Collaborne/elasticsearch,nazarewk/elasticsearch,YosuaMichael/elasticsearch,vroyer/elassandra,Charlesdong/elasticsearch,KimTaehee/elasticsearch,sc0ttkclark/elasticsearch,clintongormley/elasticsearch,yanjunh/elasticsearch,davidvgalbraith/elasticsearch,jbertouch/elasticsearch,knight1128/elasticsearch,KimTaehee/elasticsearch,jprante/elasticsearch,onegambler/elasticsearch,Rygbee/elasticsearch,onegambler/elasticsearch,davidvgalbraith/elasticsearch,martinstuga/elasticsearch,artnowo/elasticsearch,dongjoon-hyun/elasticsearch,franklanganke/elasticsearch,nilabhsagar/elasticsearch,shreejay/elasticsearch,IanvsPoplicola/elasticsearch,Charlesdong/elasticsearch,himanshuag/elasticsearch,masterweb121/elasticsearch,jpountz/elasticsearch,kalburgimanjunath/elasticsearch,fforbeck/elasticsearch,mortonsykes/elasticsearch,yanjunh/elasticsearch,rhoml/elasticsearch,s1monw/elasticsearch,ouyangkongtong/elasticsearch,socialrank/elasticsearch,achow/elasticsearch,mbrukman/elasticsearch,nellicus/elasticsearch,camilojd/elasticsearch,adrianbk/elasticsearch,elasticdog/elasticsearch,lmtwga/elasticsearch,achow/elasticsearch,zhiqinghuang/elasticsearch,vroyer/elassandra,btiernay/elasticsearch,davidvgalbraith/elasticsearch,rajanm/elasticsearch,JervyShi/elasticsearch,jeteve/elasticsearch,nellicus/elasticsearch,hafkensite/elasticsearch,lzo/elasticsearch-1,scorpionvicky/elasticsearch,C-Bish/elasticsearch,nellicus/elasticsearch,GlenRSmith/elasticsearch,MichaelLiZhou/elasticsearch,andrestc/elasticsearch,springning/elasticsearch,episerver/elasticsearch,clintongormley/elasticsearch,MetSystem/elasticsearch,markharwood/elasticsearch,mcku/elasticsearch,zhiqinghuang/elasticsearch,glefloch/elasticsearch,mapr/elasticsearch,pranavraman/elasticsearch,naveenhooda2000/elasticsearch,zhiqinghuang/elasticsearch,sneivandt/elasticsearch,jeteve/elasticsearch,nellicus/elasticsearch,ThiagoGarciaAlves/elasticsearch,djschny/elasticsearch,mbrukman/elasticsearch,nknize/elasticsearch,hafkensite/elasticsearch,pablocastro/elasticsearch,Rygbee/elasticsearch,jango2015/elasticsearch,yongminxia/elasticsearch,tebriel/elasticsearch,weipinghe/elasticsearch,sreeramjayan/elasticsearch,coding0011/elasticsearch,brandonkearby/elasticsearch,JackyMai/elasticsearch,lks21c/elasticsearch,palecur/elasticsearch,andrestc/elasticsearch,masaruh/elasticsearch,girirajsharma/elasticsearch,nomoa/elasticsearch,mcku/elasticsearch,sneivandt/elasticsearch,JervyShi/elasticsearch,gingerwizard/elasticsearch,mbrukman/elasticsearch,strapdata/elassandra5-rc,IanvsPoplicola/elasticsearch,strapdata/elassandra,obourgain/elasticsearch,alexshadow007/elasticsearch,gingerwizard/elastics
earch,nezirus/elasticsearch,strapdata/elassandra,LeoYao/elasticsearch,strapdata/elassandra5-rc,StefanGor/elasticsearch,jbertouch/elasticsearch,rhoml/elasticsearch,Rygbee/elasticsearch,elancom/elasticsearch,rajanm/elasticsearch,rlugojr/elasticsearch,fred84/elasticsearch,KimTaehee/elasticsearch,pablocastro/elasticsearch,Charlesdong/elasticsearch,jeteve/elasticsearch,bestwpw/elasticsearch,kingaj/elasticsearch,ulkas/elasticsearch,Collaborne/elasticsearch,pritishppai/elasticsearch,masterweb121/elasticsearch,masaruh/elasticsearch,masaruh/elasticsearch,mohit/elasticsearch,scottsom/elasticsearch,gfyoung/elasticsearch,pritishppai/elasticsearch,kunallimaye/elasticsearch,artnowo/elasticsearch,JervyShi/elasticsearch,tahaemin/elasticsearch,myelin/elasticsearch,Shepard1212/elasticsearch,rento19962/elasticsearch,iacdingping/elasticsearch,kalburgimanjunath/elasticsearch,vroyer/elassandra,scorpionvicky/elasticsearch,Stacey-Gammon/elasticsearch,ZTE-PaaS/elasticsearch,palecur/elasticsearch,wenpos/elasticsearch,btiernay/elasticsearch,StefanGor/elasticsearch,fernandozhu/elasticsearch,xingguang2013/elasticsearch,myelin/elasticsearch,weipinghe/elasticsearch,nknize/elasticsearch,dpursehouse/elasticsearch,huanzhong/elasticsearch,obourgain/elasticsearch,C-Bish/elasticsearch,Collaborne/elasticsearch,LeoYao/elasticsearch,rento19962/elasticsearch,sc0ttkclark/elasticsearch,gingerwizard/elasticsearch,lmtwga/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,bestwpw/elasticsearch,nrkkalyan/elasticsearch,camilojd/elasticsearch,bestwpw/elasticsearch,StefanGor/elasticsearch,clintongormley/elasticsearch,yanjunh/elasticsearch,elasticdog/elasticsearch,markwalkom/elasticsearch,cwurm/elasticsearch,jpountz/elasticsearch,snikch/elasticsearch,ZTE-PaaS/elasticsearch,huanzhong/elasticsearch,martinstuga/elasticsearch,kunallimaye/elasticsearch,masterweb121/elasticsearch,rmuir/elasticsearch,wangtuo/elasticsearch,weipinghe/elasticsearch,kalburgimanjunath/elasticsearch,yongminxia/elasticsearch,episerver/elasticsearch,alexshadow007/elasticsearch,pozhidaevak/elasticsearch,sc0ttkclark/elasticsearch,cnfire/elasticsearch-1,mapr/elasticsearch,Stacey-Gammon/elasticsearch,andrestc/elasticsearch,tebriel/elasticsearch,njlawton/elasticsearch,andrejserafim/elasticsearch,drewr/elasticsearch,tahaemin/elasticsearch,jeteve/elasticsearch,vroyer/elasticassandra,LeoYao/elasticsearch,djschny/elasticsearch,iacdingping/elasticsearch,strapdata/elassandra,diendt/elasticsearch,gingerwizard/elasticsearch,Shepard1212/elasticsearch,njlawton/elasticsearch,drewr/elasticsearch,tahaemin/elasticsearch,18098924759/elasticsearch,ckclark/elasticsearch,rajanm/elasticsearch,jchampion/elasticsearch,fred84/elasticsearch,kaneshin/elasticsearch,caengcjd/elasticsearch,yynil/elasticsearch,ricardocerq/elasticsearch,sc0ttkclark/elasticsearch,schonfeld/elasticsearch,davidvgalbraith/elasticsearch,ESamir/elasticsearch,lks21c/elasticsearch,liweinan0423/elasticsearch,Collaborne/elasticsearch,xingguang2013/elasticsearch,sreeramjayan/elasticsearch,alexshadow007/elasticsearch,uschindler/elasticsearch,elasticdog/elasticsearch,MisterAndersen/elasticsearch,rajanm/elasticsearch,nrkkalyan/elasticsearch,kaneshin/elasticsearch,markharwood/elasticsearch,nezirus/elasticsearch,PhaedrusTheGreek/elasticsearch,JSCooke/elasticsearch,artnowo/elasticsearch,adrianbk/elasticsearch,gmarz/elasticsearch,wittyameta/elasticsearch,Ansh90/elasticsearch,Ansh90/elasticsearch,scottsom/elasticsearch,F0lha/elasticsearch,a2lin/elasticsearch,drewr/elasticsearch,MetSystem/elasticsearch,s1monw/elasticsearch,winstonewert/elast
icsearch,jango2015/elasticsearch,lzo/elasticsearch-1,wuranbo/elasticsearch,HonzaKral/elasticsearch,rmuir/elasticsearch,vietlq/elasticsearch,vietlq/elasticsearch,PhaedrusTheGreek/elasticsearch,YosuaMichael/elasticsearch,cnfire/elasticsearch-1,cwurm/elasticsearch,petabytedata/elasticsearch,himanshuag/elasticsearch,knight1128/elasticsearch,brandonkearby/elasticsearch,mm0/elasticsearch,Charlesdong/elasticsearch,s1monw/elasticsearch,kunallimaye/elasticsearch,mmaracic/elasticsearch,adrianbk/elasticsearch,mnylen/elasticsearch,petabytedata/elasticsearch,jpountz/elasticsearch,kalburgimanjunath/elasticsearch,wittyameta/elasticsearch,JSCooke/elasticsearch,MisterAndersen/elasticsearch,myelin/elasticsearch,JackyMai/elasticsearch,mmaracic/elasticsearch,polyfractal/elasticsearch,petabytedata/elasticsearch,lzo/elasticsearch-1,kaneshin/elasticsearch,zkidkid/elasticsearch,lydonchandra/elasticsearch,coding0011/elasticsearch,camilojd/elasticsearch,Uiho/elasticsearch,sdauletau/elasticsearch,Ansh90/elasticsearch,avikurapati/elasticsearch,huanzhong/elasticsearch,ThiagoGarciaAlves/elasticsearch,Shepard1212/elasticsearch,coding0011/elasticsearch,fforbeck/elasticsearch,lmtwga/elasticsearch,xuzha/elasticsearch,MetSystem/elasticsearch,sdauletau/elasticsearch,huanzhong/elasticsearch,rmuir/elasticsearch,tahaemin/elasticsearch,jeteve/elasticsearch,sdauletau/elasticsearch,uschindler/elasticsearch,awislowski/elasticsearch,MichaelLiZhou/elasticsearch,springning/elasticsearch,ThiagoGarciaAlves/elasticsearch,mjason3/elasticsearch,LeoYao/elasticsearch,sc0ttkclark/elasticsearch,nknize/elasticsearch,sreeramjayan/elasticsearch,fernandozhu/elasticsearch,girirajsharma/elasticsearch,yynil/elasticsearch,alexshadow007/elasticsearch,mcku/elasticsearch,Ansh90/elasticsearch,pritishppai/elasticsearch,Stacey-Gammon/elasticsearch,ckclark/elasticsearch,markwalkom/elasticsearch,jango2015/elasticsearch,sreeramjayan/elasticsearch,zkidkid/elasticsearch,xuzha/elasticsearch,elancom/elasticsearch,jimczi/elasticsearch,caengcjd/elasticsearch,socialrank/elasticsearch,MichaelLiZhou/elasticsearch,JSCooke/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,elasticdog/elasticsearch,wuranbo/elasticsearch,PhaedrusTheGreek/elasticsearch,shreejay/elasticsearch,gfyoung/elasticsearch,mgalushka/elasticsearch,wimvds/elasticsearch,lks21c/elasticsearch,jbertouch/elasticsearch,achow/elasticsearch,mortonsykes/elasticsearch,nazarewk/elasticsearch,Charlesdong/elasticsearch,scottsom/elasticsearch,Collaborne/elasticsearch,caengcjd/elasticsearch,dpursehouse/elasticsearch,wenpos/elasticsearch,wangtuo/elasticsearch,jchampion/elasticsearch,mbrukman/elasticsearch,mikemccand/elasticsearch,rlugojr/elasticsearch,trangvh/elasticsearch,mnylen/elasticsearch,achow/elasticsearch,lmtwga/elasticsearch,petabytedata/elasticsearch,kunallimaye/elasticsearch,18098924759/elasticsearch,jimczi/elasticsearch,knight1128/elasticsearch,areek/elasticsearch,franklanganke/elasticsearch,ouyangkongtong/elasticsearch,markharwood/elasticsearch,nrkkalyan/elasticsearch,snikch/elasticsearch,avikurapati/elasticsearch,himanshuag/elasticsearch,vroyer/elasticassandra,ESamir/elasticsearch,uschindler/elasticsearch,LeoYao/elasticsearch,kaneshin/elasticsearch,onegambler/elasticsearch,ckclark/elasticsearch,jprante/elasticsearch,Charlesdong/elasticsearch,andrestc/elasticsearch,mjason3/elasticsearch,infusionsoft/elasticsearch,kunallimaye/elasticsearch,xingguang2013/elasticsearch,strapdata/elassandra5-rc,C-Bish/elasticsearch,vietlq/elasticsearch,polyfractal/elasticsearch,dongjoon-hyun/elasticsearch,ulkas/elasticsearch,z
hiqinghuang/elasticsearch,girirajsharma/elasticsearch,knight1128/elasticsearch,njlawton/elasticsearch,PhaedrusTheGreek/elasticsearch,mm0/elasticsearch,rhoml/elasticsearch,maddin2016/elasticsearch,xingguang2013/elasticsearch,rlugojr/elasticsearch,pozhidaevak/elasticsearch,AndreKR/elasticsearch,pozhidaevak/elasticsearch,infusionsoft/elasticsearch,JackyMai/elasticsearch,umeshdangat/elasticsearch,socialrank/elasticsearch,nezirus/elasticsearch,IanvsPoplicola/elasticsearch,lydonchandra/elasticsearch,i-am-Nathan/elasticsearch,zhiqinghuang/elasticsearch,obourgain/elasticsearch,rmuir/elasticsearch,coding0011/elasticsearch,himanshuag/elasticsearch,mcku/elasticsearch,martinstuga/elasticsearch,kunallimaye/elasticsearch,masaruh/elasticsearch,awislowski/elasticsearch,infusionsoft/elasticsearch,wittyameta/elasticsearch,achow/elasticsearch,robin13/elasticsearch,lzo/elasticsearch-1,gfyoung/elasticsearch,coding0011/elasticsearch,polyfractal/elasticsearch,wangtuo/elasticsearch,adrianbk/elasticsearch,jpountz/elasticsearch,winstonewert/elasticsearch,trangvh/elasticsearch,GlenRSmith/elasticsearch,dpursehouse/elasticsearch,infusionsoft/elasticsearch,mmaracic/elasticsearch,StefanGor/elasticsearch,ThiagoGarciaAlves/elasticsearch,wittyameta/elasticsearch,mapr/elasticsearch,gingerwizard/elasticsearch,girirajsharma/elasticsearch,wbowling/elasticsearch,qwerty4030/elasticsearch,ouyangkongtong/elasticsearch,palecur/elasticsearch,wittyameta/elasticsearch,vietlq/elasticsearch,martinstuga/elasticsearch,mikemccand/elasticsearch,kalimatas/elasticsearch,Rygbee/elasticsearch,apepper/elasticsearch,artnowo/elasticsearch,cnfire/elasticsearch-1,ckclark/elasticsearch,henakamaMSFT/elasticsearch,umeshdangat/elasticsearch,ESamir/elasticsearch,nrkkalyan/elasticsearch,lks21c/elasticsearch,GlenRSmith/elasticsearch,18098924759/elasticsearch,naveenhooda2000/elasticsearch,ThiagoGarciaAlves/elasticsearch,lmtwga/elasticsearch,xuzha/elasticsearch,weipinghe/elasticsearch,snikch/elasticsearch,episerver/elasticsearch,ricardocerq/elasticsearch,s1monw/elasticsearch,episerver/elasticsearch,adrianbk/elasticsearch,C-Bish/elasticsearch,weipinghe/elasticsearch,pranavraman/elasticsearch,ckclark/elasticsearch,yongminxia/elasticsearch,rhoml/elasticsearch,jchampion/elasticsearch,infusionsoft/elasticsearch,djschny/elasticsearch,Brijeshrpatel9/elasticsearch,nellicus/elasticsearch,jprante/elasticsearch,ulkas/elasticsearch,rento19962/elasticsearch,AndreKR/elasticsearch,diendt/elasticsearch,JSCooke/elasticsearch,scottsom/elasticsearch,dongjoon-hyun/elasticsearch,clintongormley/elasticsearch,pranavraman/elasticsearch,yanjunh/elasticsearch,YosuaMichael/elasticsearch,naveenhooda2000/elasticsearch,awislowski/elasticsearch,franklanganke/elasticsearch,areek/elasticsearch,mohit/elasticsearch,i-am-Nathan/elasticsearch,wimvds/elasticsearch,elancom/elasticsearch,ulkas/elasticsearch,pozhidaevak/elasticsearch,nilabhsagar/elasticsearch,Rygbee/elasticsearch,infusionsoft/elasticsearch,sneivandt/elasticsearch,andrestc/elasticsearch,apepper/elasticsearch,lzo/elasticsearch-1,Uiho/elasticsearch,hafkensite/elasticsearch,strapdata/elassandra5-rc,zkidkid/elasticsearch,a2lin/elasticsearch,henakamaMSFT/elasticsearch,qwerty4030/elasticsearch,wbowling/elasticsearch,mcku/elasticsearch,jbertouch/elasticsearch,wbowling/elasticsearch,ulkas/elasticsearch,gingerwizard/elasticsearch,JervyShi/elasticsearch,markwalkom/elasticsearch,Stacey-Gammon/elasticsearch,gfyoung/elasticsearch,mnylen/elasticsearch,btiernay/elasticsearch,jeteve/elasticsearch,areek/elasticsearch,karthikjaps/elasticsearch,weiping
he/elasticsearch,YosuaMichael/elasticsearch,tahaemin/elasticsearch,liweinan0423/elasticsearch,springning/elasticsearch,xingguang2013/elasticsearch,lzo/elasticsearch-1,btiernay/elasticsearch,hafkensite/elasticsearch,ouyangkongtong/elasticsearch,JackyMai/elasticsearch,sdauletau/elasticsearch,schonfeld/elasticsearch,ivansun1010/elasticsearch,clintongormley/elasticsearch,elasticdog/elasticsearch,caengcjd/elasticsearch,glefloch/elasticsearch,wittyameta/elasticsearch,mm0/elasticsearch,springning/elasticsearch,drewr/elasticsearch,himanshuag/elasticsearch,scorpionvicky/elasticsearch,mcku/elasticsearch,mikemccand/elasticsearch,ckclark/elasticsearch,MichaelLiZhou/elasticsearch,mgalushka/elasticsearch,episerver/elasticsearch,pablocastro/elasticsearch,iacdingping/elasticsearch,wuranbo/elasticsearch,Uiho/elasticsearch,onegambler/elasticsearch,mjason3/elasticsearch,snikch/elasticsearch,robin13/elasticsearch,kalburgimanjunath/elasticsearch,glefloch/elasticsearch,rajanm/elasticsearch,lydonchandra/elasticsearch,kaneshin/elasticsearch,davidvgalbraith/elasticsearch,kalimatas/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,pozhidaevak/elasticsearch,bestwpw/elasticsearch,Collaborne/elasticsearch,henakamaMSFT/elasticsearch,trangvh/elasticsearch,YosuaMichael/elasticsearch,ESamir/elasticsearch,LeoYao/elasticsearch,jango2015/elasticsearch,kingaj/elasticsearch,LewayneNaidoo/elasticsearch,wimvds/elasticsearch,i-am-Nathan/elasticsearch,tahaemin/elasticsearch,gfyoung/elasticsearch,fforbeck/elasticsearch,geidies/elasticsearch,diendt/elasticsearch,tahaemin/elasticsearch,infusionsoft/elasticsearch,jango2015/elasticsearch,dpursehouse/elasticsearch,karthikjaps/elasticsearch,nezirus/elasticsearch,ulkas/elasticsearch,ouyangkongtong/elasticsearch,maddin2016/elasticsearch,areek/elasticsearch,pritishppai/elasticsearch,MetSystem/elasticsearch,F0lha/elasticsearch,mikemccand/elasticsearch,jimczi/elasticsearch,uschindler/elasticsearch,mgalushka/elasticsearch,Ansh90/elasticsearch,18098924759/elasticsearch,KimTaehee/elasticsearch,pranavraman/elasticsearch,apepper/elasticsearch,shreejay/elasticsearch,nilabhsagar/elasticsearch,rento19962/elasticsearch,18098924759/elasticsearch,sc0ttkclark/elasticsearch,andrejserafim/elasticsearch,brandonkearby/elasticsearch,rento19962/elasticsearch,kalburgimanjunath/elasticsearch,yynil/elasticsearch,schonfeld/elasticsearch,pritishppai/elasticsearch,LeoYao/elasticsearch,wittyameta/elasticsearch,Brijeshrpatel9/elasticsearch,gmarz/elasticsearch,jango2015/elasticsearch,Brijeshrpatel9/elasticsearch,yynil/elasticsearch,JervyShi/elasticsearch,mcku/elasticsearch,onegambler/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,lzo/elasticsearch-1,nknize/elasticsearch,bawse/elasticsearch,ThiagoGarciaAlves/elasticsearch,cwurm/elasticsearch,mm0/elasticsearch,MetSystem/elasticsearch,polyfractal/elasticsearch,umeshdangat/elasticsearch,MisterAndersen/elasticsearch,tebriel/elasticsearch,markharwood/elasticsearch,kingaj/elasticsearch,Helen-Zhao/elasticsearch,kalimatas/elasticsearch,ZTE-PaaS/elasticsearch,karthikjaps/elasticsearch,tebriel/elasticsearch,mbrukman/elasticsearch,cnfire/elasticsearch-1,Helen-Zhao/elasticsearch,F0lha/elasticsearch,sdauletau/elasticsearch,onegambler/elasticsearch,trangvh/elasticsearch,petabytedata/elasticsearch,scorpionvicky/elasticsearch,Shepard1212/elasticsearch,winstonewert/elasticsearch,wbowling/elasticsearch,iacdingping/elasticsearch,wbowling/elasticsearch,jprante/elasticsearch,spiegela/elasticsearch,MaineC/elasticsearch,apepper/elasticsearch,huanzhong/elasticsearch,markwalkom/elasticsearc
h,fforbeck/elasticsearch,jchampion/elasticsearch,Rygbee/elasticsearch,areek/elasticsearch,rento19962/elasticsearch,jimczi/elasticsearch,karthikjaps/elasticsearch,fred84/elasticsearch,LewayneNaidoo/elasticsearch,wbowling/elasticsearch,drewr/elasticsearch,njlawton/elasticsearch,Uiho/elasticsearch,wuranbo/elasticsearch,liweinan0423/elasticsearch,Rygbee/elasticsearch,rlugojr/elasticsearch,spiegela/elasticsearch,kaneshin/elasticsearch,mmaracic/elasticsearch,strapdata/elassandra5-rc,yynil/elasticsearch,mohit/elasticsearch,Ansh90/elasticsearch,caengcjd/elasticsearch,iacdingping/elasticsearch,nrkkalyan/elasticsearch,fernandozhu/elasticsearch,Ansh90/elasticsearch,elancom/elasticsearch,i-am-Nathan/elasticsearch,andrejserafim/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,btiernay/elasticsearch,kalburgimanjunath/elasticsearch,ivansun1010/elasticsearch,apepper/elasticsearch,mm0/elasticsearch,kingaj/elasticsearch,nilabhsagar/elasticsearch,wimvds/elasticsearch,nomoa/elasticsearch,rlugojr/elasticsearch,sdauletau/elasticsearch,lmtwga/elasticsearch,bestwpw/elasticsearch,rhoml/elasticsearch,geidies/elasticsearch,masterweb121/elasticsearch,pritishppai/elasticsearch,gmarz/elasticsearch,andrestc/elasticsearch,nomoa/elasticsearch,myelin/elasticsearch,xuzha/elasticsearch,StefanGor/elasticsearch,davidvgalbraith/elasticsearch,KimTaehee/elasticsearch,winstonewert/elasticsearch,lmtwga/elasticsearch,andrestc/elasticsearch,MetSystem/elasticsearch,cnfire/elasticsearch-1,karthikjaps/elasticsearch,Uiho/elasticsearch,diendt/elasticsearch,ivansun1010/elasticsearch,brandonkearby/elasticsearch,markwalkom/elasticsearch,a2lin/elasticsearch,njlawton/elasticsearch,LewayneNaidoo/elasticsearch,AndreKR/elasticsearch,cwurm/elasticsearch,snikch/elasticsearch,umeshdangat/elasticsearch,btiernay/elasticsearch,liweinan0423/elasticsearch,mgalushka/elasticsearch,sdauletau/elasticsearch,i-am-Nathan/elasticsearch,alexshadow007/elasticsearch,naveenhooda2000/elasticsearch,mapr/elasticsearch,socialrank/elasticsearch,jbertouch/elasticsearch,glefloch/elasticsearch,franklanganke/elasticsearch,nknize/elasticsearch,polyfractal/elasticsearch,MichaelLiZhou/elasticsearch,GlenRSmith/elasticsearch,xuzha/elasticsearch,Uiho/elasticsearch,bestwpw/elasticsearch,mbrukman/elasticsearch,petabytedata/elasticsearch,wenpos/elasticsearch,pranavraman/elasticsearch,xingguang2013/elasticsearch,diendt/elasticsearch,IanvsPoplicola/elasticsearch,onegambler/elasticsearch,YosuaMichael/elasticsearch,AndreKR/elasticsearch,djschny/elasticsearch,robin13/elasticsearch,wenpos/elasticsearch,ivansun1010/elasticsearch,rhoml/elasticsearch,ouyangkongtong/elasticsearch,trangvh/elasticsearch,Stacey-Gammon/elasticsearch,drewr/elasticsearch,HonzaKral/elasticsearch,mnylen/elasticsearch,Helen-Zhao/elasticsearch,petabytedata/elasticsearch,springning/elasticsearch,MichaelLiZhou/elasticsearch,hafkensite/elasticsearch,spiegela/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,Shepard1212/elasticsearch,PhaedrusTheGreek/elasticsearch,nellicus/elasticsearch,Helen-Zhao/elasticsearch,pranavraman/elasticsearch,geidies/elasticsearch,ouyangkongtong/elasticsearch,camilojd/elasticsearch,mohit/elasticsearch,KimTaehee/elasticsearch,liweinan0423/elasticsearch,pablocastro/elasticsearch,elancom/elasticsearch,sreeramjayan/elasticsearch,bawse/elasticsearch,yynil/elasticsearch,yongminxia/elasticsearch,geidies/elasticsearch,hafkensite/elasticsearch,SaiprasadKrishnamurthy/elasticsearch,F0lha/elasticsearch,gmarz/elasticsearch,jbertouch/elasticsearch,scorpionvicky/elasticsearch,yongminxia/elasticsearch,andrejse
rafim/elasticsearch,yongminxia/elasticsearch,bawse/elasticsearch,ivansun1010/elasticsearch,areek/elasticsearch,wangtuo/elasticsearch,jchampion/elasticsearch,springning/elasticsearch,sc0ttkclark/elasticsearch,wimvds/elasticsearch,wbowling/elasticsearch,masterweb121/elasticsearch,diendt/elasticsearch,markharwood/elasticsearch,xingguang2013/elasticsearch,tebriel/elasticsearch,KimTaehee/elasticsearch,nezirus/elasticsearch,mnylen/elasticsearch,palecur/elasticsearch,djschny/elasticsearch,sreeramjayan/elasticsearch,markwalkom/elasticsearch,huanzhong/elasticsearch,mortonsykes/elasticsearch,mmaracic/elasticsearch,drewr/elasticsearch,adrianbk/elasticsearch,Brijeshrpatel9/elasticsearch,wimvds/elasticsearch,masterweb121/elasticsearch,henakamaMSFT/elasticsearch,knight1128/elasticsearch,obourgain/elasticsearch,umeshdangat/elasticsearch,Charlesdong/elasticsearch,djschny/elasticsearch,markharwood/elasticsearch,franklanganke/elasticsearch,yongminxia/elasticsearch,masaruh/elasticsearch,jeteve/elasticsearch,mm0/elasticsearch,F0lha/elasticsearch,nrkkalyan/elasticsearch,PhaedrusTheGreek/elasticsearch,nomoa/elasticsearch,pablocastro/elasticsearch,iacdingping/elasticsearch,karthikjaps/elasticsearch,masterweb121/elasticsearch,Collaborne/elasticsearch,huanzhong/elasticsearch,spiegela/elasticsearch,ESamir/elasticsearch,qwerty4030/elasticsearch,lydonchandra/elasticsearch,awislowski/elasticsearch,himanshuag/elasticsearch,ricardocerq/elasticsearch,polyfractal/elasticsearch,brandonkearby/elasticsearch,jprante/elasticsearch,maddin2016/elasticsearch | dev-tools/validate-maven-repository.py | dev-tools/validate-maven-repository.py | # Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
# Helper python script to check if a sonatype staging repo contains
# all the required files compared to a local repository
#
# The script performs the following steps:
#
# 1. Scans the local maven repo for all files in /org/elasticsearch
# 2. Opens a HTTP connection to the staging repo
# 3. Executes a HEAD request for each file found in step one
# 4. Compares the content-length response header with the real file size
# 5. Returns an error if those two numbers differ
#
# A prerequisite for running this is to find out via the oss.sonatype.org web UI how that repo is named
# - After logging in you go to 'Staging repositories' and search for the one you just created
# - Click into the `Content` tab
# - Open any artifact (not a directory)
# - Copy the link of `Repository Path` on the right and reuse that part of the URL
#
# Alternatively you can just use the name of the repository and reuse the rest (ie. the repository
# named for the example below would have been named orgelasticsearch-1012)
#
#
# Example call
# python dev-tools/validate-maven-repository.py /path/to/repo/org/elasticsearch/ \
# https://oss.sonatype.org/service/local/repositories/orgelasticsearch-1012/content/org/elasticsearch
import sys
import os
import httplib
import urlparse
import re
# Draw a simple progress bar, a couple of hundred HEAD requests might take a while
# Note, when drawing this, it uses the carriage return character, so you should not
# write anything in between
def drawProgressBar(percent, barLen = 40):
sys.stdout.write("\r")
progress = ""
for i in range(barLen):
if i < int(barLen * percent):
progress += "="
else:
progress += " "
sys.stdout.write("[ %s ] %.2f%%" % (progress, percent * 100))
sys.stdout.flush()
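# Usage sketch (not from the original script): call drawProgressBar(done / float(total))
# after each unit of work, avoid printing anything in between, and finish with a bare
# `print` so the prompt does not land on top of the carriage-returned bar.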
if __name__ == "__main__":
if len(sys.argv) != 3:
print 'Usage: %s <localRep> <stagingRepo> [user:pass]' % (sys.argv[0])
print ''
print 'Example: %s /tmp/my-maven-repo/org/elasticsearch https://oss.sonatype.org/service/local/repositories/orgelasticsearch-1012/content/org/elasticsearch' % (sys.argv[0])
else:
sys.argv[1] = re.sub('/$', '', sys.argv[1])
sys.argv[2] = re.sub('/$', '', sys.argv[2])
localMavenRepo = sys.argv[1]
endpoint = sys.argv[2]
filesToCheck = []
foundSignedFiles = False
for root, dirs, files in os.walk(localMavenRepo):
for file in files:
# no metadata files (they get renamed from maven-metadata-local.xml to maven-metadata.xml while deploying)
        # no .properties and .repositories files (they don't get uploaded)
if not file.startswith('maven-metadata') and not file.endswith('.properties') and not file.endswith('.repositories'):
filesToCheck.append(os.path.join(root, file))
if file.endswith('.asc'):
foundSignedFiles = True
print "Need to check %i files" % len(filesToCheck)
if not foundSignedFiles:
print '### Warning: No signed .asc files found'
# set up http
parsed_uri = urlparse.urlparse(endpoint)
domain = parsed_uri.netloc
if parsed_uri.scheme == 'https':
conn = httplib.HTTPSConnection(domain)
else:
conn = httplib.HTTPConnection(domain)
#conn.set_debuglevel(5)
drawProgressBar(0)
errors = []
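    # Core check: issue one HEAD request per artifact and compare the remote
    # Content-Length header against the local file size; anything that differs,
    # is missing, or redirects is collected in `errors` and reported below.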
for idx, file in enumerate(filesToCheck):
request_uri = parsed_uri.path + file[len(localMavenRepo):]
conn.request("HEAD", request_uri)
res = conn.getresponse()
res.read() # useless call for head, but prevents httplib.ResponseNotReady raise
absolute_url = parsed_uri.scheme + '://' + parsed_uri.netloc + request_uri
if res.status == 200:
content_length = res.getheader('content-length')
local_file_size = os.path.getsize(file)
if int(content_length) != int(local_file_size):
          errors.append('LENGTH MISMATCH: %s differs in size. local %s <=> %s remote' % (absolute_url, local_file_size, content_length))
elif res.status == 404:
errors.append('MISSING: %s' % absolute_url)
elif res.status == 301 or res.status == 302:
errors.append('REDIRECT: %s to %s' % (absolute_url, res.getheader('location')))
else:
errors.append('ERROR: %s http response: %s %s' %(absolute_url, res.status, res.reason))
# update progressbar at the end
drawProgressBar((idx+1)/float(len(filesToCheck)))
print
if len(errors) != 0:
      print 'The following errors occurred (%s out of %s files)' % (len(errors), len(filesToCheck))
print
for error in errors:
print error
sys.exit(-1)
| apache-2.0 | Python |
|
3a235f8525ae89ae91c333f7cd10ed307c33011c | Exclude local data from package. | JostCrow/django-forms-builder,stephenmcd/django-forms-builder,nimbis/django-forms-builder,stephenmcd/django-forms-builder,iddqd1/django-forms-builder,vinnyrose/django-forms-builder,vinnyrose/django-forms-builder,simas/django-forms-builder,JostCrow/django-forms-builder,GetHappie/django-forms-builder,Afnarel/django-forms-builder,bichocj/django-forms-builder,frontendr/django-forms-builder,bichocj/django-forms-builder,ixc/django-forms-builder,frontendr/django-forms-builder,iddqd1/django-forms-builder,nimbis/django-forms-builder,ixc/django-forms-builder,simas/django-forms-builder,GetHappie/django-forms-builder,Afnarel/django-forms-builder | setup.py | setup.py |
from __future__ import with_statement
import os
from setuptools import setup, find_packages
exclude = ["forms_builder/example_project/dev.db",
"forms_builder/example_project/local_settings.py"]
exclude = dict([(e, None) for e in exclude])
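# Snapshot each excluded file's contents and permissions, remove the file so
# setup() below cannot package it, then put everything back in the finally block.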
for e in exclude:
if e.endswith(".py"):
try:
os.remove("%sc" % e)
except:
pass
try:
with open(e, "r") as f:
exclude[e] = (f.read(), os.stat(e))
os.remove(e)
    except Exception:
        pass
try:
setup(
name = "django-forms-builder",
version = __import__("forms_builder").__version__,
author = "Stephen McDonald",
author_email = "[email protected]",
description = ("A Django reusable app providing the ability for admin "
"users to create their own forms."),
long_description = open("README.rst").read(),
url = "http://github.com/stephenmcd/django-forms-builder",
zip_safe = False,
include_package_data = True,
packages = find_packages(),
install_requires = [
"sphinx-me >= 0.1.2",
"django-email-extras >= 0.1.7",
"django",
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: Site Management",
]
)
finally:
for e in exclude:
if exclude[e] is not None:
data, stat = exclude[e]
try:
with open(e, "w") as f:
f.write(data)
os.chown(e, stat.st_uid, stat.st_gid)
os.chmod(e, stat.st_mode)
except:
pass
|
from setuptools import setup, find_packages
setup(
name = "django-forms-builder",
version = __import__("forms_builder").__version__,
author = "Stephen McDonald",
author_email = "[email protected]",
description = ("A Django reusable app providing the ability for admin "
"users to create their own forms."),
long_description = open("README.rst").read(),
url = "http://github.com/stephenmcd/django-forms-builder",
zip_safe = False,
include_package_data = True,
packages = find_packages(),
install_requires = [
"sphinx-me >= 0.1.2",
"django-email-extras >= 0.1.7",
"django",
],
classifiers = [
"Development Status :: 5 - Production/Stable",
"Environment :: Web Environment",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Framework :: Django",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: Site Management",
]
)
| bsd-2-clause | Python |
207116ee7ba8d8da521f497997da90066831a551 | Add codemod to replace __unicode__ with __str__ | edx/repo-tools,edx/repo-tools | django3_codemods/replace_unicode_with_str.py | django3_codemods/replace_unicode_with_str.py | import sys
from bowler import Query
(
Query(sys.argv[1])
.select_function("__unicode__")
.rename('__str__')
.idiff()
),
(
Query(sys.argv[1])
.select_method("__unicode__")
.is_call()
.rename('__str__')
.idiff()
)
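# Usage sketch (an assumption, based on how Bowler queries are typically driven):
#   python replace_unicode_with_str.py path/to/package
# .idiff() walks the proposed __unicode__ -> __str__ renames interactively
# before any file is rewritten.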
| apache-2.0 | Python |
|
d85a68e36443bfcdeed2d8f1f3960d1596ef762a | Create catchtheball.py | kapoorkkartik/pythongames | catchtheball.py | catchtheball.py | import simplegui
import random
FRAME_WIDTH=STAGE_WIDTH=GROUND_WIDTH=821
FRAME_HEIGHT=498
STAGE_HEIGHT=FRAME_HEIGHT-30
PADDLE_HEIGHT=STAGE_HEIGHT
PADDLE_WIDTH=8
PADDLE_POS=[STAGE_WIDTH/2,PADDLE_HEIGHT]
image=simplegui.load_image("http://mrnussbaum.com/calendarclowns1/images/game_background.png")
list_of_balls=[]
colors=['Aqua','Blue','Fuchsia','Gray',
'Green','Lime','Maroon','Navy','Olive',
'Orange','Purple','Red','Silver','Teal',
'White','Yellow']
class Ball:
def __init__(self,color,radius,x_location):
self.radius=radius
self.color=color
self.location=[x_location,0]
def timer():
radius = 10
color = random.choice(colors)
x_location = random.randint(20, STAGE_WIDTH-20)
new_ball = Ball(color,radius, x_location)
list_of_balls.append(new_ball)
def draw(canvas):
canvas.draw_image(image,[FRAME_WIDTH/2,FRAME_HEIGHT/2],[FRAME_WIDTH,FRAME_HEIGHT],[FRAME_WIDTH/2,FRAME_HEIGHT/2],[FRAME_WIDTH,FRAME_HEIGHT])
for ball in list_of_balls:
ball.location[1]+=5
canvas.draw_circle(ball.location,ball.radius,10,ball.color,ball.color)
frame=simplegui.create_frame("ball",FRAME_WIDTH,FRAME_HEIGHT)
timer=simplegui.create_timer(2000,timer)
frame.set_draw_handler(draw)
frame.start()
timer.start()
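# How the pieces fit: the timer fires every 2000 ms and drops one new ball at a
# random x position, while draw() (called by simplegui each frame) moves every
# ball down 5 px and repaints it over the background image.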
| apache-2.0 | Python |
|
54bb69cd3646246975f723923254549bc5f11ca0 | Add default paver commands | ella/citools,ella/citools | citools/paver.py | citools/paver.py | import os
import sys
from os.path import abspath, dirname, join

from paver.easy import *  # task, needs, consume_args, sh, path, options, call_task

@task
@consume_args
@needs('unit', 'integrate')
def test():
""" Run whole testsuite """
def djangonize_test_environment(test_project_module):
sys.path.insert(0, abspath(join(dirname(__file__))))
sys.path.insert(0, abspath(join(dirname(__file__), "tests")))
sys.path.insert(0, abspath(join(dirname(__file__), "tests", test_project_module)))
os.environ['DJANGO_SETTINGS_MODULE'] = "%s.settings" % test_project_module
def run_tests(test_project_module, nose_args):
djangonize_test_environment(test_project_module)
import nose
os.chdir(abspath(join(dirname(__file__), "tests", test_project_module)))
argv = ["--with-django"] + nose_args
nose.run_exit(
argv = ["nosetests"] + argv,
defaultTest = test_project_module
)
@task
@consume_args
def unit(args):
""" Run unittests """
run_tests(test_project_module="unit_project", nose_args=[]+args)
@task
@consume_args
def integrate(args):
""" Run integration tests """
run_tests(test_project_module="example_project", nose_args=["--with-selenium", "--with-djangoliveserver"]+args)
@task
def install_dependencies():
sh('pip install --upgrade -r requirements.txt')
@task
def bootstrap():
options.virtualenv = {'packages_to_install' : ['pip']}
call_task('paver.virtual.bootstrap')
sh("python bootstrap.py")
path('bootstrap.py').remove()
print '*'*80
if sys.platform in ('win32', 'winnt'):
print "* Before running other commands, You now *must* run %s" % os.path.join("bin", "activate.bat")
else:
print "* Before running other commands, You now *must* run source %s" % os.path.join("bin", "activate")
print '*'*80
@task
@needs('install_dependencies')
def prepare():
""" Prepare complete environment """
| bsd-3-clause | Python |
|
0575a141153fb07a5f03c0681cdf727450348fc0 | Create space.py | Chuck8521/LunchtimeBoredom,Chuck8521/LunchtimeBoredom,Chuck8521/LunchtimeBoredom | space.py | space.py | def ParentOf(n, arr):
if arr[n] == n:
return n
else:
return ParentOf(arr[n],arr)
n, p = list(map(int, input().split()))
arr = []
for t in range(0,n):
arr.append(t)
for q in range(p):
#Quick Union the line
first, sec = list(map(int,input().split()))
    arr[ParentOf(first, arr)] = ParentOf(sec, arr) #link first's root to sec's root (arr argument was missing)
#Get number of people in each group
#map every person to their root first; arr.count(q) would only count
#direct children, not whole groups, and groups[q] on an empty list crashes
roots = [ParentOf(q, arr) for q in range(0, n)]
groups = []
for q in range(0, n):
    groups.append(roots.count(q))
#groups is accurate if 0's removed
trueG = []
for t in groups:
if t != 0:
trueG.append(t)
ways = 0
for index, a in enumerate(trueG):
i = index + 1
while i < len(trueG):
ways += a * trueG[i]
i += 1
print(str(ways))
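#Worked example (input format assumed from the code: "n p" then p pairs):
#  4 2 / 0 1 / 2 3  -> two groups of size 2, so ways = 2 * 2 = 4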
| mit | Python |
|
3c074ab5c630590ca32f8951eecb3087afd8ae01 | add solution for Binary Tree Level Order Traversal II | zhyu/leetcode,zhyu/leetcode | src/binaryTreeLevelOrderTraversalII.py | src/binaryTreeLevelOrderTraversalII.py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
# @param root, a tree node
# @return a list of lists of integers
def levelOrderBottom(self, root):
self.res = []
self._dfs(root, 0)
        return self.res[::-1]  # a slice keeps it a real list; reversed() would return an iterator
def _dfs(self, root, level):
if not root:
return
if len(self.res) == level:
self.res.append([root.val])
else:
self.res[level].append(root.val)
self._dfs(root.left, level+1)
self._dfs(root.right, level+1)
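# Example: for the tree [3,9,20,null,null,15,7], _dfs builds self.res as
# [[3], [9, 20], [15, 7]] and levelOrderBottom returns [[15, 7], [9, 20], [3]].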
| mit | Python |
|
f22b6368bdfe91cff06ede51c1caad04f769b437 | add management command to load location type into supply point | puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq | custom/colalife/management/commands/load_location_type_into_supply_point.py | custom/colalife/management/commands/load_location_type_into_supply_point.py | from corehq.apps.commtrack.models import SupplyPointCase
from corehq.apps.locations.models import Location
from django.core.management import BaseCommand
class Command(BaseCommand):
help = 'Store location type with supply point.'
def handle(self, *args, **options):
for location_type in ["wholesaler", "retailer"]:
for location in Location.filter_by_type("colalifezambia", location_type):
supply_point_case = SupplyPointCase.get_by_location(location)
supply_point_case.location_type = location_type
supply_point_case.save()
| bsd-3-clause | Python |
|
3e02fe79f4fad6f5252af750a13d74d7a4f82cc5 | read in the file, probably badly | kinsamanka/machinekit,narogon/linuxcnc,cdsteinkuehler/MachineKit,bmwiedemann/linuxcnc-mirror,cnc-club/linuxcnc,ikcalB/linuxcnc-mirror,unseenlaser/machinekit,ianmcmahon/linuxcnc-mirror,unseenlaser/linuxcnc,kinsamanka/machinekit,bmwiedemann/linuxcnc-mirror,ArcEye/MK-Qt5,cnc-club/linuxcnc,kinsamanka/machinekit,RunningLight/machinekit,araisrobo/linuxcnc,EqAfrica/machinekit,bobvanderlinden/machinekit,bmwiedemann/linuxcnc-mirror,aschiffler/linuxcnc,Cid427/machinekit,ianmcmahon/linuxcnc-mirror,ArcEye/machinekit-testing,kinsamanka/machinekit,unseenlaser/machinekit,Cid427/machinekit,yishinli/emc2,mhaberler/machinekit,ianmcmahon/linuxcnc-mirror,yishinli/emc2,narogon/linuxcnc,cdsteinkuehler/linuxcnc,ArcEye/machinekit-testing,ArcEye/machinekit-testing,kinsamanka/machinekit,araisrobo/machinekit,kinsamanka/machinekit,RunningLight/machinekit,bobvanderlinden/machinekit,ArcEye/machinekit-testing,bmwiedemann/linuxcnc-mirror,unseenlaser/linuxcnc,yishinli/emc2,araisrobo/machinekit,ianmcmahon/linuxcnc-mirror,jaguarcat79/ILC-with-LinuxCNC,strahlex/machinekit,aschiffler/linuxcnc,mhaberler/machinekit,Cid427/machinekit,ArcEye/MK-Qt5,cdsteinkuehler/linuxcnc,araisrobo/linuxcnc,ikcalB/linuxcnc-mirror,EqAfrica/machinekit,RunningLight/machinekit,araisrobo/linuxcnc,RunningLight/machinekit,aschiffler/linuxcnc,bobvanderlinden/machinekit,jaguarcat79/ILC-with-LinuxCNC,cdsteinkuehler/linuxcnc,unseenlaser/linuxcnc,mhaberler/machinekit,cnc-club/linuxcnc,cnc-club/linuxcnc,kinsamanka/machinekit,ArcEye/machinekit-testing,cnc-club/linuxcnc,cdsteinkuehler/linuxcnc,ikcalB/linuxcnc-mirror,cdsteinkuehler/MachineKit,cdsteinkuehler/MachineKit,narogon/linuxcnc,cnc-club/linuxcnc,strahlex/machinekit,aschiffler/linuxcnc,araisrobo/machinekit,araisrobo/machinekit,araisrobo/linuxcnc,araisrobo/machinekit,ArcEye/machinekit-testing,EqAfrica/machinekit,jaguarcat79/ILC-with-LinuxCNC,yishinli/emc2,ArcEye/MK-Qt5,ArcEye/MK-Qt5,strahlex/machinekit,kinsamanka/machinekit,Cid427/machinekit,cdsteinkuehler/MachineKit,RunningLight/machinekit,araisrobo/machinekit,araisrobo/machinekit,jaguarcat79/ILC-with-LinuxCNC,bmwiedemann/linuxcnc-mirror,bmwiedemann/linuxcnc-mirror,cdsteinkuehler/linuxcnc,jaguarcat79/ILC-with-LinuxCNC,bobvanderlinden/machinekit,aschiffler/linuxcnc,ikcalB/linuxcnc-mirror,ianmcmahon/linuxcnc-mirror,ArcEye/machinekit-testing,Cid427/machinekit,strahlex/machinekit,araisrobo/linuxcnc,EqAfrica/machinekit,araisrobo/machinekit,bobvanderlinden/machinekit,unseenlaser/machinekit,cdsteinkuehler/MachineKit,bobvanderlinden/machinekit,cnc-club/linuxcnc,strahlex/machinekit,unseenlaser/machinekit,mhaberler/machinekit,Cid427/machinekit,bobvanderlinden/machinekit,ArcEye/MK-Qt5,unseenlaser/machinekit,unseenlaser/linuxcnc,Cid427/machinekit,araisrobo/machinekit,RunningLight/machinekit,bobvanderlinden/machinekit,ArcEye/MK-Qt5,ikcalB/linuxcnc-mirror,narogon/linuxcnc,mhaberler/machinekit,EqAfrica/machinekit,EqAfrica/machinekit,RunningLight/machinekit,unseenlaser/machinekit,bmwiedemann/linuxcnc-mirror,unseenlaser/machinekit,unseenlaser/linuxcnc,ArcEye/MK-Qt5,narogon/linuxcnc,ianmcmahon/linuxcnc-mirror,strahlex/machinekit,ikcalB/linuxcnc-mirror,ArcEye/machinekit-testing,ArcEye/MK-Qt5,unseenlaser/machinekit,mhaberler/machinekit,cdsteinkuehler/linuxcnc,mhaberler/machinekit,mhaberler/machinekit,ianmcmahon/linuxcnc-mirror,RunningLight/machinekit,Cid427/machinekit,cdsteinkuehler/MachineKit,strahlex/machinekit,EqAfrica/machinekit,ikcalB/linuxcnc-mirror,EqAfrica/machinekit | 
src/emc/usr_intf/touchy/filechooser.py | src/emc/usr_intf/touchy/filechooser.py | # Touchy is Copyright (c) 2009 Chris Radek <[email protected]>
#
# Touchy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Touchy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import dircache
import os
class filechooser:
def __init__(self, gtk, emc, labels, eventboxes, program):
self.labels = labels
self.eventboxes = eventboxes
self.numlabels = len(labels)
self.program = program
self.gtk = gtk
self.emc = emc
self.emccommand = emc.command()
self.fileoffset = 0
self.dir = os.path.join(os.getenv('HOME'), 'emc2', 'nc_files')
self.files = dircache.listdir(self.dir)
self.selected = -1
self.populate()
def populate(self):
files = self.files[self.fileoffset:]
for i in range(self.numlabels):
l = self.labels[i]
e = self.eventboxes[i]
if i < len(files):
l.set_text(files[i])
else:
l.set_text('')
if self.selected == self.fileoffset + i:
e.modify_bg(self.gtk.STATE_NORMAL, self.gtk.gdk.color_parse('#fff'))
else:
e.modify_bg(self.gtk.STATE_NORMAL, self.gtk.gdk.color_parse('#ccc'))
def select(self, eventbox, event):
n = int(eventbox.get_name()[20:])
self.selected = self.fileoffset + n
self.emccommand.mode(self.emc.MODE_MDI)
fn = os.path.join(self.dir, self.labels[n].get_text())
f = file(fn, 'r')
self.lines = f.readlines()
f.close()
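        # keep the program text around (presumably for later display);
        # program_open() below still hands emc the path, not these lines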
self.emccommand.program_open(fn)
self.populate()
def up(self, b):
self.fileoffset -= self.numlabels
if self.fileoffset < 0:
self.fileoffset = 0
self.populate()
def down(self, b):
self.fileoffset += self.numlabels
self.populate()
| # Touchy is Copyright (c) 2009 Chris Radek <[email protected]>
#
# Touchy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# Touchy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
import dircache
import os
class filechooser:
def __init__(self, gtk, emc, labels, eventboxes, program):
self.labels = labels
self.eventboxes = eventboxes
self.numlabels = len(labels)
self.program = program
self.gtk = gtk
self.emc = emc
self.emccommand = emc.command()
self.fileoffset = 0
self.dir = os.path.join(os.getenv('HOME'), 'emc2', 'nc_files')
self.files = dircache.listdir(self.dir)
self.selected = -1
self.populate()
def populate(self):
files = self.files[self.fileoffset:]
for i in range(self.numlabels):
l = self.labels[i]
e = self.eventboxes[i]
if i < len(files):
l.set_text(files[i])
else:
l.set_text('')
if self.selected == self.fileoffset + i:
e.modify_bg(self.gtk.STATE_NORMAL, self.gtk.gdk.color_parse('#fff'))
else:
e.modify_bg(self.gtk.STATE_NORMAL, self.gtk.gdk.color_parse('#ccc'))
def select(self, eventbox, event):
n = int(eventbox.get_name()[20:])
self.selected = self.fileoffset + n
self.emccommand.mode(self.emc.MODE_MDI)
self.emccommand.program_open(os.path.join(self.dir, self.labels[n].get_text()))
self.populate()
def up(self, b):
self.fileoffset -= self.numlabels
if self.fileoffset < 0:
self.fileoffset = 0
self.populate()
def down(self, b):
self.fileoffset += self.numlabels
self.populate()
| lgpl-2.1 | Python |
2c98e54c7f2138b4472336520ab18af8f49b9b48 | test networks on test data corresponding to each dataset | santiagolopezg/MODS_ConvNet | test_network.py | test_network.py | import keras
from keras.optimizers import SGD, adadelta, rmsprop, adam
from keras.preprocessing.image import ImageDataGenerator
from keras.utils import np_utils
from keras.metrics import matthews_correlation, precision, recall
import cPickle
import numpy as np
import getpass
username = getpass.getuser()
from foo_two import foo
def get_data(n_dataset):
f = file('MODS_all_data_bw_224_224_{0}.pkl'.format(n_dataset),'rb')
data = cPickle.load(f)
f.close()
training_data = data[0]
validation_data = data[1]
t_data = training_data[0]
t_label = training_data[1]
test_data = validation_data[0]
test_label = validation_data[1]
t_data = np.array(t_data)
t_label = np.array(t_label)
test_data = np.array(test_data)
test_label = np.array(test_label)
t_data = t_data.reshape(t_data.shape[0], 1, 224, 224)
test_data = test_data.reshape(test_data.shape[0], 1, 224, 224)
#less precision means less memory needed: 64 -> 32 (half the memory used)
t_data = t_data.astype('float32')
test_data = test_data.astype('float32')
return (t_data, t_label), (test_data, test_label)
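# Note: get_data hands back channels-first batches of shape (samples, 1, 224, 224),
# so the foo() network is presumably defined for that dim ordering (an assumption).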
class LossAccHistory(keras.callbacks.Callback):
def on_train_begin(self, logs={}):
self.losses = []
self.accu = []
def on_batch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
self.accu.append(logs.get('acc'))
nb_classes = 2
nb_epoch = 100
data_augmentation = True
n_dataset = 7
plot_loss = True
#Hyperparameters for tuning
dropout = 0.5 #[0.0, 0.25, 0.5, 0.7]
batch_size = 16 #[32, 70, 100, 150]
optimizer = 'rmsprop' #['sgd', 'adadelta']
test_metrics = []
model = foo()
for i in xrange(n_dataset):
history = LossAccHistory()
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = get_data(i)
Y_test = np_utils.to_categorical(y_test, nb_classes)
X_test /= 255
print(X_test.shape[0], 'test samples')
#Shows all layers and names
for v, layer in enumerate(model.layers):
print(v, layer.name)
print('Training of the network, using real-time data augmentation.')
model.compile(loss='binary_crossentropy',
optimizer= rmsprop(lr=0.001), #adadelta
metrics=['accuracy', 'matthews_correlation', 'precision', 'recall'])
score = model.evaluate(X_test, Y_test, verbose=1)
print (model.metrics_names, score)
print('Test loss:', score[0])
print('Test accuracy:', score[1])
m = (model.metrics_names, score, 'dataset {0}'.format(i))
test_metrics.append(m)
model.reset_states()
#save test metrics to txt file
metrics_file = open('cut_MODS_test_metrics.txt', 'w')  # avoid shadowing the builtin file
for i in test_metrics:
    metrics_file.write('%s\n' % i)
metrics_file.close()
print test_metrics
| mit | Python |
|
ed94317df99493c24c58a1e1aa553a8f822e793f | Test cases | gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext | erpnext/accounts/report/sales_payment_summary/test_sales_payment_summary.py | erpnext/accounts/report/sales_payment_summary/test_sales_payment_summary.py | # Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import unittest
import frappe
import erpnext
from erpnext.accounts.report.sales_payment_summary.sales_payment_summary import get_mode_of_payments, get_mode_of_payment_details
from frappe.utils import nowdate
from erpnext.accounts.doctype.payment_entry.payment_entry import get_payment_entry
test_dependencies = ["Sales Invoice"]
class TestSalesPaymentSummary(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_get_mode_of_payments(self):
si = frappe.get_all("Sales Invoice", fields=["name", "docstatus"])
filters = get_filters()
for invoice in si[:2]:
doc = frappe.get_doc("Sales Invoice", invoice.name)
new_doc = frappe.copy_doc(doc)
new_doc.insert()
new_doc.submit()
if int(new_doc.name[-3:])%2 == 0:
bank_account = "_Test Cash - _TC"
mode_of_payment = "Cash"
else:
bank_account = "_Test Bank - _TC"
mode_of_payment = "Credit Card"
pe = get_payment_entry("Sales Invoice", new_doc.name, bank_account=bank_account)
pe.reference_no = "_Test"
pe.reference_date = nowdate()
pe.mode_of_payment = mode_of_payment
pe.insert()
pe.submit()
mop = get_mode_of_payments(filters)
self.assertTrue('Credit Card' in mop.values()[0])
self.assertTrue('Cash' in mop.values()[0])
# Cancel all Cash payment entry and check if this mode of payment is still fetched.
payment_entries = frappe.get_all("Payment Entry", filters={"mode_of_payment": "Cash", "docstatus": 1}, fields=["name", "docstatus"])
for payment_entry in payment_entries:
pe = frappe.get_doc("Payment Entry", payment_entry.name)
pe.cancel()
mop = get_mode_of_payments(filters)
self.assertTrue('Credit Card' in mop.values()[0])
self.assertTrue('Cash' not in mop.values()[0])
def test_get_mode_of_payments_details(self):
si = frappe.get_all("Sales Invoice", fields=["name", "docstatus"])
filters = get_filters()
for invoice in si[:2]:
doc = frappe.get_doc("Sales Invoice", invoice.name)
new_doc = frappe.copy_doc(doc)
new_doc.insert()
new_doc.submit()
if int(new_doc.name[-3:])%2 == 0:
bank_account = "_Test Cash - _TC"
mode_of_payment = "Cash"
else:
bank_account = "_Test Bank - _TC"
mode_of_payment = "Credit Card"
pe = get_payment_entry("Sales Invoice", new_doc.name, bank_account=bank_account)
pe.reference_no = "_Test"
pe.reference_date = nowdate()
pe.mode_of_payment = mode_of_payment
pe.insert()
pe.submit()
mopd = get_mode_of_payment_details(filters)
mopd_values = mopd.values()[0]
for mopd_value in mopd_values:
if mopd_value[0] == "Credit Card":
cc_init_amount = mopd_value[1]
# Cancel one Credit Card Payment Entry and check that it is not fetched in mode of payment details.
payment_entries = frappe.get_all("Payment Entry", filters={"mode_of_payment": "Credit Card", "docstatus": 1}, fields=["name", "docstatus"])
for payment_entry in payment_entries[:1]:
pe = frappe.get_doc("Payment Entry", payment_entry.name)
pe.cancel()
mopd = get_mode_of_payment_details(filters)
mopd_values = mopd.values()[0]
for mopd_value in mopd_values:
if mopd_value[0] == "Credit Card":
cc_final_amount = mopd_value[1]
self.assertTrue(cc_init_amount > cc_final_amount)
def get_filters():
return {
"from_date": "1900-01-01",
"to_date": nowdate(),
"company": "_Test Company"
} | agpl-3.0 | Python |
|
37793ec10e2b27e64efaa3047ae89a6d10a6634d | Update urlrewrite_redirect.py | tobinjt/Flexget,crawln45/Flexget,crawln45/Flexget,ianstalk/Flexget,xfouloux/Flexget,tsnoam/Flexget,jawilson/Flexget,qvazzler/Flexget,oxc/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,tsnoam/Flexget,Danfocus/Flexget,JorisDeRieck/Flexget,ibrahimkarahan/Flexget,Flexget/Flexget,antivirtel/Flexget,ZefQ/Flexget,tvcsantos/Flexget,patsissons/Flexget,JorisDeRieck/Flexget,offbyone/Flexget,sean797/Flexget,vfrc2/Flexget,ZefQ/Flexget,qk4l/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,spencerjanssen/Flexget,drwyrm/Flexget,sean797/Flexget,Flexget/Flexget,lildadou/Flexget,jacobmetrick/Flexget,thalamus/Flexget,spencerjanssen/Flexget,Danfocus/Flexget,OmgOhnoes/Flexget,sean797/Flexget,dsemi/Flexget,ianstalk/Flexget,jawilson/Flexget,Danfocus/Flexget,crawln45/Flexget,oxc/Flexget,malkavi/Flexget,jacobmetrick/Flexget,grrr2/Flexget,v17al/Flexget,LynxyssCZ/Flexget,v17al/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,cvium/Flexget,vfrc2/Flexget,tarzasai/Flexget,Pretagonist/Flexget,tarzasai/Flexget,malkavi/Flexget,tvcsantos/Flexget,Flexget/Flexget,oxc/Flexget,poulpito/Flexget,offbyone/Flexget,thalamus/Flexget,OmgOhnoes/Flexget,antivirtel/Flexget,crawln45/Flexget,tsnoam/Flexget,jawilson/Flexget,v17al/Flexget,qk4l/Flexget,offbyone/Flexget,lildadou/Flexget,vfrc2/Flexget,ratoaq2/Flexget,Pretagonist/Flexget,cvium/Flexget,qk4l/Flexget,Danfocus/Flexget,Pretagonist/Flexget,ibrahimkarahan/Flexget,ratoaq2/Flexget,LynxyssCZ/Flexget,xfouloux/Flexget,camon/Flexget,JorisDeRieck/Flexget,grrr2/Flexget,lildadou/Flexget,tobinjt/Flexget,xfouloux/Flexget,camon/Flexget,poulpito/Flexget,ratoaq2/Flexget,grrr2/Flexget,cvium/Flexget,patsissons/Flexget,poulpito/Flexget,tobinjt/Flexget,jawilson/Flexget,qvazzler/Flexget,spencerjanssen/Flexget,dsemi/Flexget,patsissons/Flexget,Flexget/Flexget,ianstalk/Flexget,antivirtel/Flexget,tobinjt/Flexget,qvazzler/Flexget,drwyrm/Flexget,thalamus/Flexget,drwyrm/Flexget,gazpachoking/Flexget,ibrahimkarahan/Flexget,ZefQ/Flexget,jacobmetrick/Flexget,dsemi/Flexget,tarzasai/Flexget,gazpachoking/Flexget | flexget/plugins/urlrewrite_redirect.py | flexget/plugins/urlrewrite_redirect.py | from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('urlrewrite_redirect')
class UrlRewriteRedirect(object):
"""Rewrites urls which actually redirect somewhere else."""
def __init__(self):
self.processed = set()
def on_task_start(self):
self.processed = set()
def url_rewritable(self, task, entry):
if not any(entry['url'].startswith(adapter) for adapter in task.requests.adapters):
return False
return entry['url'] not in self.processed
def url_rewrite(self, task, entry):
try:
# Don't accidentally go online in unit tests
if task.manager.unit_test:
return
auth = None
if 'download_auth' in entry:
auth = entry['download_auth']
log.debug('Custom auth enabled for %s url_redirect: %s' % (entry['title'], entry['download_auth']))
r = task.requests.head(entry['url'], auth=auth)
if 300 <= r.status_code < 400 and 'location' in r.headers:
entry['url'] = r.headers['location']
except Exception:
pass
finally:
# Make sure we don't try to rewrite this url again
self.processed.add(entry['url'])
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteRedirect, 'urlrewrite_redirect', groups=['urlrewriter'], api_ver=2)
| from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.event import event
log = logging.getLogger('urlrewrite_redirect')
class UrlRewriteRedirect(object):
"""Rewrites urls which actually redirect somewhere else."""
def __init__(self):
self.processed = set()
def on_task_start(self):
self.processed = set()
def url_rewritable(self, task, entry):
if not any(entry['url'].startswith(adapter) for adapter in task.requests.adapters):
return False
return entry['url'] not in self.processed
def url_rewrite(self, task, entry):
try:
# Don't accidentally go online in unit tests
if task.manager.unit_test:
return
r = task.requests.head(entry['url'])
if 300 <= r.status_code < 400 and 'location' in r.headers:
entry['url'] = r.headers['location']
except Exception:
pass
finally:
# Make sure we don't try to rewrite this url again
self.processed.add(entry['url'])
@event('plugin.register')
def register_plugin():
plugin.register(UrlRewriteRedirect, 'urlrewrite_redirect', groups=['urlrewriter'], api_ver=2)
| mit | Python |
e4ca040124e26b06a11e7fb51c3622a213285d24 | Create thresholding.py | jeffsp/kaggle_denoising,jeffsp/kaggle_denoising | thresholding.py | thresholding.py | import numpy as np
from PIL import Image
def discretize(a):
return np.uint8((a > 50)*255)
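# A quick illustration of discretize (input values chosen arbitrarily):
#   discretize(np.array([[10, 200]])) -> array([[  0, 255]], dtype=uint8)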
image_id = 101
dirty_image_path = "../input/train/%d.png" % image_id
clean_image_path = "../input/train_cleaned/%d.png" % image_id
dirty = Image.open(dirty_image_path)
clean = Image.open(clean_image_path)
dirty.save("dirty.png")
clean.save("clean.png")
clean_array = np.asarray(clean)
dirty_array = np.asarray(dirty)
discretized_array = discretize(dirty_array)
Image.fromarray(discretized_array).save("discretized.png")
html = """<html>
<body>
<h1>Thresholding</h1>
<p>This is a very simple attempt to clean up an image by thresholding the pixel value at 50. (Under 50 goes to 0, above 50 goes to 255.)</p>
<h2>Dirty image</h2>
<img src="dirty.png">
<h2>Cleaned up by thresholding</h2>
<img src="discretized.png">
<h2>Original clean image</h2>
<img src="clean.png">
</body>
</html>
"""
with open("output.html", "w") as output_file:
output_file.write(html)
| mit | Python |
|
d78444cdb6018e2fe49905638ce7645e8de5738b | add util/csv_melt.py | shenwei356/bio_scripts,shenwei356/bio_scripts,shenwei356/bio_scripts,shenwei356/bio_scripts | util/csv_melt.py | util/csv_melt.py | #!/usr/bin/env python
# https://github.com/shenwei356/bio_scripts
import argparse
import csv
import re
import sys
import pandas as pd
parser = argparse.ArgumentParser(
    description="Melt a CSV file, optionally appending a new column",
epilog="https://github.com/shenwei356/bio_scripts")
parser.add_argument(
'key',
type=str,
help=
    'Column name of key in csvfile. Multiple values should be separated by comma')
parser.add_argument('csvfile', type=str, help='CSV file with head row!')
parser.add_argument('--var_name',
type=str,
default='var_name',
help='name to use for the "variable" column')
parser.add_argument('--value_name',
type=str,
default='value_name',
help='name to use for the "value" column')
parser.add_argument('-a',
'--append',
type=str,
help='another column. format: column=value')
parser.add_argument('-o', '--outfile', type=str, help='output file')
parser.add_argument('--fs', type=str, default=",", help='field separator [,]')
parser.add_argument('--fs-out',
type=str,
                    help='field separator of output [same as --fs]')
parser.add_argument('--qc', type=str, default='"', help='Quote char["]')
parser.add_argument('-t',
action='store_true',
help='field separator is "\\t". Quote char is "\\t"')
args = parser.parse_args()
if args.t:
args.fs, args.qc = '\t', '\t'
if not args.fs_out:
args.fs_out = args.fs
pattern = '^([^=]+)=([^=]+)$'
if args.append:
if not re.search(pattern, args.append):
sys.stderr.write("bad format for option -a: {}".format(args.append))
sys.exit(1)
    colname, colvalue = re.findall(pattern, args.append)[0]
keys = list()
if ',' in args.key:
keys = [k for k in args.key.split(',')]
else:
keys = [args.key]
# ------------------------------------------------------------
df = pd.read_csv(args.csvfile,
sep=args.fs,
quotechar=args.qc) # , index_col=keys)
df = pd.melt(df,
id_vars=keys,
var_name=args.var_name,
value_name=args.value_name)
if args.append:
df[colname] = pd.Series([colvalue] * len(df))
if args.outfile:
df.to_csv(args.outfile, sep=args.fs, quotechar=args.qc, index=0)
else:
df.to_csv(sys.stdout, sep=args.fs, quotechar=args.qc, index=0)
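# Example invocation (file and column names are illustrative):
#   python csv_melt.py sample data.csv --var_name metric --value_name score -a batch=b1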
| mit | Python |
|
326010629c6d5bb1274d1db1231f5b84c394b4e4 | Add some api tests for ZHA (#20909) | sander76/home-assistant,mKeRix/home-assistant,turbokongen/home-assistant,partofthething/home-assistant,nugget/home-assistant,robbiet480/home-assistant,leppa/home-assistant,titilambert/home-assistant,joopert/home-assistant,aronsky/home-assistant,rohitranjan1991/home-assistant,nkgilley/home-assistant,jabesq/home-assistant,sander76/home-assistant,joopert/home-assistant,tboyce1/home-assistant,GenericStudent/home-assistant,soldag/home-assistant,w1ll1am23/home-assistant,adrienbrault/home-assistant,jamespcole/home-assistant,Teagan42/home-assistant,lukas-hetzenecker/home-assistant,balloob/home-assistant,jamespcole/home-assistant,leppa/home-assistant,sdague/home-assistant,aequitas/home-assistant,w1ll1am23/home-assistant,qedi-r/home-assistant,jabesq/home-assistant,nugget/home-assistant,jawilson/home-assistant,auduny/home-assistant,tboyce1/home-assistant,aronsky/home-assistant,Teagan42/home-assistant,molobrakos/home-assistant,auduny/home-assistant,GenericStudent/home-assistant,tchellomello/home-assistant,mezz64/home-assistant,fbradyirl/home-assistant,jnewland/home-assistant,toddeye/home-assistant,sdague/home-assistant,titilambert/home-assistant,mKeRix/home-assistant,molobrakos/home-assistant,jawilson/home-assistant,toddeye/home-assistant,kennedyshead/home-assistant,fbradyirl/home-assistant,soldag/home-assistant,FreekingDean/home-assistant,DavidLP/home-assistant,tboyce021/home-assistant,home-assistant/home-assistant,rohitranjan1991/home-assistant,nugget/home-assistant,qedi-r/home-assistant,HydrelioxGitHub/home-assistant,kennedyshead/home-assistant,auduny/home-assistant,balloob/home-assistant,Danielhiversen/home-assistant,Cinntax/home-assistant,HydrelioxGitHub/home-assistant,pschmitt/home-assistant,fbradyirl/home-assistant,MartinHjelmare/home-assistant,tboyce1/home-assistant,jabesq/home-assistant,nkgilley/home-assistant,molobrakos/home-assistant,Danielhiversen/home-assistant,mKeRix/home-assistant,MartinHjelmare/home-assistant,MartinHjelmare/home-assistant,aequitas/home-assistant,balloob/home-assistant,tboyce021/home-assistant,DavidLP/home-assistant,aequitas/home-assistant,postlund/home-assistant,tboyce1/home-assistant,rohitranjan1991/home-assistant,mKeRix/home-assistant,postlund/home-assistant,HydrelioxGitHub/home-assistant,jnewland/home-assistant,lukas-hetzenecker/home-assistant,FreekingDean/home-assistant,DavidLP/home-assistant,jnewland/home-assistant,partofthething/home-assistant,robbiet480/home-assistant,adrienbrault/home-assistant,home-assistant/home-assistant,tchellomello/home-assistant,turbokongen/home-assistant,jamespcole/home-assistant,mezz64/home-assistant,Cinntax/home-assistant,pschmitt/home-assistant | tests/components/zha/test_api.py | tests/components/zha/test_api.py | """Test ZHA API."""
from unittest.mock import Mock
import pytest
from homeassistant.const import ATTR_ENTITY_ID
from homeassistant.components.switch import DOMAIN
from homeassistant.components.zha.api import (
async_load_api, WS_ENTITIES_BY_IEEE, WS_ENTITY_CLUSTERS, ATTR_IEEE, TYPE,
ID, NAME, WS_ENTITY_CLUSTER_ATTRIBUTES, WS_ENTITY_CLUSTER_COMMANDS
)
from homeassistant.components.zha.core.const import (
ATTR_CLUSTER_ID, ATTR_CLUSTER_TYPE, IN
)
from .common import async_init_zigpy_device
@pytest.fixture
async def zha_client(hass, config_entry, zha_gateway, hass_ws_client):
"""Test zha switch platform."""
from zigpy.zcl.clusters.general import OnOff
# load the ZHA API
async_load_api(hass, Mock(), zha_gateway)
# create zigpy device
await async_init_zigpy_device(
hass, [OnOff.cluster_id], [], None, zha_gateway)
# load up switch domain
await hass.config_entries.async_forward_entry_setup(
config_entry, DOMAIN)
await hass.async_block_till_done()
return await hass_ws_client(hass)
async def test_entities_by_ieee(hass, config_entry, zha_gateway, zha_client):
"""Test getting entity refs by ieee address."""
await zha_client.send_json({
ID: 5,
TYPE: WS_ENTITIES_BY_IEEE,
})
msg = await zha_client.receive_json()
assert '00:0d:6f:00:0a:90:69:e7' in msg['result']
assert len(msg['result']['00:0d:6f:00:0a:90:69:e7']) == 2
async def test_entity_clusters(hass, config_entry, zha_gateway, zha_client):
"""Test getting entity cluster info."""
await zha_client.send_json({
ID: 5,
TYPE: WS_ENTITY_CLUSTERS,
ATTR_ENTITY_ID: 'switch.fakemanufacturer_fakemodel_0a9069e7_1_6',
ATTR_IEEE: '00:0d:6f:00:0a:90:69:e7'
})
msg = await zha_client.receive_json()
assert len(msg['result']) == 1
cluster_info = msg['result'][0]
assert cluster_info[TYPE] == IN
assert cluster_info[ID] == 6
assert cluster_info[NAME] == 'OnOff'
async def test_entity_cluster_attributes(
hass, config_entry, zha_gateway, zha_client):
"""Test getting entity cluster attributes."""
await zha_client.send_json({
ID: 5,
TYPE: WS_ENTITY_CLUSTER_ATTRIBUTES,
ATTR_ENTITY_ID: 'switch.fakemanufacturer_fakemodel_0a9069e7_1_6',
ATTR_IEEE: '00:0d:6f:00:0a:90:69:e7',
ATTR_CLUSTER_ID: 6,
ATTR_CLUSTER_TYPE: IN
})
msg = await zha_client.receive_json()
attributes = msg['result']
assert len(attributes) == 4
for attribute in attributes:
assert attribute[ID] is not None
assert attribute[NAME] is not None
async def test_entity_cluster_commands(
hass, config_entry, zha_gateway, zha_client):
"""Test getting entity cluster commands."""
await zha_client.send_json({
ID: 5,
TYPE: WS_ENTITY_CLUSTER_COMMANDS,
ATTR_ENTITY_ID: 'switch.fakemanufacturer_fakemodel_0a9069e7_1_6',
ATTR_IEEE: '00:0d:6f:00:0a:90:69:e7',
ATTR_CLUSTER_ID: 6,
ATTR_CLUSTER_TYPE: IN
})
msg = await zha_client.receive_json()
commands = msg['result']
assert len(commands) == 6
for command in commands:
assert command[ID] is not None
assert command[NAME] is not None
assert command[TYPE] is not None
| mit | Python |
|
0b1fc2eb8dad6e5b41e80c5b0d97b9f8a20f9afa | Add utils.py | ssut/py-krcurrency | krcurrency/utils.py | krcurrency/utils.py | """:mod:`krcurrency.utils` --- Helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
"""
from bs4 import BeautifulSoup as BS
import requests
__all__ = 'request',
def request(url, encoding='utf-8', parselib='lxml'):
"""url로 요청한 후 돌려받은 값을 BeautifulSoup 객체로 변환해서 반환합니다.
"""
r = requests.get(url)
if r.status_code != 200:
return None
soup = None
try:
        soup = BS(r.text, parselib)
except Exception as e:
pass
return soup
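# Usage sketch (the URL is illustrative):
#
#     soup = request('http://example.com/rates')
#     if soup is not None:
#         print(soup.title)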
| mit | Python |
|
76a2c80b015228dd4c6aa932ca9b2faece23a714 | Create multiplesof3and5.py | GirlGeekUpNorth/Project-Euler | multiplesof3and5.py | multiplesof3and5.py | #If we list all the natural numbers below 10 that are multiples of 3 or 5, we get 3, 5, 6 and 9.
#The sum of these multiples is 23.
#Find the sum of all the multiples of 3 or 5 below 1000.
answer = 0
for i in range(1, 1000):
    if i % 3 == 0 or i % 5 == 0:
        answer = answer + i
print answer
| mit | Python |
|
2d1624f088431e5f71214988499f732695a82b16 | Bump version 0.15.0rc3 --> 0.15.0rc4 | lbryio/lbry,lbryio/lbry,zestyr/lbry,lbryio/lbry,zestyr/lbry,zestyr/lbry | lbrynet/__init__.py | lbrynet/__init__.py | import logging
__version__ = "0.15.0rc4"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| import logging
__version__ = "0.15.0rc3"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| mit | Python |
5dd4deba3d5a53406e735aadad5ac917919b3852 | add tests for PlotableObject | morgenst/PyAnalysisTools,morgenst/PyAnalysisTools,morgenst/PyAnalysisTools | tests/unit/TestPlotableObject.py | tests/unit/TestPlotableObject.py | import os
import unittest
import ROOT
from PyAnalysisTools.PlottingUtils import PlotableObject as po
cwd = os.path.dirname(__file__)
ROOT.gROOT.SetBatch(True)
class TestPlotableObject(unittest.TestCase):
def test_ctor(self):
obj = po.PlotableObject()
self.assertIsNone(obj.plot_object)
self.assertTrue(obj.is_ref)
self.assertEqual(-1, obj.ref_id)
self.assertEqual('', obj.label)
self.assertIsNone(obj.cuts)
self.assertIsNone(obj.process)
self.assertEqual('Marker', obj.draw_option)
self.assertEqual('Marker', obj.draw)
self.assertEqual(1, obj.marker_color)
self.assertEqual(1, obj.marker_size)
self.assertEqual(1, obj.marker_style)
self.assertEqual(1, obj.line_color)
self.assertEqual(1, obj.line_width)
self.assertEqual(1, obj.line_style)
self.assertEqual(0, obj.fill_color)
self.assertEqual(0, obj.fill_style)
def tests_palettes(self):
color_palette = [ROOT.kGray + 3, ROOT.kRed + 2, ROOT.kAzure + 4, ROOT.kSpring - 6, ROOT.kOrange - 3,
ROOT.kCyan - 3, ROOT.kPink - 2, ROOT.kSpring - 9, ROOT.kMagenta - 5]
marker_style_palette_filled = [20, 22, 23, 33, 34, 29, 2]
marker_style_palette_empty = [24, 26, 32, 27, 28, 30, 5]
line_style_palette_homogen = [1, 1, 1, 1, 1]
line_style_palette_heterogen = [1, 1, 4, 8, 6]
fill_style_palette_left = [3305, 3315, 3325, 3335, 3345, 3365, 3375, 3385]
fill_style_palette_right = [3359, 3351, 3352, 3353, 3354, 3356, 3357, 3358]
self.assertEqual(color_palette, po.color_palette)
self.assertEqual(marker_style_palette_filled, po.marker_style_palette_filled)
self.assertEqual(marker_style_palette_empty, po.marker_style_palette_empty)
self.assertEqual(line_style_palette_homogen, po.line_style_palette_homogen)
self.assertEqual(line_style_palette_heterogen, po.line_style_palette_heterogen)
self.assertEqual(fill_style_palette_left, po.fill_style_palette_left)
self.assertEqual(fill_style_palette_right, po.fill_style_palette_right)
| mit | Python |
|
854a1ab7c13b4d4d8e28ab13f0cdaef5c1fcb9a6 | Create solution.py | lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges,lilsweetcaligula/Online-Judges | hackerrank/algorithms/warmup/easy/compare_the_triplets/py/solution.py | hackerrank/algorithms/warmup/easy/compare_the_triplets/py/solution.py | #!/bin/python3
import sys
cmp = lambda a, b: (a > b) - (b > a)
aliceScores = tuple(map(int, input().strip().split(' ')))
bobScores = tuple(map(int, input().strip().split(' ')))
scoreCmp = tuple(map(lambda a, b: cmp(a, b), aliceScores, bobScores))
aliceScore = len(tuple(filter(lambda x: x > 0, scoreCmp)))
bobScore = len(tuple(filter(lambda x: x < 0, scoreCmp)))
print(aliceScore, bobScore)
| mit | Python |
|
4764b5248cf91042a12ce6aef77a04c37360eb4f | Add initial shell of Pyglab class. | sloede/pyglab,sloede/pyglab | pyglab/pyglab.py | pyglab/pyglab.py | import requests
class Pyglab(object):
def __init__(self, token):
self.token = token
        self.headers = {'PRIVATE-TOKEN': token}
self.user = None
def sudo(self, user):
"""Permanently set a different username. Returns the old username."""
previous_user = self.user
self.user = user
return previous_user
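# Usage sketch (the token value is a placeholder, not a real credential):
#
#     gl = Pyglab('secret-token')
#     previous = gl.sudo('alice')  # subsequent requests act as 'alice'
#     gl.sudo(previous)            # restore the prior user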
| mit | Python |
|
70e04b20c5d78b41546aa4ea1a1e2fd82af7527f | Add JSON HttpResponse that does the encoding for you. | bueda/django-comrade | comrade/http/__init__.py | comrade/http/__init__.py | from django.core.serializers import json, serialize
from django.db.models.query import QuerySet
from django.http import HttpResponse
from django.utils import simplejson
class HttpJsonResponse(HttpResponse):
def __init__(self, object, status=None):
if isinstance(object, QuerySet):
content = serialize('json', object)
else:
content = simplejson.dumps(object, cls=json.DjangoJSONEncoder,
ensure_ascii=False)
        super(HttpJsonResponse, self).__init__(
            content, content_type='application/json', status=status)
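# Example views using this response class (a sketch; the User model and view
# names are hypothetical, not part of this module):
#
#     def user_list(request):
#         return HttpJsonResponse(User.objects.all())        # QuerySet branch
#
#     def health(request):
#         return HttpJsonResponse({'ok': True}, status=200)  # plain-object branch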
| mit | Python |
|
d3ba2b8cf84ba54d932fcc48b464f125798c0b27 | Add simple bash with git install script | nephomaniac/nephoria,nephomaniac/nephoria | toolbox/install_script_git.sh.py | toolbox/install_script_git.sh.py | #!/bin/bash
venv="nephoria_venv"
neph_branch="oldboto"
adminapi_branch="master"
yum install -y python-devel gcc git python-setuptools python-virtualenv
if [ ! -d adminapi ]; then
git clone https://github.com/nephomaniac/adminapi.git
fi
if [ ! -d nephoria ]; then
git clone https://github.com/nephomaniac/nephoria.git
fi
if [ "x$venv" != "x" ]; then
if [ ! -d $venv ]; then
virtualenv $venv
fi
source $venv/bin/activate
fi
cd adminapi
git checkout $adminapi_branch
python setup.py install
cd -
cd nephoria
git checkout $neph_branch
python setup.py install
cd - | bsd-2-clause | Python |
|
5ae194cacef0a24c3d6a0714d3f435939973b3cb | Add some helpful utilities | drkitty/python3-base,drkitty/python3-base | utils.py | utils.py | from functools import wraps
def cached_property(f):
name = f.__name__
@property
@wraps(f)
def inner(self):
if not hasattr(self, "_property_cache"):
self._property_cache = {}
if name not in self._property_cache:
self._property_cache[name] = f(self)
return self._property_cache[name]
return inner
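# Usage sketch for cached_property (the Circle class is hypothetical):
#
#     class Circle:
#         def __init__(self, r):
#             self.r = r
#
#         @cached_property
#         def area(self):
#             print("computing")  # runs only on the first access
#             return 3.14159 * self.r ** 2
#
#     c = Circle(2)
#     c.area  # computes and stores the value in c._property_cache
#     c.area  # returns the cached value without recomputing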
class Constant():
def __init__(self, x):
self.x = x
def __repr__(self):
return self.x
| unlicense | Python |
|
93b1253389075174fa942e848d6c1f7666ffc906 | add solution for Combination Sum II | zhyu/leetcode,zhyu/leetcode | src/combinationSumII.py | src/combinationSumII.py | class Solution:
# @param candidates, a list of integers
# @param target, integer
# @return a list of lists of integers
def combinationSum2(self, candidates, target):
if not candidates:
return []
candidates.sort()
n = len(candidates)
res = set()
def solve(start, target, tmp):
if target < 0:
return
if target == 0:
res.add(tuple(tmp))
return
for i in xrange(start, n):
tmp.append(candidates[i])
solve(i+1, target-candidates[i], tmp)
tmp.pop()
solve(0, target, [])
return map(list, res)
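# Example (the classic LeetCode case):
#   Solution().combinationSum2([10, 1, 2, 7, 6, 1, 5], 8)
#   -> [[1, 7], [1, 2, 5], [2, 6], [1, 1, 6]] (inner lists sorted; outer order unspecified)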
| mit | Python |
|
19d5b2f58d712f49638dad83996f9e60a6ebc949 | Add a release script. | n-someya/pgcli,koljonen/pgcli,joewalnes/pgcli,janusnic/pgcli,darikg/pgcli,janusnic/pgcli,lk1ngaa7/pgcli,d33tah/pgcli,thedrow/pgcli,johshoff/pgcli,darikg/pgcli,dbcli/vcli,bitemyapp/pgcli,thedrow/pgcli,nosun/pgcli,suzukaze/pgcli,stuartquin/pgcli,dbcli/vcli,zhiyuanshi/pgcli,suzukaze/pgcli,MattOates/pgcli,dbcli/pgcli,j-bennet/pgcli,TamasNo1/pgcli,lk1ngaa7/pgcli,n-someya/pgcli,koljonen/pgcli,MattOates/pgcli,yx91490/pgcli,bitmonk/pgcli,TamasNo1/pgcli,johshoff/pgcli,nosun/pgcli,joewalnes/pgcli,bitemyapp/pgcli,j-bennet/pgcli,zhiyuanshi/pgcli,d33tah/pgcli,yx91490/pgcli,w4ngyi/pgcli,bitmonk/pgcli,w4ngyi/pgcli,dbcli/pgcli | release.py | release.py | #!/usr/bin/env python
import re
import ast
import subprocess
def version():
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('pgcli/__init__.py', 'rb') as f:
version = str(ast.literal_eval(_version_re.search(
f.read().decode('utf-8')).group(1)))
return version
def create_git_tag(tag_name):
cmd = ['git', 'tag', tag_name]
print ' '.join(cmd)
subprocess.check_output(cmd)
def register_with_pypi():
cmd = ['python', 'setup.py', 'register']
print ' '.join(cmd)
subprocess.check_output(cmd)
def create_source_tarball():
cmd = ['python', 'setup.py', 'sdist']
print ' '.join(cmd)
subprocess.check_output(cmd)
if __name__ == '__main__':
ver = version()
print ver
create_git_tag('v%s' % ver)
register_with_pypi()
create_source_tarball()
| bsd-3-clause | Python |
|
0003b3fe31a1b92dda994b2f7eacf6cef7e08ce4 | Add check_blocked.py | osamak/okbot,osamak/okbot | check_blocked.py | check_blocked.py | # This script is licensed under the GNU Affero General Public License
# either version 3 of the License, or (at your option) any later
# version.
#
# This script was tested on a GNU/Linux operating system.
#
# To run this script:
# 1) Download the list of articles for the Wikipedia edition that
# you want to scan from http://download.wikimedia.org.
# 2) Using the 'split' command, split the article list into pieces. This
#    will result in files that start with 'x', e.g. 'xaa', 'xab', etc.
# 3) If you are working on a Wikipedia edition that's different from
# the Arabic one, change self.lang_code into the code of your
# edition.
# 4) Run the script from the directory of the split files.
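#
# Example of step 2 (the dump file name and line count are illustrative):
#   split -l 5000 arwiki-latest-all-titles-in-ns0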
import urllib2
import time
import os
import codecs
import shelve
class checkBlocked:
def __init__(self):
self.lang_code = 'ar'
self.list_directory = os.getcwd()
self.list_files = [i for i in os.listdir('.') if i.startswith('x')]
self.list_files.sort()
def fetch_list(self, next_list, old_list):
if old_list is not None:
print "Removing list", old_list
os.remove(self.list_directory+'/'+old_list)
list_lines = codecs.open(self.list_directory+'/'+next_list, 'r', encoding="utf-8").readlines()
list_items = [i.strip() for i in list_lines]
return list_items
def is_blocked(self, list_item):
url = "http://%s.wikipedia.org/wiki/" % self.lang_code + urllib2.quote(list_item.encode('utf8'))
print url
while True:
try:
urllib2.urlopen(url)
except urllib2.HTTPError:
print list_item, "isn't blocked."
return False
except urllib2.URLError:
print "Error, retrying..."
time.sleep(1)
continue
print list_item, "is blocked."
return True
def run(self):
old_list = None
try:
for list_file in self.list_files:
database = shelve.open("check_blocked.db")
list_items = self.fetch_list(list_file, old_list)
for list_item in list_items:
if self.is_blocked(list_item):
                        database_key = str(len(database))
                        database[database_key] = list_item
old_list = list_file
database.close()
except KeyboardInterrupt:
print "Existing..."
database.close()
if __name__ == '__main__':
bot = checkBlocked()
bot.run()
| agpl-3.0 | Python |
|
9b3e0c7eb28a67e2383cad6cbfa97fc4fd575756 | Add error classification | ContinuumIO/pypi-conda-builds | classify_logs.py | classify_logs.py | import re
import yaml
error_types = ["no package found",
               "missing test dependency",
               "unclassified"]
def classify_build_log(log_file):
"""
Takes a build log file object as an input and returns
    a tuple `(category, sub-category, sub-category)`
- missing dependency:
- Build Dependency
- Test Dependency
- Runtime error (other than missing dependency)
"""
log = log_file.readlines()
if no_packages_found(log):
return "no package found"
if has_missing_test_dependency(log):
return "missing test dependency"
return "unclassified"
def has_missing_test_dependency(log):
"""
Return: (Status, missing packages)
"""
    # Stub: dependency parsing is not implemented yet, so report nothing missing.
    return None
def no_packages_found(log):
p = re.compile(r"Error: No packages found")
return any([re.match(p, line) for line in log])
def classify_all_logs():
packages = yaml.load(file('packages.yaml', 'r'))
log_dir = "./logs/"
for package in packages:
if package['build'] is False:
log_file_name = log_dir + "%s_build.log" % (package['name'])
log_file = open(log_file_name, 'r')
error_type = classify_build_log(log_file)
else:
error_type = None
package['build_error_type'] = error_type
    open('packages.yaml', 'w').write(yaml.dump(packages))
if __name__ == "__main__":
classify_all_logs()
| bsd-3-clause | Python |
|
aafa99714eff3c5021594ae5021bdd47b41c9c6b | save tpl environs after invoke shell constructor | faycheng/tpl,faycheng/tpl | assets/save_tpl_envs.py | assets/save_tpl_envs.py | # -*- coding:utf-8 -*-
import os
import sys
import json
def save_tpl_envs(path):
envs = {}
for key, value in os.environ.items():
if key.startswith('TPL_'):
envs[key[4:]] = value
with open(path, 'w') as fd:
fd.write(json.dumps(envs))
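# Example: with TPL_HOME=/opt/tpl set in the environment, the written JSON
# contains {"HOME": "/opt/tpl"} -- the 'TPL_' prefix is stripped from keys.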
if __name__ == '__main__':
path = sys.argv[1]
save_tpl_envs(path)
| mit | Python |
|
af9b0ee39d18ca174b19143bdda0d478c4d5a834 | add a driver for hourly reporting | akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem | scripts/iemre/rerun_hourly.py | scripts/iemre/rerun_hourly.py | import mx.DateTime
import stage4_hourlyre
sts = mx.DateTime.DateTime(2010,5,1)
ets = mx.DateTime.DateTime(2010,5,13)
interval = mx.DateTime.RelativeDateTime(hours=1)
now = sts
while now < ets:
print now
stage4_hourlyre.merge( now )
now += interval
| mit | Python |
|
0920a23a72e1e14179b75b4d2a50e956ee9deec0 | add skeleton generation file | dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy | disaggregator/generate.py | disaggregator/generate.py | from appliance import ApplianceTrace
from appliance import ApplianceInstance
from appliance import ApplianceSet
from appliance import ApplianceType
import fhmm
| mit | Python |
|
8ccab210054c2776a36b7e3648fa1e27eb49a27b | add deeplearning cross-validation NOPASS. PUBDEV-1696. | YzPaul3/h2o-3,mathemage/h2o-3,PawarPawan/h2o-v3,tarasane/h2o-3,weaver-viii/h2o-3,michalkurka/h2o-3,h2oai/h2o-dev,nilbody/h2o-3,h2oai/h2o-dev,weaver-viii/h2o-3,h2oai/h2o-3,jangorecki/h2o-3,mrgloom/h2o-3,brightchen/h2o-3,bospetersen/h2o-3,tarasane/h2o-3,spennihana/h2o-3,h2oai/h2o-dev,nilbody/h2o-3,h2oai/h2o-3,michalkurka/h2o-3,kyoren/https-github.com-h2oai-h2o-3,jangorecki/h2o-3,h2oai/h2o-dev,h2oai/h2o-3,jangorecki/h2o-3,jangorecki/h2o-3,madmax983/h2o-3,brightchen/h2o-3,michalkurka/h2o-3,mathemage/h2o-3,ChristosChristofidis/h2o-3,YzPaul3/h2o-3,printedheart/h2o-3,pchmieli/h2o-3,h2oai/h2o-3,mathemage/h2o-3,weaver-viii/h2o-3,nilbody/h2o-3,kyoren/https-github.com-h2oai-h2o-3,kyoren/https-github.com-h2oai-h2o-3,nilbody/h2o-3,tarasane/h2o-3,tarasane/h2o-3,pchmieli/h2o-3,tarasane/h2o-3,mathemage/h2o-3,YzPaul3/h2o-3,PawarPawan/h2o-v3,michalkurka/h2o-3,datachand/h2o-3,brightchen/h2o-3,mathemage/h2o-3,junwucs/h2o-3,junwucs/h2o-3,brightchen/h2o-3,bospetersen/h2o-3,PawarPawan/h2o-v3,ChristosChristofidis/h2o-3,datachand/h2o-3,junwucs/h2o-3,junwucs/h2o-3,bospetersen/h2o-3,mrgloom/h2o-3,h2oai/h2o-dev,nilbody/h2o-3,YzPaul3/h2o-3,kyoren/https-github.com-h2oai-h2o-3,ChristosChristofidis/h2o-3,PawarPawan/h2o-v3,mrgloom/h2o-3,PawarPawan/h2o-v3,brightchen/h2o-3,spennihana/h2o-3,datachand/h2o-3,tarasane/h2o-3,bospetersen/h2o-3,mrgloom/h2o-3,datachand/h2o-3,jangorecki/h2o-3,printedheart/h2o-3,pchmieli/h2o-3,spennihana/h2o-3,h2oai/h2o-3,ChristosChristofidis/h2o-3,datachand/h2o-3,tarasane/h2o-3,YzPaul3/h2o-3,h2oai/h2o-3,ChristosChristofidis/h2o-3,h2oai/h2o-3,mathemage/h2o-3,ChristosChristofidis/h2o-3,weaver-viii/h2o-3,kyoren/https-github.com-h2oai-h2o-3,spennihana/h2o-3,datachand/h2o-3,spennihana/h2o-3,pchmieli/h2o-3,pchmieli/h2o-3,michalkurka/h2o-3,bospetersen/h2o-3,kyoren/https-github.com-h2oai-h2o-3,madmax983/h2o-3,pchmieli/h2o-3,mrgloom/h2o-3,YzPaul3/h2o-3,printedheart/h2o-3,weaver-viii/h2o-3,jangorecki/h2o-3,YzPaul3/h2o-3,madmax983/h2o-3,nilbody/h2o-3,junwucs/h2o-3,printedheart/h2o-3,PawarPawan/h2o-v3,junwucs/h2o-3,h2oai/h2o-3,weaver-viii/h2o-3,PawarPawan/h2o-v3,h2oai/h2o-dev,michalkurka/h2o-3,mrgloom/h2o-3,pchmieli/h2o-3,printedheart/h2o-3,brightchen/h2o-3,junwucs/h2o-3,h2oai/h2o-dev,printedheart/h2o-3,bospetersen/h2o-3,brightchen/h2o-3,bospetersen/h2o-3,madmax983/h2o-3,michalkurka/h2o-3,spennihana/h2o-3,mrgloom/h2o-3,ChristosChristofidis/h2o-3,datachand/h2o-3,spennihana/h2o-3,jangorecki/h2o-3,madmax983/h2o-3,nilbody/h2o-3,kyoren/https-github.com-h2oai-h2o-3,madmax983/h2o-3,printedheart/h2o-3,madmax983/h2o-3,mathemage/h2o-3,weaver-viii/h2o-3 | h2o-py/tests/testdir_algos/deeplearning/pyunit_NOPASS_cv_carsDeepLearning.py | h2o-py/tests/testdir_algos/deeplearning/pyunit_NOPASS_cv_carsDeepLearning.py | import sys
sys.path.insert(1, "../../../")
import h2o
import random
def cv_carsDL(ip,port):
# Connect to h2o
h2o.init(ip,port)
# read in the dataset and construct training set (and validation set)
cars = h2o.import_frame(path=h2o.locate("smalldata/junit/cars_20mpg.csv"))
# choose the type model-building exercise (multinomial classification or regression). 0:regression, 1:binomial,
# 2:multinomial
problem = random.sample(range(3),1)[0]
# pick the predictors and the correct response column
predictors = ["displacement","power","weight","acceleration","year"]
if problem == 1 :
response_col = "economy_20mpg"
cars[response_col] = cars[response_col].asfactor()
elif problem == 2 :
response_col = "cylinders"
cars[response_col] = cars[response_col].asfactor()
else :
response_col = "economy"
print "Response column: {0}".format(response_col)
## cross-validation
## basic
nfolds = random.randint(3,10)
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=nfolds)
## boundary case
# nfolds = 0
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=0)
## error cases
# 1. nfolds == 1 or < 0
# TODO: PUBDEV-1696
try:
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=random.randint(-10000,-1))
dl = h2o.deeplearning(y=cars[response_col], x=cars[predictors], nfolds=1)
assert False, "Expected model-build to fail when nfolds is 1 or < 0"
except EnvironmentError:
assert True
# 2. cross-validation and regular validation attempted
r = cars[0].runif()
train = cars[r > .2]
valid = cars[r <= .2]
try:
dl = h2o.deeplearning(y=train[response_col], x=train[predictors], nfolds=random.randint(3,10),
validation_y=valid[1], validation_x=valid[predictors])
assert False, "Expected model-build to fail when both cross-validation and regular validation is attempted"
except EnvironmentError:
assert True
# TODO: what should the model metrics look like? add cross-validation metric check to pyunit_metric_json_check.
if __name__ == "__main__":
h2o.run_test(sys.argv, cv_carsDL) | apache-2.0 | Python |
|
1483f6cece70cb5de115ea1edc630e98292a8170 | Add Sorting/Selection.py & Selection() | besirkurtulmus/AdvancedAlgorithms | Sorting/Selection.py | Sorting/Selection.py | # @auther Besir Kurtulmus
# coding: utf-8
'''
The MIT License (MIT)
Copyright (c) 2014 Ahmet Besir Kurtulmus
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
'''
from random import choice
from MergeSort import RandomList
def Selection(l, k):
"""
    Description: randomized quickselect -- return the k-th smallest element of l.
    Args: l -- a list of comparable items; k -- the 1-based rank (1 = smallest).
    Examples: Selection([7, 2, 9, 4], 2) -> 4
"""
v = choice(l)
sL = []
sR = []
sV = []
for i in l:
if i < v:
sL.append(i)
elif i == v:
sV.append(i)
elif i > v:
sR.append(i)
    if k <= len(sL):
        return Selection(sL, k)
    elif k <= (len(sL) + len(sV)):
        return v
    else:
        return Selection(sR, k - len(sL) - len(sV))
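# Quick sanity check (run manually):
#   Selection([7, 2, 9, 4], 2) -> 4  (the 2nd smallest element)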
| mit | Python |
|
973696b0c50f235cfcef9e0cb30c6fc2f1028058 | add an index for the story_storytags table | ColdrickSotK/storyboard,ColdrickSotK/storyboard,ColdrickSotK/storyboard | storyboard/db/migration/alembic_migrations/versions/063_index_story_storytags.py | storyboard/db/migration/alembic_migrations/versions/063_index_story_storytags.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""index story_storytags
Revision ID: a6e048164572
Revises: 062
Create Date: 2018-06-25 17:13:43.992561
"""
# revision identifiers, used by Alembic.
revision = '063'
down_revision = '062'
from alembic import op
def upgrade(active_plugins=None, options=None):
op.create_index('story_storytags_idx',
'story_storytags', ['story_id'])
def downgrade(active_plugins=None, options=None):
op.drop_index('story_storytags_idx')
| apache-2.0 | Python |
|
bd4153ff3c0824f7e901dd25e77cdaaeea2072c0 | add tests for basic outdoor pois | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | test/662-basic-outdoor-pois.py | test/662-basic-outdoor-pois.py | #http://www.openstreetmap.org/node/1387024181
assert_has_feature(
16, 10550, 25297, 'pois',
{ 'kind': 'bbq', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/3497698404
assert_has_feature(
16, 10471, 25343, 'pois',
{ 'kind': 'bicycle_repair_station', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/2910259124
assert_has_feature(
16, 10798, 25903, 'pois',
{ 'kind': 'dive_centre', 'min_zoom': 16 })
#http://www.openstreetmap.org/node/2844159164
assert_has_feature(
16, 18308, 23892, 'pois',
{ 'kind': 'life_ring', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/4083762008
assert_has_feature(
16, 10805, 25927, 'pois',
{ 'kind': 'lifeguard_tower', 'min_zoom': 17 })
#http://www.openstreetmap.org/node/696801847
assert_has_feature(
16, 10597, 25151, 'pois',
{ 'kind': 'picnic_table', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/1128776802
assert_has_feature(
16, 10466, 25372, 'pois',
{ 'kind': 'shower', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/2287784170
assert_has_feature(
16, 10514, 25255, 'pois',
{ 'kind': 'waste_disposal', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/2640323071
assert_has_feature(
16, 10502, 25290, 'pois',
{ 'kind': 'watering_place', 'min_zoom': 18 })
#https://www.openstreetmap.org/node/3954505509
assert_has_feature(
16, 10174, 23848, 'pois',
{ 'kind': 'water_point', 'min_zoom': 18 })
#https://www.openstreetmap.org/node/3984333433
assert_has_feature(
16, 12348, 25363, 'pois',
{ 'kind': 'water_point', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/1978323412
assert_has_feature(
16, 10878, 25000, 'pois',
{ 'kind': 'pylon', 'min_zoom': 17 })
#http://www.openstreetmap.org/node/2398019418
assert_has_feature(
16, 10566, 25333, 'pois',
{ 'kind': 'power_pole', 'min_zoom': 18 })
#http://www.openstreetmap.org/node/1378418272
assert_has_feature(
16, 10480, 25352, 'pois',
{ 'kind': 'power_tower', 'min_zoom': 16 })
#http://www.openstreetmap.org/node/2890101480
assert_has_feature(
16, 11080, 26141, 'pois',
{ 'kind': 'petroleum_well', 'min_zoom': 17 }) | mit | Python |
|
3c4fd0477c7d6f9d0f30654271e73466d192d1e1 | Add data type for vectors | tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge | drudge/vec.py | drudge/vec.py | """Vectors and utilities."""
import collections.abc
from sympy import sympify
class Vec:
"""Vectors.
Vectors are the basic non-commutative quantities. Its objects consist of
an base and some indices. The base is allowed to be any Python object,
although small hashable objects, like string, are advised. The indices
are always sympified into SymPy expressions.
Its objects can be created directly by giving the base and indices,
or existing vector objects can be subscripted to get new ones. The
semantics is similar to Haskell functions.
Note that users cannot directly assign to the attributes of this class.
This class can be used by itself, it can also be subclassed for special
use cases.
"""
__slots__ = ['_base', '_indices']
def __init__(self, base, indices=()):
"""Initialize a vector.
Atomic indices are added as the only index. Iterable values will
have all of its entries added.
"""
self._base = base
if not isinstance(indices, collections.abc.Iterable):
indices = (indices,)
self._indices = tuple(sympify(i) for i in indices)
@property
def base(self):
"""Get the base of the vector."""
return self._base
@property
def indices(self):
"""Get the indices of the vector."""
return self._indices
def __getitem__(self, item):
"""Append the given indices to the vector.
When multiple new indices are to be given, they have to be given as a
tuple.
"""
if not isinstance(item, tuple):
item = (item,)
new_indices = tuple(sympify(i) for i in item)
# Pay attention to subclassing.
return type(self)(self.base, self.indices + new_indices)
def __repr__(self):
"""Form repr string form the vector."""
return ''.join([
type(self).__name__, '(', repr(self.base), ', (',
', '.join(repr(i) for i in self.indices),
'))'
])
def __str__(self):
"""Form a more readable string representation."""
return ''.join([
str(self.base), '[', ', '.join(str(i) for i in self.indices), ']'
])
def __hash__(self):
"""Compute the hash value of a vector."""
return hash((self.base, self.indices))
def __eq__(self, other):
"""Compares the equality of two vectors."""
return (
(isinstance(self, type(other)) or isinstance(other, type(self))) and
self.base == other.base and self.indices == other.indices
)
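# Usage sketch (the base and indices are illustrative):
#
#     a = Vec('a')        # bare vector with base 'a' and no indices
#     a1 = a[1]           # Vec('a', (1,))
#     a1x = a1['x']       # Vec('a', (1, x)) -- indices accumulate, Haskell-style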
| mit | Python |
|
d0d4688a8768dceeeb5d609a05de72fc24ac6b75 | Create pwned.py | 0x424D/crappy,0x424D/crappy | pwned/src/pwned.py | pwned/src/pwned.py | import hashlib, sys, urllib.request
def main():
    sha1 = hashlib.sha1(bytes(sys.argv[1], "utf-8"))  # avoid shadowing the builtin hash()
    digest = sha1.hexdigest().upper()
url = f"https://api.pwnedpasswords.com/range/{digest[:5]}"
request = urllib.request.Request(url, headers={"User-Agent":"API-Programming-Exercise"})
page = urllib.request.urlopen(request)
data = (page.read().decode('utf-8').split())
for i in data:
tmp = i.split(":")
if digest[:5] + tmp[0] == digest:
print(f"{sys.argv[1]} was found")
print(f"Hash {digest}, {tmp[1]} occurrences")
if __name__ == "__main__":
main()
| agpl-3.0 | Python |
|
a81e65eaabb0f3e99721854d2dcaa7dd1f8b0a21 | Create SVM.py | rmatam/Deep-Learning | 02.Algorithms/SVM.py | 02.Algorithms/SVM.py | # -*- coding: utf-8 -*-
"""
Created on Thu Jan 19 13:23:12 2017
@author: rmatam
"""
# -*- coding: utf-8 -*-
# 2015/01/11
# Script passed in py2 & py3 with Ubuntu 14.04 env.
# Prerequisite: pip install numpy scipy scikit-learn
# Further info: http://scikit-learn.org/stable/modules/generated/sklearn.feature_extraction.text.TfidfVectorizer.html
# Further info: http://scikit-learn.org/stable/modules/classes.html#module-sklearn.svm
# There are many descriptions of the available settings on those pages; check them if you need further configuration.
from sklearn import svm
from sklearn.feature_extraction.text import TfidfVectorizer as tfidf
vec =tfidf(smooth_idf =False)
svc = svm.SVC(kernel='poly') # further settings on website: http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
# training set, "List" type.
trainset =["good good good good good great great great", # corpus 1
"bad bad bad bad bad bad dirty dirty dirty", # corpus 2
]
trainTag =["pos", "neg"] # corpus's tags.
# test set, "List" type.
testset =["good good good good good great great great",
"good good good good good great great great bad",
"good good good good good great great great bad bad",
"good good good good good great great great bad bad bad",
"good good good good good great great great dirty",
"good good good good good great great great dirty dirty",
"good good good good good great great great dirty dirty dirty",
"bad bad bad bad bad bad dirty dirty dirty",
"bad bad bad bad bad bad dirty dirty dirty good",
"bad bad bad bad bad bad dirty dirty dirty good good",
"bad bad bad bad bad bad dirty dirty dirty good good good",
"bad bad bad bad bad bad dirty dirty dirty great",
"bad bad bad bad bad bad dirty dirty dirty great great",
"bad bad bad bad bad bad dirty dirty dirty great great great",
]
testTag =["pos", "pos", "pos", "pos", "pos", "pos", "pos",
"neg", "neg", "neg", "neg", "neg", "neg", "neg",
]
# training set is converting to the tfidf array.
trainRs =vec.fit_transform(trainset).toarray()
# test set is converting to the tfidf array.
testRs =vec.fit_transform(testset).toarray()
# the tfidf array result of training & test set.
print("Training set tfidf result.")
print(trainRs.shape)
print(trainRs)
print("----------------------------------------")
print("Test set tfidf result.")
print(testRs.shape)
print(testRs)
# training...
svc.fit(trainRs, trainTag) # further settings on website: http://scikit-learn.org/stable/modules/generated/sklearn.svm.SVC.html
# accuracy of the model.
print("----------------------------------------")
accuracy =svc.score(testRs, testTag)
print("SVM model accuracy:")
print(accuracy)
# predicting test set result.
print("----------------------------------------")
predict =svc.predict(testRs)
print("SVM model predict result:")
print(predict)
'''
Console Print:::
Training set tfidf result.
(2, 4)
[[ 0. 0. 0.85749293 0.51449576]
[ 0.89442719 0.4472136 0. 0. ]]
----------------------------------------
Test set tfidf result.
(14, 4)
[[ 0. 0. 0.85749293 0.51449576]
[ 0.16903085 0. 0.84515425 0.50709255]
[ 0.32444284 0. 0.81110711 0.48666426]
[ 0.45749571 0. 0.76249285 0.45749571]
[ 0. 0.16903085 0.84515425 0.50709255]
[ 0. 0.32444284 0.81110711 0.48666426]
[ 0. 0.45749571 0.76249285 0.45749571]
[ 0.89442719 0.4472136 0. 0. ]
[ 0.88465174 0.44232587 0.14744196 0. ]
[ 0.85714286 0.42857143 0.28571429 0. ]
[ 0.81649658 0.40824829 0.40824829 0. ]
[ 0.88465174 0.44232587 0. 0.14744196]
[ 0.85714286 0.42857143 0. 0.28571429]
[ 0.81649658 0.40824829 0. 0.40824829]]
----------------------------------------
SVM model accuracy:
1.0
----------------------------------------
SVM model predict result:
['pos' 'pos' 'pos' 'pos' 'pos' 'pos' 'pos' 'neg' 'neg' 'neg' 'neg' 'neg'
'neg' 'neg']
| apache-2.0 | Python |
|
806594afc5468d3cee183defba24501516b791f0 | add cities borders | opendataby/osm-geodata | belarus_city_borders.py | belarus_city_borders.py | from _helpers import cursor_wrap, dump
@cursor_wrap
def main(cursor):
sql = """
SELECT ct.osm_id, c.name AS country, '' AS region, '' AS subregion, ct.name AS city, ST_AsGeoJSON(ct.way)
FROM osm_polygon c
LEFT JOIN osm_polygon ct ON ST_Contains(c.way, ct.way)
WHERE c.osm_id = -59065 AND ct.admin_level = '4'
AND ct.tags->'place' IN ('city', 'town')
UNION
SELECT ct.osm_id, c.name AS country, r.name AS region, '' AS subregion, ct.name AS city, ST_AsGeoJSON(ct.way)
FROM osm_polygon c
LEFT JOIN osm_polygon r ON ST_Contains(c.way, r.way)
LEFT JOIN osm_polygon ct ON ST_Contains(r.way, ct.way)
WHERE c.osm_id = -59065 AND r.admin_level = '4' AND ct.admin_level = '6'
AND ct.tags->'place' IN ('city', 'town')
UNION
SELECT ct.osm_id, c.name AS country, r.name AS region, s.name AS subregion, ct.name AS city, ST_AsGeoJSON(ct.way)
FROM osm_polygon c
LEFT JOIN osm_polygon r ON ST_Contains(c.way, r.way)
LEFT JOIN osm_polygon s ON ST_Contains(r.way, s.way)
LEFT JOIN osm_polygon ct ON ST_Contains(s.way, ct.way)
WHERE c.osm_id = -59065 AND r.admin_level = '4' AND s.admin_level = '6'
AND ct.tags->'place' IN ('city', 'town')
"""
cursor.execute(sql)
dump(__file__, sorted(cursor.fetchall(), key=lambda item: item[1:5]),
('osmid', 'country', 'region', 'subregion', 'city', 'geojson'))
if __name__ == '__main__':
main()
| mit | Python |
|
fdb901a59e8dd61892f5033efe49e3bbbdae097f | Create CNlab1.py | gopika1697/DS,gopika1697/DS | CNlab1.py | CNlab1.py | #To check the validity of ip address
import sys
import textwrap
def valid(ip):
if ip.count('.')!=3:
print("Invalid")
sys.exit(0)
    ipl=ip.split('.')
for i in ipl:
if not i.isdigit():
print("Invalid")
sys.exit(0)
        if int(i)>255:
            print("Invalid")
            sys.exit(0)
    print("Valid")
#To calculate bit mask
inp=raw_input("Enter the ip address\n")
li=inp.split('/')
ipv=li[0]
valid(li[0])
n=int(li[1])
h=32-int(li[1])
mask= '1'* n + '0'*h
maskd= '.'.join(str(int(i,2)) for i in textwrap.wrap(mask, 8))
print "Mask : ", maskd
maskd_list= maskd.split('.')
ipv_list=ipv.split('.')
#To calculate network id
k=0
net_id=[]
for i in range(0,4):
net_id.append(str(int(maskd_list[k]) & int(ipv_list[k])))
k+=1
print "Network id : " , '.'.join(net_id)
#To calculate broadcast address
zoo=[]
for i in net_id:
zoo.append("{0:08b}".format(int(i)))
zoos = ''.join(zoo)
broad=[]
for i in textwrap.wrap(zoos[:n] + str(int(zoos[n:],2) | int( '1'* h)), 8):
broad.append(str(int(i,2)))
print "Broadcast address : ", '.'.join(broad)
#To calculate no. of subnets
print "Number of subnets", 2 ** (n)
#To calculate no. of hosts
print "Number of hosts", (2 ** (32-n)) - 2
#To print first address
print "First address : " + '.'.join(net_id[:3])+ '.' + str(int(net_id[3]) + 1)
#To print last address
print "Last address : " + '.'.join(broad[:3]) + '.' + str(int(broad[3]) - 1)
| apache-2.0 | Python |
|
bf6f58d5958275070c1018174217873ea08db904 | Add test pull task | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | nodeconductor/structure/tests/tasks.py | nodeconductor/structure/tests/tasks.py | from celery import shared_task
from nodeconductor.core import utils as core_utils
@shared_task
def pull_instance(serialized_instance, pulled_disk):
""" Test-only task that allows to emulate pull operation """
instance = core_utils.deserialize_instance(serialized_instance)
instance.disk = pulled_disk
instance.save()
| mit | Python |
|
7ce8c06c5447d89f941d482c84693e432384def6 | rename `file` to `filename` for clarity. | ergl/pysellus,Pysellus/pysellus,cgvarela/pysellus,angelsanz/pysellus | pysellus/loader.py | pysellus/loader.py | import os
import sys
from inspect import isfunction
from importlib import import_module
def load(path):
if _is_python_file(path):
sys.path.insert(0, os.path.dirname(path))
module = import_module(_get_module_name_from_path(path))
return _get_checks_from_module(module)
functions = []
for module in _get_modules(path):
functions += _get_checks_from_module(module)
return functions
def _get_module_name_from_path(path):
return _remove_extension(path.split('/')[-1])
def _get_checks_from_module(module):
"""
Gets all setup functions from the given module.
Setup functions are required to start with 'pscheck_'
"""
functions = []
for name in dir(module):
value = getattr(module, name)
if isfunction(value) and name.startswith('pscheck_'):
functions.append(value)
return functions
def _get_modules(directory):
sys.path.insert(0, directory)
return [
import_module(filename)
for filename in _get_python_files(directory)
]
def _get_python_files(directory):
return [
_remove_extension(filename)
for filename in os.listdir(directory)
if not filename.startswith('__') and _is_python_file(filename)
]
def _is_python_file(filename):
return filename.endswith('.py')
def _remove_extension(filename):
return filename[:-3]
| import os
import sys
from inspect import isfunction
from importlib import import_module
def load(path):
if _is_python_file(path):
sys.path.insert(0, os.path.dirname(path))
module = import_module(_get_module_name_from_path(path))
return _get_checks_from_module(module)
functions = []
for module in _get_modules(path):
functions += _get_checks_from_module(module)
return functions
def _get_module_name_from_path(path):
return _remove_file_extension(path.split('/')[-1])
def _get_checks_from_module(module):
"""
Gets all setup functions from the given module.
Setup functions are required to start with 'pscheck_'
"""
functions = []
for name in dir(module):
value = getattr(module, name)
if isfunction(value) and name.startswith('pscheck_'):
functions.append(value)
return functions
def _get_modules(directory):
sys.path.insert(0, directory)
return [
import_module(filename)
for filename in _get_python_files(directory)
]
def _get_python_files(directory):
return [
_remove_file_extension(file)
for file in os.listdir(directory)
if not file.startswith('__') and _is_python_file(file)
]
def _is_python_file(filename):
return filename.endswith('.py')
def _remove_file_extension(filename):
return filename[:-3]
| mit | Python |
cc5f55fa6eb6d0ecaaef1c1e269fb40c2731fef5 | Add test helpers | andrei-karalionak/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,NejcZupec/ggrc-core,prasannav7/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,edofic/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core | src/lib/test_helpers.py | src/lib/test_helpers.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]
"""
Utility classes for page objects used in tests.
Details:
Most of the tests require a sequence of primitive methods of the page
object. If the sequence repeats itself among tests, it should be shared in
this module.
"""
import uuid
from lib import base
from lib.constants.test import create_new_program
class LhnMenu(base.Test):
@staticmethod
def create_new_program():
pass
class ModalNewProgramPage(base.Test):
"""Methods for simulating common user actions"""
@staticmethod
def enter_test_data(modal):
"""Fills out all fields in the modal
Args:
modal (lib.page.modal.new_program.NewProgramModal)
"""
unique_id = str(uuid.uuid4())
modal.enter_title(create_new_program.TITLE + unique_id)
modal.enter_description(
create_new_program.DESCRIPTION_SHORT)
modal.enter_notes(
create_new_program.NOTES_SHORT)
modal.enter_code(create_new_program.CODE + unique_id)
modal.filter_and_select_primary_contact("example")
modal.filter_and_select_secondary_contact("example")
modal.enter_program_url(
create_new_program.PROGRAM_URL)
modal.enter_reference_url(
create_new_program.REFERENCE_URL)
modal.enter_effective_date_start_month()
modal.enter_stop_date_end_month()
| apache-2.0 | Python |
|
8e7350cbfc96541d9a3ddc970309c60793bb4126 | fix TermsFacet | qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq | corehq/apps/es/facets.py | corehq/apps/es/facets.py | class FacetResult(object):
def __init__(self, raw, facet):
self.facet = facet
self.raw = raw
self.result = raw.get(self.facet.name, {}).get(self.facet.type, {})
class Facet(object):
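    """Base class for Elasticsearch facets; subclasses set name, type, and params."""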
name = None
type = None
params = None
result_class = FacetResult
def __init__(self):
raise NotImplementedError()
def parse_result(self, result):
return self.result_class(result, self)
class TermsResult(FacetResult):
def counts_by_term(self):
return {d['term']: d['count'] for d in self.result}
class TermsFacet(Facet):
type = "terms"
result_class = TermsResult
def __init__(self, term, name, size=None):
        assert name.isalnum(), "name must be a valid python variable name"
self.name = name
self.params = {
"field": term,
}
if size is not None:
self.params["size"] = size
class DateHistogram(Facet):
type = "date_histogram"
def __init__(self, name, datefield, interval):
self.name = name
self.params = {
"field": datefield,
"interval": interval
}
| class FacetResult(object):
def __init__(self, raw, facet):
self.facet = facet
self.raw = raw
self.result = raw.get(self.facet.name, {}).get(self.facet.type, {})
class Facet(object):
name = None
type = None
params = None
result_class = FacetResult
def __init__(self):
raise NotImplementedError()
def parse_result(self, result):
return self.result_class(result, self)
class TermsResult(FacetResult):
def counts_by_term(self):
return {d['term']: d['count'] for d in self.result}
class TermsFacet(Facet):
type = "terms"
result_class = TermsResult
def __init__(self, name, term, size=None):
        assert name.isalnum(), "name must be a valid python variable name"
self.name = name
self.params = {
"field": term,
}
if size is not None:
self.params["size"] = size
class DateHistogram(Facet):
type = "date_histogram"
def __init__(self, name, datefield, interval):
self.name = name
self.params = {
"field": datefield,
"interval": interval
}
| bsd-3-clause | Python |
04ded12c05b20fc3a25956712f8e0fb1723c3edb | Add a snippet (python/warnings). | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/warnings.py | python/warnings.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
import warnings
def custom_formatwarning(message, category, filename, lineno, line=""):
"""Ignore everything except the message."""
return "Warning: " + str(message) + "\n"
def main():
"""Main function"""
warnings.formatwarning = custom_formatwarning
warnings.warn("Foo", UserWarning)
if __name__ == '__main__':
main()
| mit | Python |
|
630413b6bdc385095fe8da549b691d54fc6a4504 | Add ITWeek.py | Ophixia/scrape | ITWeek.py | ITWeek.py | import requests
from bs4 import BeautifulSoup
def main():
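    # Fetch the exhibitor list page and print the text of every table row.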
url = 'https://ex-portal3.reed.jp/list/SODECS2017_ja.html'
res = requests.get(url)
soup = BeautifulSoup(res.content, 'html.parser')
companies = soup.find_all('tr')
for company in companies:
print(company.text)
if __name__ == '__main__':
main()
| mit | Python |
|
1f71153cf814f7d34835cea6eafe44683035d874 | Add compare_files.py | theDrake/python-experiments | compare_files.py | compare_files.py | import difflib
def compare_files(filename1, filename2):
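    """Print a context diff of the two files; return True if they are identical."""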
f = open(filename1, "r")
filelines1 = f.readlines()
f.close()
f = open(filename2, "r")
filelines2 = f.readlines()
f.close()
diffs = difflib.context_diff(filelines1,
filelines2,
fromfile=filename1,
tofile=filename2)
count = 0
for line in diffs:
print line,
count += 1
return count == 0
| mit | Python |
|
3ce2e0b8825c7abc219a812c5abda45184fbfdec | add wot wikia plugin | rascul/botwot | plugins/wotwikia.py | plugins/wotwikia.py | """ WoT Wikia Plugin (botwot plugins.wiki) """
# Copyright 2015 Ray Schulz <https://rascul.io>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import requests
from pyaib.plugins import keyword, plugin_class
@plugin_class
class WotWikia(object):
def __init__(self, context, config):
pass
@keyword("wot")
def keyword_wot(self, context, msg, trigger, args, kargs):
"""
<query> - Search the WoT Wikia for <query>
"""
target_user = ""
query = ""
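        # "<query> | <nick>" syntax addresses the reply to another user.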
if len(args) >= 3 and args[-2] == "|":
target_user = args[-1]
query = " ".join(args[:-2])
else:
query = " ".join(args)
url = "http://wot.wikia.com/api/v1/Search/List"
payload = {'query': query, 'limit': 1}
r = requests.get(url, params=payload)
j = json.loads(r.text)
        if j and j.get('items'):  # an empty result list would make items[0] fail
if target_user:
msg.reply("%s: %s" % (target_user, j['items'][0]['url']))
else:
msg.reply(j['items'][0]['url'])
| apache-2.0 | Python |
|
46818f540d48bd967e8e0e5d846f0757f2ca6c1c | Add test for set_shard() | peastman/deepchem,peastman/deepchem,deepchem/deepchem,deepchem/deepchem | deepchem/data/tests/test_setshard.py | deepchem/data/tests/test_setshard.py | import deepchem as dc
import numpy as np
def test_setshard_with_X_y():
"""Test setharding on a simple example"""
X = np.random.rand(10, 3)
y = np.random.rand(10,)
dataset = dc.data.DiskDataset.from_numpy(X, y)
assert dataset.get_shape()[0][0] == 10
assert dataset.get_shape()[1][0] == 10
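    # Drop the first row of each shard and write the shard back in place.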
for i, (X, y, w, ids) in enumerate(dataset.itershards()):
X = X[1:]
y = y[1:]
w = w[1:]
ids = ids[1:]
dataset.set_shard(i, X, y, w, ids)
assert dataset.get_shape()[0][0] == 9
assert dataset.get_shape()[1][0] == 9
| mit | Python |
|
a8bbbb77e2036b66a5083bd2a1393b0de588af0c | Rename to alg_count_changes.py & count_changes() | bowen0701/algorithms_data_structures | alg_count_changes.py | alg_count_changes.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
"""Count Changes.
Count how many distinct ways you can make change for that amount.
Assume that you have an infinite number of each kind of coin.
"""
def count_changes_recur(amount, coins, n):
"""Count changes by recursion.
Time complexity: O(2^n).
Space complexity: O(1).
"""
if amount < 0:
return 0
if amount == 0:
return 1
# When number of coins is 0 but there is still amount remaining.
if n < 0 and amount >= 1:
return 0
# Compute ways with coin n included plus that with coin excluded.
count_in = count_changes_recur(amount - coins[n], coins, n)
count_ex = count_changes_recur(amount, coins, n - 1)
count = count_in + count_ex
return count
def _count_changes_memo(amount, coins, T, n):
    """Helper function for count_changes_memo()."""
    if amount == 0:
        return 1
    if amount < 0:
        return 0
    if n < 0 and amount >= 1:
        return 0
    # Reuse the memoized count when this (coin, amount) state was solved before.
    if T[n][amount]:
        return T[n][amount]
    count_in = _count_changes_memo(amount - coins[n], coins, T, n)
    count_ex = _count_changes_memo(amount, coins, T, n - 1)
    T[n][amount] = count_in + count_ex
    return T[n][amount]
def count_changes_memo(amount, coins, n):
"""Count changes by top-bottom dynamic programming:
recursion + memoization.
Time complexity: O(a * c), where a is amount, and c is number of coins.
Space complexity: O(a * c).
"""
T = [[0] * (amount + 1) for c in range(n + 1)]
for c in range(n + 1):
T[c][0] = 1
return _count_changes_memo(amount, coins, T, n)
def count_changes_dp(amount, coins):
"""Count changes by bottom-up dynamic programming.
Time complexity: O(a * c), where a is amount, and c is number of coins.
Space complexity: O(a * c).
"""
n = len(coins)
T = [[0] * (amount + 1) for c in range(n)]
for c in range(n):
T[c][0] = 1
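    # T[c][a] holds the number of ways to make amount a with the first c + 1 coins.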
for c in range(n):
for a in range(1, amount + 1):
if a >= coins[c]:
count_in = T[c][a - coins[c]]
else:
count_in = 0
if c >= 1:
count_ex = T[c - 1][a]
else:
count_ex = 0
T[c][a] = count_in + count_ex
return T[-1][-1]
def main():
import time
amount = 5
coins = [1, 2, 3] # Ans = 5.
n = len(coins) - 1
start_time = time.time()
print('Make change by recursion: {}'
.format(count_changes_recur(amount, coins, n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('Make change by memo: {}'
.format(count_changes_memo(amount, coins, n)))
print('Time: {}'.format(time.time() - start_time))
start_time = time.time()
print('Make change by DP: {}'
.format(count_changes_dp(amount, coins)))
print('Time: {}'.format(time.time() - start_time))
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
|
6ad081e91e337e1627b70674109f45ba35248f8c | Add missing migration file to the repo | cgwire/zou | zou/migrations/versions/e839d6603c09_add_person_id_to_shot_history.py | zou/migrations/versions/e839d6603c09_add_person_id_to_shot_history.py | """add person id to shot history
Revision ID: e839d6603c09
Revises: 346250b5304c
Create Date: 2020-12-14 12:00:19.045783
"""
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import uuid
# revision identifiers, used by Alembic.
revision = 'e839d6603c09'
down_revision = '346250b5304c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('entity_version', sa.Column('person_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), default=uuid.uuid4, nullable=True))
op.create_index(op.f('ix_entity_version_person_id'), 'entity_version', ['person_id'], unique=False)
op.create_foreign_key(None, 'entity_version', 'person', ['person_id'], ['id'])
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_constraint(None, 'entity_version', type_='foreignkey')
op.drop_index(op.f('ix_entity_version_person_id'), table_name='entity_version')
op.drop_column('entity_version', 'person_id')
# ### end Alembic commands ###
| agpl-3.0 | Python |
|
0538523f617ec1d410861b52a647c788c06c267a | Fix llg tests. | fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python | pyoommf/test_llg.py | pyoommf/test_llg.py | from llg import LLG
def test_llg_mif():
t = 1.5e-9
m_init = (0, 1, 0)
Ms = 1e6
alpha = 0.01
gamma = 2.21e5
name = 'llgtest'
llg = LLG(t, m_init, Ms, alpha, gamma, name)
mif_string = llg.get_mif()
lines = mif_string.split('\n')
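    # The generated MIF has a fixed layout, so values are checked at known line offsets.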
assert 'Specify Oxs_RungeKuttaEvolve {' in lines[0]
line2 = lines[1].split()
assert float(line2[1]) == alpha
line3 = lines[2].split()
assert float(line3[1]) == gamma
line8 = lines[8].split()
assert float(line8[1]) == t
line11 = lines[11].split()
assert float(line11[1]) == Ms
line13 = lines[13].split()
assert float(line13[1][1:]) == m_init[0]
assert float(line13[2]) == m_init[1]
assert float(line13[3][:-1]) == m_init[2]
def test_llg_formatting():
t = 1.5e-9
m_init = (0, 1, 0)
Ms = 1e6
alpha = 0.01
gamma = 2.21e5
name = 'llgtest'
llg = LLG(t, m_init, Ms, alpha, gamma, name)
mif_string = llg.get_mif()
assert mif_string[0] == 'S'
assert mif_string[-1] == '\n'
assert mif_string[-2] == '\n'
| from llg import LLG
def test_llg_mif():
t = 1.5e-9
m_init = (0, 1, 0)
Ms = 1e6
alpha = 0.01
gamma = 2.21e5
llg = LLG(t, m_init, Ms, alpha, gamma)
mif_string = llg.get_mif()
lines = mif_string.split('\n')
assert 'Specify Oxs_RungeKuttaEvolve {' in lines[0]
line2 = lines[1].split()
assert float(line2[1]) == alpha
line3 = lines[2].split()
assert float(line3[1]) == gamma
line8 = lines[8].split()
assert float(line8[1]) == t
line11 = lines[11].split()
assert float(line11[1]) == Ms
line13 = lines[13].split()
assert float(line13[1][1:]) == m_init[0]
assert float(line13[2]) == m_init[1]
assert float(line13[3][:-1]) == m_init[2]
def test_llg_formatting():
t = 1.5e-9
m_init = (0, 1, 0)
Ms = 1e6
alpha = 0.01
gamma = 2.21e5
llg = LLG(t, m_init, Ms, alpha, gamma)
mif_string = llg.get_mif()
assert mif_string[0] == 'S'
assert mif_string[-1] == '\n'
assert mif_string[-2] == '\n'
| bsd-2-clause | Python |
22252d6978f237a2a46415dcf54d4adbed92b1ce | Add LLG tests. | ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,fangohr/oommf-python | pyoommf/test_llg.py | pyoommf/test_llg.py | from llg import LLG
def test_llg_mif():
t = 1.5e-9
m_init = (0, 1, 0)
Ms = 1e6
alpha = 0.01
gamma = 2.21e5
llg = LLG(t, m_init, Ms, alpha, gamma)
mif_string = llg.get_mif()
lines = mif_string.split('\n')
assert 'Specify Oxs_RungeKuttaEvolve {' in lines[0]
line2 = lines[1].split()
assert float(line2[1]) == alpha
line3 = lines[2].split()
assert float(line3[1]) == gamma
line8 = lines[8].split()
assert float(line8[1]) == t
line11 = lines[11].split()
assert float(line11[1]) == Ms
line13 = lines[13].split()
assert float(line13[1][1:]) == m_init[0]
assert float(line13[2]) == m_init[1]
assert float(line13[3][:-1]) == m_init[2]
def test_llg_formatting():
t = 1.5e-9
m_init = (0, 1, 0)
Ms = 1e6
alpha = 0.01
gamma = 2.21e5
llg = LLG(t, m_init, Ms, alpha, gamma)
mif_string = llg.get_mif()
assert mif_string[0] == 'S'
assert mif_string[-1] == '\n'
assert mif_string[-2] == '\n'
| bsd-2-clause | Python |
|
b21fbb09b33e40a33ad3ea33b0394fed421c8a6e | add num02 | muzuco/pythonstudy2014 | pythonTest/num02.py | pythonTest/num02.py | def reverse(x):
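    """Print the characters of x in reverse order."""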
    changeTuple = tuple(x)
    reverseTuple = changeTuple[::-1]
    print(''.join(reverseTuple))
test = "this is test string"
reverse(test)
| apache-2.0 | Python |
|
edc35e4aefe336eb1bf02dbf7104925389276fa6 | Add shellcheck for sh filetype | maralla/vim-linter,maralla/validator.vim,maralla/vim-fixup,maralla/vim-linter,maralla/vim-fixup | pythonx/lints/sh.py | pythonx/lints/sh.py | # -*- coding: utf-8 -*-
from validator import Validator
class Sh(Validator):
__filetype__ = "sh"
checker = "shellcheck"
args = "-x -f gcc"
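    # Matches shellcheck's gcc-style output: "file:line:col: severity: message".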
regex = r"""
.+:
(?P<lnum>\d+):
(?P<col>\d+):
.*
\s
(
(?P<error>error)
|
(?P<warning>warning)
):
\s
(?P<text>.*)"""
| mit | Python |
|
c25cebf31648466111cb3d576e0a398bb4220ccf | Add test for sabnzbd cleanupfilename.py | FreekKalter/linux-scripts,FreekKalter/linux-scripts,FreekKalter/linux-scripts,FreekKalter/linux-scripts | sabnzbd/test_cleanupfilename.py | sabnzbd/test_cleanupfilename.py | import unittest
from cleanupfilename import rename
class TestRename(unittest.TestCase):
files = []
dirs = []
def setUp(self):
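        # Each entry is an (input, expected output) pair for rename().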
self.files = [('filename-sample.x264.mp4', 'filename.mp4'),
('filename.mp4', 'filename.mp4')]
self.dirs = [('filename sample mp4', 'filename'),
('filename 540p', 'filename'),
('filename [web-DL] part001.', 'filename'),
('actual.file.name-1080p.BluRay.x264-GECKOS[rarbg]', 'actual file name'),
]
def test_list(self):
for file, output in self.files:
self.assertEqual(rename(file, False), output)
for file, output in self.dirs:
self.assertEqual(rename(file, True), output)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |